Source-Changes-HG archive
[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index][Old Index]
[src/trunk]: src/sys/arch/sparc64/sparc64 Restore revision 1.102.
details: https://anonhg.NetBSD.org/src/rev/d6d8b85d1e30
branches: trunk
changeset: 498402:d6d8b85d1e30
user: pk <pk%NetBSD.org@localhost>
date: Sun Oct 22 21:28:27 2000 +0000
description:
Restore revision 1.102.
I hope it sticks this time.
diffstat:
sys/arch/sparc64/sparc64/locore.s | 166 +++++++++++++++++++------------------
1 files changed, 86 insertions(+), 80 deletions(-)
diffs (truncated from 512 to 300 lines):
diff -r 83a6412af0ad -r d6d8b85d1e30 sys/arch/sparc64/sparc64/locore.s
--- a/sys/arch/sparc64/sparc64/locore.s Sun Oct 22 20:25:31 2000 +0000
+++ b/sys/arch/sparc64/sparc64/locore.s Sun Oct 22 21:28:27 2000 +0000
@@ -1,4 +1,4 @@
-/* $NetBSD: locore.s,v 1.104 2000/10/20 16:43:54 eeh Exp $ */
+/* $NetBSD: locore.s,v 1.105 2000/10/22 21:28:27 pk Exp $ */
/*
* Copyright (c) 1996-2000 Eduardo Horvath
* Copyright (c) 1996 Paul Kranenburg
@@ -239,8 +239,8 @@
#define SPLIT(r0, r1) \
srl r0, 0, r1; \
srlx r0, 32, r0
-
-
+
+
/*
* A handy macro for maintaining instrumentation counters.
* Note that this clobbers %o0 and %o1. Normal usage is
@@ -1552,7 +1552,7 @@
stw r3, [r2+TRACEPTR]; \
1:
-
+
.text
traceit:
set trap_trace, %g2
@@ -2180,7 +2180,7 @@
brz,pn %g4, winfix ! NULL entry? check somewhere else
add %g5, %g4, %g6
DLFLUSH(%g6,%g5)
-1:
+1:
ldxa [%g6] ASI_PHYS_CACHED, %g4
DLFLUSH2(%g5)
brgez,pn %g4, winfix ! Entry invalid? Punt
@@ -2336,7 +2336,7 @@
brz,pn %g4, winfix ! NULL entry? check somewhere else
add %g5, %g4, %g6
DLFLUSH(%g6,%g5)
-1:
+1:
ldxa [%g6] ASI_PHYS_CACHED, %g4
DLFLUSH2(%g5)
brgez,pn %g4, winfix ! Entry invalid? Punt
@@ -3288,7 +3288,7 @@
brz,pn %g4, textfault ! NULL entry? check somewhere else
add %g5, %g4, %g6
DLFLUSH(%g6,%g5)
-1:
+1:
ldxa [%g6] ASI_PHYS_CACHED, %g4
DLFLUSH2(%g5)
brgez,pn %g4, textfault
@@ -4342,26 +4342,26 @@
mov 8, %l7
#ifdef INTRLIST
-1:
+1:
LDPTR [%l4], %l2 ! Check a slot
brz,pn %l2, intrcmplt ! Empty list?
-
+
mov %g0, %l7
CASPTR [%l4] ASI_N, %l2, %l7 ! Grab the entire list
cmp %l7, %l2
bne,pn %icc, 1b
add %sp, CC64FSZ+STKB, %o2 ! tf = %sp + CC64FSZ + STKB
-2:
+2:
LDPTR [%l2 + IH_FUN], %o4 ! ih->ih_fun
LDPTR [%l2 + IH_ARG], %o0 ! ih->ih_arg
-
+
jmpl %o4, %o7 ! handled = (*ih->ih_fun)(...)
movrz %o0, %o2, %o0 ! arg = (arg == 0) ? arg : tf
LDPTR [%l2 + IH_PEND], %l7 ! Clear pending flag
LDPTR [%l2 + IH_CLR], %l1
STPTR %g0, [%l2 + IH_PEND] ! Clear pending flag
membar #Sync
-
+
brz,pn %l1, 0f
add %l5, %o0, %l5
stx %g0, [%l1] ! Clear intr source
@@ -5363,7 +5363,7 @@
dostart:
wrpr %g0, 0, %tick ! XXXXXXX clear %tick register for now
mov 1, %g1
- sllx %g1, 63, %g1
+ sllx %g1, 63, %g1
wr %g1, TICK_CMPR ! XXXXXXX clear and disable %tick_cmpr as well
/*
* Startup.
@@ -5557,10 +5557,9 @@
* %l4 = tmp
* %l5 = tmp && TLB_TAG_ACCESS
* %l6 = tmp && CTX_PRIMARY
- * %l7 = routine to jump to
+ * %l7 = DATA_START
* %g1 = TLB Data for data segment w/o low bits
* %g2 = TLB Data for data segment w/low bits
- * %g3 = DATA_START
*/
#ifdef NO_VCACHE
@@ -5573,7 +5572,7 @@
wrpr %g0, 0, %tl ! Make sure we're not in NUCLEUS mode
sethi %hi(KERNBASE), %l0 ! Find our xlation
- sethi %hi(DATA_START), %g3
+ sethi %hi(DATA_START), %l7
set _C_LABEL(ktextp), %l1 ! Find phys addr
ldx [%l1], %l1 ! The following gets ugly: We need to load the following mask
@@ -5607,10 +5606,10 @@
srlx %l2, 32, %o3
call _C_LABEL(prom_printf)
srl %l2, 0, %o4
-
+
set 1f, %o0 ! Debug printf for DATA page
- srlx %g3, 32, %o1
- srl %g3, 0, %o2
+ srlx %l7, 32, %o1
+ srl %l7, 0, %o2
or %g1, TTE_L|TTE_CP|TTE_CV|TTE_P|TTE_W, %l2 ! And low bits: L=1|CP=1|CV=1|E=0|P=1|W=1(ugh)|G=0
srlx %l2, 32, %o3
call _C_LABEL(prom_printf)
@@ -5623,7 +5622,7 @@
#endif
set 0x400000, %l3 ! Demap all of kernel dmmu text segment
mov %l0, %l5
- mov %g3, %l6
+ mov %l7, %l6
set 0x2000, %l4 ! 8K page size
add %l0, %l3, %l3
0:
@@ -5644,9 +5643,11 @@
set TLB_TAG_ACCESS, %l5 ! Now map it back in with a locked TTE
#ifdef NO_VCACHE
- or %l1, TTE_L|TTE_CP|TTE_P, %l2 ! And low bits: L=1|CP=1|CV=0(ugh)|E=0|P=1|W=0|G=0
+ ! And low bits: L=1|CP=1|CV=0(ugh)|E=0|P=1|W=0|G=0
+ or %l1, TTE_L|TTE_CP|TTE_P, %l2
#else
- or %l1, TTE_L|TTE_CP|TTE_CV|TTE_P, %l2 ! And low bits: L=1|CP=1|CV=1|E=0|P=1|W=0|G=0
+ ! And low bits: L=1|CP=1|CV=1|E=0|P=1|W=0|G=0
+ or %l1, TTE_L|TTE_CP|TTE_CV|TTE_P, %l2
#endif
set 1f, %o5
stxa %l0, [%l5] ASI_DMMU ! Same for DMMU
@@ -5655,11 +5656,13 @@
membar #Sync ! We may need more membar #Sync in here
flush %o5 ! Make IMMU see this too
#ifdef NO_VCACHE
- or %g1, TTE_L|TTE_CP|TTE_P|TTE_W, %l2 ! And low bits: L=1|CP=1|CV=0(ugh)|E=0|P=1|W=1|G=0
+ ! And low bits: L=1|CP=1|CV=0(ugh)|E=0|P=1|W=1|G=0
+ or %g1, TTE_L|TTE_CP|TTE_P|TTE_W, %l2
#else
- or %g1, TTE_L|TTE_CP|TTE_CV|TTE_P|TTE_W, %l2 ! And low bits: L=1|CP=1|CV=1|E=0|P=1|W=1|G=0
-#endif
- stxa %g3, [%l5] ASI_DMMU ! Same for DMMU
+ ! And low bits: L=1|CP=1|CV=1|E=0|P=1|W=1|G=0
+ or %g1, TTE_L|TTE_CP|TTE_CV|TTE_P|TTE_W, %l2
+#endif
+ stxa %l7, [%l5] ASI_DMMU ! Same for DMMU
membar #Sync ! We may need more membar #Sync in here
stxa %l2, [%g0] ASI_DMMU_DATA_IN ! Same for DMMU
membar #Sync ! We may need more membar #Sync in here
@@ -5679,15 +5682,15 @@
.text
#endif
#if 1
- !!
- !! Finished the DMMU, now we need to do the IMMU which is more difficult 'cause
- !! we're execting instructions through the IMMU while we're flushing it. We need
- !! to remap the entire kernel to a new context, flush the entire context 0 IMMU,
- !! map it back into context 0, switch to context 0, and flush context 1.
- !!
- !!
- !! First, map in the kernel text as context==1
- !!
+ /*
+ * Finished the DMMU, now we need to do the IMMU which is more
+ * difficult because we're executing instructions through the IMMU
+ * while we're flushing it. We need to remap the entire kernel
+ * to a new context, flush the entire context 0 IMMU, map it back
+ * into context 0, switch to context 0, and flush context 1.
+ *
+ * First, map in the kernel text as context==1
+ */
set TLB_TAG_ACCESS, %l5
or %l1, TTE_CP|TTE_P, %l2 ! And low bits: L=0|CP=1|CV=0|E=0|P=1|G=0
or %l0, 1, %l4 ! Context = 1
@@ -5703,7 +5706,7 @@
membar #Sync ! We may need more membar #Sync in here
flush %o5 ! Make IMMU see this too
- or %g3, 1, %l4 ! Do the data segment, too
+ or %l7, 1, %l4 ! Do the data segment, too
or %g1, TTE_CP|TTE_P, %l2 ! And low bits: L=0|CP=1|CV=0|E=0|P=1|G=0
stxa %l2, [%g0] ASI_DMMU_DATA_IN ! Store it
membar #Sync ! We may need more membar #Sync in here
@@ -5730,7 +5733,7 @@
!!
set 0x400000, %l3 ! Demap all of kernel immu segment
or %l0, 0x020, %l5 ! Context = Nucleus
- or %g3, 0x020, %g5
+ or %l7, 0x020, %g5
set 0x2000, %l4 ! 8K page size
add %l0, %l3, %l3
0:
@@ -5750,9 +5753,11 @@
!!
set TLB_TAG_ACCESS, %l5
#ifdef NO_VCACHE
- or %l1, TTE_L|TTE_CP|TTE_P, %l2 ! And low bits: L=1|CP=1|CV=0|E=0|P=1|W=0|G=0
+ ! And low bits: L=1|CP=1|CV=0|E=0|P=1|W=0|G=0
+ or %l1, TTE_L|TTE_CP|TTE_P, %l2
#else
- or %l1, TTE_L|TTE_CP|TTE_CV|TTE_P, %l2 ! And low bits: L=1|CP=1|CV=1|E=0|P=1|W=0|G=0
+ ! And low bits: L=1|CP=1|CV=1|E=0|P=1|W=0|G=0
+ or %l1, TTE_L|TTE_CP|TTE_CV|TTE_P, %l2
#endif
set 1f, %o5
stxa %l0, [%l5] ASI_IMMU ! Make IMMU point to it
@@ -6249,12 +6254,12 @@
#endif
!! Try using cache_flush_phys for a change.
-
+
mov -1, %o1 ! Generate mask for tag: bits [29..2]
srlx %o0, 13-2, %o2 ! Tag is VA bits <40:13> in bits <29:2>
srl %o1, 2, %o1 ! Now we have bits <29:0> set
andn %o1, 3, %o1 ! Now we have bits <29:2> set
-
+
set (2*NBPG), %o5
clr %o4
1:
@@ -6269,7 +6274,7 @@
2:
brnz,pt %o5, 1b
inc 16, %o4
-
+
!! Now do the I$
mov -1, %o1 ! Generate mask for tag: bits [35..8]
srlx %o0, 13-8, %o2
@@ -6290,7 +6295,7 @@
2:
brnz,pt %o5, 1b
inc 16, %o4
-
+
sethi %hi(KERNBASE), %o5
flush %o5
membar #Sync
@@ -6320,15 +6325,15 @@
movrz %o4, %o3, %o4 ! If start == end we need to wrap
!! Clear from start to end
-1:
+1:
stxa %g0, [%o0] ASI_DCACHE_TAG
dec 16, %o4
- xor %o5, %o0, %o3 ! Second way
+ xor %o5, %o0, %o3 ! Second way
stxa %g0, [%o0] ASI_ICACHE_TAG
stxa %g0, [%o3] ASI_ICACHE_TAG
brgz,pt %o4, 1b
inc 16, %o0
-2:
+2:
sethi %hi(KERNBASE), %o5
flush %o5
membar #Sync
@@ -6337,15 +6342,15 @@
!! We got a hole. Clear from start to hole
clr %o4
-3:
+3:
stxa %g0, [%o4] ASI_DCACHE_TAG
dec 16, %o1
- xor %o5, %o4, %g1 ! Second way
+ xor %o5, %o4, %g1 ! Second way
stxa %g0, [%o4] ASI_ICACHE_TAG
stxa %g0, [%g1] ASI_ICACHE_TAG
brgz,pt %o1, 3b
inc 16, %o4
-
+
!! Now clear to the end.
sub %o3, %o2, %o4 ! Size to clear (NBPG - end)
Home |
Main Index |
Thread Index |
Old Index