Source-Changes-HG archive
[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index][Old Index]
[src/trunk]: src/sys/arch/sparc64/include Fixup signed/unsigned issues so 32-...
details: https://anonhg.NetBSD.org/src/rev/28ed1ae6f93c
branches: trunk
changeset: 487496:28ed1ae6f93c
user: eeh <eeh%NetBSD.org@localhost>
date: Thu Jun 08 17:43:24 2000 +0000
description:
Fixup signed/unsigned issues so 32-bit addresses are not sign extended.
diffstat:
sys/arch/sparc64/include/ctlreg.h | 128 +++++++++++++++++++------------------
1 files changed, 65 insertions(+), 63 deletions(-)
diffs (truncated from 456 to 300 lines):
diff -r f736c4f12544 -r 28ed1ae6f93c sys/arch/sparc64/include/ctlreg.h
--- a/sys/arch/sparc64/include/ctlreg.h Thu Jun 08 17:41:46 2000 +0000
+++ b/sys/arch/sparc64/include/ctlreg.h Thu Jun 08 17:43:24 2000 +0000
@@ -1,4 +1,4 @@
-/* $NetBSD: ctlreg.h,v 1.15 2000/06/07 09:16:41 pk Exp $ */
+/* $NetBSD: ctlreg.h,v 1.16 2000/06/08 17:43:24 eeh Exp $ */
/*
* Copyright (c) 1996-1999 Eduardo Horvath
@@ -434,23 +434,25 @@
/* load byte from alternate address space */
#ifdef DCACHE_BUG
#define lduba(loc, asi) ({ \
- register int _lduba_v; \
+ register unsigned int _lduba_v; \
if (PHYS_ASI(asi)) { \
__asm __volatile("wr %2,%%g0,%%asi; " \
" andn %1,0x1f,%0; stxa %%g0,[%0] %3; membar #Sync; " \
" lduba [%1]%%asi,%0" : "=&r" (_lduba_v) : \
- "r" ((long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
+ "r" ((unsigned long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
} else { \
- __asm __volatile("wr %2,%%g0,%%asi; lduba [%1]%%asi,%0" : "=r" (_lduba_v) : \
- "r" ((long)(loc)), "r" (asi)); \
+ __asm __volatile("wr %2,%%g0,%%asi; lduba [%1]%%asi,%0" : \
+ "=r" (_lduba_v) : \
+ "r" ((unsigned long)(loc)), "r" (asi)); \
} \
_lduba_v; \
})
#else
#define lduba(loc, asi) ({ \
- register int _lduba_v; \
- __asm __volatile("wr %2,%%g0,%%asi; lduba [%1]%%asi,%0" : "=r" (_lduba_v) : \
- "r" ((long)(loc)), "r" (asi)); \
+ register unsigned int _lduba_v; \
+ __asm __volatile("wr %2,%%g0,%%asi; lduba [%1]%%asi,%0" : \
+ "=r" (_lduba_v) : \
+ "r" ((unsigned long)(loc)), "r" (asi)); \
_lduba_v; \
})
#endif
@@ -458,7 +460,7 @@
/* load byte from alternate address space */
#ifdef DCACHE_BUG
#define lduba(loc, asi) ({ \
- register int _lduba_v, _loc_hi, _pstate; \
+ register unsigned int _lduba_v, _loc_hi, _pstate; \
_loc_hi = (((u_int64_t)loc)>>32); \
if (PHYS_ASI(asi)) { \
__asm __volatile("wr %4,%%g0,%%asi; " \
@@ -466,25 +468,25 @@
" sllx %3,32,%0; or %0,%2,%0; wrpr %1,8,%%pstate; " \
" membar #Sync; lduba [%0]%%asi,%0; wrpr %1,0,%%pstate" : \
"=&r" (_lduba_v), "=&r" (_pstate) : \
- "r" ((long)(loc)), "r" (_loc_hi), \
+ "r" ((unsigned long)(loc)), "r" (_loc_hi), \
"r" (asi), "n" (ASI_DCACHE_TAG)); \
} else { \
__asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; " \
" rdpr %%pstate,%1; wrpr %1,8,%%pstate; " \
" or %0,%2,%0; lduba [%0]%%asi,%0; wrpr %1,0,%%pstate" : \
"=&r" (_lduba_v), "=&r" (_pstate) : \
- "r" ((long)(loc)), "r" (_loc_hi), "r" (asi)); \
+ "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
} \
_lduba_v; \
})
#else
#define lduba(loc, asi) ({ \
- register int _lduba_v, _loc_hi, _pstate; \
+ register unsigned int _lduba_v, _loc_hi, _pstate; \
_loc_hi = (((u_int64_t)loc)>>32); \
__asm __volatile("wr %4,%%g0,%%asi; rdpr %%pstate,%1; sllx %3,32,%0; " \
" wrpr %1,8,%%pstate; or %0,%2,%0; lduba [%0]%%asi,%0; wrpr %1,0,%%pstate" : \
"=&r" (_lduba_v), "=&r" (_pstate) : \
- "r" ((long)(loc)), "r" (_loc_hi), "r" (asi)); \
+ "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
_lduba_v; \
})
#endif
@@ -494,23 +496,23 @@
/* load half-word from alternate address space */
#ifdef DCACHE_BUG
#define lduha(loc, asi) ({ \
- register int _lduha_v; \
+ register unsigned int _lduha_v; \
if (PHYS_ASI(asi)) { \
__asm __volatile("wr %2,%%g0,%%asi; " \
" andn %1,0x1f,%0; stxa %%g0,[%0] %3; membar #Sync; " \
" lduha [%1]%%asi,%0" : "=&r" (_lduha_v) : \
- "r" ((long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
+ "r" ((unsigned long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
} else { \
__asm __volatile("wr %2,%%g0,%%asi; lduha [%1]%%asi,%0" : "=r" (_lduha_v) : \
- "r" ((long)(loc)), "r" (asi)); \
+ "r" ((unsigned long)(loc)), "r" (asi)); \
} \
_lduha_v; \
})
#else
#define lduha(loc, asi) ({ \
- register int _lduha_v; \
+ register unsigned int _lduha_v; \
__asm __volatile("wr %2,%%g0,%%asi; lduha [%1]%%asi,%0" : "=r" (_lduha_v) : \
- "r" ((long)(loc)), "r" (asi)); \
+ "r" ((unsigned long)(loc)), "r" (asi)); \
_lduha_v; \
})
#endif
@@ -518,32 +520,32 @@
/* load half-word from alternate address space */
#ifdef DCACHE_BUG
#define lduha(loc, asi) ({ \
- register int _lduha_v, _loc_hi, _pstate; \
+ register unsigned int _lduha_v, _loc_hi, _pstate; \
_loc_hi = (((u_int64_t)loc)>>32); \
if (PHYS_ASI(asi)) { \
__asm __volatile("wr %4,%%g0,%%asi; rdpr %%pstate,%1; " \
" andn %2,0x1f,%0; stxa %%g0,[%0] %5; wrpr %1,8,%%pstate; sllx %3,32,%0; " \
" or %0,%2,%0; membar #Sync; lduha [%0]%%asi,%0; wrpr %1,0,%%pstate" : \
"=&r" (_lduha_v), "=&r" (_pstate) : \
- "r" ((long)(loc)), "r" (_loc_hi), \
+ "r" ((unsigned long)(loc)), "r" (_loc_hi), \
"r" (asi), "n" (ASI_DCACHE_TAG)); \
} else { \
__asm __volatile("wr %4,%%g0,%%asi; rdpr %%pstate,%1; " \
" sllx %3,32,%0; wrpr %1,8,%%pstate; " \
" or %0,%2,%0; lduha [%0]%%asi,%0; wrpr %1,0,%%pstate" : \
"=&r" (_lduha_v), "=&r" (_pstate) : \
- "r" ((long)(loc)), "r" (_loc_hi), "r" (asi)); \
+ "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
} \
_lduha_v; \
})
#else
#define lduha(loc, asi) ({ \
- register int _lduha_v, _loc_hi, _pstate; \
+ register unsigned int _lduha_v, _loc_hi, _pstate; \
_loc_hi = (((u_int64_t)loc)>>32); \
__asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; rdpr %%pstate,%1; " \
" or %0,%2,%0; wrpr %1,8,%%pstate; lduha [%0]%%asi,%0; wrpr %1,0,%%pstate" : \
"=&r" (_lduha_v), "=&r" (_pstate) : \
- "r" ((long)(loc)), "r" (_loc_hi), "r" (asi)); \
+ "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
_lduha_v; \
})
#endif
@@ -558,10 +560,10 @@
__asm __volatile("wr %2,%%g0,%%asi; " \
" andn %1,0x1f,%0; stxa %%g0,[%0] %3; membar #Sync; " \
" lda [%1]%%asi,%0" : "=&r" (_lda_v) : \
- "r" ((long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
+ "r" ((unsigned long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
} else { \
__asm __volatile("wr %2,%%g0,%%asi; lda [%1]%%asi,%0" : "=r" (_lda_v) : \
- "r" ((long)(loc)), "r" (asi)); \
+ "r" ((unsigned long)(loc)), "r" (asi)); \
} \
_lda_v; \
})
@@ -573,10 +575,10 @@
__asm __volatile("wr %2,%%g0,%%asi; " \
" andn %1,0x1f,%0; stxa %%g0,[%0] %3; membar #Sync; " \
" ldswa [%1]%%asi,%0" : "=&r" (_lda_v) : \
- "r" ((long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
+ "r" ((unsigned long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
} else { \
__asm __volatile("wr %2,%%g0,%%asi; ldswa [%1]%%asi,%0" : "=r" (_lda_v) : \
- "r" ((long)(loc)), "r" (asi)); \
+ "r" ((unsigned long)(loc)), "r" (asi)); \
} \
_lda_v; \
})
@@ -584,14 +586,14 @@
#define lda(loc, asi) ({ \
register int _lda_v; \
__asm __volatile("wr %2,%%g0,%%asi; lda [%1]%%asi,%0" : "=r" (_lda_v) : \
- "r" ((long)(loc)), "r" (asi)); \
+ "r" ((unsigned long)(loc)), "r" (asi)); \
_lda_v; \
})
#define ldswa(loc, asi) ({ \
register int _lda_v; \
__asm __volatile("wr %2,%%g0,%%asi; ldswa [%1]%%asi,%0" : "=r" (_lda_v) : \
- "r" ((long)(loc)), "r" (asi)); \
+ "r" ((unsigned long)(loc)), "r" (asi)); \
_lda_v; \
})
#endif
@@ -606,14 +608,14 @@
" andn %2,0x1f,%0; stxa %%g0,[%0] %5; wrpr %1,8,%%pstate; " \
" sllx %3,32,%0; or %0,%2,%0; membar #Sync;lda [%0]%%asi,%0; " \
" wrpr %1,0,%%pstate" : "=&r" (_lda_v), "=&r" (_pstate) : \
- "r" ((long)(loc)), "r" (_loc_hi), \
+ "r" ((unsigned long)(loc)), "r" (_loc_hi), \
"r" (asi), "n" (ASI_DCACHE_TAG)); \
} else { \
__asm __volatile("wr %4,%%g0,%%asi; rdpr %%pstate,%1;" \
" sllx %3,32,%0; wrpr %1,8,%%pstate;" \
" or %0,%2,%0; lda [%0]%%asi,%0; wrpr %1,0,%%pstate" : \
"=&r" (_lda_v), "=&r" (_pstate) : \
- "r" ((long)(loc)), "r" (_loc_hi), "r" (asi)); \
+ "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
} \
_lda_v; \
})
@@ -627,14 +629,14 @@
" andn %2,0x1f,%0; stxa %%g0,[%0] %5; wrpr %1,8,%%pstate; sllx %3,32,%0;" \
" or %0,%2,%0; membar #Sync; ldswa [%0]%%asi,%0; wrpr %1,0,%%pstate" : \
"=&r" (_lda_v), "=&r" (_pstate) : \
- "r" ((long)(loc)), "r" (_loc_hi), \
+ "r" ((unsigned long)(loc)), "r" (_loc_hi), \
"r" (asi), "n" (ASI_DCACHE_TAG)); \
} else { \
__asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0;" \
" rdpr %%pstate,%1; wrpr %1,8,%%pstate;" \
" or %0,%2,%0; ldswa [%0]%%asi,%0; wrpr %1,0,%%pstate" : \
"=&r" (_lda_v), "=&r" (_pstate) : \
- "r" ((long)(loc)), "r" (_loc_hi), "r" (asi)); \
+ "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
} \
_lda_v; \
})
@@ -645,7 +647,7 @@
__asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; rdpr %%pstate,%1;" \
" wrpr %1,8,%%pstate; or %0,%2,%0; lda [%0]%%asi,%0; wrpr %1,0,%%pstate" : \
"=&r" (_lda_v), "=&r" (_pstate) : \
- "r" ((long)(loc)), "r" (_loc_hi), "r" (asi)); \
+ "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
_lda_v; \
})
@@ -655,7 +657,7 @@
__asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; rdpr %%pstate,%1;" \
" wrpr %1,8,%%pstate; or %0,%2,%0; ldswa [%0]%%asi,%0; wrpr %1,0,%%pstate" : \
"=&r" (_lda_v), "=&r" (_pstate) : \
- "r" ((long)(loc)), "r" (_loc_hi), "r" (asi)); \
+ "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
_lda_v; \
})
#endif
@@ -671,10 +673,10 @@
__asm __volatile("wr %2,%%g0,%%asi; " \
" andn %1,0x1f,%0; stxa %%g0,[%0] %3; membar #Sync; " \
" ldda [%1]%%asi,%0" : "=&r" (_lda_v) : \
- "r" ((long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
+ "r" ((unsigned long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
} else { \
__asm __volatile("wr %2,%%g0,%%asi; ldda [%1]%%asi,%0" : "=r" (_lda_v) : \
- "r" ((long)(loc)), "r" (asi)); \
+ "r" ((unsigned long)(loc)), "r" (asi)); \
} \
_lda_v; \
})
@@ -688,12 +690,12 @@
" andn %2,0x1f,%0; rdpr %%pstate,%1; stxa %%g0,[%0] %5; wrpr %1,8,%%pstate;" \
" sllx %3,32,%0; or %0,%2,%0; membar #Sync; ldda [%0]%%asi,%0; wrpr %1,0,%%pstate" :\
"=&r" (_lda_v), "=&r" (_pstate) : \
- "r" ((long)(loc)), "r" (_loc_hi), "r" (asi), "n" (ASI_DCACHE_TAG)); \
+ "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi), "n" (ASI_DCACHE_TAG)); \
} else { \
__asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; " \
" rdpr %%pstate,%1; or %0,%2,%0; wrpr %1,8,%%pstate; ldda [%0]%%asi,%0;" \
" wrpr %1,0,%%pstate" : "=&r" (_lda_v), "=&r" (_pstate) : \
- "r" ((long)(loc)), "r" (_loc_hi), "r" (asi)); \
+ "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
} \
_lda_v; \
})
@@ -702,22 +704,22 @@
#ifdef __arch64__
/* native load 64-bit int from alternate address space w/64-bit compiler*/
#define ldxa(loc, asi) ({ \
- register long _lda_v; \
+ register unsigned long _lda_v; \
if (PHYS_ASI(asi)) { \
__asm __volatile("wr %2,%%g0,%%asi; "\
" andn %1,0x1f,%0; stxa %%g0,[%0] %3; membar #Sync; " \
" ldxa [%1]%%asi,%0" : "=&r" (_lda_v) : \
- "r" ((long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
+ "r" ((unsigned long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
} else { \
__asm __volatile("wr %2,%%g0,%%asi; ldxa [%1]%%asi,%0" : "=r" (_lda_v) : \
- "r" ((long)(loc)), "r" (asi)); \
+ "r" ((unsigned long)(loc)), "r" (asi)); \
} \
_lda_v; \
})
#else
/* native load 64-bit int from alternate address space w/32-bit compiler*/
#define ldxa(loc, asi) ({ \
- register long _ldxa_lo, _ldxa_hi, _loc_hi; \
+ register unsigned long _ldxa_lo, _ldxa_hi, _loc_hi; \
_loc_hi = (((u_int64_t)loc)>>32); \
if (PHYS_ASI(asi)) { \
__asm __volatile("wr %4,%%g0,%%asi; " \
@@ -725,14 +727,14 @@
" sllx %3,32,%0; wrpr %1,8,%%pstate; or %0,%2,%0; membar #Sync; ldxa [%0]%%asi,%0; " \
Home |
Main Index |
Thread Index |
Old Index