Source-Changes-HG archive
[src/trunk]: src/sys/arch/sparc64/include Bring together similar inline asm codes of ld*a and st*a definitions
details: https://anonhg.NetBSD.org/src/rev/9f2df90fa2b7
branches: trunk
changeset: 749647:9f2df90fa2b7
user: nakayama <nakayama@NetBSD.org>
date: Sat Dec 05 07:58:57 2009 +0000
description:
Bring together the similar inline asm code of the ld*a and st*a
definitions using macros; also remove the unnecessary membar #Sync and
%asi restore in the 32-bit kernel case.
While there, introduce an optimized case for a constant asi when
building with gcc.
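
The heart of the change is the constant-ASI dispatch. Here is a minimal
stand-alone sketch of that pattern, assuming a hypothetical function
name lduba_sketch and plain uintptr_t in place of paddr_t (the committed
code expresses the same logic through the SPARC64_LD_DEF macro in the
diff below): when gcc can prove asi constant, the "n" constraint encodes
the ASI as an immediate field of the lduba instruction itself, so %asi
never has to be written; otherwise the code falls back to wr/%asi.

#include <stdint.h>

static inline uint8_t
lduba_sketch(uintptr_t loc, int asi)
{
	uint32_t v;

#if defined(__GNUC__) && defined(__OPTIMIZE__)
	if (__builtin_constant_p(asi)) {
		/* ASI encoded as an immediate in the instruction. */
		__asm volatile("lduba [%1]%2,%0"
		    : "=r" (v)
		    : "r" (loc), "n" (asi));
		return v;
	}
#endif
	/* Non-constant ASI: write it to %asi, then load through it. */
	__asm volatile(
	    "wr %2,%%g0,%%asi; "
	    "lduba [%1]%%asi,%0"
	    : "=r" (v)
	    : "r" (loc), "r" (asi));
	return v;
}

The __OPTIMIZE__ guard matters: without optimization gcc does not
reliably fold the __builtin_constant_p branch away, and the asm with the
"n" constraint could then be instantiated for a non-constant operand and
fail to compile.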
diffstat:
sys/arch/sparc64/include/ctlreg.h | 866 +++++++++++++++----------------------
1 file changed, 345 insertions(+), 521 deletions(-)
diffs (truncated from 912 to 300 lines):
diff -r 2a51fbc17cd5 -r 9f2df90fa2b7 sys/arch/sparc64/include/ctlreg.h
--- a/sys/arch/sparc64/include/ctlreg.h Sat Dec 05 07:33:18 2009 +0000
+++ b/sys/arch/sparc64/include/ctlreg.h Sat Dec 05 07:58:57 2009 +0000
@@ -1,4 +1,4 @@
-/* $NetBSD: ctlreg.h,v 1.46 2009/11/28 21:07:02 mrg Exp $ */
+/* $NetBSD: ctlreg.h,v 1.47 2009/12/05 07:58:57 nakayama Exp $ */
/*
* Copyright (c) 1996-2002 Eduardo Horvath
@@ -477,490 +477,372 @@
*/
#ifdef __arch64__
-static __inline u_char
-lduba(paddr_t loc, int asi)
-{
- register unsigned int _lduba_v;
+
+/* 64-bit kernel, non-constant */
+#define SPARC64_LD_NONCONST(ld) \
+ __asm volatile( \
+ "wr %2,%%g0,%%asi; " \
+ #ld " [%1]%%asi,%0 " \
+ : "=r" (_v) \
+ : "r" ((__uintptr_t)(loc)), "r" (asi))
- __asm volatile(
- "wr %2, %%g0, %%asi; "
- "lduba [%1]%%asi, %0 "
- : "=r" (_lduba_v)
- : "r" ((unsigned long)(loc)), "r" (asi));
- return (_lduba_v);
+#if defined(__GNUC__) && defined(__OPTIMIZE__)
+#define SPARC64_LD_DEF(ld, type, vtype) \
+static __inline type ld(paddr_t loc, int asi) \
+{ \
+ vtype _v; \
+ if (__builtin_constant_p(asi)) \
+ __asm volatile( \
+ #ld " [%1]%2,%0 " \
+ : "=r" (_v) \
+ : "r" ((__uintptr_t)(loc)), "n" (asi)); \
+ else \
+ SPARC64_LD_NONCONST(ld); \
+ return _v; \
}
#else
-static __inline u_char
-lduba(paddr_t loc, int asi)
-{
- register unsigned int _lduba_v, _loc_hi, _pstate;
+#define SPARC64_LD_DEF(ld, type, vtype) \
+static __inline type ld(paddr_t loc, int asi) \
+{ \
+ vtype _v; \
+ SPARC64_LD_NONCONST(ld); \
+ return _v; \
+}
+#endif
+#define SPARC64_LD_DEF64(ld, type) SPARC64_LD_DEF(ld, type, uint64_t)
+
+#else /* __arch64__ */
+
+/* 32-bit kernel, MMU bypass, non-constant */
+#define SPARC64_LD_PHYS_NONCONST(ld) \
+ __asm volatile( \
+ "rdpr %%pstate,%1; " \
+ "sllx %3,32,%0; " \
+ "wrpr %1,8,%%pstate; " \
+ "or %0,%2,%0; " \
+ "wr %4,%%g0,%%asi; " \
+ #ld " [%0]%%asi,%0; " \
+ "wrpr %1,0,%%pstate " \
+ : "=&r" (_v), "=&r" (_pstate) \
+ : "r" ((uint32_t)(loc)), "r" (_hi), "r" (asi))
+/* 32-bit kernel, non-constant */
+#define SPARC64_LD_NONCONST(ld) \
+ __asm volatile( \
+ "wr %2,%%g0,%%asi; " \
+ #ld " [%1]%%asi,%0 " \
+ : "=&r" (_v) \
+ : "r" ((uint32_t)(loc)), "r" (asi))
+/* 32-bit kernel, MMU bypass, non-constant, 64-bit value */
+#define SPARC64_LD_PHYS_NONCONST64(ld) \
+ __asm volatile( \
+ "rdpr %%pstate,%1; " \
+ "sllx %3,32,%0; " \
+ "wrpr %1,8,%%pstate; " \
+ "or %0,%2,%0; " \
+ "wr %4,%%g0,%%asi; " \
+ #ld " [%0]%%asi,%0; " \
+ "wrpr %1,0,%%pstate; " \
+ "srlx %0,32,%1; " \
+ "srl %0,0,%0 " \
+ : "=&r" (_vlo), "=&r" (_vhi) \
+ : "r" ((uint32_t)(loc)), "r" (_hi), "r" (asi))
+/* 32-bit kernel, non-constant, 64-bit value */
+#define SPARC64_LD_NONCONST64(ld) \
+ __asm volatile( \
+ "wr %3,%%g0,%%asi; " \
+ #ld " [%2]%%asi,%0; " \
+ "srlx %0,32,%1; " \
+ "srl %0,0,%0 " \
+ : "=&r" (_vlo), "=&r" (_vhi) \
+ : "r" ((uint32_t)(loc)), "r" (asi))
- _loc_hi = (((uint64_t)loc)>>32);
- if (PHYS_ASI(asi)) {
- __asm volatile(
- "wr %4,%%g0,%%asi; "
- "sllx %3,32,%0; "
- "rdpr %%pstate,%1; "
- "or %0,%2,%0; "
- "wrpr %1,8,%%pstate; "
- "membar #Sync; "
- "lduba [%0]%%asi,%0; "
- "wrpr %1,0,%%pstate; "
- "membar #Sync; "
- "wr %%g0, 0x82, %%asi "
- : "=&r" (_lduba_v), "=&r" (_pstate)
- : "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
- } else {
- __asm volatile(
- "wr %3,%%g0,%%asi; "
- "sllx %2,32,%0; "
- "or %0,%1,%0; "
- "lduba [%0]%%asi,%0; "
- "wr %%g0, 0x82, %%asi "
- : "=&r" (_lduba_v)
- : "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
- }
- return (_lduba_v);
+#if defined(__GNUC__) && defined(__OPTIMIZE__)
+#define SPARC64_LD_DEF(ld, type, vtype) \
+static __inline type ld(paddr_t loc, int asi) \
+{ \
+ vtype _v; \
+ uint32_t _hi, _pstate; \
+ if (PHYS_ASI(asi)) { \
+ _hi = (uint64_t)(loc) >> 32; \
+ if (__builtin_constant_p(asi)) \
+ __asm volatile( \
+ "rdpr %%pstate,%1; " \
+ "sllx %3,32,%0; " \
+ "wrpr %1,8,%%pstate; " \
+ "or %0,%2,%0; " \
+ #ld " [%0]%4,%0; " \
+ "wrpr %1,0,%%pstate; " \
+ : "=&r" (_v), "=&r" (_pstate) \
+ : "r" ((uint32_t)(loc)), "r" (_hi), \
+ "n" (asi)); \
+ else \
+ SPARC64_LD_PHYS_NONCONST(ld); \
+ } else { \
+ if (__builtin_constant_p(asi)) \
+ __asm volatile( \
+ #ld " [%1]%2,%0 " \
+ : "=&r" (_v) \
+ : "r" ((uint32_t)(loc)), "n" (asi)); \
+ else \
+ SPARC64_LD_NONCONST(ld); \
+ } \
+ return _v; \
+}
+#define SPARC64_LD_DEF64(ld, type) \
+static __inline type ld(paddr_t loc, int asi) \
+{ \
+ uint32_t _vlo, _vhi, _hi; \
+ if (PHYS_ASI(asi)) { \
+ _hi = (uint64_t)(loc) >> 32; \
+ if (__builtin_constant_p(asi)) \
+ __asm volatile( \
+ "rdpr %%pstate,%1; " \
+ "sllx %3,32,%0; " \
+ "wrpr %1,8,%%pstate; " \
+ "or %0,%2,%0; " \
+ #ld " [%0]%4,%0; " \
+ "wrpr %1,0,%%pstate; " \
+ "srlx %0,32,%1; " \
+ "srl %0,0,%0 " \
+ : "=&r" (_vlo), "=&r" (_vhi) \
+ : "r" ((uint32_t)(loc)), "r" (_hi), \
+ "n" (asi)); \
+ else \
+ SPARC64_LD_PHYS_NONCONST64(ld); \
+ } else { \
+ if (__builtin_constant_p(asi)) \
+ __asm volatile( \
+ #ld " [%2]%3,%0; " \
+ "srlx %0,32,%1; " \
+ "srl %0,0,%0 " \
+ : "=&r" (_vlo), "=&r" (_vhi) \
+ : "r" ((uint32_t)(loc)), "n" (asi)); \
+ else \
+ SPARC64_LD_NONCONST64(ld); \
+ } \
+ return ((uint64_t)_vhi << 32) | _vlo; \
+}
+#else
+#define SPARC64_LD_DEF(ld, type, vtype) \
+static __inline type ld(paddr_t loc, int asi) \
+{ \
+ vtype _v; \
+ uint32_t _hi, _pstate; \
+ if (PHYS_ASI(asi)) { \
+ _hi = (uint64_t)(loc) >> 32; \
+ SPARC64_LD_PHYS_NONCONST(ld); \
+ } else \
+ SPARC64_LD_NONCONST(ld); \
+ return _v; \
+}
+#define SPARC64_LD_DEF64(ld, type) \
+static __inline type ld(paddr_t loc, int asi) \
+{ \
+ uint32_t _vlo, _vhi, _hi; \
+ if (PHYS_ASI(asi)) { \
+ _hi = (uint64_t)(loc) >> 32; \
+ SPARC64_LD_PHYS_NONCONST64(ld); \
+ } else \
+ SPARC64_LD_NONCONST64(ld); \
+ return ((uint64_t)_vhi << 32) | _vlo; \
}
#endif
-#ifdef __arch64__
-/* load half-word from alternate address space */
-static __inline u_short
-lduha(paddr_t loc, int asi)
-{
- register unsigned int _lduha_v;
-
- __asm volatile(
- "wr %2, %%g0, %%asi; "
- "lduha [%1]%%asi, %0 "
- : "=r" (_lduha_v)
- : "r" ((unsigned long)(loc)), "r" (asi));
- return (_lduha_v);
-}
-#else
-/* load half-word from alternate address space */
-static __inline u_short
-lduha(paddr_t loc, int asi) {
- register unsigned int _lduha_v, _loc_hi, _pstate;
-
- _loc_hi = (((uint64_t)loc)>>32);
+#endif /* __arch64__ */
- if (PHYS_ASI(asi)) {
- __asm volatile(
- "wr %4,%%g0,%%asi; "
- "sllx %3,32,%0; "
- "rdpr %%pstate,%1; "
- "wrpr %1,8,%%pstate; "
- "or %0,%2,%0; "
- "membar #Sync; "
- "lduha [%0]%%asi,%0; "
- "wrpr %1,0,%%pstate; "
- "membar #Sync; "
- "wr %%g0, 0x82, %%asi "
- : "=&r" (_lduha_v), "=&r" (_pstate)
- : "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
- } else {
- __asm volatile(
- "wr %3,%%g0,%%asi; "
- "sllx %2,32,%0; "
- "or %0,%1,%0; "
- "lduha [%0]%%asi,%0; "
- "wr %%g0, 0x82, %%asi "
- : "=&r" (_lduha_v)
- : "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
- }
- return (_lduha_v);
-}
-#endif
+/* load byte from alternate address space */
+SPARC64_LD_DEF(lduba, uint8_t, uint32_t)
+/* load half-word from alternate address space */
+SPARC64_LD_DEF(lduha, uint16_t, uint32_t)
+/* load unsigned int from alternate address space */
+SPARC64_LD_DEF(lda, uint32_t, uint32_t)
+/* load signed int from alternate address space */
+SPARC64_LD_DEF(ldswa, int, int)
+/* load 64-bit unsigned int from alternate address space */
+SPARC64_LD_DEF64(ldxa, uint64_t)
#ifdef __arch64__
-/* load unsigned int from alternate address space */
-static __inline u_int
-lda(paddr_t loc, int asi)
-{
- register unsigned int _lda_v;
- __asm volatile(
- "wr %2,%%g0,%%asi; "
- "lda [%1]%%asi,%0 "
- : "=r" (_lda_v)
- : "r" ((unsigned long)(loc)), "r" (asi));
- return (_lda_v);
-}
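
To make the macro layering concrete: on a 64-bit (__arch64__) kernel
built with gcc and optimization enabled, the invocation
SPARC64_LD_DEF(lduba, uint8_t, uint32_t) from the hunks above expands to
approximately the following (whitespace normalized, SPARC64_LD_NONCONST
left unexpanded for brevity):

static __inline uint8_t lduba(paddr_t loc, int asi)
{
	uint32_t _v;
	if (__builtin_constant_p(asi))
		__asm volatile(
			"lduba [%1]%2,%0 "
			: "=r" (_v)
			: "r" ((__uintptr_t)(loc)), "n" (asi));
	else
		SPARC64_LD_NONCONST(lduba);	/* wr %asi; lduba [...]%asi */
	return _v;
}

Note what is gone relative to the old hand-written 32-bit variants: the
membar #Sync pair around the MMU-bypass access and the trailing
wr %%g0, 0x82, %%asi that restored %asi after every access.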