Source-Changes-HG archive
[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index][Old Index]
[src/netbsd-9]: src/sys/arch/arm/include Pull up following revision(s) (reque...
details: https://anonhg.NetBSD.org/src/rev/dcc2fd7c1df8
branches: netbsd-9
changeset: 962069:dcc2fd7c1df8
user: martin <martin%NetBSD.org@localhost>
date: Fri Apr 30 13:54:00 2021 +0000
description:
Pull up following revision(s) (requested by skrll in ticket #1261):
sys/arch/arm/include/lock.h: revision 1.38
common/lib/libc/arch/arm/atomic/membar_ops.S: revision 1.7
common/lib/libc/arch/arm/atomic/atomic_swap_16.S: revision 1.5
common/lib/libc/arch/arm/atomic/atomic_swap_64.S: revision 1.12
common/lib/libc/arch/arm/atomic/atomic_swap.S: revision 1.17
Add the appropriate memory barrier before the lock is cleared in
__sync_lock_release_{1,2,4,8}. That is, all reads and writes for the
inner shareability domain complete before the lock-clear store.
Improve the membar_ops barriers - no need to use dsb and wait for
completion. Also, we only need to act on the inner shareability domain.
Fix the barrier confusion. From Riastradh - thanks!
diffstat:
common/lib/libc/arch/arm/atomic/atomic_swap.S | 6 +-
common/lib/libc/arch/arm/atomic/atomic_swap_16.S | 7 +++-
common/lib/libc/arch/arm/atomic/atomic_swap_64.S | 7 +++-
common/lib/libc/arch/arm/atomic/membar_ops.S | 8 ++--
sys/arch/arm/include/lock.h | 39 +++++++++++------------
5 files changed, 38 insertions(+), 29 deletions(-)
diffs (197 lines):
diff -r 6dc5c38b1964 -r dcc2fd7c1df8 common/lib/libc/arch/arm/atomic/atomic_swap.S
--- a/common/lib/libc/arch/arm/atomic/atomic_swap.S Fri Apr 30 13:49:55 2021 +0000
+++ b/common/lib/libc/arch/arm/atomic/atomic_swap.S Fri Apr 30 13:54:00 2021 +0000
@@ -1,4 +1,4 @@
-/* $NetBSD: atomic_swap.S,v 1.14.18.1 2021/04/26 18:34:28 martin Exp $ */
+/* $NetBSD: atomic_swap.S,v 1.14.18.2 2021/04/30 13:54:00 martin Exp $ */
/*-
* Copyright (c) 2007,2012 The NetBSD Foundation, Inc.
@@ -88,7 +88,7 @@
ENTRY_NP(__sync_lock_release_4)
mov r1, #0
#ifdef _ARM_ARCH_7
- dmb
+ dmb ishst
#else
mcr p15, 0, r1, c7, c10, 5 /* data memory barrier */
#endif
@@ -129,7 +129,7 @@
ENTRY_NP(__sync_lock_release_1)
mov r1, #0
#ifdef _ARM_ARCH_7
- dmb
+ dmb ishst
#else
mcr p15, 0, r1, c7, c10, 5 /* data memory barrier */
#endif
diff -r 6dc5c38b1964 -r dcc2fd7c1df8 common/lib/libc/arch/arm/atomic/atomic_swap_16.S
--- a/common/lib/libc/arch/arm/atomic/atomic_swap_16.S Fri Apr 30 13:49:55 2021 +0000
+++ b/common/lib/libc/arch/arm/atomic/atomic_swap_16.S Fri Apr 30 13:54:00 2021 +0000
@@ -1,4 +1,4 @@
-/* $NetBSD: atomic_swap_16.S,v 1.4 2015/05/17 20:57:11 justin Exp $ */
+/* $NetBSD: atomic_swap_16.S,v 1.4.18.1 2021/04/30 13:54:00 martin Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
@@ -58,6 +58,11 @@
#if (!defined(_KERNEL) || !defined(_RUMPKERNEL)) && !defined(_STANDALONE)
ENTRY_NP(__sync_lock_release_2)
mov r1, #0
+#ifdef _ARM_ARCH_7
+ dmb ishst
+#else
+ mcr p15, 0, r1, c7, c10, 5 /* data memory barrier */
+#endif
strh r1, [r0]
RET
END(__sync_lock_release_2)
diff -r 6dc5c38b1964 -r dcc2fd7c1df8 common/lib/libc/arch/arm/atomic/atomic_swap_64.S
--- a/common/lib/libc/arch/arm/atomic/atomic_swap_64.S Fri Apr 30 13:49:55 2021 +0000
+++ b/common/lib/libc/arch/arm/atomic/atomic_swap_64.S Fri Apr 30 13:54:00 2021 +0000
@@ -1,4 +1,4 @@
-/* $NetBSD: atomic_swap_64.S,v 1.10.18.1 2021/04/26 18:34:28 martin Exp $ */
+/* $NetBSD: atomic_swap_64.S,v 1.10.18.2 2021/04/30 13:54:00 martin Exp $ */
/*-
* Copyright (c) 2012 The NetBSD Foundation, Inc.
* All rights reserved.
@@ -57,6 +57,11 @@
ENTRY_NP(__sync_lock_release_8)
mov r2, #0
mov r3, #0
+#ifdef _ARM_ARCH_7
+ dmb ishst
+#else
+ mcr p15, 0, r2, c7, c10, 5 /* data memory barrier */
+#endif
strd r2, r3, [r0]
RET
END(__sync_lock_release_8)
diff -r 6dc5c38b1964 -r dcc2fd7c1df8 common/lib/libc/arch/arm/atomic/membar_ops.S
--- a/common/lib/libc/arch/arm/atomic/membar_ops.S Fri Apr 30 13:49:55 2021 +0000
+++ b/common/lib/libc/arch/arm/atomic/membar_ops.S Fri Apr 30 13:54:00 2021 +0000
@@ -1,4 +1,4 @@
-/* $NetBSD: membar_ops.S,v 1.6 2014/03/28 21:32:41 skrll Exp $ */
+/* $NetBSD: membar_ops.S,v 1.6.28.1 2021/04/30 13:54:00 martin Exp $ */
/*-
* Copyright (c) 2008 The NetBSD Foundation, Inc.
* All rights reserved.
@@ -34,10 +34,10 @@
ENTRY_NP(_membar_producer)
#ifdef _ARM_ARCH_7
- dsb
+ dmb ishst
#else
mov r0, #0
- mcr p15, 0, r0, c7, c10, 4 /* Data Synchronization Barrier */
+ mcr p15, 0, r0, c7, c10, 5 /* Data Memory Barrier */
#endif
RET
END(_membar_producer)
@@ -47,7 +47,7 @@
ENTRY_NP(_membar_sync)
#ifdef _ARM_ARCH_7
- dmb
+ dmb ish
#else
mov r0, #0
mcr p15, 0, r0, c7, c10, 5 /* Data Memory Barrier */
diff -r 6dc5c38b1964 -r dcc2fd7c1df8 sys/arch/arm/include/lock.h
--- a/sys/arch/arm/include/lock.h Fri Apr 30 13:49:55 2021 +0000
+++ b/sys/arch/arm/include/lock.h Fri Apr 30 13:54:00 2021 +0000
@@ -1,4 +1,4 @@
-/* $NetBSD: lock.h,v 1.33.8.1 2021/04/26 18:34:28 martin Exp $ */
+/* $NetBSD: lock.h,v 1.33.8.2 2021/04/30 13:54:00 martin Exp $ */
/*-
* Copyright (c) 2000, 2001 The NetBSD Foundation, Inc.
@@ -139,32 +139,34 @@
}
#endif /* !_ARM_ARCH_6 */
+/* load/dmb implies load-acquire */
static __inline void
-__arm_membar_producer(void)
+__arm_load_dmb(void)
{
#if defined(_ARM_ARCH_7)
- __asm __volatile("dsb" ::: "memory");
-#elif defined(_ARM_ARCH_6)
- __asm __volatile("mcr\tp15,0,%0,c7,c10,4" :: "r"(0) : "memory");
-#endif
-}
-
-static __inline void
-__arm_membar_consumer(void)
-{
-#if defined(_ARM_ARCH_7)
- __asm __volatile("dmb" ::: "memory");
+ __asm __volatile("dmb ish" ::: "memory");
#elif defined(_ARM_ARCH_6)
__asm __volatile("mcr\tp15,0,%0,c7,c10,5" :: "r"(0) : "memory");
#endif
}
+/* dmb/store implies store-release */
+static __inline void
+__arm_dmb_store(void)
+{
+#if defined(_ARM_ARCH_7)
+ __asm __volatile("dmb ish" ::: "memory");
+#elif defined(_ARM_ARCH_6)
+ __asm __volatile("mcr\tp15,0,%0,c7,c10,5" :: "r"(0) : "memory");
+#endif
+}
+
+
static __inline void __unused
__cpu_simple_lock_init(__cpu_simple_lock_t *__alp)
{
*__alp = __SIMPLELOCK_UNLOCKED;
- __arm_membar_producer();
}
#if !defined(__thumb__) || defined(_ARM_ARCH_T2)
@@ -172,12 +174,11 @@
__cpu_simple_lock(__cpu_simple_lock_t *__alp)
{
#if defined(_ARM_ARCH_6)
- __arm_membar_consumer();
do {
/* spin */
} while (__arm_load_exclusive(__alp) != __SIMPLELOCK_UNLOCKED
|| __arm_store_exclusive(__alp, __SIMPLELOCK_LOCKED));
- __arm_membar_producer();
+ __arm_load_dmb();
#else
while (__swp(__SIMPLELOCK_LOCKED, __alp) != __SIMPLELOCK_UNLOCKED)
continue;
@@ -192,13 +193,12 @@
__cpu_simple_lock_try(__cpu_simple_lock_t *__alp)
{
#if defined(_ARM_ARCH_6)
- __arm_membar_consumer();
do {
if (__arm_load_exclusive(__alp) != __SIMPLELOCK_UNLOCKED) {
return 0;
}
} while (__arm_store_exclusive(__alp, __SIMPLELOCK_LOCKED));
- __arm_membar_producer();
+ __arm_load_dmb();
return 1;
#else
return (__swp(__SIMPLELOCK_LOCKED, __alp) == __SIMPLELOCK_UNLOCKED);
@@ -221,9 +221,8 @@
:: "r"(__SIMPLELOCK_UNLOCKED), "r"(__alp) : "memory");
}
#else
- __arm_membar_consumer();
+ __arm_dmb_store();
*__alp = __SIMPLELOCK_UNLOCKED;
- __arm_membar_producer();
#endif
}
Home |
Main Index |
Thread Index |
Old Index