Source-Changes-HG archive
[src/trunk]: src/sys/arch/mips/include Bring down from nathanw_sa branch.
details: https://anonhg.NetBSD.org/src/rev/b9e5b9b37302
branches: trunk
changeset: 536456:b9e5b9b37302
user: gmcgarry <gmcgarry%NetBSD.org@localhost>
date: Mon Sep 16 07:00:43 2002 +0000
description:
Bring down from nathanw_sa branch.
diffstat:
sys/arch/mips/include/lock.h | 115 +++++++++++++++++++++++++++++++++++++++++-
1 files changed, 111 insertions(+), 4 deletions(-)
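The change adds ll/sc-based spin lock primitives (__cpu_simple_lock_init(), __cpu_simple_lock(), __cpu_simple_lock_try() and __cpu_simple_unlock()) for the MIPS port; as the #ifndef _KERNEL guard in the diff below shows, the new inlines are exposed to non-kernel code such as the userland thread library. A minimal sketch of a hypothetical caller (the example_* names and the <machine/lock.h> include path are illustrative assumptions, not part of this commit):

/*
 * Hypothetical consumer of the new primitives -- illustration only,
 * not part of the commit.
 */
#include <machine/lock.h>

static __cpu_simple_lock_t example_lock;
static int example_counter;

void
example_init(void)
{
	/* Stores __SIMPLELOCK_UNLOCKED (0) followed by a sync barrier. */
	__cpu_simple_lock_init(&example_lock);
}

void
example_enter(void)
{
	__cpu_simple_lock(&example_lock);	/* spin until acquired */
	example_counter++;			/* critical section */
	__cpu_simple_unlock(&example_lock);
}

int
example_tryenter(void)
{
	if (__cpu_simple_lock_try(&example_lock) == 0)
		return 0;			/* already held, did not block */
	example_counter++;
	__cpu_simple_unlock(&example_lock);
	return 1;
}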
diffs (139 lines):
diff -r ba998141aaf1 -r b9e5b9b37302 sys/arch/mips/include/lock.h
--- a/sys/arch/mips/include/lock.h Mon Sep 16 04:31:46 2002 +0000
+++ b/sys/arch/mips/include/lock.h Mon Sep 16 07:00:43 2002 +0000
@@ -1,11 +1,11 @@
-/* $NetBSD: lock.h,v 1.2 2000/05/02 04:41:07 thorpej Exp $ */
+/* $NetBSD: lock.h,v 1.3 2002/09/16 07:00:43 gmcgarry Exp $ */
/*-
- * Copyright (c) 2000 The NetBSD Foundation, Inc.
+ * Copyright (c) 2001 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
- * by Jason R. Thorpe.
+ * by Wayne Knowles.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -37,7 +37,9 @@
*/
/*
- * Machine-dependent spin lock operations.
+ * Machine-dependent spin lock operations for MIPS R4000 Processors.
+ *
+ * Note: R3000 doesn't have any atomic update instructions
*/
#ifndef _MIPS_LOCK_H_
@@ -48,4 +50,109 @@
#define __SIMPLELOCK_LOCKED 1
#define __SIMPLELOCK_UNLOCKED 0
+#ifndef _KERNEL
+
+static __inline void
+__cpu_simple_lock_init(__cpu_simple_lock_t *lp)
+{
+
+ __asm __volatile(
+ "# -- BEGIN __cpu_simple_lock_init\n"
+ " .set push \n"
+ " .set mips2 \n"
+ " sw $0, %0 \n"
+ " sync \n"
+ " .set pop \n"
+ "# -- END __cpu_simple_lock_init\n"
+ : "=m" (*lp));
+}
+
+static __inline void
+__cpu_simple_lock(__cpu_simple_lock_t *lp)
+{
+ unsigned long t0;
+
+ /*
+ * Note, if we detect that the lock is held when
+ * we do the initial load-locked, we spin using
+ * a non-locked load to save the coherency logic
+ * some work.
+ */
+
+ __asm __volatile(
+ "# -- BEGIN __cpu_simple_lock \n"
+ " .set push \n"
+ " .set mips2 \n"
+ "1: ll %0, %3 \n"
+ " bnez %0, 2f \n"
+ " nop # BDslot \n"
+ " li %0, %2 \n"
+ " sc %0, %1 \n"
+ " beqz %0, 1b \n"
+ " nop # BDslot \n"
+ " nop \n"
+ " sync \n"
+ " j 3f \n"
+ " nop \n"
+ " nop \n"
+ "2: lw %0, %3 \n"
+ " bnez %0, 2b \n"
+ " nop # BDslot \n"
+ " j 1b \n"
+ " nop \n"
+ "3: \n"
+ " .set pop \n"
+ "# -- END __cpu_simple_lock \n"
+ : "=r" (t0), "+m" (*lp)
+ : "i" (__SIMPLELOCK_LOCKED), "1" (*lp));
+}
+
+static __inline int
+__cpu_simple_lock_try(__cpu_simple_lock_t *lp)
+{
+ unsigned long t0, v0;
+
+ __asm __volatile(
+ "# -- BEGIN __cpu_simple_lock_try\n"
+ " .set push \n"
+ " .set mips2 \n"
+ "1: ll %0, %4 \n"
+ " bnez %0, 2f \n"
+ " nop # BDslot \n"
+ " li %0, %3 \n"
+ " sc %0, %2 \n"
+ " beqz %0, 2f \n"
+ " nop # BDslot \n"
+ " li %1, 1 \n"
+ " sync \n"
+ " j 3f \n"
+ " nop \n"
+ " nop \n"
+ "2: li %1, 0 \n"
+ "3: \n"
+ " .set pop \n"
+ "# -- END __cpu_simple_lock_try \n"
+ : "=r" (t0), "=r" (v0), "+m" (*lp)
+ : "i" (__SIMPLELOCK_LOCKED), "2" (*lp));
+
+ return (v0 != 0);
+}
+
+static __inline void
+__cpu_simple_unlock(__cpu_simple_lock_t *lp)
+{
+
+ __asm __volatile(
+ "# -- BEGIN __cpu_simple_unlock \n"
+ " .set push \n"
+ " .set mips2 \n"
+ " sync \n"
+ " sw $0, %0 \n"
+ " .set pop \n"
+ "# -- END __cpu_simple_unlock \n"
+ : "=m" (*lp));
+}
+
+#endif
+
#endif /* _MIPS_LOCK_H_ */
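For readers unfamiliar with MIPS ll/sc, the acquire path added above is a test-and-test-and-set spin lock: the ll/sc pair tries to install __SIMPLELOCK_LOCKED atomically, and while the lock is observed to be held the code falls back to spinning on plain lw loads so the coherency logic is not pounded with exclusive requests (as the comment in __cpu_simple_lock notes), while the sync instructions order the critical section against the lock word. A rough portable rendering of that technique in C11 atomics, offered purely as an illustration (the header itself uses the hand-written mips2 assembly shown in the diff, and the names and memory orders below are assumptions):

/*
 * Illustrative portable equivalent of the ll/sc acquire/release logic.
 * Not part of the commit.
 */
#include <stdatomic.h>

#define EX_UNLOCKED	0
#define EX_LOCKED	1

static atomic_uint example_word = EX_UNLOCKED;

static void
example_acquire(void)
{
	for (;;) {
		unsigned expected = EX_UNLOCKED;

		/* Atomic 0 -> 1 transition; ll/sc performs this on MIPS. */
		if (atomic_compare_exchange_weak_explicit(&example_word,
		    &expected, EX_LOCKED,
		    memory_order_acquire, memory_order_relaxed))
			return;

		/* Lock held: spin on ordinary loads, like the lw loop at 2:. */
		while (atomic_load_explicit(&example_word,
		    memory_order_relaxed) != EX_UNLOCKED)
			continue;
	}
}

static void
example_release(void)
{
	/* Release ordering plays the role of the sync before "sw $0". */
	atomic_store_explicit(&example_word, EX_UNLOCKED,
	    memory_order_release);
}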