Source-Changes-HG archive
[src/trunk]: src/sys/arch/arm/include/arm32 Revert the *_PROT_RO_* define cha...
details: https://anonhg.NetBSD.org/src/rev/f1c996883803
branches: trunk
changeset: 356040:f1c996883803
user: skrll <skrll%NetBSD.org@localhost>
date: Tue Aug 29 06:28:26 2017 +0000
description:
Revert the *_PROT_RO_* define changes and do all the work in the *_PROT()
macros for RO kernel pages.
cats boots again - no idea how it slipped through my testing previously
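For context, the reverted approach puts the kernel/user and read-only decision entirely inside the *_PROT() macros: user mappings get the U bit (plus W when write access is requested), while kernel mappings get the RO encoding only when the MMU class actually defines one (a non-zero *_PROT_RO) and the request is read-only, otherwise the W encoding. The following stand-alone C sketch illustrates the L2_S_PROT() shape for a class where L2_S_PROT_RO is 0; the bit values are made-up stand-ins, not the real AP encodings from sys/arch/arm/include/arm32/pmap.h.

/*
 * Hypothetical illustration only -- stand-in bit values, not the
 * kernel's real AP encodings.
 */
#include <stdio.h>

#define PTE_KERNEL	0
#define PTE_USER	1
#define VM_PROT_READ	0x01
#define VM_PROT_WRITE	0x02

#define L2_S_PROT_U	0x20	/* user-accessible */
#define L2_S_PROT_W	0x10	/* writable */
#define L2_S_PROT_RO	0x00	/* generic/xscale: no distinct RO encoding */

/* same shape as the reverted L2_S_PROT() macro in the diff below */
#define L2_S_PROT(ku, pr)						\
	(((ku) == PTE_USER) ?						\
	    (L2_S_PROT_U | (((pr) & VM_PROT_WRITE) ? L2_S_PROT_W : 0)) : \
	    ((L2_S_PROT_RO &&						\
	      ((pr) & (VM_PROT_READ | VM_PROT_WRITE)) == VM_PROT_READ) ? \
		L2_S_PROT_RO : L2_S_PROT_W))

int
main(void)
{
	/* kernel read-only request: falls back to W because RO == 0 here */
	printf("kernel RO -> %#x\n", L2_S_PROT(PTE_KERNEL, VM_PROT_READ));
	/* kernel read/write request: the W encoding */
	printf("kernel RW -> %#x\n",
	    L2_S_PROT(PTE_KERNEL, VM_PROT_READ | VM_PROT_WRITE));
	/* user read-only request: the U bit only */
	printf("user   RO -> %#x\n", L2_S_PROT(PTE_USER, VM_PROT_READ));
	return 0;
}

For a class that defines a non-zero RO encoding, the same macro instead yields L2_S_PROT_RO for kernel read-only requests. Making the RO defines 0 for generic/xscale is also what lets l2pte_set_writable()/l2pte_set_readonly() drop their W == RO special case, since masking with ~0x00 is a no-op.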
diffstat:
sys/arch/arm/include/arm32/pmap.h | 57 +++++++++++++++++++++++---------------
1 files changed, 35 insertions(+), 22 deletions(-)
diffs (114 lines):
diff -r ae5437e7424e -r f1c996883803 sys/arch/arm/include/arm32/pmap.h
--- a/sys/arch/arm/include/arm32/pmap.h Tue Aug 29 04:58:38 2017 +0000
+++ b/sys/arch/arm/include/arm32/pmap.h Tue Aug 29 06:28:26 2017 +0000
@@ -1,4 +1,4 @@
-/* $NetBSD: pmap.h,v 1.151 2017/07/11 20:42:17 skrll Exp $ */
+/* $NetBSD: pmap.h,v 1.152 2017/08/29 06:28:26 skrll Exp $ */
/*
* Copyright (c) 2002, 2003 Wasabi Systems, Inc.
@@ -728,12 +728,12 @@
*/
#define L1_S_PROT_U_generic (L1_S_AP(AP_U))
#define L1_S_PROT_W_generic (L1_S_AP(AP_W))
-#define L1_S_PROT_RO_generic (L1_S_AP(AP_R)) /* AP_W == AP_R */
+#define L1_S_PROT_RO_generic (0)
#define L1_S_PROT_MASK_generic (L1_S_PROT_U|L1_S_PROT_W|L1_S_PROT_RO)
#define L1_S_PROT_U_xscale (L1_S_AP(AP_U))
#define L1_S_PROT_W_xscale (L1_S_AP(AP_W))
-#define L1_S_PROT_RO_xscale (L1_S_AP(AP_R)) /* AP_W == AP_R */
+#define L1_S_PROT_RO_xscale (0)
#define L1_S_PROT_MASK_xscale (L1_S_PROT_U|L1_S_PROT_W|L1_S_PROT_RO)
#define L1_S_PROT_U_armv6 (L1_S_AP(AP_R) | L1_S_AP(AP_U))
@@ -754,12 +754,12 @@
#define L2_L_PROT_U_generic (L2_AP(AP_U))
#define L2_L_PROT_W_generic (L2_AP(AP_W))
-#define L2_L_PROT_RO_generic (L2_AP(AP_R))
+#define L2_L_PROT_RO_generic (0)
#define L2_L_PROT_MASK_generic (L2_L_PROT_U|L2_L_PROT_W|L2_L_PROT_RO)
#define L2_L_PROT_U_xscale (L2_AP(AP_U))
#define L2_L_PROT_W_xscale (L2_AP(AP_W))
-#define L2_L_PROT_RO_xscale (L2_AP(AP_R))
+#define L2_L_PROT_RO_xscale (0)
#define L2_L_PROT_MASK_xscale (L2_L_PROT_U|L2_L_PROT_W|L2_L_PROT_RO)
#define L2_L_PROT_U_armv6n (L2_AP0(AP_R) | L2_AP0(AP_U))
@@ -780,12 +780,12 @@
#define L2_S_PROT_U_generic (L2_AP(AP_U))
#define L2_S_PROT_W_generic (L2_AP(AP_W))
-#define L2_S_PROT_RO_generic (L2_AP(AP_R))
+#define L2_S_PROT_RO_generic (0)
#define L2_S_PROT_MASK_generic (L2_S_PROT_U|L2_S_PROT_W|L2_S_PROT_RO)
#define L2_S_PROT_U_xscale (L2_AP0(AP_U))
#define L2_S_PROT_W_xscale (L2_AP0(AP_W))
-#define L2_S_PROT_RO_xscale (L2_AP(AP_R))
+#define L2_S_PROT_RO_xscale (0)
#define L2_S_PROT_MASK_xscale (L2_S_PROT_U|L2_S_PROT_W|L2_S_PROT_RO)
#define L2_S_PROT_U_armv6n (L2_AP0(AP_R) | L2_AP0(AP_U))
@@ -1028,31 +1028,44 @@
#define l1pte_set_writable(pte) (((pte) & ~L1_S_PROT_RO) | L1_S_PROT_W)
#define l1pte_set_readonly(pte) (((pte) & ~L1_S_PROT_W) | L1_S_PROT_RO)
-#define l2pte_set_writable(pte) (L2_S_PROT_W == L2_S_PROT_RO ? \
- ((pte) | L2_S_PROT_W) : (((pte) & ~L2_S_PROT_RO) | L2_S_PROT_W))
-
-#define l2pte_set_readonly(pte) (L2_S_PROT_W == L2_S_PROT_RO ? \
- ((pte) & ~L2_S_PROT_RO) : (((pte) & ~L2_S_PROT_W) | L2_S_PROT_RO))
+#define l2pte_set_writable(pte) (((pte) & ~L2_S_PROT_RO) | L2_S_PROT_W)
+#define l2pte_set_readonly(pte) (((pte) & ~L2_S_PROT_W) | L2_S_PROT_RO)
#define l2pte_writable_p(pte) (((pte) & L2_S_PROT_W) == L2_S_PROT_W && \
- (L2_S_PROT_W == L2_S_PROT_RO || \
+ (L2_S_PROT_RO == 0 || \
((pte) & L2_S_PROT_RO) != L2_S_PROT_RO))
/*
* These macros return various bits based on kernel/user and protection.
* Note that the compiler will usually fold these at compile time.
*/
-#define L1_S_PROT(ku, pr) ((((ku) == PTE_USER) ? L1_S_PROT_U : 0) | \
- (((pr) & VM_PROT_WRITE) ? L1_S_PROT_W : \
- (L1_S_PROT_W == L1_S_PROT_RO ? 0 : L1_S_PROT_RO)))
+
+#define L1_S_PROT(ku, pr) ( \
+ (((ku) == PTE_USER) ? \
+ L1_S_PROT_U | (((pr) & VM_PROT_WRITE) ? L1_S_PROT_W : 0) \
+ : \
+ (((L1_S_PROT_RO && \
+ ((pr) & (VM_PROT_READ | VM_PROT_WRITE)) == VM_PROT_READ) ? \
+ L1_S_PROT_RO : L1_S_PROT_W))) \
+ )
-#define L2_L_PROT(ku, pr) ((((ku) == PTE_USER) ? L2_L_PROT_U : 0) | \
- (((pr) & VM_PROT_WRITE) ? L2_L_PROT_W : \
- (L2_L_PROT_W == L2_L_PROT_RO ? 0 : L2_L_PROT_RO)))
+#define L2_L_PROT(ku, pr) ( \
+ (((ku) == PTE_USER) ? \
+ L2_L_PROT_U | (((pr) & VM_PROT_WRITE) ? L2_L_PROT_W : 0) \
+ : \
+ (((L2_L_PROT_RO && \
+ ((pr) & (VM_PROT_READ | VM_PROT_WRITE)) == VM_PROT_READ) ? \
+ L2_L_PROT_RO : L2_L_PROT_W))) \
+ )
-#define L2_S_PROT(ku, pr) ((((ku) == PTE_USER) ? L2_S_PROT_U : 0) | \
- (((pr) & VM_PROT_WRITE) ? L2_S_PROT_W : \
- (L2_S_PROT_W == L2_S_PROT_RO ? 0 : L2_S_PROT_RO)))
+#define L2_S_PROT(ku, pr) ( \
+ (((ku) == PTE_USER) ? \
+ L2_S_PROT_U | (((pr) & VM_PROT_WRITE) ? L2_S_PROT_W : 0) \
+ : \
+ (((L2_S_PROT_RO && \
+ ((pr) & (VM_PROT_READ | VM_PROT_WRITE)) == VM_PROT_READ) ? \
+ L2_S_PROT_RO : L2_S_PROT_W))) \
+ )
/*
* Macros to test if a mapping is mappable with an L1 SuperSection,