Source-Changes-HG archive
[src/trunk]: src/sys/arch/amd64/amd64 Revert all my latest changes, and resto...
details: https://anonhg.NetBSD.org/src/rev/5a95b75921f1
branches: trunk
changeset: 359716:5a95b75921f1
user: maxv <maxv@NetBSD.org>
date: Thu Feb 22 08:36:31 2018 +0000
description:
Revert all my latest changes, and restore this file back to how it was
in rev 1.24. I wanted to replace the functions dynamically for SVS, but
that was a dumb idea; we'll just hotpatch instead.
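For readers unfamiliar with the trade-off: dynamic replacement dispatches
every trap entry through a pointer that can be swapped at run time, while
hotpatching reserves a NOP window at build time and overwrites those bytes
in place once, at boot, so the steady-state path carries no indirection.
A minimal sketch of the two shapes in GNU as follows; the symbol names
(svs_enter_func, svs_patch_site) are hypothetical, not taken from this
commit:

	/* Dynamic replacement (hypothetical): indirect call on every entry. */
	movq	_C_LABEL(svs_enter_func)(%rip),%rax
	callq	*%rax

	/*
	 * Hotpatching (hypothetical): a fixed-size NOP window, overwritten
	 * at boot with the real SVS sequence when the feature is enabled.
	 */
svs_patch_site:
	.byte	0x90,0x90,0x90,0x90,0x90	/* five NOPs, patched in place */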
diffstat:
sys/arch/amd64/amd64/amd64_trap.S | 275 ++++++++++++++++++++-----------------
1 files changed, 146 insertions(+), 129 deletions(-)
diffs (truncated from 363 to 300 lines):
diff -r 9a3a31bbbd44 -r 5a95b75921f1 sys/arch/amd64/amd64/amd64_trap.S
--- a/sys/arch/amd64/amd64/amd64_trap.S Thu Feb 22 08:33:43 2018 +0000
+++ b/sys/arch/amd64/amd64/amd64_trap.S Thu Feb 22 08:36:31 2018 +0000
@@ -1,4 +1,4 @@
-/* $NetBSD: amd64_trap.S,v 1.32 2018/02/18 14:32:31 maxv Exp $ */
+/* $NetBSD: amd64_trap.S,v 1.33 2018/02/22 08:36:31 maxv Exp $ */
/*
* Copyright (c) 1998, 2007, 2008, 2017 The NetBSD Foundation, Inc.
@@ -95,48 +95,27 @@
#define PRE_TRAP
#endif
-#ifdef XEN
-/*
- * I don't believe XEN generates in-kernel traps for the
- * equivalent of iret, if it does this code would be needed
- * in order to copy the user segment registers into the fault frame.
- */
-#define check_swapgs alltraps
-#endif
-
-#define TRAP(a) PRE_TRAP ; pushq $(a)
-#define ZTRAP(a) PRE_TRAP ; pushq $0 ; pushq $(a)
-
-.macro TRAP_ENTRY_POINT name,code,is_ztrap
-IDTVEC(\name)
- .if \is_ztrap
- ZTRAP(\code)
- .else
- TRAP(\code)
- .endif
- INTRENTRY
+#define TRAPENTRY \
+ INTRENTRY ; \
jmp .Lalltraps_noentry
-IDTVEC_END(\name)
-.endm
+
+#define TRAP_NJ(a) PRE_TRAP ; pushq $(a)
+#define ZTRAP_NJ(a) PRE_TRAP ; pushq $0 ; pushq $(a)
+#define TRAP(a) TRAP_NJ(a) ; TRAPENTRY
+#define ZTRAP(a) ZTRAP_NJ(a) ; TRAPENTRY
+
+ .text
-.macro TRAP_ENTRY_POINT_SWAPGS name,code,is_ztrap
-IDTVEC(\name)
- .if \is_ztrap
- ZTRAP(\code)
- .else
- TRAP(\code)
- .endif
- jmp check_swapgs
-IDTVEC_END(\name)
-.endm
+ TEXT_USER_BEGIN
-.macro TRAP_ENTRY_POINT_NMI name,code
-IDTVEC(\name)
- ZTRAP(\code)
-#if defined(XEN)
- INTRENTRY
- jmp .Lalltraps_noentry
-#else
+IDTVEC(trap00)
+ ZTRAP(T_DIVIDE)
+IDTVEC_END(trap00)
+
+IDTVEC(trap01)
+ ZTRAP(T_TRCTRAP)
+IDTVEC_END(trap01)
+
/*
* Non Maskable Interrupts are a special case: they can be triggered even
* with interrupts disabled, and once triggered they block further NMIs
@@ -148,6 +127,11 @@
* We need to be careful about %gs too, because it is possible that we were
* running in kernel mode with a userland %gs.
*/
+IDTVEC(trap02)
+#if defined(XEN)
+ ZTRAP(T_NMI)
+#else
+ ZTRAP_NJ(T_NMI)
subq $TF_REGSIZE,%rsp
INTR_SAVE_GPRS
SVS_ENTER_ALTSTACK
@@ -182,16 +166,14 @@
addq $TF_REGSIZE+16,%rsp
iretq
#endif
-IDTVEC_END(\name)
-.endm
+IDTVEC_END(trap02)
-.macro TRAP_ENTRY_POINT_BPT name,code
-IDTVEC(\name)
- ZTRAP(\code)
+IDTVEC(trap03)
+#ifndef KDTRACE_HOOKS
+ ZTRAP(T_BPTFLT)
+#else
+ ZTRAP_NJ(T_BPTFLT)
INTRENTRY
-#ifndef KDTRACE_HOOKS
- jmp .Lalltraps_noentry
-#else
STI(si)
/*
* DTrace Function Boundary Trace (fbt) probes are triggered
@@ -213,12 +195,22 @@
movq dtrace_invop_jump_addr, %rax
jmpq *dtrace_invop_jump_addr
#endif
-IDTVEC_END(\name)
-.endm
+IDTVEC_END(trap03)
+
+IDTVEC(trap04)
+ ZTRAP(T_OFLOW)
+IDTVEC_END(trap04)
-.macro TRAP_ENTRY_POINT_DNA name,code
-IDTVEC(\name)
- ZTRAP(\code)
+IDTVEC(trap05)
+ ZTRAP(T_BOUND)
+IDTVEC_END(trap05)
+
+IDTVEC(trap06)
+ ZTRAP(T_PRIVINFLT)
+IDTVEC_END(trap06)
+
+IDTVEC(trap07)
+ ZTRAP_NJ(T_DNA)
INTRENTRY
#ifdef DIAGNOSTIC
movl CPUVAR(ILEVEL),%ebx
@@ -226,20 +218,17 @@
movq %rsp,%rdi
call _C_LABEL(fpudna)
jmp .Lalltraps_checkusr
-IDTVEC_END(\name)
-.endm
+IDTVEC_END(trap07)
-.macro TRAP_ENTRY_POINT_DOUBLE name,code
-IDTVEC(\name)
- TRAP(\code)
-#if defined(XEN)
- INTRENTRY
- jmp .Lalltraps_noentry
-#else
/*
* Double faults execute on a particular stack, and we must not jump out
* of it. So don't enable interrupts.
*/
+IDTVEC(trap08)
+#if defined(XEN)
+ TRAP(T_DOUBLEFLT)
+#else
+ TRAP_NJ(T_DOUBLEFLT)
subq $TF_REGSIZE,%rsp
INTR_SAVE_GPRS
SVS_ENTER_ALTSTACK
@@ -268,16 +257,56 @@
addq $TF_REGSIZE+16,%rsp
iretq
#endif
-IDTVEC_END(\name)
-.endm
+IDTVEC_END(trap08)
+
+IDTVEC(trap09)
+ ZTRAP(T_FPOPFLT)
+IDTVEC_END(trap09)
+
+IDTVEC(trap10)
+ TRAP(T_TSSFLT)
+IDTVEC_END(trap10)
+
+#ifdef XEN
+/*
+ * I don't believe XEN generates in-kernel traps for the
+ * equivalent of iret, if it does this code would be needed
+ * in order to copy the user segment registers into the fault frame.
+ */
+#define check_swapgs alltraps
+#endif
+
+IDTVEC(trap11) /* #NP() Segment not present */
+ TRAP_NJ(T_SEGNPFLT)
+ jmp check_swapgs
+IDTVEC_END(trap11)
-.macro TRAP_ENTRY_POINT_FPU name,code,is_ztrap
-IDTVEC(\name)
- .if \is_ztrap
- ZTRAP(\code)
- .else
- TRAP(\code)
- .endif
+IDTVEC(trap12) /* #SS() Stack exception */
+ TRAP_NJ(T_STKFLT)
+ jmp check_swapgs
+IDTVEC_END(trap12)
+
+IDTVEC(trap13) /* #GP() General protection */
+ TRAP_NJ(T_PROTFLT)
+ jmp check_swapgs
+IDTVEC_END(trap13)
+
+IDTVEC(trap14)
+ TRAP(T_PAGEFLT)
+IDTVEC_END(trap14)
+
+IDTVEC(trap15)
+ ZTRAP_NJ(T_ASTFLT)
+ INTRENTRY
+#ifdef DIAGNOSTIC
+ movl CPUVAR(ILEVEL),%ebx
+#endif
+ jmp .Lalltraps_checkusr
+IDTVEC_END(trap15)
+
+IDTVEC(trap16)
+ ZTRAP_NJ(T_ARITHTRAP)
+.Ldo_fputrap:
INTRENTRY
#ifdef DIAGNOSTIC
movl CPUVAR(ILEVEL),%ebx
@@ -285,23 +314,56 @@
movq %rsp,%rdi
call _C_LABEL(fputrap)
jmp .Lalltraps_checkusr
-IDTVEC_END(\name)
-.endm
+IDTVEC_END(trap16)
+
+IDTVEC(trap17)
+ TRAP(T_ALIGNFLT)
+IDTVEC_END(trap17)
+
+IDTVEC(trap18)
+ ZTRAP(T_MCA)
+IDTVEC_END(trap18)
+
+IDTVEC(trap19)
+ ZTRAP_NJ(T_XMM)
+ jmp .Ldo_fputrap
+IDTVEC_END(trap19)
-.macro TRAP_ENTRY_POINT_SPUR name,code,is_ztrap
-IDTVEC(\name)
- .if \is_ztrap
- ZTRAP(\code)
- .else
- TRAP(\code)
- .endif
+IDTVEC(trap20)
+IDTVEC(trap21)
+IDTVEC(trap22)
+IDTVEC(trap23)
+IDTVEC(trap24)
+IDTVEC(trap25)
+IDTVEC(trap26)
+IDTVEC(trap27)
+IDTVEC(trap28)
+IDTVEC(trap29)
+IDTVEC(trap30)
+IDTVEC(trap31)
+ /* 20 - 31 reserved for future exp */
+ ZTRAP(T_RESERVED)
+IDTVEC_END(trap20)
+IDTVEC_END(trap21)
+IDTVEC_END(trap22)
+IDTVEC_END(trap23)
+IDTVEC_END(trap24)
+IDTVEC_END(trap25)
+IDTVEC_END(trap26)
+IDTVEC_END(trap27)
+IDTVEC_END(trap28)
+IDTVEC_END(trap29)
+IDTVEC_END(trap30)
+IDTVEC_END(trap31)
+
+IDTVEC(intrspurious)
+ ZTRAP_NJ(T_ASTFLT)
INTRENTRY
#ifdef DIAGNOSTIC
movl CPUVAR(ILEVEL),%ebx
#endif
jmp .Lalltraps_checkusr
-IDTVEC_END(\name)
-.endm
+IDTVEC_END(intrspurious)
#ifndef check_swapgs
/*
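To make the macro layering in the restored version concrete: with PRE_TRAP
empty (the non-XEN case shown in the context above), ZTRAP(T_DIVIDE) in
trap00 expands roughly to:

	pushq	$0			/* fake error code; the CPU pushes none for #DE */
	pushq	$(T_DIVIDE)		/* trap number for the common handler */
	INTRENTRY			/* save registers, enter kernel state */
	jmp	.Lalltraps_noentry	/* shared dispatch in alltraps */

The _NJ ("no jump") variants stop after the two pushes, which is why
handlers that need a custom entry sequence (trap02, trap03, trap07, and
the check_swapgs cases) follow TRAP_NJ/ZTRAP_NJ with their own entry code
instead of the common TRAPENTRY.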