Source-Changes-HG archive
[src/trunk]: src/sys/arch/amd64/amd64 Always use END() markers when declaring...
details: https://anonhg.NetBSD.org/src/rev/3dfa4ac1aa6c
branches: trunk
changeset: 357195:3dfa4ac1aa6c
user: maxv <maxv@NetBSD.org>
date: Mon Oct 30 17:06:42 2017 +0000
description:
Always use END() markers when declaring functions in assembly, so that ld
can compute the size of the functions. A few functions still remain without them.
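For context, the ENTRY()/END() pattern referred to here looks roughly as
follows. This is a minimal sketch assuming the standard <machine/asm.h>
macros; the function name is made up and is not part of the commit:

#include <machine/asm.h>

ENTRY(example_nop)		/* hypothetical example function */
	ret
END(example_nop)		/* expands (roughly) to
				 * ".size example_nop, . - example_nop",
				 * which lets ld and tools such as nm or
				 * objdump see the function's size */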
While here, fix a bug in the INTRSTUB macro: we fall through to resume_,
but it is aligned, so it looks like we are executing the inter-function
padding - which probably happens to contain NOPs, but that is still bad.
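The fall-through problem can be pictured schematically as follows; the
labels are made up and this is not the actual INTRSTUB expansion:

	.globl	stub_example
stub_example:
	/* ... interrupt entry work ... */
	/* no jmp here: execution falls through toward the next label */
	.align	16		/* the assembler fills the gap with padding
				 * bytes, which the CPU executes before it
				 * reaches resume_example; that is only
				 * harmless if the padding happens to be
				 * NOPs */
	.globl	resume_example
resume_example:
	/* ... resume path ... */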
diffstat:
sys/arch/amd64/amd64/copy.S | 21 +++++-
sys/arch/amd64/amd64/cpufunc.S | 105 +++++++++++++++++++++++++++++++-
sys/arch/amd64/amd64/linux32_sigcode.S | 3 +-
sys/arch/amd64/amd64/linux_sigcode.S | 3 +-
sys/arch/amd64/amd64/vector.S | 10 ++-
5 files changed, 131 insertions(+), 11 deletions(-)
diffs (truncated from 839 to 300 lines):
diff -r 49a14dd569ab -r 3dfa4ac1aa6c sys/arch/amd64/amd64/copy.S
--- a/sys/arch/amd64/amd64/copy.S Mon Oct 30 16:01:19 2017 +0000
+++ b/sys/arch/amd64/amd64/copy.S Mon Oct 30 17:06:42 2017 +0000
@@ -1,4 +1,4 @@
-/* $NetBSD: copy.S,v 1.26 2017/10/17 07:02:50 maxv Exp $ */
+/* $NetBSD: copy.S,v 1.27 2017/10/30 17:06:42 maxv Exp $ */
/*
* Copyright (c) 2001 Wasabi Systems, Inc.
@@ -105,6 +105,7 @@
popq %rdi
leaveq
ret
+END(do_pmap_load)
/*
* SMAP functions. ret+int3+int3 is patched dynamically to STAC/CLAC.
@@ -116,6 +117,7 @@
int3
int3
ret
+END(smap_enable)
ENTRY(smap_disable)
.Lstacpatch:
@@ -123,6 +125,7 @@
int3
int3
ret
+END(smap_disable)
/*
* Copy routines from and to userland, plus a few more. See the
@@ -190,6 +193,7 @@
.Lkcopy_end:
xorq %rax,%rax
ret
+END(kcopy)
ENTRY(copyout)
DEFERRED_SWITCH_CHECK
@@ -219,6 +223,7 @@
xorl %eax,%eax
ret
DEFERRED_SWITCH_CALL
+END(copyout)
ENTRY(copyin)
DEFERRED_SWITCH_CHECK
@@ -249,6 +254,7 @@
xorl %eax,%eax
ret
DEFERRED_SWITCH_CALL
+END(copyin)
NENTRY(copy_efault)
movq $EFAULT,%rax
@@ -308,6 +314,7 @@
movq $ENAMETOOLONG,%rax
jmp copystr_return
DEFERRED_SWITCH_CALL
+END(copyoutstr)
ENTRY(copyinstr)
DEFERRED_SWITCH_CHECK
@@ -351,6 +358,7 @@
movq $ENAMETOOLONG,%rax
jmp copystr_return
DEFERRED_SWITCH_CALL
+END(copyinstr)
ENTRY(copystr_efault)
movl $EFAULT,%eax
@@ -394,7 +402,7 @@
movq %r8,(%rcx)
7: ret
-
+END(copystr)
ENTRY(fuswintr)
cmpl $TLBSTATE_VALID,CPUVAR(TLBSTATE)
@@ -412,6 +420,7 @@
movq $0,PCB_ONFAULT(%rcx)
ret
+END(fuswintr)
ENTRY(fubyte)
DEFERRED_SWITCH_CHECK
@@ -429,6 +438,7 @@
movq $0,PCB_ONFAULT(%rcx)
ret
DEFERRED_SWITCH_CALL
+END(fubyte)
ENTRY(suswintr)
cmpl $TLBSTATE_VALID,CPUVAR(TLBSTATE)
@@ -447,6 +457,7 @@
xorq %rax,%rax
movq %rax,PCB_ONFAULT(%rcx)
ret
+END(suswintr)
ENTRY(subyte)
DEFERRED_SWITCH_CHECK
@@ -466,6 +477,7 @@
movq %rax,PCB_ONFAULT(%rcx)
ret
DEFERRED_SWITCH_CALL
+END(subyte)
/*
* These are the same, but must reside at different addresses,
@@ -476,16 +488,19 @@
movq $0,PCB_ONFAULT(%rcx)
movl $-1,%eax
ret
+END(fusuintrfailure)
ENTRY(fusufailure)
callq smap_enable
movq $0,PCB_ONFAULT(%rcx)
movl $-1,%eax
ret
+END(fusufailure)
ENTRY(fusuaddrfault)
movl $-1,%eax
ret
+END(fusuaddrfault)
/*
* Compare-and-swap the 64-bit integer in the user-space.
@@ -516,6 +531,7 @@
xorq %rax,%rax
ret
DEFERRED_SWITCH_CALL
+END(ucas_64)
/*
* int ucas_32(volatile int32_t *uptr, int32_t old, int32_t new, int32_t *ret);
@@ -544,6 +560,7 @@
xorq %rax,%rax
ret
DEFERRED_SWITCH_CALL
+END(ucas_32)
ENTRY(ucas_efault)
movq $EFAULT,%rax
diff -r 49a14dd569ab -r 3dfa4ac1aa6c sys/arch/amd64/amd64/cpufunc.S
--- a/sys/arch/amd64/amd64/cpufunc.S Mon Oct 30 16:01:19 2017 +0000
+++ b/sys/arch/amd64/amd64/cpufunc.S Mon Oct 30 17:06:42 2017 +0000
@@ -1,6 +1,6 @@
-/* $NetBSD: cpufunc.S,v 1.29 2017/10/15 11:31:00 maxv Exp $ */
+/* $NetBSD: cpufunc.S,v 1.30 2017/10/30 17:06:42 maxv Exp $ */
-/*-
+/*
* Copyright (c) 1998, 2007, 2008 The NetBSD Foundation, Inc.
* All rights reserved.
*
@@ -51,23 +51,28 @@
ENTRY(x86_lfence)
lfence
ret
+END(x86_lfence)
ENTRY(x86_sfence)
sfence
ret
+END(x86_sfence)
ENTRY(x86_mfence)
mfence
ret
+END(x86_mfence)
#ifndef XEN
ENTRY(invlpg)
invlpg (%rdi)
ret
+END(invlpg)
ENTRY(lidt)
lidt (%rdi)
ret
+END(lidt)
ENTRY(lldt)
cmpl %edi, CPUVAR(CURLDT)
@@ -77,51 +82,63 @@
movl %edi, CPUVAR(CURLDT)
lldt %di
ret
+END(lldt)
ENTRY(ltr)
ltr %di
ret
+END(ltr)
ENTRY(lcr0)
movq %rdi, %cr0
ret
+END(lcr0)
ENTRY(rcr0)
movq %cr0, %rax
ret
+END(rcr0)
ENTRY(lcr2)
movq %rdi, %cr2
ret
+END(lcr2)
ENTRY(rcr2)
movq %cr2, %rax
ret
+END(rcr2)
ENTRY(lcr3)
movq %rdi, %cr3
ret
+END(lcr3)
ENTRY(rcr3)
movq %cr3, %rax
ret
+END(rcr3)
#endif
ENTRY(lcr4)
movq %rdi, %cr4
ret
+END(lcr4)
ENTRY(rcr4)
movq %cr4, %rax
ret
+END(rcr4)
ENTRY(lcr8)
movq %rdi, %cr8
ret
+END(lcr8)
ENTRY(rcr8)
movq %cr8, %rax
ret
+END(rcr8)
/*
* Big hammer: flush all TLB entries, including ones from PTE's
@@ -151,73 +168,90 @@
movq %rdx, %cr4
movq %rax, %cr4
ret
+END(tlbflushg)
ENTRY(tlbflush)
1:
movq %cr3, %rax
movq %rax, %cr3
ret
+END(tlbflush)
ENTRY(ldr0)
movq %rdi, %dr0
ret
+END(ldr0)
ENTRY(rdr0)
movq %dr0, %rax
ret
+END(rdr0)
ENTRY(ldr1)
movq %rdi, %dr1
ret
+END(ldr1)
ENTRY(rdr1)
movq %dr1, %rax
ret
+END(rdr1)
ENTRY(ldr2)
movq %rdi, %dr2
ret
+END(ldr2)
ENTRY(rdr2)
movq %dr2, %rax
ret
+END(rdr2)
ENTRY(ldr3)
movq %rdi, %dr3