tech-toolchain archive
Avoiding traditional CPP for assembler
Hi all,
The attached patch allows us to slowly move to modern assembler-with-cpp
without the legacy -traditional-cpp. The biggest difference is the use of
## for token concatenation and # for stringification, as opposed to /**/
comments and the magic substitution inside string literals. The patch
starts this on i386 and amd64. For amd64 I did a double build of src with
and without the patch to check for differences. Other platforms can be
converted individually.
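
To illustrate the difference, here is the label-pasting macro in both
styles (the LOCK() lines are taken from the atomic.S hunks below; the
MSG() stringification macro is only a made-up example, not part of the
patch):

/* traditional cpp: paste tokens with an empty comment and rely on
   macro arguments being substituted inside string literals */
#define LOCK(n)		.Lpatch/**/n: lock
#define MSG(s)		.asciz "s"

/* ISO cpp (assembler-with-cpp): explicit ## pasting and # stringification */
#define LOCK(n)		.Lpatch ## n: lock
#define MSG(s)		.asciz #s
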
Comments?
Joerg
Index: src/common/lib/libc/arch/i386/atomic/atomic.S
===================================================================
--- src/common/lib/libc/arch/i386/atomic/atomic.S
+++ src/common/lib/libc/arch/i386/atomic/atomic.S
@@ -37,11 +37,11 @@
#else
#define ALIAS(f, t) WEAK_ALIAS(f,t)
#endif
#ifdef _HARDKERNEL
-#define LOCK(n) .Lpatch/**/n: lock
+#define LOCK(n) .Lpatch ## n: lock
#define ENDLABEL(a) _ALIGN_TEXT; LABEL(a)
#else
#define LOCK(n) lock
#define ENDLABEL(a) /* nothing */
#endif
Index: src/common/lib/libc/arch/x86_64/atomic/atomic.S
===================================================================
--- src/common/lib/libc/arch/x86_64/atomic/atomic.S
+++ src/common/lib/libc/arch/x86_64/atomic/atomic.S
@@ -37,11 +37,11 @@
#else
#define ALIAS(f, t) WEAK_ALIAS(f,t)
#endif
#ifdef _HARDKERNEL
-#define LOCK(n) .Lpatch/**/n: lock
+#define LOCK(n) .Lpatch ## n: lock
#define ENDLABEL(a) _ALIGN_TEXT; LABEL(a)
#else
#define LOCK(n) lock
#define ENDLABEL(a) /* nothing */
#endif
Index: src/lib/libc/compat/arch/i386/sys/compat___sigreturn14.S
===================================================================
--- src/lib/libc/compat/arch/i386/sys/compat___sigreturn14.S
+++ src/lib/libc/compat/arch/i386/sys/compat___sigreturn14.S
@@ -50,10 +50,10 @@
#define MCOUNT_SYMBOL __mcount
#else
#define MCOUNT_SYMBOL mcount
#endif
#define ENTRY(x) \
- .globl _/**/x; _ALIGN_TEXT; _/**/x: pusha ; \
+ .globl _ ## x; _ALIGN_TEXT; _ ## x: pusha ; \
.data; 1:; .long 0; .text; movl $1b,%eax; call MCOUNT_SYMBOL; popa ; nop
#endif /* GPROF */
PSEUDO(__sigreturn14,compat_16___sigreturn14)
Index: src/lib/libc/compat/arch/i386/sys/compat_sigreturn.S
===================================================================
--- src/lib/libc/compat/arch/i386/sys/compat_sigreturn.S
+++ src/lib/libc/compat/arch/i386/sys/compat_sigreturn.S
@@ -50,13 +50,13 @@
#define MCOUNT_SYMBOL __mcount
#else
#define MCOUNT_SYMBOL mcount
#endif
#define ENTRY(x) \
- .globl _/**/x; _ALIGN_TEXT; _/**/x: pusha ; \
+ .globl _ ## x; _ALIGN_TEXT; _ ## x: pusha ; \
.data; 1:; .long 0; .text; movl $1b,%eax; call MCOUNT_SYMBOL; popa ; nop
#endif /* GPROF */
WARN_REFERENCES(sigreturn, \
"warning: reference to compatibility sigreturn()")
PSEUDO(sigreturn,compat_13_sigreturn13)
Index: src/share/mk/bsd.dep.mk
===================================================================
--- src/share/mk/bsd.dep.mk
+++ src/share/mk/bsd.dep.mk
@@ -13,12 +13,11 @@
##### Build rules
# some of the rules involve .h sources, so remove them from mkdep line
.if defined(SRCS) # {
-_TRADITIONAL_CPP?=-traditional-cpp
-__acpp_flags= ${_TRADITIONAL_CPP}
+__acpp_flags= ${_ASM_TRADITIONAL_CPP}
__DPSRCS.all= ${SRCS:C/\.(c|m|s|S|C|cc|cpp|cxx)$/.d/} \
${DPSRCS:C/\.(c|m|s|S|C|cc|cpp|cxx)$/.d/}
__DPSRCS.d= ${__DPSRCS.all:O:u:M*.d}
__DPSRCS.notd= ${__DPSRCS.all:O:u:N*.d}
Index: src/share/mk/sys.mk
===================================================================
--- src/share/mk/sys.mk
+++ src/share/mk/sys.mk
@@ -13,11 +13,16 @@
AS?= as
AFLAGS?=
COMPILE.s?= ${CC} ${AFLAGS} -c
LINK.s?= ${CC} ${AFLAGS} ${LDFLAGS}
-COMPILE.S?= ${CC} ${AFLAGS} ${CPPFLAGS} -c -traditional-cpp
+.if ${MACHINE_ARCH} == "i386" || ${MACHINE_ARCH} == "x86_64"
+_ASM_TRADITIONAL_CPP= -x assembler-with-cpp
+.else
+_ASM_TRADITIONAL_CPP= -traditional-cpp
+.endif
+COMPILE.S?= ${CC} ${AFLAGS} ${CPPFLAGS} ${_ASM_TRADITIONAL_CPP} -c
LINK.S?= ${CC} ${AFLAGS} ${CPPFLAGS} ${LDFLAGS}
CC?= cc
.if ${MACHINE_ARCH} == "alpha" || \
${MACHINE_ARCH} == "arm" || \
Index: src/sys/arch/amd64/amd64/lock_stubs.S
===================================================================
--- src/sys/arch/amd64/amd64/lock_stubs.S
+++ src/sys/arch/amd64/amd64/lock_stubs.S
@@ -45,12 +45,12 @@
#include <machine/frameasm.h>
#include "assym.h"
#define ENDLABEL(name,a) .align a; LABEL(name)
-#define LOCK(num) .Lpatch/**/num: lock
-#define RET(num) .Lret/**/num: ret; nop; nop; ret
+#define LOCK(num) .Lpatch ## num: lock
+#define RET(num) .Lret ## num: ret; nop; nop; ret
#ifndef LOCKDEBUG
/*
* void mutex_enter(kmutex_t *mtx);
Index: src/sys/arch/amd64/amd64/vector.S
===================================================================
--- src/sys/arch/amd64/amd64/vector.S
+++ src/sys/arch/amd64/amd64/vector.S
@@ -390,13 +390,13 @@
*
*/
/* XXX See comment in locore.s */
#ifdef __ELF__
-#define XINTR(name,num) Xintr_/**/name/**/num
+#define XINTR(name,num) Xintr_ ## name ## num
#else
-#define XINTR(name,num) _Xintr_/**/name/**/num
+#define XINTR(name,num) _Xintr_ ## name ## num
#endif
#if NLAPIC > 0
#ifdef MULTIPROCESSOR
IDTVEC(recurse_lapic_ipi)
@@ -644,22 +644,22 @@
* This macro defines the generic stub code. Its arguments modifiy it
* for specific PICs.
*/
#define INTRSTUB(name, num, early_ack, late_ack, mask, unmask, level_mask) \
-IDTVEC(recurse_/**/name/**/num) ;\
+IDTVEC(recurse_ ## name ## num) ;\
INTR_RECURSE_HWFRAME ;\
subq $8,%rsp ;\
pushq $T_ASTFLT /* trap # for doing ASTs */ ;\
INTRENTRY ;\
-IDTVEC(resume_/**/name/**/num) \
+IDTVEC(resume_ ## name ## num) \
movq $IREENT_MAGIC,TF_ERR(%rsp) ;\
movl %ebx,%r13d ;\
movq CPUVAR(ISOURCES) + (num) * 8, %r14 ;\
movl IS_MAXLEVEL(%r14),%ebx ;\
jmp 1f ;\
-IDTVEC(intr_/**/name/**/num) ;\
+IDTVEC(intr_ ## name ## num) ;\
pushq $0 /* dummy error code */ ;\
pushq $T_ASTFLT /* trap # for doing ASTs */ ;\
INTRENTRY ;\
movq CPUVAR(ISOURCES) + (num) * 8, %r14 ;\
mask(num) /* mask it in hardware */ ;\
@@ -993,16 +993,16 @@
#endif
#else /* !XEN */
/* Resume/recurse procedures for spl() */
#define XENINTRSTUB(name, num, early_ack, late_ack, mask, unmask, level_mask) \
-IDTVEC(recurse_/**/name/**/num) ;\
+IDTVEC(recurse_ ## name ## num) ;\
INTR_RECURSE_HWFRAME ;\
subq $8,%rsp ;\
pushq $T_ASTFLT /* trap # for doing ASTs */ ;\
INTRENTRY ;\
-IDTVEC(resume_/**/name/**/num) \
+IDTVEC(resume_ ## name ## num) \
movq $IREENT_MAGIC,TF_ERR(%rsp) ;\
movl %ebx,%r13d ;\
movq CPUVAR(ISOURCES) + (num) * 8, %r14 ;\
1: \
pushq %r13 ;\
Index: src/sys/arch/amd64/conf/Makefile.amd64
===================================================================
--- src/sys/arch/amd64/conf/Makefile.amd64
+++ src/sys/arch/amd64/conf/Makefile.amd64
@@ -35,11 +35,11 @@
##
DEFCOPTS= -O2
CPPFLAGS+= -Damd64 -Dx86_64
CFLAGS+= -mcmodel=kernel
CFLAGS+= -mno-red-zone
-AFLAGS+= -x assembler-with-cpp -traditional-cpp
+AFLAGS+= -x assembler-with-cpp
##
## (3) libkern and compat
##
KERN_AS= obj
Index: src/sys/arch/amd64/include/frameasm.h
===================================================================
--- src/sys/arch/amd64/include/frameasm.h
+++ src/sys/arch/amd64/include/frameasm.h
@@ -130,20 +130,20 @@
#define CHECK_ASTPENDING(reg) cmpl $0, L_MD_ASTPENDING(reg)
#define CLEAR_ASTPENDING(reg) movl $0, L_MD_ASTPENDING(reg)
#ifdef XEN
#define CLI(temp_reg) \
- movl CPUVAR(CPUID),%e/**/temp_reg ; \
- shlq $6,%r/**/temp_reg ; \
- addq CPUVAR(VCPU),%r/**/temp_reg ; \
- movb $1,EVTCHN_UPCALL_MASK(%r/**/temp_reg)
+ movl CPUVAR(CPUID),%e ## temp_reg ; \
+ shlq $6,%r ## temp_reg ; \
+ addq CPUVAR(VCPU),%r ## temp_reg ; \
+ movb $1,EVTCHN_UPCALL_MASK(%r ## temp_reg)
#define STI(temp_reg) \
- movl CPUVAR(CPUID),%e/**/temp_reg ; \
- shlq $6,%r/**/temp_reg ; \
- addq CPUVAR(VCPU),%r/**/temp_reg ; \
- movb $0,EVTCHN_UPCALL_MASK(%r/**/temp_reg)
+ movl CPUVAR(CPUID),%e ## temp_reg ; \
+ shlq $6,%r ## temp_reg ; \
+ addq CPUVAR(VCPU),%r ## temp_reg ; \
+ movb $0,EVTCHN_UPCALL_MASK(%r ## temp_reg)
#else /* XEN */
#define CLI(temp_reg) cli
#define STI(temp_reg) sti
#endif /* XEN */
#endif /* _AMD64_MACHINE_FRAMEASM_H */
Index: src/sys/arch/i386/conf/Makefile.i386
===================================================================
--- src/sys/arch/i386/conf/Makefile.i386
+++ src/sys/arch/i386/conf/Makefile.i386
@@ -32,11 +32,11 @@
##
## (2) compile settings
##
CPPFLAGS+= -Di386
-AFLAGS+= -x assembler-with-cpp -traditional-cpp
+AFLAGS+= -x assembler-with-cpp
##
## (3) libkern and compat
##
OPT_MODULAR= %MODULAR%
Index: src/sys/arch/i386/i386/lock_stubs.S
===================================================================
--- src/sys/arch/i386/i386/lock_stubs.S
+++ src/sys/arch/i386/i386/lock_stubs.S
@@ -43,12 +43,12 @@
#include "assym.h"
#define ALIGN64 .align 64
#define ALIGN32 .align 32
-#define LOCK(num) .Lpatch/**/num: lock
-#define RET(num) .Lret/**/num: ret; nop; nop; ret
+#define LOCK(num) .Lpatch ## num: lock
+#define RET(num) .Lret ## num: ret; nop; nop; ret
#define ENDLABEL(name,a) .align a; LABEL(name)
#if !defined(LOCKDEBUG)
/*
Index: src/sys/arch/i386/i386/vector.S
===================================================================
--- src/sys/arch/i386/i386/vector.S
+++ src/sys/arch/i386/i386/vector.S
@@ -400,26 +400,26 @@
* This macro defines the generic stub code. Its arguments modifiy it
* for specific PICs.
*/
#define INTRSTUB(name, num, early_ack, late_ack, mask, unmask, level_mask) \
-IDTVEC(recurse_/**/name/**/num) ;\
+IDTVEC(recurse_ ## name ## num) ;\
pushfl ;\
pushl %cs ;\
pushl %esi ;\
subl $4,%esp ;\
pushl $T_ASTFLT /* trap # for doing ASTs */ ;\
INTRENTRY ;\
-IDTVEC_END(recurse_/**/name/**/num) ;\
-IDTVEC(resume_/**/name/**/num) \
+IDTVEC_END(recurse_ ## name ## num) ;\
+IDTVEC(resume_ ## name ## num) \
movl $IREENT_MAGIC,TF_ERR(%esp) ;\
movl %ebx,%esi ;\
movl CPUVAR(ISOURCES) + (num) * 4, %ebp ;\
movl IS_MAXLEVEL(%ebp),%ebx ;\
jmp 1f ;\
-IDTVEC_END(resume_/**/name/**/num) ;\
-IDTVEC(intr_/**/name/**/num) ;\
+IDTVEC_END(resume_ ## name ## num) ;\
+IDTVEC(intr_ ## name ## num) ;\
pushl $0 /* dummy error code */ ;\
pushl $T_ASTFLT /* trap # for doing ASTs */ ;\
INTRENTRY ;\
movl CPUVAR(ISOURCES) + (num) * 4, %ebp ;\
mask(num) /* mask it in hardware */ ;\
@@ -471,11 +471,11 @@
pushl %esp /* for unmask */ ;\
unmask(num) ;\
late_ack(num) ;\
addl $4,%esp ;\
INTRFASTEXIT ;\
-IDTVEC_END(intr_/**/name/**/num)
+IDTVEC_END(intr_ ## name ## num)
#define ICUADDR IO_ICU1
INTRSTUB(legacy,0,i8259_asm_ack1,voidop,i8259_asm_mask,i8259_asm_unmask,voidop)
@@ -755,19 +755,19 @@
#endif
#else /* XEN */
#define voidop(num)
#define XENINTRSTUB(name, num, early_ack, late_ack, mask, unmask, level_mask) \
-IDTVEC(recurse_/**/name/**/num) ;\
+IDTVEC(recurse_ ## name ## num) ;\
pushfl ;\
pushl %cs ;\
pushl %esi ;\
subl $4,%esp ;\
pushl $T_ASTFLT /* trap # for doing ASTs */ ;\
INTRENTRY ;\
movl $_C_LABEL(Xdoreti), %esi; /* we now have a trap frame, so loop using doreti instead */ ;\
-IDTVEC(resume_/**/name/**/num) \
+IDTVEC(resume_ ## name ## num) \
movl $IREENT_MAGIC,TF_ERR(%esp) ;\
pushl %ebx ;\
movl CPUVAR(ISOURCES) + (num) * 4, %ebp ;\
movl $num,CPUVAR(ILEVEL) ;\
IDEPTH_INCR /* leaves old %esp on stack */ ;\
Index: src/sys/arch/xen/conf/Makefile.xen
===================================================================
--- src/sys/arch/xen/conf/Makefile.xen
+++ src/sys/arch/xen/conf/Makefile.xen
@@ -38,11 +38,11 @@
##
## (2) compile settings
##
DEFCOPTS= -O2
CPPFLAGS+= -D${XEN_BUILD}
-AFLAGS+= -x assembler-with-cpp -traditional-cpp ${DBG} -D__XEN__
+AFLAGS+= -x assembler-with-cpp ${DBG} -D__XEN__
EXTRA_INCLUDES= -I${.CURDIR}/xen-ma
.if ${XEN_BUILD} == "amd64"
CPPFLAGS+= -Dx86_64
CFLAGS+= -mcmodel=kernel