arm64: sysreg: Make mrs_s and msr_s macros work with Clang and LTO

Clang's integrated assembler does not allow assembly macros defined
in one inline asm block using the .macro directive to be used across
separate asm blocks. LLVM developers consider this a feature and not a
bug, recommending code refactoring:

  https://bugs.llvm.org/show_bug.cgi?id=19749

As binutils doesn't allow macros to be redefined, this change uses
DEFINE_MRS_S/DEFINE_MSR_S to define the corresponding macros in-place
before each use and UNDEFINE_MRS_S/UNDEFINE_MSR_S to purge them
afterwards, working around gcc and clang limitations on redefining
macros across different assembler blocks.

Specifically, the current state after preprocessing looks like this:

asm volatile(".macro mXX_s ... .endm");
void f()
{
	asm volatile("mXX_s a, b");
}

With GCC, it gives macro redefinition error because sysreg.h is included
in multiple source files, and assembler code for all of them is later
combined for LTO (I've seen an intermediate file with hundreds of
identical definitions).

With clang, it gives macro undefined error because clang doesn't allow
sharing macros between inline asm statements.

I also seem to remember catching another sort of undefined error with
GCC due to reordering of macro definition asm statement and generated
asm code for function that uses the macro.

The solution with defining and undefining for each use, while certainly
not elegant, satisfies both GCC and clang, LTO and non-LTO.

Co-developed-by: Alex Matveev <alxmtvv@gmail.com>
Co-developed-by: Yury Norov <ynorov@caviumnetworks.com>
Co-developed-by: Sami Tolvanen <samitolvanen@google.com>
Reviewed-by: Nick Desaulniers <ndesaulniers@google.com>
Reviewed-by: Mark Rutland <mark.rutland@arm.com>
Signed-off-by: Kees Cook <keescook@chromium.org>
Signed-off-by: Will Deacon <will.deacon@arm.com>
This commit is contained in:
Kees Cook 2019-04-24 09:55:37 -07:00 committed by Will Deacon
parent 06a916feca
commit be604c616c
3 changed files with 38 additions and 19 deletions

View File

@@ -43,7 +43,7 @@ static inline void arch_local_irq_enable(void)
 	asm volatile(ALTERNATIVE(
 		"msr	daifclr, #2	// arch_local_irq_enable\n"
 		"nop",
-		"msr_s  " __stringify(SYS_ICC_PMR_EL1) ",%0\n"
+		__msr_s(SYS_ICC_PMR_EL1, "%0")
 		"dsb	sy",
 		ARM64_HAS_IRQ_PRIO_MASKING)
 		:
@@ -55,7 +55,7 @@ static inline void arch_local_irq_disable(void)
 {
 	asm volatile(ALTERNATIVE(
 		"msr	daifset, #2	// arch_local_irq_disable",
-		"msr_s  " __stringify(SYS_ICC_PMR_EL1) ", %0",
+		__msr_s(SYS_ICC_PMR_EL1, "%0"),
 		ARM64_HAS_IRQ_PRIO_MASKING)
 		:
 		: "r" ((unsigned long) GIC_PRIO_IRQOFF)
@@ -86,7 +86,7 @@ static inline unsigned long arch_local_save_flags(void)
 		"mov	%0, %1\n"
 		"nop\n"
 		"nop",
-		"mrs_s  %0, " __stringify(SYS_ICC_PMR_EL1) "\n"
+		__mrs_s("%0", SYS_ICC_PMR_EL1)
 		"ands	%1, %1, " __stringify(PSR_I_BIT) "\n"
 		"csel	%0, %0, %2, eq",
 		ARM64_HAS_IRQ_PRIO_MASKING)
@@ -116,7 +116,7 @@ static inline void arch_local_irq_restore(unsigned long flags)
 	asm volatile(ALTERNATIVE(
 			"msr	daif, %0\n"
 			"nop",
-			"msr_s  " __stringify(SYS_ICC_PMR_EL1) ", %0\n"
+			__msr_s(SYS_ICC_PMR_EL1, "%0")
 			"dsb	sy",
 			ARM64_HAS_IRQ_PRIO_MASKING)
 		: "+r" (flags)

View File

@@ -30,7 +30,7 @@
 ({									\
 	u64 reg;							\
 	asm volatile(ALTERNATIVE("mrs %0, " __stringify(r##nvh),	\
-				 "mrs_s %0, " __stringify(r##vh),	\
+				 __mrs_s("%0", r##vh),			\
 				 ARM64_HAS_VIRT_HOST_EXTN)		\
 		     : "=r" (reg));					\
 	reg;								\
@@ -40,7 +40,7 @@
 do {									\
 	u64 __val = (u64)(v);						\
 	asm volatile(ALTERNATIVE("msr " __stringify(r##nvh) ", %x0",	\
-				 "msr_s " __stringify(r##vh) ", %x0",	\
+				 __msr_s(r##vh, "%x0"),			\
 				 ARM64_HAS_VIRT_HOST_EXTN)		\
 		     : : "rZ" (__val));					\
 } while (0)

View File

@@ -760,20 +760,39 @@
 #include <linux/build_bug.h>
 #include <linux/types.h>
 
-asm(
-"	.irp	num,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30\n"
-"	.equ	.L__reg_num_x\\num, \\num\n"
-"	.endr\n"
+#define __DEFINE_MRS_MSR_S_REGNUM				\
+"	.irp	num,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30\n" \
+"	.equ	.L__reg_num_x\\num, \\num\n"			\
+"	.endr\n"						\
 "	.equ	.L__reg_num_xzr, 31\n"
-"\n"
-"	.macro	mrs_s, rt, sreg\n"
-	__emit_inst(0xd5200000|(\\sreg)|(.L__reg_num_\\rt))
+
+#define DEFINE_MRS_S						\
+	__DEFINE_MRS_MSR_S_REGNUM				\
+"	.macro	mrs_s, rt, sreg\n"				\
+	__emit_inst(0xd5200000|(\\sreg)|(.L__reg_num_\\rt))	\
 "	.endm\n"
-"\n"
-"	.macro	msr_s, sreg, rt\n"
-	__emit_inst(0xd5000000|(\\sreg)|(.L__reg_num_\\rt))
+
+#define DEFINE_MSR_S						\
+	__DEFINE_MRS_MSR_S_REGNUM				\
+"	.macro	msr_s, sreg, rt\n"				\
+	__emit_inst(0xd5000000|(\\sreg)|(.L__reg_num_\\rt))	\
 "	.endm\n"
-);
+
+#define UNDEFINE_MRS_S						\
+"	.purgem	mrs_s\n"
+
+#define UNDEFINE_MSR_S						\
+"	.purgem	msr_s\n"
+
+#define __mrs_s(v, r)						\
+	DEFINE_MRS_S						\
+"	mrs_s " v ", " __stringify(r) "\n"			\
+	UNDEFINE_MRS_S
+
+#define __msr_s(r, v)						\
+	DEFINE_MSR_S						\
+"	msr_s " __stringify(r) ", " v "\n"			\
+	UNDEFINE_MSR_S
 
 /*
  * Unlike read_cpuid, calls to read_sysreg are never expected to be
@@ -801,13 +820,13 @@ asm(
  */
 #define read_sysreg_s(r) ({						\
 	u64 __val;							\
-	asm volatile("mrs_s %0, " __stringify(r) : "=r" (__val));	\
+	asm volatile(__mrs_s("%0", r) : "=r" (__val));			\
 	__val;								\
 })
 
 #define write_sysreg_s(v, r) do {					\
 	u64 __val = (u64)(v);						\
-	asm volatile("msr_s " __stringify(r) ", %x0" : : "rZ" (__val)); \
+	asm volatile(__msr_s(r, "%x0") : : "rZ" (__val));		\
 } while (0)
 
 /*