x86: Add straight-line-speculation mitigation
Make use of an upcoming GCC feature to mitigate
straight-line-speculation for x86:

  https://gcc.gnu.org/g:53a643f8568067d7700a9f2facc8ba39974973d3
  https://gcc.gnu.org/bugzilla/show_bug.cgi?id=102952
  https://bugs.llvm.org/show_bug.cgi?id=52323

It's build-tested on x86_64-allyesconfig using GCC-12 and GCC-11.

Maintenance overhead of this should be fairly low due to objtool
validation.

Size overhead of all these additional int3 instructions comes to:

      text     data     bss      dec      hex  filename
  22267751  6933356  2011368  31212475  1dc43bb  defconfig-build/vmlinux
  22804126  6933356  1470696  31208178  1dc32f2  defconfig-build/vmlinux.sls

Or roughly 2.4% additional text.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Borislav Petkov <bp@suse.de>
Link: https://lore.kernel.org/r/20211204134908.140103474@infradead.org

commit e463a09af2
parent 26c44b776d
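
For illustration (not part of the patch): a minimal user-space sketch of
what -mharden-sls=all does to generated code. The file and function names
below are made up for this example.

/*
 * sls_demo.c - illustration only.
 *
 * Build with a GCC that supports the option:
 *
 *     gcc -O2 -mharden-sls=all -S sls_demo.c
 *
 * In the resulting sls_demo.s every "ret" (and every indirect jump, had
 * there been one) is followed by an "int3", so speculation that runs
 * straight past the control transfer hits a trapping instruction.  Those
 * extra int3 bytes are what the size numbers above are counting.
 */
int add(int a, int b)
{
        return a + b;
}
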
--- a/arch/x86/Kconfig
+++ b/arch/x86/Kconfig
@@ -472,6 +472,18 @@ config RETPOLINE
           branches. Requires a compiler with -mindirect-branch=thunk-extern
           support for full protection. The kernel may run slower.
 
+config CC_HAS_SLS
+        def_bool $(cc-option,-mharden-sls=all)
+
+config SLS
+        bool "Mitigate Straight-Line-Speculation"
+        depends on CC_HAS_SLS && X86_64
+        default n
+        help
+          Compile the kernel with straight-line-speculation options to guard
+          against straight line speculation. The kernel image might be slightly
+          larger.
+
 config X86_CPU_RESCTRL
         bool "x86 CPU resource control support"
         depends on X86 && (CPU_SUP_INTEL || CPU_SUP_AMD)
--- a/arch/x86/Makefile
+++ b/arch/x86/Makefile
@@ -191,6 +191,10 @@ ifdef CONFIG_RETPOLINE
   endif
 endif
 
+ifdef CONFIG_SLS
+  KBUILD_CFLAGS += -mharden-sls=all
+endif
+
 KBUILD_LDFLAGS += -m elf_$(UTS_MACHINE)
 
 ifdef CONFIG_LTO_CLANG
--- a/arch/x86/include/asm/linkage.h
+++ b/arch/x86/include/asm/linkage.h
@@ -18,9 +18,19 @@
 #define __ALIGN_STR     __stringify(__ALIGN)
 #endif
 
+#ifdef CONFIG_SLS
+#define RET     ret; int3
+#else
+#define RET     ret
+#endif
+
 #else /* __ASSEMBLY__ */
 
+#ifdef CONFIG_SLS
+#define ASM_RET "ret; int3\n\t"
+#else
 #define ASM_RET "ret\n\t"
+#endif
 
 #endif /* __ASSEMBLY__ */
 
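
For illustration (not part of the patch): RET is what x86 assembly code now
uses in place of a bare ret, and ASM_RET is the counterpart for asm strings
in C, so the trailing int3 only appears when CONFIG_SLS=y. A hypothetical
use of ASM_RET, with a made-up symbol name:

/*
 * Illustration only: a trivial asm-defined function whose return is
 * spelled via ASM_RET, so it becomes "ret; int3" under CONFIG_SLS=y.
 * The symbol name my_dummy_ret is invented for this example.
 */
#include <linux/linkage.h>

extern void my_dummy_ret(void);

asm(".pushsection .text, \"ax\"\n\t"
    ".globl my_dummy_ret\n\t"
    "my_dummy_ret:\n\t"
    ASM_RET
    ".popsection\n\t");
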
--- a/arch/x86/include/asm/static_call.h
+++ b/arch/x86/include/asm/static_call.h
@@ -36,7 +36,7 @@
         __ARCH_DEFINE_STATIC_CALL_TRAMP(name, ".byte 0xe9; .long " #func " - (. + 4)")
 
 #define ARCH_DEFINE_STATIC_CALL_NULL_TRAMP(name)                       \
-        __ARCH_DEFINE_STATIC_CALL_TRAMP(name, "ret; nop; nop; nop; nop")
+        __ARCH_DEFINE_STATIC_CALL_TRAMP(name, "ret; int3; nop; nop; nop")
 
 
 #define ARCH_ADD_TRAMP_KEY(name)                                       \
--- a/arch/x86/kernel/ftrace.c
+++ b/arch/x86/kernel/ftrace.c
@@ -303,7 +303,7 @@ union ftrace_op_code_union {
         } __attribute__((packed));
 };
 
-#define RET_SIZE                1
+#define RET_SIZE                1 + IS_ENABLED(CONFIG_SLS)
 
 static unsigned long
 create_trampoline(struct ftrace_ops *ops, unsigned int *tramp_size)
--- a/arch/x86/kernel/static_call.c
+++ b/arch/x86/kernel/static_call.c
@@ -17,6 +17,8 @@ enum insn_type {
  */
 static const u8 xor5rax[] = { 0x66, 0x66, 0x48, 0x31, 0xc0 };
 
+static const u8 retinsn[] = { RET_INSN_OPCODE, 0xcc, 0xcc, 0xcc, 0xcc };
+
 static void __ref __static_call_transform(void *insn, enum insn_type type, void *func)
 {
         const void *emulate = NULL;
@@ -42,8 +44,7 @@ static void __ref __static_call_transform(void *insn, enum insn_type type, void
                 break;
 
         case RET:
-                code = text_gen_insn(RET_INSN_OPCODE, insn, func);
-                size = RET_INSN_SIZE;
+                code = &retinsn;
                 break;
         }
 
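
For illustration (not part of the patch): when a static_call site is turned
into a plain return, the transform now patches the whole 5-byte slot from
the retinsn[] table instead of emitting a lone 1-byte ret via
text_gen_insn(), so the bytes following the ret are int3 rather than
leftovers of the old call. Spelled out with the opcode values (the array
name below is invented for this sketch):

/*
 * Illustration only: the 5-byte sequence written into a static_call slot
 * for the RET case.  0xc3 is RET_INSN_OPCODE (near return), 0xcc is int3.
 */
static const unsigned char example_ret_slot[5] = {
        0xc3,                   /* ret                                  */
        0xcc, 0xcc, 0xcc, 0xcc, /* int3 x 4: straight-line speculation  */
                                /* past the return hits a trap          */
};
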
--- a/arch/x86/lib/memmove_64.S
+++ b/arch/x86/lib/memmove_64.S
@@ -40,7 +40,7 @@ SYM_FUNC_START(__memmove)
         /* FSRM implies ERMS => no length checks, do the copy directly */
 .Lmemmove_begin_forward:
         ALTERNATIVE "cmp $0x20, %rdx; jb 1f", "", X86_FEATURE_FSRM
-        ALTERNATIVE "", "movq %rdx, %rcx; rep movsb; RET", X86_FEATURE_ERMS
+        ALTERNATIVE "", __stringify(movq %rdx, %rcx; rep movsb; RET), X86_FEATURE_ERMS
 
         /*
          * movsq instruction have many startup latency
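
For illustration (not part of the patch): the switch from a quoted string
to __stringify() is what lets the RET macro expand to "ret; int3" under
CONFIG_SLS before ALTERNATIVE sees it; inside a plain string literal the
preprocessor would leave the word RET untouched. A stand-alone sketch of
that difference, reusing the shape of the kernel's __stringify():

#include <stdio.h>

/*
 * Same shape as include/linux/stringify.h plus the CONFIG_SLS=y
 * definition of RET from asm/linkage.h, reproduced here so the demo is
 * self-contained.
 */
#define RET                     ret; int3
#define __stringify_1(x...)     #x
#define __stringify(x...)       __stringify_1(x)

int main(void)
{
        printf("%s\n", "RET");                  /* prints: RET       (no macro expansion)  */
        printf("%s\n", __stringify(RET));       /* prints: ret; int3 (RET expanded first)  */
        return 0;
}
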
--- a/arch/x86/lib/retpoline.S
+++ b/arch/x86/lib/retpoline.S
@@ -34,7 +34,7 @@ SYM_INNER_LABEL(__x86_indirect_thunk_\reg, SYM_L_GLOBAL)
 
         ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; jmp *%\reg), \
                       __stringify(RETPOLINE \reg), X86_FEATURE_RETPOLINE, \
-                      __stringify(lfence; ANNOTATE_RETPOLINE_SAFE; jmp *%\reg), X86_FEATURE_RETPOLINE_AMD
+                      __stringify(lfence; ANNOTATE_RETPOLINE_SAFE; jmp *%\reg; int3), X86_FEATURE_RETPOLINE_AMD
 
 .endm
 
--- a/scripts/Makefile.build
+++ b/scripts/Makefile.build
@@ -234,7 +234,8 @@ objtool_args = \
         $(if $(CONFIG_GCOV_KERNEL)$(CONFIG_LTO_CLANG), --no-unreachable)\
         $(if $(CONFIG_RETPOLINE), --retpoline)                          \
         $(if $(CONFIG_X86_SMAP), --uaccess)                             \
-        $(if $(CONFIG_FTRACE_MCOUNT_USE_OBJTOOL), --mcount)
+        $(if $(CONFIG_FTRACE_MCOUNT_USE_OBJTOOL), --mcount)             \
+        $(if $(CONFIG_SLS), --sls)
 
 cmd_objtool = $(if $(objtool-enabled), ; $(objtool) $(objtool_args) $@)
 cmd_gen_objtooldep = $(if $(objtool-enabled), { echo ; echo '$@: $$(wildcard $(objtool))' ; } >> $(dot-target).cmd)
--- a/scripts/link-vmlinux.sh
+++ b/scripts/link-vmlinux.sh
@@ -139,6 +139,9 @@ objtool_link()
         if [ -n "${CONFIG_X86_SMAP}" ]; then
                 objtoolopt="${objtoolopt} --uaccess"
         fi
+        if [ -n "${CONFIG_SLS}" ]; then
+                objtoolopt="${objtoolopt} --sls"
+        fi
         info OBJTOOL ${1}
         tools/objtool/objtool ${objtoolcmd} ${objtoolopt} ${1}
 fi