x86/fpu: Return proper error codes from user access functions

When *RSTOR from user memory raises an exception, there is no way to
tell which exception it was. That's bad because it forces the slow path
even when the failure was not a fault. If the operation raised e.g. #GP,
then going through the slow path is pointless.

Use _ASM_EXTABLE_FAULT(), which stores the trap number, and let the
exception fixup return the negated trap number as the error code.

This allows the fast path to be separated out so that it handles faults
directly and avoids the slow path for all other exceptions.

Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Borislav Petkov <bp@suse.de>
Link: https://lkml.kernel.org/r/20210623121457.601480369@linutronix.de
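
Editor's note: to illustrate what the new error convention buys the callers, here is a minimal, hypothetical decision helper, not part of this commit. It only assumes X86_TRAP_PF from <asm/trapnr.h> and EFAULT (both are 14, which is why a negated #PF trap number surfaces as -EFAULT); the function name is made up.

	#include <linux/errno.h>	/* EFAULT */
	#include <asm/trapnr.h>		/* X86_TRAP_PF == 14 == EFAULT */

	/*
	 * Hypothetical helper, not from this commit: decide what to do with
	 * the value returned by the changed *RSTOR user access helpers.
	 */
	static int handle_rstor_error(int err)
	{
		if (!err)
			return 0;		/* fast path restore succeeded */

		if (err == -X86_TRAP_PF)	/* negated trap number, == -EFAULT */
			return -EFAULT;		/* a fault: worth retrying via the slow path */

		return -EINVAL;			/* #GP etc.: the slow path is pointless */
	}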

@@ -88,6 +88,7 @@ static inline void fpstate_init_soft(struct swregs_state *soft) {}
 #endif
 extern void save_fpregs_to_fpstate(struct fpu *fpu);
 
+/* Returns 0 or the negated trap number, which results in -EFAULT for #PF */
 #define user_insn(insn, output, input...) \
 ({ \
 	int err; \
@@ -95,14 +96,14 @@ extern void save_fpregs_to_fpstate(struct fpu *fpu);
 	might_fault(); \
 	\
 	asm volatile(ASM_STAC "\n" \
-		     "1:" #insn "\n\t" \
+		     "1: " #insn "\n" \
		     "2: " ASM_CLAC "\n" \
 		     ".section .fixup,\"ax\"\n" \
-		     "3: movl $-1,%[err]\n" \
+		     "3: negl %%eax\n" \
 		     "   jmp 2b\n" \
 		     ".previous\n" \
-		     _ASM_EXTABLE(1b, 3b) \
-		     : [err] "=r" (err), output \
+		     _ASM_EXTABLE_FAULT(1b, 3b) \
+		     : [err] "=a" (err), output \
 		     : "0"(0), input); \
 	err; \
 })
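
Editor's note: the user_insn() macro changed above is consumed by thin wrappers whose return value now becomes 0 or a negated trap number instead of 0/-1. A minimal sketch of such a wrapper, modeled on the kernel's fxrstor_from_user_sigframe() (64-bit case only; the exact helper in the tree may differ):

	static inline int fxrstor_from_user_sigframe(struct fxregs_state __user *fx)
	{
		/* 0 on success, otherwise the negated trap number (-EFAULT for #PF) */
		return user_insn(fxrstor64 %[fx], "=m" (*fx), [fx] "m" (*fx));
	}
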
@@ -196,16 +197,20 @@ static inline void fxsave(struct fxregs_state *fx)
 #define XRSTOR  ".byte " REX_PREFIX "0x0f,0xae,0x2f"
 #define XRSTORS ".byte " REX_PREFIX "0x0f,0xc7,0x1f"
 
+/*
+ * After this @err contains 0 on success or the negated trap number when
+ * the operation raises an exception. For faults this results in -EFAULT.
+ */
 #define XSTATE_OP(op, st, lmask, hmask, err) \
 	asm volatile("1:" op "\n\t" \
 		     "xor %[err], %[err]\n" \
 		     "2:\n\t" \
 		     ".pushsection .fixup,\"ax\"\n\t" \
-		     "3: movl $-2,%[err]\n\t" \
+		     "3: negl %%eax\n\t" \
 		     "jmp 2b\n\t" \
 		     ".popsection\n\t" \
-		     _ASM_EXTABLE(1b, 3b) \
-		     : [err] "=r" (err) \
+		     _ASM_EXTABLE_FAULT(1b, 3b) \
+		     : [err] "=a" (err) \
 		     : "D" (st), "m" (*st), "a" (lmask), "d" (hmask) \
 		     : "memory")
 