x86/paravirt: Make read_cr2() CALLEE_SAVE
The one paravirt read_cr2() implementation (Xen) is actually quite trivial
and doesn't need to clobber anything other than the return register.

Making read_cr2() CALLEE_SAVE avoids all the PUSH/POP nonsense and allows
more convenient use from assembly.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Reviewed-by: Juergen Gross <jgross@suse.com>
Cc: bp@alien8.de
Cc: rostedt@goodmis.org
Cc: luto@kernel.org
Cc: torvalds@linux-foundation.org
Cc: hpa@zytor.com
Cc: dave.hansen@linux.intel.com
Cc: zhe.he@windriver.com
Cc: joel@joelfernandes.org
Cc: devel@etsukata.com
Link: https://lkml.kernel.org/r/20190711114335.887392493@infradead.org
parent 406de552c2
commit 55aedddb61
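At the type level, the change is visible in the pv_mmu_ops hunk below: read_cr2 goes from a bare function pointer to a struct paravirt_callee_save, gets initialized with __PV_IS_CALLEE_SAVE(), and is invoked through PVOP_CALLEE0() instead of PVOP_CALL0(), which tells callers that only the return register is clobbered. A minimal user-space sketch of that wrapper pattern follows; the identifiers mirror the ones in the diff, but the definitions here are simplified stand-ins rather than the kernel's real macros, and the actual clobber guarantee comes from the kernel's asm thunks, not from anything shown in the sketch:

	#include <stdio.h>

	/* Simplified stand-in for the kernel's struct paravirt_callee_save. */
	struct paravirt_callee_save {
		unsigned long (*func)(void);
	};

	/* Stand-in for __PV_IS_CALLEE_SAVE(): wrap a function for the ops table. */
	#define __PV_IS_CALLEE_SAVE(f)	{ .func = (f) }

	/*
	 * Stand-in for PVOP_CALLEE0(): call through the wrapper.  The real
	 * macro additionally promises that only the return register is
	 * clobbered, which is what lets asm callers skip the PUSH/POP.
	 */
	#define PVOP_CALLEE0(op)	((op).func())

	/* Hypothetical backend, standing in for xen_read_cr2_direct(). */
	static unsigned long fake_read_cr2(void)
	{
		return 0xdeadbeef;
	}

	struct pv_mmu_ops {
		struct paravirt_callee_save read_cr2;
	};

	static struct pv_mmu_ops mmu_ops = {
		.read_cr2 = __PV_IS_CALLEE_SAVE(fake_read_cr2),
	};

	int main(void)
	{
		printf("cr2 = %#lx\n", PVOP_CALLEE0(mmu_ops.read_cr2));
		return 0;
	}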
@@ -343,3 +343,9 @@ For 32-bit we have the following conventions - kernel is built with
 .Lafter_call_\@:
 #endif
 .endm
+
+#ifdef CONFIG_PARAVIRT_XXL
+#define GET_CR2_INTO(reg) GET_CR2_INTO_AX ; _ASM_MOV %_ASM_AX, reg
+#else
+#define GET_CR2_INTO(reg) _ASM_MOV %cr2, reg
+#endif
@@ -116,7 +116,7 @@ static inline void write_cr0(unsigned long x)
 
 static inline unsigned long read_cr2(void)
 {
-	return PVOP_CALL0(unsigned long, mmu.read_cr2);
+	return PVOP_CALLEE0(unsigned long, mmu.read_cr2);
 }
 
 static inline void write_cr2(unsigned long x)
@@ -909,13 +909,7 @@ extern void default_banner(void);
 		  ANNOTATE_RETPOLINE_SAFE;				\
 		  call PARA_INDIRECT(pv_ops+PV_CPU_swapgs);		\
 		 )
-#endif
-
-#define GET_CR2_INTO_RAX				\
-	ANNOTATE_RETPOLINE_SAFE;			\
-	call PARA_INDIRECT(pv_ops+PV_MMU_read_cr2);
 
-#ifdef CONFIG_PARAVIRT_XXL
 #define USERGS_SYSRET64						\
 	PARA_SITE(PARA_PATCH(PV_CPU_usergs_sysret64),		\
 		  ANNOTATE_RETPOLINE_SAFE;			\
@@ -929,9 +923,19 @@ extern void default_banner(void);
 		  call PARA_INDIRECT(pv_ops+PV_IRQ_save_fl);	\
 		  PV_RESTORE_REGS(clobbers | CLBR_CALLEE_SAVE);)
 #endif
-#endif
+#endif /* CONFIG_PARAVIRT_XXL */
+#endif /* CONFIG_X86_64 */
+
+#ifdef CONFIG_PARAVIRT_XXL
+
+#define GET_CR2_INTO_AX					\
+	PARA_SITE(PARA_PATCH(PV_MMU_read_cr2),		\
+		  ANNOTATE_RETPOLINE_SAFE;		\
+		  call PARA_INDIRECT(pv_ops+PV_MMU_read_cr2);	\
+		 )
+
+#endif /* CONFIG_PARAVIRT_XXL */
 
-#endif /* CONFIG_X86_32 */
 
 #endif /* __ASSEMBLY__ */
 #else  /* CONFIG_PARAVIRT */
@@ -220,7 +220,7 @@ struct pv_mmu_ops {
 	void (*exit_mmap)(struct mm_struct *mm);
 
 #ifdef CONFIG_PARAVIRT_XXL
-	unsigned long (*read_cr2)(void);
+	struct paravirt_callee_save read_cr2;
 	void (*write_cr2)(unsigned long);
 
 	unsigned long (*read_cr3)(void);
@@ -76,6 +76,7 @@ static void __used common(void)
 	BLANK();
 	OFFSET(XEN_vcpu_info_mask, vcpu_info, evtchn_upcall_mask);
 	OFFSET(XEN_vcpu_info_pending, vcpu_info, evtchn_upcall_pending);
+	OFFSET(XEN_vcpu_info_arch_cr2, vcpu_info, arch.cr2);
 #endif
 
 	BLANK();
@@ -29,9 +29,7 @@
 #ifdef CONFIG_PARAVIRT_XXL
 #include <asm/asm-offsets.h>
 #include <asm/paravirt.h>
-#define GET_CR2_INTO(reg) GET_CR2_INTO_RAX ; movq %rax, reg
 #else
-#define GET_CR2_INTO(reg) movq %cr2, reg
 #define INTERRUPT_RETURN iretq
 #endif
 
@@ -323,7 +321,7 @@ early_idt_handler_common:
 
 	cmpq $14,%rsi		/* Page fault? */
 	jnz 10f
-	GET_CR2_INTO(%rdi)	/* Can clobber any volatile register if pv */
+	GET_CR2_INTO(%rdi)	/* can clobber %rax if pv */
 	call early_make_pgtable
 	andl %eax,%eax
 	jz 20f			/* All good */
@@ -370,7 +370,7 @@ struct paravirt_patch_template pv_ops = {
 	.mmu.exit_mmap = paravirt_nop,
 
 #ifdef CONFIG_PARAVIRT_XXL
-	.mmu.read_cr2 = native_read_cr2,
+	.mmu.read_cr2 = __PV_IS_CALLEE_SAVE(native_read_cr2),
 	.mmu.write_cr2 = native_write_cr2,
 	.mmu.read_cr3 = __native_read_cr3,
 	.mmu.write_cr3 = native_write_cr3,
@@ -998,7 +998,8 @@ void __init xen_setup_vcpu_info_placement(void)
 			__PV_IS_CALLEE_SAVE(xen_irq_disable_direct);
 		pv_ops.irq.irq_enable =
 			__PV_IS_CALLEE_SAVE(xen_irq_enable_direct);
-		pv_ops.mmu.read_cr2 = xen_read_cr2_direct;
+		pv_ops.mmu.read_cr2 =
+			__PV_IS_CALLEE_SAVE(xen_read_cr2_direct);
 	}
 }
 
@@ -1307,16 +1307,6 @@ static void xen_write_cr2(unsigned long cr2)
 	this_cpu_read(xen_vcpu)->arch.cr2 = cr2;
 }
 
-static unsigned long xen_read_cr2(void)
-{
-	return this_cpu_read(xen_vcpu)->arch.cr2;
-}
-
-unsigned long xen_read_cr2_direct(void)
-{
-	return this_cpu_read(xen_vcpu_info.arch.cr2);
-}
-
 static noinline void xen_flush_tlb(void)
 {
 	struct mmuext_op *op;
@@ -2397,7 +2387,7 @@ static void xen_leave_lazy_mmu(void)
 }
 
 static const struct pv_mmu_ops xen_mmu_ops __initconst = {
-	.read_cr2 = xen_read_cr2,
+	.read_cr2 = __PV_IS_CALLEE_SAVE(xen_read_cr2),
 	.write_cr2 = xen_write_cr2,
 
 	.read_cr3 = xen_read_cr3,
@@ -10,6 +10,7 @@
 #include <asm/percpu.h>
 #include <asm/processor-flags.h>
 #include <asm/frame.h>
+#include <asm/asm.h>
 
 #include <linux/linkage.h>
 
@@ -135,3 +136,18 @@ ENTRY(check_events)
 	FRAME_END
 	ret
 ENDPROC(check_events)
+
+ENTRY(xen_read_cr2)
+	FRAME_BEGIN
+	_ASM_MOV PER_CPU_VAR(xen_vcpu), %_ASM_AX
+	_ASM_MOV XEN_vcpu_info_arch_cr2(%_ASM_AX), %_ASM_AX
+	FRAME_END
+	ret
+ENDPROC(xen_read_cr2);
+
+ENTRY(xen_read_cr2_direct)
+	FRAME_BEGIN
+	_ASM_MOV PER_CPU_VAR(xen_vcpu_info) + XEN_vcpu_info_arch_cr2, %_ASM_AX
+	FRAME_END
+	ret
+ENDPROC(xen_read_cr2_direct);
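Both helpers above are written in assembly so the CALLEE_SAVE contract holds by construction: each touches only the accumulator register. xen_read_cr2 loads the per-CPU xen_vcpu pointer and then dereferences its arch.cr2 field, while xen_read_cr2_direct reads arch.cr2 straight out of the per-CPU xen_vcpu_info copy. In rough C terms they correspond to the helpers this patch removes from the Xen MMU code (minus the register-clobber guarantee):

	unsigned long xen_read_cr2(void)
	{
		return this_cpu_read(xen_vcpu)->arch.cr2;
	}

	unsigned long xen_read_cr2_direct(void)
	{
		return this_cpu_read(xen_vcpu_info.arch.cr2);
	}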
@@ -134,6 +134,9 @@ __visible void xen_irq_disable_direct(void);
 __visible unsigned long xen_save_fl_direct(void);
 __visible void xen_restore_fl_direct(unsigned long);
 
+__visible unsigned long xen_read_cr2(void);
+__visible unsigned long xen_read_cr2_direct(void);
+
 /* These are not functions, and cannot be called normally */
 __visible void xen_iret(void);
 __visible void xen_sysret32(void);