KVM: PPC: bookehv: Use a Macro for saving/restoring guest registers to/from their 64 bit copies.
Introduced PPC_STD/PPC_LD macros for saving/restoring guest registers
to/from their 64 bit copies.

Signed-off-by: Varun Sethi <Varun.Sethi@freescale.com>
Signed-off-by: Alexander Graf <agraf@suse.de>
parent 6e35994d1f
commit 185e4188da
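The macros take the source/target register, the byte offset of the 64-bit field, and the base address register, and expand to a doubleword access on 64-bit builds or a word access at offset+4 on 32-bit builds. As an illustrative sketch (derived from the macro definitions in the diff below, not part of the commit itself), one invocation from the patch expands as follows:

	/* With CONFIG_64BIT: full doubleword store/load */
	PPC_STD(r6, VCPU_SHARED_MSR, r11)	/* -> std r6, (VCPU_SHARED_MSR)(r11) */
	PPC_LD(r9, VCPU_SHARED_MSR, r11)	/* -> ld  r9, (VCPU_SHARED_MSR)(r11) */

	/* On a 32-bit build: word store/load of the low half at offset+4 */
	PPC_STD(r6, VCPU_SHARED_MSR, r11)	/* -> stw r6, (VCPU_SHARED_MSR + 4)(r11) */
	PPC_LD(r9, VCPU_SHARED_MSR, r11)	/* -> lwz r9, (VCPU_SHARED_MSR + 4)(r11) */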
--- a/arch/powerpc/include/asm/kvm_asm.h
+++ b/arch/powerpc/include/asm/kvm_asm.h
@@ -20,6 +20,14 @@
 #ifndef __POWERPC_KVM_ASM_H__
 #define __POWERPC_KVM_ASM_H__
 
+#ifdef CONFIG_64BIT
+#define PPC_STD(sreg, offset, areg) std sreg, (offset)(areg)
+#define PPC_LD(treg, offset, areg) ld treg, (offset)(areg)
+#else
+#define PPC_STD(sreg, offset, areg) stw sreg, (offset+4)(areg)
+#define PPC_LD(treg, offset, areg) lwz treg, (offset+4)(areg)
+#endif
+
 /* IVPR must be 64KiB-aligned. */
 #define VCPU_SIZE_ORDER 4
 #define VCPU_SIZE_LOG   (VCPU_SIZE_ORDER + 12)
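An explanatory aside on the "+4" in the 32-bit variants (an inference from the kernel's shared-page layout, not text from the commit): the guest register copies live in 64-bit fields of the shared area (e.g. the u64 msr field of struct kvm_vcpu_arch_shared), and these booke hosts are big-endian, so a 32-bit word access must target the upper-address half of the field to reach the significant low 32 bits:

	/*
	 * Big-endian layout of the u64 field at VCPU_SHARED_MSR:
	 *
	 *	VCPU_SHARED_MSR + 0 : high 32 bits
	 *	VCPU_SHARED_MSR + 4 : low 32 bits
	 *
	 * so the 32-bit expansion reads exactly the low word of the
	 * 64-bit copy:
	 */
	lwz	r9, (VCPU_SHARED_MSR + 4)(r11)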
--- a/arch/powerpc/kvm/bookehv_interrupts.S
+++ b/arch/powerpc/kvm/bookehv_interrupts.S
@@ -93,11 +93,7 @@
 #endif
 
 	oris	r8, r6, MSR_CE@h
-#ifdef CONFIG_64BIT
-	std	r6, (VCPU_SHARED_MSR)(r11)
-#else
-	stw	r6, (VCPU_SHARED_MSR + 4)(r11)
-#endif
+	PPC_STD(r6, VCPU_SHARED_MSR, r11)
 	ori	r8, r8, MSR_ME | MSR_RI
 	PPC_STL	r5, VCPU_PC(r4)
 
@@ -335,11 +331,7 @@ _GLOBAL(kvmppc_resume_host)
 	stw	r5, VCPU_SHARED_MAS0(r11)
 	mfspr	r7, SPRN_MAS2
 	stw	r6, VCPU_SHARED_MAS1(r11)
-#ifdef CONFIG_64BIT
-	std	r7, (VCPU_SHARED_MAS2)(r11)
-#else
-	stw	r7, (VCPU_SHARED_MAS2 + 4)(r11)
-#endif
+	PPC_STD(r7, VCPU_SHARED_MAS2, r11)
 	mfspr	r5, SPRN_MAS3
 	mfspr	r6, SPRN_MAS4
 	stw	r5, VCPU_SHARED_MAS7_3+4(r11)
@@ -527,11 +519,7 @@ lightweight_exit:
 	stw	r3, VCPU_HOST_MAS6(r4)
 	lwz	r3, VCPU_SHARED_MAS0(r11)
 	lwz	r5, VCPU_SHARED_MAS1(r11)
-#ifdef CONFIG_64BIT
-	ld	r6, (VCPU_SHARED_MAS2)(r11)
-#else
-	lwz	r6, (VCPU_SHARED_MAS2 + 4)(r11)
-#endif
+	PPC_LD(r6, VCPU_SHARED_MAS2, r11)
 	lwz	r7, VCPU_SHARED_MAS7_3+4(r11)
 	lwz	r8, VCPU_SHARED_MAS4(r11)
 	mtspr	SPRN_MAS0, r3
@@ -565,11 +553,7 @@ lightweight_exit:
 	PPC_LL	r6, VCPU_CTR(r4)
 	PPC_LL	r7, VCPU_CR(r4)
 	PPC_LL	r8, VCPU_PC(r4)
-#ifdef CONFIG_64BIT
-	ld	r9, (VCPU_SHARED_MSR)(r11)
-#else
-	lwz	r9, (VCPU_SHARED_MSR + 4)(r11)
-#endif
+	PPC_LD(r9, VCPU_SHARED_MSR, r11)
 	PPC_LL	r0, VCPU_GPR(r0)(r4)
 	PPC_LL	r1, VCPU_GPR(r1)(r4)
 	PPC_LL	r2, VCPU_GPR(r2)(r4)