powerpc/32: replace LOAD_MSR_KERNEL() by LOAD_REG_IMMEDIATE()

LOAD_MSR_KERNEL() and LOAD_REG_IMMEDIATE() do the same thing
in the same way, so drop LOAD_MSR_KERNEL().
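
For reference, both helpers expand the same way; a minimal sketch of that expansion (illustrative placeholders rN/constant, not the exact in-tree macro bodies):

	/* constants with bits at or above 0x8000 need a two-instruction pair */
	lis	rN, constant@h		/* set the upper 16 bits */
	ori	rN, rN, constant@l	/* OR in the lower 16 bits */

	/* constants that fit a signed 16-bit immediate take a single li */
	li	rN, constant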

Signed-off-by: Christophe Leroy <christophe.leroy@c-s.fr>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/8f04a6df0bc8949517fd8236d50c15008ccf9231.1566311636.git.christophe.leroy@c-s.fr
Author: Christophe Leroy, 2019-08-20 14:34:13 +00:00
Committed by: Michael Ellerman
commit ba18025fb0, parent c691b4b83b
2 changed files with 13 additions and 26 deletions

arch/powerpc/kernel/entry_32.S

@@ -230,7 +230,7 @@ transfer_to_handler_cont:
 	 */
 	lis	r12,reenable_mmu@h
 	ori	r12,r12,reenable_mmu@l
-	LOAD_MSR_KERNEL(r0, MSR_KERNEL)
+	LOAD_REG_IMMEDIATE(r0, MSR_KERNEL)
 	mtspr	SPRN_SRR0,r12
 	mtspr	SPRN_SRR1,r0
 	SYNC
@@ -304,7 +304,7 @@ stack_ovf:
 	addi	r1,r1,THREAD_SIZE-STACK_FRAME_OVERHEAD
 	lis	r9,StackOverflow@ha
 	addi	r9,r9,StackOverflow@l
-	LOAD_MSR_KERNEL(r10,MSR_KERNEL)
+	LOAD_REG_IMMEDIATE(r10,MSR_KERNEL)
 #if defined(CONFIG_PPC_8xx) && defined(CONFIG_PERF_EVENTS)
 	mtspr	SPRN_NRI, r0
 #endif
@@ -324,7 +324,7 @@ trace_syscall_entry_irq_off:
 	bl	trace_hardirqs_on
 
 	/* Now enable for real */
-	LOAD_MSR_KERNEL(r10, MSR_KERNEL | MSR_EE)
+	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL | MSR_EE)
 	mtmsr	r10
 
 	REST_GPR(0, r1)
@@ -394,7 +394,7 @@ ret_from_syscall:
 #endif
 	mr	r6,r3
 	/* disable interrupts so current_thread_info()->flags can't change */
-	LOAD_MSR_KERNEL(r10,MSR_KERNEL)	/* doesn't include MSR_EE */
+	LOAD_REG_IMMEDIATE(r10,MSR_KERNEL)	/* doesn't include MSR_EE */
 	/* Note: We don't bother telling lockdep about it */
 	SYNC
 	MTMSRD(r10)
@@ -824,7 +824,7 @@ ret_from_except:
 	 * can't change between when we test it and when we return
 	 * from the interrupt. */
 	/* Note: We don't bother telling lockdep about it */
-	LOAD_MSR_KERNEL(r10,MSR_KERNEL)
+	LOAD_REG_IMMEDIATE(r10,MSR_KERNEL)
 	SYNC			/* Some chip revs have problems here... */
 	MTMSRD(r10)		/* disable interrupts */
@@ -991,7 +991,7 @@ END_FTR_SECTION_IFSET(CPU_FTR_NEED_PAIRED_STWCX)
 	 * can restart the exception exit path at the label
 	 * exc_exit_restart below. -- paulus
 	 */
-	LOAD_MSR_KERNEL(r10,MSR_KERNEL & ~MSR_RI)
+	LOAD_REG_IMMEDIATE(r10,MSR_KERNEL & ~MSR_RI)
 	SYNC
 	MTMSRD(r10)		/* clear the RI bit */
 .globl exc_exit_restart
@@ -1066,7 +1066,7 @@ exc_exit_restart_end:
 	REST_NVGPRS(r1);						\
 	lwz	r3,_MSR(r1);						\
 	andi.	r3,r3,MSR_PR;						\
-	LOAD_MSR_KERNEL(r10,MSR_KERNEL);				\
+	LOAD_REG_IMMEDIATE(r10,MSR_KERNEL);				\
 	bne	user_exc_return;					\
 	lwz	r0,GPR0(r1);						\
 	lwz	r2,GPR2(r1);						\
@@ -1236,7 +1236,7 @@ recheck:
 	 * neither. Those disable/enable cycles used to peek at
 	 * TI_FLAGS aren't advertised.
	 */
-	LOAD_MSR_KERNEL(r10,MSR_KERNEL)
+	LOAD_REG_IMMEDIATE(r10,MSR_KERNEL)
 	SYNC
 	MTMSRD(r10)		/* disable interrupts */
 	lwz	r9,TI_FLAGS(r2)
@@ -1329,7 +1329,7 @@ _GLOBAL(enter_rtas)
 	lwz	r4,RTASBASE(r4)
 	mfmsr	r9
 	stw	r9,8(r1)
-	LOAD_MSR_KERNEL(r0,MSR_KERNEL)
+	LOAD_REG_IMMEDIATE(r0,MSR_KERNEL)
 	SYNC			/* disable interrupts so SRR0/1 */
 	MTMSRD(r0)		/* don't get trashed */
 	li	r9,MSR_KERNEL & ~(MSR_IR|MSR_DR)

arch/powerpc/kernel/head_32.h

@@ -4,19 +4,6 @@
 
 #include <asm/ptrace.h>	/* for STACK_FRAME_REGS_MARKER */
 
-/*
- * MSR_KERNEL is > 0x8000 on 4xx/Book-E since it include MSR_CE.
- */
-.macro __LOAD_MSR_KERNEL r, x
-.if \x >= 0x8000
-	lis \r, (\x)@h
-	ori \r, \r, (\x)@l
-.else
-	li \r, (\x)
-.endif
-.endm
-#define LOAD_MSR_KERNEL(r, x) __LOAD_MSR_KERNEL r, x
-
 /*
  * Exception entry code. This code runs with address translation
  * turned off, i.e. using physical addresses.
@@ -92,7 +79,7 @@
 #ifdef CONFIG_40x
 	rlwinm	r9,r9,0,14,12		/* clear MSR_WE (necessary?) */
 #else
-	LOAD_MSR_KERNEL(r10, MSR_KERNEL & ~(MSR_IR|MSR_DR)) /* can take exceptions */
+	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~(MSR_IR|MSR_DR)) /* can take exceptions */
 	MTMSRD(r10)			/* (except for mach check in rtas) */
 #endif
 	lis	r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
@@ -140,10 +127,10 @@
 	 * otherwise we might risk taking an interrupt before we tell lockdep
 	 * they are enabled.
 	 */
-	LOAD_MSR_KERNEL(r10, MSR_KERNEL)
+	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL)
 	rlwimi	r10, r9, 0, MSR_EE
 #else
-	LOAD_MSR_KERNEL(r10, MSR_KERNEL | MSR_EE)
+	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL | MSR_EE)
 #endif
 #if defined(CONFIG_PPC_8xx) && defined(CONFIG_PERF_EVENTS)
 	mtspr	SPRN_NRI, r0
@@ -187,7 +174,7 @@ label:
 #define EXC_XFER_TEMPLATE(hdlr, trap, msr, tfer, ret)		\
 	li	r10,trap;					\
 	stw	r10,_TRAP(r11);					\
-	LOAD_MSR_KERNEL(r10, msr);				\
+	LOAD_REG_IMMEDIATE(r10, msr);				\
 	bl	tfer;						\
 	.long	hdlr;						\
 	.long	ret
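
The \x >= 0x8000 test in the dropped macro exists because li takes a signed 16-bit immediate, so values with bit 0x8000 set (e.g. MSR_KERNEL on 4xx/Book-E, which includes MSR_CE = 0x20000) cannot be loaded directly and need the lis/ori pair. An illustrative sketch with a made-up constant:

	lis	r10, 0x8002@h		/* @h is 0x0000 here, so r10 = 0x00000000 */
	ori	r10, r10, 0x8002@l	/* @l is 0x8002; ori zero-extends: r10 = 0x00008002 */
	li	r10, 0x7fff		/* by contrast, this fits a signed 16-bit immediate */

Per the commit message, LOAD_REG_IMMEDIATE() makes the same size-based choice, so the dedicated MSR helper is redundant.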