powerpc: Add the ability to save Altivec without giving it up
This patch adds the ability to save the VEC registers to the thread struct without giving them up — that is, without disabling the facility for the next time the process returns to userspace. It builds on a previous optimisation for the FPU registers in the thread copy path, avoiding a possibly pointless reload of VEC state. Signed-off-by: Cyril Bur <cyrilbur@gmail.com> Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
This commit is contained in:
parent
8792468da5
commit
6f515d842e
|
@ -43,12 +43,13 @@ static inline void flush_fp_to_thread(struct task_struct *t) { }
|
|||
extern void enable_kernel_altivec(void);
extern void flush_altivec_to_thread(struct task_struct *);
extern void giveup_altivec(struct task_struct *);
extern void __giveup_altivec(struct task_struct *);
/*
 * Save the task's VEC state to its thread_struct WITHOUT clearing the
 * facility bits in its MSR (contrast with giveup_altivec above).
 */
extern void save_altivec(struct task_struct *);

/* Drop the kernel's own use of Altivec by clearing MSR_VEC in the live MSR. */
static inline void disable_kernel_altivec(void)
{
	msr_check_and_clear(MSR_VEC);
}
#else
/* CONFIG_ALTIVEC=n: empty stubs so callers need no #ifdef guards. */
static inline void save_altivec(struct task_struct *t) { }
static inline void __giveup_altivec(struct task_struct *t) { }
#endif
|
||||
|
||||
|
|
|
@ -213,6 +213,16 @@ static int restore_fp(struct task_struct *tsk) { return 0; }
|
|||
#ifdef CONFIG_ALTIVEC
#define loadvec(thr) ((thr).load_vec)

/*
 * Save @tsk's vector registers to its thread_struct, then clear MSR_VEC
 * in the task's saved MSR so Altivec is disabled for it on return to
 * userspace.  On VSX-capable CPUs MSR_VSX is cleared as well.
 *
 * NOTE(review): assumes tsk->thread.regs is non-NULL here — callers
 * appear to guarantee a user context; confirm against call sites.
 */
static void __giveup_altivec(struct task_struct *tsk)
{
	save_altivec(tsk);
	tsk->thread.regs->msr &= ~MSR_VEC;
#ifdef CONFIG_VSX
	if (cpu_has_feature(CPU_FTR_VSX))
		tsk->thread.regs->msr &= ~MSR_VSX;
#endif
}
|
||||
|
||||
void giveup_altivec(struct task_struct *tsk)
|
||||
{
|
||||
check_if_tm_restore_required(tsk);
|
||||
|
@ -472,7 +482,7 @@ void save_all(struct task_struct *tsk)
|
|||
save_fpu(tsk);
|
||||
|
||||
if (usermsr & MSR_VEC)
|
||||
__giveup_altivec(tsk);
|
||||
save_altivec(tsk);
|
||||
|
||||
if (usermsr & MSR_VSX)
|
||||
__giveup_vsx(tsk);
|
||||
|
|
|
@ -106,36 +106,20 @@ _GLOBAL(load_up_altivec)
|
|||
blr
|
||||
|
||||
/*
 * save_altivec(tsk)
 * Save the vector registers to the task's thread_struct.
 *
 * Unlike the old __giveup_altivec this routine replaces, it does NOT
 * clear MSR_VEC/MSR_VSX in the task's saved MSR: the facility remains
 * enabled for the task after the save.
 */
_GLOBAL(save_altivec)
	addi	r3,r3,THREAD		/* want THREAD of task */
	PPC_LL	r7,THREAD_VRSAVEAREA(r3)	/* alternate save area, if any */
	PPC_LL	r5,PT_REGS(r3)		/* NOTE(review): r5 looks unused below — confirm */
	PPC_LCMPI	0,r7,0
	bne	2f			/* use the alternate area when set */
	addi	r7,r3,THREAD_VRSTATE	/* else default to thread.vr_state */
2:	SAVE_32VRS(0,r4,r7)		/* dump v0-v31 */
	mfvscr	v0
	li	r4,VRSTATE_VSCR
	stvx	v0,r4,r7		/* and the VSCR */
	blr
|
||||
|
||||
#ifdef CONFIG_VSX
|
||||
|
|
Loading…
Reference in New Issue