arm64: assembler: remove conditional NEON yield macros
The users of the conditional NEON yield macros have all been switched to
the simplified cond_yield macro, and so the NEON specific ones can be
removed.

Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Acked-by: Will Deacon <will@kernel.org>
Link: https://lore.kernel.org/r/20210302090118.30666-2-ardb@kernel.org
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
parent 1e28eed176
commit 27248fe1ab
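For context, a minimal sketch of what a converted call site looks like after the switch. It assumes the simplified cond_yield macro takes a jump label plus a scratch register; the loop structure, labels and register choices below are purely illustrative rather than taken from any particular caller.

	// Hypothetical block-processing loop; only cond_yield itself comes
	// from the kernel, everything else here is an illustrative stand-in.
0:	/* ... process one block of input ... */
	subs	w2, w2, #1		// one block fewer remaining
	b.eq	2f			// all input consumed
	cond_yield	1f, x8		// branch to 1f if we ought to reschedule
	b	0b
1:	st1	{v0.4s, v1.4s}, [x0]	// stash partial state for the caller
	mov	w0, w2			// return the number of unprocessed blocks;
	ret				// the C caller cycles kernel_neon_end()/begin()
2:	mov	w0, #0			// everything done
	ret

With this scheme the yield itself happens in the C caller once the assembly returns early, so no per-call-site patchup code or kernel_neon_end()/kernel_neon_begin() calls are needed inside the assembly routine any more.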
@@ -692,76 +692,6 @@ USER(\label, ic ivau, \tmp2)		// invalidate I line PoU
 	isb
 	.endm
 
-/*
- * Check whether to yield to another runnable task from kernel mode NEON code
- * (which runs with preemption disabled).
- *
- * if_will_cond_yield_neon
- * // pre-yield patchup code
- * do_cond_yield_neon
- * // post-yield patchup code
- * endif_yield_neon <label>
- *
- * where <label> is optional, and marks the point where execution will resume
- * after a yield has been performed. If omitted, execution resumes right after
- * the endif_yield_neon invocation. Note that the entire sequence, including
- * the provided patchup code, will be omitted from the image if
- * CONFIG_PREEMPTION is not defined.
- *
- * As a convenience, in the case where no patchup code is required, the above
- * sequence may be abbreviated to
- *
- * cond_yield_neon <label>
- *
- * Note that the patchup code does not support assembler directives that change
- * the output section, any use of such directives is undefined.
- *
- * The yield itself consists of the following:
- * - Check whether the preempt count is exactly 1 and a reschedule is also
- *   needed. If so, calling of preempt_enable() in kernel_neon_end() will
- *   trigger a reschedule. If it is not the case, yielding is pointless.
- * - Disable and re-enable kernel mode NEON, and branch to the yield fixup
- *   code.
- *
- * This macro sequence may clobber all CPU state that is not guaranteed by the
- * AAPCS to be preserved across an ordinary function call.
- */
-
-	.macro		cond_yield_neon, lbl
-	if_will_cond_yield_neon
-	do_cond_yield_neon
-	endif_yield_neon	\lbl
-	.endm
-
-	.macro		if_will_cond_yield_neon
-#ifdef CONFIG_PREEMPTION
-	get_current_task	x0
-	ldr		x0, [x0, #TSK_TI_PREEMPT]
-	sub		x0, x0, #PREEMPT_DISABLE_OFFSET
-	cbz		x0, .Lyield_\@
-	/* fall through to endif_yield_neon */
-	.subsection	1
-.Lyield_\@ :
-#else
-	.section	".discard.cond_yield_neon", "ax"
-#endif
-	.endm
-
-	.macro		do_cond_yield_neon
-	bl		kernel_neon_end
-	bl		kernel_neon_begin
-	.endm
-
-	.macro		endif_yield_neon, lbl
-	.ifnb		\lbl
-	b		\lbl
-	.else
-	b		.Lyield_out_\@
-	.endif
-	.previous
-.Lyield_out_\@ :
-	.endm
-
 /*
  * Check whether preempt-disabled code should yield as soon as it
  * is able. This is the case if re-enabling preemption a single
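For comparison, a call site built on the macros removed above followed roughly the pattern sketched below, per the usage sequence documented in the removed comment. The save and reload instructions stand in for the pre- and post-yield patchup code, and the labels and registers are again illustrative only.

	// Illustrative only: the patchup instructions are placeholders for
	// whatever live NEON state the routine needs to spill around the yield.
0:	/* ... process one block of input ... */
	subs	w2, w2, #1		// one block fewer remaining
	b.eq	1f			// all input consumed

	if_will_cond_yield_neon
	st1	{v0.4s, v1.4s}, [x9]	// pre-yield patchup: save live state
	do_cond_yield_neon		// kernel_neon_end() + kernel_neon_begin()
	ld1	{v0.4s, v1.4s}, [x9]	// post-yield patchup: reload it
	endif_yield_neon	0b	// after a yield, resume the loop at 0b

	b	0b
1:	/* ... finalise and return ... */
	ret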