LoongArch: Re-tab the assembly files

Reflow the *.S files for better stylistic consistency, namely hard tabs
after mnemonic position, and vertical alignment of the first operand
with hard tabs. Tab width is obviously 8. Some pre-existing intra-block
vertical alignments are preserved.

Signed-off-by: WANG Xuerui <git@xen0n.name>
Signed-off-by: Huacai Chen <chenhuacai@loongson.cn>
Author:    WANG Xuerui <git@xen0n.name>
Date:      2022-07-26 23:57:22 +08:00
Committer: Huacai Chen <chenhuacai@loongson.cn>
Parent:    1fdb9a9249
Commit:    f5c3c22f21

7 changed files with 163 additions and 163 deletions
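
As a quick illustration of the convention described above (a minimal sketch
using two arbitrary instructions, not lines taken from this diff): one hard
tab follows each mnemonic, so at the stated tab width of 8 the first
operands fall into a single column.

	# Before re-tab: a single space after the mnemonic;
	# first operands do not line up.
	ld.d t0, a1, 0
	addi.d a0, a0, 128

	# After re-tab: one hard tab after the mnemonic;
	# first operands align at the next 8-column tab stop.
	ld.d	t0, a1, 0
	addi.d	a0, a0, 128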

diff --git a/arch/loongarch/kernel/entry.S b/arch/loongarch/kernel/entry.S

@@ -27,7 +27,7 @@ SYM_FUNC_START(handle_syscall)
 addi.d sp, sp, -PT_SIZE
 cfi_st t2, PT_R3
-cfi_rel_offset sp, PT_R3
+cfi_rel_offset sp, PT_R3
 st.d zero, sp, PT_R0
 csrrd t2, LOONGARCH_CSR_PRMD
 st.d t2, sp, PT_PRMD
@@ -50,7 +50,7 @@ SYM_FUNC_START(handle_syscall)
 cfi_st a7, PT_R11
 csrrd ra, LOONGARCH_CSR_ERA
 st.d ra, sp, PT_ERA
-cfi_rel_offset ra, PT_ERA
+cfi_rel_offset ra, PT_ERA
 cfi_st tp, PT_R2
 cfi_st u0, PT_R21

diff --git a/arch/loongarch/kernel/fpu.S b/arch/loongarch/kernel/fpu.S

@@ -27,78 +27,78 @@
 .endm

 .macro sc_save_fp base
-EX fst.d $f0, \base, (0 * FPU_REG_WIDTH)
-EX fst.d $f1, \base, (1 * FPU_REG_WIDTH)
-EX fst.d $f2, \base, (2 * FPU_REG_WIDTH)
-EX fst.d $f3, \base, (3 * FPU_REG_WIDTH)
-EX fst.d $f4, \base, (4 * FPU_REG_WIDTH)
-EX fst.d $f5, \base, (5 * FPU_REG_WIDTH)
-EX fst.d $f6, \base, (6 * FPU_REG_WIDTH)
-EX fst.d $f7, \base, (7 * FPU_REG_WIDTH)
-EX fst.d $f8, \base, (8 * FPU_REG_WIDTH)
-EX fst.d $f9, \base, (9 * FPU_REG_WIDTH)
-EX fst.d $f10, \base, (10 * FPU_REG_WIDTH)
-EX fst.d $f11, \base, (11 * FPU_REG_WIDTH)
-EX fst.d $f12, \base, (12 * FPU_REG_WIDTH)
-EX fst.d $f13, \base, (13 * FPU_REG_WIDTH)
-EX fst.d $f14, \base, (14 * FPU_REG_WIDTH)
-EX fst.d $f15, \base, (15 * FPU_REG_WIDTH)
-EX fst.d $f16, \base, (16 * FPU_REG_WIDTH)
-EX fst.d $f17, \base, (17 * FPU_REG_WIDTH)
-EX fst.d $f18, \base, (18 * FPU_REG_WIDTH)
-EX fst.d $f19, \base, (19 * FPU_REG_WIDTH)
-EX fst.d $f20, \base, (20 * FPU_REG_WIDTH)
-EX fst.d $f21, \base, (21 * FPU_REG_WIDTH)
-EX fst.d $f22, \base, (22 * FPU_REG_WIDTH)
-EX fst.d $f23, \base, (23 * FPU_REG_WIDTH)
-EX fst.d $f24, \base, (24 * FPU_REG_WIDTH)
-EX fst.d $f25, \base, (25 * FPU_REG_WIDTH)
-EX fst.d $f26, \base, (26 * FPU_REG_WIDTH)
-EX fst.d $f27, \base, (27 * FPU_REG_WIDTH)
-EX fst.d $f28, \base, (28 * FPU_REG_WIDTH)
-EX fst.d $f29, \base, (29 * FPU_REG_WIDTH)
-EX fst.d $f30, \base, (30 * FPU_REG_WIDTH)
-EX fst.d $f31, \base, (31 * FPU_REG_WIDTH)
+EX fst.d $f0, \base, (0 * FPU_REG_WIDTH)
+EX fst.d $f1, \base, (1 * FPU_REG_WIDTH)
+EX fst.d $f2, \base, (2 * FPU_REG_WIDTH)
+EX fst.d $f3, \base, (3 * FPU_REG_WIDTH)
+EX fst.d $f4, \base, (4 * FPU_REG_WIDTH)
+EX fst.d $f5, \base, (5 * FPU_REG_WIDTH)
+EX fst.d $f6, \base, (6 * FPU_REG_WIDTH)
+EX fst.d $f7, \base, (7 * FPU_REG_WIDTH)
+EX fst.d $f8, \base, (8 * FPU_REG_WIDTH)
+EX fst.d $f9, \base, (9 * FPU_REG_WIDTH)
+EX fst.d $f10, \base, (10 * FPU_REG_WIDTH)
+EX fst.d $f11, \base, (11 * FPU_REG_WIDTH)
+EX fst.d $f12, \base, (12 * FPU_REG_WIDTH)
+EX fst.d $f13, \base, (13 * FPU_REG_WIDTH)
+EX fst.d $f14, \base, (14 * FPU_REG_WIDTH)
+EX fst.d $f15, \base, (15 * FPU_REG_WIDTH)
+EX fst.d $f16, \base, (16 * FPU_REG_WIDTH)
+EX fst.d $f17, \base, (17 * FPU_REG_WIDTH)
+EX fst.d $f18, \base, (18 * FPU_REG_WIDTH)
+EX fst.d $f19, \base, (19 * FPU_REG_WIDTH)
+EX fst.d $f20, \base, (20 * FPU_REG_WIDTH)
+EX fst.d $f21, \base, (21 * FPU_REG_WIDTH)
+EX fst.d $f22, \base, (22 * FPU_REG_WIDTH)
+EX fst.d $f23, \base, (23 * FPU_REG_WIDTH)
+EX fst.d $f24, \base, (24 * FPU_REG_WIDTH)
+EX fst.d $f25, \base, (25 * FPU_REG_WIDTH)
+EX fst.d $f26, \base, (26 * FPU_REG_WIDTH)
+EX fst.d $f27, \base, (27 * FPU_REG_WIDTH)
+EX fst.d $f28, \base, (28 * FPU_REG_WIDTH)
+EX fst.d $f29, \base, (29 * FPU_REG_WIDTH)
+EX fst.d $f30, \base, (30 * FPU_REG_WIDTH)
+EX fst.d $f31, \base, (31 * FPU_REG_WIDTH)
 .endm

 .macro sc_restore_fp base
-EX fld.d $f0, \base, (0 * FPU_REG_WIDTH)
-EX fld.d $f1, \base, (1 * FPU_REG_WIDTH)
-EX fld.d $f2, \base, (2 * FPU_REG_WIDTH)
-EX fld.d $f3, \base, (3 * FPU_REG_WIDTH)
-EX fld.d $f4, \base, (4 * FPU_REG_WIDTH)
-EX fld.d $f5, \base, (5 * FPU_REG_WIDTH)
-EX fld.d $f6, \base, (6 * FPU_REG_WIDTH)
-EX fld.d $f7, \base, (7 * FPU_REG_WIDTH)
-EX fld.d $f8, \base, (8 * FPU_REG_WIDTH)
-EX fld.d $f9, \base, (9 * FPU_REG_WIDTH)
-EX fld.d $f10, \base, (10 * FPU_REG_WIDTH)
-EX fld.d $f11, \base, (11 * FPU_REG_WIDTH)
-EX fld.d $f12, \base, (12 * FPU_REG_WIDTH)
-EX fld.d $f13, \base, (13 * FPU_REG_WIDTH)
-EX fld.d $f14, \base, (14 * FPU_REG_WIDTH)
-EX fld.d $f15, \base, (15 * FPU_REG_WIDTH)
-EX fld.d $f16, \base, (16 * FPU_REG_WIDTH)
-EX fld.d $f17, \base, (17 * FPU_REG_WIDTH)
-EX fld.d $f18, \base, (18 * FPU_REG_WIDTH)
-EX fld.d $f19, \base, (19 * FPU_REG_WIDTH)
-EX fld.d $f20, \base, (20 * FPU_REG_WIDTH)
-EX fld.d $f21, \base, (21 * FPU_REG_WIDTH)
-EX fld.d $f22, \base, (22 * FPU_REG_WIDTH)
-EX fld.d $f23, \base, (23 * FPU_REG_WIDTH)
-EX fld.d $f24, \base, (24 * FPU_REG_WIDTH)
-EX fld.d $f25, \base, (25 * FPU_REG_WIDTH)
-EX fld.d $f26, \base, (26 * FPU_REG_WIDTH)
-EX fld.d $f27, \base, (27 * FPU_REG_WIDTH)
-EX fld.d $f28, \base, (28 * FPU_REG_WIDTH)
-EX fld.d $f29, \base, (29 * FPU_REG_WIDTH)
-EX fld.d $f30, \base, (30 * FPU_REG_WIDTH)
-EX fld.d $f31, \base, (31 * FPU_REG_WIDTH)
+EX fld.d $f0, \base, (0 * FPU_REG_WIDTH)
+EX fld.d $f1, \base, (1 * FPU_REG_WIDTH)
+EX fld.d $f2, \base, (2 * FPU_REG_WIDTH)
+EX fld.d $f3, \base, (3 * FPU_REG_WIDTH)
+EX fld.d $f4, \base, (4 * FPU_REG_WIDTH)
+EX fld.d $f5, \base, (5 * FPU_REG_WIDTH)
+EX fld.d $f6, \base, (6 * FPU_REG_WIDTH)
+EX fld.d $f7, \base, (7 * FPU_REG_WIDTH)
+EX fld.d $f8, \base, (8 * FPU_REG_WIDTH)
+EX fld.d $f9, \base, (9 * FPU_REG_WIDTH)
+EX fld.d $f10, \base, (10 * FPU_REG_WIDTH)
+EX fld.d $f11, \base, (11 * FPU_REG_WIDTH)
+EX fld.d $f12, \base, (12 * FPU_REG_WIDTH)
+EX fld.d $f13, \base, (13 * FPU_REG_WIDTH)
+EX fld.d $f14, \base, (14 * FPU_REG_WIDTH)
+EX fld.d $f15, \base, (15 * FPU_REG_WIDTH)
+EX fld.d $f16, \base, (16 * FPU_REG_WIDTH)
+EX fld.d $f17, \base, (17 * FPU_REG_WIDTH)
+EX fld.d $f18, \base, (18 * FPU_REG_WIDTH)
+EX fld.d $f19, \base, (19 * FPU_REG_WIDTH)
+EX fld.d $f20, \base, (20 * FPU_REG_WIDTH)
+EX fld.d $f21, \base, (21 * FPU_REG_WIDTH)
+EX fld.d $f22, \base, (22 * FPU_REG_WIDTH)
+EX fld.d $f23, \base, (23 * FPU_REG_WIDTH)
+EX fld.d $f24, \base, (24 * FPU_REG_WIDTH)
+EX fld.d $f25, \base, (25 * FPU_REG_WIDTH)
+EX fld.d $f26, \base, (26 * FPU_REG_WIDTH)
+EX fld.d $f27, \base, (27 * FPU_REG_WIDTH)
+EX fld.d $f28, \base, (28 * FPU_REG_WIDTH)
+EX fld.d $f29, \base, (29 * FPU_REG_WIDTH)
+EX fld.d $f30, \base, (30 * FPU_REG_WIDTH)
+EX fld.d $f31, \base, (31 * FPU_REG_WIDTH)
 .endm

 .macro sc_save_fcc base, tmp0, tmp1
 movcf2gr \tmp0, $fcc0
-move \tmp1, \tmp0
+move \tmp1, \tmp0
 movcf2gr \tmp0, $fcc1
 bstrins.d \tmp1, \tmp0, 15, 8
 movcf2gr \tmp0, $fcc2
@@ -113,11 +113,11 @@
 bstrins.d \tmp1, \tmp0, 55, 48
 movcf2gr \tmp0, $fcc7
 bstrins.d \tmp1, \tmp0, 63, 56
-EX st.d \tmp1, \base, 0
+EX st.d \tmp1, \base, 0
 .endm

 .macro sc_restore_fcc base, tmp0, tmp1
-EX ld.d \tmp0, \base, 0
+EX ld.d \tmp0, \base, 0
 bstrpick.d \tmp1, \tmp0, 7, 0
 movgr2cf $fcc0, \tmp1
 bstrpick.d \tmp1, \tmp0, 15, 8
@@ -138,11 +138,11 @@
 .macro sc_save_fcsr base, tmp0
 movfcsr2gr \tmp0, fcsr0
-EX st.w \tmp0, \base, 0
+EX st.w \tmp0, \base, 0
 .endm

 .macro sc_restore_fcsr base, tmp0
-EX ld.w \tmp0, \base, 0
+EX ld.w \tmp0, \base, 0
 movgr2fcsr fcsr0, \tmp0
 .endm
@@ -151,9 +151,9 @@
 */
 SYM_FUNC_START(_save_fp)
 fpu_save_csr a0 t1
-fpu_save_double a0 t1 # clobbers t1
+fpu_save_double a0 t1 # clobbers t1
 fpu_save_cc a0 t1 t2 # clobbers t1, t2
-jr ra
+jr ra
 SYM_FUNC_END(_save_fp)
 EXPORT_SYMBOL(_save_fp)

@@ -161,10 +161,10 @@ EXPORT_SYMBOL(_save_fp)
 * Restore a thread's fp context.
 */
 SYM_FUNC_START(_restore_fp)
-fpu_restore_double a0 t1 # clobbers t1
-fpu_restore_csr a0 t1
-fpu_restore_cc a0 t1 t2 # clobbers t1, t2
-jr ra
+fpu_restore_double a0 t1 # clobbers t1
+fpu_restore_csr a0 t1
+fpu_restore_cc a0 t1 t2 # clobbers t1, t2
+jr ra
 SYM_FUNC_END(_restore_fp)

 /*
@@ -225,11 +225,11 @@ SYM_FUNC_END(_init_fpu)
 * a2: fcsr
 */
 SYM_FUNC_START(_save_fp_context)
-sc_save_fcc a1 t1 t2
-sc_save_fcsr a2 t1
-sc_save_fp a0
-li.w a0, 0 # success
-jr ra
+sc_save_fcc a1 t1 t2
+sc_save_fcsr a2 t1
+sc_save_fp a0
+li.w a0, 0 # success
+jr ra
 SYM_FUNC_END(_save_fp_context)

 /*
@@ -238,11 +238,11 @@ SYM_FUNC_END(_save_fp_context)
 * a2: fcsr
 */
 SYM_FUNC_START(_restore_fp_context)
-sc_restore_fp a0
-sc_restore_fcc a1 t1 t2
-sc_restore_fcsr a2 t1
-li.w a0, 0 # success
-jr ra
+sc_restore_fp a0
+sc_restore_fcc a1 t1 t2
+sc_restore_fcsr a2 t1
+li.w a0, 0 # success
+jr ra
 SYM_FUNC_END(_restore_fp_context)

 SYM_FUNC_START(fault)

diff --git a/arch/loongarch/kernel/genex.S b/arch/loongarch/kernel/genex.S

@@ -35,16 +35,16 @@ SYM_FUNC_START(handle_vint)
 BACKUP_T0T1
 SAVE_ALL
 la.abs t1, __arch_cpu_idle
-LONG_L t0, sp, PT_ERA
+LONG_L t0, sp, PT_ERA
 /* 32 byte rollback region */
 ori t0, t0, 0x1f
 xori t0, t0, 0x1f
 bne t0, t1, 1f
-LONG_S t0, sp, PT_ERA
+LONG_S t0, sp, PT_ERA
 1: move a0, sp
 move a1, sp
 la.abs t0, do_vint
-jirl ra, t0, 0
+jirl ra, t0, 0
 RESTORE_ALL_AND_RET
 SYM_FUNC_END(handle_vint)

@@ -72,7 +72,7 @@ SYM_FUNC_END(except_vec_cex)
 build_prep_\prep
 move a0, sp
 la.abs t0, do_\handler
-jirl ra, t0, 0
+jirl ra, t0, 0
 RESTORE_ALL_AND_RET
 SYM_FUNC_END(handle_\exception)
 .endm

diff --git a/arch/loongarch/kernel/head.S b/arch/loongarch/kernel/head.S

@@ -85,8 +85,8 @@ SYM_CODE_START(smpboot_entry)
 ld.d sp, t0, CPU_BOOT_STACK
 ld.d tp, t0, CPU_BOOT_TINFO
-la.abs t0, 0f
-jr t0
+la.abs t0, 0f
+jr t0
 0:
 bl start_secondary
 SYM_CODE_END(smpboot_entry)


diff --git a/arch/loongarch/kernel/switch.S b/arch/loongarch/kernel/switch.S

@@ -24,8 +24,8 @@ SYM_FUNC_START(__switch_to)
 move tp, a2
 cpu_restore_nonscratch a1

-li.w t0, _THREAD_SIZE - 32
-PTR_ADD t0, t0, tp
+li.w t0, _THREAD_SIZE - 32
+PTR_ADD t0, t0, tp
 set_saved_sp t0, t1, t2

 ldptr.d t1, a1, THREAD_CSRPRMD

diff --git a/arch/loongarch/mm/page.S b/arch/loongarch/mm/page.S

@@ -10,75 +10,75 @@
 .align 5
 SYM_FUNC_START(clear_page)
-lu12i.w t0, 1 << (PAGE_SHIFT - 12)
-add.d t0, t0, a0
+lu12i.w t0, 1 << (PAGE_SHIFT - 12)
+add.d t0, t0, a0
 1:
-st.d zero, a0, 0
-st.d zero, a0, 8
-st.d zero, a0, 16
-st.d zero, a0, 24
-st.d zero, a0, 32
-st.d zero, a0, 40
-st.d zero, a0, 48
-st.d zero, a0, 56
-addi.d a0, a0, 128
-st.d zero, a0, -64
-st.d zero, a0, -56
-st.d zero, a0, -48
-st.d zero, a0, -40
-st.d zero, a0, -32
-st.d zero, a0, -24
-st.d zero, a0, -16
-st.d zero, a0, -8
-bne t0, a0, 1b
+st.d zero, a0, 0
+st.d zero, a0, 8
+st.d zero, a0, 16
+st.d zero, a0, 24
+st.d zero, a0, 32
+st.d zero, a0, 40
+st.d zero, a0, 48
+st.d zero, a0, 56
+addi.d a0, a0, 128
+st.d zero, a0, -64
+st.d zero, a0, -56
+st.d zero, a0, -48
+st.d zero, a0, -40
+st.d zero, a0, -32
+st.d zero, a0, -24
+st.d zero, a0, -16
+st.d zero, a0, -8
+bne t0, a0, 1b

-jr ra
+jr ra
 SYM_FUNC_END(clear_page)
 EXPORT_SYMBOL(clear_page)

 .align 5
 SYM_FUNC_START(copy_page)
-lu12i.w t8, 1 << (PAGE_SHIFT - 12)
-add.d t8, t8, a0
+lu12i.w t8, 1 << (PAGE_SHIFT - 12)
+add.d t8, t8, a0
 1:
-ld.d t0, a1, 0
-ld.d t1, a1, 8
-ld.d t2, a1, 16
-ld.d t3, a1, 24
-ld.d t4, a1, 32
-ld.d t5, a1, 40
-ld.d t6, a1, 48
-ld.d t7, a1, 56
+ld.d t0, a1, 0
+ld.d t1, a1, 8
+ld.d t2, a1, 16
+ld.d t3, a1, 24
+ld.d t4, a1, 32
+ld.d t5, a1, 40
+ld.d t6, a1, 48
+ld.d t7, a1, 56
-st.d t0, a0, 0
-st.d t1, a0, 8
-ld.d t0, a1, 64
-ld.d t1, a1, 72
-st.d t2, a0, 16
-st.d t3, a0, 24
-ld.d t2, a1, 80
-ld.d t3, a1, 88
-st.d t4, a0, 32
-st.d t5, a0, 40
-ld.d t4, a1, 96
-ld.d t5, a1, 104
-st.d t6, a0, 48
-st.d t7, a0, 56
-ld.d t6, a1, 112
-ld.d t7, a1, 120
-addi.d a0, a0, 128
-addi.d a1, a1, 128
+st.d t0, a0, 0
+st.d t1, a0, 8
+ld.d t0, a1, 64
+ld.d t1, a1, 72
+st.d t2, a0, 16
+st.d t3, a0, 24
+ld.d t2, a1, 80
+ld.d t3, a1, 88
+st.d t4, a0, 32
+st.d t5, a0, 40
+ld.d t4, a1, 96
+ld.d t5, a1, 104
+st.d t6, a0, 48
+st.d t7, a0, 56
+ld.d t6, a1, 112
+ld.d t7, a1, 120
+addi.d a0, a0, 128
+addi.d a1, a1, 128
-st.d t0, a0, -64
-st.d t1, a0, -56
-st.d t2, a0, -48
-st.d t3, a0, -40
-st.d t4, a0, -32
-st.d t5, a0, -24
-st.d t6, a0, -16
-st.d t7, a0, -8
+st.d t0, a0, -64
+st.d t1, a0, -56
+st.d t2, a0, -48
+st.d t3, a0, -40
+st.d t4, a0, -32
+st.d t5, a0, -24
+st.d t6, a0, -16
+st.d t7, a0, -8
-bne t8, a0, 1b
-jr ra
+bne t8, a0, 1b
+jr ra
 SYM_FUNC_END(copy_page)
 EXPORT_SYMBOL(copy_page)

diff --git a/arch/loongarch/mm/tlbex.S b/arch/loongarch/mm/tlbex.S

@@ -18,7 +18,7 @@
 REG_S a2, sp, PT_BVADDR
 li.w a1, \write
 la.abs t0, do_page_fault
-jirl ra, t0, 0
+jirl ra, t0, 0
 RESTORE_ALL_AND_RET
 SYM_FUNC_END(tlb_do_page_fault_\write)
 .endm
@@ -34,7 +34,7 @@ SYM_FUNC_START(handle_tlb_protect)
 csrrd a2, LOONGARCH_CSR_BADV
 REG_S a2, sp, PT_BVADDR
 la.abs t0, do_page_fault
-jirl ra, t0, 0
+jirl ra, t0, 0
 RESTORE_ALL_AND_RET
 SYM_FUNC_END(handle_tlb_protect)

@@ -151,8 +151,8 @@ tlb_huge_update_load:
 st.d t0, t1, 0
 #endif
 addu16i.d t1, zero, -(CSR_TLBIDX_EHINV >> 16)
-addi.d ra, t1, 0
-csrxchg ra, t1, LOONGARCH_CSR_TLBIDX
+addi.d ra, t1, 0
+csrxchg ra, t1, LOONGARCH_CSR_TLBIDX
 tlbwr

 csrxchg zero, t1, LOONGARCH_CSR_TLBIDX
@@ -319,8 +319,8 @@ tlb_huge_update_store:
 st.d t0, t1, 0
 #endif
 addu16i.d t1, zero, -(CSR_TLBIDX_EHINV >> 16)
-addi.d ra, t1, 0
-csrxchg ra, t1, LOONGARCH_CSR_TLBIDX
+addi.d ra, t1, 0
+csrxchg ra, t1, LOONGARCH_CSR_TLBIDX
 tlbwr

 csrxchg zero, t1, LOONGARCH_CSR_TLBIDX
@@ -454,7 +454,7 @@ leave_modify:
 ertn

 #ifdef CONFIG_64BIT
 vmalloc_modify:
-la.abs t1, swapper_pg_dir
+la.abs t1, swapper_pg_dir
 b vmalloc_done_modify
 #endif
@@ -512,14 +512,14 @@ tlb_huge_update_modify:
 /* Set huge page tlb entry size */
 addu16i.d t0, zero, (CSR_TLBIDX_PS >> 16)
 addu16i.d t1, zero, (PS_HUGE_SIZE << (CSR_TLBIDX_PS_SHIFT - 16))
-csrxchg t1, t0, LOONGARCH_CSR_TLBIDX
+csrxchg t1, t0, LOONGARCH_CSR_TLBIDX

 tlbwr

 /* Reset default page size */
 addu16i.d t0, zero, (CSR_TLBIDX_PS >> 16)
 addu16i.d t1, zero, (PS_DEFAULT_SIZE << (CSR_TLBIDX_PS_SHIFT - 16))
-csrxchg t1, t0, LOONGARCH_CSR_TLBIDX
+csrxchg t1, t0, LOONGARCH_CSR_TLBIDX

 nopage_tlb_modify:
 dbar 0