OMAP3: ASM sleep code format rework

Cosmetic fixes to the code:
- white spaces and tabs,
- alignment,
- comments rephrase and typos,
- multi-line comments

Tested on N900 and Beagleboard with full RET and OFF modes,
using cpuidle and suspend.

Signed-off-by: Jean Pihet <j-pihet@ti.com>
Acked-by: Santosh Shilimkar <santosh.shilimkar@ti.com>
Tested-by: Nishanth Menon <nm@ti.com>
Signed-off-by: Kevin Hilman <khilman@deeprootsystems.com>
This commit is contained in:
Jean Pihet 2010-12-18 16:49:57 +01:00 committed by Kevin Hilman
parent 8352129166
commit bb1c9034b3
1 changed file with 117 additions and 107 deletions

View File

@ -1,6 +1,4 @@
/* /*
* linux/arch/arm/mach-omap2/sleep.S
*
* (C) Copyright 2007 * (C) Copyright 2007
* Texas Instruments * Texas Instruments
* Karthik Dasu <karthik-dp@ti.com> * Karthik Dasu <karthik-dp@ti.com>
@ -81,20 +79,20 @@
.text .text
/* Function call to get the restore pointer for resume from OFF */ /* Function call to get the restore pointer for resume from OFF */
ENTRY(get_restore_pointer) ENTRY(get_restore_pointer)
stmfd sp!, {lr} @ save registers on stack stmfd sp!, {lr} @ save registers on stack
adr r0, restore adr r0, restore
ldmfd sp!, {pc} @ restore regs and return ldmfd sp!, {pc} @ restore regs and return
ENTRY(get_restore_pointer_sz) ENTRY(get_restore_pointer_sz)
.word . - get_restore_pointer .word . - get_restore_pointer
.text .text
/* Function call to get the restore pointer for 3630 resume from OFF */ /* Function call to get the restore pointer for 3630 resume from OFF */
ENTRY(get_omap3630_restore_pointer) ENTRY(get_omap3630_restore_pointer)
stmfd sp!, {lr} @ save registers on stack stmfd sp!, {lr} @ save registers on stack
adr r0, restore_3630 adr r0, restore_3630
ldmfd sp!, {pc} @ restore regs and return ldmfd sp!, {pc} @ restore regs and return
ENTRY(get_omap3630_restore_pointer_sz) ENTRY(get_omap3630_restore_pointer_sz)
.word . - get_omap3630_restore_pointer .word . - get_omap3630_restore_pointer
.text .text
/* Function call to get the restore pointer for ES3 to resume from OFF */ /* Function call to get the restore pointer for ES3 to resume from OFF */
@ -112,16 +110,16 @@ ENTRY(get_es3_restore_pointer_sz)
* place on 3630. Hopefully some version in the future may not need this. * place on 3630. Hopefully some version in the future may not need this.
*/ */
ENTRY(enable_omap3630_toggle_l2_on_restore) ENTRY(enable_omap3630_toggle_l2_on_restore)
stmfd sp!, {lr} @ save registers on stack stmfd sp!, {lr} @ save registers on stack
/* Setup so that we will disable and enable l2 */ /* Setup so that we will disable and enable l2 */
mov r1, #0x1 mov r1, #0x1
str r1, l2dis_3630 str r1, l2dis_3630
ldmfd sp!, {pc} @ restore regs and return ldmfd sp!, {pc} @ restore regs and return
.text
/* Function to call rom code to save secure ram context */ /* Function to call rom code to save secure ram context */
ENTRY(save_secure_ram_context) ENTRY(save_secure_ram_context)
stmfd sp!, {r1-r12, lr} @ save registers on stack stmfd sp!, {r1-r12, lr} @ save registers on stack
adr r3, api_params @ r3 points to parameters adr r3, api_params @ r3 points to parameters
str r0, [r3,#0x4] @ r0 has sdram address str r0, [r3,#0x4] @ r0 has sdram address
ldr r12, high_mask ldr r12, high_mask
@ -165,14 +163,14 @@ ENTRY(save_secure_ram_context_sz)
* *
* *
* Notes: * Notes:
* - this code gets copied to internal SRAM at boot. The execution pointer * - this code gets copied to internal SRAM at boot and after wake-up
* in SRAM is _omap_sram_idle. * from OFF mode. The execution pointer in SRAM is _omap_sram_idle.
* - when the OMAP wakes up it continues at different execution points * - when the OMAP wakes up it continues at different execution points
* depending on the low power mode (non-OFF vs OFF modes), * depending on the low power mode (non-OFF vs OFF modes),
* cf. 'Resume path for xxx mode' comments. * cf. 'Resume path for xxx mode' comments.
*/ */
ENTRY(omap34xx_cpu_suspend) ENTRY(omap34xx_cpu_suspend)
stmfd sp!, {r0-r12, lr} @ save registers on stack stmfd sp!, {r0-r12, lr} @ save registers on stack
/* /*
* r0 contains restore pointer in sdram * r0 contains restore pointer in sdram
@ -280,9 +278,9 @@ clean_l2:
* - should be faster and will change with kernel * - should be faster and will change with kernel
* - 'might' have to copy address, load and jump to it * - 'might' have to copy address, load and jump to it
*/ */
ldr r1, kernel_flush ldr r1, kernel_flush
mov lr, pc mov lr, pc
bx r1 bx r1
omap3_do_wfi: omap3_do_wfi:
ldr r4, sdrc_power @ read the SDRC_POWER register ldr r4, sdrc_power @ read the SDRC_POWER register
@ -375,18 +373,18 @@ restore_3630:
/* Fall through to common code for the remaining logic */ /* Fall through to common code for the remaining logic */
restore: restore:
/* /*
* Check what was the reason for mpu reset and store the reason in r9: * Check what was the reason for mpu reset and store the reason in r9:
* 0 - No context lost * 0 - No context lost
* 1 - Only L1 and logic lost * 1 - Only L1 and logic lost
* 2 - Only L2 lost - In this case, we wont be here * 2 - Only L2 lost - In this case, we wont be here
* 3 - Both L1 and L2 lost * 3 - Both L1 and L2 lost
*/ */
ldr r1, pm_pwstctrl_mpu ldr r1, pm_pwstctrl_mpu
ldr r2, [r1] ldr r2, [r1]
and r2, r2, #0x3 and r2, r2, #0x3
cmp r2, #0x0 @ Check if target power state was OFF or RET cmp r2, #0x0 @ Check if target power state was OFF or RET
moveq r9, #0x3 @ MPU OFF => L1 and L2 lost moveq r9, #0x3 @ MPU OFF => L1 and L2 lost
movne r9, #0x1 @ Only L1 and L2 lost => avoid L2 invalidation movne r9, #0x1 @ Only L1 and L2 lost => avoid L2 invalidation
bne logic_l1_restore bne logic_l1_restore
@ -402,71 +400,74 @@ skipl2dis:
and r1, #0x700 and r1, #0x700
cmp r1, #0x300 cmp r1, #0x300
beq l2_inv_gp beq l2_inv_gp
mov r0, #40 @ set service ID for PPA mov r0, #40 @ set service ID for PPA
mov r12, r0 @ copy secure Service ID in r12 mov r12, r0 @ copy secure Service ID in r12
mov r1, #0 @ set task id for ROM code in r1 mov r1, #0 @ set task id for ROM code in r1
mov r2, #4 @ set some flags in r2, r6 mov r2, #4 @ set some flags in r2, r6
mov r6, #0xff mov r6, #0xff
adr r3, l2_inv_api_params @ r3 points to dummy parameters adr r3, l2_inv_api_params @ r3 points to dummy parameters
mcr p15, 0, r0, c7, c10, 4 @ data write barrier mcr p15, 0, r0, c7, c10, 4 @ data write barrier
mcr p15, 0, r0, c7, c10, 5 @ data memory barrier mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
.word 0xE1600071 @ call SMI monitor (smi #1) .word 0xE1600071 @ call SMI monitor (smi #1)
/* Write to Aux control register to set some bits */ /* Write to Aux control register to set some bits */
mov r0, #42 @ set service ID for PPA mov r0, #42 @ set service ID for PPA
mov r12, r0 @ copy secure Service ID in r12 mov r12, r0 @ copy secure Service ID in r12
mov r1, #0 @ set task id for ROM code in r1 mov r1, #0 @ set task id for ROM code in r1
mov r2, #4 @ set some flags in r2, r6 mov r2, #4 @ set some flags in r2, r6
mov r6, #0xff mov r6, #0xff
ldr r4, scratchpad_base ldr r4, scratchpad_base
ldr r3, [r4, #0xBC] @ r3 points to parameters ldr r3, [r4, #0xBC] @ r3 points to parameters
mcr p15, 0, r0, c7, c10, 4 @ data write barrier mcr p15, 0, r0, c7, c10, 4 @ data write barrier
mcr p15, 0, r0, c7, c10, 5 @ data memory barrier mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
.word 0xE1600071 @ call SMI monitor (smi #1) .word 0xE1600071 @ call SMI monitor (smi #1)
#ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE #ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE
/* Restore L2 aux control register */ /* Restore L2 aux control register */
@ set service ID for PPA @ set service ID for PPA
mov r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID mov r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID
mov r12, r0 @ copy service ID in r12 mov r12, r0 @ copy service ID in r12
mov r1, #0 @ set task ID for ROM code in r1 mov r1, #0 @ set task ID for ROM code in r1
mov r2, #4 @ set some flags in r2, r6 mov r2, #4 @ set some flags in r2, r6
mov r6, #0xff mov r6, #0xff
ldr r4, scratchpad_base ldr r4, scratchpad_base
ldr r3, [r4, #0xBC] ldr r3, [r4, #0xBC]
adds r3, r3, #8 @ r3 points to parameters adds r3, r3, #8 @ r3 points to parameters
mcr p15, 0, r0, c7, c10, 4 @ data write barrier mcr p15, 0, r0, c7, c10, 4 @ data write barrier
mcr p15, 0, r0, c7, c10, 5 @ data memory barrier mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
.word 0xE1600071 @ call SMI monitor (smi #1) .word 0xE1600071 @ call SMI monitor (smi #1)
#endif #endif
b logic_l1_restore b logic_l1_restore
l2_inv_api_params: l2_inv_api_params:
.word 0x1, 0x00 .word 0x1, 0x00
l2_inv_gp: l2_inv_gp:
/* Execute smi to invalidate L2 cache */ /* Execute smi to invalidate L2 cache */
mov r12, #0x1 @ set up to invalide L2 mov r12, #0x1 @ set up to invalidate L2
smi: .word 0xE1600070 @ Call SMI monitor (smieq) .word 0xE1600070 @ Call SMI monitor (smieq)
/* Write to Aux control register to set some bits */ /* Write to Aux control register to set some bits */
ldr r4, scratchpad_base ldr r4, scratchpad_base
ldr r3, [r4,#0xBC] ldr r3, [r4,#0xBC]
ldr r0, [r3,#4] ldr r0, [r3,#4]
mov r12, #0x3 mov r12, #0x3
.word 0xE1600070 @ Call SMI monitor (smieq) .word 0xE1600070 @ Call SMI monitor (smieq)
ldr r4, scratchpad_base ldr r4, scratchpad_base
ldr r3, [r4,#0xBC] ldr r3, [r4,#0xBC]
ldr r0, [r3,#12] ldr r0, [r3,#12]
mov r12, #0x2 mov r12, #0x2
.word 0xE1600070 @ Call SMI monitor (smieq) .word 0xE1600070 @ Call SMI monitor (smieq)
logic_l1_restore: logic_l1_restore:
ldr r1, l2dis_3630 ldr r1, l2dis_3630
cmp r1, #0x1 @ Do we need to re-enable L2 on 3630? cmp r1, #0x1 @ Test if L2 re-enable needed on 3630
bne skipl2reen bne skipl2reen
mrc p15, 0, r1, c1, c0, 1 mrc p15, 0, r1, c1, c0, 1
orr r1, r1, #2 @ re-enable L2 cache orr r1, r1, #2 @ re-enable L2 cache
mcr p15, 0, r1, c1, c0, 1 mcr p15, 0, r1, c1, c0, 1
skipl2reen: skipl2reen:
mov r1, #0 mov r1, #0
/* Invalidate all instruction caches to PoU /*
* and flush branch target cache */ * Invalidate all instruction caches to PoU
* and flush branch target cache
*/
mcr p15, 0, r1, c7, c5, 0 mcr p15, 0, r1, c7, c5, 0
ldr r4, scratchpad_base ldr r4, scratchpad_base
@ -487,33 +488,33 @@ skipl2reen:
MCR p15, 0, r6, c2, c0, 1 MCR p15, 0, r6, c2, c0, 1
/* Translation table base control register */ /* Translation table base control register */
MCR p15, 0, r7, c2, c0, 2 MCR p15, 0, r7, c2, c0, 2
/*domain access Control Register */ /* Domain access Control Register */
MCR p15, 0, r8, c3, c0, 0 MCR p15, 0, r8, c3, c0, 0
/* data fault status Register */ /* Data fault status Register */
MCR p15, 0, r9, c5, c0, 0 MCR p15, 0, r9, c5, c0, 0
ldmia r3!,{r4-r8} ldmia r3!,{r4-r8}
/* instruction fault status Register */ /* Instruction fault status Register */
MCR p15, 0, r4, c5, c0, 1 MCR p15, 0, r4, c5, c0, 1
/*Data Auxiliary Fault Status Register */ /* Data Auxiliary Fault Status Register */
MCR p15, 0, r5, c5, c1, 0 MCR p15, 0, r5, c5, c1, 0
/*Instruction Auxiliary Fault Status Register*/ /* Instruction Auxiliary Fault Status Register*/
MCR p15, 0, r6, c5, c1, 1 MCR p15, 0, r6, c5, c1, 1
/*Data Fault Address Register */ /* Data Fault Address Register */
MCR p15, 0, r7, c6, c0, 0 MCR p15, 0, r7, c6, c0, 0
/*Instruction Fault Address Register*/ /* Instruction Fault Address Register*/
MCR p15, 0, r8, c6, c0, 2 MCR p15, 0, r8, c6, c0, 2
ldmia r3!,{r4-r7} ldmia r3!,{r4-r7}
/* user r/w thread and process ID */ /* User r/w thread and process ID */
MCR p15, 0, r4, c13, c0, 2 MCR p15, 0, r4, c13, c0, 2
/* user ro thread and process ID */ /* User ro thread and process ID */
MCR p15, 0, r5, c13, c0, 3 MCR p15, 0, r5, c13, c0, 3
/*Privileged only thread and process ID */ /* Privileged only thread and process ID */
MCR p15, 0, r6, c13, c0, 4 MCR p15, 0, r6, c13, c0, 4
/* cache size selection */ /* Cache size selection */
MCR p15, 2, r7, c0, c0, 0 MCR p15, 2, r7, c0, c0, 0
ldmia r3!,{r4-r8} ldmia r3!,{r4-r8}
/* Data TLB lockdown registers */ /* Data TLB lockdown registers */
MCR p15, 0, r4, c10, c0, 0 MCR p15, 0, r4, c10, c0, 0
/* Instruction TLB lockdown registers */ /* Instruction TLB lockdown registers */
@ -525,26 +526,27 @@ skipl2reen:
/* Context PID */ /* Context PID */
MCR p15, 0, r8, c13, c0, 1 MCR p15, 0, r8, c13, c0, 1
ldmia r3!,{r4-r5} ldmia r3!,{r4-r5}
/* primary memory remap register */ /* Primary memory remap register */
MCR p15, 0, r4, c10, c2, 0 MCR p15, 0, r4, c10, c2, 0
/*normal memory remap register */ /* Normal memory remap register */
MCR p15, 0, r5, c10, c2, 1 MCR p15, 0, r5, c10, c2, 1
/* Restore cpsr */ /* Restore cpsr */
ldmia r3!,{r4} /*load CPSR from SDRAM*/ ldmia r3!,{r4} @ load CPSR from SDRAM
msr cpsr, r4 /*store cpsr */ msr cpsr, r4 @ store cpsr
/* Enabling MMU here */ /* Enabling MMU here */
mrc p15, 0, r7, c2, c0, 2 /* Read TTBRControl */ mrc p15, 0, r7, c2, c0, 2 @ Read TTBRControl
/* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1*/ /* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1 */
and r7, #0x7 and r7, #0x7
cmp r7, #0x0 cmp r7, #0x0
beq usettbr0 beq usettbr0
ttbr_error: ttbr_error:
/* More work needs to be done to support N[0:2] value other than 0 /*
* So looping here so that the error can be detected * More work needs to be done to support N[0:2] value other than 0
*/ * So looping here so that the error can be detected
*/
b ttbr_error b ttbr_error
usettbr0: usettbr0:
mrc p15, 0, r2, c2, c0, 0 mrc p15, 0, r2, c2, c0, 0
@ -552,21 +554,25 @@ usettbr0:
and r2, r5 and r2, r5
mov r4, pc mov r4, pc
ldr r5, table_index_mask ldr r5, table_index_mask
and r4, r5 /* r4 = 31 to 20 bits of pc */ and r4, r5 @ r4 = 31 to 20 bits of pc
/* Extract the value to be written to table entry */ /* Extract the value to be written to table entry */
ldr r1, table_entry ldr r1, table_entry
add r1, r1, r4 /* r1 has value to be written to table entry*/ /* r1 has the value to be written to table entry*/
add r1, r1, r4
/* Getting the address of table entry to modify */ /* Getting the address of table entry to modify */
lsr r4, #18 lsr r4, #18
add r2, r4 /* r2 has the location which needs to be modified */ /* r2 has the location which needs to be modified */
add r2, r4
/* Storing previous entry of location being modified */ /* Storing previous entry of location being modified */
ldr r5, scratchpad_base ldr r5, scratchpad_base
ldr r4, [r2] ldr r4, [r2]
str r4, [r5, #0xC0] str r4, [r5, #0xC0]
/* Modify the table entry */ /* Modify the table entry */
str r1, [r2] str r1, [r2]
/* Storing address of entry being modified /*
* - will be restored after enabling MMU */ * Storing address of entry being modified
* - will be restored after enabling MMU
*/
ldr r5, scratchpad_base ldr r5, scratchpad_base
str r2, [r5, #0xC4] str r2, [r5, #0xC4]
@ -575,8 +581,11 @@ usettbr0:
mcr p15, 0, r0, c7, c5, 6 @ Invalidate branch predictor array mcr p15, 0, r0, c7, c5, 6 @ Invalidate branch predictor array
mcr p15, 0, r0, c8, c5, 0 @ Invalidate instruction TLB mcr p15, 0, r0, c8, c5, 0 @ Invalidate instruction TLB
mcr p15, 0, r0, c8, c6, 0 @ Invalidate data TLB mcr p15, 0, r0, c8, c6, 0 @ Invalidate data TLB
/* Restore control register but dont enable caches here*/ /*
/* Caches will be enabled after restoring MMU table entry */ * Restore control register. This enables the MMU.
* The caches and prediction are not enabled here, they
* will be enabled after restoring the MMU table entry.
*/
ldmia r3!, {r4} ldmia r3!, {r4}
/* Store previous value of control register in scratchpad */ /* Store previous value of control register in scratchpad */
str r4, [r5, #0xC8] str r4, [r5, #0xC8]
@ -589,7 +598,7 @@ usettbr0:
* == Exit point from OFF mode == * == Exit point from OFF mode ==
* ============================== * ==============================
*/ */
ldmfd sp!, {r0-r12, pc} @ restore regs and return ldmfd sp!, {r0-r12, pc} @ restore regs and return
/* /*
@ -655,55 +664,56 @@ ENTRY(es3_sdrc_fix_sz)
/* Make sure SDRC accesses are ok */ /* Make sure SDRC accesses are ok */
wait_sdrc_ok: wait_sdrc_ok:
/* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this. */ /* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this */
ldr r4, cm_idlest_ckgen ldr r4, cm_idlest_ckgen
wait_dpll3_lock: wait_dpll3_lock:
ldr r5, [r4] ldr r5, [r4]
tst r5, #1 tst r5, #1
beq wait_dpll3_lock beq wait_dpll3_lock
ldr r4, cm_idlest1_core ldr r4, cm_idlest1_core
wait_sdrc_ready: wait_sdrc_ready:
ldr r5, [r4] ldr r5, [r4]
tst r5, #0x2 tst r5, #0x2
bne wait_sdrc_ready bne wait_sdrc_ready
/* allow DLL powerdown upon hw idle req */ /* allow DLL powerdown upon hw idle req */
ldr r4, sdrc_power ldr r4, sdrc_power
ldr r5, [r4] ldr r5, [r4]
bic r5, r5, #0x40 bic r5, r5, #0x40
str r5, [r4] str r5, [r4]
is_dll_in_lock_mode:
/* Is dll in lock mode? */ is_dll_in_lock_mode:
ldr r4, sdrc_dlla_ctrl /* Is dll in lock mode? */
ldr r5, [r4] ldr r4, sdrc_dlla_ctrl
tst r5, #0x4 ldr r5, [r4]
bxne lr tst r5, #0x4
/* wait till dll locks */ bxne lr @ Return if locked
/* wait till dll locks */
wait_dll_lock_timed: wait_dll_lock_timed:
ldr r4, wait_dll_lock_counter ldr r4, wait_dll_lock_counter
add r4, r4, #1 add r4, r4, #1
str r4, wait_dll_lock_counter str r4, wait_dll_lock_counter
ldr r4, sdrc_dlla_status ldr r4, sdrc_dlla_status
mov r6, #8 /* Wait 20uS for lock */ /* Wait 20uS for lock */
mov r6, #8
wait_dll_lock: wait_dll_lock:
subs r6, r6, #0x1 subs r6, r6, #0x1
beq kick_dll beq kick_dll
ldr r5, [r4] ldr r5, [r4]
and r5, r5, #0x4 and r5, r5, #0x4
cmp r5, #0x4 cmp r5, #0x4
bne wait_dll_lock bne wait_dll_lock
bx lr bx lr @ Return when locked
/* disable/reenable DLL if not locked */ /* disable/reenable DLL if not locked */
kick_dll: kick_dll:
ldr r4, sdrc_dlla_ctrl ldr r4, sdrc_dlla_ctrl
ldr r5, [r4] ldr r5, [r4]
mov r6, r5 mov r6, r5
bic r6, #(1<<3) /* disable dll */ bic r6, #(1<<3) @ disable dll
str r6, [r4] str r6, [r4]
dsb dsb
orr r6, r6, #(1<<3) /* enable dll */ orr r6, r6, #(1<<3) @ enable dll
str r6, [r4] str r6, [r4]
dsb dsb
ldr r4, kick_counter ldr r4, kick_counter
@ -728,7 +738,7 @@ scratchpad_base:
sram_base: sram_base:
.word SRAM_BASE_P + 0x8000 .word SRAM_BASE_P + 0x8000
sdrc_power: sdrc_power:
.word SDRC_POWER_V .word SDRC_POWER_V
ttbrbit_mask: ttbrbit_mask:
.word 0xFFFFC000 .word 0xFFFFC000
table_index_mask: table_index_mask:
@ -742,9 +752,9 @@ control_stat:
control_mem_rta: control_mem_rta:
.word CONTROL_MEM_RTA_CTRL .word CONTROL_MEM_RTA_CTRL
kernel_flush: kernel_flush:
.word v7_flush_dcache_all .word v7_flush_dcache_all
l2dis_3630: l2dis_3630:
.word 0 .word 0
/* /*
* When exporting to userspace while the counters are in SRAM, * When exporting to userspace while the counters are in SRAM,
* these 2 words need to be at the end to facilitate retrival! * these 2 words need to be at the end to facilitate retrival!