[PATCH] x86_64: Small assembly improvements

Save a byte here and there.  Ultimately useless, but these things always
catch my eye when reading the code, so just fix them for now.

Also, I already got at least one patch fixing one of them, which gives a good
excuse.

Signed-off-by: Andi Kleen <ak@suse.de>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
Andi Kleen, 2005-07-28 21:15:48 -07:00, committed by Linus Torvalds
commit 3829ee6b1b, parent 3ba80e7595
2 changed files with 13 additions and 13 deletions
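The savings come from dropping REX.W prefixes and immediate bytes: in long mode a
32-bit register write zero-extends into the full 64-bit register, so the shorter
forms behave identically. A minimal sketch of the encodings involved (byte counts
assume the standard x86-64 instruction encoding; the snippet is illustrative, not
part of the patch):

	xorq %rax, %rax			# 48 31 c0 -- 3 bytes (REX.W prefix)
	xorl %eax, %eax			# 31 c0    -- 2 bytes; writing %eax zero-extends to %rax

	addl $1,%gs:pda_irqcount	# opcode 83 /0 plus an imm8 byte
	incl %gs:pda_irqcount		# opcode ff /0, no immediate -- one byte shorter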

--- a/arch/x86_64/kernel/entry.S
+++ b/arch/x86_64/kernel/entry.S

@@ -76,7 +76,7 @@
 .macro FAKE_STACK_FRAME child_rip
 	/* push in order ss, rsp, eflags, cs, rip */
-	xorq %rax, %rax
+	xorl %eax, %eax
 	pushq %rax /* ss */
 	CFI_ADJUST_CFA_OFFSET	8
 	pushq %rax /* rsp */
@@ -423,7 +423,7 @@ ENTRY(stub_rt_sigreturn)
 	testl $3,CS(%rdi)
 	je 1f
 	swapgs
-1:	addl $1,%gs:pda_irqcount	# RED-PEN should check preempt count
+1:	incl %gs:pda_irqcount		# RED-PEN should check preempt count
 	movq %gs:pda_irqstackptr,%rax
 	cmoveq %rax,%rsp
 	pushq %rdi			# save old stack
@@ -436,7 +436,7 @@ ENTRY(common_interrupt)
 ret_from_intr:
 	popq %rdi
 	cli
-	subl $1,%gs:pda_irqcount
+	decl %gs:pda_irqcount
 #ifdef CONFIG_DEBUG_INFO
 	movq RBP(%rdi),%rbp
 #endif
@@ -494,7 +494,7 @@ retint_signal:
 	sti
 	SAVE_REST
 	movq $-1,ORIG_RAX(%rsp)
-	xorq %rsi,%rsi		# oldset
+	xorl %esi,%esi		# oldset
 	movq %rsp,%rdi		# &pt_regs
 	call do_notify_resume
 	RESTORE_REST
@@ -752,7 +752,7 @@ child_rip:
 	movq %rsi, %rdi
 	call *%rax
 	# exit
-	xorq %rdi, %rdi
+	xorl %edi, %edi
 	call do_exit
 	/*

--- a/arch/x86_64/kernel/head.S
+++ b/arch/x86_64/kernel/head.S

@@ -137,14 +137,14 @@ startup_64:
 	wrmsr
 	/* Setup cr0 */
-	xorq	%rax, %rax
-	btsq	$31, %rax	/* Enable paging */
-	btsq	$0, %rax	/* Enable protected mode */
-	btsq	$1, %rax	/* Enable MP */
-	btsq	$4, %rax	/* Enable ET */
-	btsq	$5, %rax	/* Enable NE */
-	btsq	$16, %rax	/* Enable WP */
-	btsq	$18, %rax	/* Enable AM */
+#define CR0_PM		1	/* protected mode */
+#define CR0_MP		(1<<1)
+#define CR0_ET		(1<<4)
+#define CR0_NE		(1<<5)
+#define CR0_WP		(1<<16)
+#define CR0_AM		(1<<18)
+#define CR0_PAGING	(1<<31)
+	movl $CR0_PM|CR0_MP|CR0_ET|CR0_NE|CR0_WP|CR0_AM|CR0_PAGING,%eax
 	/* Make changes effective */
 	movq	%rax, %cr0
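
For reference, a short sketch of the value the new movl ends up loading, worked
out from the #defines above (the hex value is not spelled out in the patch itself):

	/* CR0_PM | CR0_MP | CR0_ET | CR0_NE | CR0_WP | CR0_AM | CR0_PAGING
	 * = 0x00000001 | 0x00000002 | 0x00000010 | 0x00000020
	 *   | 0x00010000 | 0x00040000 | 0x80000000
	 * = 0x80050033
	 */
	movl $0x80050033, %eax		/* equivalent immediate */
	movq %rax, %cr0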