[tip:x86/asm] x86/asm: Replace "MOVQ $imm, %reg" with MOVL

From: tip-bot for Denys Vlasenko
Date: Thu Apr 02 2015 - 08:27:05 EST


Commit-ID: a734b4a23e4b5a5bba577d11b6e2ff21f6ca4fce
Gitweb: http://git.kernel.org/tip/a734b4a23e4b5a5bba577d11b6e2ff21f6ca4fce
Author: Denys Vlasenko <dvlasenk@xxxxxxxxxx>
AuthorDate: Tue, 31 Mar 2015 19:00:10 +0200
Committer: Ingo Molnar <mingo@xxxxxxxxxx>
CommitDate: Wed, 1 Apr 2015 13:17:39 +0200

x86/asm: Replace "MOVQ $imm, %reg" with MOVL

There is no reason to use MOVQ to load a non-negative immediate
constant into a 64-bit register: MOVL does the same, since a write
to a 32-bit register is zero-extended into the full 64-bit register
by the CPU.

This makes the code a bit smaller, while leaving functionality
unchanged.
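
For illustration only (not part of the patch), a sketch of the two
encodings as GNU as emits them in AT&T syntax, using the %rax/%eax
destination from the crc32c hunk below:

movq $128, %rax    # 48 c7 c0 80 00 00 00  (7 bytes: REX.W + C7 /0 + imm32)
movl $128, %eax    # b8 80 00 00 00        (5 bytes: B8+rd + imm32)

Both leave %rax == 128, because a write to a 32-bit register clears
bits 63:32 of the corresponding 64-bit register. The shorter form is
only valid for constants that fit in 32 bits; a genuinely 64-bit
immediate still needs the 10-byte MOVABS encoding.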

Signed-off-by: Denys Vlasenko <dvlasenk@xxxxxxxxxx>
Cc: Alexei Starovoitov <ast@xxxxxxxxxxxx>
Cc: Andy Lutomirski <luto@xxxxxxxxxxxxxx>
Cc: Borislav Petkov <bp@xxxxxxxxx>
Cc: Frederic Weisbecker <fweisbec@xxxxxxxxx>
Cc: H. Peter Anvin <hpa@xxxxxxxxx>
Cc: Kees Cook <keescook@xxxxxxxxxxxx>
Cc: Linus Torvalds <torvalds@xxxxxxxxxxxxxxxxxxxx>
Cc: Oleg Nesterov <oleg@xxxxxxxxxx>
Cc: Steven Rostedt <rostedt@xxxxxxxxxxx>
Cc: Will Drewry <wad@xxxxxxxxxxxx>
Link: http://lkml.kernel.org/r/1427821211-25099-8-git-send-email-dvlasenk@xxxxxxxxxx
Signed-off-by: Ingo Molnar <mingo@xxxxxxxxxx>
---
arch/x86/crypto/crc32c-pcl-intel-asm_64.S | 2 +-
arch/x86/crypto/twofish-x86_64-asm_64.S | 4 ++--
arch/x86/kernel/relocate_kernel_64.S | 8 ++++----
3 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/arch/x86/crypto/crc32c-pcl-intel-asm_64.S b/arch/x86/crypto/crc32c-pcl-intel-asm_64.S
index 26d49eb..225be06 100644
--- a/arch/x86/crypto/crc32c-pcl-intel-asm_64.S
+++ b/arch/x86/crypto/crc32c-pcl-intel-asm_64.S
@@ -178,7 +178,7 @@ continue_block:
## 2a) PROCESS FULL BLOCKS:
################################################################
full_block:
- movq $128,%rax
+ movl $128,%eax
lea 128*8*2(block_0), block_1
lea 128*8*3(block_0), block_2
add $128*8*1, block_0
diff --git a/arch/x86/crypto/twofish-x86_64-asm_64.S b/arch/x86/crypto/twofish-x86_64-asm_64.S
index a039d21..a350c99 100644
--- a/arch/x86/crypto/twofish-x86_64-asm_64.S
+++ b/arch/x86/crypto/twofish-x86_64-asm_64.S
@@ -264,7 +264,7 @@ ENTRY(twofish_enc_blk)
movq R1, 8(%rsi)

popq R1
- movq $1,%rax
+ movl $1,%eax
ret
ENDPROC(twofish_enc_blk)

@@ -316,6 +316,6 @@ ENTRY(twofish_dec_blk)
movq R1, 8(%rsi)

popq R1
- movq $1,%rax
+ movl $1,%eax
ret
ENDPROC(twofish_dec_blk)
diff --git a/arch/x86/kernel/relocate_kernel_64.S b/arch/x86/kernel/relocate_kernel_64.S
index 04cb179..98111b3 100644
--- a/arch/x86/kernel/relocate_kernel_64.S
+++ b/arch/x86/kernel/relocate_kernel_64.S
@@ -123,7 +123,7 @@ identity_mapped:
* Set cr4 to a known state:
* - physical address extension enabled
*/
- movq $X86_CR4_PAE, %rax
+ movl $X86_CR4_PAE, %eax
movq %rax, %cr4

jmp 1f
@@ -246,17 +246,17 @@ swap_pages:
movq %rsi, %rax

movq %r10, %rdi
- movq $512, %rcx
+ movl $512, %ecx
rep ; movsq

movq %rax, %rdi
movq %rdx, %rsi
- movq $512, %rcx
+ movl $512, %ecx
rep ; movsq

movq %rdx, %rdi
movq %r10, %rsi
- movq $512, %rcx
+ movl $512, %ecx
rep ; movsq

lea PAGE_SIZE(%rax), %rsi
--