x86/asm: Replace "MOVQ $imm, %reg" with MOVL
Author:     Denys Vlasenko <dvlasenk@redhat.com>
AuthorDate: Tue, 31 Mar 2015 17:00:10 +0000 (19:00 +0200)
Commit:     Ingo Molnar <mingo@kernel.org>
CommitDate: Wed, 1 Apr 2015 11:17:39 +0000 (13:17 +0200)
There is no reason to use MOVQ to load a non-negative immediate
constant that fits in 32 bits into a 64-bit register. MOVL produces
the same value, since a write to a 32-bit register is zero-extended
into the upper 32 bits by the CPU.

This makes the code a bit smaller, while leaving functionality
unchanged.
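
For illustration (this snippet is only an example, not part of the
patch), the byte encodings GNU as emits for the two forms:

	movl	$1, %eax	# b8 01 00 00 00        5 bytes; also zeroes the upper 32 bits of %rax
	movq	$1, %rax	# 48 c7 c0 01 00 00 00  7 bytes; REX.W prefix + sign-extended imm32

Both leave %rax holding 1; the MOVQ form merely costs two extra bytes.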

Signed-off-by: Denys Vlasenko <dvlasenk@redhat.com>
Cc: Alexei Starovoitov <ast@plumgrid.com>
Cc: Andy Lutomirski <luto@amacapital.net>
Cc: Borislav Petkov <bp@alien8.de>
Cc: Frederic Weisbecker <fweisbec@gmail.com>
Cc: H. Peter Anvin <hpa@zytor.com>
Cc: Kees Cook <keescook@chromium.org>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Oleg Nesterov <oleg@redhat.com>
Cc: Steven Rostedt <rostedt@goodmis.org>
Cc: Will Drewry <wad@chromium.org>
Link: http://lkml.kernel.org/r/1427821211-25099-8-git-send-email-dvlasenk@redhat.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>
arch/x86/crypto/crc32c-pcl-intel-asm_64.S
arch/x86/crypto/twofish-x86_64-asm_64.S
arch/x86/kernel/relocate_kernel_64.S

diff --git a/arch/x86/crypto/crc32c-pcl-intel-asm_64.S b/arch/x86/crypto/crc32c-pcl-intel-asm_64.S
index 26d49eb..225be06 100644
--- a/arch/x86/crypto/crc32c-pcl-intel-asm_64.S
+++ b/arch/x86/crypto/crc32c-pcl-intel-asm_64.S
@@ -178,7 +178,7 @@ continue_block:
        ## 2a) PROCESS FULL BLOCKS:
        ################################################################
 full_block:
-       movq    $128,%rax
+       movl    $128,%eax
        lea     128*8*2(block_0), block_1
        lea     128*8*3(block_0), block_2
        add     $128*8*1, block_0
diff --git a/arch/x86/crypto/twofish-x86_64-asm_64.S b/arch/x86/crypto/twofish-x86_64-asm_64.S
index a039d21..a350c99 100644
--- a/arch/x86/crypto/twofish-x86_64-asm_64.S
+++ b/arch/x86/crypto/twofish-x86_64-asm_64.S
@@ -264,7 +264,7 @@ ENTRY(twofish_enc_blk)
        movq    R1,     8(%rsi)
 
        popq    R1
-       movq    $1,%rax
+       movl    $1,%eax
        ret
 ENDPROC(twofish_enc_blk)
 
@@ -316,6 +316,6 @@ ENTRY(twofish_dec_blk)
        movq    R1,     8(%rsi)
 
        popq    R1
-       movq    $1,%rax
+       movl    $1,%eax
        ret
 ENDPROC(twofish_dec_blk)
diff --git a/arch/x86/kernel/relocate_kernel_64.S b/arch/x86/kernel/relocate_kernel_64.S
index 04cb179..98111b3 100644
--- a/arch/x86/kernel/relocate_kernel_64.S
+++ b/arch/x86/kernel/relocate_kernel_64.S
@@ -123,7 +123,7 @@ identity_mapped:
         * Set cr4 to a known state:
         *  - physical address extension enabled
         */
-       movq    $X86_CR4_PAE, %rax
+       movl    $X86_CR4_PAE, %eax
        movq    %rax, %cr4
 
        jmp 1f
@@ -246,17 +246,17 @@ swap_pages:
        movq    %rsi, %rax
 
        movq    %r10, %rdi
-       movq    $512,   %rcx
+       movl    $512, %ecx
        rep ; movsq
 
        movq    %rax, %rdi
        movq    %rdx, %rsi
-       movq    $512,   %rcx
+       movl    $512, %ecx
        rep ; movsq
 
        movq    %rdx, %rdi
        movq    %r10, %rsi
-       movq    $512,   %rcx
+       movl    $512, %ecx
        rep ; movsq
 
        lea     PAGE_SIZE(%rax), %rsi