author    Linus Torvalds <torvalds@linux-foundation.org>    2022-03-31 11:17:39 -0700
committer Linus Torvalds <torvalds@linux-foundation.org>    2022-03-31 11:17:39 -0700
commit    93235e3df29c084a37e0daed17801c6adfce4cb6 (patch)
tree      fa69253ba2d70050522e605e2b089e5888fa9223 /arch/x86
parent    787af64d05cd528aac9ad16752d11bb1c6061bb9 (diff)
parent    aa8e73eed7d3084c18dd16d195748661c7e881b5 (diff)
Merge tag 'v5.18-p1' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto fixes from Herbert Xu:

 - Missing Kconfig dependency on arm that leads to boot failure

 - x86 SLS fixes

 - Reference leak in the stm32 driver

* tag 'v5.18-p1' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6:
  crypto: x86/sm3 - Fixup SLS
  crypto: x86/poly1305 - Fixup SLS
  crypto: x86/chacha20 - Avoid spurious jumps to other functions
  crypto: stm32 - fix reference leak in stm32_crc_remove
  crypto: arm/aes-neonbs-cbc - Select generic cbc and aes
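For context on the SLS ("straight-line speculation") fixups below: bare ret instructions in x86 assembly are replaced with the kernel's RET macro so that, when CONFIG_SLS is enabled, a trap instruction follows every return and stops the CPU from speculating past it. As a rough sketch only (the actual definition lives in arch/x86/include/asm/linkage.h and has changed across kernel versions), the macro amounts to:

    /* Sketch only -- approximate form of the RET macro around this merge,
     * not a verbatim copy of the kernel header. */
    #ifdef CONFIG_SLS
    #define RET	ret; int3	/* int3 blocks straight-line speculation past the return */
    #else
    #define RET	ret
    #endif

The chacha20 change is related: the 2-block and 4-block functions previously jumped to .Ldone8, a label inside the 8-block function, and such cross-function jumps are flagged once every function is expected to end with its own annotated return.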
Diffstat (limited to 'arch/x86')
-rw-r--r--  arch/x86/crypto/chacha-avx512vl-x86_64.S       |  4
-rw-r--r--  arch/x86/crypto/poly1305-x86_64-cryptogams.pl  | 38
-rw-r--r--  arch/x86/crypto/sm3-avx-asm_64.S               |  2
3 files changed, 22 insertions, 22 deletions
diff --git a/arch/x86/crypto/chacha-avx512vl-x86_64.S b/arch/x86/crypto/chacha-avx512vl-x86_64.S
index 946f74dd6fba..259383e1ad44 100644
--- a/arch/x86/crypto/chacha-avx512vl-x86_64.S
+++ b/arch/x86/crypto/chacha-avx512vl-x86_64.S
@@ -172,7 +172,7 @@ SYM_FUNC_START(chacha_2block_xor_avx512vl)
# xor remaining bytes from partial register into output
mov %rcx,%rax
and $0xf,%rcx
- jz .Ldone8
+ jz .Ldone2
mov %rax,%r9
and $~0xf,%r9
@@ -438,7 +438,7 @@ SYM_FUNC_START(chacha_4block_xor_avx512vl)
# xor remaining bytes from partial register into output
mov %rcx,%rax
and $0xf,%rcx
- jz .Ldone8
+ jz .Ldone4
mov %rax,%r9
and $~0xf,%r9
diff --git a/arch/x86/crypto/poly1305-x86_64-cryptogams.pl b/arch/x86/crypto/poly1305-x86_64-cryptogams.pl
index 71fae5a09e56..2077ce7a5647 100644
--- a/arch/x86/crypto/poly1305-x86_64-cryptogams.pl
+++ b/arch/x86/crypto/poly1305-x86_64-cryptogams.pl
@@ -297,7 +297,7 @@ ___
$code.=<<___;
mov \$1,%eax
.Lno_key:
- ret
+ RET
___
&end_function("poly1305_init_x86_64");
@@ -373,7 +373,7 @@ $code.=<<___;
.cfi_adjust_cfa_offset -48
.Lno_data:
.Lblocks_epilogue:
- ret
+ RET
.cfi_endproc
___
&end_function("poly1305_blocks_x86_64");
@@ -399,7 +399,7 @@ $code.=<<___;
mov %rax,0($mac) # write result
mov %rcx,8($mac)
- ret
+ RET
___
&end_function("poly1305_emit_x86_64");
if ($avx) {
@@ -429,7 +429,7 @@ ___
&poly1305_iteration();
$code.=<<___;
pop $ctx
- ret
+ RET
.size __poly1305_block,.-__poly1305_block
.type __poly1305_init_avx,\@abi-omnipotent
@@ -594,7 +594,7 @@ __poly1305_init_avx:
lea -48-64($ctx),$ctx # size [de-]optimization
pop %rbp
- ret
+ RET
.size __poly1305_init_avx,.-__poly1305_init_avx
___
@@ -747,7 +747,7 @@ $code.=<<___;
.cfi_restore %rbp
.Lno_data_avx:
.Lblocks_avx_epilogue:
- ret
+ RET
.cfi_endproc
.align 32
@@ -1452,7 +1452,7 @@ $code.=<<___ if (!$win64);
___
$code.=<<___;
vzeroupper
- ret
+ RET
.cfi_endproc
___
&end_function("poly1305_blocks_avx");
@@ -1508,7 +1508,7 @@ $code.=<<___;
mov %rax,0($mac) # write result
mov %rcx,8($mac)
- ret
+ RET
___
&end_function("poly1305_emit_avx");
@@ -1675,7 +1675,7 @@ $code.=<<___;
.cfi_restore %rbp
.Lno_data_avx2$suffix:
.Lblocks_avx2_epilogue$suffix:
- ret
+ RET
.cfi_endproc
.align 32
@@ -2201,7 +2201,7 @@ $code.=<<___ if (!$win64);
___
$code.=<<___;
vzeroupper
- ret
+ RET
.cfi_endproc
___
if($avx > 2 && $avx512) {
@@ -2792,7 +2792,7 @@ $code.=<<___ if (!$win64);
.cfi_def_cfa_register %rsp
___
$code.=<<___;
- ret
+ RET
.cfi_endproc
___
@@ -2893,7 +2893,7 @@ $code.=<<___ if ($flavour =~ /elf32/);
___
$code.=<<___;
mov \$1,%eax
- ret
+ RET
.size poly1305_init_base2_44,.-poly1305_init_base2_44
___
{
@@ -3010,7 +3010,7 @@ poly1305_blocks_vpmadd52:
jnz .Lblocks_vpmadd52_4x
.Lno_data_vpmadd52:
- ret
+ RET
.size poly1305_blocks_vpmadd52,.-poly1305_blocks_vpmadd52
___
}
@@ -3451,7 +3451,7 @@ poly1305_blocks_vpmadd52_4x:
vzeroall
.Lno_data_vpmadd52_4x:
- ret
+ RET
.size poly1305_blocks_vpmadd52_4x,.-poly1305_blocks_vpmadd52_4x
___
}
@@ -3824,7 +3824,7 @@ $code.=<<___;
vzeroall
.Lno_data_vpmadd52_8x:
- ret
+ RET
.size poly1305_blocks_vpmadd52_8x,.-poly1305_blocks_vpmadd52_8x
___
}
@@ -3861,7 +3861,7 @@ poly1305_emit_base2_44:
mov %rax,0($mac) # write result
mov %rcx,8($mac)
- ret
+ RET
.size poly1305_emit_base2_44,.-poly1305_emit_base2_44
___
} } }
@@ -3916,7 +3916,7 @@ xor128_encrypt_n_pad:
.Ldone_enc:
mov $otp,%rax
- ret
+ RET
.size xor128_encrypt_n_pad,.-xor128_encrypt_n_pad
.globl xor128_decrypt_n_pad
@@ -3967,7 +3967,7 @@ xor128_decrypt_n_pad:
.Ldone_dec:
mov $otp,%rax
- ret
+ RET
.size xor128_decrypt_n_pad,.-xor128_decrypt_n_pad
___
}
@@ -4109,7 +4109,7 @@ avx_handler:
pop %rbx
pop %rdi
pop %rsi
- ret
+ RET
.size avx_handler,.-avx_handler
.section .pdata
diff --git a/arch/x86/crypto/sm3-avx-asm_64.S b/arch/x86/crypto/sm3-avx-asm_64.S
index 71e6aae23e17..b12b9efb5ec5 100644
--- a/arch/x86/crypto/sm3-avx-asm_64.S
+++ b/arch/x86/crypto/sm3-avx-asm_64.S
@@ -513,5 +513,5 @@ SYM_FUNC_START(sm3_transform_avx)
movq %rbp, %rsp;
popq %rbp;
- ret;
+ RET;
SYM_FUNC_END(sm3_transform_avx)