author    Jiri Slaby <jslaby@suse.cz>                       2019-10-11 13:50:46 +0200
committer Borislav Petkov <bp@suse.de>                      2019-10-18 10:07:15 +0200
commit    74d8b90a889022e306b543ff2147a6941c99b354 (patch)
tree      12c813b199e4e2822f825cb08201cc677337b867 /arch/x86/crypto/cast5-avx-x86_64-asm_64.S
parent    ef77e6880be8fa3033544109e29417c8710fd3f2 (diff)
x86/asm/crypto: Annotate local functions
Use the newly added SYM_FUNC_START_LOCAL to annotate the beginnings of
all functions which do not have a ".globl" annotation but whose endings
are annotated by ENDPROC. This is needed to balance ENDPROC for tools
that generate debuginfo.

These function names are not prepended with ".L", as they might appear
in call traces and would no longer be visible after such a change.

To be symmetric, the functions' ENDPROCs are converted to the new
SYM_FUNC_END.

Signed-off-by: Jiri Slaby <jslaby@suse.cz>
Signed-off-by: Borislav Petkov <bp@suse.de>
Cc: "David S. Miller" <davem@davemloft.net>
Cc: Herbert Xu <herbert@gondor.apana.org.au>
Cc: "H. Peter Anvin" <hpa@zytor.com>
Cc: Ingo Molnar <mingo@redhat.com>
Cc: linux-arch@vger.kernel.org
Cc: linux-crypto@vger.kernel.org
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: x86-ml <x86@kernel.org>
Link: https://lkml.kernel.org/r/20191011115108.12392-7-jslaby@suse.cz
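For context, the annotation pattern applied here is sketched below. This is a minimal, hypothetical example (the helper name __example_helper is made up; it is not code from this patch): a file-local assembly helper keeps its real name so it stays visible in call traces, but it is opened with SYM_FUNC_START_LOCAL instead of a bare label and closed with SYM_FUNC_END instead of ENDPROC. Both macros are provided by include/linux/linkage.h.

    /* Hypothetical local helper, annotated the same way this patch
     * annotates __cast5_enc_blk16 and __cast5_dec_blk16. */
    #include <linux/linkage.h>

            .text
    SYM_FUNC_START_LOCAL(__example_helper)  /* aligns and emits the label; no .globl */
            /* ... function body ... */
            ret
    SYM_FUNC_END(__example_helper)          /* emits .type/.size, balancing the start marker */

Because SYM_FUNC_END records the symbol's type and size, tools that generate debuginfo can pair every function start with its end, which a bare local label followed by ENDPROC did not allow.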
Diffstat (limited to 'arch/x86/crypto/cast5-avx-x86_64-asm_64.S')
-rw-r--r--    arch/x86/crypto/cast5-avx-x86_64-asm_64.S    8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/arch/x86/crypto/cast5-avx-x86_64-asm_64.S b/arch/x86/crypto/cast5-avx-x86_64-asm_64.S
index dc55c3332fcc..ef86c6a966de 100644
--- a/arch/x86/crypto/cast5-avx-x86_64-asm_64.S
+++ b/arch/x86/crypto/cast5-avx-x86_64-asm_64.S
@@ -209,7 +209,7 @@
.text
.align 16
-__cast5_enc_blk16:
+SYM_FUNC_START_LOCAL(__cast5_enc_blk16)
/* input:
* %rdi: ctx
* RL1: blocks 1 and 2
@@ -280,10 +280,10 @@ __cast5_enc_blk16:
outunpack_blocks(RR4, RL4, RTMP, RX, RKM);
ret;
-ENDPROC(__cast5_enc_blk16)
+SYM_FUNC_END(__cast5_enc_blk16)
.align 16
-__cast5_dec_blk16:
+SYM_FUNC_START_LOCAL(__cast5_dec_blk16)
/* input:
* %rdi: ctx
* RL1: encrypted blocks 1 and 2
@@ -357,7 +357,7 @@ __cast5_dec_blk16:
.L__skip_dec:
vpsrldq $4, RKR, RKR;
jmp .L__dec_tail;
-ENDPROC(__cast5_dec_blk16)
+SYM_FUNC_END(__cast5_dec_blk16)
ENTRY(cast5_ecb_enc_16way)
/* input: