path: root/arch/x86/entry/entry_64.S
author      Thomas Gleixner <tglx@linutronix.de>      2022-09-15 13:10:51 +0200
committer   Peter Zijlstra <peterz@infradead.org>     2022-10-17 16:41:00 +0200
commit      67e93ddd5d0b84ac17bddb13d98533e425282421 (patch)
tree        3fe1334943e0c932a9f2f82c21cb4eee04483207 /arch/x86/entry/entry_64.S
parent      1d293758e548aa6ff65e4dd3f5a9bc2a34b38ce3 (diff)
x86/entry: Align SYM_CODE_START() variants
Explicitly align a bunch of commonly called SYM_CODE_START() symbols.

Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Link: https://lore.kernel.org/r/20220915111144.144068841@infradead.org
Diffstat (limited to 'arch/x86/entry/entry_64.S')
-rw-r--r--   arch/x86/entry/entry_64.S | 16 ++++++++++------
1 file changed, 10 insertions(+), 6 deletions(-)
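
Editorial note (not part of the commit): the *_NOALIGN variants differ from the plain SYM_CODE_START() forms only in whether an implicit alignment directive is emitted before the symbol, which is what lets the patch place an explicit, arch-chosen __FUNC_ALIGN/__ALIGN in front instead. A simplified C-preprocessor sketch of the generic macro relationship, roughly as in include/linux/linkage.h; the exact expansions of ALIGN, __ALIGN and __FUNC_ALIGN are arch- and config-dependent and are assumed here, not quoted:

	/* Simplified sketch, not verbatim kernel source. */
	#define SYM_A_ALIGN	ALIGN		/* emit the generic alignment directive */
	#define SYM_A_NONE			/* emit nothing; caller aligns explicitly */

	#define SYM_CODE_START(name) \
		SYM_START(name, SYM_L_GLOBAL, SYM_A_ALIGN)

	#define SYM_CODE_START_NOALIGN(name) \
		SYM_START(name, SYM_L_GLOBAL, SYM_A_NONE)

	#define SYM_CODE_START_LOCAL_NOALIGN(name) \
		SYM_START(name, SYM_L_LOCAL, SYM_A_NONE)

	/*
	 * Net effect of the patch: the implicit SYM_A_ALIGN is dropped and an
	 * explicit __FUNC_ALIGN (or __ALIGN) directive is written directly in
	 * front of the symbol, so the architecture controls the alignment and
	 * any padding policy for these commonly called entry points.
	 */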
diff --git a/arch/x86/entry/entry_64.S b/arch/x86/entry/entry_64.S
index 9953d966d124..e635f962afb8 100644
--- a/arch/x86/entry/entry_64.S
+++ b/arch/x86/entry/entry_64.S
@@ -284,7 +284,8 @@ SYM_FUNC_END(__switch_to_asm)
* r12: kernel thread arg
*/
.pushsection .text, "ax"
-SYM_CODE_START(ret_from_fork)
+ __FUNC_ALIGN
+SYM_CODE_START_NOALIGN(ret_from_fork)
UNWIND_HINT_EMPTY
ANNOTATE_NOENDBR // copy_thread
movq %rax, %rdi
@@ -600,13 +601,13 @@ SYM_CODE_END(\asmsym)
* shared between 32 and 64 bit and emit the __irqentry_text_* markers
* so the stacktrace boundary checks work.
*/
- .align 16
+ __ALIGN
.globl __irqentry_text_start
__irqentry_text_start:
#include <asm/idtentry.h>
- .align 16
+ __ALIGN
.globl __irqentry_text_end
__irqentry_text_end:
ANNOTATE_NOENDBR
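
Aside (not part of the commit): the __irqentry_text_start/__irqentry_text_end labels bound the IDT entry text so stack-trace code can classify return addresses by address range. A minimal C sketch of such a boundary check, modelled on the in_irqentry_text()-style helpers used elsewhere in the kernel; the helper name below is illustrative, not a kernel API:

	#include <linux/types.h>
	#include <asm/sections.h>	/* declares __irqentry_text_start/_end */

	/* Illustrative helper: is this address inside the irq entry text range? */
	static inline bool addr_in_irqentry_text(unsigned long addr)
	{
		return addr >= (unsigned long)__irqentry_text_start &&
		       addr <  (unsigned long)__irqentry_text_end;
	}

Aligning these markers with __ALIGN (rather than a hard-coded .align 16) keeps the boundary consistent with whatever function alignment the architecture selects.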
@@ -828,7 +829,8 @@ EXPORT_SYMBOL(asm_load_gs_index)
*
* C calling convention: exc_xen_hypervisor_callback(struct *pt_regs)
*/
-SYM_CODE_START_LOCAL(exc_xen_hypervisor_callback)
+ __FUNC_ALIGN
+SYM_CODE_START_LOCAL_NOALIGN(exc_xen_hypervisor_callback)
/*
* Since we don't modify %rdi, evtchn_do_upall(struct *pt_regs) will
@@ -856,7 +858,8 @@ SYM_CODE_END(exc_xen_hypervisor_callback)
* We distinguish between categories by comparing each saved segment register
* with its current contents: any discrepancy means we in category 1.
*/
-SYM_CODE_START(xen_failsafe_callback)
+ __FUNC_ALIGN
+SYM_CODE_START_NOALIGN(xen_failsafe_callback)
UNWIND_HINT_EMPTY
ENDBR
movl %ds, %ecx
@@ -1516,7 +1519,8 @@ SYM_CODE_END(ignore_sysret)
#endif
.pushsection .text, "ax"
-SYM_CODE_START(rewind_stack_and_make_dead)
+ __FUNC_ALIGN
+SYM_CODE_START_NOALIGN(rewind_stack_and_make_dead)
UNWIND_HINT_FUNC
/* Prevent any naive code from trying to unwind to our caller. */
xorl %ebp, %ebp