x86/paravirt: Properly align PV functions
author Thomas Gleixner <tglx@linutronix.de>
Thu, 15 Sep 2022 11:10:50 +0000 (13:10 +0200)
committer Peter Zijlstra <peterz@infradead.org>
Mon, 17 Oct 2022 14:40:59 +0000 (16:40 +0200)
Ensure inline asm functions are consistently aligned with compiler-generated
and SYM_FUNC_START*() functions.

Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Juergen Gross <jgross@suse.com>
Link: https://lore.kernel.org/r/20220915111144.038540008@infradead.org
arch/x86/include/asm/paravirt.h
arch/x86/include/asm/qspinlock_paravirt.h
arch/x86/kernel/kvm.c
arch/x86/kernel/paravirt.c
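
The change is the same at all four sites below: emit the function-alignment
directive ahead of the label of a function defined in toplevel asm, so its
entry point lands on the same boundary that compiler-generated and
SYM_FUNC_START*() functions get. A minimal, self-contained sketch of the
idiom follows; my_ret0 is a made-up example, and ".balign 16, 0x90" is only
an assumed stand-in for whatever ASM_FUNC_ALIGN actually expands to from
<asm/linkage.h>:

/* Assumed stand-in; the real ASM_FUNC_ALIGN comes from <asm/linkage.h>. */
#define ASM_FUNC_ALIGN ".balign 16, 0x90;"

/*
 * Define a function entirely in toplevel inline asm, aligned like a
 * compiler-generated function. Without the alignment directive, the
 * label starts wherever the previous section contents happened to end.
 */
asm(".pushsection .text, \"ax\"\n"
    ".globl my_ret0\n"
    ".type my_ret0, @function\n"
    ASM_FUNC_ALIGN                      /* align the entry point */
    "my_ret0:\n\t"
    "xor %eax, %eax\n\t"                /* return 0 */
    "ret\n"
    ".size my_ret0, .-my_ret0\n"
    ".popsection");

extern int my_ret0(void);               /* C-visible prototype for the stub */

int main(void)
{
	return my_ret0();               /* exits with status 0 */
}

Note that the qspinlock_paravirt.h hunk already had an alignment directive,
but ".align 4,0x90" only provides 4-byte alignment, short of the function
alignment the compiler uses; the other sites had none at all.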

arch/x86/include/asm/paravirt.h
index 2a0b8dd4ec33553c30584f520bed2f818f85ce62..1be66c15ecbd7d8a874fb622a70ae6d4965e7fd7 100644
@@ -665,6 +665,7 @@ bool __raw_callee_save___native_vcpu_is_preempted(long cpu);
        asm(".pushsection " section ", \"ax\";"                         \
            ".globl " PV_THUNK_NAME(func) ";"                           \
            ".type " PV_THUNK_NAME(func) ", @function;"                 \
+           ASM_FUNC_ALIGN                                              \
            PV_THUNK_NAME(func) ":"                                     \
            ASM_ENDBR                                                   \
            FRAME_BEGIN                                                 \
arch/x86/include/asm/qspinlock_paravirt.h
index 60ece592b22077328a89b5979f7f340bcca46e8c..082551b3c75ed0a9a8cba60de527ab447f4504f4 100644
@@ -40,7 +40,7 @@ __PV_CALLEE_SAVE_REGS_THUNK(__pv_queued_spin_unlock_slowpath, ".spinlock.text");
 asm    (".pushsection .spinlock.text;"
        ".globl " PV_UNLOCK ";"
        ".type " PV_UNLOCK ", @function;"
-       ".align 4,0x90;"
+       ASM_FUNC_ALIGN
        PV_UNLOCK ": "
        ASM_ENDBR
        FRAME_BEGIN
arch/x86/kernel/kvm.c
index d4e48b4a438b25d7170890ca2c281d4d96013985..95fb85bea11187b4459c383ef322645e2bc5dd95 100644
@@ -802,6 +802,7 @@ asm(
 ".pushsection .text;"
 ".global __raw_callee_save___kvm_vcpu_is_preempted;"
 ".type __raw_callee_save___kvm_vcpu_is_preempted, @function;"
+ASM_FUNC_ALIGN
 "__raw_callee_save___kvm_vcpu_is_preempted:"
 ASM_ENDBR
 "movq  __per_cpu_offset(,%rdi,8), %rax;"
arch/x86/kernel/paravirt.c
index 7ca2d46c08cc9efc7130388da9ea1d8ecedcf94e..e244c49b52d7eaae3213726d12d006c809d3559a 100644
@@ -40,6 +40,7 @@
 extern void _paravirt_nop(void);
 asm (".pushsection .entry.text, \"ax\"\n"
      ".global _paravirt_nop\n"
+     ASM_FUNC_ALIGN
      "_paravirt_nop:\n\t"
      ASM_ENDBR
      ASM_RET
@@ -50,6 +51,7 @@ asm (".pushsection .entry.text, \"ax\"\n"
 /* stub always returning 0. */
 asm (".pushsection .entry.text, \"ax\"\n"
      ".global paravirt_ret0\n"
+     ASM_FUNC_ALIGN
      "paravirt_ret0:\n\t"
      ASM_ENDBR
      "xor %" _ASM_AX ", %" _ASM_AX ";\n\t"