arm64: Use BTI C directly and unconditionally
author Mark Brown <broonie@kernel.org>
Tue, 14 Dec 2021 15:27:14 +0000 (15:27 +0000)
committer Catalin Marinas <catalin.marinas@arm.com>
Tue, 14 Dec 2021 18:12:58 +0000 (18:12 +0000)
Now that we have a macro for BTI C that looks like a regular instruction,
change all users of the current BTI_C macro to just emit a BTI C directly
and remove the macro.
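
For reference, a minimal sketch of what such an assembler macro can look
like (illustrative only, not the exact macro added earlier in this series;
BTI C occupies the HINT space as HINT #34, so it assembles and runs as a
NOP even where BTI is unavailable):

	/* Illustrative 'bti' macro: emit the raw HINT encoding so that
	 * assemblers without BTI support still accept the instruction. */
	.macro	bti, targets
	.ifc	\targets, c
	hint	#34		// BTI C: landing pad with BTI, NOP without
	.endif
	.endm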

This does mean that we now unconditionally BTI annotate all assembly
functions, even in kernels built without BTI support, making them worse in
this respect than code generated by the compiler, which emits landing pads
only when branch protection is enabled. The overhead should be minimal on
implementations that handle HINT instructions reasonably.
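
Schematically (example_func is a made-up name), a function declared with
the updated macros now always starts with a landing pad; per the diff
below, SYM_FUNC_START(example_func) expands to:

	SYM_START(example_func, SYM_L_GLOBAL, SYM_A_ALIGN)
	bti c ;		// landing pad where BTI is enforced, NOP elsewhere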

Signed-off-by: Mark Brown <broonie@kernel.org>
Reviewed-by: Ard Biesheuvel <ardb@kernel.org>
Acked-by: Mark Rutland <mark.rutland@arm.com>
Link: https://lore.kernel.org/r/20211214152714.2380849-4-broonie@kernel.org
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
arch/arm64/include/asm/linkage.h
arch/arm64/kernel/entry-ftrace.S
arch/arm64/lib/kasan_sw_tags.S

index 1cfa8bb33eddf8c99cb8aa08e321cc60cc14efcf..9065e4749b42a154efae495b7ce8ec24591c5ab0 100644
@@ -4,16 +4,6 @@
 #define __ALIGN                .align 2
 #define __ALIGN_STR    ".align 2"
 
-#if defined(CONFIG_ARM64_BTI_KERNEL) && defined(__aarch64__)
-
-#define BTI_C bti c ;
-
-#else
-
-#define BTI_C
-
-#endif
-
 /*
  * When using in-kernel BTI we need to ensure that PCS-conformant
  * assembly functions have suitable annotations.  Override
  */
 #define SYM_FUNC_START(name)                           \
        SYM_START(name, SYM_L_GLOBAL, SYM_A_ALIGN)      \
-       BTI_C
+       bti c ;
 
 #define SYM_FUNC_START_NOALIGN(name)                   \
        SYM_START(name, SYM_L_GLOBAL, SYM_A_NONE)       \
-       BTI_C
+       bti c ;
 
 #define SYM_FUNC_START_LOCAL(name)                     \
        SYM_START(name, SYM_L_LOCAL, SYM_A_ALIGN)       \
-       BTI_C
+       bti c ;
 
 #define SYM_FUNC_START_LOCAL_NOALIGN(name)             \
        SYM_START(name, SYM_L_LOCAL, SYM_A_NONE)        \
-       BTI_C
+       bti c ;
 
 #define SYM_FUNC_START_WEAK(name)                      \
        SYM_START(name, SYM_L_WEAK, SYM_A_ALIGN)        \
-       BTI_C
+       bti c ;
 
 #define SYM_FUNC_START_WEAK_NOALIGN(name)              \
        SYM_START(name, SYM_L_WEAK, SYM_A_NONE)         \
-       BTI_C
+       bti c ;
 
 /*
  * Annotate a function as position independent, i.e., safe to be called before
index 8cf970d219f5d896a7805c446e9854dadf7d0cd9..e535480a4069b5bcf8dbde672c93747037e2cd01 100644
        .endm
 
 SYM_CODE_START(ftrace_regs_caller)
-#ifdef BTI_C
-       BTI_C
-#endif
+       bti     c
        ftrace_regs_entry       1
        b       ftrace_common
 SYM_CODE_END(ftrace_regs_caller)
 
 SYM_CODE_START(ftrace_caller)
-#ifdef BTI_C
-       BTI_C
-#endif
+       bti     c
        ftrace_regs_entry       0
        b       ftrace_common
 SYM_CODE_END(ftrace_caller)
index 5b04464c045eb552fc6416826ca9f4cd9b1820f8..20784ce75defb22e6c77421db7d8637ef9c3ef3c 100644
@@ -38,9 +38,7 @@
  * incremented by 256 prior to return).
  */
 SYM_CODE_START(__hwasan_tag_mismatch)
-#ifdef BTI_C
-       BTI_C
-#endif
+       bti     c
        add     x29, sp, #232
        stp     x2, x3, [sp, #8 * 2]
        stp     x4, x5, [sp, #8 * 4]