arm64/entry: deduplicate SW PAN entry/exit routines
author		Ard Biesheuvel <ardb@kernel.org>
Tue, 21 Jul 2020 08:33:15 +0000 (10:33 +0200)
committer	Catalin Marinas <catalin.marinas@arm.com>
Thu, 23 Jul 2020 11:38:55 +0000 (12:38 +0100)
Factor the 12 copies of the SW PAN entry and exit code into callable
subroutines, and use alternatives patching to emit either a 'bl'
instruction that calls them, or a NOP if h/w PAN is found to be
available at runtime.
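
At each call site, the 'bl' is the default instruction stream and the
alternatives framework records a one-instruction NOP replacement, which
the cpufeature code patches in at boot once ARM64_HAS_PAN is detected.
A rough sketch of one entry-side call site before and after patching
(conceptual only, not the literal macro expansion; the EL1 flavour is
shown):

    // As assembled (default path): call the SW PAN emulation helper
    alternative_if_not ARM64_HAS_PAN
    	bl	__swpan_entry_el1
    alternative_else_nop_endif

    // As executed on a CPU where ARM64_HAS_PAN was detected,
    // after boot-time alternatives patching:
    	nop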

Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20200721083315.4816-1-ardb@kernel.org
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
arch/arm64/kernel/entry.S

index 5304d193c79dd3a67bca8d72ec9afbf1d530df2e..7b9a7c45ef851f855e0d5af35d5a936f35666ce5 100644 (file)
@@ -209,28 +209,9 @@ alternative_cb_end
        add     x29, sp, #S_STACKFRAME
 
 #ifdef CONFIG_ARM64_SW_TTBR0_PAN
-       /*
-        * Set the TTBR0 PAN bit in SPSR. When the exception is taken from
-        * EL0, there is no need to check the state of TTBR0_EL1 since
-        * accesses are always enabled.
-        * Note that the meaning of this bit differs from the ARMv8.1 PAN
-        * feature as all TTBR0_EL1 accesses are disabled, not just those to
-        * user mappings.
-        */
-alternative_if ARM64_HAS_PAN
-       b       1f                              // skip TTBR0 PAN
+alternative_if_not ARM64_HAS_PAN
+       bl      __swpan_entry_el\el
 alternative_else_nop_endif
-
-       .if     \el != 0
-       mrs     x21, ttbr0_el1
-       tst     x21, #TTBR_ASID_MASK            // Check for the reserved ASID
-       orr     x23, x23, #PSR_PAN_BIT          // Set the emulated PAN in the saved SPSR
-       b.eq    1f                              // TTBR0 access already disabled
-       and     x23, x23, #~PSR_PAN_BIT         // Clear the emulated PAN in the saved SPSR
-       .endif
-
-       __uaccess_ttbr0_disable x21
-1:
 #endif
 
        stp     x22, x23, [sp, #S_PC]
@@ -284,34 +265,9 @@ alternative_else_nop_endif
        .endif
 
 #ifdef CONFIG_ARM64_SW_TTBR0_PAN
-       /*
-        * Restore access to TTBR0_EL1. If returning to EL0, no need for SPSR
-        * PAN bit checking.
-        */
-alternative_if ARM64_HAS_PAN
-       b       2f                              // skip TTBR0 PAN
+alternative_if_not ARM64_HAS_PAN
+       bl      __swpan_exit_el\el
 alternative_else_nop_endif
-
-       .if     \el != 0
-       tbnz    x22, #22, 1f                    // Skip re-enabling TTBR0 access if the PSR_PAN_BIT is set
-       .endif
-
-       __uaccess_ttbr0_enable x0, x1
-
-       .if     \el == 0
-       /*
-        * Enable errata workarounds only if returning to user. The only
-        * workaround currently required for TTBR0_EL1 changes are for the
-        * Cavium erratum 27456 (broadcast TLBI instructions may cause I-cache
-        * corruption).
-        */
-       bl      post_ttbr_update_workaround
-       .endif
-1:
-       .if     \el != 0
-       and     x22, x22, #~PSR_PAN_BIT         // ARMv8.0 CPUs do not understand this bit
-       .endif
-2:
 #endif
 
        .if     \el == 0
@@ -391,6 +347,49 @@ alternative_insn eret, nop, ARM64_UNMAP_KERNEL_AT_EL0
        sb
        .endm
 
+#ifdef CONFIG_ARM64_SW_TTBR0_PAN
+       /*
+        * Set the TTBR0 PAN bit in SPSR. When the exception is taken from
+        * EL0, there is no need to check the state of TTBR0_EL1 since
+        * accesses are always enabled.
+        * Note that the meaning of this bit differs from the ARMv8.1 PAN
+        * feature as all TTBR0_EL1 accesses are disabled, not just those to
+        * user mappings.
+        */
+SYM_CODE_START_LOCAL(__swpan_entry_el1)
+       mrs     x21, ttbr0_el1
+       tst     x21, #TTBR_ASID_MASK            // Check for the reserved ASID
+       orr     x23, x23, #PSR_PAN_BIT          // Set the emulated PAN in the saved SPSR
+       b.eq    1f                              // TTBR0 access already disabled
+       and     x23, x23, #~PSR_PAN_BIT         // Clear the emulated PAN in the saved SPSR
+SYM_INNER_LABEL(__swpan_entry_el0, SYM_L_LOCAL)
+       __uaccess_ttbr0_disable x21
+1:     ret
+SYM_CODE_END(__swpan_entry_el1)
+
+       /*
+        * Restore access to TTBR0_EL1. If returning to EL0, no need for SPSR
+        * PAN bit checking.
+        */
+SYM_CODE_START_LOCAL(__swpan_exit_el1)
+       tbnz    x22, #22, 1f                    // Skip re-enabling TTBR0 access if the PSR_PAN_BIT is set
+       __uaccess_ttbr0_enable x0, x1
+1:     and     x22, x22, #~PSR_PAN_BIT         // ARMv8.0 CPUs do not understand this bit
+       ret
+SYM_CODE_END(__swpan_exit_el1)
+
+SYM_CODE_START_LOCAL(__swpan_exit_el0)
+       __uaccess_ttbr0_enable x0, x1
+       /*
+        * Enable errata workarounds only if returning to user. The only
+        * workaround currently required for TTBR0_EL1 changes are for the
+        * Cavium erratum 27456 (broadcast TLBI instructions may cause I-cache
+        * corruption).
+        */
+       b       post_ttbr_update_workaround
+SYM_CODE_END(__swpan_exit_el0)
+#endif
+
        .macro  irq_stack_entry
        mov     x19, sp                 // preserve the original sp
 #ifdef CONFIG_SHADOW_CALL_STACK