arm64: entry: Don't assume tramp_vectors is the start of the vectors
author     James Morse <james.morse@arm.com>
           Wed, 24 Nov 2021 13:40:09 +0000 (13:40 +0000)
committer  Greg Kroah-Hartman <gregkh@linuxfoundation.org>
           Fri, 11 Mar 2022 11:11:52 +0000 (12:11 +0100)
commit ed50da7764535f1e24432ded289974f2bf2b0c5a upstream.

The tramp_ventry macro uses tramp_vectors as the address of the vectors
when calculating which ventry in the 'full fat' vectors to branch to.
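
As an illustration (offsets taken from the layout in this file, purely as
an example): the first 64-bit ventry sits 0x400 bytes after tramp_vectors,
immediately after the .space 0x400, so with x30 holding the address of the
full-fat 'vectors' the tail of the macro in effect becomes:

        prfm    plil1strm, [x30, #0x400]        // prefetch the matching entry in 'vectors'
        msr     vbar_el1, x30
        add     x30, x30, #(0x400 + 4)          // x30 = vectors + 0x400 + 4
        ret                                     // continue in the matching full-fat ventry

i.e. '1b - tramp_vectors' is the byte offset of the current slot, and the
same offset picks out the corresponding entry in 'vectors'.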

As long as there is only one set of tramp_vectors, this assumption holds.
Adding multiple sets of vectors will break it.

Move the generation of the vectors to a macro, and pass the start
of the vectors as an argument to tramp_ventry.
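
A minimal standalone sketch of the assembler pattern this relies on (not
kernel code; demo_vector and demo_ventry are made-up names): '\@' expands
to a value that is unique to each macro invocation, so every copy of the
vectors emitted by the macro gets its own .Lvector_start label, and each
ventry measures its slot offset against the copy it was expanded into
rather than against one fixed symbol:

        .macro  demo_ventry, vector_start
        .align  7
1:
        // '1b - \vector_start' is the byte offset of this 128-byte slot
        // from the start of whichever copy of the vectors we live in.
        add     x0, x0, #(1b - \vector_start + 4)
        ret
        .org    1b + 128                // keep each slot 128 bytes
        .endm

        .macro  demo_vector
.Lvector_start\@:                       // '\@' is unique per macro expansion
        .space  0x400
        .rept   4
        demo_ventry     .Lvector_start\@
        .endr
        .endm

        .text
        .align  11
        demo_vector                     // one copy of the vectors
        .align  11
        demo_vector                     // a second copy still computes correct
                                        // offsets; '1b - tramp_vectors' would not

Passing the start label as a macro argument keeps tramp_ventry independent
of any particular symbol name, so further sets of vectors can be generated
later without changing the ventry code.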

Reviewed-by: Russell King (Oracle) <rmk+kernel@armlinux.org.uk>
Reviewed-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: James Morse <james.morse@arm.com>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
arch/arm64/kernel/entry.S

index 98987880190a1d62a8fd157ac8ae318db8f3e6f6..44cac269ff3c7b820d5e7820f22db1892ca93f0d 100644
@@ -822,7 +822,7 @@ alternative_else_nop_endif
        sub     \dst, \dst, PAGE_SIZE
        .endm
 
-       .macro tramp_ventry, regsize = 64
+       .macro tramp_ventry, vector_start, regsize
        .align  7
 1:
        .if     \regsize == 64
@@ -845,10 +845,10 @@ alternative_insn isb, nop, ARM64_WORKAROUND_QCOM_FALKOR_E1003
        ldr     x30, =vectors
 #endif
 alternative_if_not ARM64_WORKAROUND_CAVIUM_TX2_219_PRFM
-       prfm    plil1strm, [x30, #(1b - tramp_vectors)]
+       prfm    plil1strm, [x30, #(1b - \vector_start)]
 alternative_else_nop_endif
        msr     vbar_el1, x30
-       add     x30, x30, #(1b - tramp_vectors + 4)
+       add     x30, x30, #(1b - \vector_start + 4)
        isb
        ret
 .org 1b + 128  // Did we overflow the ventry slot?
@@ -867,19 +867,21 @@ alternative_else_nop_endif
        sb
        .endm
 
-       .align  11
-SYM_CODE_START_NOALIGN(tramp_vectors)
+       .macro  generate_tramp_vector
+.Lvector_start\@:
        .space  0x400
 
-       tramp_ventry
-       tramp_ventry
-       tramp_ventry
-       tramp_ventry
+       .rept   4
+       tramp_ventry    .Lvector_start\@, 64
+       .endr
+       .rept   4
+       tramp_ventry    .Lvector_start\@, 32
+       .endr
+       .endm
 
-       tramp_ventry    32
-       tramp_ventry    32
-       tramp_ventry    32
-       tramp_ventry    32
+       .align  11
+SYM_CODE_START_NOALIGN(tramp_vectors)
+       generate_tramp_vector
 SYM_CODE_END(tramp_vectors)
 
 SYM_CODE_START(tramp_exit_native)