arm64: clean up symbol aliasing
author    Mark Rutland <mark.rutland@arm.com>
          Wed, 16 Feb 2022 16:22:27 +0000 (16:22 +0000)
committer Will Deacon <will@kernel.org>
          Tue, 22 Feb 2022 16:21:34 +0000 (16:21 +0000)
Now that we have SYM_FUNC_ALIAS() and SYM_FUNC_ALIAS_WEAK(), use those
to simplify and more consistently define function aliases across
arch/arm64.

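For reference, these aliasing macros come from the generic linkage
changes earlier in this series. Roughly (a sketch of the
include/linux/linkage.h definitions, shown here for illustration only):

#define SYM_ALIAS(alias, name, sym_type, linkage)	\
	linkage(alias) ASM_NL				\
	.set alias, name ASM_NL				\
	.type alias sym_type ASM_NL			\
	.size alias, .-name ASM_NL

#define SYM_FUNC_ALIAS(alias, name)	\
	SYM_ALIAS(alias, name, SYM_T_FUNC, SYM_L_GLOBAL)

#define SYM_FUNC_ALIAS_WEAK(alias, name)	\
	SYM_ALIAS(alias, name, SYM_T_FUNC, SYM_L_WEAK)
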
Aliases are now defined in terms of a canonical function name. For
position-independent functions I've made the __pi_<func> name the
canonical name, and defined other aliases in terms of this.

The SYM_FUNC_{START,END}_PI(func) macros obscure the __pi_<func> name,
and make this hard to search for. The SYM_FUNC_START_WEAK_PI() macro
also obscures the fact that the __pi_<func> symbol is global and the
<func> symbol is weak. For clarity, I have removed these macros and used
SYM_FUNC_{START,END}() directly with the __pi_<func> name.

For example:

SYM_FUNC_START_WEAK_PI(func)
... asm insns ...
SYM_FUNC_END_PI(func)
EXPORT_SYMBOL(func)

... becomes:

SYM_FUNC_START(__pi_func)
... asm insns ...
SYM_FUNC_END(__pi_func)

SYM_FUNC_ALIAS_WEAK(func, __pi_func)
EXPORT_SYMBOL(func)

For clarity, where there are multiple annotations such as
EXPORT_SYMBOL(), I've tried to keep annotations grouped by symbol. For
example, where a function has a name and an alias which are both
exported, this is organised as:

SYM_FUNC_START(func)
... asm insns ...
SYM_FUNC_END(func)
EXPORT_SYMBOL(func)

SYM_FUNC_ALIAS(alias, func)
EXPORT_SYMBOL(alias)

For consistency with the other string functions, I've defined strrchr as
a position-independent function, as it can safely be used as such even
though we have no users today.

As we no longer use SYM_FUNC_{START,END}_ALIAS(), our local copies are
removed. The common versions will be removed by a subsequent patch.

There should be no functional change as a result of this patch.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Acked-by: Ard Biesheuvel <ardb@kernel.org>
Acked-by: Catalin Marinas <catalin.marinas@arm.com>
Acked-by: Josh Poimboeuf <jpoimboe@redhat.com>
Acked-by: Mark Brown <broonie@kernel.org>
Cc: Joey Gouly <joey.gouly@arm.com>
Cc: Will Deacon <will@kernel.org>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Link: https://lore.kernel.org/r/20220216162229.1076788-3-mark.rutland@arm.com
Signed-off-by: Will Deacon <will@kernel.org>
15 files changed:
arch/arm64/include/asm/linkage.h
arch/arm64/kvm/hyp/nvhe/cache.S
arch/arm64/lib/clear_page.S
arch/arm64/lib/copy_page.S
arch/arm64/lib/memchr.S
arch/arm64/lib/memcmp.S
arch/arm64/lib/memcpy.S
arch/arm64/lib/memset.S
arch/arm64/lib/strchr.S
arch/arm64/lib/strcmp.S
arch/arm64/lib/strlen.S
arch/arm64/lib/strncmp.S
arch/arm64/lib/strnlen.S
arch/arm64/lib/strrchr.S
arch/arm64/mm/cache.S

diff --git a/arch/arm64/include/asm/linkage.h b/arch/arm64/include/asm/linkage.h
index b77e9b3f5371c9f38def6f6b4d3f66eb3557a875..43f8c25b3fda655577859cf7a8ce59c1a049ed6b 100644
        SYM_START(name, SYM_L_WEAK, SYM_A_NONE)         \
        bti c ;
 
-/*
- * Annotate a function as position independent, i.e., safe to be called before
- * the kernel virtual mapping is activated.
- */
-#define SYM_FUNC_START_PI(x)                   \
-               SYM_FUNC_START_ALIAS(__pi_##x); \
-               SYM_FUNC_START(x)
-
-#define SYM_FUNC_START_WEAK_PI(x)              \
-               SYM_FUNC_START_ALIAS(__pi_##x); \
-               SYM_FUNC_START_WEAK(x)
-
-#define SYM_FUNC_START_WEAK_ALIAS_PI(x)                \
-               SYM_FUNC_START_ALIAS(__pi_##x); \
-               SYM_START(x, SYM_L_WEAK, SYM_A_ALIGN)
-
-#define SYM_FUNC_END_PI(x)                     \
-               SYM_FUNC_END(x);                \
-               SYM_FUNC_END_ALIAS(__pi_##x)
-
-#define SYM_FUNC_END_ALIAS_PI(x)               \
-               SYM_FUNC_END_ALIAS(x);          \
-               SYM_FUNC_END_ALIAS(__pi_##x)
-
 #endif
diff --git a/arch/arm64/kvm/hyp/nvhe/cache.S b/arch/arm64/kvm/hyp/nvhe/cache.S
index 958734f4d6b0ed820ee7fc006d2fd38d2cf0a105..0c367eb5f4e28dce0dcd0986c10413b5361f5faf 100644
@@ -7,7 +7,8 @@
 #include <asm/assembler.h>
 #include <asm/alternative.h>
 
-SYM_FUNC_START_PI(dcache_clean_inval_poc)
+SYM_FUNC_START(__pi_dcache_clean_inval_poc)
        dcache_by_line_op civac, sy, x0, x1, x2, x3
        ret
-SYM_FUNC_END_PI(dcache_clean_inval_poc)
+SYM_FUNC_END(__pi_dcache_clean_inval_poc)
+SYM_FUNC_ALIAS(dcache_clean_inval_poc, __pi_dcache_clean_inval_poc)
diff --git a/arch/arm64/lib/clear_page.S b/arch/arm64/lib/clear_page.S
index 1fd5d790ab800321786324dbc4ea8804920e07f1..ebde40e7fa2b2f3a6f7b2e604f3c0b9bb572457c 100644
@@ -14,7 +14,7 @@
  * Parameters:
  *     x0 - dest
  */
-SYM_FUNC_START_PI(clear_page)
+SYM_FUNC_START(__pi_clear_page)
        mrs     x1, dczid_el0
        tbnz    x1, #4, 2f      /* Branch if DC ZVA is prohibited */
        and     w1, w1, #0xf
@@ -35,5 +35,6 @@ SYM_FUNC_START_PI(clear_page)
        tst     x0, #(PAGE_SIZE - 1)
        b.ne    2b
        ret
-SYM_FUNC_END_PI(clear_page)
+SYM_FUNC_END(__pi_clear_page)
+SYM_FUNC_ALIAS(clear_page, __pi_clear_page)
 EXPORT_SYMBOL(clear_page)
diff --git a/arch/arm64/lib/copy_page.S b/arch/arm64/lib/copy_page.S
index 29144f4cd4492741729f7189f17e8edb86f7e39f..c336d2ffdec55975d8beb31fe36c2b815cfa764e 100644
@@ -17,7 +17,7 @@
  *     x0 - dest
  *     x1 - src
  */
-SYM_FUNC_START_PI(copy_page)
+SYM_FUNC_START(__pi_copy_page)
 alternative_if ARM64_HAS_NO_HW_PREFETCH
        // Prefetch three cache lines ahead.
        prfm    pldl1strm, [x1, #128]
@@ -75,5 +75,6 @@ alternative_else_nop_endif
        stnp    x16, x17, [x0, #112 - 256]
 
        ret
-SYM_FUNC_END_PI(copy_page)
+SYM_FUNC_END(__pi_copy_page)
+SYM_FUNC_ALIAS(copy_page, __pi_copy_page)
 EXPORT_SYMBOL(copy_page)
diff --git a/arch/arm64/lib/memchr.S b/arch/arm64/lib/memchr.S
index 7c2276fdab543231568751b44c732faf0fa007fd..37a9f2a4f7f4b5fb1170e2b78353f6e020818444 100644
@@ -38,7 +38,7 @@
 
        .p2align 4
        nop
-SYM_FUNC_START_WEAK_PI(memchr)
+SYM_FUNC_START(__pi_memchr)
        and     chrin, chrin, #0xff
        lsr     wordcnt, cntin, #3
        cbz     wordcnt, L(byte_loop)
@@ -71,5 +71,6 @@ CPU_LE(       rev     tmp, tmp)
 L(not_found):
        mov     result, #0
        ret
-SYM_FUNC_END_PI(memchr)
+SYM_FUNC_END(__pi_memchr)
+SYM_FUNC_ALIAS_WEAK(memchr, __pi_memchr)
 EXPORT_SYMBOL_NOKASAN(memchr)
diff --git a/arch/arm64/lib/memcmp.S b/arch/arm64/lib/memcmp.S
index 7d956384222ff24c344dcb6857024ff6a861fde4..a5ccf2c55f911954eb9a340eb499d12e2742d215 100644
@@ -32,7 +32,7 @@
 #define tmp1           x7
 #define tmp2           x8
 
-SYM_FUNC_START_WEAK_PI(memcmp)
+SYM_FUNC_START(__pi_memcmp)
        subs    limit, limit, 8
        b.lo    L(less8)
 
@@ -134,6 +134,6 @@ L(byte_loop):
        b.eq    L(byte_loop)
        sub     result, data1w, data2w
        ret
-
-SYM_FUNC_END_PI(memcmp)
+SYM_FUNC_END(__pi_memcmp)
+SYM_FUNC_ALIAS_WEAK(memcmp, __pi_memcmp)
 EXPORT_SYMBOL_NOKASAN(memcmp)
diff --git a/arch/arm64/lib/memcpy.S b/arch/arm64/lib/memcpy.S
index b82fd64ee1e1c01391187997345c82f08dc58f15..4ab48d49c451564a4edb24b5a4ff2d158b85be5c 100644
    The loop tail is handled by always copying 64 bytes from the end.
 */
 
-SYM_FUNC_START_ALIAS(__memmove)
-SYM_FUNC_START_WEAK_ALIAS_PI(memmove)
-SYM_FUNC_START_ALIAS(__memcpy)
-SYM_FUNC_START_WEAK_PI(memcpy)
+SYM_FUNC_START(__pi_memcpy)
        add     srcend, src, count
        add     dstend, dstin, count
        cmp     count, 128
@@ -241,12 +238,16 @@ L(copy64_from_start):
        stp     B_l, B_h, [dstin, 16]
        stp     C_l, C_h, [dstin]
        ret
+SYM_FUNC_END(__pi_memcpy)
 
-SYM_FUNC_END_PI(memcpy)
-EXPORT_SYMBOL(memcpy)
-SYM_FUNC_END_ALIAS(__memcpy)
+SYM_FUNC_ALIAS(__memcpy, __pi_memcpy)
 EXPORT_SYMBOL(__memcpy)
-SYM_FUNC_END_ALIAS_PI(memmove)
-EXPORT_SYMBOL(memmove)
-SYM_FUNC_END_ALIAS(__memmove)
+SYM_FUNC_ALIAS_WEAK(memcpy, __memcpy)
+EXPORT_SYMBOL(memcpy)
+
+SYM_FUNC_ALIAS(__pi_memmove, __pi_memcpy)
+
+SYM_FUNC_ALIAS(__memmove, __pi_memmove)
 EXPORT_SYMBOL(__memmove)
+SYM_FUNC_ALIAS_WEAK(memmove, __memmove)
+EXPORT_SYMBOL(memmove)
diff --git a/arch/arm64/lib/memset.S b/arch/arm64/lib/memset.S
index a9c1c9a01ea906954953c6dce74d4c3e482328da..a5aebe82ad73b963d0afd331d68b13e87b5a7f40 100644
@@ -42,8 +42,7 @@ dst           .req    x8
 tmp3w          .req    w9
 tmp3           .req    x9
 
-SYM_FUNC_START_ALIAS(__memset)
-SYM_FUNC_START_WEAK_PI(memset)
+SYM_FUNC_START(__pi_memset)
        mov     dst, dstin      /* Preserve return value.  */
        and     A_lw, val, #255
        orr     A_lw, A_lw, A_lw, lsl #8
@@ -202,7 +201,10 @@ SYM_FUNC_START_WEAK_PI(memset)
        ands    count, count, zva_bits_x
        b.ne    .Ltail_maybe_long
        ret
-SYM_FUNC_END_PI(memset)
-EXPORT_SYMBOL(memset)
-SYM_FUNC_END_ALIAS(__memset)
+SYM_FUNC_END(__pi_memset)
+
+SYM_FUNC_ALIAS(__memset, __pi_memset)
 EXPORT_SYMBOL(__memset)
+
+SYM_FUNC_ALIAS_WEAK(memset, __pi_memset)
+EXPORT_SYMBOL(memset)
diff --git a/arch/arm64/lib/strchr.S b/arch/arm64/lib/strchr.S
index 1f47eae3b0d6d618d24c347db7c2da9ffce98068..94ee67a6b212c1f2ad6e582b1a5ac91c84e1eca9 100644
@@ -18,7 +18,7 @@
  * Returns:
  *     x0 - address of first occurrence of 'c' or 0
  */
-SYM_FUNC_START_WEAK(strchr)
+SYM_FUNC_START(__pi_strchr)
        and     w1, w1, #0xff
 1:     ldrb    w2, [x0], #1
        cmp     w2, w1
@@ -28,5 +28,7 @@ SYM_FUNC_START_WEAK(strchr)
        cmp     w2, w1
        csel    x0, x0, xzr, eq
        ret
-SYM_FUNC_END(strchr)
+SYM_FUNC_END(__pi_strchr)
+
+SYM_FUNC_ALIAS_WEAK(strchr, __pi_strchr)
 EXPORT_SYMBOL_NOKASAN(strchr)
diff --git a/arch/arm64/lib/strcmp.S b/arch/arm64/lib/strcmp.S
index 83bcad72ec97205f4e7dfd04d5d92ee304d81c13..cda7de747efcfb83795abc8f49c22a5a12966775 100644
@@ -41,7 +41,7 @@
 
        /* Start of performance-critical section  -- one 64B cache line.  */
        .align 6
-SYM_FUNC_START_WEAK_PI(strcmp)
+SYM_FUNC_START(__pi_strcmp)
        eor     tmp1, src1, src2
        mov     zeroones, #REP8_01
        tst     tmp1, #7
@@ -171,6 +171,6 @@ L(loop_misaligned):
 L(done):
        sub     result, data1, data2
        ret
-
-SYM_FUNC_END_PI(strcmp)
+SYM_FUNC_END(__pi_strcmp)
+SYM_FUNC_ALIAS_WEAK(strcmp, __pi_strcmp)
 EXPORT_SYMBOL_NOHWKASAN(strcmp)
diff --git a/arch/arm64/lib/strlen.S b/arch/arm64/lib/strlen.S
index 1648790e91b3ce5b8db6e7627ed73a43b7f5c39d..4919fe81ae540edcee6c9abc459f420e00f378c8 100644
@@ -79,7 +79,7 @@
           whether the first fetch, which may be misaligned, crosses a page
           boundary.  */
 
-SYM_FUNC_START_WEAK_PI(strlen)
+SYM_FUNC_START(__pi_strlen)
        and     tmp1, srcin, MIN_PAGE_SIZE - 1
        mov     zeroones, REP8_01
        cmp     tmp1, MIN_PAGE_SIZE - 16
@@ -208,6 +208,6 @@ L(page_cross):
        csel    data1, data1, tmp4, eq
        csel    data2, data2, tmp2, eq
        b       L(page_cross_entry)
-
-SYM_FUNC_END_PI(strlen)
+SYM_FUNC_END(__pi_strlen)
+SYM_FUNC_ALIAS_WEAK(strlen, __pi_strlen)
 EXPORT_SYMBOL_NOKASAN(strlen)
diff --git a/arch/arm64/lib/strncmp.S b/arch/arm64/lib/strncmp.S
index e42bcfcd37e6f691a8b78e3c17a6330272cbefd0..a848abcec975e79e93fca50c8066b8421049c6b3 100644
@@ -44,7 +44,7 @@
 #define endloop                x15
 #define count          mask
 
-SYM_FUNC_START_WEAK_PI(strncmp)
+SYM_FUNC_START(__pi_strncmp)
        cbz     limit, L(ret0)
        eor     tmp1, src1, src2
        mov     zeroones, #REP8_01
@@ -256,6 +256,6 @@ L(done_loop):
 L(ret0):
        mov     result, #0
        ret
-
-SYM_FUNC_END_PI(strncmp)
+SYM_FUNC_END(__pi_strncmp)
+SYM_FUNC_ALIAS_WEAK(strncmp, __pi_strncmp)
 EXPORT_SYMBOL_NOHWKASAN(strncmp)
diff --git a/arch/arm64/lib/strnlen.S b/arch/arm64/lib/strnlen.S
index b72913a990389a22be61fc981a730816e9a427b6..d5ac0e10a01db79d040ad653b8fa7fb4daa5b555 100644
@@ -47,7 +47,7 @@ limit_wd      .req    x14
 #define REP8_7f 0x7f7f7f7f7f7f7f7f
 #define REP8_80 0x8080808080808080
 
-SYM_FUNC_START_WEAK_PI(strnlen)
+SYM_FUNC_START(__pi_strnlen)
        cbz     limit, .Lhit_limit
        mov     zeroones, #REP8_01
        bic     src, srcin, #15
@@ -156,5 +156,7 @@ CPU_LE( lsr tmp2, tmp2, tmp4 )      /* Shift (tmp1 & 63).  */
 .Lhit_limit:
        mov     len, limit
        ret
-SYM_FUNC_END_PI(strnlen)
+SYM_FUNC_END(__pi_strnlen)
+
+SYM_FUNC_ALIAS_WEAK(strnlen, __pi_strnlen)
 EXPORT_SYMBOL_NOKASAN(strnlen)
diff --git a/arch/arm64/lib/strrchr.S b/arch/arm64/lib/strrchr.S
index 13132d1ed6d127913883f3215a3c0819cbb5598e..a5123cf0ce125aa3b9842ba638eb9a3bc3987b92 100644
@@ -18,7 +18,7 @@
  * Returns:
  *     x0 - address of last occurrence of 'c' or 0
  */
-SYM_FUNC_START_WEAK_PI(strrchr)
+SYM_FUNC_START(__pi_strrchr)
        mov     x3, #0
        and     w1, w1, #0xff
 1:     ldrb    w2, [x0], #1
@@ -29,5 +29,6 @@ SYM_FUNC_START_WEAK_PI(strrchr)
        b       1b
 2:     mov     x0, x3
        ret
-SYM_FUNC_END_PI(strrchr)
+SYM_FUNC_END(__pi_strrchr)
+SYM_FUNC_ALIAS_WEAK(strrchr, __pi_strrchr)
 EXPORT_SYMBOL_NOKASAN(strrchr)
diff --git a/arch/arm64/mm/cache.S b/arch/arm64/mm/cache.S
index 7d0563db42014b40185fa94c62ae99fadd403916..0ea6cc25dc66356de86d262cc3d48bb993ec3cd0 100644
@@ -107,10 +107,11 @@ SYM_FUNC_END(icache_inval_pou)
  *     - start   - virtual start address of region
  *     - end     - virtual end address of region
  */
-SYM_FUNC_START_PI(dcache_clean_inval_poc)
+SYM_FUNC_START(__pi_dcache_clean_inval_poc)
        dcache_by_line_op civac, sy, x0, x1, x2, x3
        ret
-SYM_FUNC_END_PI(dcache_clean_inval_poc)
+SYM_FUNC_END(__pi_dcache_clean_inval_poc)
+SYM_FUNC_ALIAS(dcache_clean_inval_poc, __pi_dcache_clean_inval_poc)
 
 /*
  *     dcache_clean_pou(start, end)
@@ -140,7 +141,7 @@ SYM_FUNC_END(dcache_clean_pou)
  *     - start   - kernel start address of region
  *     - end     - kernel end address of region
  */
-SYM_FUNC_START_PI(dcache_inval_poc)
+SYM_FUNC_START(__pi_dcache_inval_poc)
        dcache_line_size x2, x3
        sub     x3, x2, #1
        tst     x1, x3                          // end cache line aligned?
@@ -158,7 +159,8 @@ SYM_FUNC_START_PI(dcache_inval_poc)
        b.lo    2b
        dsb     sy
        ret
-SYM_FUNC_END_PI(dcache_inval_poc)
+SYM_FUNC_END(__pi_dcache_inval_poc)
+SYM_FUNC_ALIAS(dcache_inval_poc, __pi_dcache_inval_poc)
 
 /*
  *     dcache_clean_poc(start, end)
@@ -169,10 +171,11 @@ SYM_FUNC_END_PI(dcache_inval_poc)
  *     - start   - virtual start address of region
  *     - end     - virtual end address of region
  */
-SYM_FUNC_START_PI(dcache_clean_poc)
+SYM_FUNC_START(__pi_dcache_clean_poc)
        dcache_by_line_op cvac, sy, x0, x1, x2, x3
        ret
-SYM_FUNC_END_PI(dcache_clean_poc)
+SYM_FUNC_END(__pi_dcache_clean_poc)
+SYM_FUNC_ALIAS(dcache_clean_poc, __pi_dcache_clean_poc)
 
 /*
  *     dcache_clean_pop(start, end)
@@ -183,13 +186,14 @@ SYM_FUNC_END_PI(dcache_clean_poc)
  *     - start   - virtual start address of region
  *     - end     - virtual end address of region
  */
-SYM_FUNC_START_PI(dcache_clean_pop)
+SYM_FUNC_START(__pi_dcache_clean_pop)
        alternative_if_not ARM64_HAS_DCPOP
        b       dcache_clean_poc
        alternative_else_nop_endif
        dcache_by_line_op cvap, sy, x0, x1, x2, x3
        ret
-SYM_FUNC_END_PI(dcache_clean_pop)
+SYM_FUNC_END(__pi_dcache_clean_pop)
+SYM_FUNC_ALIAS(dcache_clean_pop, __pi_dcache_clean_pop)
 
 /*
  *     __dma_flush_area(start, size)
@@ -199,11 +203,12 @@ SYM_FUNC_END_PI(dcache_clean_pop)
  *     - start   - virtual start address of region
  *     - size    - size in question
  */
-SYM_FUNC_START_PI(__dma_flush_area)
+SYM_FUNC_START(__pi___dma_flush_area)
        add     x1, x0, x1
        dcache_by_line_op civac, sy, x0, x1, x2, x3
        ret
-SYM_FUNC_END_PI(__dma_flush_area)
+SYM_FUNC_END(__pi___dma_flush_area)
+SYM_FUNC_ALIAS(__dma_flush_area, __pi___dma_flush_area)
 
 /*
  *     __dma_map_area(start, size, dir)
@@ -211,12 +216,13 @@ SYM_FUNC_END_PI(__dma_flush_area)
  *     - size  - size of region
  *     - dir   - DMA direction
  */
-SYM_FUNC_START_PI(__dma_map_area)
+SYM_FUNC_START(__pi___dma_map_area)
        add     x1, x0, x1
        cmp     w2, #DMA_FROM_DEVICE
        b.eq    __pi_dcache_inval_poc
        b       __pi_dcache_clean_poc
-SYM_FUNC_END_PI(__dma_map_area)
+SYM_FUNC_END(__pi___dma_map_area)
+SYM_FUNC_ALIAS(__dma_map_area, __pi___dma_map_area)
 
 /*
  *     __dma_unmap_area(start, size, dir)
@@ -224,9 +230,10 @@ SYM_FUNC_END_PI(__dma_map_area)
  *     - size  - size of region
  *     - dir   - DMA direction
  */
-SYM_FUNC_START_PI(__dma_unmap_area)
+SYM_FUNC_START(__pi___dma_unmap_area)
        add     x1, x0, x1
        cmp     w2, #DMA_TO_DEVICE
        b.ne    __pi_dcache_inval_poc
        ret
-SYM_FUNC_END_PI(__dma_unmap_area)
+SYM_FUNC_END(__pi___dma_unmap_area)
+SYM_FUNC_ALIAS(__dma_unmap_area, __pi___dma_unmap_area)