riscv: remove unused functions in traps_misaligned.c
author Clément Léger <cleger@rivosinc.com>
Wed, 4 Oct 2023 15:13:58 +0000 (17:13 +0200)
committer Palmer Dabbelt <palmer@rivosinc.com>
Wed, 1 Nov 2023 15:34:52 +0000 (08:34 -0700)
Replace the macros with the only two functions that are actually called
from this file, store_u8() and load_u8().
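
For illustration, expanding the removed macro for (u8, lbu) yields
essentially the same helper that the patch now open-codes (the
replacement additionally marks the asm volatile). A sketch of the
expansion, reconstructed from the macro body in the diff below:

static inline u8 load_u8(const u8 *addr)
{
	u8 val;

	/* unprivileged byte load; the "m" operand ties the asm to *addr */
	asm ("lbu %0, %1" : "=&r" (val) : "m" (*addr));
	return val;
}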

Signed-off-by: Clément Léger <cleger@rivosinc.com>
Link: https://lore.kernel.org/r/20231004151405.521596-2-cleger@rivosinc.com
Signed-off-by: Palmer Dabbelt <palmer@rivosinc.com>
arch/riscv/kernel/traps_misaligned.c

index 378f5b151443564020e775edfc13e6e90e557152..e7bfb33089c12a0b473a1ea048133b8b49c9f514 100644
 #define PRECISION_S 0
 #define PRECISION_D 1
 
-#define DECLARE_UNPRIVILEGED_LOAD_FUNCTION(type, insn)                 \
-static inline type load_##type(const type *addr)                       \
-{                                                                      \
-       type val;                                                       \
-       asm (#insn " %0, %1"                                            \
-       : "=&r" (val) : "m" (*addr));                                   \
-       return val;                                                     \
-}
+static inline u8 load_u8(const u8 *addr)
+{
+       u8 val;
 
-#define DECLARE_UNPRIVILEGED_STORE_FUNCTION(type, insn)                        \
-static inline void store_##type(type *addr, type val)                  \
-{                                                                      \
-       asm volatile (#insn " %0, %1\n"                                 \
-       : : "r" (val), "m" (*addr));                                    \
-}
+       asm volatile("lbu %0, %1" : "=&r" (val) : "m" (*addr));
 
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u8, lbu)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u16, lhu)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(s8, lb)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(s16, lh)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(s32, lw)
-DECLARE_UNPRIVILEGED_STORE_FUNCTION(u8, sb)
-DECLARE_UNPRIVILEGED_STORE_FUNCTION(u16, sh)
-DECLARE_UNPRIVILEGED_STORE_FUNCTION(u32, sw)
-#if defined(CONFIG_64BIT)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u32, lwu)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u64, ld)
-DECLARE_UNPRIVILEGED_STORE_FUNCTION(u64, sd)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(ulong, ld)
-#else
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u32, lw)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(ulong, lw)
-
-static inline u64 load_u64(const u64 *addr)
-{
-       return load_u32((u32 *)addr)
-               + ((u64)load_u32((u32 *)addr + 1) << 32);
+       return val;
 }
 
-static inline void store_u64(u64 *addr, u64 val)
+static inline void store_u8(u8 *addr, u8 val)
 {
-       store_u32((u32 *)addr, val);
-       store_u32((u32 *)addr + 1, val >> 32);
+       asm volatile ("sb %0, %1\n" : : "r" (val), "m" (*addr));
 }
-#endif
 
 static inline ulong get_insn(ulong mepc)
 {
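
Only the byte-sized helpers survive because the misaligned access
emulation assembles and writes back wider values one byte at a time, so
no wider unprivileged accessors are needed. A minimal sketch of that
pattern, assuming little-endian byte order; the function and variable
names here are illustrative, not code from this file:

static inline ulong emulate_load_bytes(const u8 *addr, int len)
{
	ulong val = 0;
	int i;

	/* build the value from individual unprivileged byte loads */
	for (i = 0; i < len; i++)
		val |= (ulong)load_u8(addr + i) << (8 * i);

	return val;
}

static inline void emulate_store_bytes(u8 *addr, ulong val, int len)
{
	int i;

	/* write the value out one unprivileged byte store at a time */
	for (i = 0; i < len; i++)
		store_u8(addr + i, (u8)(val >> (8 * i)));
}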