static_call,x86: Robustify trampoline patching
author: Peter Zijlstra <peterz@infradead.org>
Sat, 30 Oct 2021 07:47:58 +0000 (09:47 +0200)
committer: Peter Zijlstra <peterz@infradead.org>
Thu, 11 Nov 2021 12:09:31 +0000 (13:09 +0100)
Add a few signature bytes after the static call trampoline and verify
those bytes match before patching the trampoline. This avoids patching
random other JMPs (such as CFI jump-table entries) instead.

These bytes decode as:

   d:   53                      push   %rbx
   e:   43 54                   rex.XB push %r12

And happen to spell "SCT".

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Link: https://lkml.kernel.org/r/20211030074758.GT174703@worktop.programming.kicks-ass.net
arch/x86/include/asm/static_call.h
arch/x86/kernel/static_call.c
tools/objtool/check.c

index cbb67b6030f97856b85b1caf1edfa27246b9e1fe..39ebe05118691dcafbad974393b9559291cd3d5d 100644 (file)
@@ -27,6 +27,7 @@
            ".globl " STATIC_CALL_TRAMP_STR(name) "             \n"     \
            STATIC_CALL_TRAMP_STR(name) ":                      \n"     \
            insns "                                             \n"     \
+           ".byte 0x53, 0x43, 0x54                             \n"     \
            ".type " STATIC_CALL_TRAMP_STR(name) ", @function   \n"     \
            ".size " STATIC_CALL_TRAMP_STR(name) ", . - " STATIC_CALL_TRAMP_STR(name) " \n" \
            ".popsection                                        \n")
index ea028e736831a32b87dc1cbf67b1d5fd52d7dac7..9c407a33a77413466ea928648a29c83a250013c5 100644 (file)
@@ -56,10 +56,15 @@ static void __ref __static_call_transform(void *insn, enum insn_type type, void
        text_poke_bp(insn, code, size, emulate);
 }
 
-static void __static_call_validate(void *insn, bool tail)
+static void __static_call_validate(void *insn, bool tail, bool tramp)
 {
        u8 opcode = *(u8 *)insn;
 
+       if (tramp && memcmp(insn+5, "SCT", 3)) {
+               pr_err("trampoline signature fail");
+               BUG();
+       }
+
        if (tail) {
                if (opcode == JMP32_INSN_OPCODE ||
                    opcode == RET_INSN_OPCODE)
@@ -74,7 +79,8 @@ static void __static_call_validate(void *insn, bool tail)
        /*
         * If we ever trigger this, our text is corrupt, we'll probably not live long.
         */
-       WARN_ONCE(1, "unexpected static_call insn opcode 0x%x at %pS\n", opcode, insn);
+       pr_err("unexpected static_call insn opcode 0x%x at %pS\n", opcode, insn);
+       BUG();
 }
 
 static inline enum insn_type __sc_insn(bool null, bool tail)
@@ -97,12 +103,12 @@ void arch_static_call_transform(void *site, void *tramp, void *func, bool tail)
        mutex_lock(&text_mutex);
 
        if (tramp) {
-               __static_call_validate(tramp, true);
+               __static_call_validate(tramp, true, true);
                __static_call_transform(tramp, __sc_insn(!func, true), func);
        }
 
        if (IS_ENABLED(CONFIG_HAVE_STATIC_CALL_INLINE) && site) {
-               __static_call_validate(site, tail);
+               __static_call_validate(site, tail, false);
                __static_call_transform(site, __sc_insn(!func, tail), func);
        }
 
index add39902166d6bb2d9bd52fd983487f03e921d2e..21735829b860cac3e197a0564a14a992b12e2289 100644 (file)
@@ -3310,6 +3310,9 @@ static bool ignore_unreachable_insn(struct objtool_file *file, struct instructio
        if (!insn->func)
                return false;
 
+       if (insn->func->static_call_tramp)
+               return true;
+
        /*
         * CONFIG_UBSAN_TRAP inserts a UD2 when it sees
         * __builtin_unreachable().  The BUG() macro has an unreachable() after