.if \vector == X86_TRAP_BP
/* #BP advances %rip to the next instruction */
- UNWIND_HINT_IRET_REGS offset=\has_error_code*8 signal=0
+ UNWIND_HINT_IRET_ENTRY offset=\has_error_code*8 signal=0
.else
- UNWIND_HINT_IRET_REGS offset=\has_error_code*8
+ UNWIND_HINT_IRET_ENTRY offset=\has_error_code*8
.endif
ENDBR
*/
.macro idtentry_mce_db vector asmsym cfunc
SYM_CODE_START(\asmsym)
- UNWIND_HINT_IRET_REGS
+ UNWIND_HINT_IRET_ENTRY
ENDBR
ASM_CLAC
cld
*/
.macro idtentry_vc vector asmsym cfunc
SYM_CODE_START(\asmsym)
- UNWIND_HINT_IRET_REGS
+ UNWIND_HINT_IRET_ENTRY
ENDBR
ASM_CLAC
cld
*/
.macro idtentry_df vector asmsym cfunc
SYM_CODE_START(\asmsym)
- UNWIND_HINT_IRET_REGS offset=8
+ UNWIND_HINT_IRET_ENTRY offset=8
ENDBR
ASM_CLAC
cld
FENCE_SWAPGS_KERNEL_ENTRY
CALL_DEPTH_ACCOUNT
leaq 8(%rsp), %rax /* return pt_regs pointer */
- ANNOTATE_UNRET_END
+ VALIDATE_UNRET_END
RET
.Lbstep_iret:
* when PAGE_TABLE_ISOLATION is in use. Do not clobber.
*/
SYM_CODE_START(asm_exc_nmi)
- UNWIND_HINT_IRET_REGS
+ UNWIND_HINT_IRET_ENTRY
ENDBR
/*
* Abuse ANNOTATE_RETPOLINE_SAFE on a NOP to indicate UNRET_END, should
 * eventually turn into its own annotation.
*/
-.macro ANNOTATE_UNRET_END
-#ifdef CONFIG_DEBUG_ENTRY
+.macro VALIDATE_UNRET_END
+#if defined(CONFIG_NOINSTR_VALIDATION) && defined(CONFIG_CPU_UNRET_ENTRY)
ANNOTATE_RETPOLINE_SAFE
nop
#endif
.macro UNTRAIN_RET
#if defined(CONFIG_CPU_UNRET_ENTRY) || defined(CONFIG_CPU_IBPB_ENTRY) || \
defined(CONFIG_CALL_DEPTH_TRACKING)
- ANNOTATE_UNRET_END
+ VALIDATE_UNRET_END
ALTERNATIVE_3 "", \
CALL_ZEN_UNTRAIN_RET, X86_FEATURE_UNRET, \
"call entry_ibpb", X86_FEATURE_ENTRY_IBPB, \
.macro UNTRAIN_RET_FROM_CALL
#if defined(CONFIG_CPU_UNRET_ENTRY) || defined(CONFIG_CPU_IBPB_ENTRY) || \
defined(CONFIG_CALL_DEPTH_TRACKING)
- ANNOTATE_UNRET_END
+ VALIDATE_UNRET_END
ALTERNATIVE_3 "", \
CALL_ZEN_UNTRAIN_RET, X86_FEATURE_UNRET, \
"call entry_ibpb", X86_FEATURE_ENTRY_IBPB, \
.endm
.macro UNWIND_HINT_ENTRY
- UNWIND_HINT type=UNWIND_HINT_TYPE_ENTRY end=1
+ VALIDATE_UNRET_BEGIN
+ UNWIND_HINT_EMPTY
.endm
.macro UNWIND_HINT_REGS base=%rsp offset=0 indirect=0 extra=1 partial=0 signal=1
UNWIND_HINT_REGS base=\base offset=\offset partial=1 signal=\signal
.endm
+.macro UNWIND_HINT_IRET_ENTRY base=%rsp offset=0 signal=1
+ VALIDATE_UNRET_BEGIN
+ UNWIND_HINT_IRET_REGS base=\base offset=\offset signal=\signal
+.endm
+
.macro UNWIND_HINT_FUNC
UNWIND_HINT sp_reg=ORC_REG_SP sp_offset=8 type=UNWIND_HINT_TYPE_FUNC
.endm
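Taken together, UNWIND_HINT_ENTRY and UNWIND_HINT_IRET_ENTRY open an unret-validation region via VALIDATE_UNRET_BEGIN, and UNTRAIN_RET closes it via VALIDATE_UNRET_END. A condensed sketch of the shape objtool now checks for — asm_exc_example and exc_example are hypothetical names, and the real idtentry macros split these steps across helpers such as error_entry:

SYM_CODE_START(asm_exc_example)			/* hypothetical entry stub */
	UNWIND_HINT_IRET_ENTRY			/* VALIDATE_UNRET_BEGIN + IRET_REGS hint */
	ENDBR
	ASM_CLAC
	cld
	PUSH_AND_CLEAR_REGS			/* build pt_regs */
	UNTRAIN_RET				/* supplies VALIDATE_UNRET_END */
	movq	%rsp, %rdi			/* pt_regs pointer for the C handler */
	call	exc_example			/* hypothetical C handler */
	jmp	error_return
SYM_CODE_END(asm_exc_example)

Any path from the UNWIND_HINT_IRET_ENTRY that can reach a RET without passing UNTRAIN_RET (or an explicit VALIDATE_UNRET_END) is flagged by the objtool pass added below.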
UNWIND_HINT_IRET_REGS offset=8
ENDBR
- ANNOTATE_UNRET_END
-
/* Build pt_regs */
PUSH_AND_CLEAR_REGS
SYM_CODE_START_LOCAL(early_idt_handler_common)
UNWIND_HINT_IRET_REGS offset=16
- ANNOTATE_UNRET_END
/*
* The stack is the hardware frame, an error code or zero, and the
* vector number.
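To make the offsets used here concrete: given that push order, the stack at early_idt_handler_common looks roughly as follows (a sketch; 8-byte slots, offsets relative to %rsp), which is why the hint above uses offset=16 to locate the hardware IRET frame:

	/*
	 *   0(%rsp)  vector number
	 *   8(%rsp)  error code (or dummy zero)
	 *  16(%rsp)  RIP      <-- start of the hardware IRET frame
	 *  24(%rsp)  CS
	 *  32(%rsp)  RFLAGS
	 *  40(%rsp)  RSP
	 *  48(%rsp)  SS
	 */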
UNWIND_HINT_IRET_REGS offset=8
ENDBR
- ANNOTATE_UNRET_END
-
/* Build pt_regs */
PUSH_AND_CLEAR_REGS
.popsection
.endm
+/*
+ * Use objtool to validate the entry requirement that all code paths do
+ * VALIDATE_UNRET_END before RET.
+ *
+ * NOTE: The macro must be used at the beginning of a global symbol, otherwise
+ * it will be ignored.
+ */
+.macro VALIDATE_UNRET_BEGIN
+#if defined(CONFIG_NOINSTR_VALIDATION) && defined(CONFIG_CPU_UNRET_ENTRY)
+.Lhere_\@:
+ .pushsection .discard.validate_unret
+ .long .Lhere_\@ - .
+ .popsection
+#endif
+.endm
+
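For reference, each expansion of VALIDATE_UNRET_BEGIN drops one PC-relative entry into .discard.validate_unret. With the \@ counter spelled out as a made-up local label, a single use boils down to:

.Lhere_7:				/* marks the first instruction of the entry point */
	.pushsection .discard.validate_unret
	.long	.Lhere_7 - .		/* PC-relative reference back to .Lhere_7 */
	.popsection

The assembler turns that .long into a section-relative relocation whose addend is the marked instruction's offset within its section, which is exactly what read_validate_unret_hints() (added further down) feeds to find_insn() to set insn->unret.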
.macro REACHABLE
.Lhere_\@:
.pushsection .discard.reachable
#define UNWIND_HINT_TYPE_REGS_PARTIAL 2
/* The below hint types don't have corresponding ORC types */
#define UNWIND_HINT_TYPE_FUNC 3
-#define UNWIND_HINT_TYPE_ENTRY 4
-#define UNWIND_HINT_TYPE_SAVE 5
-#define UNWIND_HINT_TYPE_RESTORE 6
+#define UNWIND_HINT_TYPE_SAVE 4
+#define UNWIND_HINT_TYPE_RESTORE 5
#endif /* _LINUX_OBJTOOL_TYPES_H */
#define UNWIND_HINT_TYPE_REGS_PARTIAL 2
/* The below hint types don't have corresponding ORC types */
#define UNWIND_HINT_TYPE_FUNC 3
-#define UNWIND_HINT_TYPE_ENTRY 4
-#define UNWIND_HINT_TYPE_SAVE 5
-#define UNWIND_HINT_TYPE_RESTORE 6
+#define UNWIND_HINT_TYPE_SAVE 4
+#define UNWIND_HINT_TYPE_RESTORE 5
#endif /* _LINUX_OBJTOOL_TYPES_H */
WARN_FUNC("UNWIND_HINT_IRET_REGS without ENDBR",
insn->sec, insn->offset);
}
-
- insn->entry = 1;
}
}
- if (hint->type == UNWIND_HINT_TYPE_ENTRY) {
- hint->type = UNWIND_HINT_TYPE_CALL;
- insn->entry = 1;
- }
-
if (hint->type == UNWIND_HINT_TYPE_FUNC) {
insn->cfi = &func_cfi;
continue;
return 0;
}
+static int read_validate_unret_hints(struct objtool_file *file)
+{
+ struct section *sec;
+ struct instruction *insn;
+ struct reloc *reloc;
+
+ sec = find_section_by_name(file->elf, ".rela.discard.validate_unret");
+ if (!sec)
+ return 0;
+
+ list_for_each_entry(reloc, &sec->reloc_list, list) {
+ if (reloc->sym->type != STT_SECTION) {
+ WARN("unexpected relocation symbol type in %s", sec->name);
+ return -1;
+ }
+
+ insn = find_insn(file, reloc->sym->sec, reloc->addend);
+ if (!insn) {
+ WARN("bad .discard.instr_end entry");
+ return -1;
+ }
+ insn->unret = 1;
+ }
+
+ return 0;
+}
+
static int read_intra_function_calls(struct objtool_file *file)
{
struct instruction *insn;
if (ret)
return ret;
+ ret = read_validate_unret_hints(file);
+ if (ret)
+ return ret;
+
return 0;
}
/*
* Validate rethunk entry constraint: must untrain RET before the first RET.
*
- * Follow every branch (intra-function) and ensure ANNOTATE_UNRET_END comes
+ * Follow every branch (intra-function) and ensure VALIDATE_UNRET_END comes
* before an actual RET instruction.
*/
-static int validate_entry(struct objtool_file *file, struct instruction *insn)
+static int validate_unret(struct objtool_file *file, struct instruction *insn)
{
struct instruction *next, *dest;
int ret, warnings = 0;
for (;;) {
next = next_insn_to_validate(file, insn);
- if (insn->visited & VISITED_ENTRY)
+ if (insn->visited & VISITED_UNRET)
return 0;
- insn->visited |= VISITED_ENTRY;
+ insn->visited |= VISITED_UNRET;
if (!insn->ignore_alts && insn->alts) {
struct alternative *alt;
if (alt->skip_orig)
skip_orig = true;
- ret = validate_entry(file, alt->insn);
+ ret = validate_unret(file, alt->insn);
if (ret) {
if (opts.backtrace)
BT_FUNC("(alt)", insn);
insn->sec, insn->offset);
return -1;
}
- ret = validate_entry(file, insn->jump_dest);
+ ret = validate_unret(file, insn->jump_dest);
if (ret) {
if (opts.backtrace) {
BT_FUNC("(branch%s)", insn,
return -1;
}
- ret = validate_entry(file, dest);
+ ret = validate_unret(file, dest);
if (ret) {
if (opts.backtrace)
BT_FUNC("(call)", insn);
}
/*
- * Validate that all branches starting at 'insn->entry' encounter UNRET_END
- * before RET.
+ * Validate that all branches starting at VALIDATE_UNRET_BEGIN encounter
+ * VALIDATE_UNRET_END before RET.
*/
-static int validate_unret(struct objtool_file *file)
+static int validate_unrets(struct objtool_file *file)
{
struct instruction *insn;
int ret, warnings = 0;
for_each_insn(file, insn) {
- if (!insn->entry)
+ if (!insn->unret)
continue;
- ret = validate_entry(file, insn);
+ ret = validate_unret(file, insn);
if (ret < 0) {
WARN_FUNC("Failed UNRET validation", insn->sec, insn->offset);
return ret;
* Must be after validate_branch() and friends, it plays
* further games with insn->visited.
*/
- ret = validate_unret(file);
+ ret = validate_unrets(file);
if (ret < 0)
return ret;
warnings += ret;
restore : 1,
retpoline_safe : 1,
noendbr : 1,
- entry : 1,
+ unret : 1,
visited : 4,
no_reloc : 1;
/* 10 bit hole */
#define VISITED_BRANCH 0x01
#define VISITED_BRANCH_UACCESS 0x02
#define VISITED_BRANCH_MASK 0x03
-#define VISITED_ENTRY 0x04
+#define VISITED_UNRET 0x04
static inline bool is_static_jump(struct instruction *insn)
{