4 BPF_MOV64_IMM(BPF_REG_0, 1),
5 BPF_LD_MAP_FD(BPF_REG_6, 0),
6 BPF_MOV64_REG(BPF_REG_1, BPF_REG_6),
7 BPF_MOV64_REG(BPF_REG_2, BPF_REG_FP),
8 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
9 BPF_ST_MEM(BPF_DW, BPF_REG_FP, -8, 0),
10 BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
11 BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 1),
14 BPF_MOV64_REG(BPF_REG_9, BPF_REG_0),
16 BPF_MOV64_REG(BPF_REG_1, BPF_REG_6),
17 BPF_MOV64_REG(BPF_REG_2, BPF_REG_FP),
18 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
19 BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
20 BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 1),
23 BPF_MOV64_REG(BPF_REG_8, BPF_REG_0),
25 BPF_ALU64_REG(BPF_SUB, BPF_REG_9, BPF_REG_8), /* map_value_ptr -= map_value_ptr */
26 BPF_MOV64_REG(BPF_REG_2, BPF_REG_9),
27 BPF_JMP_IMM(BPF_JLT, BPF_REG_2, 8, 1),
30 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, 1), /* R2=scalar(umin=1, umax=8) */
31 BPF_MOV64_REG(BPF_REG_1, BPF_REG_FP),
32 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -8),
33 BPF_MOV64_IMM(BPF_REG_3, 0),
34 BPF_EMIT_CALL(BPF_FUNC_probe_read_kernel),
37 .prog_type = BPF_PROG_TYPE_TRACEPOINT,
38 .fixup_map_array_48b = { 1 },
39 .result = VERBOSE_ACCEPT,
41 "mark_precise: frame0: last_idx 26 first_idx 20\
42 mark_precise: frame0: regs=r2 stack= before 25\
43 mark_precise: frame0: regs=r2 stack= before 24\
44 mark_precise: frame0: regs=r2 stack= before 23\
45 mark_precise: frame0: regs=r2 stack= before 22\
46 mark_precise: frame0: regs=r2 stack= before 20\
47 parent didn't have regs=4 stack=0 marks:\
48 mark_precise: frame0: last_idx 19 first_idx 10\
49 mark_precise: frame0: regs=r2 stack= before 19\
50 mark_precise: frame0: regs=r9 stack= before 18\
51 mark_precise: frame0: regs=r8,r9 stack= before 17\
52 mark_precise: frame0: regs=r0,r9 stack= before 15\
53 mark_precise: frame0: regs=r0,r9 stack= before 14\
54 mark_precise: frame0: regs=r9 stack= before 13\
55 mark_precise: frame0: regs=r9 stack= before 12\
56 mark_precise: frame0: regs=r9 stack= before 11\
57 mark_precise: frame0: regs=r9 stack= before 10\
58 parent already had regs=0 stack=0 marks:",
/* NOTE(review): tail of a BPF verifier selftest entry (struct bpf_test
 * initializer). The opening brace, .descr string and .insns = { line are
 * not visible in this chunk (extraction elided lines) -- confirm against
 * the original tools/testing/selftests/bpf/verifier/precise.c.
 *
 * Program under test: look up the same array-map element twice, subtract
 * the two map-value pointers (yielding an unknown scalar), bound the
 * result, then pass it as the size argument to bpf_probe_read_kernel().
 * The verifier must backtrack and mark r2 (the size) precise.
 */
63 BPF_MOV64_IMM(BPF_REG_0, 1),
64 BPF_LD_MAP_FD(BPF_REG_6, 0),          /* r6 = map fd, patched via fixup_map_array_48b */
65 BPF_MOV64_REG(BPF_REG_1, BPF_REG_6),
66 BPF_MOV64_REG(BPF_REG_2, BPF_REG_FP),
67 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),  /* r2 = fp - 8 (key pointer) */
68 BPF_ST_MEM(BPF_DW, BPF_REG_FP, -8, 0),  /* *(fp - 8) = 0 (key = 0) */
69 BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
70 BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 1),  /* bail (exit path elided) if lookup failed */
73 BPF_MOV64_REG(BPF_REG_9, BPF_REG_0),    /* r9 = first map_value pointer */
/* Second lookup of the same key; key at fp-8 is still 0. */
75 BPF_MOV64_REG(BPF_REG_1, BPF_REG_6),
76 BPF_MOV64_REG(BPF_REG_2, BPF_REG_FP),
77 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
78 BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
79 BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 1),
82 BPF_MOV64_REG(BPF_REG_8, BPF_REG_0),    /* r8 = second map_value pointer */
/* ptr - ptr of the same map value: verifier sees an unknown scalar in r9. */
84 BPF_ALU64_REG(BPF_SUB, BPF_REG_9, BPF_REG_8), /* map_value_ptr -= map_value_ptr */
85 BPF_MOV64_REG(BPF_REG_2, BPF_REG_9),
86 BPF_JMP_IMM(BPF_JLT, BPF_REG_2, 8, 1),  /* bound r2: continue only if r2 < 8 */
89 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, 1), /* R2=scalar(umin=1, umax=8) */
90 BPF_MOV64_REG(BPF_REG_1, BPF_REG_FP),
91 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -8), /* r1 = fp - 8 (dst buffer) */
92 BPF_MOV64_IMM(BPF_REG_3, 0),
/* Size argument r2 must be marked precise during backtracking here. */
93 BPF_EMIT_CALL(BPF_FUNC_probe_read_kernel),
96 .prog_type = BPF_PROG_TYPE_TRACEPOINT,
97 .fixup_map_array_48b = { 1 },
98 .result = VERBOSE_ACCEPT,
/* Checkpoint verifier state at every insn, so precision backtracking
 * walks several short parent-state segments instead of one long one;
 * the expected mark_precise log below reflects that segmentation. */
99 .flags = BPF_F_TEST_STATE_FREQ,
/* Expected verifier log fragment (exact-match test data -- do not edit). */
101 "26: (85) call bpf_probe_read_kernel#113\
102 mark_precise: frame0: last_idx 26 first_idx 22\
103 mark_precise: frame0: regs=r2 stack= before 25\
104 mark_precise: frame0: regs=r2 stack= before 24\
105 mark_precise: frame0: regs=r2 stack= before 23\
106 mark_precise: frame0: regs=r2 stack= before 22\
107 parent didn't have regs=4 stack=0 marks:\
108 mark_precise: frame0: last_idx 20 first_idx 20\
109 mark_precise: frame0: regs=r2 stack= before 20\
110 parent didn't have regs=4 stack=0 marks:\
111 mark_precise: frame0: last_idx 19 first_idx 17\
112 mark_precise: frame0: regs=r2 stack= before 19\
113 mark_precise: frame0: regs=r9 stack= before 18\
114 mark_precise: frame0: regs=r8,r9 stack= before 17\
115 parent already had regs=0 stack=0 marks:",
/* Test: state pruning must not incorrectly prune across call frames
 * when precision marks are involved. (Struct braces and .insns = { are
 * elided in this chunk; BPF_EXIT_INSN()s and the subprog body at the
 * pseudo-call target are likewise not fully visible -- confirm against
 * the original file.) */
118 "precise: cross frame pruning",
120 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
121 BPF_MOV64_IMM(BPF_REG_8, 0),
122 BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 1),
123 BPF_MOV64_IMM(BPF_REG_8, 1),          /* r8 = (rand == 0) ? 1 : 0 */
124 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
125 BPF_MOV64_IMM(BPF_REG_9, 0),
126 BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 1),
127 BPF_MOV64_IMM(BPF_REG_9, 1),          /* r9 = (rand == 0) ? 1 : 0 */
128 BPF_MOV64_REG(BPF_REG_1, BPF_REG_0),
/* src_reg=1 => BPF_PSEUDO_CALL: call a subprogram (body not visible here). */
129 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 1, 0, 4),
130 BPF_JMP_IMM(BPF_JEQ, BPF_REG_8, 1, 1),
/* Reached when r8 != 1: r2 holds no readable pointer on this path,
 * so the verifier must reject the load ("!read_ok" below). */
131 BPF_LDX_MEM(BPF_B, BPF_REG_1, BPF_REG_2, 0),
132 BPF_MOV64_IMM(BPF_REG_0, 0),
134 BPF_JMP_IMM(BPF_JEQ, BPF_REG_1, 0, 0), /* off=0: no-op jump, forces a state checkpoint */
137 .prog_type = BPF_PROG_TYPE_XDP,
138 .flags = BPF_F_TEST_STATE_FREQ,       /* checkpoint every insn to stress pruning */
139 .errstr = "!read_ok",                 /* expected rejection; exact-match string */
/* Test: backtracking a BPF_ST store to a stack slot index (spi) beyond
 * the currently allocated stack must not crash; the verifier is expected
 * to fall back to marking all scalars precise. (Struct braces /
 * .insns = { / BPF_EXIT_INSN() lines are elided in this chunk.) */
143 "precise: ST insn causing spi > allocated_stack",
145 BPF_MOV64_REG(BPF_REG_3, BPF_REG_10), /* r3 = frame pointer */
/* off=0: never-taken branch body, but the jump creates a checkpoint
 * where fp-8 is not yet allocated in the parent state. */
146 BPF_JMP_IMM(BPF_JNE, BPF_REG_3, 123, 0),
147 BPF_ST_MEM(BPF_DW, BPF_REG_3, -8, 0), /* *(fp - 8) = 0 via r3 alias */
148 BPF_LDX_MEM(BPF_DW, BPF_REG_4, BPF_REG_10, -8),
149 BPF_MOV64_IMM(BPF_REG_0, -1),
150 BPF_JMP_REG(BPF_JGT, BPF_REG_4, BPF_REG_0, 0), /* triggers precision backtrack of r4 */
153 .prog_type = BPF_PROG_TYPE_XDP,
154 .flags = BPF_F_TEST_STATE_FREQ,
/* Expected verifier log fragment (exact-match test data -- do not edit);
 * note the "falling back to forcing all scalars precise" line. */
155 .errstr = "mark_precise: frame0: last_idx 5 first_idx 5\
156 parent didn't have regs=10 stack=0 marks:\
157 mark_precise: frame0: last_idx 4 first_idx 2\
158 mark_precise: frame0: regs=r4 stack= before 4\
159 mark_precise: frame0: regs=r4 stack= before 3\
160 mark_precise: frame0: regs= stack=-8 before 2\
161 mark_precise: frame0: falling back to forcing all scalars precise\
162 mark_precise: frame0: last_idx 5 first_idx 5\
163 parent didn't have regs=1 stack=0 marks:",
164 .result = VERBOSE_ACCEPT,
/* Test: same scenario as the ST variant above, but with a BPF_STX
 * register store; the fallback additionally force-marks r0 (the stored
 * register's origin) precise, visible in the expected log. (Struct
 * braces / .insns = { / BPF_EXIT_INSN() lines are elided in this chunk.) */
168 "precise: STX insn causing spi > allocated_stack",
170 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
171 BPF_MOV64_REG(BPF_REG_3, BPF_REG_10), /* r3 = frame pointer */
172 BPF_JMP_IMM(BPF_JNE, BPF_REG_3, 123, 0), /* off=0 no-op jump: forces checkpoint */
173 BPF_STX_MEM(BPF_DW, BPF_REG_3, BPF_REG_0, -8), /* *(fp - 8) = r0 via r3 alias */
174 BPF_LDX_MEM(BPF_DW, BPF_REG_4, BPF_REG_10, -8),
175 BPF_MOV64_IMM(BPF_REG_0, -1),
176 BPF_JMP_REG(BPF_JGT, BPF_REG_4, BPF_REG_0, 0), /* triggers precision backtrack of r4 */
179 .prog_type = BPF_PROG_TYPE_XDP,
180 .flags = BPF_F_TEST_STATE_FREQ,
/* Expected verifier log fragment (exact-match test data -- do not edit). */
181 .errstr = "mark_precise: frame0: last_idx 6 first_idx 6\
182 parent didn't have regs=10 stack=0 marks:\
183 mark_precise: frame0: last_idx 5 first_idx 3\
184 mark_precise: frame0: regs=r4 stack= before 5\
185 mark_precise: frame0: regs=r4 stack= before 4\
186 mark_precise: frame0: regs= stack=-8 before 3\
187 mark_precise: frame0: falling back to forcing all scalars precise\
188 force_precise: frame0: forcing r0 to be precise\
189 force_precise: frame0: forcing r0 to be precise\
190 mark_precise: frame0: last_idx 6 first_idx 6\
191 parent didn't have regs=1 stack=0 marks:\
192 mark_precise: frame0: last_idx 5 first_idx 3\
193 mark_precise: frame0: regs=r0 stack= before 5",
194 .result = VERBOSE_ACCEPT,
/* Test: the size argument of bpf_ringbuf_reserve() (ARG_CONST_ALLOC_SIZE_OR_ZERO)
 * must go through mark_chain_precision. r2 is either 1 or 0x1000 depending
 * on a runtime value; with precise tracking the verifier must analyze the
 * size==1 branch separately and reject the 8-byte access at offset 42.
 * (Struct braces / .insns = { / BPF_EXIT_INSN() lines are elided in this
 * chunk; a .result field may also be elided -- confirm against the
 * original file.) */
198 "precise: mark_chain_precision for ARG_CONST_ALLOC_SIZE_OR_ZERO",
199 BPF_LDX_MEM(BPF_W, BPF_REG_4, BPF_REG_1, offsetof(struct xdp_md, ingress_ifindex)),
201 BPF_LD_MAP_FD(BPF_REG_6, 0),           /* r6 = ringbuf map fd (fixup_map_ringbuf) */
202 BPF_MOV64_REG(BPF_REG_1, BPF_REG_6),
203 BPF_MOV64_IMM(BPF_REG_2, 1),           /* size = 1 ... */
204 BPF_MOV64_IMM(BPF_REG_3, 0),           /* flags = 0 */
205 BPF_JMP_IMM(BPF_JEQ, BPF_REG_4, 0, 1),
206 BPF_MOV64_IMM(BPF_REG_2, 0x1000),      /* ... or size = 0x1000 if ifindex != 0 */
207 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_ringbuf_reserve),
208 BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 1), /* skip (exit path elided) if reserve failed */
210 BPF_MOV64_REG(BPF_REG_1, BPF_REG_0),
/* 8-byte read at offset 42: out of bounds when the reservation was 1 byte. */
211 BPF_LDX_MEM(BPF_DW, BPF_REG_2, BPF_REG_0, 42),
212 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_ringbuf_submit),
213 BPF_MOV64_IMM(BPF_REG_0, 0),
216 .fixup_map_ringbuf = { 1 },
217 .prog_type = BPF_PROG_TYPE_XDP,
218 .flags = BPF_F_TEST_STATE_FREQ,
219 .errstr = "invalid access to memory, mem_size=1 off=42 size=8",
223 "precise: program doesn't prematurely prune branches",
225 BPF_ALU64_IMM(BPF_MOV, BPF_REG_6, 0x400),
226 BPF_ALU64_IMM(BPF_MOV, BPF_REG_7, 0),
227 BPF_ALU64_IMM(BPF_MOV, BPF_REG_8, 0),
228 BPF_ALU64_IMM(BPF_MOV, BPF_REG_9, 0x80000000),
229 BPF_ALU64_IMM(BPF_MOD, BPF_REG_6, 0x401),
230 BPF_JMP_IMM(BPF_JA, 0, 0, 0),
231 BPF_JMP_REG(BPF_JLE, BPF_REG_6, BPF_REG_9, 2),
232 BPF_ALU64_IMM(BPF_MOD, BPF_REG_6, 1),
233 BPF_ALU64_IMM(BPF_MOV, BPF_REG_9, 0),
234 BPF_JMP_REG(BPF_JLE, BPF_REG_6, BPF_REG_9, 1),
235 BPF_ALU64_IMM(BPF_MOV, BPF_REG_6, 0),
236 BPF_ALU64_IMM(BPF_MOV, BPF_REG_0, 0),
237 BPF_STX_MEM(BPF_W, BPF_REG_10, BPF_REG_0, -4),
238 BPF_LD_MAP_FD(BPF_REG_4, 0),
239 BPF_ALU64_REG(BPF_MOV, BPF_REG_1, BPF_REG_4),
240 BPF_ALU64_REG(BPF_MOV, BPF_REG_2, BPF_REG_10),
241 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -4),
242 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
243 BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 1),
245 BPF_ALU64_IMM(BPF_RSH, BPF_REG_6, 10),
246 BPF_ALU64_IMM(BPF_MUL, BPF_REG_6, 8192),
247 BPF_ALU64_REG(BPF_MOV, BPF_REG_1, BPF_REG_0),
248 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_6),
249 BPF_LDX_MEM(BPF_DW, BPF_REG_3, BPF_REG_0, 0),
250 BPF_STX_MEM(BPF_DW, BPF_REG_1, BPF_REG_3, 0),
253 .fixup_map_array_48b = { 13 },
254 .prog_type = BPF_PROG_TYPE_XDP,
256 .errstr = "register with unbounded min value is not allowed",