arm64: introduce aarch64_insn_gen_data3()
arch/arm64/kernel/insn.c

/*
 * Copyright (C) 2013 Huawei Ltd.
 * Author: Jiang Liu <liuj97@gmail.com>
 *
 * Copyright (C) 2014 Zi Shen Lim <zlim.lnx@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#include <linux/bitops.h>
#include <linux/compiler.h>
#include <linux/kernel.h>
#include <linux/smp.h>
#include <linux/stop_machine.h>
#include <linux/uaccess.h>
#include <asm/cacheflush.h>
#include <asm/insn.h>

#define AARCH64_INSN_SF_BIT	BIT(31)
#define AARCH64_INSN_N_BIT	BIT(22)

static int aarch64_insn_encoding_class[] = {
        AARCH64_INSN_CLS_UNKNOWN,
        AARCH64_INSN_CLS_UNKNOWN,
        AARCH64_INSN_CLS_UNKNOWN,
        AARCH64_INSN_CLS_UNKNOWN,
        AARCH64_INSN_CLS_LDST,
        AARCH64_INSN_CLS_DP_REG,
        AARCH64_INSN_CLS_LDST,
        AARCH64_INSN_CLS_DP_FPSIMD,
        AARCH64_INSN_CLS_DP_IMM,
        AARCH64_INSN_CLS_DP_IMM,
        AARCH64_INSN_CLS_BR_SYS,
        AARCH64_INSN_CLS_BR_SYS,
        AARCH64_INSN_CLS_LDST,
        AARCH64_INSN_CLS_DP_REG,
        AARCH64_INSN_CLS_LDST,
        AARCH64_INSN_CLS_DP_FPSIMD,
};

enum aarch64_insn_encoding_class __kprobes aarch64_get_insn_class(u32 insn)
{
        return aarch64_insn_encoding_class[(insn >> 25) & 0xf];
}

/* NOP is an alias of HINT */
bool __kprobes aarch64_insn_is_nop(u32 insn)
{
        if (!aarch64_insn_is_hint(insn))
                return false;

        switch (insn & 0xFE0) {
        case AARCH64_INSN_HINT_YIELD:
        case AARCH64_INSN_HINT_WFE:
        case AARCH64_INSN_HINT_WFI:
        case AARCH64_INSN_HINT_SEV:
        case AARCH64_INSN_HINT_SEVL:
                return false;
        default:
                return true;
        }
}
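
/*
 * Minimal usage sketch (illustrative addition, not part of the original
 * source): the architectural NOP is HINT #0, encoded as 0xd503201f, so it is
 * not one of the hint values filtered out above and is accepted, while a
 * "wfi" (HINT #3, 0xd503207f) is rejected.
 */
static void __maybe_unused aarch64_insn_nop_example(void)
{
        WARN_ON(!aarch64_insn_is_nop(0xd503201f));	/* nop */
        WARN_ON(aarch64_insn_is_nop(0xd503207f));	/* wfi */
}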

/*
 * In ARMv8-A, A64 instructions have a fixed length of 32 bits and are always
 * little-endian.
 */
int __kprobes aarch64_insn_read(void *addr, u32 *insnp)
{
        int ret;
        u32 val;

        ret = probe_kernel_read(&val, addr, AARCH64_INSN_SIZE);
        if (!ret)
                *insnp = le32_to_cpu(val);

        return ret;
}

int __kprobes aarch64_insn_write(void *addr, u32 insn)
{
        insn = cpu_to_le32(insn);
        return probe_kernel_write(addr, &insn, AARCH64_INSN_SIZE);
}

static bool __kprobes __aarch64_insn_hotpatch_safe(u32 insn)
{
        if (aarch64_get_insn_class(insn) != AARCH64_INSN_CLS_BR_SYS)
                return false;

        return aarch64_insn_is_b(insn) ||
               aarch64_insn_is_bl(insn) ||
               aarch64_insn_is_svc(insn) ||
               aarch64_insn_is_hvc(insn) ||
               aarch64_insn_is_smc(insn) ||
               aarch64_insn_is_brk(insn) ||
               aarch64_insn_is_nop(insn);
}

/*
 * ARM Architecture Reference Manual for ARMv8 Profile-A, Issue A.a
 * Section B2.6.5 "Concurrent modification and execution of instructions":
 * Concurrent modification and execution of instructions can lead to the
 * resulting instruction performing any behavior that can be achieved by
 * executing any sequence of instructions that can be executed from the
 * same Exception level, except where the instruction before modification
 * and the instruction after modification is a B, BL, NOP, BKPT, SVC, HVC,
 * or SMC instruction.
 */
bool __kprobes aarch64_insn_hotpatch_safe(u32 old_insn, u32 new_insn)
{
        return __aarch64_insn_hotpatch_safe(old_insn) &&
               __aarch64_insn_hotpatch_safe(new_insn);
}
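
/*
 * Minimal usage sketch (illustrative addition, not part of the original
 * source): check whether the instruction currently at @addr may be replaced
 * by @new_insn while other CPUs keep executing, i.e. whether both the old
 * and the new instruction fall into the safe set listed above.
 */
static bool __maybe_unused example_can_hotpatch(void *addr, u32 new_insn)
{
        u32 old;

        if (aarch64_insn_read(addr, &old))
                return false;

        return aarch64_insn_hotpatch_safe(old, new_insn);
}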

int __kprobes aarch64_insn_patch_text_nosync(void *addr, u32 insn)
{
        u32 *tp = addr;
        int ret;

        /* A64 instructions must be word aligned */
        if ((uintptr_t)tp & 0x3)
                return -EINVAL;

        ret = aarch64_insn_write(tp, insn);
        if (ret == 0)
                flush_icache_range((uintptr_t)tp,
                                   (uintptr_t)tp + AARCH64_INSN_SIZE);

        return ret;
}

struct aarch64_insn_patch {
        void            **text_addrs;
        u32             *new_insns;
        int             insn_cnt;
        atomic_t        cpu_count;
};

static int __kprobes aarch64_insn_patch_text_cb(void *arg)
{
        int i, ret = 0;
        struct aarch64_insn_patch *pp = arg;

        /* The first CPU becomes master */
        if (atomic_inc_return(&pp->cpu_count) == 1) {
                for (i = 0; ret == 0 && i < pp->insn_cnt; i++)
                        ret = aarch64_insn_patch_text_nosync(pp->text_addrs[i],
                                                             pp->new_insns[i]);
                /*
                 * aarch64_insn_patch_text_nosync() calls flush_icache_range(),
                 * which ends with "dsb; isb" pair guaranteeing global
                 * visibility.
                 */
                atomic_set(&pp->cpu_count, -1);
        } else {
                while (atomic_read(&pp->cpu_count) != -1)
                        cpu_relax();
                isb();
        }

        return ret;
}

int __kprobes aarch64_insn_patch_text_sync(void *addrs[], u32 insns[], int cnt)
{
        struct aarch64_insn_patch patch = {
                .text_addrs = addrs,
                .new_insns = insns,
                .insn_cnt = cnt,
                .cpu_count = ATOMIC_INIT(0),
        };

        if (cnt <= 0)
                return -EINVAL;

        return stop_machine(aarch64_insn_patch_text_cb, &patch,
                            cpu_online_mask);
}

int __kprobes aarch64_insn_patch_text(void *addrs[], u32 insns[], int cnt)
{
        int ret;
        u32 insn;

        /* Unsafe to patch multiple instructions without synchronization */
        if (cnt == 1) {
                ret = aarch64_insn_read(addrs[0], &insn);
                if (ret)
                        return ret;

                if (aarch64_insn_hotpatch_safe(insn, insns[0])) {
                        /*
                         * ARMv8 architecture doesn't guarantee all CPUs see
                         * the new instruction after returning from function
                         * aarch64_insn_patch_text_nosync(). So send IPIs to
                         * all other CPUs to achieve instruction
                         * synchronization.
                         */
                        ret = aarch64_insn_patch_text_nosync(addrs[0], insns[0]);
                        kick_all_cpus_sync();
                        return ret;
                }
        }

        return aarch64_insn_patch_text_sync(addrs, insns, cnt);
}
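
/*
 * Minimal usage sketch (illustrative addition, not part of the original
 * source): replace the single instruction at @site with a branch to @target,
 * the way a jump-label style caller would. aarch64_insn_gen_branch_imm() is
 * defined later in this file and declared in <asm/insn.h>.
 */
static int __maybe_unused example_patch_site_to_branch(void *site,
                                                       unsigned long target)
{
        u32 insn = aarch64_insn_gen_branch_imm((unsigned long)site, target,
                                               AARCH64_INSN_BRANCH_NOLINK);

        /* cnt == 1: takes the IPI fast path when the patch is hotpatch-safe */
        return aarch64_insn_patch_text(&site, &insn, 1);
}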

u32 __kprobes aarch64_insn_encode_immediate(enum aarch64_insn_imm_type type,
                                            u32 insn, u64 imm)
{
        u32 immlo, immhi, lomask, himask, mask;
        int shift;

        switch (type) {
        case AARCH64_INSN_IMM_ADR:
                lomask = 0x3;
                himask = 0x7ffff;
                immlo = imm & lomask;
                imm >>= 2;
                immhi = imm & himask;
                imm = (immlo << 24) | (immhi);
                mask = (lomask << 24) | (himask);
                shift = 5;
                break;
        case AARCH64_INSN_IMM_26:
                mask = BIT(26) - 1;
                shift = 0;
                break;
        case AARCH64_INSN_IMM_19:
                mask = BIT(19) - 1;
                shift = 5;
                break;
        case AARCH64_INSN_IMM_16:
                mask = BIT(16) - 1;
                shift = 5;
                break;
        case AARCH64_INSN_IMM_14:
                mask = BIT(14) - 1;
                shift = 5;
                break;
        case AARCH64_INSN_IMM_12:
                mask = BIT(12) - 1;
                shift = 10;
                break;
        case AARCH64_INSN_IMM_9:
                mask = BIT(9) - 1;
                shift = 12;
                break;
        case AARCH64_INSN_IMM_7:
                mask = BIT(7) - 1;
                shift = 15;
                break;
        case AARCH64_INSN_IMM_6:
        case AARCH64_INSN_IMM_S:
                mask = BIT(6) - 1;
                shift = 10;
                break;
        case AARCH64_INSN_IMM_R:
                mask = BIT(6) - 1;
                shift = 16;
                break;
        default:
                pr_err("aarch64_insn_encode_immediate: unknown immediate encoding %d\n",
                       type);
                return 0;
        }

        /* Update the immediate field. */
        insn &= ~(mask << shift);
        insn |= (imm & mask) << shift;

        return insn;
}
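
/*
 * Worked example (illustrative addition, not part of the original source):
 * branch immediates are encoded in units of instructions, so re-pointing an
 * existing B/BL by @new_offset bytes means writing (new_offset >> 2) into
 * the 26-bit immediate field, exactly as aarch64_insn_gen_branch_imm() does
 * below.
 */
static u32 __maybe_unused example_set_branch_offset(u32 branch_insn,
                                                    long new_offset)
{
        return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_26, branch_insn,
                                             new_offset >> 2);
}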

static u32 aarch64_insn_encode_register(enum aarch64_insn_register_type type,
                                        u32 insn,
                                        enum aarch64_insn_register reg)
{
        int shift;

        if (reg < AARCH64_INSN_REG_0 || reg > AARCH64_INSN_REG_SP) {
                pr_err("%s: unknown register encoding %d\n", __func__, reg);
                return 0;
        }

        switch (type) {
        case AARCH64_INSN_REGTYPE_RT:
        case AARCH64_INSN_REGTYPE_RD:
                shift = 0;
                break;
        case AARCH64_INSN_REGTYPE_RN:
                shift = 5;
                break;
        case AARCH64_INSN_REGTYPE_RT2:
        case AARCH64_INSN_REGTYPE_RA:
                shift = 10;
                break;
        case AARCH64_INSN_REGTYPE_RM:
                shift = 16;
                break;
        default:
                pr_err("%s: unknown register type encoding %d\n", __func__,
                       type);
                return 0;
        }

        insn &= ~(GENMASK(4, 0) << shift);
        insn |= reg << shift;

        return insn;
}

static u32 aarch64_insn_encode_ldst_size(enum aarch64_insn_size_type type,
                                         u32 insn)
{
        u32 size;

        switch (type) {
        case AARCH64_INSN_SIZE_8:
                size = 0;
                break;
        case AARCH64_INSN_SIZE_16:
                size = 1;
                break;
        case AARCH64_INSN_SIZE_32:
                size = 2;
                break;
        case AARCH64_INSN_SIZE_64:
                size = 3;
                break;
        default:
                pr_err("%s: unknown size encoding %d\n", __func__, type);
                return 0;
        }

        insn &= ~GENMASK(31, 30);
        insn |= size << 30;

        return insn;
}

static inline long branch_imm_common(unsigned long pc, unsigned long addr,
                                     long range)
{
        long offset;

        /*
         * PC: A 64-bit Program Counter holding the address of the current
         * instruction. A64 instructions must be word-aligned.
         */
        BUG_ON((pc & 0x3) || (addr & 0x3));

        offset = ((long)addr - (long)pc);
        BUG_ON(offset < -range || offset >= range);

        return offset;
}

u32 __kprobes aarch64_insn_gen_branch_imm(unsigned long pc, unsigned long addr,
                                          enum aarch64_insn_branch_type type)
{
        u32 insn;
        long offset;

        /*
         * B/BL support [-128M, 128M) offset
         * ARM64 virtual address arrangement guarantees all kernel and module
         * texts are within +/-128M.
         */
        offset = branch_imm_common(pc, addr, SZ_128M);

        switch (type) {
        case AARCH64_INSN_BRANCH_LINK:
                insn = aarch64_insn_get_bl_value();
                break;
        case AARCH64_INSN_BRANCH_NOLINK:
                insn = aarch64_insn_get_b_value();
                break;
        default:
                BUG_ON(1);
        }

        return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_26, insn,
                                             offset >> 2);
}
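
/*
 * Minimal usage sketch (illustrative addition, not part of the original
 * source): emit "bl <func>" at @pc, e.g. when wiring a direct call from
 * generated code. The target must lie within +/-128M of @pc.
 */
static u32 __maybe_unused example_gen_call(unsigned long pc,
                                           unsigned long func)
{
        return aarch64_insn_gen_branch_imm(pc, func, AARCH64_INSN_BRANCH_LINK);
}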

u32 aarch64_insn_gen_comp_branch_imm(unsigned long pc, unsigned long addr,
                                     enum aarch64_insn_register reg,
                                     enum aarch64_insn_variant variant,
                                     enum aarch64_insn_branch_type type)
{
        u32 insn;
        long offset;

        offset = branch_imm_common(pc, addr, SZ_1M);

        switch (type) {
        case AARCH64_INSN_BRANCH_COMP_ZERO:
                insn = aarch64_insn_get_cbz_value();
                break;
        case AARCH64_INSN_BRANCH_COMP_NONZERO:
                insn = aarch64_insn_get_cbnz_value();
                break;
        default:
                BUG_ON(1);
        }

        switch (variant) {
        case AARCH64_INSN_VARIANT_32BIT:
                break;
        case AARCH64_INSN_VARIANT_64BIT:
                insn |= AARCH64_INSN_SF_BIT;
                break;
        default:
                BUG_ON(1);
        }

        insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RT, insn, reg);

        return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_19, insn,
                                             offset >> 2);
}

u32 aarch64_insn_gen_cond_branch_imm(unsigned long pc, unsigned long addr,
                                     enum aarch64_insn_condition cond)
{
        u32 insn;
        long offset;

        offset = branch_imm_common(pc, addr, SZ_1M);

        insn = aarch64_insn_get_bcond_value();

        BUG_ON(cond < AARCH64_INSN_COND_EQ || cond > AARCH64_INSN_COND_AL);
        insn |= cond;

        return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_19, insn,
                                             offset >> 2);
}

u32 __kprobes aarch64_insn_gen_hint(enum aarch64_insn_hint_op op)
{
        return aarch64_insn_get_hint_value() | op;
}

u32 __kprobes aarch64_insn_gen_nop(void)
{
        return aarch64_insn_gen_hint(AARCH64_INSN_HINT_NOP);
}

u32 aarch64_insn_gen_branch_reg(enum aarch64_insn_register reg,
                                enum aarch64_insn_branch_type type)
{
        u32 insn;

        switch (type) {
        case AARCH64_INSN_BRANCH_NOLINK:
                insn = aarch64_insn_get_br_value();
                break;
        case AARCH64_INSN_BRANCH_LINK:
                insn = aarch64_insn_get_blr_value();
                break;
        case AARCH64_INSN_BRANCH_RETURN:
                insn = aarch64_insn_get_ret_value();
                break;
        default:
                BUG_ON(1);
        }

        return aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn, reg);
}
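
/*
 * Minimal usage sketch (illustrative addition, not part of the original
 * source): "ret" is a register branch through the link register. This
 * assumes the AARCH64_INSN_REG_LR enumerator (x30) from <asm/insn.h>.
 */
static u32 __maybe_unused example_gen_ret(void)
{
        return aarch64_insn_gen_branch_reg(AARCH64_INSN_REG_LR,
                                           AARCH64_INSN_BRANCH_RETURN);
}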

u32 aarch64_insn_gen_load_store_reg(enum aarch64_insn_register reg,
                                    enum aarch64_insn_register base,
                                    enum aarch64_insn_register offset,
                                    enum aarch64_insn_size_type size,
                                    enum aarch64_insn_ldst_type type)
{
        u32 insn;

        switch (type) {
        case AARCH64_INSN_LDST_LOAD_REG_OFFSET:
                insn = aarch64_insn_get_ldr_reg_value();
                break;
        case AARCH64_INSN_LDST_STORE_REG_OFFSET:
                insn = aarch64_insn_get_str_reg_value();
                break;
        default:
                BUG_ON(1);
        }

        insn = aarch64_insn_encode_ldst_size(size, insn);

        insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RT, insn, reg);

        insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn,
                                            base);

        return aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RM, insn,
                                            offset);
}
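
/*
 * Minimal usage sketch (illustrative addition, not part of the original
 * source): "ldr x0, [x1, x2]", a 64-bit load with a register offset. This
 * assumes the AARCH64_INSN_REG_0/1/2 enumerators from <asm/insn.h>.
 */
static u32 __maybe_unused example_gen_ldr_reg(void)
{
        return aarch64_insn_gen_load_store_reg(AARCH64_INSN_REG_0,
                                               AARCH64_INSN_REG_1,
                                               AARCH64_INSN_REG_2,
                                               AARCH64_INSN_SIZE_64,
                                               AARCH64_INSN_LDST_LOAD_REG_OFFSET);
}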

u32 aarch64_insn_gen_load_store_pair(enum aarch64_insn_register reg1,
                                     enum aarch64_insn_register reg2,
                                     enum aarch64_insn_register base,
                                     int offset,
                                     enum aarch64_insn_variant variant,
                                     enum aarch64_insn_ldst_type type)
{
        u32 insn;
        int shift;

        switch (type) {
        case AARCH64_INSN_LDST_LOAD_PAIR_PRE_INDEX:
                insn = aarch64_insn_get_ldp_pre_value();
                break;
        case AARCH64_INSN_LDST_STORE_PAIR_PRE_INDEX:
                insn = aarch64_insn_get_stp_pre_value();
                break;
        case AARCH64_INSN_LDST_LOAD_PAIR_POST_INDEX:
                insn = aarch64_insn_get_ldp_post_value();
                break;
        case AARCH64_INSN_LDST_STORE_PAIR_POST_INDEX:
                insn = aarch64_insn_get_stp_post_value();
                break;
        default:
                BUG_ON(1);
        }

        switch (variant) {
        case AARCH64_INSN_VARIANT_32BIT:
                /* offset must be a multiple of 4 in the range [-256, 252] */
                BUG_ON(offset & 0x3);
                BUG_ON(offset < -256 || offset > 252);
                shift = 2;
                break;
        case AARCH64_INSN_VARIANT_64BIT:
                /* offset must be a multiple of 8 in the range [-512, 504] */
                BUG_ON(offset & 0x7);
                BUG_ON(offset < -512 || offset > 504);
                shift = 3;
                insn |= AARCH64_INSN_SF_BIT;
                break;
        default:
                BUG_ON(1);
        }

        insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RT, insn,
                                            reg1);

        insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RT2, insn,
                                            reg2);

        insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn,
                                            base);

        return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_7, insn,
                                             offset >> shift);
}
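
/*
 * Minimal usage sketch (illustrative addition, not part of the original
 * source): "stp x29, x30, [sp, #-16]!", the usual frame push. This assumes
 * the AARCH64_INSN_REG_FP/LR/SP enumerators from <asm/insn.h>.
 */
static u32 __maybe_unused example_gen_frame_push(void)
{
        return aarch64_insn_gen_load_store_pair(AARCH64_INSN_REG_FP,
                                                AARCH64_INSN_REG_LR,
                                                AARCH64_INSN_REG_SP, -16,
                                                AARCH64_INSN_VARIANT_64BIT,
                                                AARCH64_INSN_LDST_STORE_PAIR_PRE_INDEX);
}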

u32 aarch64_insn_gen_add_sub_imm(enum aarch64_insn_register dst,
                                 enum aarch64_insn_register src,
                                 int imm, enum aarch64_insn_variant variant,
                                 enum aarch64_insn_adsb_type type)
{
        u32 insn;

        switch (type) {
        case AARCH64_INSN_ADSB_ADD:
                insn = aarch64_insn_get_add_imm_value();
                break;
        case AARCH64_INSN_ADSB_SUB:
                insn = aarch64_insn_get_sub_imm_value();
                break;
        case AARCH64_INSN_ADSB_ADD_SETFLAGS:
                insn = aarch64_insn_get_adds_imm_value();
                break;
        case AARCH64_INSN_ADSB_SUB_SETFLAGS:
                insn = aarch64_insn_get_subs_imm_value();
                break;
        default:
                BUG_ON(1);
        }

        switch (variant) {
        case AARCH64_INSN_VARIANT_32BIT:
                break;
        case AARCH64_INSN_VARIANT_64BIT:
                insn |= AARCH64_INSN_SF_BIT;
                break;
        default:
                BUG_ON(1);
        }

        BUG_ON(imm & ~(SZ_4K - 1));

        insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst);

        insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn, src);

        return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_12, insn, imm);
}
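
/*
 * Minimal usage sketch (illustrative addition, not part of the original
 * source): "add x0, x1, #16"; the immediate must fit in the unshifted
 * 12-bit field (0..4095). Assumes the AARCH64_INSN_REG_0/1 enumerators
 * from <asm/insn.h>.
 */
static u32 __maybe_unused example_gen_add_imm16(void)
{
        return aarch64_insn_gen_add_sub_imm(AARCH64_INSN_REG_0,
                                            AARCH64_INSN_REG_1, 16,
                                            AARCH64_INSN_VARIANT_64BIT,
                                            AARCH64_INSN_ADSB_ADD);
}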

u32 aarch64_insn_gen_bitfield(enum aarch64_insn_register dst,
                              enum aarch64_insn_register src,
                              int immr, int imms,
                              enum aarch64_insn_variant variant,
                              enum aarch64_insn_bitfield_type type)
{
        u32 insn;
        u32 mask;

        switch (type) {
        case AARCH64_INSN_BITFIELD_MOVE:
                insn = aarch64_insn_get_bfm_value();
                break;
        case AARCH64_INSN_BITFIELD_MOVE_UNSIGNED:
                insn = aarch64_insn_get_ubfm_value();
                break;
        case AARCH64_INSN_BITFIELD_MOVE_SIGNED:
                insn = aarch64_insn_get_sbfm_value();
                break;
        default:
                BUG_ON(1);
        }

        switch (variant) {
        case AARCH64_INSN_VARIANT_32BIT:
                mask = GENMASK(4, 0);
                break;
        case AARCH64_INSN_VARIANT_64BIT:
                insn |= AARCH64_INSN_SF_BIT | AARCH64_INSN_N_BIT;
                mask = GENMASK(5, 0);
                break;
        default:
                BUG_ON(1);
        }

        BUG_ON(immr & ~mask);
        BUG_ON(imms & ~mask);

        insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst);

        insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn, src);

        insn = aarch64_insn_encode_immediate(AARCH64_INSN_IMM_R, insn, immr);

        return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_S, insn, imms);
}
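
/*
 * Worked example (illustrative addition, not part of the original source):
 * the architectural alias "lsl xd, xn, #shift" is
 * "ubfm xd, xn, #((64 - shift) % 64), #(63 - shift)" in the 64-bit variant,
 * so a constant left shift can be generated as an unsigned bitfield move.
 * @shift must be in [0, 63].
 */
static u32 __maybe_unused example_gen_lsl_imm(enum aarch64_insn_register dst,
                                              enum aarch64_insn_register src,
                                              int shift)
{
        return aarch64_insn_gen_bitfield(dst, src, (64 - shift) & 0x3f,
                                         63 - shift,
                                         AARCH64_INSN_VARIANT_64BIT,
                                         AARCH64_INSN_BITFIELD_MOVE_UNSIGNED);
}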

u32 aarch64_insn_gen_movewide(enum aarch64_insn_register dst,
                              int imm, int shift,
                              enum aarch64_insn_variant variant,
                              enum aarch64_insn_movewide_type type)
{
        u32 insn;

        switch (type) {
        case AARCH64_INSN_MOVEWIDE_ZERO:
                insn = aarch64_insn_get_movz_value();
                break;
        case AARCH64_INSN_MOVEWIDE_KEEP:
                insn = aarch64_insn_get_movk_value();
                break;
        case AARCH64_INSN_MOVEWIDE_INVERSE:
                insn = aarch64_insn_get_movn_value();
                break;
        default:
                BUG_ON(1);
        }

        BUG_ON(imm & ~(SZ_64K - 1));

        switch (variant) {
        case AARCH64_INSN_VARIANT_32BIT:
                BUG_ON(shift != 0 && shift != 16);
                break;
        case AARCH64_INSN_VARIANT_64BIT:
                insn |= AARCH64_INSN_SF_BIT;
                BUG_ON(shift != 0 && shift != 16 && shift != 32 &&
                       shift != 48);
                break;
        default:
                BUG_ON(1);
        }

        insn |= (shift >> 4) << 21;

        insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst);

        return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_16, insn, imm);
}
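
/*
 * Minimal usage sketch (illustrative addition, not part of the original
 * source): load an arbitrary 64-bit constant into @dst with one MOVZ
 * followed by three MOVKs, one 16-bit chunk per instruction.
 */
static void __maybe_unused example_gen_mov_u64(enum aarch64_insn_register dst,
                                               u64 val, u32 insns[4])
{
        int i;

        for (i = 0; i < 4; i++)
                insns[i] = aarch64_insn_gen_movewide(dst,
                                (val >> (16 * i)) & 0xffff, 16 * i,
                                AARCH64_INSN_VARIANT_64BIT,
                                i == 0 ? AARCH64_INSN_MOVEWIDE_ZERO
                                       : AARCH64_INSN_MOVEWIDE_KEEP);
}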

u32 aarch64_insn_gen_add_sub_shifted_reg(enum aarch64_insn_register dst,
                                         enum aarch64_insn_register src,
                                         enum aarch64_insn_register reg,
                                         int shift,
                                         enum aarch64_insn_variant variant,
                                         enum aarch64_insn_adsb_type type)
{
        u32 insn;

        switch (type) {
        case AARCH64_INSN_ADSB_ADD:
                insn = aarch64_insn_get_add_value();
                break;
        case AARCH64_INSN_ADSB_SUB:
                insn = aarch64_insn_get_sub_value();
                break;
        case AARCH64_INSN_ADSB_ADD_SETFLAGS:
                insn = aarch64_insn_get_adds_value();
                break;
        case AARCH64_INSN_ADSB_SUB_SETFLAGS:
                insn = aarch64_insn_get_subs_value();
                break;
        default:
                BUG_ON(1);
        }

        switch (variant) {
        case AARCH64_INSN_VARIANT_32BIT:
                BUG_ON(shift & ~(SZ_32 - 1));
                break;
        case AARCH64_INSN_VARIANT_64BIT:
                insn |= AARCH64_INSN_SF_BIT;
                BUG_ON(shift & ~(SZ_64 - 1));
                break;
        default:
                BUG_ON(1);
        }

        insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst);

        insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn, src);

        insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RM, insn, reg);

        return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_6, insn, shift);
}

u32 aarch64_insn_gen_data1(enum aarch64_insn_register dst,
                           enum aarch64_insn_register src,
                           enum aarch64_insn_variant variant,
                           enum aarch64_insn_data1_type type)
{
        u32 insn;

        switch (type) {
        case AARCH64_INSN_DATA1_REVERSE_16:
                insn = aarch64_insn_get_rev16_value();
                break;
        case AARCH64_INSN_DATA1_REVERSE_32:
                insn = aarch64_insn_get_rev32_value();
                break;
        case AARCH64_INSN_DATA1_REVERSE_64:
                BUG_ON(variant != AARCH64_INSN_VARIANT_64BIT);
                insn = aarch64_insn_get_rev64_value();
                break;
        default:
                BUG_ON(1);
        }

        switch (variant) {
        case AARCH64_INSN_VARIANT_32BIT:
                break;
        case AARCH64_INSN_VARIANT_64BIT:
                insn |= AARCH64_INSN_SF_BIT;
                break;
        default:
                BUG_ON(1);
        }

        insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst);

        return aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn, src);
}

u32 aarch64_insn_gen_data2(enum aarch64_insn_register dst,
                           enum aarch64_insn_register src,
                           enum aarch64_insn_register reg,
                           enum aarch64_insn_variant variant,
                           enum aarch64_insn_data2_type type)
{
        u32 insn;

        switch (type) {
        case AARCH64_INSN_DATA2_UDIV:
                insn = aarch64_insn_get_udiv_value();
                break;
        case AARCH64_INSN_DATA2_SDIV:
                insn = aarch64_insn_get_sdiv_value();
                break;
        case AARCH64_INSN_DATA2_LSLV:
                insn = aarch64_insn_get_lslv_value();
                break;
        case AARCH64_INSN_DATA2_LSRV:
                insn = aarch64_insn_get_lsrv_value();
                break;
        case AARCH64_INSN_DATA2_ASRV:
                insn = aarch64_insn_get_asrv_value();
                break;
        case AARCH64_INSN_DATA2_RORV:
                insn = aarch64_insn_get_rorv_value();
                break;
        default:
                BUG_ON(1);
        }

        switch (variant) {
        case AARCH64_INSN_VARIANT_32BIT:
                break;
        case AARCH64_INSN_VARIANT_64BIT:
                insn |= AARCH64_INSN_SF_BIT;
                break;
        default:
                BUG_ON(1);
        }

        insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst);

        insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn, src);

        return aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RM, insn, reg);
}
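
/*
 * Minimal usage sketch (illustrative addition, not part of the original
 * source): "lsl x0, x1, x2" (shift amount taken from a register) is the
 * LSLV form. Assumes the AARCH64_INSN_REG_0/1/2 enumerators from
 * <asm/insn.h>.
 */
static u32 __maybe_unused example_gen_lsl_reg(void)
{
        return aarch64_insn_gen_data2(AARCH64_INSN_REG_0, AARCH64_INSN_REG_1,
                                      AARCH64_INSN_REG_2,
                                      AARCH64_INSN_VARIANT_64BIT,
                                      AARCH64_INSN_DATA2_LSLV);
}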

u32 aarch64_insn_gen_data3(enum aarch64_insn_register dst,
                           enum aarch64_insn_register src,
                           enum aarch64_insn_register reg1,
                           enum aarch64_insn_register reg2,
                           enum aarch64_insn_variant variant,
                           enum aarch64_insn_data3_type type)
{
        u32 insn;

        switch (type) {
        case AARCH64_INSN_DATA3_MADD:
                insn = aarch64_insn_get_madd_value();
                break;
        case AARCH64_INSN_DATA3_MSUB:
                insn = aarch64_insn_get_msub_value();
                break;
        default:
                BUG_ON(1);
        }

        switch (variant) {
        case AARCH64_INSN_VARIANT_32BIT:
                break;
        case AARCH64_INSN_VARIANT_64BIT:
                insn |= AARCH64_INSN_SF_BIT;
                break;
        default:
                BUG_ON(1);
        }

        insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst);

        insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RA, insn, src);

        insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn,
                                            reg1);

        return aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RM, insn,
                                            reg2);
}
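
/*
 * Minimal usage sketch (illustrative addition, not part of the original
 * source): "mul x0, x1, x2" is the MADD form with the zero register as
 * accumulator (x0 = xzr + x1 * x2); note that @src is the accumulator (Ra)
 * operand. Assumes the AARCH64_INSN_REG_0/1/2 and AARCH64_INSN_REG_ZR
 * enumerators from <asm/insn.h>.
 */
static u32 __maybe_unused example_gen_mul(void)
{
        return aarch64_insn_gen_data3(AARCH64_INSN_REG_0, AARCH64_INSN_REG_ZR,
                                      AARCH64_INSN_REG_1, AARCH64_INSN_REG_2,
                                      AARCH64_INSN_VARIANT_64BIT,
                                      AARCH64_INSN_DATA3_MADD);
}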