/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2021 Sifive.
 */
5 #ifndef ASM_ERRATA_LIST_H
6 #define ASM_ERRATA_LIST_H
8 #include <asm/alternative.h>
9 #include <asm/vendorid_list.h>
#ifdef CONFIG_ERRATA_SIFIVE
/* Alternatives patch IDs for SiFive errata (bit positions in the errata mask). */
#define ERRATA_SIFIVE_CIP_453 0
#define ERRATA_SIFIVE_CIP_1200 1
/* Total number of SiFive errata — must stay equal to the highest ID + 1. */
#define ERRATA_SIFIVE_NUMBER 2
#endif
#ifdef CONFIG_ERRATA_THEAD
/* Alternatives patch IDs for T-Head errata (bit positions in the errata mask). */
#define ERRATA_THEAD_PBMT 0
#define ERRATA_THEAD_CMO 1
/* Total number of T-Head errata — must stay equal to the highest ID + 1. */
#define ERRATA_THEAD_NUMBER 2
#endif
/*
 * Alternatives patch IDs for standard ISA-extension based patching
 * (vendor ID 0), as opposed to the vendor-specific errata above.
 */
#define CPUFEATURE_SVPBMT 0
#define CPUFEATURE_ZICBOM 1
/* Total number of CPU-feature patch IDs — highest ID + 1. */
#define CPUFEATURE_NUMBER 2
29 #define ALT_INSN_FAULT(x) \
30 ALTERNATIVE(__stringify(RISCV_PTR do_trap_insn_fault), \
31 __stringify(RISCV_PTR sifive_cip_453_insn_fault_trp), \
32 SIFIVE_VENDOR_ID, ERRATA_SIFIVE_CIP_453, \
33 CONFIG_ERRATA_SIFIVE_CIP_453)
/*
 * Trap-vector entry for page faults: emits a pointer to the generic
 * handler, which the alternatives mechanism replaces with the SiFive
 * CIP-453 trampoline on affected parts.
 */
#define ALT_PAGE_FAULT(x)						\
ALTERNATIVE(__stringify(RISCV_PTR do_page_fault),			\
	    __stringify(RISCV_PTR sifive_cip_453_page_fault_trp),	\
	    SIFIVE_VENDOR_ID, ERRATA_SIFIVE_CIP_453,			\
	    CONFIG_ERRATA_SIFIVE_CIP_453)
40 #else /* !__ASSEMBLY__ */
/*
 * Per-page TLB flush. On SiFive CIP-1200 parts the address-scoped
 * "sfence.vma %0" is patched to an unscoped "sfence.vma" (full flush).
 * NOTE(review): the macro parameter is named x but the operand binds
 * the caller's local `addr` — callers must pass a variable named addr.
 */
#define ALT_FLUSH_TLB_PAGE(x)						\
asm(ALTERNATIVE("sfence.vma %0", "sfence.vma", SIFIVE_VENDOR_ID,	\
		ERRATA_SIFIVE_CIP_1200, CONFIG_ERRATA_SIFIVE_CIP_1200)	\
		: : "r" (addr) : "memory")
/*
 * _val is marked as "will be overwritten", so need to set it to 0
 * in the default case.
 */
#define ALT_SVPBMT_SHIFT 61
#define ALT_THEAD_PBMT_SHIFT 59
/*
 * Load the page-based memory-type bits for `prot` into _val:
 *  - default: no PBMT support, _val = 0;
 *  - Svpbmt:  prot##_SVPBMT shifted into bits [62:61];
 *  - T-Head:  prot##_THEAD shifted into the vendor PBMT field.
 * The constants are passed pre-shifted down so they fit an "I"
 * (12-bit immediate) constraint, then shifted back up in asm.
 */
#define ALT_SVPBMT(_val, prot)						\
asm(ALTERNATIVE_2("li %0, 0\t\nnop",					\
		  "li %0, %1\t\nslli %0,%0,%3", 0,			\
			CPUFEATURE_SVPBMT, CONFIG_RISCV_ISA_SVPBMT,	\
		  "li %0, %2\t\nslli %0,%0,%4", THEAD_VENDOR_ID,	\
			ERRATA_THEAD_PBMT, CONFIG_ERRATA_THEAD_PBMT)	\
		: "=r"(_val)						\
		: "I"(prot##_SVPBMT >> ALT_SVPBMT_SHIFT),		\
		  "I"(prot##_THEAD >> ALT_THEAD_PBMT_SHIFT),		\
		  "I"(ALT_SVPBMT_SHIFT),				\
		  "I"(ALT_THEAD_PBMT_SHIFT))
#ifdef CONFIG_ERRATA_THEAD_PBMT
/*
 * IO/NOCACHE memory types are handled together with svpbmt,
 * so on T-Head chips, check if no other memory type is set,
 * and set the non-0 PMA type if applicable.
 *
 * _val is updated in place: if none of the vendor memory-type bits
 * (_PAGE_MTMASK_THEAD) are set, OR in the _PAGE_PMA_THEAD type.
 * Default (non-T-Head) case is patched to nops. Clobbers t3.
 */
#define ALT_THEAD_PMA(_val)						\
	asm volatile(ALTERNATIVE(					\
		__nops(7),						\
		"li      t3, %1\n\t"					\
		"slli    t3, t3, %3\n\t"				\
		"and     t3, %0, t3\n\t"				\
		"bne     t3, zero, 2f\n\t"				\
		"li      t3, %2\n\t"					\
		"slli    t3, t3, %3\n\t"				\
		"or      %0, %0, t3\n\t"				\
		"2:", THEAD_VENDOR_ID,					\
			ERRATA_THEAD_PBMT, CONFIG_ERRATA_THEAD_PBMT)	\
		: "+r"(_val)						\
		: "I"(_PAGE_MTMASK_THEAD >> ALT_THEAD_PBMT_SHIFT),	\
		  "I"(_PAGE_PMA_THEAD >> ALT_THEAD_PBMT_SHIFT),		\
		  "I"(ALT_THEAD_PBMT_SHIFT)				\
		: "t3")
#else
#define ALT_THEAD_PMA(_val)
#endif
/*
 * dcache.ipa rs1 (invalidate, physical address)
 * | 31 - 25 | 24 - 20 | 19 - 15 | 14 - 12 | 11 - 7 | 6 - 0 |
 *   0000001    01010      rs1       000      00000  0001011
 * dcache.iva rs1 (invalidate, virtual address)
 *   0000001    00110      rs1       000      00000  0001011
 *
 * dcache.cpa rs1 (clean, physical address)
 * | 31 - 25 | 24 - 20 | 19 - 15 | 14 - 12 | 11 - 7 | 6 - 0 |
 *   0000001    01001      rs1       000      00000  0001011
 * dcache.cva rs1 (clean, virtual address)
 *   0000001    00100      rs1       000      00000  0001011
 *
 * dcache.cipa rs1 (clean then invalidate, physical address)
 * | 31 - 25 | 24 - 20 | 19 - 15 | 14 - 12 | 11 - 7 | 6 - 0 |
 *   0000001    01011      rs1       000      00000  0001011
 * dcache.civa rs1 (clean then invalidate, virtual address)
 *   0000001    00111      rs1       000      00000  0001011
 *
 * sync.s (make sure all cache operations finished)
 * | 31 - 25 | 24 - 20 | 19 - 15 | 14 - 12 | 11 - 7 | 6 - 0 |
 *   0000000    11001     00000      000      00000  0001011
 */
/*
 * Raw encodings (as .long directives) of the T-Head vendor cache
 * instructions operating on the virtual address in a0 — see the
 * encoding table above. Used because assemblers do not know them.
 */
#define THEAD_inval_A0	".long 0x0265000b"
#define THEAD_clean_A0	".long 0x0245000b"
#define THEAD_flush_A0	".long 0x0275000b"
#define THEAD_SYNC_S	".long 0x0190000b"
/*
 * Cache-maintenance loop over [_start, _start + _size), walking in
 * _cachesize steps from the aligned-down start address:
 *  - default:  patched to nops (no CMO support);
 *  - Zicbom:   standard "cbo.<op>" instruction per line;
 *  - T-Head:   vendor dcache.<op> encoding per line, followed by
 *              sync.s to wait for completion.
 * _op is one of inval/clean/flush (pasted into both variants).
 * Clobbers a0 (the running line address) and t0-t2.
 */
#define ALT_CMO_OP(_op, _start, _size, _cachesize)			\
asm volatile(ALTERNATIVE_2(						\
	__nops(6),							\
	"mv a0, %1\n\t"							\
	"j 2f\n\t"							\
	"3:\n\t"							\
	"cbo." __stringify(_op) " (a0)\n\t"				\
	"add a0, a0, %0\n\t"						\
	"2:\n\t"							\
	"bltu a0, %2, 3b\n\t"						\
	"nop", 0, CPUFEATURE_ZICBOM, CONFIG_RISCV_ISA_ZICBOM,		\
	"mv a0, %1\n\t"							\
	"j 2f\n\t"							\
	"3:\n\t"							\
	THEAD_##_op##_A0 "\n\t"						\
	"add a0, a0, %0\n\t"						\
	"2:\n\t"							\
	"bltu a0, %2, 3b\n\t"						\
	THEAD_SYNC_S, THEAD_VENDOR_ID,					\
			ERRATA_THEAD_CMO, CONFIG_ERRATA_THEAD_CMO)	\
	: : "r"(_cachesize),						\
	    "r"((unsigned long)(_start) & ~((_cachesize) - 1UL)),	\
	    "r"((unsigned long)(_start) + (_size))			\
	: "a0", "t0", "t1", "t2")
145 #endif /* __ASSEMBLY__ */