| 1 | /* |
| 2 | * This file is subject to the terms and conditions of the GNU General Public |
| 3 | * License. See the file "COPYING" in the main directory of this archive |
| 4 | * for more details. |
| 5 | * |
| 6 | * Copyright (C) 2003, 2004 Ralf Baechle |
| 7 | * Copyright (C) 2004 Maciej W. Rozycki |
| 8 | */ |
| 9 | #ifndef __ASM_CPU_FEATURES_H |
| 10 | #define __ASM_CPU_FEATURES_H |
| 11 | |
| 12 | #include <asm/cpu.h> |
| 13 | #include <asm/cpu-info.h> |
| 14 | #include <asm/isa-rev.h> |
| 15 | #include <cpu-feature-overrides.h> |
| 16 | |
/*
 * Raw feature tests against the boot CPU (cpu_data[0]); the ASE, ISA-level
 * and option bitmasks are filled in during boot, typically by cpu_probe().
 */
#define __ase(ase)		(cpu_data[0].ases & (ase))
#define __isa(isa)		(cpu_data[0].isa_level & (isa))
#define __opt(opt)		(cpu_data[0].options & (opt))

/*
 * Check if MIPS_ISA_REV is >= isa *and* an option or ASE is detected during
 * boot (typically by cpu_probe()).
 *
 * Note that these should only be used in cases where a kernel built for an
 * older ISA *cannot* run on a CPU which supports the feature in question. For
 * example this may be used for features introduced with MIPSr6, since a kernel
 * built for an older ISA cannot run on a MIPSr6 CPU. This should not be used
 * for MIPSr2 features however, since a MIPSr1 or earlier kernel might run on a
 * MIPSr2 CPU.
 */
#define __isa_ge_and_ase(isa, ase)	((MIPS_ISA_REV >= (isa)) && __ase(ase))
#define __isa_ge_and_opt(isa, opt)	((MIPS_ISA_REV >= (isa)) && __opt(opt))

/*
 * Check if MIPS_ISA_REV is >= isa *or* an option or ASE is detected during
 * boot (typically by cpu_probe()).
 *
 * These are for use with features that are optional up until a particular ISA
 * revision & then become required.
 */
#define __isa_ge_or_ase(isa, ase)	((MIPS_ISA_REV >= (isa)) || __ase(ase))
#define __isa_ge_or_opt(isa, opt)	((MIPS_ISA_REV >= (isa)) || __opt(opt))

/*
 * Check if MIPS_ISA_REV is < isa *and* an option or ASE is detected during
 * boot (typically by cpu_probe()).
 *
 * These are for use with features that are optional up until a particular ISA
 * revision & are then removed - ie. no longer present in any CPU implementing
 * the given ISA revision.
 */
#define __isa_lt_and_ase(isa, ase)	((MIPS_ISA_REV < (isa)) && __ase(ase))
#define __isa_lt_and_opt(isa, opt)	((MIPS_ISA_REV < (isa)) && __opt(opt))

/*
 * Similarly allow for ISA level checks that take into account knowledge of the
 * ISA targeted by the kernel build, provided by MIPS_ISA_REV.
 */
#define __isa_ge_and_flag(isa, flag)	((MIPS_ISA_REV >= (isa)) && __isa(flag))
#define __isa_ge_or_flag(isa, flag)	((MIPS_ISA_REV >= (isa)) || __isa(flag))
#define __isa_lt_and_flag(isa, flag)	((MIPS_ISA_REV < (isa)) && __isa(flag))
/* True when the kernel is built for an ISA revision within [ge, lt). */
#define __isa_range(ge, lt) \
	((MIPS_ISA_REV >= (ge)) && (MIPS_ISA_REV < (lt)))
/*
 * Feature is architectural for builds within [ge, lt), or detected at boot
 * on a pre-lt ISA build. Since MIPS_ISA_REV is a compile-time constant, the
 * (MIPS_ISA_REV < (lt)) guard makes the runtime __isa() test dead code for
 * lt-or-later builds.
 */
#define __isa_range_or_flag(ge, lt, flag) \
	(__isa_range(ge, lt) || ((MIPS_ISA_REV < (lt)) && __isa(flag)))
/* Kernel built within [ge, lt) *and* the ASE was detected at boot. */
#define __isa_range_and_ase(ge, lt, ase) \
	(__isa_range(ge, lt) && __ase(ase))
| 69 | |
| 70 | /* |
| 71 | * SMP assumption: Options of CPU 0 are a superset of all processors. |
| 72 | * This is true for all known MIPS systems. |
| 73 | */ |
#ifndef cpu_has_tlb
#define cpu_has_tlb		__opt(MIPS_CPU_TLB)
#endif
#ifndef cpu_has_ftlb
#define cpu_has_ftlb		__opt(MIPS_CPU_FTLB)
#endif
#ifndef cpu_has_tlbinv
#define cpu_has_tlbinv		__opt(MIPS_CPU_TLBINV)
#endif
#ifndef cpu_has_segments
#define cpu_has_segments	__opt(MIPS_CPU_SEGMENTS)
#endif
#ifndef cpu_has_eva
#define cpu_has_eva		__opt(MIPS_CPU_EVA)
#endif
#ifndef cpu_has_htw
#define cpu_has_htw		__opt(MIPS_CPU_HTW)
#endif
#ifndef cpu_has_ldpte
#define cpu_has_ldpte		__opt(MIPS_CPU_LDPTE)
#endif
#ifndef cpu_has_rixiex
/* Architectural from MIPSr6 onwards; optional & probed before that. */
#define cpu_has_rixiex		__isa_ge_or_opt(6, MIPS_CPU_RIXIEX)
#endif
#ifndef cpu_has_maar
#define cpu_has_maar		__opt(MIPS_CPU_MAAR)
#endif
#ifndef cpu_has_rw_llb
/* Architectural from MIPSr6 onwards; optional & probed before that. */
#define cpu_has_rw_llb		__isa_ge_or_opt(6, MIPS_CPU_RW_LLB)
#endif
| 104 | |
| 105 | /* |
| 106 | * For the moment we don't consider R6000 and R8000 so we can assume that |
| 107 | * anything that doesn't support R4000-style exceptions and interrupts is |
| 108 | * R3000-like. Users should still treat these two macro definitions as |
| 109 | * opaque. |
| 110 | */ |
#ifndef cpu_has_3kex
/*
 * The forward reference to cpu_has_4kex is fine: macros are expanded at the
 * point of use, by which time both are defined.
 */
#define cpu_has_3kex		(!cpu_has_4kex)
#endif
#ifndef cpu_has_4kex
#define cpu_has_4kex		__isa_ge_or_opt(1, MIPS_CPU_4KEX)
#endif
#ifndef cpu_has_3k_cache
/* R3000-style caches can only exist on pre-MIPSr1 CPUs. */
#define cpu_has_3k_cache	__isa_lt_and_opt(1, MIPS_CPU_3K_CACHE)
#endif
#ifndef cpu_has_4k_cache
#define cpu_has_4k_cache	__opt(MIPS_CPU_4K_CACHE)
#endif
#ifndef cpu_has_octeon_cache
/* Non-zero when the boot CPU is any member of the Cavium Octeon family. */
#define cpu_has_octeon_cache					\
({								\
	int __cpu_type = boot_cpu_type();			\
								\
	__cpu_type == CPU_CAVIUM_OCTEON ||			\
	__cpu_type == CPU_CAVIUM_OCTEON_PLUS ||			\
	__cpu_type == CPU_CAVIUM_OCTEON2 ||			\
	__cpu_type == CPU_CAVIUM_OCTEON3;			\
})
#endif
/* Don't override `cpu_has_fpu' to 1 or the "nofpu" option won't work. */
#ifndef cpu_has_fpu
# ifdef CONFIG_MIPS_FP_SUPPORT
/* Must remain a runtime test so the "nofpu" option can clear the bit. */
# define cpu_has_fpu		(current_cpu_data.options & MIPS_CPU_FPU)
/*
 * NOTE(review): the raw_ variant reads raw_current_cpu_data; presumably for
 * contexts where the checked current_cpu_data accessor is unsafe — confirm
 * against its definition in cpu-info.h.
 */
# define raw_cpu_has_fpu	(raw_current_cpu_data.options & MIPS_CPU_FPU)
# else
# define cpu_has_fpu		0
# define raw_cpu_has_fpu	0
# endif
#else
# if cpu_has_fpu
#  error "Forcing `cpu_has_fpu' to non-zero is not supported"
# endif
# define raw_cpu_has_fpu	cpu_has_fpu
#endif
#ifndef cpu_has_32fpr
#define cpu_has_32fpr		__isa_ge_or_opt(1, MIPS_CPU_32FPR)
#endif
#ifndef cpu_has_counter
#define cpu_has_counter		__opt(MIPS_CPU_COUNTER)
#endif
#ifndef cpu_has_watch
#define cpu_has_watch		__opt(MIPS_CPU_WATCH)
#endif
#ifndef cpu_has_divec
#define cpu_has_divec		__isa_ge_or_opt(1, MIPS_CPU_DIVEC)
#endif
#ifndef cpu_has_vce
#define cpu_has_vce		__opt(MIPS_CPU_VCE)
#endif
#ifndef cpu_has_cache_cdex_p
#define cpu_has_cache_cdex_p	__opt(MIPS_CPU_CACHE_CDEX_P)
#endif
#ifndef cpu_has_cache_cdex_s
#define cpu_has_cache_cdex_s	__opt(MIPS_CPU_CACHE_CDEX_S)
#endif
#ifndef cpu_has_prefetch
#define cpu_has_prefetch	__isa_ge_or_opt(1, MIPS_CPU_PREFETCH)
#endif
#ifndef cpu_has_mcheck
#define cpu_has_mcheck		__isa_ge_or_opt(1, MIPS_CPU_MCHECK)
#endif
#ifndef cpu_has_ejtag
#define cpu_has_ejtag		__opt(MIPS_CPU_EJTAG)
#endif
#ifndef cpu_has_llsc
#define cpu_has_llsc		__isa_ge_or_opt(1, MIPS_CPU_LLSC)
#endif
#ifndef kernel_uses_llsc
/* Lets a platform opt out of LL/SC even when the CPU implements it. */
#define kernel_uses_llsc	cpu_has_llsc
#endif
#ifndef cpu_has_guestctl0ext
#define cpu_has_guestctl0ext	__opt(MIPS_CPU_GUESTCTL0EXT)
#endif
#ifndef cpu_has_guestctl1
#define cpu_has_guestctl1	__opt(MIPS_CPU_GUESTCTL1)
#endif
#ifndef cpu_has_guestctl2
#define cpu_has_guestctl2	__opt(MIPS_CPU_GUESTCTL2)
#endif
#ifndef cpu_has_guestid
#define cpu_has_guestid		__opt(MIPS_CPU_GUESTID)
#endif
#ifndef cpu_has_drg
#define cpu_has_drg		__opt(MIPS_CPU_DRG)
#endif
/* The following ASEs were all removed as of MIPSr6, hence __isa_lt_and_ase. */
#ifndef cpu_has_mips16
#define cpu_has_mips16		__isa_lt_and_ase(6, MIPS_ASE_MIPS16)
#endif
#ifndef cpu_has_mips16e2
#define cpu_has_mips16e2	__isa_lt_and_ase(6, MIPS_ASE_MIPS16E2)
#endif
#ifndef cpu_has_mdmx
#define cpu_has_mdmx		__isa_lt_and_ase(6, MIPS_ASE_MDMX)
#endif
#ifndef cpu_has_mips3d
#define cpu_has_mips3d		__isa_lt_and_ase(6, MIPS_ASE_MIPS3D)
#endif
#ifndef cpu_has_smartmips
#define cpu_has_smartmips	__isa_lt_and_ase(6, MIPS_ASE_SMARTMIPS)
#endif

#ifndef cpu_has_rixi
/* Read/Execute Inhibit page bits; architectural from MIPSr6 onwards. */
#define cpu_has_rixi		__isa_ge_or_opt(6, MIPS_CPU_RIXI)
#endif

#ifndef cpu_has_mmips
# if defined(__mips_micromips)
/* The kernel itself is compiled as microMIPS, so support is a given. */
# define cpu_has_mmips		1
# elif defined(CONFIG_SYS_SUPPORTS_MICROMIPS)
# define cpu_has_mmips		__opt(MIPS_CPU_MICROMIPS)
# else
# define cpu_has_mmips		0
# endif
#endif

#ifndef cpu_has_lpa
#define cpu_has_lpa		__opt(MIPS_CPU_LPA)
#endif
#ifndef cpu_has_mvh
#define cpu_has_mvh		__opt(MIPS_CPU_MVH)
#endif
#ifndef cpu_has_xpa
/* XPA requires both LPA & MVH support. */
#define cpu_has_xpa		(cpu_has_lpa && cpu_has_mvh)
#endif
/* Cache properties are probed at boot rather than being CPU option bits. */
#ifndef cpu_has_vtag_icache
#define cpu_has_vtag_icache	(cpu_data[0].icache.flags & MIPS_CACHE_VTAG)
#endif
#ifndef cpu_has_dc_aliases
#define cpu_has_dc_aliases	(cpu_data[0].dcache.flags & MIPS_CACHE_ALIASES)
#endif
#ifndef cpu_has_ic_fills_f_dc
#define cpu_has_ic_fills_f_dc	(cpu_data[0].icache.flags & MIPS_CACHE_IC_F_DC)
#endif
#ifndef cpu_has_pindexed_dcache
#define cpu_has_pindexed_dcache	(cpu_data[0].dcache.flags & MIPS_CACHE_PINDEX)
#endif
| 260 | |
| 261 | /* |
| 262 | * I-Cache snoops remote store. This only matters on SMP. Some multiprocessors |
| 263 | * such as the R10000 have I-Caches that snoop local stores; the embedded ones |
| 264 | * don't. For maintaining I-cache coherency this means we need to flush the |
 * D-cache all the way back to wherever the I-cache does refills from, so the
| 266 | * I-cache has a chance to see the new data at all. Then we have to flush the |
| 267 | * I-cache also. |
| 268 | * Note we may have been rescheduled and may no longer be running on the CPU |
| 269 | * that did the store so we can't optimize this into only doing the flush on |
| 270 | * the local CPU. |
| 271 | */ |
#ifndef cpu_icache_snoops_remote_store
#ifdef CONFIG_SMP
#define cpu_icache_snoops_remote_store	(cpu_data[0].icache.flags & MIPS_IC_SNOOPS_REMOTE)
#else
/* UP: there are no remote stores, so the property trivially holds. */
#define cpu_icache_snoops_remote_store	1
#endif
#endif
| 279 | |
#ifndef cpu_has_mips_1
/* MIPS I compatibility is retained by everything prior to MIPSr6. */
# define cpu_has_mips_1		(MIPS_ISA_REV < 6)
#endif
#ifndef cpu_has_mips_2
# define cpu_has_mips_2		__isa_lt_and_flag(6, MIPS_CPU_ISA_II)
#endif
#ifndef cpu_has_mips_3
# define cpu_has_mips_3		__isa_lt_and_flag(6, MIPS_CPU_ISA_III)
#endif
#ifndef cpu_has_mips_4
# define cpu_has_mips_4		__isa_lt_and_flag(6, MIPS_CPU_ISA_IV)
#endif
#ifndef cpu_has_mips_5
# define cpu_has_mips_5		__isa_lt_and_flag(6, MIPS_CPU_ISA_V)
#endif
#ifndef cpu_has_mips32r1
# define cpu_has_mips32r1	__isa_range_or_flag(1, 6, MIPS_CPU_ISA_M32R1)
#endif
#ifndef cpu_has_mips32r2
# define cpu_has_mips32r2	__isa_range_or_flag(2, 6, MIPS_CPU_ISA_M32R2)
#endif
#ifndef cpu_has_mips32r5
# define cpu_has_mips32r5	__isa_range_or_flag(5, 6, MIPS_CPU_ISA_M32R5)
#endif
#ifndef cpu_has_mips32r6
# define cpu_has_mips32r6	__isa_ge_or_flag(6, MIPS_CPU_ISA_M32R6)
#endif
/* The 64-bit variants additionally require 64-bit capable hardware. */
#ifndef cpu_has_mips64r1
# define cpu_has_mips64r1	(cpu_has_64bits && \
				 __isa_range_or_flag(1, 6, MIPS_CPU_ISA_M64R1))
#endif
#ifndef cpu_has_mips64r2
# define cpu_has_mips64r2	(cpu_has_64bits && \
				 __isa_range_or_flag(2, 6, MIPS_CPU_ISA_M64R2))
#endif
#ifndef cpu_has_mips64r5
# define cpu_has_mips64r5	(cpu_has_64bits && \
				 __isa_range_or_flag(5, 6, MIPS_CPU_ISA_M64R5))
#endif
#ifndef cpu_has_mips64r6
# define cpu_has_mips64r6	__isa_ge_and_flag(6, MIPS_CPU_ISA_M64R6)
#endif
| 322 | |
/*
 * Shortcuts ...
 *
 * Note the use of bitwise `|' rather than logical `||': every operand is a
 * feature test used only for its truth value, and `|' avoids short-circuit
 * branching (NOTE: inherited convention in this file).
 */
#define cpu_has_mips_2_3_4_5	(cpu_has_mips_2 | cpu_has_mips_3_4_5)
#define cpu_has_mips_3_4_5	(cpu_has_mips_3 | cpu_has_mips_4_5)
#define cpu_has_mips_4_5	(cpu_has_mips_4 | cpu_has_mips_5)

#define cpu_has_mips_2_3_4_5_r	(cpu_has_mips_2 | cpu_has_mips_3_4_5_r)
#define cpu_has_mips_3_4_5_r	(cpu_has_mips_3 | cpu_has_mips_4_5_r)
#define cpu_has_mips_4_5_r	(cpu_has_mips_4 | cpu_has_mips_5_r)
#define cpu_has_mips_5_r	(cpu_has_mips_5 | cpu_has_mips_r)

#define cpu_has_mips_3_4_5_64_r2_r6 \
				(cpu_has_mips_3 | cpu_has_mips_4_5_64_r2_r6)
#define cpu_has_mips_4_5_64_r2_r6 \
				(cpu_has_mips_4_5 | cpu_has_mips64r1 | \
				 cpu_has_mips_r2 | cpu_has_mips_r5 | \
				 cpu_has_mips_r6)

#define cpu_has_mips32	(cpu_has_mips32r1 | cpu_has_mips32r2 | \
			 cpu_has_mips32r5 | cpu_has_mips32r6)
#define cpu_has_mips64	(cpu_has_mips64r1 | cpu_has_mips64r2 | \
			 cpu_has_mips64r5 | cpu_has_mips64r6)
#define cpu_has_mips_r1	(cpu_has_mips32r1 | cpu_has_mips64r1)
#define cpu_has_mips_r2	(cpu_has_mips32r2 | cpu_has_mips64r2)
#define cpu_has_mips_r5	(cpu_has_mips32r5 | cpu_has_mips64r5)
#define cpu_has_mips_r6	(cpu_has_mips32r6 | cpu_has_mips64r6)
#define cpu_has_mips_r	(cpu_has_mips32r1 | cpu_has_mips32r2 | \
			 cpu_has_mips32r5 | cpu_has_mips32r6 | \
			 cpu_has_mips64r1 | cpu_has_mips64r2 | \
			 cpu_has_mips64r5 | cpu_has_mips64r6)

/* MIPSR2 - MIPSR6 have a lot of similarities */
#define cpu_has_mips_r2_r6	(cpu_has_mips_r2 | cpu_has_mips_r5 | \
				 cpu_has_mips_r6)
| 358 | |
/*
 * cpu_has_mips_r2_exec_hazard - return if IHB is required on current processor
 *
 * Returns non-zero value if the current processor implementation requires
 * an IHB instruction to deal with an instruction hazard as per MIPS R2
 * architecture specification, zero otherwise.
 *
 * The listed implementations are known not to require IHB; everything else
 * is assumed to need it.
 */
#ifndef cpu_has_mips_r2_exec_hazard
#define cpu_has_mips_r2_exec_hazard				\
({								\
	int __cpu = boot_cpu_type();				\
								\
	!(__cpu == CPU_M14KC ||					\
	  __cpu == CPU_74K ||					\
	  __cpu == CPU_1074K ||					\
	  __cpu == CPU_PROAPTIV ||				\
	  __cpu == CPU_P5600 ||					\
	  __cpu == CPU_M5150 ||					\
	  __cpu == CPU_QEMU_GENERIC ||				\
	  __cpu == CPU_CAVIUM_OCTEON ||				\
	  __cpu == CPU_CAVIUM_OCTEON_PLUS ||			\
	  __cpu == CPU_CAVIUM_OCTEON2 ||			\
	  __cpu == CPU_CAVIUM_OCTEON3);				\
})
#endif
| 393 | |
/*
 * MIPS32, MIPS64, VR5500, IDT32332, IDT32334 and maybe a few other
 * pre-MIPS32/MIPS64 processors have CLO, CLZ. The IDT RC64574 is 64-bit and
 * has CLO and CLZ but not DCLO nor DCLZ. For 64-bit kernels
 * cpu_has_clo_clz also indicates the availability of DCLO and DCLZ.
 */
#ifndef cpu_has_clo_clz
#define cpu_has_clo_clz	cpu_has_mips_r
#endif

/*
 * MIPS32 R2, MIPS64 R2, Loongson 3A and Octeon have WSBH.
 * MIPS64 R2, Loongson 3A and Octeon have WSBH, DSBH and DSHD.
 * This indicates the availability of WSBH and in case of 64 bit CPUs also
 * DSBH and DSHD.
 */
#ifndef cpu_has_wsbh
#define cpu_has_wsbh		cpu_has_mips_r2
#endif

/* DSP ASE revisions, each probed independently at boot. */
#ifndef cpu_has_dsp
#define cpu_has_dsp		__ase(MIPS_ASE_DSP)
#endif

#ifndef cpu_has_dsp2
#define cpu_has_dsp2		__ase(MIPS_ASE_DSP2P)
#endif

#ifndef cpu_has_dsp3
#define cpu_has_dsp3		__ase(MIPS_ASE_DSP3)
#endif

/* Loongson vendor extensions, reported via the ASE bitmask. */
#ifndef cpu_has_loongson_mmi
#define cpu_has_loongson_mmi	__ase(MIPS_ASE_LOONGSON_MMI)
#endif

#ifndef cpu_has_loongson_cam
#define cpu_has_loongson_cam	__ase(MIPS_ASE_LOONGSON_CAM)
#endif

#ifndef cpu_has_loongson_ext
#define cpu_has_loongson_ext	__ase(MIPS_ASE_LOONGSON_EXT)
#endif

#ifndef cpu_has_loongson_ext2
#define cpu_has_loongson_ext2	__ase(MIPS_ASE_LOONGSON_EXT2)
#endif

#ifndef cpu_has_mipsmt
/* The MT ASE exists only within MIPSr2-r5; cf. cpu_has_vp for r6. */
#define cpu_has_mipsmt		__isa_range_and_ase(2, 6, MIPS_ASE_MIPSMT)
#endif

#ifndef cpu_has_vp
/* Virtual Processors are a MIPSr6-onwards feature. */
#define cpu_has_vp		__isa_ge_and_opt(6, MIPS_CPU_VP)
#endif

#ifndef cpu_has_userlocal
/* UserLocal (ULRI) register; architectural from MIPSr6 onwards. */
#define cpu_has_userlocal	__isa_ge_or_opt(6, MIPS_CPU_ULRI)
#endif
| 453 | |
#ifdef CONFIG_32BIT
# ifndef cpu_has_nofpuex
# define cpu_has_nofpuex	__isa_lt_and_opt(1, MIPS_CPU_NOFPUEX)
# endif
# ifndef cpu_has_64bits
/* A 32-bit kernel may still be running on a 64-bit capable CPU. */
# define cpu_has_64bits		(cpu_data[0].isa_level & MIPS_CPU_ISA_64BIT)
# endif
# ifndef cpu_has_64bit_zero_reg
# define cpu_has_64bit_zero_reg	(cpu_data[0].isa_level & MIPS_CPU_ISA_64BIT)
# endif
# ifndef cpu_has_64bit_gp_regs
/* GP registers are always 32-bit wide under a 32-bit kernel. */
# define cpu_has_64bit_gp_regs	0
# endif
# ifndef cpu_vmbits
/* NOTE(review): presumably the usable user VA bits on 32-bit — confirm. */
# define cpu_vmbits 31
# endif
#endif

#ifdef CONFIG_64BIT
# ifndef cpu_has_nofpuex
# define cpu_has_nofpuex	0
# endif
# ifndef cpu_has_64bits
# define cpu_has_64bits		1
# endif
# ifndef cpu_has_64bit_zero_reg
# define cpu_has_64bit_zero_reg	1
# endif
# ifndef cpu_has_64bit_gp_regs
# define cpu_has_64bit_gp_regs	1
# endif
# ifndef cpu_vmbits
/* No platform override: probe the value at boot (see __NEED_VMBITS_PROBE). */
# define cpu_vmbits cpu_data[0].vmbits
# define __NEED_VMBITS_PROBE
# endif
#endif
| 490 | |
/* Vectored interrupts need both kernel config & CPU support. */
#if defined(CONFIG_CPU_MIPSR2_IRQ_VI) && !defined(cpu_has_vint)
# define cpu_has_vint		__opt(MIPS_CPU_VINT)
#elif !defined(cpu_has_vint)
# define cpu_has_vint		0
#endif

/* Likewise for external interrupt controller (EIC) mode. */
#if defined(CONFIG_CPU_MIPSR2_IRQ_EI) && !defined(cpu_has_veic)
# define cpu_has_veic		__opt(MIPS_CPU_VEIC)
#elif !defined(cpu_has_veic)
# define cpu_has_veic		0
#endif

#ifndef cpu_has_inclusive_pcaches
#define cpu_has_inclusive_pcaches	__opt(MIPS_CPU_INCLUSIVE_CACHES)
#endif

/* Per-cache-level line sizes probed at boot. */
#ifndef cpu_dcache_line_size
#define cpu_dcache_line_size()	cpu_data[0].dcache.linesz
#endif
#ifndef cpu_icache_line_size
#define cpu_icache_line_size()	cpu_data[0].icache.linesz
#endif
#ifndef cpu_scache_line_size
#define cpu_scache_line_size()	cpu_data[0].scache.linesz
#endif
#ifndef cpu_tcache_line_size
#define cpu_tcache_line_size()	cpu_data[0].tcache.linesz
#endif

#ifndef cpu_hwrena_impl_bits
/* Implementation-specific HWREna bits; none unless a platform overrides. */
#define cpu_hwrena_impl_bits	0
#endif

#ifndef cpu_has_perf_cntr_intr_bit
#define cpu_has_perf_cntr_intr_bit	__opt(MIPS_CPU_PCI)
#endif

#ifndef cpu_has_vz
#define cpu_has_vz		__ase(MIPS_ASE_VZ)
#endif

/* MSA is only usable when the kernel was configured with support for it. */
#if defined(CONFIG_CPU_HAS_MSA) && !defined(cpu_has_msa)
# define cpu_has_msa		__ase(MIPS_ASE_MSA)
#elif !defined(cpu_has_msa)
# define cpu_has_msa		0
#endif

#ifndef cpu_has_ufr
# define cpu_has_ufr		__opt(MIPS_CPU_UFR)
#endif

#ifndef cpu_has_fre
# define cpu_has_fre		__opt(MIPS_CPU_FRE)
#endif

#ifndef cpu_has_cdmm
# define cpu_has_cdmm		__opt(MIPS_CPU_CDMM)
#endif

#ifndef cpu_has_small_pages
# define cpu_has_small_pages	__opt(MIPS_CPU_SP)
#endif

/* Legacy NaN encoding is pre-r6 only; 2008 NaN is required from r6 on. */
#ifndef cpu_has_nan_legacy
#define cpu_has_nan_legacy	__isa_lt_and_opt(6, MIPS_CPU_NAN_LEGACY)
#endif
#ifndef cpu_has_nan_2008
#define cpu_has_nan_2008	__isa_ge_or_opt(6, MIPS_CPU_NAN_2008)
#endif
| 560 | |
#ifndef cpu_has_ebase_wg
# define cpu_has_ebase_wg	__opt(MIPS_CPU_EBASE_WG)
#endif

/* BadInstr/BadInstrP registers are architectural from MIPSr6 onwards. */
#ifndef cpu_has_badinstr
# define cpu_has_badinstr	__isa_ge_or_opt(6, MIPS_CPU_BADINSTR)
#endif

#ifndef cpu_has_badinstrp
# define cpu_has_badinstrp	__isa_ge_or_opt(6, MIPS_CPU_BADINSTRP)
#endif

#ifndef cpu_has_contextconfig
# define cpu_has_contextconfig	__opt(MIPS_CPU_CTXTC)
#endif

#ifndef cpu_has_perf
# define cpu_has_perf		__opt(MIPS_CPU_PERF)
#endif

#ifndef cpu_has_mac2008_only
# define cpu_has_mac2008_only	__opt(MIPS_CPU_MAC_2008_ONLY)
#endif

#ifndef cpu_has_ftlbparex
# define cpu_has_ftlbparex	__opt(MIPS_CPU_FTLBPAREX)
#endif

#ifndef cpu_has_gsexcex
# define cpu_has_gsexcex	__opt(MIPS_CPU_GSEXCEX)
#endif
| 592 | |
#ifdef CONFIG_SMP
/*
 * Some systems share FTLB RAMs between threads within a core (siblings in
 * kernel parlance). This means that FTLB entries may become invalid at almost
 * any point when an entry is evicted due to a sibling thread writing an entry
 * to the shared FTLB RAM.
 *
 * This is only relevant to SMP systems, and the only systems that exhibit this
 * property implement MIPSr6 or higher so we constrain support for this to
 * kernels that will run on such systems.
 */
# ifndef cpu_has_shared_ftlb_ram
#  define cpu_has_shared_ftlb_ram \
	__isa_ge_and_opt(6, MIPS_CPU_SHARED_FTLB_RAM)
# endif

/*
 * Some systems take this a step further & share FTLB entries between siblings.
 * This is implemented as TLB writes happening as usual, but if an entry
 * written by a sibling exists in the shared FTLB for a translation which would
 * otherwise cause a TLB refill exception then the CPU will use the entry
 * written by its sibling rather than triggering a refill & writing a matching
 * TLB entry for itself.
 *
 * This is naturally only valid if a TLB entry is known to be suitable for use
 * on all siblings in a CPU, and so it only takes effect when MMIDs are in use
 * rather than ASIDs or when a TLB entry is marked global.
 */
# ifndef cpu_has_shared_ftlb_entries
#  define cpu_has_shared_ftlb_entries \
	__isa_ge_and_opt(6, MIPS_CPU_SHARED_FTLB_ENTRIES)
# endif
#endif /* SMP */

/* UP fallbacks: FTLB sharing is an SMP-only concern. */
#ifndef cpu_has_shared_ftlb_ram
# define cpu_has_shared_ftlb_ram 0
#endif
#ifndef cpu_has_shared_ftlb_entries
# define cpu_has_shared_ftlb_entries 0
#endif

#ifdef CONFIG_MIPS_MT_SMP
/* Per-TC performance counters only exist with the pre-r6 MT ASE. */
# define cpu_has_mipsmt_pertccounters \
	__isa_lt_and_opt(6, MIPS_CPU_MT_PER_TC_PERF_COUNTERS)
#else
# define cpu_has_mipsmt_pertccounters 0
#endif /* CONFIG_MIPS_MT_SMP */
| 640 | |
/*
 * We only enable MMID support for configurations which natively support 64 bit
 * atomics because getting good performance from the allocator relies upon
 * efficient atomic64_*() functions.
 */
#ifndef cpu_has_mmid
# ifdef CONFIG_GENERIC_ATOMIC64
/* atomic64_*() would be software-emulated here, so keep MMIDs disabled. */
# define cpu_has_mmid		0
# else
# define cpu_has_mmid		__isa_ge_and_opt(6, MIPS_CPU_MMID)
# endif
#endif

#ifndef cpu_has_mm_sysad
# define cpu_has_mm_sysad	__opt(MIPS_CPU_MM_SYSAD)
#endif

#ifndef cpu_has_mm_full
# define cpu_has_mm_full	__opt(MIPS_CPU_MM_FULL)
#endif
| 661 | |
/*
 * Guest capabilities
 *
 * These test the guest context probed at boot (cpu_data[0].guest) rather
 * than the root context used by the checks above.
 */
/*
 * NOTE(review): bit n of guest.conf presumably flags the presence of the
 * guest Config<n> register — confirm against the guest probing code.
 */
#ifndef cpu_guest_has_conf1
#define cpu_guest_has_conf1	(cpu_data[0].guest.conf & (1 << 1))
#endif
#ifndef cpu_guest_has_conf2
#define cpu_guest_has_conf2	(cpu_data[0].guest.conf & (1 << 2))
#endif
#ifndef cpu_guest_has_conf3
#define cpu_guest_has_conf3	(cpu_data[0].guest.conf & (1 << 3))
#endif
#ifndef cpu_guest_has_conf4
#define cpu_guest_has_conf4	(cpu_data[0].guest.conf & (1 << 4))
#endif
#ifndef cpu_guest_has_conf5
#define cpu_guest_has_conf5	(cpu_data[0].guest.conf & (1 << 5))
#endif
#ifndef cpu_guest_has_conf6
#define cpu_guest_has_conf6	(cpu_data[0].guest.conf & (1 << 6))
#endif
#ifndef cpu_guest_has_conf7
#define cpu_guest_has_conf7	(cpu_data[0].guest.conf & (1 << 7))
#endif
#ifndef cpu_guest_has_fpu
#define cpu_guest_has_fpu	(cpu_data[0].guest.options & MIPS_CPU_FPU)
#endif
#ifndef cpu_guest_has_watch
#define cpu_guest_has_watch	(cpu_data[0].guest.options & MIPS_CPU_WATCH)
#endif
#ifndef cpu_guest_has_contextconfig
#define cpu_guest_has_contextconfig (cpu_data[0].guest.options & MIPS_CPU_CTXTC)
#endif
#ifndef cpu_guest_has_segments
#define cpu_guest_has_segments	(cpu_data[0].guest.options & MIPS_CPU_SEGMENTS)
#endif
#ifndef cpu_guest_has_badinstr
#define cpu_guest_has_badinstr	(cpu_data[0].guest.options & MIPS_CPU_BADINSTR)
#endif
#ifndef cpu_guest_has_badinstrp
#define cpu_guest_has_badinstrp	(cpu_data[0].guest.options & MIPS_CPU_BADINSTRP)
#endif
#ifndef cpu_guest_has_htw
#define cpu_guest_has_htw	(cpu_data[0].guest.options & MIPS_CPU_HTW)
#endif
#ifndef cpu_guest_has_ldpte
#define cpu_guest_has_ldpte	(cpu_data[0].guest.options & MIPS_CPU_LDPTE)
#endif
#ifndef cpu_guest_has_mvh
#define cpu_guest_has_mvh	(cpu_data[0].guest.options & MIPS_CPU_MVH)
#endif
#ifndef cpu_guest_has_msa
#define cpu_guest_has_msa	(cpu_data[0].guest.ases & MIPS_ASE_MSA)
#endif
/* Tests whether guest KScratch register n exists (n is a bit index). */
#ifndef cpu_guest_has_kscr
#define cpu_guest_has_kscr(n)	(cpu_data[0].guest.kscratch_mask & (1u << (n)))
#endif
#ifndef cpu_guest_has_rw_llb
/* LLB is writeable architecturally from r6, else per the probed option. */
#define cpu_guest_has_rw_llb	(cpu_has_mips_r6 || (cpu_data[0].guest.options & MIPS_CPU_RW_LLB))
#endif
#ifndef cpu_guest_has_perf
#define cpu_guest_has_perf	(cpu_data[0].guest.options & MIPS_CPU_PERF)
#endif
#ifndef cpu_guest_has_maar
#define cpu_guest_has_maar	(cpu_data[0].guest.options & MIPS_CPU_MAAR)
#endif
#ifndef cpu_guest_has_userlocal
#define cpu_guest_has_userlocal	(cpu_data[0].guest.options & MIPS_CPU_ULRI)
#endif
| 731 | |
/*
 * Guest dynamic capabilities
 *
 * These use the options_dyn/ases_dyn masks rather than the static guest
 * capability masks above.
 */
#ifndef cpu_guest_has_dyn_fpu
#define cpu_guest_has_dyn_fpu	(cpu_data[0].guest.options_dyn & MIPS_CPU_FPU)
#endif
#ifndef cpu_guest_has_dyn_watch
#define cpu_guest_has_dyn_watch	(cpu_data[0].guest.options_dyn & MIPS_CPU_WATCH)
#endif
#ifndef cpu_guest_has_dyn_contextconfig
#define cpu_guest_has_dyn_contextconfig (cpu_data[0].guest.options_dyn & MIPS_CPU_CTXTC)
#endif
#ifndef cpu_guest_has_dyn_perf
#define cpu_guest_has_dyn_perf	(cpu_data[0].guest.options_dyn & MIPS_CPU_PERF)
#endif
#ifndef cpu_guest_has_dyn_msa
#define cpu_guest_has_dyn_msa	(cpu_data[0].guest.ases_dyn & MIPS_ASE_MSA)
#endif
#ifndef cpu_guest_has_dyn_maar
#define cpu_guest_has_dyn_maar	(cpu_data[0].guest.options_dyn & MIPS_CPU_MAAR)
#endif
| 753 | |
| 754 | #endif /* __ASM_CPU_FEATURES_H */ |