/*
 * arch/xtensa/include/asm/xchal_vaddr_remap.h
 *
 * Xtensa macros for MMU V3 Support. Deals with re-mapping the Virtual
 * Memory Addresses from "Virtual == Physical" to their previous V2 MMU
 * mappings (KSEG at 0xD0000000 and KIO at 0XF0000000).
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2008 - 2012 Tensilica Inc.
 *
 * Pete Delaney <piet@tensilica.com>
 * Marc Gauthier <marc@tensilica.com>
 */
18 #ifndef _XTENSA_VECTORS_H
19 #define _XTENSA_VECTORS_H
21 #include <variant/core.h>
23 #define XCHAL_KIO_CACHED_VADDR 0xe0000000
24 #define XCHAL_KIO_BYPASS_VADDR 0xf0000000
25 #define XCHAL_KIO_PADDR 0xf0000000
26 #define XCHAL_KIO_SIZE 0x10000000
28 #if defined(CONFIG_MMU)
30 /* Will Become VECBASE */
31 #define VIRTUAL_MEMORY_ADDRESS 0xD0000000
33 /* Image Virtual Start Address */
34 #define KERNELOFFSET 0xD0003000
36 #if defined(XCHAL_HAVE_PTP_MMU) && XCHAL_HAVE_PTP_MMU && XCHAL_HAVE_SPANNING_WAY
37 /* MMU v3 - XCHAL_HAVE_PTP_MMU == 1 */
38 #define LOAD_MEMORY_ADDRESS 0x00003000
40 /* MMU V2 - XCHAL_HAVE_PTP_MMU == 0 */
41 #define LOAD_MEMORY_ADDRESS 0xD0003000
44 #else /* !defined(CONFIG_MMU) */
45 /* MMU Not being used - Virtual == Physical */
48 #define VIRTUAL_MEMORY_ADDRESS 0x00002000
50 /* Location of the start of the kernel text, _start */
51 #define KERNELOFFSET 0x00003000
53 /* Loaded just above possibly live vectors */
54 #define LOAD_MEMORY_ADDRESS 0x00003000
56 #endif /* CONFIG_MMU */
58 #define XC_VADDR(offset) (VIRTUAL_MEMORY_ADDRESS + offset)
60 /* Used to set VECBASE register */
61 #define VECBASE_RESET_VADDR VIRTUAL_MEMORY_ADDRESS
63 #define RESET_VECTOR_VECOFS (XCHAL_RESET_VECTOR_VADDR - \
65 #define RESET_VECTOR_VADDR XC_VADDR(RESET_VECTOR_VECOFS)
67 #define RESET_VECTOR1_VECOFS (XCHAL_RESET_VECTOR1_VADDR - \
69 #define RESET_VECTOR1_VADDR XC_VADDR(RESET_VECTOR1_VECOFS)
71 #if defined(XCHAL_HAVE_VECBASE) && XCHAL_HAVE_VECBASE
73 #define USER_VECTOR_VADDR XC_VADDR(XCHAL_USER_VECOFS)
74 #define KERNEL_VECTOR_VADDR XC_VADDR(XCHAL_KERNEL_VECOFS)
75 #define DOUBLEEXC_VECTOR_VADDR XC_VADDR(XCHAL_DOUBLEEXC_VECOFS)
76 #define WINDOW_VECTORS_VADDR XC_VADDR(XCHAL_WINDOW_OF4_VECOFS)
77 #define INTLEVEL2_VECTOR_VADDR XC_VADDR(XCHAL_INTLEVEL2_VECOFS)
78 #define INTLEVEL3_VECTOR_VADDR XC_VADDR(XCHAL_INTLEVEL3_VECOFS)
79 #define INTLEVEL4_VECTOR_VADDR XC_VADDR(XCHAL_INTLEVEL4_VECOFS)
80 #define INTLEVEL5_VECTOR_VADDR XC_VADDR(XCHAL_INTLEVEL5_VECOFS)
81 #define INTLEVEL6_VECTOR_VADDR XC_VADDR(XCHAL_INTLEVEL6_VECOFS)
83 #define DEBUG_VECTOR_VADDR XC_VADDR(XCHAL_DEBUG_VECOFS)
85 #define NMI_VECTOR_VADDR XC_VADDR(XCHAL_NMI_VECOFS)
87 #define INTLEVEL7_VECTOR_VADDR XC_VADDR(XCHAL_INTLEVEL7_VECOFS)
90 * These XCHAL_* #defines from varian/core.h
91 * are not valid to use with V3 MMU. Non-XCHAL
92 * constants are defined above and should be used.
94 #undef XCHAL_VECBASE_RESET_VADDR
95 #undef XCHAL_RESET_VECTOR0_VADDR
96 #undef XCHAL_USER_VECTOR_VADDR
97 #undef XCHAL_KERNEL_VECTOR_VADDR
98 #undef XCHAL_DOUBLEEXC_VECTOR_VADDR
99 #undef XCHAL_WINDOW_VECTORS_VADDR
100 #undef XCHAL_INTLEVEL2_VECTOR_VADDR
101 #undef XCHAL_INTLEVEL3_VECTOR_VADDR
102 #undef XCHAL_INTLEVEL4_VECTOR_VADDR
103 #undef XCHAL_INTLEVEL5_VECTOR_VADDR
104 #undef XCHAL_INTLEVEL6_VECTOR_VADDR
105 #undef XCHAL_DEBUG_VECTOR_VADDR
106 #undef XCHAL_NMI_VECTOR_VADDR
107 #undef XCHAL_INTLEVEL7_VECTOR_VADDR
111 #define USER_VECTOR_VADDR XCHAL_USER_VECTOR_VADDR
112 #define KERNEL_VECTOR_VADDR XCHAL_KERNEL_VECTOR_VADDR
113 #define DOUBLEEXC_VECTOR_VADDR XCHAL_DOUBLEEXC_VECTOR_VADDR
114 #define WINDOW_VECTORS_VADDR XCHAL_WINDOW_VECTORS_VADDR
115 #define INTLEVEL2_VECTOR_VADDR XCHAL_INTLEVEL2_VECTOR_VADDR
116 #define INTLEVEL3_VECTOR_VADDR XCHAL_INTLEVEL3_VECTOR_VADDR
117 #define INTLEVEL4_VECTOR_VADDR XCHAL_INTLEVEL4_VECTOR_VADDR
118 #define INTLEVEL5_VECTOR_VADDR XCHAL_INTLEVEL5_VECTOR_VADDR
119 #define INTLEVEL6_VECTOR_VADDR XCHAL_INTLEVEL6_VECTOR_VADDR
120 #define DEBUG_VECTOR_VADDR XCHAL_DEBUG_VECTOR_VADDR
124 #endif /* _XTENSA_VECTORS_H */