|
@@ -48,61 +48,42 @@ static inline unsigned long xtensa_get_kio_paddr(void)
|
|
|
|
|
|
#if defined(CONFIG_MMU)
|
|
#if defined(CONFIG_MMU)
|
|
|
|
|
|
-/* Will Become VECBASE */
|
|
|
|
-#define VIRTUAL_MEMORY_ADDRESS XCHAL_KSEG_CACHED_VADDR
|
|
|
|
-
|
|
|
|
|
|
+#if XCHAL_HAVE_PTP_MMU && XCHAL_HAVE_SPANNING_WAY
|
|
/* Image Virtual Start Address */
|
|
/* Image Virtual Start Address */
|
|
-#define KERNELOFFSET (XCHAL_KSEG_CACHED_VADDR + 0x3000)
|
|
|
|
-
|
|
|
|
-#if defined(XCHAL_HAVE_PTP_MMU) && XCHAL_HAVE_PTP_MMU && XCHAL_HAVE_SPANNING_WAY
|
|
|
|
- /* MMU v3 - XCHAL_HAVE_PTP_MMU == 1 */
|
|
|
|
- #define LOAD_MEMORY_ADDRESS 0x00003000
|
|
|
|
|
|
+#define KERNELOFFSET (XCHAL_KSEG_CACHED_VADDR + \
|
|
|
|
+ CONFIG_KERNEL_LOAD_ADDRESS - \
|
|
|
|
+ XCHAL_KSEG_PADDR)
|
|
#else
|
|
#else
|
|
- /* MMU V2 - XCHAL_HAVE_PTP_MMU == 0 */
|
|
|
|
- #define LOAD_MEMORY_ADDRESS 0xD0003000
|
|
|
|
|
|
+#define KERNELOFFSET CONFIG_KERNEL_LOAD_ADDRESS
|
|
#endif
|
|
#endif
|
|
|
|
|
|
-#define RESET_VECTOR1_VADDR (VIRTUAL_MEMORY_ADDRESS + \
|
|
|
|
- XCHAL_RESET_VECTOR1_PADDR)
|
|
|
|
-
|
|
|
|
#else /* !defined(CONFIG_MMU) */
|
|
#else /* !defined(CONFIG_MMU) */
|
|
/* MMU Not being used - Virtual == Physical */
|
|
/* MMU Not being used - Virtual == Physical */
|
|
|
|
|
|
- /* VECBASE */
|
|
|
|
- #define VIRTUAL_MEMORY_ADDRESS (PLATFORM_DEFAULT_MEM_START + 0x2000)
|
|
|
|
|
|
+/* Location of the start of the kernel text, _start */
|
|
|
|
+#define KERNELOFFSET CONFIG_KERNEL_LOAD_ADDRESS
|
|
|
|
|
|
- /* Location of the start of the kernel text, _start */
|
|
|
|
- #define KERNELOFFSET (PLATFORM_DEFAULT_MEM_START + 0x3000)
|
|
|
|
-
|
|
|
|
- /* Loaded just above possibly live vectors */
|
|
|
|
- #define LOAD_MEMORY_ADDRESS (PLATFORM_DEFAULT_MEM_START + 0x3000)
|
|
|
|
-
|
|
|
|
-#define RESET_VECTOR1_VADDR (XCHAL_RESET_VECTOR1_VADDR)
|
|
|
|
|
|
|
|
#endif /* CONFIG_MMU */
|
|
#endif /* CONFIG_MMU */
|
|
|
|
|
|
-#define XC_VADDR(offset) (VIRTUAL_MEMORY_ADDRESS + offset)
|
|
|
|
-
|
|
|
|
-/* Used to set VECBASE register */
|
|
|
|
-#define VECBASE_RESET_VADDR VIRTUAL_MEMORY_ADDRESS
|
|
|
|
|
|
+#define RESET_VECTOR1_VADDR (XCHAL_RESET_VECTOR1_VADDR)
|
|
|
|
+#define VECBASE_VADDR (KERNELOFFSET - CONFIG_VECTORS_OFFSET)
|
|
|
|
|
|
#if defined(XCHAL_HAVE_VECBASE) && XCHAL_HAVE_VECBASE
|
|
#if defined(XCHAL_HAVE_VECBASE) && XCHAL_HAVE_VECBASE
|
|
|
|
|
|
-#define USER_VECTOR_VADDR XC_VADDR(XCHAL_USER_VECOFS)
|
|
|
|
-#define KERNEL_VECTOR_VADDR XC_VADDR(XCHAL_KERNEL_VECOFS)
|
|
|
|
-#define DOUBLEEXC_VECTOR_VADDR XC_VADDR(XCHAL_DOUBLEEXC_VECOFS)
|
|
|
|
-#define WINDOW_VECTORS_VADDR XC_VADDR(XCHAL_WINDOW_OF4_VECOFS)
|
|
|
|
-#define INTLEVEL2_VECTOR_VADDR XC_VADDR(XCHAL_INTLEVEL2_VECOFS)
|
|
|
|
-#define INTLEVEL3_VECTOR_VADDR XC_VADDR(XCHAL_INTLEVEL3_VECOFS)
|
|
|
|
-#define INTLEVEL4_VECTOR_VADDR XC_VADDR(XCHAL_INTLEVEL4_VECOFS)
|
|
|
|
-#define INTLEVEL5_VECTOR_VADDR XC_VADDR(XCHAL_INTLEVEL5_VECOFS)
|
|
|
|
-#define INTLEVEL6_VECTOR_VADDR XC_VADDR(XCHAL_INTLEVEL6_VECOFS)
|
|
|
|
-
|
|
|
|
-#define DEBUG_VECTOR_VADDR XC_VADDR(XCHAL_DEBUG_VECOFS)
|
|
|
|
|
|
+#define VECTOR_VADDR(offset) (VECBASE_VADDR + offset)
|
|
|
|
|
|
-#define NMI_VECTOR_VADDR XC_VADDR(XCHAL_NMI_VECOFS)
|
|
|
|
-
|
|
|
|
-#define INTLEVEL7_VECTOR_VADDR XC_VADDR(XCHAL_INTLEVEL7_VECOFS)
|
|
|
|
|
|
+#define USER_VECTOR_VADDR VECTOR_VADDR(XCHAL_USER_VECOFS)
|
|
|
|
+#define KERNEL_VECTOR_VADDR VECTOR_VADDR(XCHAL_KERNEL_VECOFS)
|
|
|
|
+#define DOUBLEEXC_VECTOR_VADDR VECTOR_VADDR(XCHAL_DOUBLEEXC_VECOFS)
|
|
|
|
+#define WINDOW_VECTORS_VADDR VECTOR_VADDR(XCHAL_WINDOW_OF4_VECOFS)
|
|
|
|
+#define INTLEVEL2_VECTOR_VADDR VECTOR_VADDR(XCHAL_INTLEVEL2_VECOFS)
|
|
|
|
+#define INTLEVEL3_VECTOR_VADDR VECTOR_VADDR(XCHAL_INTLEVEL3_VECOFS)
|
|
|
|
+#define INTLEVEL4_VECTOR_VADDR VECTOR_VADDR(XCHAL_INTLEVEL4_VECOFS)
|
|
|
|
+#define INTLEVEL5_VECTOR_VADDR VECTOR_VADDR(XCHAL_INTLEVEL5_VECOFS)
|
|
|
|
+#define INTLEVEL6_VECTOR_VADDR VECTOR_VADDR(XCHAL_INTLEVEL6_VECOFS)
|
|
|
|
+#define INTLEVEL7_VECTOR_VADDR VECTOR_VADDR(XCHAL_INTLEVEL7_VECOFS)
|
|
|
|
+#define DEBUG_VECTOR_VADDR VECTOR_VADDR(XCHAL_DEBUG_VECOFS)
|
|
|
|
|
|
/*
|
|
/*
|
|
* These XCHAL_* #defines from variant/core.h
|


|
* These XCHAL_* #defines from variant/core.h
|
|
@@ -110,7 +91,6 @@ static inline unsigned long xtensa_get_kio_paddr(void)
|
|
* constants are defined above and should be used.
|
|
* constants are defined above and should be used.
|
|
*/
|
|
*/
|
|
#undef XCHAL_VECBASE_RESET_VADDR
|
|
#undef XCHAL_VECBASE_RESET_VADDR
|
|
-#undef XCHAL_RESET_VECTOR0_VADDR
|
|
|
|
#undef XCHAL_USER_VECTOR_VADDR
|
|
#undef XCHAL_USER_VECTOR_VADDR
|
|
#undef XCHAL_KERNEL_VECTOR_VADDR
|
|
#undef XCHAL_KERNEL_VECTOR_VADDR
|
|
#undef XCHAL_DOUBLEEXC_VECTOR_VADDR
|
|
#undef XCHAL_DOUBLEEXC_VECTOR_VADDR
|
|
@@ -120,9 +100,8 @@ static inline unsigned long xtensa_get_kio_paddr(void)
|
|
#undef XCHAL_INTLEVEL4_VECTOR_VADDR
|
|
#undef XCHAL_INTLEVEL4_VECTOR_VADDR
|
|
#undef XCHAL_INTLEVEL5_VECTOR_VADDR
|
|
#undef XCHAL_INTLEVEL5_VECTOR_VADDR
|
|
#undef XCHAL_INTLEVEL6_VECTOR_VADDR
|
|
#undef XCHAL_INTLEVEL6_VECTOR_VADDR
|
|
-#undef XCHAL_DEBUG_VECTOR_VADDR
|
|
|
|
-#undef XCHAL_NMI_VECTOR_VADDR
|
|
|
|
#undef XCHAL_INTLEVEL7_VECTOR_VADDR
|
|
#undef XCHAL_INTLEVEL7_VECTOR_VADDR
|
|
|
|
+#undef XCHAL_DEBUG_VECTOR_VADDR
|
|
|
|
|
|
#else
|
|
#else
|
|
|
|
|
|
@@ -135,6 +114,7 @@ static inline unsigned long xtensa_get_kio_paddr(void)
|
|
#define INTLEVEL4_VECTOR_VADDR XCHAL_INTLEVEL4_VECTOR_VADDR
|
|
#define INTLEVEL4_VECTOR_VADDR XCHAL_INTLEVEL4_VECTOR_VADDR
|
|
#define INTLEVEL5_VECTOR_VADDR XCHAL_INTLEVEL5_VECTOR_VADDR
|
|
#define INTLEVEL5_VECTOR_VADDR XCHAL_INTLEVEL5_VECTOR_VADDR
|
|
#define INTLEVEL6_VECTOR_VADDR XCHAL_INTLEVEL6_VECTOR_VADDR
|
|
#define INTLEVEL6_VECTOR_VADDR XCHAL_INTLEVEL6_VECTOR_VADDR
|
|
|
|
+#define INTLEVEL7_VECTOR_VADDR XCHAL_INTLEVEL7_VECTOR_VADDR
|
|
#define DEBUG_VECTOR_VADDR XCHAL_DEBUG_VECTOR_VADDR
|
|
#define DEBUG_VECTOR_VADDR XCHAL_DEBUG_VECTOR_VADDR
|
|
|
|
|
|
#endif
|
|
#endif
|