author		Max Filippov <jcmvbkbc@gmail.com>	2016-04-13 05:20:02 +0300
committer	Max Filippov <jcmvbkbc@gmail.com>	2016-07-24 06:33:58 +0300
commit		a9f2fc628e3a26a829fd79aff74eb49839d1e74b (patch)
tree		7dad4e57c917757d39bb3470720392ac2b92b335 /arch/xtensa/include/asm/initialize_mmu.h
parent		d39af90265feb40ec198c4ca8268724645b4b50e (diff)
xtensa: cleanup MMU setup and kernel layout macros
Make kernel load address explicit, independent of the selected MMU
configuration and configurable from Kconfig. Do not restrict it to the
first 512MB of the physical address space.

Cleanup kernel memory layout macros:

- rename VECBASE_RESET_VADDR to VECBASE_VADDR, XC_VADDR to VECTOR_VADDR;
- drop VIRTUAL_MEMORY_ADDRESS and LOAD_MEMORY_ADDRESS;
- introduce PHYS_OFFSET and use it in __va and __pa definitions;
- synchronize MMU/noMMU vectors, drop unused NMI vector;
- replace hardcoded vectors offset of 0x3000 with Kconfig symbol.

Signed-off-by: Max Filippov <jcmvbkbc@gmail.com>
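The "__va and __pa" item refers to the linear-map helpers in asm/page.h rather
than to the header shown below; a minimal sketch of what definitions built on
PHYS_OFFSET look like (names follow the commit message, the exact hunk is not
reproduced here):

	/* Sketch only: linear-map conversion expressed through PHYS_OFFSET.
	 * PAGE_OFFSET is the virtual start of the linear map, PHYS_OFFSET the
	 * physical start of RAM; the authoritative definitions live in
	 * arch/xtensa/include/asm/page.h. */
	#define __pa(vaddr)	((unsigned long) (vaddr) - PAGE_OFFSET + PHYS_OFFSET)
	#define __va(paddr)	((void *) ((unsigned long) (paddr) - PHYS_OFFSET + PAGE_OFFSET))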
Diffstat (limited to 'arch/xtensa/include/asm/initialize_mmu.h')
-rw-r--r--	arch/xtensa/include/asm/initialize_mmu.h	21
1 file changed, 12 insertions(+), 9 deletions(-)
diff --git a/arch/xtensa/include/asm/initialize_mmu.h b/arch/xtensa/include/asm/initialize_mmu.h
index 46d4bef61655..42410f253597 100644
--- a/arch/xtensa/include/asm/initialize_mmu.h
+++ b/arch/xtensa/include/asm/initialize_mmu.h
@@ -77,13 +77,16 @@
.align 4
1: movi a2, 0x10000000
- movi a3, 0x18000000
- add a2, a2, a0
-9: bgeu a2, a3, 9b /* PC is out of the expected range */
+
+#if CONFIG_KERNEL_LOAD_ADDRESS < 0x40000000ul
+#define TEMP_MAPPING_VADDR 0x40000000
+#else
+#define TEMP_MAPPING_VADDR 0x00000000
+#endif
/* Step 1: invalidate mapping at 0x40000000..0x5FFFFFFF. */
- movi a2, 0x40000000 | XCHAL_SPANNING_WAY
+ movi a2, TEMP_MAPPING_VADDR | XCHAL_SPANNING_WAY
idtlb a2
iitlb a2
isync
@@ -95,14 +98,14 @@
srli a3, a0, 27
slli a3, a3, 27
addi a3, a3, CA_BYPASS
- addi a7, a2, -1
+ addi a7, a2, 5 - XCHAL_SPANNING_WAY
wdtlb a3, a7
witlb a3, a7
isync
slli a4, a0, 5
srli a4, a4, 5
- addi a5, a2, -6
+ addi a5, a2, -XCHAL_SPANNING_WAY
add a4, a4, a5
jx a4
@@ -145,19 +148,19 @@
witlb a4, a5
#endif
- movi a5, XCHAL_KIO_CACHED_VADDR + 6
+ movi a5, XCHAL_KIO_CACHED_VADDR + XCHAL_KIO_TLB_WAY
movi a4, XCHAL_KIO_DEFAULT_PADDR + CA_WRITEBACK
wdtlb a4, a5
witlb a4, a5
- movi a5, XCHAL_KIO_BYPASS_VADDR + 6
+ movi a5, XCHAL_KIO_BYPASS_VADDR + XCHAL_KIO_TLB_WAY
movi a4, XCHAL_KIO_DEFAULT_PADDR + CA_BYPASS
wdtlb a4, a5
witlb a4, a5
isync
- /* Jump to self, using MMU v2 mappings. */
+ /* Jump to self, using final mappings. */
movi a4, 1f
jx a4
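A note on the way-index constants touched above: the TLB instructions in this
file take a virtual address with the TLB way index in its low bits, and a2
already holds TEMP_MAPPING_VADDR | XCHAL_SPANNING_WAY, so the new offsets
recover the same operands without hardcoding the spanning way's number. A
rough C model of that arithmetic follows; both macro values are assumptions
chosen for illustration, not taken from the patch.

	#define TEMP_MAPPING_VADDR	0x40000000UL	/* assumed: load address below 0x40000000 */
	#define XCHAL_SPANNING_WAY	6UL		/* assumed: core-specific way index */

	static void temp_mapping_operands(unsigned long *way5_entry,
					  unsigned long *vaddr_base)
	{
		unsigned long a2 = TEMP_MAPPING_VADDR | XCHAL_SPANNING_WAY;

		/* addi a7, a2, 5 - XCHAL_SPANNING_WAY: entry in way 5 */
		*way5_entry = a2 + 5 - XCHAL_SPANNING_WAY;
		/* addi a5, a2, -XCHAL_SPANNING_WAY: plain TEMP_MAPPING_VADDR,
		 * used as the base when computing the jump target */
		*vaddr_base = a2 - XCHAL_SPANNING_WAY;
	}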