/* Source: arch/xtensa/include/asm/initialize_mmu.h (Linux kernel) */
/*
 * arch/xtensa/include/asm/initialize_mmu.h
 *
 * Initializes MMU:
 *
 * For the new V3 MMU we remap the TLB from virtual == physical
 * to the standard Linux mapping used in earlier MMU's.
 *
 * For the MMU we also support a new configuration register that
 * specifies how the S32C1I instruction operates with the cache
 * controller.
 *
 * This file is subject to the terms and conditions of the GNU General
 * Public License.  See the file "COPYING" in the main directory of
 * this archive for more details.
 *
 * Copyright (C) 2008 - 2012 Tensilica, Inc.
 *
 *   Marc Gauthier <[email protected]>
 *   Pete Delaney <[email protected]>
 */

#ifndef _XTENSA_INITIALIZE_MMU_H
#define _XTENSA_INITIALIZE_MMU_H

#include <linux/init.h>
#include <linux/pgtable.h>
#include <asm/vectors.h>

/* TLB attribute bits for a mapping: bypass (uncached) vs. writeback,
 * both with HW write and execute permission on a full PTP MMU.
 */
#if XCHAL_HAVE_PTP_MMU
#define CA_BYPASS	(_PAGE_CA_BYPASS | _PAGE_HW_WRITE | _PAGE_HW_EXEC)
#define CA_WRITEBACK	(_PAGE_CA_WB | _PAGE_HW_WRITE | _PAGE_HW_EXEC)
#else
#define CA_WRITEBACK	(0x4)
#endif

#ifdef __ASSEMBLER__

#define XTENSA_HWVERSION_RC_2009_0 230000

/*
 * initialize_mmu: take an MMUv3 core from its reset-time
 * virtual == physical mapping to the standard Linux kernel layout,
 * then program the static KSEG/KIO TLB ways.  Also initializes the
 * ATOMCTL register on cores with S32C1I.
 *
 * Runs from the identity mapping; a0 (set by _call0 below) is used to
 * find the physical address of this code.  Clobbers a0, a2-a7 and
 * sets a1 = 0; must run with interrupts/exceptions not yet relying
 * on the final mapping.
 */
	.macro	initialize_mmu

#if XCHAL_HAVE_S32C1I && (XCHAL_HW_MIN_VERSION >= XTENSA_HWVERSION_RC_2009_0)
/*
 * We Have Atomic Operation Control (ATOMCTL) Register; Initialize it.
 * For details see Documentation/arch/xtensa/atomctl.rst
 */
#if XCHAL_DCACHE_IS_COHERENT
	movi	a3, 0x25	/* For SMP/MX -- internal for writeback,
				 * RCW otherwise
				 */
#else
	movi	a3, 0x29	/* non-MX -- Most cores use Std Memory
				 * Controlers which usually can't use RCW
				 */
#endif
	wsr	a3, atomctl
#endif  /* XCHAL_HAVE_S32C1I &&
	 * (XCHAL_HW_MIN_VERSION >= XTENSA_HWVERSION_RC_2009_0)
	 */

#if defined(CONFIG_MMU) && XCHAL_HAVE_PTP_MMU && XCHAL_HAVE_SPANNING_WAY
/*
 * Have MMU v3
 */

#if !XCHAL_HAVE_VECBASE
# error "MMU v3 requires reloc vectors"
#endif

	movi	a1, 0
	_call0	1f		/* a0 = physical address of label 1 */
	_j	2f

	.align	4
1:

/* Place the temporary mapping outside the range the kernel loads at,
 * so it cannot collide with the code being remapped.
 */
#if CONFIG_KERNEL_LOAD_ADDRESS < 0x40000000ul
#define TEMP_MAPPING_VADDR 0x40000000
#else
#define TEMP_MAPPING_VADDR 0x00000000
#endif

	/* Step 1: invalidate mapping at 0x40000000..0x5FFFFFFF. */

	movi	a2, TEMP_MAPPING_VADDR | XCHAL_SPANNING_WAY
	idtlb	a2
	iitlb	a2
	isync

	/* Step 2: map 0x40000000..0x47FFFFFF to paddr containing this code
	 * and jump to the new mapping.
	 */

	srli	a3, a0, 27	/* a3 = 128MB-aligned paddr of this code */
	slli	a3, a3, 27
	addi	a3, a3, CA_BYPASS
	addi	a7, a2, 5 - XCHAL_SPANNING_WAY
	wdtlb	a3, a7		/* a7 = temp mapping; removed in step 5 */
	witlb	a3, a7
	isync

	slli	a4, a0, 5	/* a4 = a0 offset within the 128MB region */
	srli	a4, a4, 5
	addi	a5, a2, -XCHAL_SPANNING_WAY
	add	a4, a4, a5
	jx	a4		/* continue at same code via temp mapping */

	/* Step 3: unmap everything other than current area.
	 *	   Start at 0x60000000, wrap around, and end with 0x20000000
	 */
2:	movi	a4, 0x20000000
	add	a5, a2, a4
3:	idtlb	a5
	iitlb	a5
	add	a5, a5, a4
	bne	a5, a2, 3b

	/* Step 4: Setup MMU with the requested static mappings. */

	movi	a6, 0x01000000
	wsr	a6, ITLBCFG
	wsr	a6, DTLBCFG
	isync

	movi	a5, XCHAL_KSEG_CACHED_VADDR + XCHAL_KSEG_TLB_WAY
	movi	a4, XCHAL_KSEG_PADDR + CA_WRITEBACK
	wdtlb	a4, a5
	witlb	a4, a5

	movi	a5, XCHAL_KSEG_BYPASS_VADDR + XCHAL_KSEG_TLB_WAY
	movi	a4, XCHAL_KSEG_PADDR + CA_BYPASS
	wdtlb	a4, a5
	witlb	a4, a5

#ifdef CONFIG_XTENSA_KSEG_512M
	/* Second 256MB half of the 512MB KSEG, cached and bypass views. */
	movi	a5, XCHAL_KSEG_CACHED_VADDR + 0x10000000 + XCHAL_KSEG_TLB_WAY
	movi	a4, XCHAL_KSEG_PADDR + 0x10000000 + CA_WRITEBACK
	wdtlb	a4, a5
	witlb	a4, a5

	movi	a5, XCHAL_KSEG_BYPASS_VADDR + 0x10000000 + XCHAL_KSEG_TLB_WAY
	movi	a4, XCHAL_KSEG_PADDR + 0x10000000 + CA_BYPASS
	wdtlb	a4, a5
	witlb	a4, a5
#endif

	movi	a5, XCHAL_KIO_CACHED_VADDR + XCHAL_KIO_TLB_WAY
	movi	a4, XCHAL_KIO_DEFAULT_PADDR + CA_WRITEBACK
	wdtlb	a4, a5
	witlb	a4, a5

	movi	a5, XCHAL_KIO_BYPASS_VADDR + XCHAL_KIO_TLB_WAY
	movi	a4, XCHAL_KIO_DEFAULT_PADDR + CA_BYPASS
	wdtlb	a4, a5
	witlb	a4, a5

	isync

	/* Jump to self, using final mappings. */
	movi	a4, 1f
	jx	a4

1:
	/* Step 5: remove temporary mapping. */
	idtlb	a7
	iitlb	a7
	isync

	movi	a0, 0
	wsr	a0, ptevaddr
	rsync

#endif /* defined(CONFIG_MMU) && XCHAL_HAVE_PTP_MMU &&
	  XCHAL_HAVE_SPANNING_WAY */

	.endm

/*
 * initialize_cacheattr: on noMMU configurations, program the cache
 * attribute of each 512MB region from CONFIG_MEMMAP_CACHEATTR (one
 * nibble per region, highest nibble first), using either the MPU or
 * the spanning TLB way.  Clobbers a3-a6, a8 (MPU variant also
 * a9-a11).
 */
	.macro	initialize_cacheattr

#if !defined(CONFIG_MMU) && (XCHAL_HAVE_TLBS || XCHAL_HAVE_MPU)
#if CONFIG_MEMMAP_CACHEATTR == 0x22222222 && XCHAL_HAVE_PTP_MMU
#error Default MEMMAP_CACHEATTR of 0x22222222 does not work with full MMU.
#endif

#if XCHAL_HAVE_MPU
	/* Map each cacheattr nibble to an MPU memory-type word. */
	__REFCONST
	.align	4
.Lattribute_table:
	.long 0x000000, 0x1fff00, 0x1ddf00, 0x1eef00
	.long 0x006600, 0x000000, 0x000000, 0x000000
	.long 0x000000, 0x000000, 0x000000, 0x000000
	.long 0x000000, 0x000000, 0x000000, 0x000000
	.previous

	movi	a3, .Lattribute_table
	movi	a4, CONFIG_MEMMAP_CACHEATTR
	movi	a5, 1
	movi	a6, XCHAL_MPU_ENTRIES
	movi	a10, 0x20000000	/* 512MB region step */
	movi	a11, -1		/* previous nibble; -1 = none yet */
1:
	sub	a5, a5, a10
	extui	a8, a4, 28, 4	/* a8 = current (topmost) nibble */
	beq	a8, a11, 2f	/* reuse entry if same as previous */
	addi	a6, a6, -1
	mov	a11, a8
2:
	addx4	a9, a8, a3
	l32i	a9, a9, 0
	or	a9, a9, a6	/* combine memory type with entry index */
	wptlb	a9, a5
	slli	a4, a4, 4	/* advance to next nibble */
	bgeu	a5, a10, 1b

#else
	/* Rewrite only the attribute bits of each spanning-way entry. */
	movi	a5, XCHAL_SPANNING_WAY
	movi	a6, ~_PAGE_ATTRIB_MASK
	movi	a4, CONFIG_MEMMAP_CACHEATTR
	movi	a8, 0x20000000	/* 512MB region step */
1:
	rdtlb1	a3, a5
	xor	a3, a3, a4	/* splice low nibble of a4 into a3: */
	and	a3, a3, a6	/* keep non-attribute bits of entry,  */
	xor	a3, a3, a4	/* take attribute bits from a4        */
	wdtlb	a3, a5
	ritlb1	a3, a5
	xor	a3, a3, a4
	and	a3, a3, a6
	xor	a3, a3, a4
	witlb	a3, a5

	add	a5, a5, a8
	srli	a4, a4, 4	/* next region's attribute nibble */
	bgeu	a5, a8, 1b

	isync
#endif
#endif

	.endm

#endif /*__ASSEMBLER__*/

#endif /* _XTENSA_INITIALIZE_MMU_H */