/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * This file contains low-level functions for performing various
 * types of TLB invalidations on various processors with no hash
 * table.
 *
 * This file implements the following functions for all no-hash
 * processors. Some aren't implemented for some variants. Some
 * are inline in tlbflush.h
 *
 *	- tlbil_va
 *	- tlbil_pid
 *	- tlbil_all
 *	- tlbivax_bcast
 *
 * Code mostly moved over from misc_32.S
 *
 * Copyright (C) 1995-1996 Gary Thomas ([email protected])
 *
 * Partially rewritten by Cort Dougan ([email protected])
 * Paul Mackerras, Kumar Gala and Benjamin Herrenschmidt.
 */

#include <asm/reg.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/mmu.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/processor.h>
#include <asm/bug.h>
#include <asm/asm-compat.h>
#include <asm/feature-fixups.h>

#if defined(CONFIG_PPC_8xx)

/*
 * Nothing to do for 8xx, everything is inline
 */

#elif defined(CONFIG_44x) /* Includes 47x */

/*
 * 440 implementation uses tlbsx/we for tlbil_va and a full sweep
 * of the TLB for everything else.
 */
_GLOBAL(__tlbil_va)
	mfspr	r5,SPRN_MMUCR
	mfmsr	r10

	/*
	 * We write 16 bits of STID since 47x supports that much; we
	 * will never be passed out-of-bounds values on 440 (hopefully)
	 */
	rlwimi	r5,r4,0,16,31

	/* We have to run the search with interrupts disabled, otherwise
	 * an interrupt which causes a TLB miss can clobber the MMUCR
	 * between the mtspr and the tlbsx.
	 *
	 * Critical and Machine Check interrupts take care of saving
	 * and restoring MMUCR, so only normal interrupts have to be
	 * taken care of.
	 */
	wrteei	0
	mtspr	SPRN_MMUCR,r5
	tlbsx.	r6,0,r3
	bne	10f
	sync
#ifndef CONFIG_PPC_47x
	/* On 440 there are only 64 TLB entries, so r3 < 64, which means bit
	 * 22 is clear. Since 22 is the V bit in the TLB_PAGEID, loading this
	 * value will invalidate the TLB entry.
	 */
	tlbwe	r6,r6,PPC44x_TLB_PAGEID
#else
	oris	r7,r6,0x8000	/* specify way explicitly */
	clrrwi	r4,r3,12	/* get an EPN for the hashing with V = 0 */
	ori	r4,r4,PPC47x_TLBE_SIZE
	tlbwe	r4,r7,0		/* write it */
#endif /* !CONFIG_PPC_47x */
	isync
10:	wrtee	r10
	blr

_GLOBAL(_tlbil_all)
_GLOBAL(_tlbil_pid)
#ifndef CONFIG_PPC_47x
	li	r3,0
	sync

	/* Load high watermark */
	lis	r4,tlb_44x_hwater@ha
	lwz	r5,tlb_44x_hwater@l(r4)

1:	tlbwe	r3,r3,PPC44x_TLB_PAGEID
	addi	r3,r3,1
	cmpw	0,r3,r5
	ble	1b

	isync
	blr
#else
	/* 476 variant. There's no simple way to do this; hopefully we'll
	 * try to limit the amount of such full invalidates
	 */
	mfmsr	r11			/* Interrupts off */
	wrteei	0
	li	r3,-1			/* Current set */
	lis	r10,tlb_47x_boltmap@h
	ori	r10,r10,tlb_47x_boltmap@l
	lis	r7,0x8000		/* Specify way explicitly */

	b	9f			/* For each set */

1:	li	r9,4			/* Number of ways */
	li	r4,0			/* Current way */
	li	r6,0			/* Default entry value 0 */
	andi.	r0,r8,1			/* Check if way 0 is bolted */
	mtctr	r9			/* Load way counter */
	bne-	3f			/* Bolted, skip loading it */

2:	/* For each way */
	or	r5,r3,r4		/* Make way|index for tlbre */
	rlwimi	r5,r5,16,8,15		/* Copy index into position */
	tlbre	r6,r5,0			/* Read entry */
3:	addis	r4,r4,0x2000		/* Next way */
	andi.	r0,r6,PPC47x_TLB0_VALID	/* Valid entry ? */
	beq	4f			/* Nope, skip it */
	rlwimi	r7,r5,0,1,2		/* Insert way number */
	rlwinm	r6,r6,0,21,19		/* Clear V */
	tlbwe	r6,r7,0			/* Write it */
4:	bdnz	2b			/* Loop for each way */
	srwi	r8,r8,1			/* Next boltmap bit */
9:	cmpwi	cr1,r3,255		/* Last set done ? */
	addi	r3,r3,1			/* Next set */
	beq	cr1,1f			/* End of loop */
	andi.	r0,r3,0x1f		/* Need to load a new boltmap word ? */
	bne	1b			/* No, loop */
	lwz	r8,0(r10)		/* Load boltmap entry */
	addi	r10,r10,4		/* Next word */
	b	1b			/* Then loop */
1:	isync				/* Sync shadows */
	wrtee	r11
	blr
#endif /* !CONFIG_PPC_47x */

#ifdef CONFIG_PPC_47x

/*
 * _tlbivax_bcast is only on 47x. We don't bother doing a runtime
 * check though; it will blow up soon enough if we mistakenly try
 * to use it on a 440.
 */
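/*
 * r3 is the virtual address to invalidate and r4 is expected to hold
 * the PID, which gets merged into the STID field of MMUCR below.
 * Only these two registers are read here; any further arguments a
 * caller may pass are not used.
 */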
_GLOBAL(_tlbivax_bcast)
	mfspr	r5,SPRN_MMUCR
	mfmsr	r10
	rlwimi	r5,r4,0,16,31
	wrteei	0
	mtspr	SPRN_MMUCR,r5
	isync
	PPC_TLBIVAX(0, R3)
	isync
	mbar
	tlbsync
BEGIN_FTR_SECTION
	b	1f
END_FTR_SECTION_IFSET(CPU_FTR_476_DD2)
	sync
	wrtee	r10
	blr
/*
 * DD2 HW could hang if an instruction fetch happens before msync completes.
 * Touch enough instruction cache lines to ensure cache hits
 */
1:	mflr	r9
	bcl	20,31,$+4
2:	mflr	r6
	li	r7,32
	PPC_ICBT(0,R6,R7)		/* touch next cache line */
	add	r6,r6,r7
	PPC_ICBT(0,R6,R7)		/* touch next cache line */
	add	r6,r6,r7
	PPC_ICBT(0,R6,R7)		/* touch next cache line */
	sync
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	mtlr	r9
	wrtee	r10
	blr
#endif /* CONFIG_PPC_47x */

#elif defined(CONFIG_PPC_85xx)
/*
 * FSL BookE implementations.
 *
 * Since feature sections use _SECTION_ELSE, we need to have the
 * larger code path before the _SECTION_ELSE.
 */

/*
 * Flush MMU TLB on the local processor
 */
_GLOBAL(_tlbil_all)
BEGIN_MMU_FTR_SECTION
	li	r3,(MMUCSR0_TLBFI)@l
	mtspr	SPRN_MMUCSR0, r3
1:
	mfspr	r3,SPRN_MMUCSR0
	andi.	r3,r3,MMUCSR0_TLBFI@l
	bne	1b
MMU_FTR_SECTION_ELSE
	PPC_TLBILX_ALL(0,R0)
ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX)
	msync
	isync
	blr

_GLOBAL(_tlbil_pid)
BEGIN_MMU_FTR_SECTION
	slwi	r3,r3,16
	mfmsr	r10
	wrteei	0
	mfspr	r4,SPRN_MAS6	/* save MAS6 */
	mtspr	SPRN_MAS6,r3
	PPC_TLBILX_PID(0,R0)
	mtspr	SPRN_MAS6,r4	/* restore MAS6 */
	wrtee	r10
MMU_FTR_SECTION_ELSE
	li	r3,(MMUCSR0_TLBFI)@l
	mtspr	SPRN_MMUCSR0, r3
1:
	mfspr	r3,SPRN_MMUCSR0
	andi.	r3,r3,MMUCSR0_TLBFI@l
	bne	1b
ALT_MMU_FTR_SECTION_END_IFSET(MMU_FTR_USE_TLBILX)
	msync
	isync
	blr

/*
 * Flush MMU TLB for a particular address, but only on the local processor
 * (no broadcast)
 */
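/*
 * r3 = virtual address, r4 = PID (shifted into MAS6[SPID]); a 4K page
 * size is assumed.  On parts without tlbilx the entry is looked up with
 * tlbsx and only written back (with the valid bit cleared) when a
 * matching valid entry is found.
 */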
_GLOBAL(__tlbil_va)
	mfmsr	r10
	wrteei	0
	slwi	r4,r4,16
	ori	r4,r4,(MAS6_ISIZE(BOOK3E_PAGESZ_4K))@l
	mtspr	SPRN_MAS6,r4		/* assume AS=0 for now */
BEGIN_MMU_FTR_SECTION
	tlbsx	0,r3
	mfspr	r4,SPRN_MAS1		/* check valid */
	andis.	r3,r4,MAS1_VALID@h
	beq	1f
	rlwinm	r4,r4,0,1,31
	mtspr	SPRN_MAS1,r4
	tlbwe
MMU_FTR_SECTION_ELSE
	PPC_TLBILX_VA(0,R3)
ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX)
	msync
	isync
1:	wrtee	r10
	blr
#elif defined(CONFIG_PPC_BOOK3E_64)
/*
 * New Book3E (>= 2.06) implementation
 *
 * Note: We may be able to get away without the interrupt masking stuff
 * if we save/restore MAS6 on exceptions that might modify it
 */
_GLOBAL(_tlbil_pid)
	slwi	r4,r3,MAS6_SPID_SHIFT
	mfmsr	r10
	wrteei	0
	mtspr	SPRN_MAS6,r4
	PPC_TLBILX_PID(0,R0)
	wrtee	r10
	msync
	isync
	blr

_GLOBAL(_tlbil_pid_noind)
	slwi	r4,r3,MAS6_SPID_SHIFT
	mfmsr	r10
	ori	r4,r4,MAS6_SIND
	wrteei	0
	mtspr	SPRN_MAS6,r4
	PPC_TLBILX_PID(0,R0)
	wrtee	r10
	msync
	isync
	blr

_GLOBAL(_tlbil_all)
	PPC_TLBILX_ALL(0,R0)
	msync
	isync
	blr

_GLOBAL(_tlbil_va)
	mfmsr	r10
	wrteei	0
	cmpwi	cr0,r6,0
	slwi	r4,r4,MAS6_SPID_SHIFT
	rlwimi	r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK
	beq	1f
	rlwimi	r4,r6,MAS6_SIND_SHIFT,MAS6_SIND
1:	mtspr	SPRN_MAS6,r4		/* assume AS=0 for now */
	PPC_TLBILX_VA(0,R3)
	msync
	isync
	wrtee	r10
	blr

_GLOBAL(_tlbivax_bcast)
	mfmsr	r10
	wrteei	0
	cmpwi	cr0,r6,0
	slwi	r4,r4,MAS6_SPID_SHIFT
	rlwimi	r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK
	beq	1f
	rlwimi	r4,r6,MAS6_SIND_SHIFT,MAS6_SIND
1:	mtspr	SPRN_MAS6,r4		/* assume AS=0 for now */
	PPC_TLBIVAX(0,R3)
	mbar
	tlbsync
	sync
	wrtee	r10
	blr
#else
#error Unsupported processor type!
#endif

#if defined(CONFIG_PPC_E500)
/*
 * extern void loadcam_entry(unsigned int index)
 *
 * Load the TLBCAM[index] entry into the L2 CAM MMU
 * Must preserve r7, r8, r9, r10, r11, r12
 */
_GLOBAL(loadcam_entry)
	mflr	r5
	LOAD_REG_ADDR_PIC(r4, TLBCAM)
	mtlr	r5
	mulli	r5,r3,TLBCAM_SIZE
	add	r3,r5,r4
	lwz	r4,TLBCAM_MAS0(r3)
	mtspr	SPRN_MAS0,r4
	lwz	r4,TLBCAM_MAS1(r3)
	mtspr	SPRN_MAS1,r4
	PPC_LL	r4,TLBCAM_MAS2(r3)
	mtspr	SPRN_MAS2,r4
	lwz	r4,TLBCAM_MAS3(r3)
	mtspr	SPRN_MAS3,r4
BEGIN_MMU_FTR_SECTION
	lwz	r4,TLBCAM_MAS7(r3)
	mtspr	SPRN_MAS7,r4
END_MMU_FTR_SECTION_IFSET(MMU_FTR_BIG_PHYS)
	isync
	tlbwe
	isync
	blr

/*
 * Load multiple TLB entries at once, using an alternate-space
 * trampoline so that we don't have to care about whether the same
 * TLB entry maps us before and after.
 *
 * r3 = first entry to write
 * r4 = number of entries to write
 * r5 = temporary tlb entry (0 means no switch to AS1)
 */
_GLOBAL(loadcam_multi)
	mflr	r8
	/* Don't switch to AS=1 if already there */
	mfmsr	r11
	andi.	r11,r11,MSR_IS
	bne	10f
	mr.	r12, r5
	beq	10f

	/*
	 * Set up temporary TLB entry that is the same as what we're
	 * running from, but in AS=1.
	 */
	bcl	20,31,$+4
1:	mflr	r6
	tlbsx	0,r8
	mfspr	r6,SPRN_MAS1
	ori	r6,r6,MAS1_TS
	mtspr	SPRN_MAS1,r6
	mfspr	r6,SPRN_MAS0
	rlwimi	r6,r5,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
	mr	r7,r5
	mtspr	SPRN_MAS0,r6
	isync
	tlbwe
	isync

	/* Switch to AS=1 */
	mfmsr	r6
	ori	r6,r6,MSR_IS|MSR_DS
	mtmsr	r6
	isync

10:
	mr	r9,r3
	add	r10,r3,r4
2:	bl	loadcam_entry
	addi	r9,r9,1
	cmpw	r9,r10
	mr	r3,r9
	blt	2b

	/* Don't return to AS=0 if we were in AS=1 at function start */
	andi.	r11,r11,MSR_IS
	bne	3f
	cmpwi	r12, 0
	beq	3f

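	/*
	 * The temporary AS=1 entry written above occupies the TLB1 slot
	 * whose ESEL was saved in r7; once we are back in AS=0 it is
	 * invalidated by writing MAS1 = 0 (valid bit clear) to that slot.
	 */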
	/* Return to AS=0 and clear the temporary entry */
	mfmsr	r6
	rlwinm.	r6,r6,0,~(MSR_IS|MSR_DS)
	mtmsr	r6
	isync

	li	r6,0
	mtspr	SPRN_MAS1,r6
	rlwinm	r6,r7,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
	oris	r6,r6,MAS0_TLBSEL(1)@h
	mtspr	SPRN_MAS0,r6
	isync
	tlbwe
	isync

3:
	mtlr	r8
	blr
#endif