/*
 * PARISC TLB and cache flushing support
 * Copyright (C) 2000-2001 Hewlett-Packard (John Marvin)
 * Copyright (C) 2001 Matthew Wilcox (willy at parisc-linux.org)
 * Copyright (C) 2002 Richard Hirst (rhirst with parisc-linux.org)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2, or (at your option)
 * any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */

/*
 * NOTE: fdc,fic, and pdc instructions that use base register modification
 *	 should only use index and base registers that are not shadowed,
 *	 so that the fast path emulation in the non access miss handler
 *	 can be used.
 */

#ifdef CONFIG_64BIT
	.level	2.0w
#else
	.level	2.0
#endif

#include <asm/psw.h>
#include <asm/assembly.h>
#include <asm/pgtable.h>
#include <asm/cache.h>
#include <linux/linkage.h>

	.text
	.align	128

ENTRY(flush_tlb_all_local)
	.proc
	.callinfo NO_CALLS
	.entry

	/*
	 * The pitlbe and pdtlbe instructions should only be used to
	 * flush the entire tlb. Also, there needs to be no intervening
	 * tlb operations, e.g. tlb misses, so the operation needs
	 * to happen in real mode with all interruptions disabled.
	 */

	/* pcxt_ssm_bug - relied upon translation! PA 2.0 Arch. F-4 and F-5 */
	rsm	PSW_SM_I, %r19		/* save I-bit state */
	load32	PA(1f), %r1
	nop
	nop
	nop
	nop
	nop

	rsm	PSW_SM_Q, %r0		/* prep to load iia queue */
	mtctl	%r0, %cr17		/* Clear IIASQ tail */
	mtctl	%r0, %cr17		/* Clear IIASQ head */
	mtctl	%r1, %cr18		/* IIAOQ head */
	ldo	4(%r1), %r1
	mtctl	%r1, %cr18		/* IIAOQ tail */
	load32	REAL_MODE_PSW, %r1
	mtctl	%r1, %ipsw
	rfi
	nop

1:	load32	PA(cache_info), %r1

	/* Flush Instruction Tlb */

	LDREG	ITLB_SID_BASE(%r1), %r20
	LDREG	ITLB_SID_STRIDE(%r1), %r21
	LDREG	ITLB_SID_COUNT(%r1), %r22
	LDREG	ITLB_OFF_BASE(%r1), %arg0
	LDREG	ITLB_OFF_STRIDE(%r1), %arg1
	LDREG	ITLB_OFF_COUNT(%r1), %arg2
	LDREG	ITLB_LOOP(%r1), %arg3

	addib,COND(=)	-1, %arg3, fitoneloop	/* Preadjust and test */
	movb,<,n	%arg3, %r31, fitdone	/* If loop < 0, skip */
	copy	%arg0, %r28			/* Init base addr */

fitmanyloop:					/* Loop if LOOP >= 2 */
	mtsp	%r20, %sr1
	add	%r21, %r20, %r20		/* increment space */
	copy	%arg2, %r29			/* Init middle loop count */

fitmanymiddle:					/* Loop if LOOP >= 2 */
	addib,COND(>)	-1, %r31, fitmanymiddle	/* Adjusted inner loop decr */
	pitlbe	0(%sr1, %r28)
	pitlbe,m	%arg1(%sr1, %r28)	/* Last pitlbe and addr adjust */
	addib,COND(>)	-1, %r29, fitmanymiddle	/* Middle loop decr */
	copy	%arg3, %r31			/* Re-init inner loop count */

	movb,tr	%arg0, %r28, fitmanyloop	/* Re-init base addr */
	addib,COND(<=),n	-1, %r22, fitdone	/* Outer loop count decr */

fitoneloop:					/* Loop if LOOP = 1 */
	mtsp	%r20, %sr1
	copy	%arg0, %r28			/* init base addr */
	copy	%arg2, %r29			/* init middle loop count */

fitonemiddle:					/* Loop if LOOP = 1 */
	addib,COND(>)	-1, %r29, fitonemiddle	/* Middle loop count decr */
	pitlbe,m	%arg1(%sr1, %r28)	/* pitlbe for one loop */

	addib,COND(>)	-1, %r22, fitoneloop	/* Outer loop count decr */
	add	%r21, %r20, %r20		/* increment space */

fitdone:

	/* Flush Data Tlb */

	LDREG	DTLB_SID_BASE(%r1), %r20
	LDREG	DTLB_SID_STRIDE(%r1), %r21
	LDREG	DTLB_SID_COUNT(%r1), %r22
	LDREG	DTLB_OFF_BASE(%r1), %arg0
	LDREG	DTLB_OFF_STRIDE(%r1), %arg1
	LDREG	DTLB_OFF_COUNT(%r1), %arg2
	LDREG	DTLB_LOOP(%r1), %arg3

	addib,COND(=)	-1, %arg3, fdtoneloop	/* Preadjust and test */
	movb,<,n	%arg3, %r31, fdtdone	/* If loop < 0, skip */
	copy	%arg0, %r28			/* Init base addr */

fdtmanyloop:					/* Loop if LOOP >= 2 */
	mtsp	%r20, %sr1
	add	%r21, %r20, %r20		/* increment space */
	copy	%arg2, %r29			/* Init middle loop count */

fdtmanymiddle:					/* Loop if LOOP >= 2 */
	addib,COND(>)	-1, %r31, fdtmanymiddle	/* Adjusted inner loop decr */
	pdtlbe	0(%sr1, %r28)
	pdtlbe,m	%arg1(%sr1, %r28)	/* Last pdtlbe and addr adjust */
	addib,COND(>)	-1, %r29, fdtmanymiddle	/* Middle loop decr */
	copy	%arg3, %r31			/* Re-init inner loop count */

	movb,tr	%arg0, %r28, fdtmanyloop	/* Re-init base addr */
	addib,COND(<=),n	-1, %r22, fdtdone	/* Outer loop count decr */

fdtoneloop:					/* Loop if LOOP = 1 */
	mtsp	%r20, %sr1
	copy	%arg0, %r28			/* init base addr */
	copy	%arg2, %r29			/* init middle loop count */

fdtonemiddle:					/* Loop if LOOP = 1 */
	addib,COND(>)	-1, %r29, fdtonemiddle	/* Middle loop count decr */
	pdtlbe,m	%arg1(%sr1, %r28)	/* pdtlbe for one loop */

	addib,COND(>)	-1, %r22, fdtoneloop	/* Outer loop count decr */
	add	%r21, %r20, %r20		/* increment space */


fdtdone:
	/*
	 * Switch back to virtual mode
	 */
	/* pcxt_ssm_bug */
	rsm	PSW_SM_I, %r0
	load32	2f, %r1
	nop
	nop
	nop
	nop
	nop

	rsm	PSW_SM_Q, %r0		/* prep to load iia queue */
	mtctl	%r0, %cr17		/* Clear IIASQ tail */
	mtctl	%r0, %cr17		/* Clear IIASQ head */
	mtctl	%r1, %cr18		/* IIAOQ head */
	ldo	4(%r1), %r1
	mtctl	%r1, %cr18		/* IIAOQ tail */
	load32	KERNEL_PSW, %r1
	or	%r1, %r19, %r1		/* I-bit to state on entry */
	mtctl	%r1, %ipsw		/* restore I-bit (entire PSW) */
	rfi
	nop

2:	bv	%r0(%r2)
	nop

	.exit
	.procend
ENDPROC(flush_tlb_all_local)

	.import cache_info,data
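/*
 * flush_instruction_cache_local walks the entire local I-cache with fice
 * at the base/stride/count/loop values recorded in cache_info (the
 * ICACHE_* fields, presumably filled in from firmware at boot). The
 * assumed C-level interface is void flush_instruction_cache_local(void);
 * no argument registers are read. Interruptions are kept off for the
 * duration so no memory-management traps occur while %sr1 is zero.
 */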
ENTRY(flush_instruction_cache_local)
	.proc
	.callinfo NO_CALLS
	.entry

	mtsp	%r0, %sr1
	load32	cache_info, %r1

	/* Flush Instruction Cache */

	LDREG	ICACHE_BASE(%r1), %arg0
	LDREG	ICACHE_STRIDE(%r1), %arg1
	LDREG	ICACHE_COUNT(%r1), %arg2
	LDREG	ICACHE_LOOP(%r1), %arg3
	rsm	PSW_SM_I, %r22			/* No mmgt ops during loop */
	addib,COND(=)	-1, %arg3, fioneloop	/* Preadjust and test */
	movb,<,n	%arg3, %r31, fisync	/* If loop < 0, do sync */

fimanyloop:					/* Loop if LOOP >= 2 */
	addib,COND(>)	-1, %r31, fimanyloop	/* Adjusted inner loop decr */
	fice	%r0(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)		/* Last fice and addr adjust */
	movb,tr	%arg3, %r31, fimanyloop		/* Re-init inner loop count */
	addib,COND(<=),n	-1, %arg2, fisync	/* Outer loop decr */

fioneloop:					/* Loop if LOOP = 1 */
	addib,COND(>)	-1, %arg2, fioneloop	/* Outer loop count decr */
	fice,m	%arg1(%sr1, %arg0)		/* Fice for one loop */

fisync:
	sync
	mtsm	%r22				/* restore I-bit */
	bv	%r0(%r2)
	nop
	.exit

	.procend
ENDPROC(flush_instruction_cache_local)


	.import cache_info, data
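/*
 * flush_data_cache_local performs the same walk over the D-cache, using
 * fdce and the DCACHE_* fields of cache_info, and issues syncdma before
 * the final sync. The assumed C-level interface is effectively
 * void flush_data_cache_local(void); any argument passed in %r26 is
 * ignored by this code.
 */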
ENTRY(flush_data_cache_local)
	.proc
	.callinfo NO_CALLS
	.entry

	mtsp	%r0, %sr1
	load32	cache_info, %r1

	/* Flush Data Cache */

	LDREG	DCACHE_BASE(%r1), %arg0
	LDREG	DCACHE_STRIDE(%r1), %arg1
	LDREG	DCACHE_COUNT(%r1), %arg2
	LDREG	DCACHE_LOOP(%r1), %arg3
	rsm	PSW_SM_I, %r22
	addib,COND(=)	-1, %arg3, fdoneloop	/* Preadjust and test */
	movb,<,n	%arg3, %r31, fdsync	/* If loop < 0, do sync */

fdmanyloop:					/* Loop if LOOP >= 2 */
	addib,COND(>)	-1, %r31, fdmanyloop	/* Adjusted inner loop decr */
	fdce	%r0(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)		/* Last fdce and addr adjust */
	movb,tr	%arg3, %r31, fdmanyloop		/* Re-init inner loop count */
	addib,COND(<=),n	-1, %arg2, fdsync	/* Outer loop decr */

fdoneloop:					/* Loop if LOOP = 1 */
	addib,COND(>)	-1, %arg2, fdoneloop	/* Outer loop count decr */
	fdce,m	%arg1(%sr1, %arg0)		/* Fdce for one loop */

fdsync:
	syncdma
	sync
	mtsm	%r22				/* restore I-bit */
	bv	%r0(%r2)
	nop
	.exit

	.procend
ENDPROC(flush_data_cache_local)

	.align	16
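/*
 * copy_user_page_asm copies one page through the kernel mapping.
 * Per the PA-RISC calling convention used throughout this file
 * (%arg0 = %r26, %arg1 = %r25), %r26 is the destination page and %r25
 * the source page; the assumed C-level prototype is something like
 * void copy_user_page_asm(void *to, void *from).
 */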
ENTRY(copy_user_page_asm)
	.proc
	.callinfo NO_CALLS
	.entry

#ifdef CONFIG_64BIT
	/* PA8x00 CPUs can consume 2 loads or 1 store per cycle.
	 * Unroll the loop by hand and arrange insn appropriately.
	 * GCC probably can do this just as well.
	 */

	ldd	0(%r25), %r19
	ldi	(PAGE_SIZE / 128), %r1

	ldw	64(%r25), %r0		/* prefetch 1 cacheline ahead */
	ldw	128(%r25), %r0		/* prefetch 2 */

1:	ldd	8(%r25), %r20
	ldw	192(%r25), %r0		/* prefetch 3 */
	ldw	256(%r25), %r0		/* prefetch 4 */

	ldd	16(%r25), %r21
	ldd	24(%r25), %r22
	std	%r19, 0(%r26)
	std	%r20, 8(%r26)

	ldd	32(%r25), %r19
	ldd	40(%r25), %r20
	std	%r21, 16(%r26)
	std	%r22, 24(%r26)

	ldd	48(%r25), %r21
	ldd	56(%r25), %r22
	std	%r19, 32(%r26)
	std	%r20, 40(%r26)

	ldd	64(%r25), %r19
	ldd	72(%r25), %r20
	std	%r21, 48(%r26)
	std	%r22, 56(%r26)

	ldd	80(%r25), %r21
	ldd	88(%r25), %r22
	std	%r19, 64(%r26)
	std	%r20, 72(%r26)

	ldd	96(%r25), %r19
	ldd	104(%r25), %r20
	std	%r21, 80(%r26)
	std	%r22, 88(%r26)

	ldd	112(%r25), %r21
	ldd	120(%r25), %r22
	std	%r19, 96(%r26)
	std	%r20, 104(%r26)

	ldo	128(%r25), %r25
	std	%r21, 112(%r26)
	std	%r22, 120(%r26)
	ldo	128(%r26), %r26

	/* conditional branches nullify on forward taken branch, and on
	 * non-taken backward branch. Note that .+4 is a backwards branch.
	 * The ldd should only get executed if the branch is taken.
	 */
	addib,COND(>),n	-1, %r1, 1b	/* bundle 10 */
	ldd	0(%r25), %r19		/* start next loads */

#else

	/*
	 * This loop is optimized for PCXL/PCXL2 ldw/ldw and stw/stw
	 * bundles (very restricted rules for bundling).
	 * Note that until (if) we start saving
	 * the full 64 bit register values on interrupt, we can't
	 * use ldd/std on a 32 bit kernel.
	 */
	ldw	0(%r25), %r19
	ldi	(PAGE_SIZE / 64), %r1

1:
	ldw	4(%r25), %r20
	ldw	8(%r25), %r21
	ldw	12(%r25), %r22
	stw	%r19, 0(%r26)
	stw	%r20, 4(%r26)
	stw	%r21, 8(%r26)
	stw	%r22, 12(%r26)
	ldw	16(%r25), %r19
	ldw	20(%r25), %r20
	ldw	24(%r25), %r21
	ldw	28(%r25), %r22
	stw	%r19, 16(%r26)
	stw	%r20, 20(%r26)
	stw	%r21, 24(%r26)
	stw	%r22, 28(%r26)
	ldw	32(%r25), %r19
	ldw	36(%r25), %r20
	ldw	40(%r25), %r21
	ldw	44(%r25), %r22
	stw	%r19, 32(%r26)
	stw	%r20, 36(%r26)
	stw	%r21, 40(%r26)
	stw	%r22, 44(%r26)
	ldw	48(%r25), %r19
	ldw	52(%r25), %r20
	ldw	56(%r25), %r21
	ldw	60(%r25), %r22
	stw	%r19, 48(%r26)
	stw	%r20, 52(%r26)
	ldo	64(%r25), %r25
	stw	%r21, 56(%r26)
	stw	%r22, 60(%r26)
	ldo	64(%r26), %r26
	addib,COND(>),n	-1, %r1, 1b
	ldw	0(%r25), %r19
#endif
	bv	%r0(%r2)
	nop
	.exit

	.procend
ENDPROC(copy_user_page_asm)

/*
 * NOTE: Code in clear_user_page has a hard coded dependency on the
 *	 maximum alias boundary being 4 Mb. We've been assured by the
 *	 parisc chip designers that there will not ever be a parisc
 *	 chip with a larger alias boundary (Never say never :-) ).
 *
 *	 Subtle: the dtlb miss handlers support the temp alias region by
 *	 "knowing" that if a dtlb miss happens within the temp alias
 *	 region it must have occurred while in clear_user_page. Since
 *	 this routine makes use of processor local translations, we
 *	 don't want to insert them into the kernel page table. Instead,
 *	 we load up some general registers (they need to be registers
 *	 which aren't shadowed) with the physical page numbers (preshifted
 *	 for tlb insertion) needed to insert the translations. When we
 *	 miss on the translation, the dtlb miss handler inserts the
 *	 translation into the tlb using these values:
 *
 *	 %r26 physical page (shifted for tlb insert) of "to" translation
 *	 %r23 physical page (shifted for tlb insert) of "from" translation
 */

#if 0

	/*
	 * We can't do this since copy_user_page is used to bring in
	 * file data that might have instructions. Since the data would
	 * then need to be flushed out so the i-fetch can see it, it
	 * makes more sense to just copy through the kernel translation
	 * and flush it.
	 *
	 * I'm still keeping this around because it may be possible to
	 * use it if more information is passed into copy_user_page().
	 * Have to do some measurements to see if it is worthwhile to
	 * lobby for such a change.
	 */

ENTRY(copy_user_page_asm)
	.proc
	.callinfo NO_CALLS
	.entry

	ldil	L%(__PAGE_OFFSET), %r1
	sub	%r26, %r1, %r26
	sub	%r25, %r1, %r23		/* move physical addr into non shadowed reg */

	ldil	L%(TMPALIAS_MAP_START), %r28
	/* FIXME for different page sizes != 4k */
#ifdef CONFIG_64BIT
	extrd,u	%r26, 56,32, %r26	/* convert phys addr to tlb insert format */
	extrd,u	%r23, 56,32, %r23	/* convert phys addr to tlb insert format */
	depd	%r24, 63,22, %r28	/* Form aliased virtual address 'to' */
	depdi	0, 63,12, %r28		/* Clear any offset bits */
	copy	%r28, %r29
	depdi	1, 41,1, %r29		/* Form aliased virtual address 'from' */
#else
	extrw,u	%r26, 24,25, %r26	/* convert phys addr to tlb insert format */
	extrw,u	%r23, 24,25, %r23	/* convert phys addr to tlb insert format */
	depw	%r24, 31,22, %r28	/* Form aliased virtual address 'to' */
	depwi	0, 31,12, %r28		/* Clear any offset bits */
	copy	%r28, %r29
	depwi	1, 9,1, %r29		/* Form aliased virtual address 'from' */
#endif

	/* Purge any old translations */

	pdtlb	0(%r28)
	pdtlb	0(%r29)

	ldi	64, %r1

	/*
	 * This loop is optimized for PCXL/PCXL2 ldw/ldw and stw/stw
	 * bundles (very restricted rules for bundling). It probably
	 * does OK on PCXU and better, but we could do better with
	 * ldd/std instructions. Note that until (if) we start saving
	 * the full 64 bit register values on interrupt, we can't
	 * use ldd/std on a 32 bit kernel.
	 */


1:
	ldw	0(%r29), %r19
	ldw	4(%r29), %r20
	ldw	8(%r29), %r21
	ldw	12(%r29), %r22
	stw	%r19, 0(%r28)
	stw	%r20, 4(%r28)
	stw	%r21, 8(%r28)
	stw	%r22, 12(%r28)
	ldw	16(%r29), %r19
	ldw	20(%r29), %r20
	ldw	24(%r29), %r21
	ldw	28(%r29), %r22
	stw	%r19, 16(%r28)
	stw	%r20, 20(%r28)
	stw	%r21, 24(%r28)
	stw	%r22, 28(%r28)
	ldw	32(%r29), %r19
	ldw	36(%r29), %r20
	ldw	40(%r29), %r21
	ldw	44(%r29), %r22
	stw	%r19, 32(%r28)
	stw	%r20, 36(%r28)
	stw	%r21, 40(%r28)
	stw	%r22, 44(%r28)
	ldw	48(%r29), %r19
	ldw	52(%r29), %r20
	ldw	56(%r29), %r21
	ldw	60(%r29), %r22
	stw	%r19, 48(%r28)
	stw	%r20, 52(%r28)
	stw	%r21, 56(%r28)
	stw	%r22, 60(%r28)
	ldo	64(%r28), %r28
	addib,COND(>)	-1, %r1, 1b
	ldo	64(%r29), %r29

	bv	%r0(%r2)
	nop
	.exit

	.procend
ENDPROC(copy_user_page_asm)
#endif
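/*
 * __clear_user_page_asm zeroes a page through a TMPALIAS mapping so that
 * the dirtied cache lines land in the user page's congruence class.
 * By the calling convention, %r26 (%arg0) is the kernel virtual address
 * of the page (converted to a physical frame with tophys_r1 below) and
 * %r25 (%arg1) is the user virtual address whose upper bits select the
 * alias colour; the assumed C-level prototype is roughly
 * void __clear_user_page_asm(void *page, unsigned long vaddr).
 */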
ENTRY(__clear_user_page_asm)
	.proc
	.callinfo NO_CALLS
	.entry

	tophys_r1	%r26

	ldil	L%(TMPALIAS_MAP_START), %r28
#ifdef CONFIG_64BIT
#if (TMPALIAS_MAP_START >= 0x80000000)
	depdi	0, 31,32, %r28		/* clear any sign extension */
	/* FIXME: page size dependent */
#endif
	extrd,u	%r26, 56,32, %r26	/* convert phys addr to tlb insert format */
	depd	%r25, 63,22, %r28	/* Form aliased virtual address 'to' */
	depdi	0, 63,12, %r28		/* Clear any offset bits */
#else
	extrw,u	%r26, 24,25, %r26	/* convert phys addr to tlb insert format */
	depw	%r25, 31,22, %r28	/* Form aliased virtual address 'to' */
	depwi	0, 31,12, %r28		/* Clear any offset bits */
#endif

	/* Purge any old translation */

	pdtlb	0(%r28)

#ifdef CONFIG_64BIT
	ldi	(PAGE_SIZE / 128), %r1

	/* PREFETCH (Write) has not (yet) been proven to help here */
	/* #define	PREFETCHW_OP	ldd	256(%0), %r0 */

1:	std	%r0, 0(%r28)
	std	%r0, 8(%r28)
	std	%r0, 16(%r28)
	std	%r0, 24(%r28)
	std	%r0, 32(%r28)
	std	%r0, 40(%r28)
	std	%r0, 48(%r28)
	std	%r0, 56(%r28)
	std	%r0, 64(%r28)
	std	%r0, 72(%r28)
	std	%r0, 80(%r28)
	std	%r0, 88(%r28)
	std	%r0, 96(%r28)
	std	%r0, 104(%r28)
	std	%r0, 112(%r28)
	std	%r0, 120(%r28)
	addib,COND(>)	-1, %r1, 1b
	ldo	128(%r28), %r28

#else	/* ! CONFIG_64BIT */
	ldi	(PAGE_SIZE / 64), %r1

1:
	stw	%r0, 0(%r28)
	stw	%r0, 4(%r28)
	stw	%r0, 8(%r28)
	stw	%r0, 12(%r28)
	stw	%r0, 16(%r28)
	stw	%r0, 20(%r28)
	stw	%r0, 24(%r28)
	stw	%r0, 28(%r28)
	stw	%r0, 32(%r28)
	stw	%r0, 36(%r28)
	stw	%r0, 40(%r28)
	stw	%r0, 44(%r28)
	stw	%r0, 48(%r28)
	stw	%r0, 52(%r28)
	stw	%r0, 56(%r28)
	stw	%r0, 60(%r28)
	addib,COND(>)	-1, %r1, 1b
	ldo	64(%r28), %r28
#endif	/* CONFIG_64BIT */

	bv	%r0(%r2)
	nop
	.exit

	.procend
ENDPROC(__clear_user_page_asm)
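/*
 * flush_dcache_page_asm and flush_icache_page_asm below flush one page
 * through a TMPALIAS mapping of the user's address, so the flush hits
 * the cache lines of the user's alias colour. From the register usage,
 * %r26 (%arg0) is already a physical address (no tophys conversion is
 * done) and %r25 (%arg1) is the user virtual address; the assumed
 * C-level prototypes are along the lines of
 * void flush_dcache_page_asm(unsigned long phys_addr, unsigned long vaddr).
 */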
ENTRY(flush_dcache_page_asm)
	.proc
	.callinfo NO_CALLS
	.entry

	ldil	L%(TMPALIAS_MAP_START), %r28
#ifdef CONFIG_64BIT
#if (TMPALIAS_MAP_START >= 0x80000000)
	depdi	0, 31,32, %r28		/* clear any sign extension */
	/* FIXME: page size dependent */
#endif
	extrd,u	%r26, 56,32, %r26	/* convert phys addr to tlb insert format */
	depd	%r25, 63,22, %r28	/* Form aliased virtual address 'to' */
	depdi	0, 63,12, %r28		/* Clear any offset bits */
#else
	extrw,u	%r26, 24,25, %r26	/* convert phys addr to tlb insert format */
	depw	%r25, 31,22, %r28	/* Form aliased virtual address 'to' */
	depwi	0, 31,12, %r28		/* Clear any offset bits */
#endif

	/* Purge any old translation */

	pdtlb	0(%r28)

	ldil	L%dcache_stride, %r1
	ldw	R%dcache_stride(%r1), %r1

#ifdef CONFIG_64BIT
	depdi,z	1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z	1, 31-PAGE_SHIFT,1, %r25
#endif
	add	%r28, %r25, %r25
	sub	%r25, %r1, %r25


1:	fdc,m	%r1(%r28)
	fdc,m	%r1(%r28)
	fdc,m	%r1(%r28)
	fdc,m	%r1(%r28)
	fdc,m	%r1(%r28)
	fdc,m	%r1(%r28)
	fdc,m	%r1(%r28)
	fdc,m	%r1(%r28)
	fdc,m	%r1(%r28)
	fdc,m	%r1(%r28)
	fdc,m	%r1(%r28)
	fdc,m	%r1(%r28)
	fdc,m	%r1(%r28)
	fdc,m	%r1(%r28)
	fdc,m	%r1(%r28)
	cmpb,COND(<<)	%r28, %r25, 1b
	fdc,m	%r1(%r28)

	sync
	bv	%r0(%r2)
	pdtlb	(%r25)
	.exit

	.procend
ENDPROC(flush_dcache_page_asm)

ENTRY(flush_icache_page_asm)
	.proc
	.callinfo NO_CALLS
	.entry

	ldil	L%(TMPALIAS_MAP_START), %r28
#ifdef CONFIG_64BIT
#if (TMPALIAS_MAP_START >= 0x80000000)
	depdi	0, 31,32, %r28		/* clear any sign extension */
	/* FIXME: page size dependent */
#endif
	extrd,u	%r26, 56,32, %r26	/* convert phys addr to tlb insert format */
	depd	%r25, 63,22, %r28	/* Form aliased virtual address 'to' */
	depdi	0, 63,12, %r28		/* Clear any offset bits */
#else
	extrw,u	%r26, 24,25, %r26	/* convert phys addr to tlb insert format */
	depw	%r25, 31,22, %r28	/* Form aliased virtual address 'to' */
	depwi	0, 31,12, %r28		/* Clear any offset bits */
#endif

	/* Purge any old translation */

	pitlb	(%sr0, %r28)

	ldil	L%icache_stride, %r1
	ldw	R%icache_stride(%r1), %r1

#ifdef CONFIG_64BIT
	depdi,z	1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z	1, 31-PAGE_SHIFT,1, %r25
#endif
	add	%r28, %r25, %r25
	sub	%r25, %r1, %r25


1:	fic,m	%r1(%r28)
	fic,m	%r1(%r28)
	fic,m	%r1(%r28)
	fic,m	%r1(%r28)
	fic,m	%r1(%r28)
	fic,m	%r1(%r28)
	fic,m	%r1(%r28)
	fic,m	%r1(%r28)
	fic,m	%r1(%r28)
	fic,m	%r1(%r28)
	fic,m	%r1(%r28)
	fic,m	%r1(%r28)
	fic,m	%r1(%r28)
	fic,m	%r1(%r28)
	fic,m	%r1(%r28)
	cmpb,COND(<<)	%r28, %r25, 1b
	fic,m	%r1(%r28)

	sync
	bv	%r0(%r2)
	pitlb	(%sr0, %r25)
	.exit

	.procend
ENDPROC(flush_icache_page_asm)
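/*
 * The routines below walk a page or range through the kernel mapping at
 * the cache stride. Note that in the unrolled loops the final fdc/fic/pdc
 * sits in the delay slot of the cmpb: the branch is not nullifying, so
 * that last flush executes on every iteration, giving 16 lines per pass.
 * For flush_kernel_dcache_page_asm the assumed C-level prototype is
 * something like void flush_kernel_dcache_page_asm(void *addr), with the
 * page's kernel virtual address arriving in %r26.
 */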
ENTRY(flush_kernel_dcache_page_asm)
	.proc
	.callinfo NO_CALLS
	.entry

	ldil	L%dcache_stride, %r1
	ldw	R%dcache_stride(%r1), %r23

#ifdef CONFIG_64BIT
	depdi,z	1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z	1, 31-PAGE_SHIFT,1, %r25
#endif
	add	%r26, %r25, %r25
	sub	%r25, %r23, %r25


1:	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	cmpb,COND(<<)	%r26, %r25, 1b
	fdc,m	%r23(%r26)

	sync
	bv	%r0(%r2)
	nop
	.exit

	.procend
ENDPROC(flush_kernel_dcache_page_asm)

ENTRY(purge_kernel_dcache_page)
	.proc
	.callinfo NO_CALLS
	.entry

	ldil	L%dcache_stride, %r1
	ldw	R%dcache_stride(%r1), %r23

#ifdef CONFIG_64BIT
	depdi,z	1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z	1, 31-PAGE_SHIFT,1, %r25
#endif
	add	%r26, %r25, %r25
	sub	%r25, %r23, %r25

1:	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	cmpb,COND(<<)	%r26, %r25, 1b
	pdc,m	%r23(%r26)

	sync
	bv	%r0(%r2)
	nop
	.exit

	.procend
ENDPROC(purge_kernel_dcache_page)

ENTRY(flush_user_dcache_range_asm)
	.proc
	.callinfo NO_CALLS
	.entry

	ldil	L%dcache_stride, %r1
	ldw	R%dcache_stride(%r1), %r23
	ldo	-1(%r23), %r21
	ANDCM	%r26, %r21, %r26

1:	cmpb,COND(<<),n	%r26, %r25, 1b
	fdc,m	%r23(%sr3, %r26)

	sync
	bv	%r0(%r2)
	nop
	.exit

	.procend
ENDPROC(flush_user_dcache_range_asm)

ENTRY(flush_kernel_dcache_range_asm)
	.proc
	.callinfo NO_CALLS
	.entry

	ldil	L%dcache_stride, %r1
	ldw	R%dcache_stride(%r1), %r23
	ldo	-1(%r23), %r21
	ANDCM	%r26, %r21, %r26

1:	cmpb,COND(<<),n	%r26, %r25, 1b
	fdc,m	%r23(%r26)

	sync
	syncdma
	bv	%r0(%r2)
	nop
	.exit

	.procend
ENDPROC(flush_kernel_dcache_range_asm)

ENTRY(flush_user_icache_range_asm)
	.proc
	.callinfo NO_CALLS
	.entry

	ldil	L%icache_stride, %r1
	ldw	R%icache_stride(%r1), %r23
	ldo	-1(%r23), %r21
	ANDCM	%r26, %r21, %r26

1:	cmpb,COND(<<),n	%r26, %r25, 1b
	fic,m	%r23(%sr3, %r26)

	sync
	bv	%r0(%r2)
	nop
	.exit

	.procend
ENDPROC(flush_user_icache_range_asm)

ENTRY(flush_kernel_icache_page)
	.proc
	.callinfo NO_CALLS
	.entry

	ldil	L%icache_stride, %r1
	ldw	R%icache_stride(%r1), %r23

#ifdef CONFIG_64BIT
	depdi,z	1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z	1, 31-PAGE_SHIFT,1, %r25
#endif
	add	%r26, %r25, %r25
	sub	%r25, %r23, %r25


1:	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	cmpb,COND(<<)	%r26, %r25, 1b
	fic,m	%r23(%sr4, %r26)

	sync
	bv	%r0(%r2)
	nop
	.exit

	.procend
ENDPROC(flush_kernel_icache_page)

ENTRY(flush_kernel_icache_range_asm)
	.proc
	.callinfo NO_CALLS
	.entry

	ldil	L%icache_stride, %r1
	ldw	R%icache_stride(%r1), %r23
	ldo	-1(%r23), %r21
	ANDCM	%r26, %r21, %r26

1:	cmpb,COND(<<),n	%r26, %r25, 1b
	fic,m	%r23(%sr4, %r26)

	sync
	bv	%r0(%r2)
	nop
	.exit
	.procend
ENDPROC(flush_kernel_icache_range_asm)
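/*
 * disable_sr_hashing_asm turns off space-register hashing in the CPU's
 * diagnose registers. %r26 (%arg0) selects the CPU family via one of the
 * SRHASH_PCXST / SRHASH_PCXL / SRHASH_PA20 codes compared against below;
 * the assumed C-level prototype is void disable_sr_hashing_asm(int srhash_type).
 * The diagnose accesses are encoded as raw .word values, presumably
 * because the assembler provides no mfdiag/mtdiag mnemonics.
 */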
/* align should cover use of rfi in disable_sr_hashing_asm and
 * srdis_done.
 */
	.align	256
ENTRY(disable_sr_hashing_asm)
	.proc
	.callinfo NO_CALLS
	.entry

	/*
	 * Switch to real mode
	 */
	/* pcxt_ssm_bug */
	rsm	PSW_SM_I, %r0
	load32	PA(1f), %r1
	nop
	nop
	nop
	nop
	nop

	rsm	PSW_SM_Q, %r0		/* prep to load iia queue */
	mtctl	%r0, %cr17		/* Clear IIASQ tail */
	mtctl	%r0, %cr17		/* Clear IIASQ head */
	mtctl	%r1, %cr18		/* IIAOQ head */
	ldo	4(%r1), %r1
	mtctl	%r1, %cr18		/* IIAOQ tail */
	load32	REAL_MODE_PSW, %r1
	mtctl	%r1, %ipsw
	rfi
	nop

1:	cmpib,=,n	SRHASH_PCXST, %r26, srdis_pcxs
	cmpib,=,n	SRHASH_PCXL, %r26, srdis_pcxl
	cmpib,=,n	SRHASH_PA20, %r26, srdis_pa20
	b,n	srdis_done

srdis_pcxs:

	/* Disable Space Register Hashing for PCXS,PCXT,PCXT' */

	.word	0x141c1a00		/* mfdiag %dr0, %r28 */
	.word	0x141c1a00		/* must issue twice */
	depwi	0,18,1, %r28		/* Clear DHE (dcache hash enable) */
	depwi	0,20,1, %r28		/* Clear IHE (icache hash enable) */
	.word	0x141c1600		/* mtdiag %r28, %dr0 */
	.word	0x141c1600		/* must issue twice */
	b,n	srdis_done

srdis_pcxl:

	/* Disable Space Register Hashing for PCXL */

	.word	0x141c0600		/* mfdiag %dr0, %r28 */
	depwi	0,28,2, %r28		/* Clear DHASH_EN & IHASH_EN */
	.word	0x141c0240		/* mtdiag %r28, %dr0 */
	b,n	srdis_done

srdis_pa20:

	/* Disable Space Register Hashing for PCXU,PCXU+,PCXW,PCXW+,PCXW2 */

	.word	0x144008bc		/* mfdiag %dr2, %r28 */
	depdi	0, 54,1, %r28		/* clear DIAG_SPHASH_ENAB (bit 54) */
	.word	0x145c1840		/* mtdiag %r28, %dr2 */


srdis_done:
	/* Switch back to virtual mode */
	rsm	PSW_SM_I, %r0		/* prep to load iia queue */
	load32	2f, %r1
	nop
	nop
	nop
	nop
	nop

	rsm	PSW_SM_Q, %r0		/* prep to load iia queue */
	mtctl	%r0, %cr17		/* Clear IIASQ tail */
	mtctl	%r0, %cr17		/* Clear IIASQ head */
	mtctl	%r1, %cr18		/* IIAOQ head */
	ldo	4(%r1), %r1
	mtctl	%r1, %cr18		/* IIAOQ tail */
	load32	KERNEL_PSW, %r1
	mtctl	%r1, %ipsw
	rfi
	nop

2:	bv	%r0(%r2)
	nop
	.exit

	.procend
ENDPROC(disable_sr_hashing_asm)

	.end