/* SPDX-License-Identifier: GPL-2.0-only */
/*
 *
 * Copyright SUSE Linux Products GmbH 2009
 *
 * Authors: Alexander Graf <[email protected]>
 */

#include <asm/asm-compat.h>
#include <asm/feature-fixups.h>

#define SHADOW_SLB_ENTRY_LEN	0x10
#define OFFSET_ESID(x)		(SHADOW_SLB_ENTRY_LEN * x)
#define OFFSET_VSID(x)		((SHADOW_SLB_ENTRY_LEN * x) + 8)

/******************************************************************************
 *                                                                            *
 *                               Entry code                                   *
 *                                                                            *
 *****************************************************************************/

.macro LOAD_GUEST_SEGMENTS

	/* Required state:
	 *
	 * MSR = ~IR|DR
	 * R13 = PACA
	 * R1 = host R1
	 * R2 = host R2
	 * R3 = shadow vcpu
	 * all other volatile GPRS = free except R4, R6
	 * SVCPU[CR]  = guest CR
	 * SVCPU[XER] = guest XER
	 * SVCPU[CTR] = guest CTR
	 * SVCPU[LR]  = guest LR
	 */

BEGIN_FW_FTR_SECTION

	/* Declare SLB shadow as 0 entries big */

	ld	r11, PACA_SLBSHADOWPTR(r13)
	li	r8, 0
	stb	r8, 3(r11)

END_FW_FTR_SECTION_IFSET(FW_FEATURE_LPAR)

	/* Flush SLB */

	li	r10, 0
	slbmte	r10, r10
	slbia

	/* Fill SLB with our shadow */

	lbz	r12, SVCPU_SLB_MAX(r3)
	mulli	r12, r12, 16
	addi	r12, r12, SVCPU_SLB
	add	r12, r12, r3

	/* for (r11 = kvm_slb; r11 < kvm_slb + kvm_slb_size; r11+=slb_entry) */
	li	r11, SVCPU_SLB
	add	r11, r11, r3

slb_loop_enter:

	ld	r10, 0(r11)

	andis.	r9, r10, SLB_ESID_V@h
	beq	slb_loop_enter_skip

	ld	r9, 8(r11)
	slbmte	r9, r10

slb_loop_enter_skip:
	addi	r11, r11, 16
	cmpd	cr0, r11, r12
	blt	slb_loop_enter

slb_do_enter:

.endm

/******************************************************************************
 *                                                                            *
 *                                Exit code                                   *
 *                                                                            *
 *****************************************************************************/

.macro LOAD_HOST_SEGMENTS

	/* Register usage at this point:
	 *
	 * R1         = host R1
	 * R2         = host R2
	 * R12        = exit handler id
	 * R13        = shadow vcpu - SHADOW_VCPU_OFF [=PACA on PPC64]
	 * SVCPU.*    = guest *
	 * SVCPU[CR]  = guest CR
	 * SVCPU[XER] = guest XER
	 * SVCPU[CTR] = guest CTR
	 * SVCPU[LR]  = guest LR
	 *
	 */

	/* Remove all SLB entries that are in use. */

	li	r0, 0
	slbmte	r0, r0
	slbia

	/* Restore bolted entries from the shadow */

	ld	r11, PACA_SLBSHADOWPTR(r13)

BEGIN_FW_FTR_SECTION

	/* Declare SLB shadow as SLB_NUM_BOLTED entries big */

	li	r8, SLB_NUM_BOLTED
	stb	r8, 3(r11)

END_FW_FTR_SECTION_IFSET(FW_FEATURE_LPAR)

	/* Manually load all entries from shadow SLB */

	li	r8, SLBSHADOW_SAVEAREA
	li	r7, SLBSHADOW_SAVEAREA + 8

	.rept	SLB_NUM_BOLTED
	LDX_BE	r10, r11, r8
	cmpdi	r10, 0
	beq	1f
	LDX_BE	r9, r11, r7
	slbmte	r9, r10
1:	addi	r7, r7, SHADOW_SLB_ENTRY_LEN
	addi	r8, r8, SHADOW_SLB_ENTRY_LEN
	.endr

	isync
	sync

slb_do_exit:

.endm
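
Reader's note: below is a rough, self-contained C sketch of the guest-entry loop above (slb_loop_enter / slb_loop_enter_skip), added only as an aid for readers less familiar with PowerPC assembly. It is not part of the kernel source: the struct, field, and helper names are illustrative stand-ins, and slb_flush_all() / slb_mte() merely mark where the slbia and slbmte instructions are issued.

#include <stdint.h>

/* Illustrative definitions only; not the kernel's actual types or helpers. */
#define SLB_ESID_V	(1ULL << 27)	/* valid bit tested by andis. above */

struct shadow_slb_entry {
	uint64_t esid;	/* offset 0 in each 0x10-byte shadow entry */
	uint64_t vsid;	/* offset 8 */
};

static void slb_flush_all(void)
{
	/* stands for: li r10, 0 ; slbmte r10, r10 ; slbia */
}

static void slb_mte(uint64_t vsid, uint64_t esid)
{
	/* stands for: slbmte vsid, esid */
	(void)vsid;
	(void)esid;
}

/* The slb_loop_enter / slb_loop_enter_skip loop, expressed as C. */
static void load_guest_segments(const struct shadow_slb_entry *slb, int slb_max)
{
	slb_flush_all();

	for (int i = 0; i < slb_max; i++) {
		if (!(slb[i].esid & SLB_ESID_V))
			continue;	/* beq slb_loop_enter_skip */
		slb_mte(slb[i].vsid, slb[i].esid);
	}
}

The host-exit path follows the same pattern: after flushing the SLB it walks SLB_NUM_BOLTED entries of the firmware SLB shadow save area, skips entries whose ESID word is zero, and reinstalls the rest with slbmte before the final isync/sync.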