/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2013, Anish Gupta ([email protected])
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include <machine/asmacros.h>

#include "svm_assym.h"

/*
 * Be friendly to DTrace FBT's prologue/epilogue pattern matching.
 *
 * VENTER/VLEAVE are also responsible for saving/restoring the host %rbp
 * across VMRUN (the guest's %rbp is loaded below, so the host value must
 * be preserved on the stack by the prologue).
 */
#define	VENTER  push %rbp ; mov %rsp,%rbp
#define	VLEAVE  pop %rbp

/*
 * svm_launch(uint64_t vmcb, struct svm_regctx *gctx, struct pcpu *pcpu)
 *
 * World switch into (and back out of) an SVM guest.  AT&T syntax,
 * SysV AMD64 kernel ABI.
 *
 * In:
 *	%rdi: physical address of VMCB
 *	%rsi: pointer to guest context (struct svm_regctx)
 *	%rdx: pointer to the pcpu data (used to restore %GS.base on exit)
 *
 * The guest register state is loaded from / stored back to *gctx using
 * the SCTX_* offsets generated into svm_assym.h.
 */
ENTRY(svm_launch)
	VENTER

	/* save pointer to the pcpu data */
	push %rdx

	/*
	 * Host register state saved across a VMRUN.
	 *
	 * All "callee saved registers" except:
	 * %rsp: because it is preserved by the processor across VMRUN.
	 * %rbp: because it is saved/restored by the function prologue/epilogue.
	 */
	push %rbx
	push %r12
	push %r13
	push %r14
	push %r15

	/*
	 * Save the physical address of the VMCB in %rax.
	 *
	 * vmload/vmrun/vmsave below all take the VMCB physical address
	 * implicitly in %rax, and %rax itself is saved/restored by the
	 * processor across VMRUN, so it remains valid for vmsave on exit.
	 */
	movq %rdi, %rax

	push %rsi		/* push guest context pointer on the stack */

	/*
	 * Restore guest state.
	 */
	movq SCTX_R8(%rsi), %r8
	movq SCTX_R9(%rsi), %r9
	movq SCTX_R10(%rsi), %r10
	movq SCTX_R11(%rsi), %r11
	movq SCTX_R12(%rsi), %r12
	movq SCTX_R13(%rsi), %r13
	movq SCTX_R14(%rsi), %r14
	movq SCTX_R15(%rsi), %r15
	movq SCTX_RBP(%rsi), %rbp
	movq SCTX_RBX(%rsi), %rbx
	movq SCTX_RCX(%rsi), %rcx
	movq SCTX_RDX(%rsi), %rdx
	movq SCTX_RDI(%rsi), %rdi
	movq SCTX_RSI(%rsi), %rsi	/* %rsi must be restored last */

	/* Enter the guest: %rax holds the VMCB physical address. */
	vmload %rax
	vmrun %rax
	vmsave %rax

	pop %rax		/* pop guest context pointer from the stack */

	/*
	 * Save guest state.
	 */
	movq %r8, SCTX_R8(%rax)
	movq %r9, SCTX_R9(%rax)
	movq %r10, SCTX_R10(%rax)
	movq %r11, SCTX_R11(%rax)
	movq %r12, SCTX_R12(%rax)
	movq %r13, SCTX_R13(%rax)
	movq %r14, SCTX_R14(%rax)
	movq %r15, SCTX_R15(%rax)
	movq %rbp, SCTX_RBP(%rax)
	movq %rbx, SCTX_RBX(%rax)
	movq %rcx, SCTX_RCX(%rax)
	movq %rdx, SCTX_RDX(%rax)
	movq %rdi, SCTX_RDI(%rax)
	movq %rsi, SCTX_RSI(%rax)

	/*
	 * To prevent malicious branch target predictions from
	 * affecting the host, overwrite all entries in the RSB upon
	 * exiting a guest.
	 *
	 * Each loop iteration performs two never-returning calls (32 calls
	 * total), each pushing a return address that points at a
	 * pause-loop speculation trap.  %rsp is parked in %rax first and
	 * restored afterwards to discard those pushed return addresses.
	 * The repeated 1:/2: numeric labels are GAS local labels and are
	 * legal; 1b/2f bind to the nearest definition.
	 */
	mov $16, %ecx	/* 16 iterations, two calls per loop */
	mov %rsp, %rax
0:	call 2f		/* create an RSB entry. */
1:	pause
	call 1b		/* capture rogue speculation. */
2:	call 2f		/* create an RSB entry. */
1:	pause
	call 1b		/* capture rogue speculation. */
2:	sub $1, %ecx
	jnz 0b
	mov %rax, %rsp

	/* Restore host state */
	pop %r15
	pop %r14
	pop %r13
	pop %r12
	pop %rbx

	/*
	 * Restore %GS.base to point to the host's pcpu data.
	 *
	 * The pcpu pointer pushed at entry is split into %edx:%eax
	 * (wrmsr writes the 64-bit value %edx:%eax to the MSR selected
	 * by %ecx, here MSR_GSBASE).
	 */
	pop %rdx
	mov %edx, %eax
	shr $32, %rdx
	mov $MSR_GSBASE, %rcx
	wrmsr

	/*
	 * Clobber the remaining registers with guest contents so they
	 * can't be misused.
	 */
	xor %rbp, %rbp
	xor %rdi, %rdi
	xor %rsi, %rsi
	xor %r8, %r8
	xor %r9, %r9
	xor %r10, %r10
	xor %r11, %r11

	VLEAVE
	ret
END(svm_launch)