GitHub Repository: freebsd/freebsd-src
Path: blob/main/sys/amd64/vmm/amd/svm_support.S
/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2013, Anish Gupta ([email protected])
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <machine/asmacros.h>

#include "svm_assym.h"

/*
 * Be friendly to DTrace FBT's prologue/epilogue pattern matching.
 *
 * They are also responsible for saving/restoring the host %rbp across VMRUN.
 */
#define VENTER  push %rbp ; mov %rsp,%rbp
#define VLEAVE  pop %rbp
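
/*
 * DTrace FBT finds its entry/return probe sites by recognizing the
 * canonical frame-pointer prologue ("push %rbp ; mov %rsp,%rbp") and the
 * matching epilogue, which is why VENTER/VLEAVE use exactly that sequence.
 */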

/*
 * svm_launch(uint64_t vmcb, struct svm_regctx *gctx, struct pcpu *pcpu)
 * %rdi: physical address of VMCB
 * %rsi: pointer to guest context
 * %rdx: pointer to the pcpu data
 */
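/*
 * svm_launch() is an ordinary C-callable function, so under the System V
 * amd64 calling convention its three arguments arrive in %rdi, %rsi and
 * %rdx, and %rbx, %rbp, %r12-%r15 (and %rsp) must be preserved for the
 * caller; those are exactly the registers saved below.
 */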
ENTRY(svm_launch)
        VENTER

        /* save pointer to the pcpu data */
        push %rdx

        /*
         * Host register state saved across a VMRUN.
         *
         * All "callee saved registers" except:
         * %rsp: because it is preserved by the processor across VMRUN.
         * %rbp: because it is saved/restored by the function prologue/epilogue.
         */
        push %rbx
        push %r12
        push %r13
        push %r14
        push %r15

        /* Save the physical address of the VMCB in %rax */
        movq %rdi, %rax

        push %rsi               /* push guest context pointer on the stack */

        /*
         * Restore guest state.
         */
        movq SCTX_R8(%rsi), %r8
        movq SCTX_R9(%rsi), %r9
        movq SCTX_R10(%rsi), %r10
        movq SCTX_R11(%rsi), %r11
        movq SCTX_R12(%rsi), %r12
        movq SCTX_R13(%rsi), %r13
        movq SCTX_R14(%rsi), %r14
        movq SCTX_R15(%rsi), %r15
        movq SCTX_RBP(%rsi), %rbp
        movq SCTX_RBX(%rsi), %rbx
        movq SCTX_RCX(%rsi), %rcx
        movq SCTX_RDX(%rsi), %rdx
        movq SCTX_RDI(%rsi), %rdi
        movq SCTX_RSI(%rsi), %rsi       /* %rsi must be restored last */
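        /*
         * Guest %rax, %rsp and %rip are not loaded from the register
         * context: in the AMD SVM architecture they live in the VMCB
         * state-save area and are loaded by VMRUN itself.  %rsi is
         * restored last because it is the base register for all of the
         * loads above.
         */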

        vmload %rax
        vmrun %rax
        vmsave %rax
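        /*
         * All three instructions take the VMCB physical address in %rax.
         * VMLOAD loads the extra guest state that VMRUN does not
         * (FS/GS/TR/LDTR, KernelGSBase, STAR/LSTAR/CSTAR/SFMASK and the
         * SYSENTER MSRs), VMRUN enters the guest and returns here on
         * #VMEXIT, and VMSAVE writes that same, still-guest, state back
         * into the VMCB.
         */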

        pop %rax                /* pop guest context pointer from the stack */

        /*
         * Save guest state.
         */
        movq %r8, SCTX_R8(%rax)
        movq %r9, SCTX_R9(%rax)
        movq %r10, SCTX_R10(%rax)
        movq %r11, SCTX_R11(%rax)
        movq %r12, SCTX_R12(%rax)
        movq %r13, SCTX_R13(%rax)
        movq %r14, SCTX_R14(%rax)
        movq %r15, SCTX_R15(%rax)
        movq %rbp, SCTX_RBP(%rax)
        movq %rbx, SCTX_RBX(%rax)
        movq %rcx, SCTX_RCX(%rax)
        movq %rdx, SCTX_RDX(%rax)
        movq %rdi, SCTX_RDI(%rax)
        movq %rsi, SCTX_RSI(%rax)

        /*
         * To prevent malicious branch target predictions from
         * affecting the host, overwrite all entries in the RSB upon
         * exiting a guest.
         */
        mov $16, %ecx           /* 16 iterations, two calls per loop */
        mov %rsp, %rax
0:      call 2f                 /* create an RSB entry. */
1:      pause
        call 1b                 /* capture rogue speculation. */
2:      call 2f                 /* create an RSB entry. */
1:      pause
        call 1b                 /* capture rogue speculation. */
2:      sub $1, %ecx
        jnz 0b
        mov %rax, %rsp
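        /*
         * Each call above pushes a return address into the processor's
         * return stack buffer, so 16 iterations of two calls replace up
         * to 32 stale, guest-primed RSB entries with the addresses of
         * the pause/call capture loops.  %rsp is stashed in %rax and
         * restored afterwards because every call also pushes a return
         * address onto the real stack.  The repeated "1:"/"2:" labels
         * are GAS local labels; "1b"/"2f" bind to the nearest definition
         * in the given direction.
         */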

        /* Restore host state */
        pop %r15
        pop %r14
        pop %r13
        pop %r12
        pop %rbx

        /* Restore %GS.base to point to the host's pcpu data */
        pop %rdx
        mov %edx, %eax
        shr $32, %rdx
        mov $MSR_GSBASE, %rcx
        wrmsr
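        /*
         * WRMSR writes the 64-bit value %edx:%eax to the MSR selected by
         * %ecx.  The pcpu pointer saved at entry is popped into %rdx and
         * split up: "mov %edx, %eax" keeps the low 32 bits (zero-extending
         * %rax), and "shr $32, %rdx" leaves the high 32 bits in %edx.
         * Writing it to MSR_GSBASE makes %gs-relative pcpu accesses work
         * again in the host.
         */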

        /*
         * Clobber the remaining registers, which still hold guest
         * contents, so they can't be misused.
         */
        xor %rbp, %rbp
        xor %rdi, %rdi
        xor %rsi, %rsi
        xor %r8, %r8
        xor %r9, %r9
        xor %r10, %r10
        xor %r11, %r11
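        /*
         * Only the registers that were loaded with guest values before
         * VMRUN and not overwritten since (%rbp, %rdi, %rsi, %r8-%r11)
         * need clearing here; the callee-saved registers were already
         * reloaded with host values from the stack above.
         */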

        VLEAVE
        ret
END(svm_launch)