GitHub Repository: awilliam/linux-vfio
Path: blob/master/arch/x86/include/asm/calling.h
/*

 x86 function call convention, 64-bit:
 -------------------------------------
  arguments           |  callee-saved      | extra caller-saved | return
 [callee-clobbered]   |                    | [callee-clobbered] |
 ---------------------------------------------------------------------------
 rdi rsi rdx rcx r8-9 | rbx rbp [*] r12-15 | r10-11             | rax, rdx [**]

 ( rsp is obviously invariant across normal function calls. (gcc can 'merge'
   functions when it sees tail-call optimization possibilities) rflags is
   clobbered. Leftover arguments are passed over the stack frame.)

 [*]  In the frame-pointers case rbp is fixed to the stack frame.

 [**] for struct return values wider than 64 bits the return convention is a
      bit more complex: up to 128 bits width we return small structures
      straight in rax, rdx. For structures larger than that (3 words or
      larger) the caller puts a pointer to an on-stack return struct
      [allocated in the caller's stack frame] into the first argument - i.e.
      into rdi. All other arguments shift up by one in this case.
      Fortunately this case is rare in the kernel.

 For 32-bit we have the following conventions - kernel is built with
 -mregparm=3 and -freg-struct-return:

 x86 function calling convention, 32-bit:
 ----------------------------------------
  arguments         | callee-saved        | extra caller-saved | return
 [callee-clobbered] |                     | [callee-clobbered] |
 -------------------------------------------------------------------------
 eax edx ecx        | ebx edi esi ebp [*] | <none>             | eax, edx [**]

 ( here too esp is obviously invariant across normal function calls. eflags
   is clobbered. Leftover arguments are passed over the stack frame. )

 [*]  In the frame-pointers case ebp is fixed to the stack frame.

 [**] We build with -freg-struct-return, which on 32-bit means similar
      semantics as on 64-bit: edx can be used for a second return value
      (i.e. covering integer and structure sizes up to 64 bits) - after that
      it gets more complex and more expensive: 3-word or larger struct returns
      get done in the caller's frame and the pointer to the return struct goes
      into regparm0, i.e. eax - the other arguments shift up and the
      function's register parameters degenerate to regparm=2 in essence.

*/
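
/*
 * Worked example (illustrative only, not part of the original header):
 * for a hypothetical call
 *
 *	long fn(long a, long b, long c, long d, long e, long f);
 *
 * the 64-bit convention above places a..f in rdi, rsi, rdx, rcx, r8, r9;
 * a seventh argument would be passed on the stack, and the result comes
 * back in rax (rdx:rax for a return value up to 128 bits wide).
 */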

/*
 * 64-bit system call stack frame layout defines and helpers, for
 * assembly code (note that the seemingly unnecessary parentheses
 * are to prevent cpp from inserting spaces in expressions that get
 * passed to macros):
 */

#define R15		(0)
#define R14		(8)
#define R13		(16)
#define R12		(24)
#define RBP		(32)
#define RBX		(40)

/* arguments: interrupts/non tracing syscalls only save up to here: */
#define R11		(48)
#define R10		(56)
#define R9		(64)
#define R8		(72)
#define RAX		(80)
#define RCX		(88)
#define RDX		(96)
#define RSI		(104)
#define RDI		(112)
#define ORIG_RAX	(120)	/* + error_code */
/* end of arguments */

/* cpu exception frame or undefined in case of fast syscall: */
#define RIP		(128)
#define CS		(136)
#define EFLAGS		(144)
#define RSP		(152)
#define SS		(160)

#define ARGOFFSET	R11
#define SWFRAME		ORIG_RAX
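
/*
 * Usage sketch (assumed, derived from the offsets above): once a full
 * frame has been built (SAVE_ARGS followed by SAVE_REST, defined below),
 * %rsp points at the R15 slot and the offsets index the frame directly:
 *
 *	movq	RIP(%rsp), %rcx			# saved user rip
 *
 * If only the argument part has been saved, the frame starts at the R11
 * slot, so ARGOFFSET has to be subtracted:
 *
 *	movq	RIP-ARGOFFSET(%rsp), %rcx
 */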

	.macro SAVE_ARGS addskip=0, norcx=0, nor891011=0
	subq  $9*8+\addskip, %rsp
	CFI_ADJUST_CFA_OFFSET	9*8+\addskip
	movq  %rdi, 8*8(%rsp)
	CFI_REL_OFFSET	rdi, 8*8
	movq  %rsi, 7*8(%rsp)
	CFI_REL_OFFSET	rsi, 7*8
	movq  %rdx, 6*8(%rsp)
	CFI_REL_OFFSET	rdx, 6*8
	.if \norcx
	.else
	movq  %rcx, 5*8(%rsp)
	CFI_REL_OFFSET	rcx, 5*8
	.endif
	movq  %rax, 4*8(%rsp)
	CFI_REL_OFFSET	rax, 4*8
	.if \nor891011
	.else
	movq  %r8, 3*8(%rsp)
	CFI_REL_OFFSET	r8, 3*8
	movq  %r9, 2*8(%rsp)
	CFI_REL_OFFSET	r9, 2*8
	movq  %r10, 1*8(%rsp)
	CFI_REL_OFFSET	r10, 1*8
	movq  %r11, (%rsp)
	CFI_REL_OFFSET	r11, 0*8
	.endif
	.endm
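
/*
 * Invocation sketch (assumed usage, not from the original file):
 *
 *	SAVE_ARGS			# save rdi..r11 (9*8 bytes)
 *	SAVE_ARGS 16			# also reserve 16 extra bytes below the frame
 *	SAVE_ARGS 0, 1			# norcx=1: leave the rcx slot unwritten
 *
 * The matching teardown is RESTORE_ARGS below.
 */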

#define ARG_SKIP	(9*8)

	.macro RESTORE_ARGS skiprax=0, addskip=0, skiprcx=0, skipr11=0, \
			    skipr8910=0, skiprdx=0
	.if \skipr11
	.else
	movq (%rsp), %r11
	CFI_RESTORE r11
	.endif
	.if \skipr8910
	.else
	movq 1*8(%rsp), %r10
	CFI_RESTORE r10
	movq 2*8(%rsp), %r9
	CFI_RESTORE r9
	movq 3*8(%rsp), %r8
	CFI_RESTORE r8
	.endif
	.if \skiprax
	.else
	movq 4*8(%rsp), %rax
	CFI_RESTORE rax
	.endif
	.if \skiprcx
	.else
	movq 5*8(%rsp), %rcx
	CFI_RESTORE rcx
	.endif
	.if \skiprdx
	.else
	movq 6*8(%rsp), %rdx
	CFI_RESTORE rdx
	.endif
	movq 7*8(%rsp), %rsi
	CFI_RESTORE rsi
	movq 8*8(%rsp), %rdi
	CFI_RESTORE rdi
	.if ARG_SKIP+\addskip > 0
	addq $ARG_SKIP+\addskip, %rsp
	CFI_ADJUST_CFA_OFFSET	-(ARG_SKIP+\addskip)
	.endif
	.endm
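
/*
 * Invocation sketch (assumed usage): RESTORE_ARGS undoes SAVE_ARGS, e.g.
 *
 *	RESTORE_ARGS			# reload rdi..r11, pop the 9*8 bytes
 *	RESTORE_ARGS 1			# skiprax=1: leave rax (a return value) alone
 *
 * Any addskip that was passed to SAVE_ARGS has to be passed here as well,
 * so that the final addq removes the same amount of stack.
 */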

	.macro LOAD_ARGS offset, skiprax=0
	movq \offset(%rsp), %r11
	movq \offset+8(%rsp), %r10
	movq \offset+16(%rsp), %r9
	movq \offset+24(%rsp), %r8
	movq \offset+40(%rsp), %rcx
	movq \offset+48(%rsp), %rdx
	movq \offset+56(%rsp), %rsi
	movq \offset+64(%rsp), %rdi
	.if \skiprax
	.else
	movq \offset+72(%rsp), %rax
	.endif
	.endm
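
/*
 * Note (derived from the offsets above): with \offset = ARGOFFSET and a
 * full frame on the stack, \offset..\offset+64 are the r11..rdi slots,
 * and \offset+72 lines up with ORIG_RAX (120), so unless skiprax is set
 * the reloaded rax is the saved syscall number rather than the RAX slot.
 *
 *	LOAD_ARGS ARGOFFSET		# illustrative invocation
 */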

#define REST_SKIP	(6*8)

	.macro SAVE_REST
	subq $REST_SKIP, %rsp
	CFI_ADJUST_CFA_OFFSET	REST_SKIP
	movq %rbx, 5*8(%rsp)
	CFI_REL_OFFSET	rbx, 5*8
	movq %rbp, 4*8(%rsp)
	CFI_REL_OFFSET	rbp, 4*8
	movq %r12, 3*8(%rsp)
	CFI_REL_OFFSET	r12, 3*8
	movq %r13, 2*8(%rsp)
	CFI_REL_OFFSET	r13, 2*8
	movq %r14, 1*8(%rsp)
	CFI_REL_OFFSET	r14, 1*8
	movq %r15, (%rsp)
	CFI_REL_OFFSET	r15, 0*8
	.endm

	.macro RESTORE_REST
	movq (%rsp), %r15
	CFI_RESTORE r15
	movq 1*8(%rsp), %r14
	CFI_RESTORE r14
	movq 2*8(%rsp), %r13
	CFI_RESTORE r13
	movq 3*8(%rsp), %r12
	CFI_RESTORE r12
	movq 4*8(%rsp), %rbp
	CFI_RESTORE rbp
	movq 5*8(%rsp), %rbx
	CFI_RESTORE rbx
	addq $REST_SKIP, %rsp
	CFI_ADJUST_CFA_OFFSET	-(REST_SKIP)
	.endm
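
/*
 * Pairing sketch (assumed usage): SAVE_REST extends a SAVE_ARGS frame by
 * the callee-saved registers, after which the offsets above (R15 upward)
 * apply directly to %rsp; RESTORE_REST is its exact inverse:
 *
 *	SAVE_ARGS
 *	SAVE_REST
 *	...
 *	RESTORE_REST
 *	RESTORE_ARGS
 */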

	.macro SAVE_ALL
	SAVE_ARGS
	SAVE_REST
	.endm

	.macro RESTORE_ALL addskip=0
	RESTORE_REST
	RESTORE_ARGS 0, \addskip
	.endm
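
/*
 * Illustrative pattern (assumed, not from the original file):
 *
 *	SAVE_ALL			# build the full register frame
 *	movq	ORIG_RAX(%rsp), %rdi	# offsets above now index %rsp directly
 *	call	do_something		# hypothetical C helper
 *	RESTORE_ALL
 */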

	.macro icebp
	.byte 0xf1
	.endm
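
/*
 * Note: 0xf1 is the undocumented ICEBP/INT1 opcode, which raises a debug
 * trap (vector 1); it is emitted as a raw .byte above presumably because
 * not all assemblers know the mnemonic.
 */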