/* thirdparty/pcre2/deps/sljit/sljit_src/sljitNativeX86_64.c */
1
/*
 *    Stack-less Just-In-Time compiler
 *
 *    Copyright Zoltan Herczeg ([email protected]). All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification, are
 * permitted provided that the following conditions are met:
 *
 *   1. Redistributions of source code must retain the above copyright notice, this list of
 *      conditions and the following disclaimer.
 *
 *   2. Redistributions in binary form must reproduce the above copyright notice, this list
 *      of conditions and the following disclaimer in the documentation and/or other materials
 *      provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
 * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
/* x86 64-bit arch dependent functions. */

/* --------------------------------------------------------------------- */
/* Operators */
/* --------------------------------------------------------------------- */
/* Emits a full 64 bit "mov reg, imm64" (REX + B8+r + imm64) instruction
   into the compiler buffer. Returns SLJIT_SUCCESS or an allocation error
   (via FAIL_IF). */
static sljit_s32 emit_load_imm64(struct sljit_compiler *compiler, sljit_s32 reg, sljit_sw imm)
{
	sljit_u8 *inst;

	inst = (sljit_u8*)ensure_buf(compiler, 1 + 2 + sizeof(sljit_sw));
	FAIL_IF(!inst);
	INC_SIZE(2 + sizeof(sljit_sw));
	/* REX.B extends the opcode register field for r8-r15. */
	inst[0] = REX_W | ((reg_map[reg] <= 7) ? 0 : REX_B);
	inst[1] = U8(MOV_r_i32 | reg_lmap[reg]);
	sljit_unaligned_store_sw(inst + 2, imm);
	return SLJIT_SUCCESS;
}
45
46
/* Emits a one byte opcode (preceded by an optional REX prefix byte)
   followed by a 32 bit immediate. */
static sljit_s32 emit_do_imm32(struct sljit_compiler *compiler, sljit_u8 rex, sljit_u8 opcode, sljit_sw imm)
{
	sljit_u8 *inst;
	sljit_uw length = (rex ? 2 : 1) + sizeof(sljit_s32);

	inst = (sljit_u8*)ensure_buf(compiler, 1 + length);
	FAIL_IF(!inst);
	INC_SIZE(length);
	if (rex)
		*inst++ = rex;
	*inst++ = opcode;
	sljit_unaligned_store_s32(inst, (sljit_s32)imm);
	return SLJIT_SUCCESS;
}
60
61
/* Assembles a single x86-64 instruction into the compiler buffer:
   optional F2/F3/66 legacy prefix, optional REX prefix, 'size' opcode
   bytes (left for the caller to fill through the returned pointer),
   mod r/m byte, optional SIB byte, optional displacement and optional
   immediate. The low 4 bits of 'size' hold the opcode length; the upper
   bits carry EX86_* flags. Returns a pointer to the opcode bytes, or
   NULL on allocation failure. */
static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw size,
	/* The register or immediate operand. */
	sljit_s32 a, sljit_sw imma,
	/* The general operand (not immediate). */
	sljit_s32 b, sljit_sw immb)
{
	sljit_u8 *inst;
	sljit_u8 *buf_ptr;
	sljit_u8 rex = 0;
	sljit_u8 reg_lmap_b;
	sljit_uw flags = size;
	sljit_uw inst_size;

	/* The immediate operand must be 32 bit. */
	SLJIT_ASSERT(a != SLJIT_IMM || compiler->mode32 || IS_HALFWORD(imma));
	/* Both cannot be switched on. */
	SLJIT_ASSERT((flags & (EX86_BIN_INS | EX86_SHIFT_INS)) != (EX86_BIN_INS | EX86_SHIFT_INS));
	/* Size flags not allowed for typed instructions. */
	SLJIT_ASSERT(!(flags & (EX86_BIN_INS | EX86_SHIFT_INS)) || (flags & (EX86_BYTE_ARG | EX86_HALF_ARG)) == 0);
	/* Both size flags cannot be switched on. */
	SLJIT_ASSERT((flags & (EX86_BYTE_ARG | EX86_HALF_ARG)) != (EX86_BYTE_ARG | EX86_HALF_ARG));
	/* SSE2 and immediate is not possible. */
	SLJIT_ASSERT(a != SLJIT_IMM || !(flags & EX86_SSE2));
	/* At most one of the F2/F3/66 prefixes may be requested (power-of-two check). */
	SLJIT_ASSERT(((flags & (EX86_PREF_F2 | EX86_PREF_F3 | EX86_PREF_66))
			& ((flags & (EX86_PREF_F2 | EX86_PREF_F3 | EX86_PREF_66)) - 1)) == 0);
	SLJIT_ASSERT((flags & (EX86_VEX_EXT | EX86_REX)) != EX86_VEX_EXT);

	size &= 0xf;
	/* The mod r/m byte is always present. */
	inst_size = size + 1;

	if (!compiler->mode32 && !(flags & EX86_NO_REXW))
		rex |= REX_W;
	else if (flags & EX86_REX)
		rex |= REX;

	if (flags & (EX86_PREF_F2 | EX86_PREF_F3 | EX86_PREF_66))
		inst_size++;

	/* Calculate size of b. */
	if (b & SLJIT_MEM) {
		/* A displacement outside the signed 32 bit range cannot be
		   encoded: load it into TMP_REG2 and address through that. */
		if (!(b & OFFS_REG_MASK) && NOT_HALFWORD(immb)) {
			PTR_FAIL_IF(emit_load_imm64(compiler, TMP_REG2, immb));
			immb = 0;
			if (b & REG_MASK)
				b |= TO_OFFS_REG(TMP_REG2);
			else
				b |= TMP_REG2;
		}

		if (!(b & REG_MASK))
			inst_size += 1 + sizeof(sljit_s32); /* SIB byte required to avoid RIP based addressing. */
		else {
			if (immb != 0 && !(b & OFFS_REG_MASK)) {
				/* Immediate operand. */
				if (immb <= 127 && immb >= -128)
					inst_size += sizeof(sljit_s8);
				else
					inst_size += sizeof(sljit_s32);
			} else if (reg_lmap[b & REG_MASK] == 5) {
				/* Base encoding 5 (rbp/r13) always needs a displacement.
				   Swap registers if possible. */
				if ((b & OFFS_REG_MASK) && (immb & 0x3) == 0 && reg_lmap[OFFS_REG(b)] != 5)
					b = SLJIT_MEM | OFFS_REG(b) | TO_OFFS_REG(b & REG_MASK);
				else
					inst_size += sizeof(sljit_s8);
			}

			if (reg_map[b & REG_MASK] >= 8)
				rex |= REX_B;

			/* Base encoding 4 (rsp/r12) requires a SIB byte. */
			if (reg_lmap[b & REG_MASK] == 4 && !(b & OFFS_REG_MASK))
				b |= TO_OFFS_REG(SLJIT_SP);

			if (b & OFFS_REG_MASK) {
				inst_size += 1; /* SIB byte. */
				if (reg_map[OFFS_REG(b)] >= 8)
					rex |= REX_X;
			}
		}
	} else if (!(flags & EX86_SSE2_OP2)) {
		if (reg_map[b] >= 8)
			rex |= REX_B;
	} else if (freg_map[b] >= 8)
		rex |= REX_B;

	if ((flags & EX86_VEX_EXT) && (rex & 0x3)) {
		SLJIT_ASSERT(size == 2);
		size++;
		inst_size++;
	}

	/* Calculate size of the immediate operand 'a'. */
	if (a == SLJIT_IMM) {
		if (flags & EX86_BIN_INS) {
			if (imma <= 127 && imma >= -128) {
				inst_size += 1;
				flags |= EX86_BYTE_ARG;
			} else
				inst_size += 4;
		} else if (flags & EX86_SHIFT_INS) {
			SLJIT_ASSERT(imma <= (compiler->mode32 ? 0x1f : 0x3f));
			if (imma != 1) {
				/* Shift by 1 uses a dedicated opcode with no immediate. */
				inst_size++;
				flags |= EX86_BYTE_ARG;
			}
		} else if (flags & EX86_BYTE_ARG)
			inst_size++;
		else if (flags & EX86_HALF_ARG)
			inst_size += sizeof(short);
		else
			inst_size += sizeof(sljit_s32);
	} else {
		SLJIT_ASSERT(!(flags & EX86_SHIFT_INS) || a == SLJIT_PREF_SHIFT_REG);
		/* reg_map[SLJIT_PREF_SHIFT_REG] is less than 8. */
		if (!(flags & EX86_SSE2_OP1)) {
			if (reg_map[a] >= 8)
				rex |= REX_R;
		}
		else if (freg_map[a] >= 8)
			rex |= REX_R;
	}

	if (rex)
		inst_size++;

	inst = (sljit_u8*)ensure_buf(compiler, 1 + inst_size);
	PTR_FAIL_IF(!inst);

	/* Encoding prefixes. */
	INC_SIZE(inst_size);
	if (flags & EX86_PREF_F2)
		*inst++ = 0xf2;
	else if (flags & EX86_PREF_F3)
		*inst++ = 0xf3;
	else if (flags & EX86_PREF_66)
		*inst++ = 0x66;

	/* Rex is always the last prefix. */
	if (rex)
		*inst++ = rex;

	buf_ptr = inst + size;

	/* Encode mod/rm byte. */
	if (!(flags & EX86_SHIFT_INS)) {
		if ((flags & EX86_BIN_INS) && a == SLJIT_IMM)
			*inst = (flags & EX86_BYTE_ARG) ? GROUP_BINARY_83 : GROUP_BINARY_81;

		if (a == SLJIT_IMM)
			*buf_ptr = 0;
		else if (!(flags & EX86_SSE2_OP1))
			*buf_ptr = U8(reg_lmap[a] << 3);
		else
			*buf_ptr = U8(freg_lmap[a] << 3);
	} else {
		if (a == SLJIT_IMM) {
			if (imma == 1)
				*inst = GROUP_SHIFT_1;
			else
				*inst = GROUP_SHIFT_N;
		} else
			*inst = GROUP_SHIFT_CL;
		*buf_ptr = 0;
	}

	if (!(b & SLJIT_MEM)) {
		/* Register operand: mod = 11. */
		*buf_ptr = U8(*buf_ptr | MOD_REG | (!(flags & EX86_SSE2_OP2) ? reg_lmap[b] : freg_lmap[b]));
		buf_ptr++;
	} else if (b & REG_MASK) {
		reg_lmap_b = reg_lmap[b & REG_MASK];

		if (!(b & OFFS_REG_MASK) || (b & OFFS_REG_MASK) == TO_OFFS_REG(SLJIT_SP)) {
			if (immb != 0 || reg_lmap_b == 5) {
				if (immb <= 127 && immb >= -128)
					*buf_ptr |= 0x40; /* mod = 01: 8 bit displacement follows. */
				else
					*buf_ptr |= 0x80; /* mod = 10: 32 bit displacement follows. */
			}

			if (!(b & OFFS_REG_MASK))
				*buf_ptr++ |= reg_lmap_b;
			else {
				buf_ptr[0] |= 0x04; /* r/m = 100: SIB byte follows. */
				buf_ptr[1] = U8(reg_lmap_b | (reg_lmap[OFFS_REG(b)] << 3));
				buf_ptr += 2;
			}

			if (immb != 0 || reg_lmap_b == 5) {
				if (immb <= 127 && immb >= -128)
					*buf_ptr++ = U8(immb); /* 8 bit displacement. */
				else {
					sljit_unaligned_store_s32(buf_ptr, (sljit_s32)immb); /* 32 bit displacement. */
					buf_ptr += sizeof(sljit_s32);
				}
			}
		} else {
			/* Base + scaled index: immb holds the scale shift (0-3). */
			if (reg_lmap_b == 5)
				*buf_ptr |= 0x40;

			buf_ptr[0] |= 0x04;
			buf_ptr[1] = U8(reg_lmap_b | (reg_lmap[OFFS_REG(b)] << 3) | (immb << 6));
			buf_ptr += 2;

			if (reg_lmap_b == 5)
				*buf_ptr++ = 0; /* Zero disp8: base encoding 5 needs a displacement. */
		}
	} else {
		/* Absolute 32 bit address: SIB with no base/index (0x25). */
		buf_ptr[0] |= 0x04;
		buf_ptr[1] = 0x25;
		buf_ptr += 2;
		sljit_unaligned_store_s32(buf_ptr, (sljit_s32)immb); /* 32 bit displacement. */
		buf_ptr += sizeof(sljit_s32);
	}

	if (a == SLJIT_IMM) {
		if (flags & EX86_BYTE_ARG)
			*buf_ptr = U8(imma);
		else if (flags & EX86_HALF_ARG)
			sljit_unaligned_store_s16(buf_ptr, (sljit_s16)imma);
		else if (!(flags & EX86_SHIFT_INS))
			sljit_unaligned_store_s32(buf_ptr, (sljit_s32)imma);
	}

	return inst;
}
285
286
/* Emits a VEX encoded instruction. The instruction is first emitted with
   a forced REX prefix by emit_x86_instruction, then the REX byte is
   rewritten in place into a two byte (0xc5) or three byte (0xc4) VEX
   prefix. 'v' is the extra register operand carried in the vvvv field. */
static sljit_s32 emit_vex_instruction(struct sljit_compiler *compiler, sljit_uw op,
	/* The first and second register operand. */
	sljit_s32 a, sljit_s32 v,
	/* The general operand (not immediate). */
	sljit_s32 b, sljit_sw immb)
{
	sljit_u8 *inst;
	sljit_u8 vex = 0;
	sljit_u8 vex_m = 0;
	sljit_uw size;

	/* At most one of the F2/F3/66 prefixes may be requested. */
	SLJIT_ASSERT(((op & (EX86_PREF_F2 | EX86_PREF_F3 | EX86_PREF_66))
			& ((op & (EX86_PREF_F2 | EX86_PREF_F3 | EX86_PREF_66)) - 1)) == 0);

	/* Force a REX prefix so it can be rewritten into VEX below. */
	op |= EX86_REX;

	/* Opcode map selector (VEX mmmmm field). */
	if (op & VEX_OP_0F38)
		vex_m = 0x2;
	else if (op & VEX_OP_0F3A)
		vex_m = 0x3;

	if ((op & VEX_W) || ((op & VEX_AUTO_W) && !compiler->mode32)) {
		if (vex_m == 0)
			vex_m = 0x1;

		vex |= 0x80; /* VEX.W bit. */
	}

	/* Implied legacy prefix (VEX pp field). */
	if (op & EX86_PREF_66)
		vex |= 0x1;
	else if (op & EX86_PREF_F2)
		vex |= 0x3;
	else if (op & EX86_PREF_F3)
		vex |= 0x2;

	op &= ~(EX86_PREF_66 | EX86_PREF_F2 | EX86_PREF_F3);

	if (op & VEX_256)
		vex |= 0x4; /* VEX.L bit: 256 bit operation. */

	/* vvvv field stores the extra operand register in inverted form. */
	vex = U8(vex | ((((op & VEX_SSE2_OPV) ? freg_map[v] : reg_map[v]) ^ 0xf) << 3));

	size = op & ~(sljit_uw)0xff;
	size |= (vex_m == 0) ? (EX86_VEX_EXT | 2) : 3;

	inst = emit_x86_instruction(compiler, size, a, 0, b, immb);
	FAIL_IF(!inst);

	SLJIT_ASSERT((inst[-1] & 0xf0) == REX);

	/* If X or B is present in REX prefix. */
	if (vex_m == 0 && inst[-1] & 0x3)
		vex_m = 0x1;

	if (vex_m == 0) {
		/* Two byte (0xc5) VEX prefix is sufficient. */
		vex |= U8(((inst[-1] >> 2) ^ 0x1) << 7); /* Inverted R bit. */

		inst[-1] = 0xc5;
		inst[0] = vex;
		inst[1] = U8(op);
		return SLJIT_SUCCESS;
	}

	/* Three byte (0xc4) VEX prefix. */
	vex_m |= U8((inst[-1] ^ 0x7) << 5); /* Inverted R, X, B bits. */
	inst[-1] = 0xc4;
	inst[0] = vex_m;
	inst[1] = vex;
	inst[2] = U8(op);
	return SLJIT_SUCCESS;
}
/* --------------------------------------------------------------------- */
/* Enter / return */
/* --------------------------------------------------------------------- */
/* Emits an indirect (far) jump or call: loads the target into TMP_REG2
   with a mov immediate, then uses jmp/call rm. Conditional jumps emit a
   short, inverted conditional jump first that skips the whole sequence.
   Returns the code pointer after the emitted bytes. */
static sljit_u8* detect_far_jump_type(struct sljit_jump *jump, sljit_u8 *code_ptr)
{
	sljit_uw type = jump->flags >> TYPE_SHIFT;

	/* A fixed, non-rewritable target below 4GB can use a shorter 32 bit mov. */
	int short_addr = !(jump->flags & SLJIT_REWRITABLE_JUMP) && (jump->flags & JUMP_ADDR) && (jump->u.target <= 0xffffffff);

	/* The relative jump below specialized for this case. */
	SLJIT_ASSERT(reg_map[TMP_REG2] >= 8 && TMP_REG2 != SLJIT_TMP_DEST_REG);

	if (type < SLJIT_JUMP) {
		/* Invert type. */
		code_ptr[0] = U8(get_jump_code(type ^ 0x1) - 0x10);
		/* Skip the mov (6 or 10 bytes) plus the 3 byte indirect jump. */
		code_ptr[1] = short_addr ? (6 + 3) : (10 + 3);
		code_ptr += 2;
	}

	/* mov TMP_REG2, imm32 / imm64. */
	code_ptr[0] = short_addr ? REX_B : (REX_W | REX_B);
	code_ptr[1] = MOV_r_i32 | reg_lmap[TMP_REG2];
	code_ptr += 2;
	jump->addr = (sljit_uw)code_ptr;

	if (!(jump->flags & JUMP_ADDR))
		jump->flags |= PATCH_MD; /* Target filled in later by the patcher. */
	else if (short_addr)
		sljit_unaligned_store_s32(code_ptr, (sljit_s32)jump->u.target);
	else
		sljit_unaligned_store_sw(code_ptr, (sljit_sw)jump->u.target);

	code_ptr += short_addr ? sizeof(sljit_s32) : sizeof(sljit_sw);

	/* jmp/call TMP_REG2. */
	code_ptr[0] = REX_B;
	code_ptr[1] = GROUP_FF;
	code_ptr[2] = U8(MOD_REG | (type >= SLJIT_FAST_CALL ? CALL_rm : JMP_rm) | reg_lmap[TMP_REG2]);

	return code_ptr + 3;
}
397
398
/* Shrinks an address-loading "mov reg, imm64" in place when the target
   address fits in fewer bytes: a RIP relative lea (PATCH_MW) when the
   target is within signed 32 bit range of the instruction, or a 32 bit
   mov when the absolute address fits in 32 bits. Returns the adjusted
   end-of-instruction pointer. */
static sljit_u8* generate_mov_addr_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_u8 *code, sljit_sw executable_offset)
{
	sljit_uw addr;
	sljit_sw diff;
	SLJIT_UNUSED_ARG(executable_offset);

	SLJIT_ASSERT(((jump->flags >> JUMP_SIZE_SHIFT) & 0x1f) <= 10);
	if (jump->flags & JUMP_ADDR)
		addr = jump->u.target;
	else
		addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code, executable_offset) + jump->u.label->size;

	if (addr > 0xffffffffl) {
		/* The address needs more than 32 bits: try a RIP relative lea. */
		diff = (sljit_sw)addr - (sljit_sw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);

		if (diff <= HALFWORD_MAX && diff >= HALFWORD_MIN) {
			SLJIT_ASSERT(((jump->flags >> JUMP_SIZE_SHIFT) & 0x1f) >= 7);
			code_ptr -= SSIZE_OF(s32) - 1;

			SLJIT_ASSERT((code_ptr[-3 - SSIZE_OF(s32)] & 0xf8) == REX_W);
			SLJIT_ASSERT((code_ptr[-2 - SSIZE_OF(s32)] & 0xf8) == MOV_r_i32);

			/* Rewrite "mov r, imm64" into "lea r, [rip + disp32]". */
			code_ptr[-3 - SSIZE_OF(s32)] = U8(REX_W | ((code_ptr[-3 - SSIZE_OF(s32)] & 0x1) << 2));
			code_ptr[-1 - SSIZE_OF(s32)] = U8(((code_ptr[-2 - SSIZE_OF(s32)] & 0x7) << 3) | 0x5);
			code_ptr[-2 - SSIZE_OF(s32)] = LEA_r_m;

			jump->flags |= PATCH_MW;
			return code_ptr;
		}

		/* Keep the full 64 bit mov; the immediate is patched later. */
		jump->flags |= PATCH_MD;
		return code_ptr;
	}

	/* The address fits in 32 bits: shrink to a 32 bit mov. */
	code_ptr -= 2 + sizeof(sljit_uw);

	SLJIT_ASSERT((code_ptr[0] & 0xf8) == REX_W);
	SLJIT_ASSERT((code_ptr[1] & 0xf8) == MOV_r_i32);

	if ((code_ptr[0] & 0x07) != 0) {
		SLJIT_ASSERT(((jump->flags >> JUMP_SIZE_SHIFT) & 0x1f) >= 6);
		/* Keep the REX prefix but clear REX.W. */
		code_ptr[0] = U8(code_ptr[0] & ~0x08);
		code_ptr += 2 + sizeof(sljit_s32);
	} else {
		SLJIT_ASSERT(((jump->flags >> JUMP_SIZE_SHIFT) & 0x1f) >= 5);
		/* No REX prefix needed: drop it by shifting the opcode down. */
		code_ptr[0] = code_ptr[1];
		code_ptr += 1 + sizeof(sljit_s32);
	}

	return code_ptr;
}
449
450
#ifdef _WIN64
451
typedef struct {
452
sljit_sw regs[2];
453
} sljit_sse2_reg;
454
#endif /* _WIN64 */
455
456
/* Emits the function prologue: pushes the saved (and saved scratch)
   registers, moves incoming arguments into their sljit registers,
   allocates the 16 byte aligned local area (with page-by-page stack
   probing on Windows) and, on Windows, spills the preserved SSE2
   registers above the locals. */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
	sljit_s32 options, sljit_s32 arg_types,
	sljit_s32 scratches, sljit_s32 saveds, sljit_s32 local_size)
{
	sljit_uw size;
	sljit_s32 word_arg_count = 0;
	sljit_s32 saved_arg_count = SLJIT_KEPT_SAVEDS_COUNT(options);
	sljit_s32 saved_regs_size, tmp, i;
#ifdef _WIN64
	sljit_s32 fscratches;
	sljit_s32 fsaveds;
	sljit_s32 saved_float_regs_size;
	sljit_s32 saved_float_regs_offset = 0;
	sljit_s32 float_arg_count = 0;
#endif /* _WIN64 */
	sljit_u8 *inst;

	CHECK_ERROR();
	CHECK(check_sljit_emit_enter(compiler, options, arg_types, scratches, saveds, local_size));
	set_emit_enter(compiler, options, arg_types, scratches, saveds, local_size);

	scratches = ENTER_GET_REGS(scratches);
#ifdef _WIN64
	saveds = ENTER_GET_REGS(saveds);
	fscratches = compiler->fscratches;
	fsaveds = compiler->fsaveds;
#endif /* _WIN64 */

	/* With register arguments no argument shuffling is needed. */
	if (options & SLJIT_ENTER_REG_ARG)
		arg_types = 0;

	/* Emit ENDBR64 at function entry if needed. */
	FAIL_IF(emit_endbranch(compiler));

	compiler->mode32 = 0;

	/* Including the return address saved by the call instruction. */
	saved_regs_size = GET_SAVED_REGISTERS_SIZE(scratches, saveds - saved_arg_count, 1);

	/* Push the saved registers (skipping the kept ones). */
	tmp = SLJIT_S0 - saveds;
	for (i = SLJIT_S0 - saved_arg_count; i > tmp; i--) {
		size = reg_map[i] >= 8 ? 2 : 1;
		inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
		FAIL_IF(!inst);
		INC_SIZE(size);
		if (reg_map[i] >= 8)
			*inst++ = REX_B; /* r8-r15 need a REX.B prefix. */
		PUSH_REG(reg_lmap[i]);
	}

	/* Push the callee-saved scratch registers. */
	for (i = scratches; i >= SLJIT_FIRST_SAVED_REG; i--) {
		size = reg_map[i] >= 8 ? 2 : 1;
		inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
		FAIL_IF(!inst);
		INC_SIZE(size);
		if (reg_map[i] >= 8)
			*inst++ = REX_B;
		PUSH_REG(reg_lmap[i]);
	}

#ifdef _WIN64
	local_size += SLJIT_LOCALS_OFFSET;
	saved_float_regs_size = GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sse2_reg);

	if (saved_float_regs_size > 0) {
		/* SSE2 spill area starts at the next 16 byte boundary. */
		saved_float_regs_offset = ((local_size + 0xf) & ~0xf);
		local_size = saved_float_regs_offset + saved_float_regs_size;
	}
#else /* !_WIN64 */
	SLJIT_ASSERT(SLJIT_LOCALS_OFFSET == 0);
#endif /* _WIN64 */

	/* Skip the return type slot. */
	arg_types >>= SLJIT_ARG_SHIFT;

	while (arg_types > 0) {
		if ((arg_types & SLJIT_ARG_MASK) < SLJIT_ARG_TYPE_F64) {
			tmp = 0;
			/* Select the incoming (ABI) register for this argument slot. */
#ifndef _WIN64
			switch (word_arg_count) {
			case 0:
				tmp = SLJIT_R2;
				break;
			case 1:
				tmp = SLJIT_R1;
				break;
			case 2:
				tmp = TMP_REG1;
				break;
			default:
				tmp = SLJIT_R3;
				break;
			}
#else /* !_WIN64 */
			switch (word_arg_count + float_arg_count) {
			case 0:
				tmp = SLJIT_R3;
				break;
			case 1:
				tmp = SLJIT_R1;
				break;
			case 2:
				tmp = SLJIT_R2;
				break;
			default:
				tmp = TMP_REG1;
				break;
			}
#endif /* _WIN64 */
			if (arg_types & SLJIT_ARG_TYPE_SCRATCH_REG) {
				if (tmp != SLJIT_R0 + word_arg_count)
					EMIT_MOV(compiler, SLJIT_R0 + word_arg_count, 0, tmp, 0);
			} else {
				EMIT_MOV(compiler, SLJIT_S0 - saved_arg_count, 0, tmp, 0);
				saved_arg_count++;
			}
			word_arg_count++;
		} else {
#ifdef _WIN64
			SLJIT_COMPILE_ASSERT(SLJIT_FR0 == 1, float_register_index_start);
			float_arg_count++;
			/* NOTE(review): condition is equivalent to word_arg_count != 0,
			   i.e. the argument is not already in its target register. */
			if (float_arg_count != float_arg_count + word_arg_count)
				FAIL_IF(emit_sse2_load(compiler, (arg_types & SLJIT_ARG_MASK) == SLJIT_ARG_TYPE_F32,
					float_arg_count, float_arg_count + word_arg_count, 0));
#endif /* _WIN64 */
		}
		arg_types >>= SLJIT_ARG_SHIFT;
	}

	/* Round so that (locals + pushed registers) stays 16 byte aligned. */
	local_size = ((local_size + saved_regs_size + 0xf) & ~0xf) - saved_regs_size;
	compiler->local_size = local_size;

#ifdef _WIN64
	if (local_size > 0) {
		/* Touch each 4K page below the stack pointer before allocating,
		   so the guard page mechanism can grow the stack (stack probing). */
		if (local_size <= 4 * 4096) {
			if (local_size > 4096)
				EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), -4096);
			if (local_size > 2 * 4096)
				EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), -4096 * 2);
			if (local_size > 3 * 4096)
				EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), -4096 * 3);
		}
		else {
			/* Probe loop: one page per iteration, TMP_REG1 is the counter. */
			EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, local_size >> 12);

			EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_MEM1(SLJIT_SP), -4096);
			BINARY_IMM32(SUB, 4096, SLJIT_SP, 0);
			BINARY_IMM32(SUB, 1, TMP_REG1, 0);

			inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
			FAIL_IF(!inst);

			INC_SIZE(2);
			inst[0] = JNE_i8;
			inst[1] = (sljit_u8)-21; /* Jump back to the start of the probe loop. */
			local_size &= 0xfff;
		}

		if (local_size > 0)
			EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), -local_size);
	}
#endif /* _WIN64 */

	/* Allocate the local area. */
	if (local_size > 0)
		BINARY_IMM32(SUB, local_size, SLJIT_SP, 0);

#ifdef _WIN64
	if (saved_float_regs_size > 0) {
		compiler->mode32 = 1;

		/* Spill the preserved SSE2 registers into the reserved area. */
		tmp = SLJIT_FS0 - fsaveds;
		for (i = SLJIT_FS0; i > tmp; i--) {
			FAIL_IF(emit_groupf(compiler, MOVAPS_xm_x | EX86_SSE2, i, SLJIT_MEM1(SLJIT_SP), saved_float_regs_offset));
			saved_float_regs_offset += 16;
		}

		for (i = fscratches; i >= SLJIT_FIRST_SAVED_FLOAT_REG; i--) {
			FAIL_IF(emit_groupf(compiler, MOVAPS_xm_x | EX86_SSE2, i, SLJIT_MEM1(SLJIT_SP), saved_float_regs_offset));
			saved_float_regs_offset += 16;
		}
	}
#endif /* _WIN64 */

	return SLJIT_SUCCESS;
}
640
641
/* Records the function context (register usage and local size) without
   emitting any code; computes the same compiler->local_size value as
   sljit_emit_enter would. */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
	sljit_s32 options, sljit_s32 arg_types,
	sljit_s32 scratches, sljit_s32 saveds, sljit_s32 local_size)
{
	sljit_s32 saved_regs_size;
#ifdef _WIN64
	sljit_s32 fscratches;
	sljit_s32 fsaveds;
	sljit_s32 saved_float_regs_size;
#endif /* _WIN64 */

	CHECK_ERROR();
	CHECK(check_sljit_set_context(compiler, options, arg_types, scratches, saveds, local_size));
	set_emit_enter(compiler, options, arg_types, scratches, saveds, local_size);

	scratches = ENTER_GET_REGS(scratches);

#ifdef _WIN64
	saveds = ENTER_GET_REGS(saveds);
	fscratches = compiler->fscratches;
	fsaveds = compiler->fsaveds;

	local_size += SLJIT_LOCALS_OFFSET;
	saved_float_regs_size = GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sse2_reg);

	/* Reserve a 16 byte aligned area for the spilled SSE2 registers. */
	if (saved_float_regs_size > 0)
		local_size = ((local_size + 0xf) & ~0xf) + saved_float_regs_size;
#else /* !_WIN64 */
	SLJIT_ASSERT(SLJIT_LOCALS_OFFSET == 0);
#endif /* _WIN64 */

	/* Including the return address saved by the call instruction. */
	saved_regs_size = GET_SAVED_REGISTERS_SIZE(scratches, saveds - SLJIT_KEPT_SAVEDS_COUNT(options), 1);
	/* Same 16 byte alignment rule as in sljit_emit_enter. */
	compiler->local_size = ((local_size + saved_regs_size + 0xf) & ~0xf) - saved_regs_size;
	return SLJIT_SUCCESS;
}
677
678
/* Emits the epilogue: reloads spilled SSE2 registers (Windows), releases
   the local area and pops the saved registers in reverse push order.
   When 'is_return_to' is non-zero an extra machine word (the return
   address slot) is released at the end, for use by sljit_emit_return_to. */
static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit_s32 is_return_to)
{
	sljit_uw size;
	sljit_s32 local_size, i, tmp;
	sljit_u8 *inst;
#ifdef _WIN64
	sljit_s32 saved_float_regs_offset;
	sljit_s32 fscratches = compiler->fscratches;
	sljit_s32 fsaveds = compiler->fsaveds;
#endif /* _WIN64 */

#ifdef _WIN64
	saved_float_regs_offset = GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sse2_reg);

	if (saved_float_regs_offset > 0) {
		/* mode32 is set for the movaps sequence and restored below. */
		compiler->mode32 = 1;
		saved_float_regs_offset = (compiler->local_size - saved_float_regs_offset) & ~0xf;

		/* Restore the preserved SSE2 registers (mirror of sljit_emit_enter). */
		tmp = SLJIT_FS0 - fsaveds;
		for (i = SLJIT_FS0; i > tmp; i--) {
			FAIL_IF(emit_groupf(compiler, MOVAPS_x_xm | EX86_SSE2, i, SLJIT_MEM1(SLJIT_SP), saved_float_regs_offset));
			saved_float_regs_offset += 16;
		}

		for (i = fscratches; i >= SLJIT_FIRST_SAVED_FLOAT_REG; i--) {
			FAIL_IF(emit_groupf(compiler, MOVAPS_x_xm | EX86_SSE2, i, SLJIT_MEM1(SLJIT_SP), saved_float_regs_offset));
			saved_float_regs_offset += 16;
		}

		compiler->mode32 = 0;
	}
#endif /* _WIN64 */

	local_size = compiler->local_size;

	/* If no registers were pushed, the return address slot can be
	   merged into the local area release. */
	if (is_return_to && compiler->scratches < SLJIT_FIRST_SAVED_REG && (compiler->saveds == SLJIT_KEPT_SAVEDS_COUNT(compiler->options))) {
		local_size += SSIZE_OF(sw);
		is_return_to = 0;
	}

	if (local_size > 0)
		BINARY_IMM32(ADD, local_size, SLJIT_SP, 0);

	/* Pop the callee-saved scratch registers. */
	tmp = compiler->scratches;
	for (i = SLJIT_FIRST_SAVED_REG; i <= tmp; i++) {
		size = reg_map[i] >= 8 ? 2 : 1;
		inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
		FAIL_IF(!inst);
		INC_SIZE(size);
		if (reg_map[i] >= 8)
			*inst++ = REX_B; /* r8-r15 need a REX.B prefix. */
		POP_REG(reg_lmap[i]);
	}

	/* Pop the saved registers (skipping the kept ones). */
	tmp = SLJIT_S0 - SLJIT_KEPT_SAVEDS_COUNT(compiler->options);
	for (i = SLJIT_S0 + 1 - compiler->saveds; i <= tmp; i++) {
		size = reg_map[i] >= 8 ? 2 : 1;
		inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
		FAIL_IF(!inst);
		INC_SIZE(size);
		if (reg_map[i] >= 8)
			*inst++ = REX_B;
		POP_REG(reg_lmap[i]);
	}

	/* Release the return address slot for a "return to" transfer. */
	if (is_return_to)
		BINARY_IMM32(ADD, sizeof(sljit_sw), SLJIT_SP, 0);

	return SLJIT_SUCCESS;
}
748
749
/* Emits the standard epilogue followed by a near return. */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_void(struct sljit_compiler *compiler)
{
	CHECK_ERROR();
	CHECK(check_sljit_emit_return_void(compiler));

	compiler->mode32 = 0;

	FAIL_IF(emit_stack_frame_release(compiler, 0));
	return emit_byte(compiler, RET_near);
}
759
760
/* Releases the stack frame and transfers control to 'src' (tail-call
   style "return to" operation). */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_to(struct sljit_compiler *compiler,
	sljit_s32 src, sljit_sw srcw)
{
	CHECK_ERROR();
	CHECK(check_sljit_emit_return_to(compiler, src, srcw));

	compiler->mode32 = 0;

	/* Memory operands and saved registers must be moved into TMP_REG2
	   before the frame that holds them is released. */
	if ((src & SLJIT_MEM) || (src >= SLJIT_FIRST_SAVED_REG && src <= (SLJIT_S0 - SLJIT_KEPT_SAVEDS_COUNT(compiler->options)))) {
		ADJUST_LOCAL_OFFSET(src, srcw);

		EMIT_MOV(compiler, TMP_REG2, 0, src, srcw);
		src = TMP_REG2;
		srcw = 0;
	}

	FAIL_IF(emit_stack_frame_release(compiler, 1));

	SLJIT_SKIP_CHECKS(compiler);
	return sljit_emit_ijump(compiler, SLJIT_JUMP, src, srcw);
}
781
782
/* --------------------------------------------------------------------- */
/* Call / return instructions */
/* --------------------------------------------------------------------- */

#ifndef _WIN64
787
788
/* Shuffles the argument registers before a call. Only a third (or later)
   word argument forces any movement: SLJIT_R2 is preserved in TMP_REG1
   and SLJIT_R0 is moved into SLJIT_R2. *src_ptr is redirected when the
   call target register would be overwritten. */
static sljit_s32 call_with_args(struct sljit_compiler *compiler, sljit_s32 arg_types, sljit_s32 *src_ptr)
{
	sljit_s32 src = src_ptr ? (*src_ptr) : 0;
	sljit_s32 word_arg_count = 0;

	SLJIT_ASSERT(reg_map[SLJIT_R1] == 6 && reg_map[SLJIT_R3] == 1 && reg_map[TMP_REG1] == 2);
	SLJIT_ASSERT(!(src & SLJIT_MEM));

	/* Count the word (non-float) arguments; the first slot holds the
	   return type and is skipped. */
	for (arg_types >>= SLJIT_ARG_SHIFT; arg_types != 0; arg_types >>= SLJIT_ARG_SHIFT) {
		if ((arg_types & SLJIT_ARG_MASK) < SLJIT_ARG_TYPE_F64)
			word_arg_count++;
	}

	if (word_arg_count == 0)
		return SLJIT_SUCCESS;

	if (word_arg_count >= 3) {
		/* SLJIT_R2 is about to be overwritten below; keep a copy in
		   TMP_REG1 and follow the call target if it lived there. */
		if (src == SLJIT_R2)
			*src_ptr = TMP_REG1;
		EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_R2, 0);
	}

	return emit_mov(compiler, SLJIT_R2, 0, SLJIT_R0, 0);
}
816
817
#else
818
819
/* Shuffles the argument registers before a call. Word arguments are
   moved into word_arg_regs[slot] and float arguments into their slot's
   register whenever the sljit assignment differs from the target slot.
   *src_ptr is redirected to TMP_REG2 when the call target register is
   clobbered by the shuffle. */
static sljit_s32 call_with_args(struct sljit_compiler *compiler, sljit_s32 arg_types, sljit_s32 *src_ptr)
{
	sljit_s32 src = src_ptr ? (*src_ptr) : 0;
	sljit_s32 arg_count = 0;
	sljit_s32 word_arg_count = 0;
	sljit_s32 float_arg_count = 0;
	sljit_s32 types = 0;
	sljit_s32 needs_transfer = 0;
	static sljit_u8 word_arg_regs[5] = { 0, SLJIT_R3, SLJIT_R1, SLJIT_R2, TMP_REG1 };

	SLJIT_ASSERT(reg_map[SLJIT_R3] == 1 && reg_map[SLJIT_R1] == 2 && reg_map[SLJIT_R2] == 8 && reg_map[TMP_REG1] == 9);
	SLJIT_ASSERT(!(src & SLJIT_MEM));

	/* Skip the return type slot. */
	arg_types >>= SLJIT_ARG_SHIFT;

	/* First pass: reverse the type list into 'types' and decide whether
	   any register actually needs to be moved. */
	while (arg_types) {
		types = (types << SLJIT_ARG_SHIFT) | (arg_types & SLJIT_ARG_MASK);

		if ((arg_types & SLJIT_ARG_MASK) == SLJIT_ARG_TYPE_F64
				|| (arg_types & SLJIT_ARG_MASK) == SLJIT_ARG_TYPE_F32) {
			arg_count++;
			float_arg_count++;

			if (arg_count != float_arg_count)
				needs_transfer = 1;
		} else {
			arg_count++;
			word_arg_count++;

			if (arg_count != word_arg_count || arg_count != word_arg_regs[arg_count]) {
				needs_transfer = 1;

				/* The call target register gets clobbered below. */
				if (src == word_arg_regs[arg_count]) {
					EMIT_MOV(compiler, TMP_REG2, 0, src, 0);
					*src_ptr = TMP_REG2;
				}
			}
		}

		arg_types >>= SLJIT_ARG_SHIFT;
	}

	if (!needs_transfer)
		return SLJIT_SUCCESS;

	/* Second pass: walk the reversed list (last argument first) and
	   perform the moves. */
	while (types) {
		switch (types & SLJIT_ARG_MASK) {
		case SLJIT_ARG_TYPE_F64:
		case SLJIT_ARG_TYPE_F32:
			if (arg_count != float_arg_count)
				FAIL_IF(emit_sse2_load(compiler, (types & SLJIT_ARG_MASK) == SLJIT_ARG_TYPE_F32,
					arg_count, float_arg_count, 0));
			arg_count--;
			float_arg_count--;
			break;
		default:
			if (arg_count != word_arg_count || arg_count != word_arg_regs[arg_count])
				EMIT_MOV(compiler, word_arg_regs[arg_count], 0, word_arg_count, 0);
			arg_count--;
			word_arg_count--;
			break;
		}

		types >>= SLJIT_ARG_SHIFT;
	}

	return SLJIT_SUCCESS;
}
894
895
#endif
896
897
/* Emits a direct call. Arguments are shuffled into their target
   registers first (unless SLJIT_CALL_REG_ARG); a tail call
   (SLJIT_CALL_RETURN) releases the frame and becomes a plain jump. */
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_call(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 arg_types)
{
	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_call(compiler, type, arg_types));

	compiler->mode32 = 0;

	if ((type & 0xff) != SLJIT_CALL_REG_ARG)
		PTR_FAIL_IF(call_with_args(compiler, arg_types, NULL));

	if (type & SLJIT_CALL_RETURN) {
		/* Tail call: release the frame, then jump instead of call. */
		PTR_FAIL_IF(emit_stack_frame_release(compiler, 0));
		type = SLJIT_JUMP | (type & SLJIT_REWRITABLE_JUMP);
	}

	SLJIT_SKIP_CHECKS(compiler);
	return sljit_emit_jump(compiler, type);
}
916
917
/* Emits an indirect call through 'src'. Memory operands, and saved
   registers in the tail-call case, are moved into TMP_REG2 first so
   they survive argument shuffling and frame release. */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 arg_types,
	sljit_s32 src, sljit_sw srcw)
{
	CHECK_ERROR();
	CHECK(check_sljit_emit_icall(compiler, type, arg_types, src, srcw));

	compiler->mode32 = 0;

	if (src & SLJIT_MEM) {
		ADJUST_LOCAL_OFFSET(src, srcw);
		EMIT_MOV(compiler, TMP_REG2, 0, src, srcw);
		src = TMP_REG2;
	}

	if (type & SLJIT_CALL_RETURN) {
		/* A saved register target would be destroyed by the frame release. */
		if (src >= SLJIT_FIRST_SAVED_REG && src <= (SLJIT_S0 - SLJIT_KEPT_SAVEDS_COUNT(compiler->options))) {
			EMIT_MOV(compiler, TMP_REG2, 0, src, srcw);
			src = TMP_REG2;
		}

		FAIL_IF(emit_stack_frame_release(compiler, 0));
	}

	if ((type & 0xff) != SLJIT_CALL_REG_ARG)
		FAIL_IF(call_with_args(compiler, arg_types, &src));

	/* Tail call: the actual transfer is a jump. */
	if (type & SLJIT_CALL_RETURN)
		type = SLJIT_JUMP;

	SLJIT_SKIP_CHECKS(compiler);
	return sljit_emit_ijump(compiler, type, src, srcw);
}
950
951
/* Pops the return address (pushed by the caller's call instruction)
   into 'dst'; the fast-call counterpart of a prologue. */
static sljit_s32 emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
	sljit_u8 *inst;

	if (FAST_IS_REG(dst)) {
		if (reg_map[dst] < 8)
			return emit_byte(compiler, U8(POP_r + reg_lmap[dst]));

		/* r8-r15 need a REX.B prefix. */
		inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
		FAIL_IF(!inst);
		INC_SIZE(2);
		*inst++ = REX_B;
		POP_REG(reg_lmap[dst]);
		return SLJIT_SUCCESS;
	}

	/* REX_W is not necessary (src is not immediate). */
	compiler->mode32 = 1;
	inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
	FAIL_IF(!inst);
	*inst = POP_rm;
	return SLJIT_SUCCESS;
}
974
975
/* Pushes 'src' (the previously saved return address) and emits a near
   return; the counterpart of emit_fast_enter. */
static sljit_s32 emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw)
{
	sljit_u8 *inst;

	if (FAST_IS_REG(src)) {
		if (reg_map[src] < 8) {
			/* Extra byte reserved for the ret emitted by RET() below. */
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 1);
			FAIL_IF(!inst);

			INC_SIZE(1 + 1);
			PUSH_REG(reg_lmap[src]);
		}
		else {
			/* r8-r15 need a REX.B prefix. */
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 2 + 1);
			FAIL_IF(!inst);

			INC_SIZE(2 + 1);
			*inst++ = REX_B;
			PUSH_REG(reg_lmap[src]);
		}
	}
	else {
		/* REX_W is not necessary (src is not immediate). */
		compiler->mode32 = 1;
		inst = emit_x86_instruction(compiler, 1, 0, 0, src, srcw);
		FAIL_IF(!inst);
		inst[0] = GROUP_FF;
		inst[1] |= PUSH_rm;

		/* Reserve one byte for the ret below. */
		inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
		FAIL_IF(!inst);
		INC_SIZE(1);
	}

	RET();
	return SLJIT_SUCCESS;
}
1012
1013
/* Loads the current function's return address (stored on the stack above
   the locals and the saved registers) into 'dst'. */
static sljit_s32 sljit_emit_get_return_address(struct sljit_compiler *compiler,
	sljit_s32 dst, sljit_sw dstw)
{
	sljit_s32 saved_regs_size;

	compiler->mode32 = 0;
	/* NOTE(review): the trailing 0 (vs 1 in sljit_emit_enter) presumably
	   excludes the return address slot itself — verify against the
	   GET_SAVED_REGISTERS_SIZE definition. */
	saved_regs_size = GET_SAVED_REGISTERS_SIZE(compiler->scratches, compiler->saveds - SLJIT_KEPT_SAVEDS_COUNT(compiler->options), 0);
	return emit_mov(compiler, dst, dstw, SLJIT_MEM1(SLJIT_SP), compiler->local_size + saved_regs_size);
}
1022
1023
/* --------------------------------------------------------------------- */
/* Other operations */
/* --------------------------------------------------------------------- */
1026
1027
/* Emits a conditional select: dst_reg = condition(type) ? src1 : src2_reg.
   Uses cmovcc when the CPU supports it, otherwise a generic branch
   sequence (emit_cmov_generic). */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_select(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 dst_reg,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2_reg)
{
	CHECK_ERROR();
	CHECK(check_sljit_emit_select(compiler, type, dst_reg, src1, src1w, src2_reg));

	ADJUST_LOCAL_OFFSET(src1, src1w);

	compiler->mode32 = type & SLJIT_32;
	type &= ~SLJIT_32;

	/* Canonicalize so src2_reg is already in dst_reg and only the other
	   value needs a conditional move. Inverting the condition
	   (type ^= 0x1) swaps which source is selected. */
	if (dst_reg != src2_reg) {
		if (dst_reg == src1) {
			src1 = src2_reg;
			src1w = 0;
			type ^= 0x1;
		} else if (ADDRESSING_DEPENDS_ON(src1, dst_reg)) {
			/* Loading src2_reg first would corrupt src1's address. */
			EMIT_MOV(compiler, dst_reg, 0, src1, src1w);
			src1 = src2_reg;
			src1w = 0;
			type ^= 0x1;
		} else
			EMIT_MOV(compiler, dst_reg, 0, src2_reg, 0);
	}

	if (sljit_has_cpu_feature(SLJIT_HAS_CMOV)) {
		/* cmov cannot take an immediate operand. */
		if (SLJIT_UNLIKELY(src1 == SLJIT_IMM)) {
			EMIT_MOV(compiler, TMP_REG2, 0, src1, src1w);
			src1 = TMP_REG2;
			src1w = 0;
		}

		return emit_groupf(compiler, U8(get_jump_code((sljit_uw)type) - 0x40), dst_reg, src1, src1w);
	}

	return emit_cmov_generic(compiler, type, dst_reg, src1, src1w);
}
1066
1067
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 reg,
	sljit_s32 mem, sljit_sw memw)
{
	sljit_u8* inst;
	sljit_s32 i, next, reg_idx;
	sljit_u8 regs[2];

	CHECK_ERROR();
	CHECK(check_sljit_emit_mem(compiler, type, reg, mem, memw));

	/* Single register: fall back to the generic unaligned access path. */
	if (!(reg & REG_PAIR_MASK))
		return sljit_emit_mem_unaligned(compiler, type, reg, mem, memw);

	ADJUST_LOCAL_OFFSET(mem, memw);

	compiler->mode32 = 0;

	if ((mem & REG_MASK) == 0) {
		/* Absolute address: materialize it in TMP_REG1. */
		EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, memw);

		mem = SLJIT_MEM1(TMP_REG1);
		memw = 0;
	} else if (!(mem & OFFS_REG_MASK) && ((memw < HALFWORD_MIN) || (memw > HALFWORD_MAX - SSIZE_OF(sw)))) {
		/* The displacement does not fit into 32 bits (the second word is
		   accessed at memw + sizeof(sljit_sw), hence the upper margin);
		   switch to a base + index form with the offset in TMP_REG1. */
		EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, memw);

		mem = SLJIT_MEM2(mem & REG_MASK, TMP_REG1);
		memw = 0;
	}

	regs[0] = U8(REG_PAIR_FIRST(reg));
	regs[1] = U8(REG_PAIR_SECOND(reg));

	/* Default: first register first; second word at memw + sizeof(sw). */
	next = SSIZE_OF(sw);

	if (!(type & SLJIT_MEM_STORE) && (regs[0] == (mem & REG_MASK) || regs[0] == OFFS_REG(mem))) {
		/* Loading into the first register would overwrite a register the
		   address still needs; reorder the two loads, or copy the offset
		   register aside when both pair members collide. */
		if (regs[1] == (mem & REG_MASK) || regs[1] == OFFS_REG(mem)) {
			/* Base and offset cannot be TMP_REG1. */
			EMIT_MOV(compiler, TMP_REG1, 0, OFFS_REG(mem), 0);

			if (regs[1] == OFFS_REG(mem))
				next = -SSIZE_OF(sw);

			mem = (mem & ~OFFS_REG_MASK) | TO_OFFS_REG(TMP_REG1);
		} else {
			/* Load the second register first (reverse order). */
			next = -SSIZE_OF(sw);

			if (!(mem & OFFS_REG_MASK))
				memw += SSIZE_OF(sw);
		}
	}

	for (i = 0; i < 2; i++) {
		/* Negative 'next' means the pair is processed in reverse order. */
		reg_idx = next > 0 ? i : (i ^ 0x1);
		reg = regs[reg_idx];

		if ((mem & OFFS_REG_MASK) && (reg_idx == 1)) {
			/* Second word of a base + index address: hand-encode a REX.W
			   mov with a SIB byte (ModRM mod=01 rm=100) and an 8 bit
			   displacement of sizeof(sljit_sw). For base + index forms,
			   memw holds the index scale shift. */
			inst = (sljit_u8*)ensure_buf(compiler, (sljit_uw)(1 + 5));
			FAIL_IF(!inst);

			INC_SIZE(5);

			inst[0] = U8(REX_W | ((reg_map[reg] >= 8) ? REX_R : 0) | ((reg_map[mem & REG_MASK] >= 8) ? REX_B : 0) | ((reg_map[OFFS_REG(mem)] >= 8) ? REX_X : 0));
			inst[1] = (type & SLJIT_MEM_STORE) ? MOV_rm_r : MOV_r_rm;
			inst[2] = 0x44 | U8(reg_lmap[reg] << 3);
			inst[3] = U8(memw << 6) | U8(reg_lmap[OFFS_REG(mem)] << 3) | reg_lmap[mem & REG_MASK];
			inst[4] = sizeof(sljit_sw);
		} else if (type & SLJIT_MEM_STORE) {
			EMIT_MOV(compiler, mem, memw, reg, 0);
		} else {
			EMIT_MOV(compiler, reg, 0, mem, memw);
		}

		/* Advance to the other word only for displacement addressing. */
		if (!(mem & OFFS_REG_MASK))
			memw += next;
	}

	return SLJIT_SUCCESS;
}
1146
1147
/* Emits a 32 bit to 64 bit integer move: zero extension when sign == 0,
   sign extension otherwise. Handles immediate, register and memory forms
   for both source and destination. */
static sljit_s32 emit_mov_int(struct sljit_compiler *compiler, sljit_s32 sign,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_u8* inst;
	sljit_s32 dst_r;

	compiler->mode32 = 0;

	if (src == SLJIT_IMM) {
		if (FAST_IS_REG(dst)) {
			/* A 32 bit mov r, imm32 zero extends into the full register;
			   it is also correct for signed values that are non-negative. */
			if (!sign || ((sljit_u32)srcw <= 0x7fffffff))
				return emit_do_imm32(compiler, reg_map[dst] <= 7 ? 0 : REX_B, U8(MOV_r_i32 | reg_lmap[dst]), srcw);

			/* Negative signed immediate: REX.W mov with a sign extended
			   imm32 (mode32 is 0 here). */
			inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, (sljit_sw)(sljit_s32)srcw, dst, dstw);
			FAIL_IF(!inst);
			*inst = MOV_rm_i32;
			return SLJIT_SUCCESS;
		}
		/* Memory destination: only the low 32 bits are stored. */
		compiler->mode32 = 1;
		inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, (sljit_sw)(sljit_s32)srcw, dst, dstw);
		FAIL_IF(!inst);
		*inst = MOV_rm_i32;
		compiler->mode32 = 0;
		return SLJIT_SUCCESS;
	}

	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

	if ((dst & SLJIT_MEM) && FAST_IS_REG(src))
		/* Register to memory: the source register can be stored directly
		   by the 32 bit store below. */
		dst_r = src;
	else {
		if (sign) {
			/* movsxd: sign extend the 32 bit source into dst_r. */
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src, srcw);
			FAIL_IF(!inst);
			*inst = MOVSXD_r_rm;
		} else {
			/* A 32 bit register move implicitly zero extends. */
			compiler->mode32 = 1;
			EMIT_MOV(compiler, dst_r, 0, src, srcw);
			compiler->mode32 = 0;
		}
	}

	if (dst & SLJIT_MEM) {
		/* Write back the low 32 bits to the memory destination. */
		compiler->mode32 = 1;
		inst = emit_x86_instruction(compiler, 1, dst_r, 0, dst, dstw);
		FAIL_IF(!inst);
		*inst = MOV_rm_r;
		compiler->mode32 = 0;
	}

	return SLJIT_SUCCESS;
}
1200
1201
/* Converts an unsigned integer (u32 or uw) to a floating point value.
   x86 only has a signed cvtsi2sd/ss, so unsigned sources need extra work. */
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_uw(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;
	sljit_u8 *inst, *jump_inst1, *jump_inst2;
	sljit_uw size1, size2;

	compiler->mode32 = 0;

	if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_U32) {
		/* u32 source: zero extend into TMP_REG1 (32 bit mov), then a
		   64 bit signed conversion is always correct. */
		if (src != SLJIT_IMM) {
			compiler->mode32 = 1;
			EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
			compiler->mode32 = 0;
		} else
			FAIL_IF(emit_do_imm32(compiler, reg_map[TMP_REG1] <= 7 ? 0 : REX_B, U8(MOV_r_i32 | reg_lmap[TMP_REG1]), srcw));

		FAIL_IF(emit_groupf(compiler, CVTSI2SD_x_rm | EX86_SELECT_F2_F3(op) | EX86_SSE2_OP1, dst_r, TMP_REG1, 0));

		compiler->mode32 = 1;

		if (dst_r == TMP_FREG)
			return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
		return SLJIT_SUCCESS;
	}

	/* 64 bit unsigned source. */
	if (!FAST_IS_REG(src)) {
		EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
		src = TMP_REG1;
	}

	/* When the value is non-negative as a signed 64 bit number, the plain
	   signed conversion below is already correct. */
	BINARY_IMM32(CMP, 0, src, 0);

	inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
	FAIL_IF(!inst);
	INC_SIZE(2);
	inst[0] = JL_i8;
	jump_inst1 = inst;

	size1 = compiler->size;

	compiler->mode32 = 0;
	FAIL_IF(emit_groupf(compiler, CVTSI2SD_x_rm | EX86_SELECT_F2_F3(op) | EX86_SSE2_OP1, dst_r, src, 0));

	/* Jump over the slow path. */
	inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
	FAIL_IF(!inst);
	INC_SIZE(2);
	inst[0] = JMP_i8;
	jump_inst2 = inst;

	size2 = compiler->size;

	/* Patch the JL displacement: the slow path starts here. */
	jump_inst1[1] = U8(size2 - size1);

	if (src != TMP_REG1)
		EMIT_MOV(compiler, TMP_REG1, 0, src, 0);

	EMIT_MOV(compiler, TMP_REG2, 0, src, 0);

	/* Slow path (top bit set): convert (src >> 1) | (src & 1) and double
	   the result. Or-ing back the lowest bit keeps rounding correct. */
	inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 1, TMP_REG1, 0);
	FAIL_IF(!inst);
	inst[1] |= SHR;

	compiler->mode32 = 1;
	BINARY_IMM32(AND, 1, TMP_REG2, 0);

	compiler->mode32 = 0;
	/* TMP_REG1 |= TMP_REG2 */
	inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, TMP_REG2, 0);
	FAIL_IF(!inst);
	inst[0] = OR_r_rm;

	FAIL_IF(emit_groupf(compiler, CVTSI2SD_x_rm | EX86_SELECT_F2_F3(op) | EX86_SSE2_OP1, dst_r, TMP_REG1, 0));
	compiler->mode32 = 1;
	/* dst_r += dst_r: undo the halving. */
	FAIL_IF(emit_groupf(compiler, ADDSD_x_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2, dst_r, dst_r, 0));

	/* Patch the JMP displacement: the fast path continues here. */
	jump_inst2[1] = U8(compiler->size - size2);

	if (dst_r == TMP_FREG)
		return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
	return SLJIT_SUCCESS;
}
1283
1284
/* Shared tail of sljit_emit_fset32/fset64: emits either "pxor freg, freg"
   (is_zero) or a movd/movq from TMP_REG1, whose bit pattern was loaded by
   the caller. rex carries REX_W for the 64 bit form; REX_R/REX_B are added
   here for extended registers. */
static sljit_s32 sljit_emit_fset(struct sljit_compiler *compiler,
	sljit_s32 freg, sljit_u8 rex, sljit_s32 is_zero)
{
	sljit_u8 *inst;
	sljit_u32 size;

	if (is_zero) {
		/* pxor uses freg as both operands, so REX_W from the caller is
		   not needed and is discarded. */
		rex = freg_map[freg] >= 8 ? (REX_R | REX_B) : 0;
	} else {
		if (freg_map[freg] >= 8)
			rex |= REX_R;
		if (reg_map[TMP_REG1] >= 8)
			rex |= REX_B;
	}

	/* Encoding: 66 [REX] 0F opcode modrm. */
	size = (rex != 0) ? 5 : 4;

	inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!inst);
	INC_SIZE(size);

	*inst++ = GROUP_66;
	if (rex != 0)
		*inst++ = rex;
	inst[0] = GROUP_0F;

	if (is_zero) {
		inst[1] = PXOR_x_xm;
		inst[2] = U8(freg_lmap[freg] | (freg_lmap[freg] << 3) | MOD_REG);
	} else {
		inst[1] = MOVD_x_rm;
		inst[2] = U8(reg_lmap[TMP_REG1] | (freg_lmap[freg] << 3) | MOD_REG);
	}

	return SLJIT_SUCCESS;
}
1320
1321
/* Loads a 32 bit float constant into freg. Zero gets a pxor; any other
   value is moved through TMP_REG1. */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset32(struct sljit_compiler *compiler,
	sljit_s32 freg, sljit_f32 value)
{
	/* Reinterpret the float as its 32 bit pattern (type-pun via union). */
	union {
		sljit_f32 as_f32;
		sljit_s32 as_s32;
	} bits;
	sljit_s32 is_zero;

	CHECK_ERROR();
	CHECK(check_sljit_emit_fset32(compiler, freg, value));

	bits.as_f32 = value;
	is_zero = (bits.as_s32 == 0);

	if (!is_zero) {
		/* Place the bit pattern into TMP_REG1 as a 32 bit immediate. */
		compiler->mode32 = 1;
		EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, bits.as_s32);
	}

	return sljit_emit_fset(compiler, freg, 0, is_zero);
}
1341
1342
/* Loads a 64 bit float constant into freg. Zero gets a pxor; any other
   value is moved through TMP_REG1 with a 64 bit (REX_W) movq. */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset64(struct sljit_compiler *compiler,
	sljit_s32 freg, sljit_f64 value)
{
	/* Reinterpret the double as its 64 bit pattern (type-pun via union). */
	union {
		sljit_f64 as_f64;
		sljit_sw as_sw;
	} bits;
	sljit_s32 is_zero;

	CHECK_ERROR();
	CHECK(check_sljit_emit_fset64(compiler, freg, value));

	bits.as_f64 = value;
	is_zero = (bits.as_sw == 0);

	if (!is_zero) {
		/* Place the bit pattern into TMP_REG1 as a 64 bit immediate. */
		compiler->mode32 = 0;
		EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, bits.as_sw);
	}

	return sljit_emit_fset(compiler, freg, REX_W, is_zero);
}
1362
1363
/* Copies the raw bit pattern between an integer register and a floating
   point register (movd/movq), in the direction selected by the opcode. */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fcopy(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 freg, sljit_s32 reg)
{
	sljit_u8 *inst;
	sljit_u32 size;
	sljit_u8 rex = 0;

	CHECK_ERROR();
	CHECK(check_sljit_emit_fcopy(compiler, op, freg, reg));

	/* REX_W selects the 64 bit (movq) form. */
	if (!(op & SLJIT_32))
		rex = REX_W;

	if (freg_map[freg] >= 8)
		rex |= REX_R;

	if (reg_map[reg] >= 8)
		rex |= REX_B;

	/* Encoding: 66 [REX] 0F opcode modrm. */
	size = (rex != 0) ? 5 : 4;

	inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!inst);
	INC_SIZE(size);

	*inst++ = GROUP_66;
	if (rex != 0)
		*inst++ = rex;
	inst[0] = GROUP_0F;
	/* Opcode selects the direction: to or from the fp register. */
	inst[1] = GET_OPCODE(op) == SLJIT_COPY_TO_F64 ? MOVD_x_rm : MOVD_rm_x;
	inst[2] = U8(reg_lmap[reg] | (freg_lmap[freg] << 3) | MOD_REG);

	return SLJIT_SUCCESS;
}
1397
1398
static sljit_s32 skip_frames_before_return(struct sljit_compiler *compiler)
1399
{
1400
sljit_s32 tmp, size;
1401
1402
/* Don't adjust shadow stack if it isn't enabled. */
1403
if (!cpu_has_shadow_stack())
1404
return SLJIT_SUCCESS;
1405
1406
size = compiler->local_size;
1407
tmp = compiler->scratches;
1408
if (tmp >= SLJIT_FIRST_SAVED_REG)
1409
size += (tmp - SLJIT_FIRST_SAVED_REG + 1) * SSIZE_OF(sw);
1410
tmp = compiler->saveds < SLJIT_NUMBER_OF_SAVED_REGISTERS ? (SLJIT_S0 + 1 - compiler->saveds) : SLJIT_FIRST_SAVED_REG;
1411
if (SLJIT_S0 >= tmp)
1412
size += (SLJIT_S0 - tmp + 1) * SSIZE_OF(sw);
1413
1414
return adjust_shadow_stack(compiler, SLJIT_MEM1(SLJIT_SP), size);
1415
}
1416
1417