/*
 * Stack-less Just-In-Time compiler
 *
 * Copyright Zoltan Herczeg ([email protected]). All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification, are
 * permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this list of
 * conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice, this list
 * of conditions and the following disclaimer in the documentation and/or other materials
 * provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
 * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
{
    return "x86" SLJIT_CPUINFO;
}

/*
   32b register indexes:
   0 - EAX
   1 - ECX
   2 - EDX
   3 - EBX
   4 - ESP
   5 - EBP
   6 - ESI
   7 - EDI
*/

/*
   64b register indexes:
   0 - RAX
   1 - RCX
   2 - RDX
   3 - RBX
   4 - RSP
   5 - RBP
   6 - RSI
   7 - RDI
   8 - R8 - From now on REX prefix is required
   9 - R9
   10 - R10
   11 - R11
   12 - R12
   13 - R13
   14 - R14
   15 - R15
*/
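
/* Illustration (assumption, not from the original sljit sources): ModRM
   encodes a register in only 3 bits, so indexes 8-15 additionally need an
   extension bit carried by the REX prefix defined below for the 64-bit
   target. This is why the 64-bit maps come in pairs: reg_map holds the
   full index (used to decide whether a REX prefix is needed) and reg_lmap
   holds its low three bits (reg_map & 0x7, the value that actually goes
   into the ModRM/SIB byte). */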

#define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
#define TMP_FREG (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)

static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 3] = {
    0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 5, 7, 6, 4, 3
};

static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2] = {
    0, 1, 2, 3, 4, 5, 6, 7, 0
};

#define CHECK_EXTRA_REGS(p, w, do) \
    if (p >= SLJIT_R3 && p <= SLJIT_S3) { \
        w = (2 * SSIZE_OF(sw)) + ((p) - SLJIT_R3) * SSIZE_OF(sw); \
        p = SLJIT_MEM1(SLJIT_SP); \
        do; \
    }
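
/* Editor's note (assumption, not from the original sljit sources): on
   x86-32 the virtual registers SLJIT_R3 .. SLJIT_S3 have no machine
   register (their reg_map entries above are 0) and live in stack slots
   instead. CHECK_EXTRA_REGS rewrites such an operand into a
   SLJIT_MEM1(SLJIT_SP) access with the matching offset; e.g. SLJIT_R3
   becomes [esp + 2 * sizeof(sljit_sw)]. */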

#else /* SLJIT_CONFIG_X86_32 */

#define TMP_REG2 (SLJIT_NUMBER_OF_REGISTERS + 3)

/* Note: r12 & 0x7 == 0b100, which is decoded as "SIB byte present".
   Note: avoid using r12 and r13 for memory addressing,
   therefore r12 is best assigned to one of the higher saved registers. */
#ifndef _WIN64
/* Args: rdi(=7), rsi(=6), rdx(=2), rcx(=1), r8, r9. Scratches: rax(=0), r10, r11 */
static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 4] = {
    0, 0, 6, 7, 1, 8, 11, 10, 12, 5, 13, 14, 15, 3, 4, 2, 9
};
/* low-map. reg_map & 0x7. */
static const sljit_u8 reg_lmap[SLJIT_NUMBER_OF_REGISTERS + 4] = {
    0, 0, 6, 7, 1, 0, 3, 2, 4, 5, 5, 6, 7, 3, 4, 2, 1
};
#else
/* Args: rcx(=1), rdx(=2), r8, r9. Scratches: rax(=0), r10, r11 */
static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 4] = {
    0, 0, 2, 8, 1, 11, 12, 5, 13, 14, 15, 7, 6, 3, 4, 9, 10
};
/* low-map. reg_map & 0x7. */
static const sljit_u8 reg_lmap[SLJIT_NUMBER_OF_REGISTERS + 4] = {
    0, 0, 2, 0, 1, 3, 4, 5, 5, 6, 7, 7, 6, 3, 4, 1, 2
};
#endif

/* Args: xmm0-xmm3 */
static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2] = {
    0, 0, 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 4
};
/* low-map. freg_map & 0x7. */
static const sljit_u8 freg_lmap[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2] = {
    0, 0, 1, 2, 3, 5, 6, 7, 0, 1, 2, 3, 4, 5, 6, 7, 4
};

#define REX_W 0x48
#define REX_R 0x44
#define REX_X 0x42
#define REX_B 0x41
#define REX 0x40
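
/* Illustration (assumption, not from the original sljit sources): the REX
   prefix is 0100WRXB, so REX_W (0x48) selects 64-bit operand size, REX_R
   (0x44) extends the ModRM reg field, REX_X (0x42) the SIB index field and
   REX_B (0x41) the ModRM rm/base field. For example "mov r8, rax" is the
   byte sequence 0x49 0x89 0xc0: (REX_W | REX_B), MOV_rm_r, then ModRM 0xc0
   with reg = rax (0) and rm = r8 (8 & 0x7 = 0). */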

#ifndef _WIN64
#define HALFWORD_MAX 0x7fffffffl
#define HALFWORD_MIN -0x80000000l
#else
#define HALFWORD_MAX 0x7fffffffll
#define HALFWORD_MIN -0x80000000ll
#endif

#define IS_HALFWORD(x) ((x) <= HALFWORD_MAX && (x) >= HALFWORD_MIN)
#define NOT_HALFWORD(x) ((x) > HALFWORD_MAX || (x) < HALFWORD_MIN)

#define CHECK_EXTRA_REGS(p, w, do)

#endif /* SLJIT_CONFIG_X86_32 */

#define U8(v) ((sljit_u8)(v))

/* Size flags for emit_x86_instruction: */
#define EX86_BIN_INS ((sljit_uw)0x000010)
#define EX86_SHIFT_INS ((sljit_uw)0x000020)
#define EX86_BYTE_ARG ((sljit_uw)0x000040)
#define EX86_HALF_ARG ((sljit_uw)0x000080)
/* Size flags for both emit_x86_instruction and emit_vex_instruction: */
#define EX86_REX ((sljit_uw)0x000100)
#define EX86_NO_REXW ((sljit_uw)0x000200)
#define EX86_PREF_66 ((sljit_uw)0x000400)
#define EX86_PREF_F2 ((sljit_uw)0x000800)
#define EX86_PREF_F3 ((sljit_uw)0x001000)
#define EX86_SSE2_OP1 ((sljit_uw)0x002000)
#define EX86_SSE2_OP2 ((sljit_uw)0x004000)
#define EX86_SSE2 (EX86_SSE2_OP1 | EX86_SSE2_OP2)
#define EX86_VEX_EXT ((sljit_uw)0x008000)
/* Op flags for emit_vex_instruction: */
#define VEX_OP_0F38 ((sljit_uw)0x010000)
#define VEX_OP_0F3A ((sljit_uw)0x020000)
#define VEX_SSE2_OPV ((sljit_uw)0x040000)
#define VEX_AUTO_W ((sljit_uw)0x080000)
#define VEX_W ((sljit_uw)0x100000)
#define VEX_256 ((sljit_uw)0x200000)

#define EX86_SELECT_66(op) (((op) & SLJIT_32) ? 0 : EX86_PREF_66)
#define EX86_SELECT_F2_F3(op) (((op) & SLJIT_32) ? EX86_PREF_F3 : EX86_PREF_F2)
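
/* Illustration (assumption, not from the original sljit sources): the
   EX86_PREF_* flags emit the legacy SSE prefix bytes, which select the
   operand form of a 0F-map opcode. E.g. opcode 0x58 is ADDPS with no
   prefix, ADDPD with 0x66 (EX86_PREF_66), ADDSD with 0xf2 (EX86_PREF_F2)
   and ADDSS with 0xf3 (EX86_PREF_F3), so EX86_SELECT_F2_F3(op) picks the
   double or single precision scalar form from the SLJIT_32 bit of the
   operation. */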

/* --------------------------------------------------------------------- */
/* Instruction forms */
/* --------------------------------------------------------------------- */

#define ADD (/* BINARY */ 0 << 3)
#define ADD_EAX_i32 0x05
#define ADD_r_rm 0x03
#define ADD_rm_r 0x01
#define ADDSD_x_xm 0x58
#define ADC (/* BINARY */ 2 << 3)
#define ADC_EAX_i32 0x15
#define ADC_r_rm 0x13
#define ADC_rm_r 0x11
#define AND (/* BINARY */ 4 << 3)
#define AND_EAX_i32 0x25
#define AND_r_rm 0x23
#define AND_rm_r 0x21
#define ANDPD_x_xm 0x54
#define BSR_r_rm (/* GROUP_0F */ 0xbd)
#define BSF_r_rm (/* GROUP_0F */ 0xbc)
#define BSWAP_r (/* GROUP_0F */ 0xc8)
#define CALL_i32 0xe8
#define CALL_rm (/* GROUP_FF */ 2 << 3)
#define CDQ 0x99
#define CMOVE_r_rm (/* GROUP_0F */ 0x44)
#define CMP (/* BINARY */ 7 << 3)
#define CMP_EAX_i32 0x3d
#define CMP_r_rm 0x3b
#define CMP_rm_r 0x39
#define CMPS_x_xm 0xc2
#define CMPXCHG_rm_r 0xb1
#define CMPXCHG_rm8_r 0xb0
#define CVTPD2PS_x_xm 0x5a
#define CVTPS2PD_x_xm 0x5a
#define CVTSI2SD_x_rm 0x2a
#define CVTTSD2SI_r_xm 0x2c
#define DIV (/* GROUP_F7 */ 6 << 3)
#define DIVSD_x_xm 0x5e
#define EXTRACTPS_x_xm 0x17
#define FLDS 0xd9
#define FLDL 0xdd
#define FSTPS 0xd9
#define FSTPD 0xdd
#define INSERTPS_x_xm 0x21
#define INT3 0xcc
#define IDIV (/* GROUP_F7 */ 7 << 3)
#define IMUL (/* GROUP_F7 */ 5 << 3)
#define IMUL_r_rm (/* GROUP_0F */ 0xaf)
#define IMUL_r_rm_i8 0x6b
#define IMUL_r_rm_i32 0x69
#define JL_i8 0x7c
#define JE_i8 0x74
#define JNC_i8 0x73
#define JNE_i8 0x75
#define JMP_i8 0xeb
#define JMP_i32 0xe9
#define JMP_rm (/* GROUP_FF */ 4 << 3)
#define LEA_r_m 0x8d
#define LOOP_i8 0xe2
#define LZCNT_r_rm (/* GROUP_F3 */ /* GROUP_0F */ 0xbd)
#define MOV_r_rm 0x8b
#define MOV_r_i32 0xb8
#define MOV_rm_r 0x89
#define MOV_rm_i32 0xc7
#define MOV_rm8_i8 0xc6
#define MOV_rm8_r8 0x88
#define MOVAPS_x_xm 0x28
#define MOVAPS_xm_x 0x29
#define MOVD_x_rm 0x6e
#define MOVD_rm_x 0x7e
#define MOVDDUP_x_xm 0x12
#define MOVDQA_x_xm 0x6f
#define MOVDQA_xm_x 0x7f
#define MOVDQU_x_xm 0x6f
#define MOVHLPS_x_x 0x12
#define MOVHPD_m_x 0x17
#define MOVHPD_x_m 0x16
#define MOVLHPS_x_x 0x16
#define MOVLPD_m_x 0x13
#define MOVLPD_x_m 0x12
#define MOVMSKPS_r_x (/* GROUP_0F */ 0x50)
#define MOVQ_x_xm (/* GROUP_0F */ 0x7e)
#define MOVSD_x_xm 0x10
#define MOVSD_xm_x 0x11
#define MOVSHDUP_x_xm 0x16
#define MOVSXD_r_rm 0x63
#define MOVSX_r_rm8 (/* GROUP_0F */ 0xbe)
#define MOVSX_r_rm16 (/* GROUP_0F */ 0xbf)
#define MOVUPS_x_xm 0x10
#define MOVZX_r_rm8 (/* GROUP_0F */ 0xb6)
#define MOVZX_r_rm16 (/* GROUP_0F */ 0xb7)
#define MUL (/* GROUP_F7 */ 4 << 3)
#define MULSD_x_xm 0x59
#define NEG_rm (/* GROUP_F7 */ 3 << 3)
#define NOP 0x90
#define NOT_rm (/* GROUP_F7 */ 2 << 3)
#define OR (/* BINARY */ 1 << 3)
#define OR_r_rm 0x0b
#define OR_EAX_i32 0x0d
#define OR_rm_r 0x09
#define OR_rm8_r8 0x08
#define ORPD_x_xm 0x56
#define PACKSSWB_x_xm (/* GROUP_0F */ 0x63)
#define PAND_x_xm 0xdb
#define PCMPEQD_x_xm 0x76
#define PINSRB_x_rm_i8 0x20
#define PINSRW_x_rm_i8 0xc4
#define PINSRD_x_rm_i8 0x22
#define PEXTRB_rm_x_i8 0x14
#define PEXTRW_rm_x_i8 0x15
#define PEXTRD_rm_x_i8 0x16
#define PMOVMSKB_r_x (/* GROUP_0F */ 0xd7)
#define PMOVSXBD_x_xm 0x21
#define PMOVSXBQ_x_xm 0x22
#define PMOVSXBW_x_xm 0x20
#define PMOVSXDQ_x_xm 0x25
#define PMOVSXWD_x_xm 0x23
#define PMOVSXWQ_x_xm 0x24
#define PMOVZXBD_x_xm 0x31
#define PMOVZXBQ_x_xm 0x32
#define PMOVZXBW_x_xm 0x30
#define PMOVZXDQ_x_xm 0x35
#define PMOVZXWD_x_xm 0x33
#define PMOVZXWQ_x_xm 0x34
#define POP_r 0x58
#define POP_rm 0x8f
#define POPF 0x9d
#define POR_x_xm 0xeb
#define PREFETCH 0x18
#define PSHUFB_x_xm 0x00
#define PSHUFD_x_xm 0x70
#define PSHUFLW_x_xm 0x70
#define PSRLDQ_x 0x73
#define PSLLD_x_i8 0x72
#define PSLLQ_x_i8 0x73
#define PUSH_i32 0x68
#define PUSH_r 0x50
#define PUSH_rm (/* GROUP_FF */ 6 << 3)
#define PUSHF 0x9c
#define PXOR_x_xm 0xef
#define ROL (/* SHIFT */ 0 << 3)
#define ROR (/* SHIFT */ 1 << 3)
#define RET_near 0xc3
#define RET_i16 0xc2
#define SBB (/* BINARY */ 3 << 3)
#define SBB_EAX_i32 0x1d
#define SBB_r_rm 0x1b
#define SBB_rm_r 0x19
#define SAR (/* SHIFT */ 7 << 3)
#define SHL (/* SHIFT */ 4 << 3)
#define SHLD (/* GROUP_0F */ 0xa5)
#define SHRD (/* GROUP_0F */ 0xad)
#define SHR (/* SHIFT */ 5 << 3)
#define SHUFPS_x_xm 0xc6
#define SUB (/* BINARY */ 5 << 3)
#define SUB_EAX_i32 0x2d
#define SUB_r_rm 0x2b
#define SUB_rm_r 0x29
#define SUBSD_x_xm 0x5c
#define TEST_EAX_i32 0xa9
#define TEST_rm_r 0x85
#define TZCNT_r_rm (/* GROUP_F3 */ /* GROUP_0F */ 0xbc)
#define UCOMISD_x_xm 0x2e
#define UNPCKLPD_x_xm 0x14
#define UNPCKLPS_x_xm 0x14
#define VBROADCASTSD_x_xm 0x19
#define VBROADCASTSS_x_xm 0x18
#define VEXTRACTF128_x_ym 0x19
#define VEXTRACTI128_x_ym 0x39
#define VINSERTF128_y_y_xm 0x18
#define VINSERTI128_y_y_xm 0x38
#define VPBROADCASTB_x_xm 0x78
#define VPBROADCASTD_x_xm 0x58
#define VPBROADCASTQ_x_xm 0x59
#define VPBROADCASTW_x_xm 0x79
#define VPERMPD_y_ym 0x01
#define VPERMQ_y_ym 0x00
#define XCHG_EAX_r 0x90
#define XCHG_r_rm 0x87
#define XOR (/* BINARY */ 6 << 3)
#define XOR_EAX_i32 0x35
#define XOR_r_rm 0x33
#define XOR_rm_r 0x31
#define XORPD_x_xm 0x57

#define GROUP_0F 0x0f
#define GROUP_66 0x66
#define GROUP_F3 0xf3
#define GROUP_F7 0xf7
#define GROUP_FF 0xff
#define GROUP_BINARY_81 0x81
#define GROUP_BINARY_83 0x83
#define GROUP_SHIFT_1 0xd1
#define GROUP_SHIFT_N 0xc1
#define GROUP_SHIFT_CL 0xd3
#define GROUP_LOCK 0xf0

#define MOD_REG 0xc0
#define MOD_DISP8 0x40

#define INC_SIZE(s) (*inst++ = U8(s), compiler->size += (s))

#define PUSH_REG(r) (*inst++ = U8(PUSH_r + (r)))
#define POP_REG(r) (*inst++ = U8(POP_r + (r)))
#define RET() (*inst++ = RET_near)
#define RET_I16(n) (*inst++ = RET_i16, *inst++ = U8(n), *inst++ = 0)
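
/* Illustration (assumption, not from the original sljit sources): code is
   emitted into the compiler buffer by reserving space first and advancing
   the size counter afterwards, e.g.:

       inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
       FAIL_IF(!inst);
       INC_SIZE(2);
       PUSH_REG(reg_map[reg]);
       RET();

   ensure_buf() reserves a length byte plus the instruction bytes,
   INC_SIZE() writes that length and updates compiler->size, and the
   helpers above append the raw opcode bytes. */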

#define SLJIT_INST_LABEL 255
#define SLJIT_INST_JUMP 254
#define SLJIT_INST_MOV_ADDR 253
#define SLJIT_INST_CONST 252

/* Multithreading does not affect these static variables, since they store
   built-in CPU features. Therefore it is harmless if several threads
   detect the CPU features at the same time and overwrite them. */
#define CPU_FEATURE_DETECTED 0x001
#if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
#define CPU_FEATURE_SSE2 0x002
#endif
#define CPU_FEATURE_SSE41 0x004
#define CPU_FEATURE_LZCNT 0x008
#define CPU_FEATURE_TZCNT 0x010
#define CPU_FEATURE_CMOV 0x020
#define CPU_FEATURE_AVX 0x040
#define CPU_FEATURE_AVX2 0x080
#define CPU_FEATURE_OSXSAVE 0x100

static sljit_u32 cpu_feature_list = 0;

#ifdef _WIN32_WCE
#include <cmnintrin.h>
#elif defined(_MSC_VER) && _MSC_VER >= 1400
#include <intrin.h>
#elif defined(__INTEL_COMPILER)
#include <cpuid.h>
#endif

#if (defined(_MSC_VER) && _MSC_VER >= 1400) || defined(__INTEL_COMPILER) \
    || (defined(__INTEL_LLVM_COMPILER) && defined(__XSAVE__))
#include <immintrin.h>
#endif

/******************************************************/
/* Unaligned-store functions */
/******************************************************/

static SLJIT_INLINE void sljit_unaligned_store_s16(void *addr, sljit_s16 value)
{
    SLJIT_MEMCPY(addr, &value, sizeof(value));
}

static SLJIT_INLINE void sljit_unaligned_store_s32(void *addr, sljit_s32 value)
{
    SLJIT_MEMCPY(addr, &value, sizeof(value));
}

static SLJIT_INLINE void sljit_unaligned_store_sw(void *addr, sljit_sw value)
{
    SLJIT_MEMCPY(addr, &value, sizeof(value));
}

/******************************************************/
/* Utility functions */
/******************************************************/

static void execute_cpu_id(sljit_u32 info[4])
{
#if (defined(_MSC_VER) && _MSC_VER >= 1400) \
    || (defined(__INTEL_COMPILER) && __INTEL_COMPILER == 2021 && __INTEL_COMPILER_UPDATE >= 7)

    __cpuidex((int*)info, (int)info[0], (int)info[2]);

#elif (defined(__INTEL_COMPILER) && __INTEL_COMPILER >= 1900)

    __get_cpuid_count(info[0], info[2], info, info + 1, info + 2, info + 3);

#elif (defined(_MSC_VER) || defined(__INTEL_COMPILER)) \
    && (defined(SLJIT_CONFIG_X86_32) && SLJIT_CONFIG_X86_32)

    /* Intel syntax. */
    __asm {
        mov esi, info
        mov eax, [esi]
        mov ecx, [esi + 8]
        cpuid
        mov [esi], eax
        mov [esi + 4], ebx
        mov [esi + 8], ecx
        mov [esi + 12], edx
    }

#else

    __asm__ __volatile__ (
        "cpuid\n"
        : "=a" (info[0]), "=b" (info[1]), "=c" (info[2]), "=d" (info[3])
        : "0" (info[0]), "2" (info[2])
    );

#endif
}

static sljit_u32 execute_get_xcr0_low(void)
{
    sljit_u32 xcr0;

#if (defined(_MSC_VER) && _MSC_VER >= 1400) || defined(__INTEL_COMPILER) \
    || (defined(__INTEL_LLVM_COMPILER) && defined(__XSAVE__))

    xcr0 = (sljit_u32)_xgetbv(0);

#elif defined(__TINYC__)

    __asm__ (
        "xorl %%ecx, %%ecx\n"
        ".byte 0x0f\n"
        ".byte 0x01\n"
        ".byte 0xd0\n"
        : "=a" (xcr0)
        :
#if defined(SLJIT_CONFIG_X86_32) && SLJIT_CONFIG_X86_32
        : "ecx", "edx"
#else /* !SLJIT_CONFIG_X86_32 */
        : "rcx", "rdx"
#endif /* SLJIT_CONFIG_X86_32 */
    );

#elif (defined(__INTEL_LLVM_COMPILER) && __INTEL_LLVM_COMPILER < 20220100) \
    || (defined(__clang__) && __clang_major__ < 14) \
    || (defined(__GNUC__) && __GNUC__ < 3) \
    || defined(__SUNPRO_C) || defined(__SUNPRO_CC)

    /* AT&T syntax. */
    __asm__ (
        "xorl %%ecx, %%ecx\n"
        "xgetbv\n"
        : "=a" (xcr0)
        :
#if defined(SLJIT_CONFIG_X86_32) && SLJIT_CONFIG_X86_32
        : "ecx", "edx"
#else /* !SLJIT_CONFIG_X86_32 */
        : "rcx", "rdx"
#endif /* SLJIT_CONFIG_X86_32 */
    );

#elif defined(_MSC_VER)

    /* Intel syntax. */
    __asm {
        xor ecx, ecx
        xgetbv
        mov xcr0, eax
    }

#else

    __asm__ (
        "xor{l %%ecx, %%ecx | ecx, ecx}\n"
        "xgetbv\n"
        : "=a" (xcr0)
        :
#if defined(SLJIT_CONFIG_X86_32) && SLJIT_CONFIG_X86_32
        : "ecx", "edx"
#else /* !SLJIT_CONFIG_X86_32 */
        : "rcx", "rdx"
#endif /* SLJIT_CONFIG_X86_32 */
    );

#endif
    return xcr0;
}
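
/* Editor's note (assumption, not from the original sljit sources): xgetbv
   with ecx = 0 reads XCR0; bit 2 (0x4) indicates that the OS saves the
   YMM register state. get_cpu_features() below uses this to mask out
   AVX/AVX2 when the OSXSAVE CPUID bit is set but YMM state saving is not
   enabled. */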

static void get_cpu_features(void)
{
    sljit_u32 feature_list = CPU_FEATURE_DETECTED;
    sljit_u32 info[4] = {0};
    sljit_u32 max_id;

    execute_cpu_id(info);
    max_id = info[0];

    if (max_id >= 7) {
        info[0] = 7;
        info[2] = 0;
        execute_cpu_id(info);

        if (info[1] & 0x8)
            feature_list |= CPU_FEATURE_TZCNT;
        if (info[1] & 0x20)
            feature_list |= CPU_FEATURE_AVX2;
    }

    if (max_id >= 1) {
        info[0] = 1;
#if defined(SLJIT_CONFIG_X86_32) && SLJIT_CONFIG_X86_32
        /* Winchip 2 and Cyrix MII bugs */
        info[1] = info[2] = 0;
#endif
        execute_cpu_id(info);

        if (info[2] & 0x80000)
            feature_list |= CPU_FEATURE_SSE41;
        if (info[2] & 0x8000000)
            feature_list |= CPU_FEATURE_OSXSAVE;
        if (info[2] & 0x10000000)
            feature_list |= CPU_FEATURE_AVX;
#if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
        if (info[3] & 0x4000000)
            feature_list |= CPU_FEATURE_SSE2;
#endif
        if (info[3] & 0x8000)
            feature_list |= CPU_FEATURE_CMOV;
    }

    info[0] = 0x80000000;
    execute_cpu_id(info);
    max_id = info[0];

    if (max_id >= 0x80000001) {
        info[0] = 0x80000001;
        execute_cpu_id(info);

        if (info[2] & 0x20)
            feature_list |= CPU_FEATURE_LZCNT;
    }

    if ((feature_list & CPU_FEATURE_OSXSAVE) && (execute_get_xcr0_low() & 0x4) == 0)
        feature_list &= ~(sljit_u32)(CPU_FEATURE_AVX | CPU_FEATURE_AVX2);

    cpu_feature_list = feature_list;
}
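
/* Editor's note (assumption, not from the original sljit sources): the bit
   masks above are the standard CPUID feature bits: leaf 7 EBX bit 3 = BMI1
   (TZCNT) and bit 5 = AVX2; leaf 1 ECX bit 19 = SSE4.1, bit 27 = OSXSAVE,
   bit 28 = AVX; leaf 1 EDX bit 26 = SSE2 and bit 15 = CMOV; extended leaf
   0x80000001 ECX bit 5 = LZCNT. */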

static sljit_u8 get_jump_code(sljit_uw type)
{
    switch (type) {
    case SLJIT_EQUAL:
    case SLJIT_ATOMIC_STORED:
    case SLJIT_F_EQUAL:
    case SLJIT_UNORDERED_OR_EQUAL:
        return 0x84 /* je */;

    case SLJIT_NOT_EQUAL:
    case SLJIT_ATOMIC_NOT_STORED:
    case SLJIT_F_NOT_EQUAL:
    case SLJIT_ORDERED_NOT_EQUAL:
        return 0x85 /* jne */;

    case SLJIT_LESS:
    case SLJIT_CARRY:
    case SLJIT_F_LESS:
    case SLJIT_UNORDERED_OR_LESS:
    case SLJIT_UNORDERED_OR_GREATER:
        return 0x82 /* jc */;

    case SLJIT_GREATER_EQUAL:
    case SLJIT_NOT_CARRY:
    case SLJIT_F_GREATER_EQUAL:
    case SLJIT_ORDERED_GREATER_EQUAL:
    case SLJIT_ORDERED_LESS_EQUAL:
        return 0x83 /* jae */;

    case SLJIT_GREATER:
    case SLJIT_F_GREATER:
    case SLJIT_ORDERED_LESS:
    case SLJIT_ORDERED_GREATER:
        return 0x87 /* jnbe */;

    case SLJIT_LESS_EQUAL:
    case SLJIT_F_LESS_EQUAL:
    case SLJIT_UNORDERED_OR_GREATER_EQUAL:
    case SLJIT_UNORDERED_OR_LESS_EQUAL:
        return 0x86 /* jbe */;

    case SLJIT_SIG_LESS:
        return 0x8c /* jl */;

    case SLJIT_SIG_GREATER_EQUAL:
        return 0x8d /* jnl */;

    case SLJIT_SIG_GREATER:
        return 0x8f /* jnle */;

    case SLJIT_SIG_LESS_EQUAL:
        return 0x8e /* jle */;

    case SLJIT_OVERFLOW:
        return 0x80 /* jo */;

    case SLJIT_NOT_OVERFLOW:
        return 0x81 /* jno */;

    case SLJIT_UNORDERED:
    case SLJIT_ORDERED_EQUAL: /* NaN. */
        return 0x8a /* jp */;

    case SLJIT_ORDERED:
    case SLJIT_UNORDERED_OR_NOT_EQUAL: /* Not NaN. */
        return 0x8b /* jpo */;
    }
    return 0;
}
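
/* Editor's note (assumption, not from the original sljit sources): the
   values above are the second byte of the near conditional jump encoding
   (0x0f 0x8x rel32). The matching short form (0x7x rel8) is obtained by
   subtracting 0x10, which is what detect_near_jump_type() and
   emit_cmov_generic() rely on; e.g. je: 0x84 - 0x10 = 0x74. */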

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
static sljit_u8* detect_far_jump_type(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_sw executable_offset);
#else /* !SLJIT_CONFIG_X86_32 */
static sljit_u8* detect_far_jump_type(struct sljit_jump *jump, sljit_u8 *code_ptr);
static sljit_u8* generate_mov_addr_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_u8 *code, sljit_sw executable_offset);
#endif /* SLJIT_CONFIG_X86_32 */

static sljit_u8* detect_near_jump_type(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_u8 *code, sljit_sw executable_offset)
{
    sljit_uw type = jump->flags >> TYPE_SHIFT;
    sljit_s32 short_jump;
    sljit_uw label_addr;
    sljit_uw jump_addr;

    jump_addr = (sljit_uw)code_ptr;
    if (!(jump->flags & JUMP_ADDR)) {
        label_addr = (sljit_uw)(code + jump->u.label->size);

        if (jump->u.label->size > jump->addr)
            jump_addr = (sljit_uw)(code + jump->addr);
    } else
        label_addr = jump->u.target - (sljit_uw)executable_offset;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    if ((sljit_sw)(label_addr - (jump_addr + 6)) > HALFWORD_MAX || (sljit_sw)(label_addr - (jump_addr + 5)) < HALFWORD_MIN)
        return detect_far_jump_type(jump, code_ptr);
#endif /* SLJIT_CONFIG_X86_64 */

    short_jump = (sljit_sw)(label_addr - (jump_addr + 2)) >= -0x80 && (sljit_sw)(label_addr - (jump_addr + 2)) <= 0x7f;

    if (type == SLJIT_JUMP) {
        if (short_jump)
            *code_ptr++ = JMP_i8;
        else
            *code_ptr++ = JMP_i32;
    } else if (type > SLJIT_JUMP) {
        short_jump = 0;
        *code_ptr++ = CALL_i32;
    } else if (short_jump) {
        *code_ptr++ = U8(get_jump_code(type) - 0x10);
    } else {
        *code_ptr++ = GROUP_0F;
        *code_ptr++ = get_jump_code(type);
    }

    jump->addr = (sljit_uw)code_ptr;

    if (short_jump) {
        jump->flags |= PATCH_MB;
        code_ptr += sizeof(sljit_s8);
    } else {
        jump->flags |= PATCH_MW;
        code_ptr += sizeof(sljit_s32);
    }

    return code_ptr;
}

static void generate_jump_or_mov_addr(struct sljit_jump *jump, sljit_sw executable_offset)
{
    sljit_uw flags = jump->flags;
    sljit_uw addr = (flags & JUMP_ADDR) ? jump->u.target : jump->u.label->u.addr;
    sljit_uw jump_addr = jump->addr;
    SLJIT_UNUSED_ARG(executable_offset);

    if (SLJIT_UNLIKELY(flags & JUMP_MOV_ADDR)) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
        sljit_unaligned_store_sw((void*)(jump_addr - sizeof(sljit_sw)), (sljit_sw)addr);
#else /* SLJIT_CONFIG_X86_32 */
        if (flags & PATCH_MD) {
            SLJIT_ASSERT(addr > HALFWORD_MAX);
            sljit_unaligned_store_sw((void*)(jump_addr - sizeof(sljit_sw)), (sljit_sw)addr);
            return;
        }

        if (flags & PATCH_MW) {
            addr -= (sljit_uw)SLJIT_ADD_EXEC_OFFSET((sljit_u8*)jump_addr, executable_offset);
            SLJIT_ASSERT((sljit_sw)addr <= HALFWORD_MAX && (sljit_sw)addr >= HALFWORD_MIN);
        } else {
            SLJIT_ASSERT(addr <= HALFWORD_MAX);
        }
        sljit_unaligned_store_s32((void*)(jump_addr - sizeof(sljit_s32)), (sljit_s32)addr);
#endif /* !SLJIT_CONFIG_X86_32 */
        return;
    }

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    if (SLJIT_UNLIKELY(flags & PATCH_MD)) {
        SLJIT_ASSERT(!(flags & JUMP_ADDR));
        sljit_unaligned_store_sw((void*)jump_addr, (sljit_sw)addr);
        return;
    }
#endif /* SLJIT_CONFIG_X86_64 */

    addr -= (sljit_uw)SLJIT_ADD_EXEC_OFFSET((sljit_u8*)jump_addr, executable_offset);

    if (flags & PATCH_MB) {
        addr -= sizeof(sljit_s8);
        SLJIT_ASSERT((sljit_sw)addr <= 0x7f && (sljit_sw)addr >= -0x80);
        *(sljit_u8*)jump_addr = U8(addr);
        return;
    } else if (flags & PATCH_MW) {
        addr -= sizeof(sljit_s32);
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
        sljit_unaligned_store_sw((void*)jump_addr, (sljit_sw)addr);
#else /* !SLJIT_CONFIG_X86_32 */
        SLJIT_ASSERT((sljit_sw)addr <= HALFWORD_MAX && (sljit_sw)addr >= HALFWORD_MIN);
        sljit_unaligned_store_s32((void*)jump_addr, (sljit_s32)addr);
#endif /* SLJIT_CONFIG_X86_32 */
    }
}
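
/* Editor's summary (assumption, not from the original sljit sources): the
   first pass reserves the maximum encoding size for every jump.
   reduce_code_size() walks labels and jumps in address order, computes how
   many bytes each non-rewritable jump can shrink when its target fits a
   rel8 or rel32 displacement, and lowers label sizes, jump addresses and
   the total compiler->size accordingly before the final pass emits the
   code. */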

static void reduce_code_size(struct sljit_compiler *compiler)
{
    struct sljit_label *label;
    struct sljit_jump *jump;
    sljit_uw next_label_size;
    sljit_uw next_jump_addr;
    sljit_uw next_min_addr;
    sljit_uw size_reduce = 0;
    sljit_sw diff;
    sljit_uw type;
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
    sljit_uw size_reduce_max;
#endif /* SLJIT_DEBUG */

    label = compiler->labels;
    jump = compiler->jumps;

    next_label_size = SLJIT_GET_NEXT_SIZE(label);
    next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);

    while (1) {
        next_min_addr = next_label_size;
        if (next_jump_addr < next_min_addr)
            next_min_addr = next_jump_addr;

        if (next_min_addr == SLJIT_MAX_ADDRESS)
            break;

        if (next_min_addr == next_label_size) {
            label->size -= size_reduce;

            label = label->next;
            next_label_size = SLJIT_GET_NEXT_SIZE(label);
        }

        if (next_min_addr != next_jump_addr)
            continue;

        jump->addr -= size_reduce;
        if (!(jump->flags & JUMP_MOV_ADDR)) {
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
            size_reduce_max = size_reduce + (((jump->flags >> TYPE_SHIFT) < SLJIT_JUMP) ? CJUMP_MAX_SIZE : JUMP_MAX_SIZE);
#endif /* SLJIT_DEBUG */

            if (!(jump->flags & SLJIT_REWRITABLE_JUMP)) {
                if (jump->flags & JUMP_ADDR) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
                    if (jump->u.target <= 0xffffffffl)
                        size_reduce += sizeof(sljit_s32);
#endif /* SLJIT_CONFIG_X86_64 */
                } else {
                    /* Unit size: instruction. */
                    diff = (sljit_sw)jump->u.label->size - (sljit_sw)jump->addr;
                    if (jump->u.label->size > jump->addr) {
                        SLJIT_ASSERT(jump->u.label->size - size_reduce >= jump->addr);
                        diff -= (sljit_sw)size_reduce;
                    }
                    type = jump->flags >> TYPE_SHIFT;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
                    if (type == SLJIT_JUMP) {
                        if (diff <= 0x7f + 2 && diff >= -0x80 + 2)
                            size_reduce += JUMP_MAX_SIZE - 2;
                        else if (diff <= HALFWORD_MAX + 5 && diff >= HALFWORD_MIN + 5)
                            size_reduce += JUMP_MAX_SIZE - 5;
                    } else if (type < SLJIT_JUMP) {
                        if (diff <= 0x7f + 2 && diff >= -0x80 + 2)
                            size_reduce += CJUMP_MAX_SIZE - 2;
                        else if (diff <= HALFWORD_MAX + 6 && diff >= HALFWORD_MIN + 6)
                            size_reduce += CJUMP_MAX_SIZE - 6;
                    } else {
                        if (diff <= HALFWORD_MAX + 5 && diff >= HALFWORD_MIN + 5)
                            size_reduce += JUMP_MAX_SIZE - 5;
                    }
#else /* !SLJIT_CONFIG_X86_64 */
                    if (type == SLJIT_JUMP) {
                        if (diff <= 0x7f + 2 && diff >= -0x80 + 2)
                            size_reduce += JUMP_MAX_SIZE - 2;
                    } else if (type < SLJIT_JUMP) {
                        if (diff <= 0x7f + 2 && diff >= -0x80 + 2)
                            size_reduce += CJUMP_MAX_SIZE - 2;
                    }
#endif /* SLJIT_CONFIG_X86_64 */
                }
            }

#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
            jump->flags |= (size_reduce_max - size_reduce) << JUMP_SIZE_SHIFT;
#endif /* SLJIT_DEBUG */
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
        } else {
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
            size_reduce_max = size_reduce + 10;
#endif /* SLJIT_DEBUG */

            if (!(jump->flags & JUMP_ADDR)) {
                diff = (sljit_sw)jump->u.label->size - (sljit_sw)(jump->addr - 3);

                if (diff <= HALFWORD_MAX && diff >= HALFWORD_MIN)
                    size_reduce += 3;
            } else if (jump->u.target <= 0xffffffffl)
                size_reduce += (jump->flags & MOV_ADDR_HI) ? 4 : 5;

#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
            jump->flags |= (size_reduce_max - size_reduce) << JUMP_SIZE_SHIFT;
#endif /* SLJIT_DEBUG */
#endif /* SLJIT_CONFIG_X86_64 */
        }

        jump = jump->next;
        next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);
    }

    compiler->size -= size_reduce;
}

SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler, sljit_s32 options, void *exec_allocator_data)
{
    struct sljit_memory_fragment *buf;
    sljit_u8 *code;
    sljit_u8 *code_ptr;
    sljit_u8 *buf_ptr;
    sljit_u8 *buf_end;
    sljit_u8 len;
    sljit_sw executable_offset;
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
    sljit_uw addr;
#endif /* SLJIT_DEBUG */

    struct sljit_label *label;
    struct sljit_jump *jump;
    struct sljit_const *const_;

    CHECK_ERROR_PTR();
    CHECK_PTR(check_sljit_generate_code(compiler));

    reduce_code_size(compiler);

    /* Second code generation pass. */
    code = (sljit_u8*)allocate_executable_memory(compiler->size, options, exec_allocator_data, &executable_offset);
    PTR_FAIL_WITH_EXEC_IF(code);

    reverse_buf(compiler);
    buf = compiler->buf;

    code_ptr = code;
    label = compiler->labels;
    jump = compiler->jumps;
    const_ = compiler->consts;

    do {
        buf_ptr = buf->memory;
        buf_end = buf_ptr + buf->used_size;
        do {
            len = *buf_ptr++;
            SLJIT_ASSERT(len > 0);
            if (len < SLJIT_INST_CONST) {
                /* The code is already generated. */
                SLJIT_MEMCPY(code_ptr, buf_ptr, len);
                code_ptr += len;
                buf_ptr += len;
            } else {
                switch (len) {
                case SLJIT_INST_LABEL:
                    label->u.addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
                    label->size = (sljit_uw)(code_ptr - code);
                    label = label->next;
                    break;
                case SLJIT_INST_JUMP:
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
                    addr = (sljit_uw)code_ptr;
#endif /* SLJIT_DEBUG */
                    if (!(jump->flags & SLJIT_REWRITABLE_JUMP))
                        code_ptr = detect_near_jump_type(jump, code_ptr, code, executable_offset);
                    else {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
                        code_ptr = detect_far_jump_type(jump, code_ptr, executable_offset);
#else /* !SLJIT_CONFIG_X86_32 */
                        code_ptr = detect_far_jump_type(jump, code_ptr);
#endif /* SLJIT_CONFIG_X86_32 */
                    }

                    SLJIT_ASSERT((sljit_uw)code_ptr - addr <= ((jump->flags >> JUMP_SIZE_SHIFT) & 0x1f));
                    jump = jump->next;
                    break;
                case SLJIT_INST_MOV_ADDR:
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
                    code_ptr = generate_mov_addr_code(jump, code_ptr, code, executable_offset);
#endif /* SLJIT_CONFIG_X86_64 */
                    jump->addr = (sljit_uw)code_ptr;
                    jump = jump->next;
                    break;
                default:
                    SLJIT_ASSERT(len == SLJIT_INST_CONST);
                    const_->addr = ((sljit_uw)code_ptr) - sizeof(sljit_sw);
                    const_ = const_->next;
                    break;
                }
            }
        } while (buf_ptr < buf_end);

        SLJIT_ASSERT(buf_ptr == buf_end);
        buf = buf->next;
    } while (buf);

    SLJIT_ASSERT(!label);
    SLJIT_ASSERT(!jump);
    SLJIT_ASSERT(!const_);
    SLJIT_ASSERT(code_ptr <= code + compiler->size);

    jump = compiler->jumps;
    while (jump) {
        generate_jump_or_mov_addr(jump, executable_offset);
        jump = jump->next;
    }

    compiler->error = SLJIT_ERR_COMPILED;
    compiler->executable_offset = executable_offset;
    compiler->executable_size = (sljit_uw)(code_ptr - code);

    code = (sljit_u8*)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);

    SLJIT_UPDATE_WX_FLAGS(code, (sljit_u8*)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset), 1);
    return (void*)code;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
{
    switch (feature_type) {
    case SLJIT_HAS_FPU:
#ifdef SLJIT_IS_FPU_AVAILABLE
        return (SLJIT_IS_FPU_AVAILABLE) != 0;
#elif (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
        if (cpu_feature_list == 0)
            get_cpu_features();
        return (cpu_feature_list & CPU_FEATURE_SSE2) != 0;
#else /* SLJIT_DETECT_SSE2 */
        return 1;
#endif /* SLJIT_DETECT_SSE2 */

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
    case SLJIT_HAS_VIRTUAL_REGISTERS:
        return 1;
#endif /* SLJIT_CONFIG_X86_32 */

    case SLJIT_HAS_CLZ:
        if (cpu_feature_list == 0)
            get_cpu_features();

        return (cpu_feature_list & CPU_FEATURE_LZCNT) ? 1 : 2;

    case SLJIT_HAS_CTZ:
        if (cpu_feature_list == 0)
            get_cpu_features();

        return (cpu_feature_list & CPU_FEATURE_TZCNT) ? 1 : 2;

    case SLJIT_HAS_CMOV:
        if (cpu_feature_list == 0)
            get_cpu_features();
        return (cpu_feature_list & CPU_FEATURE_CMOV) != 0;

    case SLJIT_HAS_REV:
    case SLJIT_HAS_ROT:
    case SLJIT_HAS_PREFETCH:
    case SLJIT_HAS_COPY_F32:
    case SLJIT_HAS_COPY_F64:
    case SLJIT_HAS_ATOMIC:
    case SLJIT_HAS_MEMORY_BARRIER:
        return 1;

#if !(defined SLJIT_IS_FPU_AVAILABLE) || SLJIT_IS_FPU_AVAILABLE
    case SLJIT_HAS_AVX:
        if (cpu_feature_list == 0)
            get_cpu_features();
        return (cpu_feature_list & CPU_FEATURE_AVX) != 0;
    case SLJIT_HAS_AVX2:
        if (cpu_feature_list == 0)
            get_cpu_features();
        return (cpu_feature_list & CPU_FEATURE_AVX2) != 0;
    case SLJIT_HAS_SIMD:
        if (cpu_feature_list == 0)
            get_cpu_features();
        return (cpu_feature_list & CPU_FEATURE_SSE41) != 0;
#endif /* SLJIT_IS_FPU_AVAILABLE */
    default:
        return 0;
    }
}
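
/* Editor's note (assumption, not from the original sljit sources): for
   SLJIT_HAS_CLZ/SLJIT_HAS_CTZ any nonzero result means the operation is
   available; 1 appears to indicate a native LZCNT/TZCNT instruction and
   2 the BSR/BSF-based fallback implemented by emit_clz_ctz() below. */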

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type)
{
    switch (type) {
    case SLJIT_ORDERED_EQUAL:
    case SLJIT_UNORDERED_OR_NOT_EQUAL:
        return 2;
    }

    return 0;
}

/* --------------------------------------------------------------------- */
/* Operators */
/* --------------------------------------------------------------------- */

#define BINARY_OPCODE(opcode) (((opcode ## _EAX_i32) << 24) | ((opcode ## _r_rm) << 16) | ((opcode ## _rm_r) << 8) | (opcode))

#define BINARY_IMM32(op_imm, immw, arg, argw) \
    do { \
        inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, immw, arg, argw); \
        FAIL_IF(!inst); \
        *(inst + 1) |= (op_imm); \
    } while (0)

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)

#define BINARY_IMM(op_imm, op_mr, immw, arg, argw) \
    do { \
        if (IS_HALFWORD(immw) || compiler->mode32) { \
            BINARY_IMM32(op_imm, immw, arg, argw); \
        } \
        else { \
            FAIL_IF(emit_load_imm64(compiler, FAST_IS_REG(arg) ? TMP_REG2 : TMP_REG1, immw)); \
            inst = emit_x86_instruction(compiler, 1, FAST_IS_REG(arg) ? TMP_REG2 : TMP_REG1, 0, arg, argw); \
            FAIL_IF(!inst); \
            *inst = (op_mr); \
        } \
    } while (0)

#define BINARY_EAX_IMM(op_eax_imm, immw) \
    FAIL_IF(emit_do_imm32(compiler, (!compiler->mode32) ? REX_W : 0, (op_eax_imm), immw))

#else /* !SLJIT_CONFIG_X86_64 */

#define BINARY_IMM(op_imm, op_mr, immw, arg, argw) \
    BINARY_IMM32(op_imm, immw, arg, argw)

#define BINARY_EAX_IMM(op_eax_imm, immw) \
    FAIL_IF(emit_do_imm(compiler, (op_eax_imm), immw))

#endif /* SLJIT_CONFIG_X86_64 */
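
/* Illustration (assumption, not from the original sljit sources): the
   immediate forms use the x86 "group" opcodes GROUP_BINARY_81/83, which
   encode the actual operation in the reg field (bits 5-3) of the ModRM
   byte. That is why the binary op constants are defined as n << 3 and
   why BINARY_IMM32 ors op_imm into the byte after the opcode: e.g.
   BINARY_IMM32(ADD, 4, dst, dstw) yields "add dst, 4", and the same
   bytes with SUB (5 << 3) yield "sub dst, 4". */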

static sljit_s32 emit_byte(struct sljit_compiler *compiler, sljit_u8 byte)
{
    sljit_u8 *inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
    FAIL_IF(!inst);
    INC_SIZE(1);
    *inst = byte;
    return SLJIT_SUCCESS;
}

static sljit_s32 emit_mov(struct sljit_compiler *compiler,
    sljit_s32 dst, sljit_sw dstw,
    sljit_s32 src, sljit_sw srcw);

#define EMIT_MOV(compiler, dst, dstw, src, srcw) \
    FAIL_IF(emit_mov(compiler, dst, dstw, src, srcw));

static sljit_s32 emit_groupf(struct sljit_compiler *compiler,
    sljit_uw op,
    sljit_s32 dst, sljit_s32 src, sljit_sw srcw);

static sljit_s32 emit_groupf_ext(struct sljit_compiler *compiler,
    sljit_uw op,
    sljit_s32 dst, sljit_s32 src, sljit_sw srcw);

static SLJIT_INLINE sljit_s32 emit_sse2_store(struct sljit_compiler *compiler,
    sljit_s32 single, sljit_s32 dst, sljit_sw dstw, sljit_s32 src);

static SLJIT_INLINE sljit_s32 emit_sse2_load(struct sljit_compiler *compiler,
    sljit_s32 single, sljit_s32 dst, sljit_s32 src, sljit_sw srcw);

static sljit_s32 emit_cmp_binary(struct sljit_compiler *compiler,
    sljit_s32 src1, sljit_sw src1w,
    sljit_s32 src2, sljit_sw src2w);

static sljit_s32 emit_cmov_generic(struct sljit_compiler *compiler, sljit_s32 type,
    sljit_s32 dst_reg,
    sljit_s32 src, sljit_sw srcw);

static SLJIT_INLINE sljit_s32 emit_endbranch(struct sljit_compiler *compiler)
{
#if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET)
    /* Emit endbr32/endbr64 when CET is enabled. */
    sljit_u8 *inst;
    inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
    FAIL_IF(!inst);
    INC_SIZE(4);
    inst[0] = GROUP_F3;
    inst[1] = GROUP_0F;
    inst[2] = 0x1e;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
    inst[3] = 0xfb;
#else /* !SLJIT_CONFIG_X86_32 */
    inst[3] = 0xfa;
#endif /* SLJIT_CONFIG_X86_32 */
#else /* !SLJIT_CONFIG_X86_CET */
    SLJIT_UNUSED_ARG(compiler);
#endif /* SLJIT_CONFIG_X86_CET */
    return SLJIT_SUCCESS;
}

#if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET) && defined (__SHSTK__)

static SLJIT_INLINE sljit_s32 emit_rdssp(struct sljit_compiler *compiler, sljit_s32 reg)
{
    sljit_u8 *inst;
    sljit_s32 size;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    size = 5;
#else
    size = 4;
#endif

    inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
    FAIL_IF(!inst);
    INC_SIZE(size);
    *inst++ = GROUP_F3;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    *inst++ = REX_W | (reg_map[reg] <= 7 ? 0 : REX_B);
#endif
    inst[0] = GROUP_0F;
    inst[1] = 0x1e;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    inst[2] = U8(MOD_REG | (0x1 << 3) | reg_lmap[reg]);
#else
    inst[2] = U8(MOD_REG | (0x1 << 3) | reg_map[reg]);
#endif
    return SLJIT_SUCCESS;
}

static SLJIT_INLINE sljit_s32 emit_incssp(struct sljit_compiler *compiler, sljit_s32 reg)
{
    sljit_u8 *inst;
    sljit_s32 size;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    size = 5;
#else
    size = 4;
#endif

    inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
    FAIL_IF(!inst);
    INC_SIZE(size);
    *inst++ = GROUP_F3;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    *inst++ = REX_W | (reg_map[reg] <= 7 ? 0 : REX_B);
#endif
    inst[0] = GROUP_0F;
    inst[1] = 0xae;
    inst[2] = (0x3 << 6) | (0x5 << 3) | (reg_map[reg] & 0x7);
    return SLJIT_SUCCESS;
}

#endif /* SLJIT_CONFIG_X86_CET && __SHSTK__ */

static SLJIT_INLINE sljit_s32 cpu_has_shadow_stack(void)
{
#if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET) && defined (__SHSTK__)
    return _get_ssp() != 0;
#else /* !SLJIT_CONFIG_X86_CET || !__SHSTK__ */
    return 0;
#endif /* SLJIT_CONFIG_X86_CET && __SHSTK__ */
}

static SLJIT_INLINE sljit_s32 adjust_shadow_stack(struct sljit_compiler *compiler,
    sljit_s32 src, sljit_sw srcw)
{
#if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET) && defined (__SHSTK__)
    sljit_u8 *inst, *jz_after_cmp_inst;
    sljit_uw size_jz_after_cmp_inst;

    sljit_uw size_before_rdssp_inst = compiler->size;

    /* Generate "RDSSP TMP_REG1". */
    FAIL_IF(emit_rdssp(compiler, TMP_REG1));

    /* Load return address on shadow stack into TMP_REG1. */
    EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(TMP_REG1), 0);

    /* Compare return address against TMP_REG1. */
    FAIL_IF(emit_cmp_binary(compiler, TMP_REG1, 0, src, srcw));

    /* Generate JZ to skip the shadow stack adjustment when the shadow
       stack matches the normal stack. */
    inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
    FAIL_IF(!inst);
    INC_SIZE(2);
    *inst++ = get_jump_code(SLJIT_EQUAL) - 0x10;
    size_jz_after_cmp_inst = compiler->size;
    jz_after_cmp_inst = inst;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    /* REX_W is not necessary. */
    compiler->mode32 = 1;
#endif
    /* Load 1 into TMP_REG1. */
    EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, 1);

    /* Generate "INCSSP TMP_REG1". */
    FAIL_IF(emit_incssp(compiler, TMP_REG1));

    /* Jump back to "RDSSP TMP_REG1" to check the shadow stack again. */
    inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
    FAIL_IF(!inst);
    INC_SIZE(2);
    inst[0] = JMP_i8;
    inst[1] = size_before_rdssp_inst - compiler->size;

    *jz_after_cmp_inst = compiler->size - size_jz_after_cmp_inst;
#else /* !SLJIT_CONFIG_X86_CET || !__SHSTK__ */
    SLJIT_UNUSED_ARG(compiler);
    SLJIT_UNUSED_ARG(src);
    SLJIT_UNUSED_ARG(srcw);
#endif /* SLJIT_CONFIG_X86_CET && __SHSTK__ */
    return SLJIT_SUCCESS;
}

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
#include "sljitNativeX86_32.c"
#else
#include "sljitNativeX86_64.c"
#endif

static sljit_s32 emit_mov(struct sljit_compiler *compiler,
    sljit_s32 dst, sljit_sw dstw,
    sljit_s32 src, sljit_sw srcw)
{
    sljit_u8* inst;

    if (FAST_IS_REG(src)) {
        inst = emit_x86_instruction(compiler, 1, src, 0, dst, dstw);
        FAIL_IF(!inst);
        *inst = MOV_rm_r;
        return SLJIT_SUCCESS;
    }

    if (src == SLJIT_IMM) {
        if (FAST_IS_REG(dst)) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
            return emit_do_imm(compiler, MOV_r_i32 | reg_map[dst], srcw);
#else
            if (!compiler->mode32) {
                if (NOT_HALFWORD(srcw))
                    return emit_load_imm64(compiler, dst, srcw);
            }
            else
                return emit_do_imm32(compiler, (reg_map[dst] >= 8) ? REX_B : 0, U8(MOV_r_i32 | reg_lmap[dst]), srcw);
#endif
        }
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
        if (!compiler->mode32 && NOT_HALFWORD(srcw)) {
            /* Immediate to memory move. Only the SLJIT_MOV operation copies
               an immediate directly into memory, so TMP_REG1 can be used. */
            FAIL_IF(emit_load_imm64(compiler, TMP_REG1, srcw));
            inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
            FAIL_IF(!inst);
            *inst = MOV_rm_r;
            return SLJIT_SUCCESS;
        }
#endif
        inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, dstw);
        FAIL_IF(!inst);
        *inst = MOV_rm_i32;
        return SLJIT_SUCCESS;
    }
    if (FAST_IS_REG(dst)) {
        inst = emit_x86_instruction(compiler, 1, dst, 0, src, srcw);
        FAIL_IF(!inst);
        *inst = MOV_r_rm;
        return SLJIT_SUCCESS;
    }

    /* Memory to memory move. Only the SLJIT_MOV operation copies
       data from memory to memory, so TMP_REG1 can be used. */
    inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src, srcw);
    FAIL_IF(!inst);
    *inst = MOV_r_rm;
    inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
    FAIL_IF(!inst);
    *inst = MOV_rm_r;
    return SLJIT_SUCCESS;
}

static sljit_s32 emit_cmov_generic(struct sljit_compiler *compiler, sljit_s32 type,
    sljit_s32 dst_reg,
    sljit_s32 src, sljit_sw srcw)
{
    sljit_u8* inst;
    sljit_uw size;

    SLJIT_ASSERT(type >= SLJIT_EQUAL && type <= SLJIT_ORDERED_LESS_EQUAL);

    inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
    FAIL_IF(!inst);
    INC_SIZE(2);
    inst[0] = U8(get_jump_code((sljit_uw)type ^ 0x1) - 0x10);

    size = compiler->size;
    EMIT_MOV(compiler, dst_reg, 0, src, srcw);

    inst[1] = U8(compiler->size - size);
    return SLJIT_SUCCESS;
}
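
/* Editor's note (assumption, not from the original sljit sources): this
   emulates a conditional move with a short conditional branch over the
   mov. The condition is inverted (type ^ 0x1 flips the paired sljit
   condition codes), encoded as a short jcc (get_jump_code() - 0x10), and
   its rel8 displacement is patched afterwards to the size of the emitted
   mov. */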

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
{
    sljit_u8 *inst;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    sljit_uw size;
#endif

    CHECK_ERROR();
    CHECK(check_sljit_emit_op0(compiler, op));

    switch (GET_OPCODE(op)) {
    case SLJIT_BREAKPOINT:
        return emit_byte(compiler, INT3);
    case SLJIT_NOP:
        return emit_byte(compiler, NOP);
    case SLJIT_LMUL_UW:
    case SLJIT_LMUL_SW:
    case SLJIT_DIVMOD_UW:
    case SLJIT_DIVMOD_SW:
    case SLJIT_DIV_UW:
    case SLJIT_DIV_SW:
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
#ifdef _WIN64
        SLJIT_ASSERT(
            reg_map[SLJIT_R0] == 0
            && reg_map[SLJIT_R1] == 2
            && reg_map[TMP_REG1] > 7);
#else
        SLJIT_ASSERT(
            reg_map[SLJIT_R0] == 0
            && reg_map[SLJIT_R1] < 7
            && reg_map[TMP_REG1] == 2);
#endif
        compiler->mode32 = op & SLJIT_32;
#endif
        SLJIT_COMPILE_ASSERT((SLJIT_DIVMOD_UW & 0x2) == 0 && SLJIT_DIV_UW - 0x2 == SLJIT_DIVMOD_UW, bad_div_opcode_assignments);

        op = GET_OPCODE(op);
        if ((op | 0x2) == SLJIT_DIV_UW) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) || defined(_WIN64)
            EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_R1, 0);
            inst = emit_x86_instruction(compiler, 1, SLJIT_R1, 0, SLJIT_R1, 0);
#else
            inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, TMP_REG1, 0);
#endif
            FAIL_IF(!inst);
            *inst = XOR_r_rm;
        }

        if ((op | 0x2) == SLJIT_DIV_SW) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) || defined(_WIN64)
            EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_R1, 0);
#endif

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
            FAIL_IF(emit_byte(compiler, CDQ));
#else
            if (!compiler->mode32) {
                inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
                FAIL_IF(!inst);
                INC_SIZE(2);
                inst[0] = REX_W;
                inst[1] = CDQ;
            } else
                FAIL_IF(emit_byte(compiler, CDQ));
#endif
        }

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
        inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
        FAIL_IF(!inst);
        INC_SIZE(2);
        inst[0] = GROUP_F7;
        inst[1] = MOD_REG | ((op >= SLJIT_DIVMOD_UW) ? reg_map[TMP_REG1] : reg_map[SLJIT_R1]);
#else /* !SLJIT_CONFIG_X86_32 */
#ifdef _WIN64
        size = (!compiler->mode32 || op >= SLJIT_DIVMOD_UW) ? 3 : 2;
#else /* !_WIN64 */
        size = (!compiler->mode32) ? 3 : 2;
#endif /* _WIN64 */
        inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
        FAIL_IF(!inst);
        INC_SIZE(size);
#ifdef _WIN64
        if (!compiler->mode32)
            *inst++ = REX_W | ((op >= SLJIT_DIVMOD_UW) ? REX_B : 0);
        else if (op >= SLJIT_DIVMOD_UW)
            *inst++ = REX_B;
        inst[0] = GROUP_F7;
        inst[1] = MOD_REG | ((op >= SLJIT_DIVMOD_UW) ? reg_lmap[TMP_REG1] : reg_lmap[SLJIT_R1]);
#else /* !_WIN64 */
        if (!compiler->mode32)
            *inst++ = REX_W;
        inst[0] = GROUP_F7;
        inst[1] = MOD_REG | reg_map[SLJIT_R1];
#endif /* _WIN64 */
#endif /* SLJIT_CONFIG_X86_32 */
        switch (op) {
        case SLJIT_LMUL_UW:
            inst[1] |= MUL;
            break;
        case SLJIT_LMUL_SW:
            inst[1] |= IMUL;
            break;
        case SLJIT_DIVMOD_UW:
        case SLJIT_DIV_UW:
            inst[1] |= DIV;
            break;
        case SLJIT_DIVMOD_SW:
        case SLJIT_DIV_SW:
            inst[1] |= IDIV;
            break;
        }
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) && !defined(_WIN64)
        if (op <= SLJIT_DIVMOD_SW)
            EMIT_MOV(compiler, SLJIT_R1, 0, TMP_REG1, 0);
#else
        if (op >= SLJIT_DIV_UW)
            EMIT_MOV(compiler, SLJIT_R1, 0, TMP_REG1, 0);
#endif
        break;
    case SLJIT_MEMORY_BARRIER:
        inst = (sljit_u8*)ensure_buf(compiler, 1 + 3);
        FAIL_IF(!inst);
        INC_SIZE(3);
        inst[0] = GROUP_0F;
        inst[1] = 0xae;
        inst[2] = 0xf0;
        return SLJIT_SUCCESS;
    case SLJIT_ENDBR:
        return emit_endbranch(compiler);
    case SLJIT_SKIP_FRAMES_BEFORE_RETURN:
        return skip_frames_before_return(compiler);
    }

    return SLJIT_SUCCESS;
}

static sljit_s32 emit_mov_byte(struct sljit_compiler *compiler, sljit_s32 sign,
    sljit_s32 dst, sljit_sw dstw,
    sljit_s32 src, sljit_sw srcw)
{
    sljit_u8* inst;
    sljit_s32 dst_r;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    compiler->mode32 = 0;
#endif

    if (src == SLJIT_IMM) {
        if (FAST_IS_REG(dst)) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
            return emit_do_imm(compiler, MOV_r_i32 | reg_map[dst], srcw);
#else
            inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, 0);
            FAIL_IF(!inst);
            *inst = MOV_rm_i32;
            return SLJIT_SUCCESS;
#endif
        }
        inst = emit_x86_instruction(compiler, 1 | EX86_BYTE_ARG | EX86_NO_REXW, SLJIT_IMM, srcw, dst, dstw);
        FAIL_IF(!inst);
        *inst = MOV_rm8_i8;
        return SLJIT_SUCCESS;
    }

    dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

    if ((dst & SLJIT_MEM) && FAST_IS_REG(src)) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
        if (reg_map[src] >= 4) {
            SLJIT_ASSERT(dst_r == TMP_REG1);
            EMIT_MOV(compiler, TMP_REG1, 0, src, 0);
        } else
            dst_r = src;
#else
        dst_r = src;
#endif
    } else {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
        if (FAST_IS_REG(src) && reg_map[src] >= 4) {
            /* Both src and dst are registers. */
            SLJIT_ASSERT(FAST_IS_REG(dst));

            if (src == dst && !sign) {
                inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, 0xff, dst, 0);
                FAIL_IF(!inst);
                *(inst + 1) |= AND;
                return SLJIT_SUCCESS;
            }

            EMIT_MOV(compiler, TMP_REG1, 0, src, 0);
            src = TMP_REG1;
            srcw = 0;
        }
#endif /* SLJIT_CONFIG_X86_32 */

        /* src is either a memory address, or a register with reg_map[src] < 4 on x86-32. */
        FAIL_IF(emit_groupf(compiler, sign ? MOVSX_r_rm8 : MOVZX_r_rm8, dst_r, src, srcw));
    }

    if (dst & SLJIT_MEM) {
        inst = emit_x86_instruction(compiler, 1 | EX86_REX | EX86_NO_REXW, dst_r, 0, dst, dstw);
        FAIL_IF(!inst);
        *inst = MOV_rm8_r8;
    }

    return SLJIT_SUCCESS;
}

static sljit_s32 emit_prefetch(struct sljit_compiler *compiler, sljit_s32 op,
    sljit_s32 src, sljit_sw srcw)
{
    sljit_u8* inst;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    compiler->mode32 = 1;
#endif

    inst = emit_x86_instruction(compiler, 2, 0, 0, src, srcw);
    FAIL_IF(!inst);
    inst[0] = GROUP_0F;
    inst[1] = PREFETCH;

    if (op == SLJIT_PREFETCH_L1)
        inst[2] |= (1 << 3);
    else if (op == SLJIT_PREFETCH_L2)
        inst[2] |= (2 << 3);
    else if (op == SLJIT_PREFETCH_L3)
        inst[2] |= (3 << 3);

    return SLJIT_SUCCESS;
}

static sljit_s32 emit_mov_half(struct sljit_compiler *compiler, sljit_s32 sign,
    sljit_s32 dst, sljit_sw dstw,
    sljit_s32 src, sljit_sw srcw)
{
    sljit_u8* inst;
    sljit_s32 dst_r;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    compiler->mode32 = 0;
#endif

    if (src == SLJIT_IMM) {
        if (FAST_IS_REG(dst)) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
            return emit_do_imm(compiler, MOV_r_i32 | reg_map[dst], srcw);
#else
            inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, 0);
            FAIL_IF(!inst);
            *inst = MOV_rm_i32;
            return SLJIT_SUCCESS;
#endif
        }
        inst = emit_x86_instruction(compiler, 1 | EX86_HALF_ARG | EX86_NO_REXW | EX86_PREF_66, SLJIT_IMM, srcw, dst, dstw);
        FAIL_IF(!inst);
        *inst = MOV_rm_i32;
        return SLJIT_SUCCESS;
    }

    dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

    if ((dst & SLJIT_MEM) && FAST_IS_REG(src))
        dst_r = src;
    else
        FAIL_IF(emit_groupf(compiler, sign ? MOVSX_r_rm16 : MOVZX_r_rm16, dst_r, src, srcw));

    if (dst & SLJIT_MEM) {
        inst = emit_x86_instruction(compiler, 1 | EX86_NO_REXW | EX86_PREF_66, dst_r, 0, dst, dstw);
        FAIL_IF(!inst);
        *inst = MOV_rm_r;
    }

    return SLJIT_SUCCESS;
}

static sljit_s32 emit_unary(struct sljit_compiler *compiler, sljit_u8 opcode,
    sljit_s32 dst, sljit_sw dstw,
    sljit_s32 src, sljit_sw srcw)
{
    sljit_u8* inst;

    if (dst == src && dstw == srcw) {
        /* Same input and output */
        inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
        FAIL_IF(!inst);
        inst[0] = GROUP_F7;
        inst[1] |= opcode;
        return SLJIT_SUCCESS;
    }

    if (FAST_IS_REG(dst)) {
        EMIT_MOV(compiler, dst, 0, src, srcw);
        inst = emit_x86_instruction(compiler, 1, 0, 0, dst, 0);
        FAIL_IF(!inst);
        inst[0] = GROUP_F7;
        inst[1] |= opcode;
        return SLJIT_SUCCESS;
    }

    EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
    inst = emit_x86_instruction(compiler, 1, 0, 0, TMP_REG1, 0);
    FAIL_IF(!inst);
    inst[0] = GROUP_F7;
    inst[1] |= opcode;
    EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
    return SLJIT_SUCCESS;
}

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
static const sljit_sw emit_clz_arg = 32 + 31;
static const sljit_sw emit_ctz_arg = 32;
#endif

static sljit_s32 emit_clz_ctz(struct sljit_compiler *compiler, sljit_s32 is_clz,
    sljit_s32 dst, sljit_sw dstw,
    sljit_s32 src, sljit_sw srcw)
{
    sljit_u8* inst;
    sljit_s32 dst_r;
    sljit_sw max;

    SLJIT_ASSERT(cpu_feature_list != 0);

    dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

    if (is_clz ? (cpu_feature_list & CPU_FEATURE_LZCNT) : (cpu_feature_list & CPU_FEATURE_TZCNT)) {
        FAIL_IF(emit_groupf(compiler, (is_clz ? LZCNT_r_rm : TZCNT_r_rm) | EX86_PREF_F3, dst_r, src, srcw));

        if (dst & SLJIT_MEM)
            EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
        return SLJIT_SUCCESS;
    }

    FAIL_IF(emit_groupf(compiler, is_clz ? BSR_r_rm : BSF_r_rm, dst_r, src, srcw));

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
    max = is_clz ? (32 + 31) : 32;

    if (cpu_feature_list & CPU_FEATURE_CMOV) {
        if (dst_r != TMP_REG1) {
            EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, max);
            inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG1, 0);
        }
        else
            inst = emit_x86_instruction(compiler, 2, dst_r, 0, SLJIT_MEM0(), is_clz ? (sljit_sw)&emit_clz_arg : (sljit_sw)&emit_ctz_arg);

        FAIL_IF(!inst);
        inst[0] = GROUP_0F;
        inst[1] = CMOVE_r_rm;
    }
    else
        FAIL_IF(emit_cmov_generic(compiler, SLJIT_EQUAL, dst_r, SLJIT_IMM, max));

    if (is_clz) {
        inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, 31, dst_r, 0);
        FAIL_IF(!inst);
        *(inst + 1) |= XOR;
    }
#else
    if (is_clz)
        max = compiler->mode32 ? (32 + 31) : (64 + 63);
    else
        max = compiler->mode32 ? 32 : 64;

    if (cpu_feature_list & CPU_FEATURE_CMOV) {
        EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_IMM, max);
        FAIL_IF(emit_groupf(compiler, CMOVE_r_rm, dst_r, TMP_REG2, 0));
    } else
        FAIL_IF(emit_cmov_generic(compiler, SLJIT_EQUAL, dst_r, SLJIT_IMM, max));

    if (is_clz) {
        inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, max >> 1, dst_r, 0);
        FAIL_IF(!inst);
        *(inst + 1) |= XOR;
    }
#endif

    if (dst & SLJIT_MEM)
        EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
    return SLJIT_SUCCESS;
}
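
/* Editor's note (assumption, not from the original sljit sources): BSR
   returns the index of the highest set bit, so for a nonzero 32-bit value
   clz(x) = 31 - bsr(x), and since bsr(x) is in 0..31 this equals
   bsr(x) ^ 31; hence the XOR with 31 (max >> 1 in the 64-bit path) above.
   The CMOV covers the zero input, where BSR/BSF leave the destination
   undefined: loading 32 + 31 makes the final XOR produce 32, the expected
   clz(0). */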

static sljit_s32 emit_bswap(struct sljit_compiler *compiler,
    sljit_s32 op,
    sljit_s32 dst, sljit_sw dstw,
    sljit_s32 src, sljit_sw srcw)
{
    sljit_u8 *inst;
    sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
    sljit_uw size;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    sljit_u8 rex = 0;
#else /* !SLJIT_CONFIG_X86_64 */
    sljit_s32 dst_is_ereg = op & SLJIT_32;
#endif /* SLJIT_CONFIG_X86_64 */

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    if (op == SLJIT_REV_U32 || op == SLJIT_REV_S32)
        compiler->mode32 = 1;
#else /* !SLJIT_CONFIG_X86_64 */
    op &= ~SLJIT_32;
#endif /* SLJIT_CONFIG_X86_64 */

    if (src != dst_r) {
        /* Only the lower 16 bits are read for eregs. */
        if (op == SLJIT_REV_U16 || op == SLJIT_REV_S16)
            FAIL_IF(emit_mov_half(compiler, 0, dst_r, 0, src, srcw));
        else
            EMIT_MOV(compiler, dst_r, 0, src, srcw);
    }

    size = 2;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    if (!compiler->mode32)
        rex = REX_W;

    if (reg_map[dst_r] >= 8)
        rex |= REX_B;

    if (rex != 0)
        size++;
#endif /* SLJIT_CONFIG_X86_64 */

    inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
    FAIL_IF(!inst);
    INC_SIZE(size);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    if (rex != 0)
        *inst++ = rex;

    inst[0] = GROUP_0F;
    inst[1] = BSWAP_r | reg_lmap[dst_r];
#else /* !SLJIT_CONFIG_X86_64 */
    inst[0] = GROUP_0F;
    inst[1] = BSWAP_r | reg_map[dst_r];
#endif /* SLJIT_CONFIG_X86_64 */

    if (op == SLJIT_REV_U16 || op == SLJIT_REV_S16) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
        size = compiler->mode32 ? 16 : 48;
#else /* !SLJIT_CONFIG_X86_64 */
        size = 16;
#endif /* SLJIT_CONFIG_X86_64 */

        inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, (sljit_sw)size, dst_r, 0);
        FAIL_IF(!inst);
        if (op == SLJIT_REV_U16)
            inst[1] |= SHR;
        else
            inst[1] |= SAR;
    }

    if (dst & SLJIT_MEM) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
        if (dst_is_ereg)
            op = SLJIT_REV;
#endif /* SLJIT_CONFIG_X86_32 */
        if (op == SLJIT_REV_U16 || op == SLJIT_REV_S16)
            return emit_mov_half(compiler, 0, dst, dstw, TMP_REG1, 0);

        return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
    }

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    if (op == SLJIT_REV_S32) {
        compiler->mode32 = 0;
        inst = emit_x86_instruction(compiler, 1, dst, 0, dst, 0);
        FAIL_IF(!inst);
        *inst = MOVSXD_r_rm;
    }
#endif /* SLJIT_CONFIG_X86_64 */

    return SLJIT_SUCCESS;
}
1868
1869
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	sljit_s32 dst_is_ereg = 0;
#else /* !SLJIT_CONFIG_X86_32 */
	sljit_s32 op_flags = GET_ALL_FLAGS(op);
#endif /* SLJIT_CONFIG_X86_32 */

	CHECK_ERROR();
	CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
	ADJUST_LOCAL_OFFSET(dst, dstw);
	ADJUST_LOCAL_OFFSET(src, srcw);

	CHECK_EXTRA_REGS(dst, dstw, dst_is_ereg = 1);
	CHECK_EXTRA_REGS(src, srcw, (void)0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = op_flags & SLJIT_32;
#endif /* SLJIT_CONFIG_X86_64 */

	op = GET_OPCODE(op);

	if (op >= SLJIT_MOV && op <= SLJIT_MOV_P) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		compiler->mode32 = 0;
#endif /* SLJIT_CONFIG_X86_64 */

		if (FAST_IS_REG(src) && src == dst) {
			if (!TYPE_CAST_NEEDED(op))
				return SLJIT_SUCCESS;
		}

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (op_flags & SLJIT_32) {
			if (src & SLJIT_MEM) {
				if (op == SLJIT_MOV_S32)
					op = SLJIT_MOV_U32;
			}
			else if (src == SLJIT_IMM) {
				if (op == SLJIT_MOV_U32)
					op = SLJIT_MOV_S32;
			}
		}
#endif /* SLJIT_CONFIG_X86_64 */

		if (src == SLJIT_IMM) {
			switch (op) {
			case SLJIT_MOV_U8:
				srcw = (sljit_u8)srcw;
				break;
			case SLJIT_MOV_S8:
				srcw = (sljit_s8)srcw;
				break;
			case SLJIT_MOV_U16:
				srcw = (sljit_u16)srcw;
				break;
			case SLJIT_MOV_S16:
				srcw = (sljit_s16)srcw;
				break;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			case SLJIT_MOV_U32:
				srcw = (sljit_u32)srcw;
				break;
			case SLJIT_MOV_S32:
				srcw = (sljit_s32)srcw;
				break;
#endif /* SLJIT_CONFIG_X86_64 */
			}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
			if (SLJIT_UNLIKELY(dst_is_ereg))
				return emit_mov(compiler, dst, dstw, src, srcw);
#endif /* SLJIT_CONFIG_X86_32 */
		}

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (SLJIT_UNLIKELY(dst_is_ereg) && (!(op == SLJIT_MOV || op == SLJIT_MOV_U32 || op == SLJIT_MOV_S32 || op == SLJIT_MOV_P) || (src & SLJIT_MEM))) {
			SLJIT_ASSERT(dst == SLJIT_MEM1(SLJIT_SP));
			dst = TMP_REG1;
		}
#endif /* SLJIT_CONFIG_X86_32 */

		switch (op) {
		case SLJIT_MOV:
		case SLJIT_MOV_P:
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		case SLJIT_MOV_U32:
		case SLJIT_MOV_S32:
		case SLJIT_MOV32:
#endif /* SLJIT_CONFIG_X86_32 */
			EMIT_MOV(compiler, dst, dstw, src, srcw);
			break;
		case SLJIT_MOV_U8:
			FAIL_IF(emit_mov_byte(compiler, 0, dst, dstw, src, srcw));
			break;
		case SLJIT_MOV_S8:
			FAIL_IF(emit_mov_byte(compiler, 1, dst, dstw, src, srcw));
			break;
		case SLJIT_MOV_U16:
			FAIL_IF(emit_mov_half(compiler, 0, dst, dstw, src, srcw));
			break;
		case SLJIT_MOV_S16:
			FAIL_IF(emit_mov_half(compiler, 1, dst, dstw, src, srcw));
			break;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		case SLJIT_MOV_U32:
			FAIL_IF(emit_mov_int(compiler, 0, dst, dstw, src, srcw));
			break;
		case SLJIT_MOV_S32:
			FAIL_IF(emit_mov_int(compiler, 1, dst, dstw, src, srcw));
			break;
		case SLJIT_MOV32:
			compiler->mode32 = 1;
			EMIT_MOV(compiler, dst, dstw, src, srcw);
			compiler->mode32 = 0;
			break;
#endif /* SLJIT_CONFIG_X86_64 */
		}

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (SLJIT_UNLIKELY(dst_is_ereg) && dst == TMP_REG1)
			return emit_mov(compiler, SLJIT_MEM1(SLJIT_SP), dstw, TMP_REG1, 0);
#endif /* SLJIT_CONFIG_X86_32 */
		return SLJIT_SUCCESS;
	}

	switch (op) {
	case SLJIT_CLZ:
	case SLJIT_CTZ:
		return emit_clz_ctz(compiler, (op == SLJIT_CLZ), dst, dstw, src, srcw);
	case SLJIT_REV:
	case SLJIT_REV_U16:
	case SLJIT_REV_S16:
	case SLJIT_REV_U32:
	case SLJIT_REV_S32:
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (dst_is_ereg)
			op |= SLJIT_32;
#endif /* SLJIT_CONFIG_X86_32 */
		return emit_bswap(compiler, op, dst, dstw, src, srcw);
	}

	return SLJIT_SUCCESS;
}

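/* op_types packs the four encodings of a binary ALU operation: bits 31-24
   the EAX/imm32 short form, bits 23-16 the reg, r/m form, bits 15-8 the
   r/m, reg form, and bits 7-0 the immediate (group) form. */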
static sljit_s32 emit_cum_binary(struct sljit_compiler *compiler,
	sljit_u32 op_types,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_u8* inst;
	sljit_u8 op_eax_imm = U8(op_types >> 24);
	sljit_u8 op_rm = U8((op_types >> 16) & 0xff);
	sljit_u8 op_mr = U8((op_types >> 8) & 0xff);
	sljit_u8 op_imm = U8(op_types & 0xff);

	if (dst == src1 && dstw == src1w) {
		if (src2 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
#else
			if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128)) {
#endif
				BINARY_EAX_IMM(op_eax_imm, src2w);
			}
			else {
				BINARY_IMM(op_imm, op_mr, src2w, dst, dstw);
			}
		}
		else if (FAST_IS_REG(dst)) {
			inst = emit_x86_instruction(compiler, 1, dst, dstw, src2, src2w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
		else if (FAST_IS_REG(src2)) {
			/* Special exception for sljit_emit_op_flags. */
			inst = emit_x86_instruction(compiler, 1, src2, src2w, dst, dstw);
			FAIL_IF(!inst);
			*inst = op_mr;
		}
		else {
			EMIT_MOV(compiler, TMP_REG1, 0, src2, src2w);
			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
			FAIL_IF(!inst);
			*inst = op_mr;
		}
		return SLJIT_SUCCESS;
	}

	/* Only for cumulative operations. */
	if (dst == src2 && dstw == src2w) {
		if (src1 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			if ((dst == SLJIT_R0) && (src1w > 127 || src1w < -128) && (compiler->mode32 || IS_HALFWORD(src1w))) {
#else
			if ((dst == SLJIT_R0) && (src1w > 127 || src1w < -128)) {
#endif
				BINARY_EAX_IMM(op_eax_imm, src1w);
			}
			else {
				BINARY_IMM(op_imm, op_mr, src1w, dst, dstw);
			}
		}
		else if (FAST_IS_REG(dst)) {
			inst = emit_x86_instruction(compiler, 1, dst, dstw, src1, src1w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
		else if (FAST_IS_REG(src1)) {
			inst = emit_x86_instruction(compiler, 1, src1, src1w, dst, dstw);
			FAIL_IF(!inst);
			*inst = op_mr;
		}
		else {
			EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
			FAIL_IF(!inst);
			*inst = op_mr;
		}
		return SLJIT_SUCCESS;
	}

	/* General version. */
	if (FAST_IS_REG(dst)) {
		EMIT_MOV(compiler, dst, 0, src1, src1w);
		if (src2 == SLJIT_IMM) {
			BINARY_IMM(op_imm, op_mr, src2w, dst, 0);
		}
		else {
			inst = emit_x86_instruction(compiler, 1, dst, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
	}
	else {
		/* This version requires fewer memory writes. */
		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
		if (src2 == SLJIT_IMM) {
			BINARY_IMM(op_imm, op_mr, src2w, TMP_REG1, 0);
		}
		else {
			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
		EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
	}

	return SLJIT_SUCCESS;
}

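/* Same as emit_cum_binary, except that the operands cannot be swapped,
   so there is no dst == src2 fast path. */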
static sljit_s32 emit_non_cum_binary(struct sljit_compiler *compiler,
	sljit_u32 op_types,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_u8* inst;
	sljit_u8 op_eax_imm = U8(op_types >> 24);
	sljit_u8 op_rm = U8((op_types >> 16) & 0xff);
	sljit_u8 op_mr = U8((op_types >> 8) & 0xff);
	sljit_u8 op_imm = U8(op_types & 0xff);

	if (dst == src1 && dstw == src1w) {
		if (src2 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
#else
			if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128)) {
#endif
				BINARY_EAX_IMM(op_eax_imm, src2w);
			}
			else {
				BINARY_IMM(op_imm, op_mr, src2w, dst, dstw);
			}
		}
		else if (FAST_IS_REG(dst)) {
			inst = emit_x86_instruction(compiler, 1, dst, dstw, src2, src2w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
		else if (FAST_IS_REG(src2)) {
			inst = emit_x86_instruction(compiler, 1, src2, src2w, dst, dstw);
			FAIL_IF(!inst);
			*inst = op_mr;
		}
		else {
			EMIT_MOV(compiler, TMP_REG1, 0, src2, src2w);
			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
			FAIL_IF(!inst);
			*inst = op_mr;
		}
		return SLJIT_SUCCESS;
	}

	/* General version. */
	if (FAST_IS_REG(dst) && dst != src2) {
		EMIT_MOV(compiler, dst, 0, src1, src1w);
		if (src2 == SLJIT_IMM) {
			BINARY_IMM(op_imm, op_mr, src2w, dst, 0);
		}
		else {
			inst = emit_x86_instruction(compiler, 1, dst, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
	}
	else {
		/* This version requires fewer memory writes. */
		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
		if (src2 == SLJIT_IMM) {
			BINARY_IMM(op_imm, op_mr, src2w, TMP_REG1, 0);
		}
		else {
			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
		EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
	}

	return SLJIT_SUCCESS;
}

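/* Emits a signed multiply, choosing between IMUL r, r/m and its
   8/32 bit immediate forms depending on the operands. */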
static sljit_s32 emit_mul(struct sljit_compiler *compiler,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_u8* inst;
	sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

	/* Register destination. */
	if (dst_r == src1 && src2 != SLJIT_IMM) {
		FAIL_IF(emit_groupf(compiler, IMUL_r_rm, dst_r, src2, src2w));
	} else if (dst_r == src2 && src1 != SLJIT_IMM) {
		FAIL_IF(emit_groupf(compiler, IMUL_r_rm, dst_r, src1, src1w));
	} else if (src1 == SLJIT_IMM) {
		if (src2 == SLJIT_IMM) {
			EMIT_MOV(compiler, dst_r, 0, SLJIT_IMM, src2w);
			src2 = dst_r;
			src2w = 0;
		}

		if (src1w <= 127 && src1w >= -128) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = IMUL_r_rm_i8;

			FAIL_IF(emit_byte(compiler, U8(src1w)));
		}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		else {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = IMUL_r_rm_i32;
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
			FAIL_IF(!inst);
			INC_SIZE(4);
			sljit_unaligned_store_sw(inst, src1w);
		}
#else
		else if (IS_HALFWORD(src1w)) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = IMUL_r_rm_i32;
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
			FAIL_IF(!inst);
			INC_SIZE(4);
			sljit_unaligned_store_s32(inst, (sljit_s32)src1w);
		}
		else {
			if (dst_r != src2)
				EMIT_MOV(compiler, dst_r, 0, src2, src2w);
			FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src1w));
			FAIL_IF(emit_groupf(compiler, IMUL_r_rm, dst_r, TMP_REG2, 0));
		}
#endif
	}
	else if (src2 == SLJIT_IMM) {
		/* Note: src1 is NOT immediate. */

		if (src2w <= 127 && src2w >= -128) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
			FAIL_IF(!inst);
			*inst = IMUL_r_rm_i8;

			FAIL_IF(emit_byte(compiler, U8(src2w)));
		}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		else {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
			FAIL_IF(!inst);
			*inst = IMUL_r_rm_i32;

			inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
			FAIL_IF(!inst);
			INC_SIZE(4);
			sljit_unaligned_store_sw(inst, src2w);
		}
#else
		else if (IS_HALFWORD(src2w)) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
			FAIL_IF(!inst);
			*inst = IMUL_r_rm_i32;

			inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
			FAIL_IF(!inst);
			INC_SIZE(4);
			sljit_unaligned_store_s32(inst, (sljit_s32)src2w);
		} else {
			if (dst_r != src1)
				EMIT_MOV(compiler, dst_r, 0, src1, src1w);
			FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src2w));
			FAIL_IF(emit_groupf(compiler, IMUL_r_rm, dst_r, TMP_REG2, 0));
		}
#endif
	} else {
		/* Neither argument is immediate. */
		if (ADDRESSING_DEPENDS_ON(src2, dst_r))
			dst_r = TMP_REG1;
		EMIT_MOV(compiler, dst_r, 0, src1, src1w);
		FAIL_IF(emit_groupf(compiler, IMUL_r_rm, dst_r, src2, src2w));
	}

	if (dst & SLJIT_MEM)
		EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);

	return SLJIT_SUCCESS;
}

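/* Tries to encode an addition as LEA, which leaves the flags untouched;
   returns SLJIT_ERR_UNSUPPORTED when no LEA form applies. */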
static sljit_s32 emit_lea_binary(struct sljit_compiler *compiler,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_u8* inst;
	sljit_s32 dst_r, done = 0;

	/* These cases are better left to the normal path. */
	if (dst == src1 && dstw == src1w)
		return SLJIT_ERR_UNSUPPORTED;
	if (dst == src2 && dstw == src2w)
		return SLJIT_ERR_UNSUPPORTED;

	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

	if (FAST_IS_REG(src1)) {
		if (FAST_IS_REG(src2)) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM2(src1, src2), 0);
			FAIL_IF(!inst);
			*inst = LEA_r_m;
			done = 1;
		}
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (src2 == SLJIT_IMM && (compiler->mode32 || IS_HALFWORD(src2w))) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src1), (sljit_s32)src2w);
#else
		if (src2 == SLJIT_IMM) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src1), src2w);
#endif
			FAIL_IF(!inst);
			*inst = LEA_r_m;
			done = 1;
		}
	}
	else if (FAST_IS_REG(src2)) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (src1 == SLJIT_IMM && (compiler->mode32 || IS_HALFWORD(src1w))) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src2), (sljit_s32)src1w);
#else
		if (src1 == SLJIT_IMM) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src2), src1w);
#endif
			FAIL_IF(!inst);
			*inst = LEA_r_m;
			done = 1;
		}
	}

	if (done) {
		if (dst_r == TMP_REG1)
			return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
		return SLJIT_SUCCESS;
	}
	return SLJIT_ERR_UNSUPPORTED;
}

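/* Emits a CMP, using the short accumulator/immediate encoding when the
   left operand is SLJIT_R0. */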
static sljit_s32 emit_cmp_binary(struct sljit_compiler *compiler,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_u8* inst;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (src1 == SLJIT_R0 && src2 == SLJIT_IMM && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
#else
	if (src1 == SLJIT_R0 && src2 == SLJIT_IMM && (src2w > 127 || src2w < -128)) {
#endif
		BINARY_EAX_IMM(CMP_EAX_i32, src2w);
		return SLJIT_SUCCESS;
	}

	if (FAST_IS_REG(src1)) {
		if (src2 == SLJIT_IMM) {
			BINARY_IMM(CMP, CMP_rm_r, src2w, src1, 0);
		}
		else {
			inst = emit_x86_instruction(compiler, 1, src1, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = CMP_r_rm;
		}
		return SLJIT_SUCCESS;
	}

	if (FAST_IS_REG(src2) && src1 != SLJIT_IMM) {
		inst = emit_x86_instruction(compiler, 1, src2, 0, src1, src1w);
		FAIL_IF(!inst);
		*inst = CMP_rm_r;
		return SLJIT_SUCCESS;
	}

	if (src2 == SLJIT_IMM) {
		if (src1 == SLJIT_IMM) {
			EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
			src1 = TMP_REG1;
			src1w = 0;
		}
		BINARY_IMM(CMP, CMP_rm_r, src2w, src1, src1w);
	}
	else {
		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
		inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
		FAIL_IF(!inst);
		*inst = CMP_r_rm;
	}
	return SLJIT_SUCCESS;
}

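/* Emits a TEST; since TEST is commutative, both operand orders are tried
   before falling back to a move through TMP_REG1. */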
static sljit_s32 emit_test_binary(struct sljit_compiler *compiler,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_u8* inst;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (src1 == SLJIT_R0 && src2 == SLJIT_IMM && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
#else
	if (src1 == SLJIT_R0 && src2 == SLJIT_IMM && (src2w > 127 || src2w < -128)) {
#endif
		BINARY_EAX_IMM(TEST_EAX_i32, src2w);
		return SLJIT_SUCCESS;
	}

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (src2 == SLJIT_R0 && src1 == SLJIT_IMM && (src1w > 127 || src1w < -128) && (compiler->mode32 || IS_HALFWORD(src1w))) {
#else
	if (src2 == SLJIT_R0 && src1 == SLJIT_IMM && (src1w > 127 || src1w < -128)) {
#endif
		BINARY_EAX_IMM(TEST_EAX_i32, src1w);
		return SLJIT_SUCCESS;
	}

	if (src1 != SLJIT_IMM) {
		if (src2 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			if (IS_HALFWORD(src2w) || compiler->mode32) {
				inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, src1, src1w);
				FAIL_IF(!inst);
				*inst = GROUP_F7;
			} else {
				FAIL_IF(emit_load_imm64(compiler, FAST_IS_REG(src1) ? TMP_REG2 : TMP_REG1, src2w));
				inst = emit_x86_instruction(compiler, 1, FAST_IS_REG(src1) ? TMP_REG2 : TMP_REG1, 0, src1, src1w);
				FAIL_IF(!inst);
				*inst = TEST_rm_r;
			}
#else
			inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, src1, src1w);
			FAIL_IF(!inst);
			*inst = GROUP_F7;
#endif
			return SLJIT_SUCCESS;
		}
		else if (FAST_IS_REG(src1)) {
			inst = emit_x86_instruction(compiler, 1, src1, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = TEST_rm_r;
			return SLJIT_SUCCESS;
		}
	}

	if (src2 != SLJIT_IMM) {
		if (src1 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			if (IS_HALFWORD(src1w) || compiler->mode32) {
				inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src1w, src2, src2w);
				FAIL_IF(!inst);
				*inst = GROUP_F7;
			}
			else {
				FAIL_IF(emit_load_imm64(compiler, TMP_REG1, src1w));
				inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
				FAIL_IF(!inst);
				*inst = TEST_rm_r;
			}
#else
			inst = emit_x86_instruction(compiler, 1, src1, src1w, src2, src2w);
			FAIL_IF(!inst);
			*inst = GROUP_F7;
#endif
			return SLJIT_SUCCESS;
		}
		else if (FAST_IS_REG(src2)) {
			inst = emit_x86_instruction(compiler, 1, src2, 0, src1, src1w);
			FAIL_IF(!inst);
			*inst = TEST_rm_r;
			return SLJIT_SUCCESS;
		}
	}

	EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
	if (src2 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (IS_HALFWORD(src2w) || compiler->mode32) {
			inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, TMP_REG1, 0);
			FAIL_IF(!inst);
			*inst = GROUP_F7;
		}
		else {
			FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src2w));
			inst = emit_x86_instruction(compiler, 1, TMP_REG2, 0, TMP_REG1, 0);
			FAIL_IF(!inst);
			*inst = TEST_rm_r;
		}
#else
		inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, TMP_REG1, 0);
		FAIL_IF(!inst);
		*inst = GROUP_F7;
#endif
	}
	else {
		inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
		FAIL_IF(!inst);
		*inst = TEST_rm_r;
	}
	return SLJIT_SUCCESS;
}

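/* Emits a shift or rotate. When the count is neither an immediate nor
   already in SLJIT_PREF_SHIFT_REG (ECX), that register is saved and
   restored around the operation. */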
static sljit_s32 emit_shift(struct sljit_compiler *compiler,
	sljit_u8 mode,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	sljit_s32 mode32;
#endif
	sljit_u8* inst;

	if (src2 == SLJIT_IMM || src2 == SLJIT_PREF_SHIFT_REG) {
		if (dst == src1 && dstw == src1w) {
			inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, dst, dstw);
			FAIL_IF(!inst);
			inst[1] |= mode;
			return SLJIT_SUCCESS;
		}
		if (dst == SLJIT_PREF_SHIFT_REG && src2 == SLJIT_PREF_SHIFT_REG) {
			EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
			inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
			FAIL_IF(!inst);
			inst[1] |= mode;
			EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
			return SLJIT_SUCCESS;
		}
		if (FAST_IS_REG(dst)) {
			EMIT_MOV(compiler, dst, 0, src1, src1w);
			inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, dst, 0);
			FAIL_IF(!inst);
			inst[1] |= mode;
			return SLJIT_SUCCESS;
		}

		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
		inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, TMP_REG1, 0);
		FAIL_IF(!inst);
		inst[1] |= mode;
		EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
		return SLJIT_SUCCESS;
	}

	if (dst == SLJIT_PREF_SHIFT_REG) {
		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
		inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
		FAIL_IF(!inst);
		inst[1] |= mode;
		return emit_mov(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
	}

	if (FAST_IS_REG(dst) && dst != src2 && dst != TMP_REG1 && !ADDRESSING_DEPENDS_ON(src2, dst)) {
		if (src1 != dst)
			EMIT_MOV(compiler, dst, 0, src1, src1w);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		mode32 = compiler->mode32;
		compiler->mode32 = 0;
#endif
		EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_PREF_SHIFT_REG, 0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		compiler->mode32 = mode32;
#endif
		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
		inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, dst, 0);
		FAIL_IF(!inst);
		inst[1] |= mode;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		compiler->mode32 = 0;
#endif
		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		compiler->mode32 = mode32;
#endif
		return SLJIT_SUCCESS;
	}

	/* This case is complex since ecx itself may be used for
	   addressing, and this case must be supported as well. */
	EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), 0, SLJIT_PREF_SHIFT_REG, 0);
#else /* !SLJIT_CONFIG_X86_32 */
	mode32 = compiler->mode32;
	compiler->mode32 = 0;
	EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_PREF_SHIFT_REG, 0);
	compiler->mode32 = mode32;
#endif /* SLJIT_CONFIG_X86_32 */

	EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
	inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
	FAIL_IF(!inst);
	inst[1] |= mode;

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, SLJIT_MEM1(SLJIT_SP), 0);
#else
	compiler->mode32 = 0;
	EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG2, 0);
	compiler->mode32 = mode32;
#endif /* SLJIT_CONFIG_X86_32 */

	if (dst != TMP_REG1)
		return emit_mov(compiler, dst, dstw, TMP_REG1, 0);

	return SLJIT_SUCCESS;
}

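/* Wrapper around emit_shift for operations that must set the flags: the
   CPU leaves the flags unchanged for a zero shift count, so they are
   materialized separately when needed. */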
static sljit_s32 emit_shift_with_flags(struct sljit_compiler *compiler,
	sljit_u8 mode, sljit_s32 set_flags,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	/* The CPU does not set flags if the shift count is 0. */
	if (src2 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		src2w &= compiler->mode32 ? 0x1f : 0x3f;
#else /* !SLJIT_CONFIG_X86_64 */
		src2w &= 0x1f;
#endif /* SLJIT_CONFIG_X86_64 */
		if (src2w != 0)
			return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w);

		if (!set_flags)
			return emit_mov(compiler, dst, dstw, src1, src1w);
		/* OR dst, src, 0 */
		return emit_cum_binary(compiler, BINARY_OPCODE(OR),
			dst, dstw, src1, src1w, SLJIT_IMM, 0);
	}

	if (!set_flags)
		return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w);

	if (!FAST_IS_REG(dst))
		FAIL_IF(emit_cmp_binary(compiler, src1, src1w, SLJIT_IMM, 0));

	FAIL_IF(emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w));

	if (FAST_IS_REG(dst))
		return emit_cmp_binary(compiler, dst, dstw, SLJIT_IMM, 0);
	return SLJIT_SUCCESS;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	CHECK_ERROR();
	CHECK(check_sljit_emit_op2(compiler, op, 0, dst, dstw, src1, src1w, src2, src2w));
	ADJUST_LOCAL_OFFSET(dst, dstw);
	ADJUST_LOCAL_OFFSET(src1, src1w);
	ADJUST_LOCAL_OFFSET(src2, src2w);

	CHECK_EXTRA_REGS(dst, dstw, (void)0);
	CHECK_EXTRA_REGS(src1, src1w, (void)0);
	CHECK_EXTRA_REGS(src2, src2w, (void)0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = op & SLJIT_32;
#endif

	switch (GET_OPCODE(op)) {
	case SLJIT_ADD:
		if (!HAS_FLAGS(op)) {
			if (emit_lea_binary(compiler, dst, dstw, src1, src1w, src2, src2w) != SLJIT_ERR_UNSUPPORTED)
				return compiler->error;
		}
		return emit_cum_binary(compiler, BINARY_OPCODE(ADD),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_ADDC:
		return emit_cum_binary(compiler, BINARY_OPCODE(ADC),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_SUB:
		if (src1 == SLJIT_IMM && src1w == 0)
			return emit_unary(compiler, NEG_rm, dst, dstw, src2, src2w);

		if (!HAS_FLAGS(op)) {
			if (src2 == SLJIT_IMM && emit_lea_binary(compiler, dst, dstw, src1, src1w, SLJIT_IMM, -src2w) != SLJIT_ERR_UNSUPPORTED)
				return compiler->error;
			if (FAST_IS_REG(dst) && src2 == dst) {
				FAIL_IF(emit_non_cum_binary(compiler, BINARY_OPCODE(SUB), dst, 0, dst, 0, src1, src1w));
				return emit_unary(compiler, NEG_rm, dst, 0, dst, 0);
			}
		}

		return emit_non_cum_binary(compiler, BINARY_OPCODE(SUB),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_SUBC:
		return emit_non_cum_binary(compiler, BINARY_OPCODE(SBB),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_MUL:
		return emit_mul(compiler, dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_AND:
		return emit_cum_binary(compiler, BINARY_OPCODE(AND),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_OR:
		return emit_cum_binary(compiler, BINARY_OPCODE(OR),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_XOR:
		if (!HAS_FLAGS(op)) {
			if (src2 == SLJIT_IMM && src2w == -1)
				return emit_unary(compiler, NOT_rm, dst, dstw, src1, src1w);
			if (src1 == SLJIT_IMM && src1w == -1)
				return emit_unary(compiler, NOT_rm, dst, dstw, src2, src2w);
		}

		return emit_cum_binary(compiler, BINARY_OPCODE(XOR),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_SHL:
	case SLJIT_MSHL:
		return emit_shift_with_flags(compiler, SHL, HAS_FLAGS(op),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_LSHR:
	case SLJIT_MLSHR:
		return emit_shift_with_flags(compiler, SHR, HAS_FLAGS(op),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_ASHR:
	case SLJIT_MASHR:
		return emit_shift_with_flags(compiler, SAR, HAS_FLAGS(op),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_ROTL:
		return emit_shift_with_flags(compiler, ROL, 0,
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_ROTR:
		return emit_shift_with_flags(compiler, ROR, 0,
			dst, dstw, src1, src1w, src2, src2w);
	}

	return SLJIT_SUCCESS;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_s32 opcode = GET_OPCODE(op);

	CHECK_ERROR();
	CHECK(check_sljit_emit_op2(compiler, op, 1, 0, 0, src1, src1w, src2, src2w));

	if (opcode != SLJIT_SUB && opcode != SLJIT_AND) {
		SLJIT_SKIP_CHECKS(compiler);
		return sljit_emit_op2(compiler, op, TMP_REG1, 0, src1, src1w, src2, src2w);
	}

	ADJUST_LOCAL_OFFSET(src1, src1w);
	ADJUST_LOCAL_OFFSET(src2, src2w);

	CHECK_EXTRA_REGS(src1, src1w, (void)0);
	CHECK_EXTRA_REGS(src2, src2w, (void)0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = op & SLJIT_32;
#endif

	if (opcode == SLJIT_SUB)
		return emit_cmp_binary(compiler, src1, src1w, src2, src2w);

	return emit_test_binary(compiler, src1, src1w, src2, src2w);
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2r(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst_reg,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_u8* inst;
	sljit_sw dstw = 0;

	CHECK_ERROR();
	CHECK(check_sljit_emit_op2r(compiler, op, dst_reg, src1, src1w, src2, src2w));
	ADJUST_LOCAL_OFFSET(src1, src1w);
	ADJUST_LOCAL_OFFSET(src2, src2w);

	CHECK_EXTRA_REGS(dst_reg, dstw, (void)0);
	CHECK_EXTRA_REGS(src1, src1w, (void)0);
	CHECK_EXTRA_REGS(src2, src2w, (void)0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = op & SLJIT_32;
#endif

	switch (GET_OPCODE(op)) {
	case SLJIT_MULADD:
		FAIL_IF(emit_mul(compiler, TMP_REG1, 0, src1, src1w, src2, src2w));
		inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst_reg, dstw);
		FAIL_IF(!inst);
		*inst = ADD_rm_r;
		return SLJIT_SUCCESS;
	}

	return SLJIT_SUCCESS;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst_reg,
	sljit_s32 src1_reg,
	sljit_s32 src2_reg,
	sljit_s32 src3, sljit_sw src3w)
{
	sljit_s32 is_rotate, is_left, move_src1;
	sljit_u8* inst;
	sljit_sw src1w = 0;
	sljit_sw dstw = 0;
	/* The whole register must be saved even for 32 bit operations. */
	sljit_u8 restore_ecx = 0;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	sljit_sw src2w = 0;
	sljit_s32 restore_sp4 = 0;
#endif /* SLJIT_CONFIG_X86_32 */

	CHECK_ERROR();
	CHECK(check_sljit_emit_shift_into(compiler, op, dst_reg, src1_reg, src2_reg, src3, src3w));
	ADJUST_LOCAL_OFFSET(src3, src3w);

	CHECK_EXTRA_REGS(dst_reg, dstw, (void)0);
	CHECK_EXTRA_REGS(src3, src3w, (void)0);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = op & SLJIT_32;
#endif /* SLJIT_CONFIG_X86_64 */

	if (src3 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		src3w &= 0x1f;
#else /* !SLJIT_CONFIG_X86_32 */
		src3w &= (op & SLJIT_32) ? 0x1f : 0x3f;
#endif /* SLJIT_CONFIG_X86_32 */

		if (src3w == 0)
			return SLJIT_SUCCESS;
	}

	is_left = (GET_OPCODE(op) == SLJIT_SHL || GET_OPCODE(op) == SLJIT_MSHL);

	is_rotate = (src1_reg == src2_reg);
	CHECK_EXTRA_REGS(src1_reg, src1w, (void)0);
	CHECK_EXTRA_REGS(src2_reg, src2w, (void)0);

	if (is_rotate)
		return emit_shift(compiler, is_left ? ROL : ROR, dst_reg, dstw, src1_reg, src1w, src3, src3w);

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	if (src2_reg & SLJIT_MEM) {
		EMIT_MOV(compiler, TMP_REG1, 0, src2_reg, src2w);
		src2_reg = TMP_REG1;
	}
#endif /* SLJIT_CONFIG_X86_32 */

	if (dst_reg == SLJIT_PREF_SHIFT_REG && src3 != SLJIT_IMM && (src3 != SLJIT_PREF_SHIFT_REG || src1_reg != SLJIT_PREF_SHIFT_REG)) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		EMIT_MOV(compiler, TMP_REG1, 0, src1_reg, src1w);
		src1_reg = TMP_REG1;
		src1w = 0;
#else /* !SLJIT_CONFIG_X86_64 */
		if (src2_reg != TMP_REG1) {
			EMIT_MOV(compiler, TMP_REG1, 0, src1_reg, src1w);
			src1_reg = TMP_REG1;
			src1w = 0;
		} else if ((src1_reg & SLJIT_MEM) || src1_reg == SLJIT_PREF_SHIFT_REG) {
			restore_sp4 = (src3 == SLJIT_R0) ? SLJIT_R1 : SLJIT_R0;
			EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), sizeof(sljit_s32), restore_sp4, 0);
			EMIT_MOV(compiler, restore_sp4, 0, src1_reg, src1w);
			src1_reg = restore_sp4;
			src1w = 0;
		} else {
			EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), sizeof(sljit_s32), src1_reg, 0);
			restore_sp4 = src1_reg;
		}
#endif /* SLJIT_CONFIG_X86_64 */

		if (src3 != SLJIT_PREF_SHIFT_REG)
			EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src3, src3w);
	} else {
		if (src2_reg == SLJIT_PREF_SHIFT_REG && src3 != SLJIT_IMM && src3 != SLJIT_PREF_SHIFT_REG) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			compiler->mode32 = 0;
#endif /* SLJIT_CONFIG_X86_64 */
			EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_PREF_SHIFT_REG, 0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			compiler->mode32 = op & SLJIT_32;
#endif /* SLJIT_CONFIG_X86_64 */
			src2_reg = TMP_REG1;
			restore_ecx = 1;
		}

		move_src1 = 0;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (dst_reg != src1_reg) {
			if (dst_reg != src3) {
				EMIT_MOV(compiler, dst_reg, 0, src1_reg, src1w);
				src1_reg = dst_reg;
				src1w = 0;
			} else
				move_src1 = 1;
		}
#else /* !SLJIT_CONFIG_X86_64 */
		if (dst_reg & SLJIT_MEM) {
			if (src2_reg != TMP_REG1) {
				EMIT_MOV(compiler, TMP_REG1, 0, src1_reg, src1w);
				src1_reg = TMP_REG1;
				src1w = 0;
			} else if ((src1_reg & SLJIT_MEM) || src1_reg == SLJIT_PREF_SHIFT_REG) {
				restore_sp4 = (src3 == SLJIT_R0) ? SLJIT_R1 : SLJIT_R0;
				EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), sizeof(sljit_s32), restore_sp4, 0);
				EMIT_MOV(compiler, restore_sp4, 0, src1_reg, src1w);
				src1_reg = restore_sp4;
				src1w = 0;
			} else {
				EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), sizeof(sljit_s32), src1_reg, 0);
				restore_sp4 = src1_reg;
			}
		} else if (dst_reg != src1_reg) {
			if (dst_reg != src3) {
				EMIT_MOV(compiler, dst_reg, 0, src1_reg, src1w);
				src1_reg = dst_reg;
				src1w = 0;
			} else
				move_src1 = 1;
		}
#endif /* SLJIT_CONFIG_X86_64 */

		if (src3 != SLJIT_IMM && src3 != SLJIT_PREF_SHIFT_REG) {
			if (!restore_ecx) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
				compiler->mode32 = 0;
				EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_PREF_SHIFT_REG, 0);
				compiler->mode32 = op & SLJIT_32;
				restore_ecx = 1;
#else /* !SLJIT_CONFIG_X86_64 */
				if (src1_reg != TMP_REG1 && src2_reg != TMP_REG1) {
					EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_PREF_SHIFT_REG, 0);
					restore_ecx = 1;
				} else {
					EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), 0, SLJIT_PREF_SHIFT_REG, 0);
					restore_ecx = 2;
				}
#endif /* SLJIT_CONFIG_X86_64 */
			}
			EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src3, src3w);
		}

		if (move_src1) {
			EMIT_MOV(compiler, dst_reg, 0, src1_reg, src1w);
			src1_reg = dst_reg;
			src1w = 0;
		}
	}

	inst = emit_x86_instruction(compiler, 2, src2_reg, 0, src1_reg, src1w);
	FAIL_IF(!inst);
	inst[0] = GROUP_0F;

	if (src3 == SLJIT_IMM) {
		inst[1] = U8((is_left ? SHLD : SHRD) - 1);

		/* Immediate argument is added separately. */
		FAIL_IF(emit_byte(compiler, U8(src3w)));
	} else
		inst[1] = U8(is_left ? SHLD : SHRD);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (restore_ecx) {
		compiler->mode32 = 0;
		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
	}

	if (src1_reg != dst_reg) {
		compiler->mode32 = op & SLJIT_32;
		return emit_mov(compiler, dst_reg, dstw, src1_reg, 0);
	}
#else /* !SLJIT_CONFIG_X86_64 */
	if (restore_ecx)
		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, restore_ecx == 1 ? TMP_REG1 : SLJIT_MEM1(SLJIT_SP), 0);

	if (src1_reg != dst_reg)
		EMIT_MOV(compiler, dst_reg, dstw, src1_reg, 0);

	if (restore_sp4)
		return emit_mov(compiler, restore_sp4, 0, SLJIT_MEM1(SLJIT_SP), sizeof(sljit_s32));
#endif /* SLJIT_CONFIG_X86_32 */

	return SLJIT_SUCCESS;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 src, sljit_sw srcw)
{
	CHECK_ERROR();
	CHECK(check_sljit_emit_op_src(compiler, op, src, srcw));
	ADJUST_LOCAL_OFFSET(src, srcw);

	CHECK_EXTRA_REGS(src, srcw, (void)0);

	switch (op) {
	case SLJIT_FAST_RETURN:
		return emit_fast_return(compiler, src, srcw);
	case SLJIT_SKIP_FRAMES_BEFORE_FAST_RETURN:
		/* Don't adjust shadow stack if it isn't enabled. */
		if (!cpu_has_shadow_stack())
			return SLJIT_SUCCESS;
		return adjust_shadow_stack(compiler, src, srcw);
	case SLJIT_PREFETCH_L1:
	case SLJIT_PREFETCH_L2:
	case SLJIT_PREFETCH_L3:
	case SLJIT_PREFETCH_ONCE:
		return emit_prefetch(compiler, op, src, srcw);
	}

	return SLJIT_SUCCESS;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_dst(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw)
{
	CHECK_ERROR();
	CHECK(check_sljit_emit_op_dst(compiler, op, dst, dstw));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	CHECK_EXTRA_REGS(dst, dstw, (void)0);

	switch (op) {
	case SLJIT_FAST_ENTER:
		return emit_fast_enter(compiler, dst, dstw);
	case SLJIT_GET_RETURN_ADDRESS:
		return sljit_emit_get_return_address(compiler, dst, dstw);
	}

	return SLJIT_SUCCESS;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 type, sljit_s32 reg)
{
	CHECK_REG_INDEX(check_sljit_get_register_index(type, reg));

	if (type == SLJIT_GP_REGISTER) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (reg >= SLJIT_R3 && reg <= SLJIT_R8)
			return -1;
#endif /* SLJIT_CONFIG_X86_32 */
		return reg_map[reg];
	}

	if (type != SLJIT_FLOAT_REGISTER && type != SLJIT_SIMD_REG_128 && type != SLJIT_SIMD_REG_256 && type != SLJIT_SIMD_REG_512)
		return -1;

	return freg_map[reg];
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
	void *instruction, sljit_u32 size)
{
	sljit_u8 *inst;

	CHECK_ERROR();
	CHECK(check_sljit_emit_op_custom(compiler, instruction, size));

	inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!inst);
	INC_SIZE(size);
	SLJIT_MEMCPY(inst, instruction, size);
	return SLJIT_SUCCESS;
}

/* --------------------------------------------------------------------- */
/* Floating point operators */
/* --------------------------------------------------------------------- */

/* Alignment(3) + 4 * 16 bytes. */
static sljit_u32 sse2_data[3 + (4 * 4)];
static sljit_u32 *sse2_buffer;

static void init_compiler(void)
{
	get_cpu_features();

	/* Align to 16 bytes. */
	sse2_buffer = (sljit_u32*)(((sljit_uw)sse2_data + 15) & ~(sljit_uw)0xf);

	/* Single precision constants (each constant is 16 bytes long). */
	sse2_buffer[0] = 0x80000000;
	sse2_buffer[4] = 0x7fffffff;
	/* Double precision constants (each constant is 16 bytes long). */
	sse2_buffer[8] = 0;
	sse2_buffer[9] = 0x80000000;
	sse2_buffer[12] = 0xffffffff;
	sse2_buffer[13] = 0x7fffffff;
}

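/* Emits a two byte 0F-prefixed instruction; the low byte of op is the
   second opcode byte, the remaining bits carry prefix flags. */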
static sljit_s32 emit_groupf(struct sljit_compiler *compiler,
	sljit_uw op,
	sljit_s32 dst, sljit_s32 src, sljit_sw srcw)
{
	sljit_u8 *inst = emit_x86_instruction(compiler, 2 | (op & ~(sljit_uw)0xff), dst, 0, src, srcw);
	FAIL_IF(!inst);
	inst[0] = GROUP_0F;
	inst[1] = op & 0xff;
	return SLJIT_SUCCESS;
}

static sljit_s32 emit_groupf_ext(struct sljit_compiler *compiler,
	sljit_uw op,
	sljit_s32 dst, sljit_s32 src, sljit_sw srcw)
{
	sljit_u8 *inst;

	SLJIT_ASSERT((op & EX86_SSE2) && ((op & VEX_OP_0F38) || (op & VEX_OP_0F3A)));

	inst = emit_x86_instruction(compiler, 3 | (op & ~((sljit_uw)0xff | VEX_OP_0F38 | VEX_OP_0F3A)), dst, 0, src, srcw);
	FAIL_IF(!inst);
	inst[0] = GROUP_0F;
	inst[1] = U8((op & VEX_OP_0F38) ? 0x38 : 0x3A);
	inst[2] = op & 0xff;
	return SLJIT_SUCCESS;
}

static SLJIT_INLINE sljit_s32 emit_sse2_load(struct sljit_compiler *compiler,
	sljit_s32 single, sljit_s32 dst, sljit_s32 src, sljit_sw srcw)
{
	return emit_groupf(compiler, MOVSD_x_xm | (single ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2, dst, src, srcw);
}

static SLJIT_INLINE sljit_s32 emit_sse2_store(struct sljit_compiler *compiler,
	sljit_s32 single, sljit_s32 dst, sljit_sw dstw, sljit_s32 src)
{
	return emit_groupf(compiler, MOVSD_xm_x | (single ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2, src, dst, dstw);
}

static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_s32 dst_r;

	CHECK_EXTRA_REGS(dst, dstw, (void)0);
	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (GET_OPCODE(op) == SLJIT_CONV_SW_FROM_F64)
		compiler->mode32 = 0;
#endif

	FAIL_IF(emit_groupf(compiler, CVTTSD2SI_r_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2_OP2, dst_r, src, srcw));

	if (dst & SLJIT_MEM)
		return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
	return SLJIT_SUCCESS;
}

static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;

	CHECK_EXTRA_REGS(src, srcw, (void)0);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_SW)
		compiler->mode32 = 0;
#endif

	if (src == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
			srcw = (sljit_s32)srcw;
#endif
		EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
		src = TMP_REG1;
		srcw = 0;
	}

	FAIL_IF(emit_groupf(compiler, CVTSI2SD_x_rm | EX86_SELECT_F2_F3(op) | EX86_SSE2_OP1, dst_r, src, srcw));

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif
	if (dst_r == TMP_FREG)
		return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
	return SLJIT_SUCCESS;
}

static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	switch (GET_FLAG_TYPE(op)) {
	case SLJIT_ORDERED_EQUAL:
		/* Also: SLJIT_UNORDERED_OR_NOT_EQUAL */
		FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src1, src1w));
		FAIL_IF(emit_groupf(compiler, CMPS_x_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2, TMP_FREG, src2, src2w));

		/* EQ */
		FAIL_IF(emit_byte(compiler, 0));

		src1 = TMP_FREG;
		src2 = TMP_FREG;
		src2w = 0;
		break;

	case SLJIT_ORDERED_LESS:
	case SLJIT_UNORDERED_OR_GREATER:
		/* Also: SLJIT_UNORDERED_OR_GREATER_EQUAL, SLJIT_ORDERED_LESS_EQUAL */
		if (!FAST_IS_REG(src2)) {
			FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src2, src2w));
			src2 = TMP_FREG;
		}

		return emit_groupf(compiler, UCOMISD_x_xm | EX86_SELECT_66(op) | EX86_SSE2, src2, src1, src1w);
	}

	if (!FAST_IS_REG(src1)) {
		FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src1, src1w));
		src1 = TMP_FREG;
	}

	return emit_groupf(compiler, UCOMISD_x_xm | EX86_SELECT_66(op) | EX86_SSE2, src1, src2, src2w);
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_s32 dst_r;
	sljit_u8 *inst;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif

	CHECK_ERROR();
	SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);

	if (GET_OPCODE(op) == SLJIT_MOV_F64) {
		if (FAST_IS_REG(dst))
			return emit_sse2_load(compiler, op & SLJIT_32, dst, src, srcw);
		if (FAST_IS_REG(src))
			return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, src);
		FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src, srcw));
		return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
	}

	if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32) {
		dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;
		if (FAST_IS_REG(src)) {
			/* We overwrite the high bits of the source register. From the
			   SLJIT point of view, this is not an issue.
			   Note: In SSE3, we could also use MOVDDUP and MOVSLDUP. */
			FAIL_IF(emit_groupf(compiler, UNPCKLPD_x_xm | ((op & SLJIT_32) ? EX86_PREF_66 : 0) | EX86_SSE2, src, src, 0));
		} else {
			FAIL_IF(emit_sse2_load(compiler, !(op & SLJIT_32), TMP_FREG, src, srcw));
			src = TMP_FREG;
		}

		FAIL_IF(emit_groupf(compiler, CVTPD2PS_x_xm | ((op & SLJIT_32) ? EX86_PREF_66 : 0) | EX86_SSE2, dst_r, src, 0));
		if (dst_r == TMP_FREG)
			return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
		return SLJIT_SUCCESS;
	}

	if (FAST_IS_REG(dst)) {
		dst_r = (dst == src) ? TMP_FREG : dst;

		if (src & SLJIT_MEM)
			FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src, srcw));

		FAIL_IF(emit_groupf(compiler, PCMPEQD_x_xm | EX86_PREF_66 | EX86_SSE2, dst_r, dst_r, 0));

		inst = emit_x86_instruction(compiler, 2 | EX86_PREF_66 | EX86_SSE2_OP2, 0, 0, dst_r, 0);
		FAIL_IF(!inst);
		inst[0] = GROUP_0F;
		/* Same as PSRLD_x / PSRLQ_x */
		inst[1] = (op & SLJIT_32) ? PSLLD_x_i8 : PSLLQ_x_i8;

		if (GET_OPCODE(op) == SLJIT_ABS_F64) {
			inst[2] |= 2 << 3;
			FAIL_IF(emit_byte(compiler, 1));
		} else {
			inst[2] |= 6 << 3;
			FAIL_IF(emit_byte(compiler, ((op & SLJIT_32) ? 31 : 63)));
		}

		if (dst_r != TMP_FREG)
			dst_r = (src & SLJIT_MEM) ? TMP_FREG : src;
		return emit_groupf(compiler, (GET_OPCODE(op) == SLJIT_NEG_F64 ? XORPD_x_xm : ANDPD_x_xm) | EX86_SSE2, dst, dst_r, 0);
	}

	FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src, srcw));

	switch (GET_OPCODE(op)) {
	case SLJIT_NEG_F64:
		FAIL_IF(emit_groupf(compiler, XORPD_x_xm | EX86_SELECT_66(op) | EX86_SSE2, TMP_FREG, SLJIT_MEM0(), (sljit_sw)((op & SLJIT_32) ? sse2_buffer : sse2_buffer + 8)));
		break;

	case SLJIT_ABS_F64:
		FAIL_IF(emit_groupf(compiler, ANDPD_x_xm | EX86_SELECT_66(op) | EX86_SSE2, TMP_FREG, SLJIT_MEM0(), (sljit_sw)((op & SLJIT_32) ? sse2_buffer + 4 : sse2_buffer + 12)));
		break;
	}

	return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_s32 dst_r;

	CHECK_ERROR();
	CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
	ADJUST_LOCAL_OFFSET(dst, dstw);
	ADJUST_LOCAL_OFFSET(src1, src1w);
	ADJUST_LOCAL_OFFSET(src2, src2w);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif

	if (FAST_IS_REG(dst)) {
		dst_r = dst;
		if (dst == src1)
			; /* Do nothing here. */
		else if (dst == src2 && (GET_OPCODE(op) == SLJIT_ADD_F64 || GET_OPCODE(op) == SLJIT_MUL_F64)) {
			/* Swap arguments. */
			src2 = src1;
			src2w = src1w;
		} else if (dst != src2)
			FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, dst_r, src1, src1w));
		else {
			dst_r = TMP_FREG;
			FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src1, src1w));
		}
	} else {
		dst_r = TMP_FREG;
		FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src1, src1w));
	}

	switch (GET_OPCODE(op)) {
	case SLJIT_ADD_F64:
		FAIL_IF(emit_groupf(compiler, ADDSD_x_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2, dst_r, src2, src2w));
		break;

	case SLJIT_SUB_F64:
		FAIL_IF(emit_groupf(compiler, SUBSD_x_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2, dst_r, src2, src2w));
		break;

	case SLJIT_MUL_F64:
		FAIL_IF(emit_groupf(compiler, MULSD_x_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2, dst_r, src2, src2w));
		break;

	case SLJIT_DIV_F64:
		FAIL_IF(emit_groupf(compiler, DIVSD_x_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2, dst_r, src2, src2w));
		break;
	}

	if (dst_r != dst)
		return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
	return SLJIT_SUCCESS;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2r(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst_freg,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_uw pref;

	CHECK_ERROR();
	CHECK(check_sljit_emit_fop2r(compiler, op, dst_freg, src1, src1w, src2, src2w));
	ADJUST_LOCAL_OFFSET(src1, src1w);
	ADJUST_LOCAL_OFFSET(src2, src2w);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif

	if (dst_freg == src1) {
		FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src2, src2w));
		pref = EX86_SELECT_66(op) | EX86_SSE2;
		FAIL_IF(emit_groupf(compiler, XORPD_x_xm | pref, TMP_FREG, src1, src1w));
		FAIL_IF(emit_groupf(compiler, ANDPD_x_xm | pref, TMP_FREG, SLJIT_MEM0(), (sljit_sw)((op & SLJIT_32) ? sse2_buffer : sse2_buffer + 8)));
		return emit_groupf(compiler, XORPD_x_xm | pref, dst_freg, TMP_FREG, 0);
	}

	if (src1 & SLJIT_MEM) {
		FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src1, src1w));
		src1 = TMP_FREG;
		src1w = 0;
	}

	if (dst_freg != src2)
		FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, dst_freg, src2, src2w));

	pref = EX86_SELECT_66(op) | EX86_SSE2;
	FAIL_IF(emit_groupf(compiler, XORPD_x_xm | pref, dst_freg, src1, src1w));
	FAIL_IF(emit_groupf(compiler, ANDPD_x_xm | pref, dst_freg, SLJIT_MEM0(), (sljit_sw)((op & SLJIT_32) ? sse2_buffer : sse2_buffer + 8)));
	return emit_groupf(compiler, XORPD_x_xm | pref, dst_freg, src1, src1w);
}

/* --------------------------------------------------------------------- */
/* Conditional instructions */
/* --------------------------------------------------------------------- */

SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler)
{
	sljit_u8 *inst;
	struct sljit_label *label;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_label(compiler));

	if (compiler->last_label && compiler->last_label->size == compiler->size)
		return compiler->last_label;

	label = (struct sljit_label*)ensure_abuf(compiler, sizeof(struct sljit_label));
	PTR_FAIL_IF(!label);
	set_label(label, compiler);

	inst = (sljit_u8*)ensure_buf(compiler, 1);
	PTR_FAIL_IF(!inst);
	inst[0] = SLJIT_INST_LABEL;

	return label;
}

SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
{
	sljit_u8 *inst;
	struct sljit_jump *jump;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_jump(compiler, type));

	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
	PTR_FAIL_IF_NULL(jump);
	set_jump(jump, compiler, (sljit_u32)((type & SLJIT_REWRITABLE_JUMP) | ((type & 0xff) << TYPE_SHIFT)));
	type &= 0xff;

	jump->addr = compiler->size;
	/* Worst case size. */
	compiler->size += (type >= SLJIT_JUMP) ? JUMP_MAX_SIZE : CJUMP_MAX_SIZE;
	inst = (sljit_u8*)ensure_buf(compiler, 1);
	PTR_FAIL_IF_NULL(inst);

	inst[0] = SLJIT_INST_JUMP;
	return jump;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
{
	sljit_u8 *inst;
	struct sljit_jump *jump;

	CHECK_ERROR();
	CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
	ADJUST_LOCAL_OFFSET(src, srcw);

	CHECK_EXTRA_REGS(src, srcw, (void)0);

	if (src == SLJIT_IMM) {
		jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
		FAIL_IF_NULL(jump);
		set_jump(jump, compiler, (sljit_u32)(JUMP_ADDR | (type << TYPE_SHIFT)));
		jump->u.target = (sljit_uw)srcw;

		jump->addr = compiler->size;
		/* Worst case size. */
		compiler->size += JUMP_MAX_SIZE;
		inst = (sljit_u8*)ensure_buf(compiler, 1);
		FAIL_IF_NULL(inst);

		inst[0] = SLJIT_INST_JUMP;
	} else {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		/* REX_W is not necessary (src is not immediate). */
		compiler->mode32 = 1;
#endif
		inst = emit_x86_instruction(compiler, 1, 0, 0, src, srcw);
		FAIL_IF(!inst);
		inst[0] = GROUP_FF;
		inst[1] = U8(inst[1] | ((type >= SLJIT_FAST_CALL) ? CALL_rm : JMP_rm));
	}
	return SLJIT_SUCCESS;
}

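/* Converts the current condition flags into a value using SETcc, then
   widens or combines the byte result (MOVZX / OR) as required by op. */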
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 type)
{
	sljit_u8 *inst;
	sljit_u8 cond_set;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	sljit_s32 reg;
	sljit_uw size;
#endif /* SLJIT_CONFIG_X86_64 */
	/* ADJUST_LOCAL_OFFSET and CHECK_EXTRA_REGS might overwrite these values. */
	sljit_s32 dst_save = dst;
	sljit_sw dstw_save = dstw;

	CHECK_ERROR();
	CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));

	ADJUST_LOCAL_OFFSET(dst, dstw);
	CHECK_EXTRA_REGS(dst, dstw, (void)0);

	/* setcc = jcc + 0x10. */
	cond_set = U8(get_jump_code((sljit_uw)type) + 0x10);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && FAST_IS_REG(dst)) {
		size = 3 + 2;
		if (reg_map[TMP_REG1] >= 4)
			size += 1 + 1;
		else if (reg_map[dst] >= 4)
			size++;

		inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
		FAIL_IF(!inst);
		INC_SIZE(size);
		/* Set low register to conditional flag. */
		if (reg_map[TMP_REG1] >= 4)
			*inst++ = (reg_map[TMP_REG1] <= 7) ? REX : REX_B;

		inst[0] = GROUP_0F;
		inst[1] = cond_set;
		inst[2] = MOD_REG | reg_lmap[TMP_REG1];
		inst += 3;

		if (reg_map[TMP_REG1] >= 4 || reg_map[dst] >= 4)
			*inst++ = U8(REX | (reg_map[TMP_REG1] <= 7 ? 0 : REX_R) | (reg_map[dst] <= 7 ? 0 : REX_B));

		inst[0] = OR_rm8_r8;
		inst[1] = U8(MOD_REG | (reg_lmap[TMP_REG1] << 3) | reg_lmap[dst]);
		return SLJIT_SUCCESS;
	}

	reg = (GET_OPCODE(op) < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG1;

	size = 3 + (reg_map[reg] >= 4) + 4;
	inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!inst);
	INC_SIZE(size);
	/* Set low register to conditional flag. */

	if (reg_map[reg] >= 4)
		*inst++ = (reg_map[reg] <= 7) ? REX : REX_B;

	inst[0] = GROUP_0F;
	inst[1] = cond_set;
	inst[2] = MOD_REG | reg_lmap[reg];

	inst[3] = REX_W | (reg_map[reg] <= 7 ? 0 : (REX_B | REX_R));
	/* The movzx instruction does not affect flags. */
	inst[4] = GROUP_0F;
	inst[5] = MOVZX_r_rm8;
	inst[6] = U8(MOD_REG | (reg_lmap[reg] << 3) | reg_lmap[reg]);

	if (reg != TMP_REG1)
		return SLJIT_SUCCESS;

	if (GET_OPCODE(op) < SLJIT_ADD) {
		compiler->mode32 = GET_OPCODE(op) != SLJIT_MOV;
		return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
	}

	SLJIT_SKIP_CHECKS(compiler);
	return sljit_emit_op2(compiler, op, dst_save, dstw_save, dst_save, dstw_save, TMP_REG1, 0);

#else /* !SLJIT_CONFIG_X86_64 */
	SLJIT_ASSERT(reg_map[TMP_REG1] < 4);

	/* The SLJIT_CONFIG_X86_32 code path starts here. */
	if (GET_OPCODE(op) < SLJIT_ADD && FAST_IS_REG(dst) && reg_map[dst] <= 4) {
		/* Low byte is accessible. */
		inst = (sljit_u8*)ensure_buf(compiler, 1 + 3 + 3);
		FAIL_IF(!inst);
		INC_SIZE(3 + 3);
		/* Set low byte to conditional flag. */
		inst[0] = GROUP_0F;
		inst[1] = cond_set;
		inst[2] = U8(MOD_REG | reg_map[dst]);

		inst[3] = GROUP_0F;
		inst[4] = MOVZX_r_rm8;
		inst[5] = U8(MOD_REG | (reg_map[dst] << 3) | reg_map[dst]);
		return SLJIT_SUCCESS;
	}

	if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && FAST_IS_REG(dst) && reg_map[dst] <= 4) {
		inst = (sljit_u8*)ensure_buf(compiler, 1 + 3 + 2);
		FAIL_IF(!inst);
		INC_SIZE(3 + 2);

		/* Set low byte to conditional flag. */
		inst[0] = GROUP_0F;
		inst[1] = cond_set;
		inst[2] = U8(MOD_REG | reg_map[TMP_REG1]);

		inst[3] = OR_rm8_r8;
		inst[4] = U8(MOD_REG | (reg_map[TMP_REG1] << 3) | reg_map[dst]);
		return SLJIT_SUCCESS;
	}

	inst = (sljit_u8*)ensure_buf(compiler, 1 + 3 + 3);
	FAIL_IF(!inst);
	INC_SIZE(3 + 3);
	/* Set low byte to conditional flag. */
	inst[0] = GROUP_0F;
	inst[1] = cond_set;
	inst[2] = U8(MOD_REG | reg_map[TMP_REG1]);

	inst[3] = GROUP_0F;
	inst[4] = MOVZX_r_rm8;
	inst[5] = U8(MOD_REG | (reg_map[TMP_REG1] << 3) | reg_map[TMP_REG1]);

	if (GET_OPCODE(op) < SLJIT_ADD)
		return emit_mov(compiler, dst, dstw, TMP_REG1, 0);

	SLJIT_SKIP_CHECKS(compiler);
	return sljit_emit_op2(compiler, op, dst_save, dstw_save, dst_save, dstw_save, TMP_REG1, 0);
#endif /* SLJIT_CONFIG_X86_64 */
}

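/* Encoding notes: SETcc (0F 90+cc) writes only an 8-bit register, so the
   result is widened with MOVZX, which conveniently leaves the flags intact.
   Without a REX prefix only the first four registers are addressable as
   low byte registers, hence the reg_map checks above; on x86-32 this limits
   the fast paths to EAX, ECX, EDX and EBX. */
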
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fselect(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 dst_freg,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2_freg)
{
	sljit_u8* inst;
	sljit_uw size;

	CHECK_ERROR();
	CHECK(check_sljit_emit_fselect(compiler, type, dst_freg, src1, src1w, src2_freg));

	ADJUST_LOCAL_OFFSET(src1, src1w);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif /* SLJIT_CONFIG_X86_64 */

	if (dst_freg != src2_freg) {
		if (dst_freg == src1) {
			src1 = src2_freg;
			src1w = 0;
			type ^= 0x1;
		} else
			FAIL_IF(emit_sse2_load(compiler, type & SLJIT_32, dst_freg, src2_freg, 0));
	}

	inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
	FAIL_IF(!inst);
	INC_SIZE(2);
	inst[0] = U8(get_jump_code((sljit_uw)(type & ~SLJIT_32) ^ 0x1) - 0x10);

	size = compiler->size;
	FAIL_IF(emit_sse2_load(compiler, type & SLJIT_32, dst_freg, src1, src1w));

	inst[1] = U8(compiler->size - size);
	return SLJIT_SUCCESS;
}

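/* The conditional move is synthesized with a short branch: a Jcc with an
   8-bit displacement (the long form opcode minus 0x10) jumps over the load
   of src1, and inst[1] is patched afterwards with the exact number of bytes
   the load emitted. */
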
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_mov(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 vreg,
	sljit_s32 srcdst, sljit_sw srcdstw)
{
	sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
	sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
	sljit_s32 alignment = SLJIT_SIMD_GET_ELEM2_SIZE(type);
	sljit_uw op;

	CHECK_ERROR();
	CHECK(check_sljit_emit_simd_mov(compiler, type, vreg, srcdst, srcdstw));

	ADJUST_LOCAL_OFFSET(srcdst, srcdstw);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif /* SLJIT_CONFIG_X86_64 */

	switch (reg_size) {
	case 4:
		op = EX86_SSE2;
		break;
	case 5:
		if (!(cpu_feature_list & CPU_FEATURE_AVX2))
			return SLJIT_ERR_UNSUPPORTED;
		op = EX86_SSE2 | VEX_256;
		break;
	default:
		return SLJIT_ERR_UNSUPPORTED;
	}

	if (!(srcdst & SLJIT_MEM))
		alignment = reg_size;

	if (type & SLJIT_SIMD_FLOAT) {
		if (elem_size == 2 || elem_size == 3) {
			op |= alignment >= reg_size ? MOVAPS_x_xm : MOVUPS_x_xm;

			if (elem_size == 3)
				op |= EX86_PREF_66;

			if (type & SLJIT_SIMD_STORE)
				op += 1;
		} else
			return SLJIT_ERR_UNSUPPORTED;
	} else {
		op |= ((type & SLJIT_SIMD_STORE) ? MOVDQA_xm_x : MOVDQA_x_xm)
			| (alignment >= reg_size ? EX86_PREF_66 : EX86_PREF_F3);
	}

	if (type & SLJIT_SIMD_TEST)
		return SLJIT_SUCCESS;

	if ((op & VEX_256) || ((cpu_feature_list & CPU_FEATURE_AVX) && (compiler->options & SLJIT_ENTER_USE_VEX)))
		return emit_vex_instruction(compiler, op, vreg, 0, srcdst, srcdstw);

	return emit_groupf(compiler, op, vreg, srcdst, srcdstw);
}

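/* Opcode selection note: the aligned/unaligned pairs differ only in opcode
   or prefix. MOVAPS (0F 28) and MOVUPS (0F 10) each have a store form one
   opcode byte higher (0F 29 / 0F 11, the "op += 1" above), a 66 prefix
   turns the packed-single forms into their packed-double variants, and
   MOVDQA (66 0F 6F) vs MOVDQU (F3 0F 6F) share an opcode and are selected
   purely by the mandatory prefix. */
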
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_replicate(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 vreg,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
	sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
	sljit_s32 use_vex = (cpu_feature_list & CPU_FEATURE_AVX) && (compiler->options & SLJIT_ENTER_USE_VEX);
	sljit_u8 *inst;
	sljit_u8 opcode = 0;
	sljit_uw op;

	CHECK_ERROR();
	CHECK(check_sljit_emit_simd_replicate(compiler, type, vreg, src, srcw));

	ADJUST_LOCAL_OFFSET(src, srcw);

	if (!(type & SLJIT_SIMD_FLOAT)) {
		CHECK_EXTRA_REGS(src, srcw, (void)0);
	}

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	if ((type & SLJIT_SIMD_FLOAT) ? (elem_size < 2 || elem_size > 3) : (elem_size > 2))
		return SLJIT_ERR_UNSUPPORTED;
#else /* !SLJIT_CONFIG_X86_32 */
	compiler->mode32 = 1;

	if (elem_size > 3 || ((type & SLJIT_SIMD_FLOAT) && elem_size < 2))
		return SLJIT_ERR_UNSUPPORTED;
#endif /* SLJIT_CONFIG_X86_32 */

	if (reg_size != 4 && (reg_size != 5 || !(cpu_feature_list & CPU_FEATURE_AVX2)))
		return SLJIT_ERR_UNSUPPORTED;

	if (type & SLJIT_SIMD_TEST)
		return SLJIT_SUCCESS;

	if (reg_size == 5)
		use_vex = 1;

	if (use_vex && src != SLJIT_IMM) {
		op = 0;

		switch (elem_size) {
		case 0:
			if (cpu_feature_list & CPU_FEATURE_AVX2)
				op = VPBROADCASTB_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
			break;
		case 1:
			if (cpu_feature_list & CPU_FEATURE_AVX2)
				op = VPBROADCASTW_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
			break;
		case 2:
			if (type & SLJIT_SIMD_FLOAT) {
				if ((cpu_feature_list & CPU_FEATURE_AVX2) || ((cpu_feature_list & CPU_FEATURE_AVX) && (src & SLJIT_MEM)))
					op = VBROADCASTSS_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
			} else if (cpu_feature_list & CPU_FEATURE_AVX2)
				op = VPBROADCASTD_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
			break;
		default:
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			if (!(type & SLJIT_SIMD_FLOAT)) {
				if (cpu_feature_list & CPU_FEATURE_AVX2)
					op = VPBROADCASTQ_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
				break;
			}
#endif /* SLJIT_CONFIG_X86_64 */

			if (reg_size == 5)
				op = VBROADCASTSD_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
			break;
		}

		if (op != 0) {
			if (!(src & SLJIT_MEM) && !(type & SLJIT_SIMD_FLOAT)) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
				if (elem_size >= 3)
					compiler->mode32 = 0;
#endif /* SLJIT_CONFIG_X86_64 */
				FAIL_IF(emit_vex_instruction(compiler, MOVD_x_rm | VEX_AUTO_W | EX86_PREF_66 | EX86_SSE2_OP1, vreg, 0, src, srcw));
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
				compiler->mode32 = 1;
#endif /* SLJIT_CONFIG_X86_64 */
				src = vreg;
				srcw = 0;
			}

			if (reg_size == 5)
				op |= VEX_256;

			return emit_vex_instruction(compiler, op, vreg, 0, src, srcw);
		}
	}

	if (type & SLJIT_SIMD_FLOAT) {
		if (src == SLJIT_IMM) {
			if (use_vex)
				return emit_vex_instruction(compiler, XORPD_x_xm | (reg_size == 5 ? VEX_256 : 0) | (elem_size == 3 ? EX86_PREF_66 : 0) | EX86_SSE2 | VEX_SSE2_OPV, vreg, vreg, vreg, 0);

			return emit_groupf(compiler, XORPD_x_xm | (elem_size == 3 ? EX86_PREF_66 : 0) | EX86_SSE2, vreg, vreg, 0);
		}

		SLJIT_ASSERT(reg_size == 4);

		if (use_vex) {
			if (elem_size == 3)
				return emit_vex_instruction(compiler, MOVDDUP_x_xm | EX86_PREF_F2 | EX86_SSE2, vreg, 0, src, srcw);

			SLJIT_ASSERT(!(src & SLJIT_MEM));
			FAIL_IF(emit_vex_instruction(compiler, SHUFPS_x_xm | EX86_SSE2 | VEX_SSE2_OPV, vreg, src, src, 0));
			return emit_byte(compiler, 0);
		}

		if (elem_size == 2 && vreg != src) {
			FAIL_IF(emit_sse2_load(compiler, 1, vreg, src, srcw));
			src = vreg;
			srcw = 0;
		}

		op = (elem_size == 2 ? SHUFPS_x_xm : MOVDDUP_x_xm) | (elem_size == 2 ? 0 : EX86_PREF_F2) | EX86_SSE2;
		FAIL_IF(emit_groupf(compiler, op, vreg, src, srcw));

		if (elem_size == 2)
			return emit_byte(compiler, 0);
		return SLJIT_SUCCESS;
	}

	if (src == SLJIT_IMM) {
		if (elem_size == 0) {
			srcw = (sljit_u8)srcw;
			srcw |= srcw << 8;
			srcw |= srcw << 16;
			elem_size = 2;
		} else if (elem_size == 1) {
			srcw = (sljit_u16)srcw;
			srcw |= srcw << 16;
			elem_size = 2;
		}

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (elem_size == 2 && (sljit_s32)srcw == -1)
			srcw = -1;
#endif /* SLJIT_CONFIG_X86_64 */

		if (srcw == 0 || srcw == -1) {
			if (use_vex)
				return emit_vex_instruction(compiler, (srcw == 0 ? PXOR_x_xm : PCMPEQD_x_xm) | (reg_size == 5 ? VEX_256 : 0) | EX86_PREF_66 | EX86_SSE2 | VEX_SSE2_OPV, vreg, vreg, vreg, 0);

			return emit_groupf(compiler, (srcw == 0 ? PXOR_x_xm : PCMPEQD_x_xm) | EX86_PREF_66 | EX86_SSE2, vreg, vreg, 0);
		}

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (elem_size == 3)
			FAIL_IF(emit_load_imm64(compiler, TMP_REG1, srcw));
		else
#endif /* SLJIT_CONFIG_X86_64 */
			EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, srcw);

		src = TMP_REG1;
		srcw = 0;

	}

	op = 2;
	opcode = MOVD_x_rm;

	switch (elem_size) {
	case 0:
		if (!FAST_IS_REG(src)) {
			opcode = 0x3a /* Prefix of PINSRB_x_rm_i8. */;
			op = 3;
		}
		break;
	case 1:
		if (!FAST_IS_REG(src))
			opcode = PINSRW_x_rm_i8;
		break;
	case 2:
		break;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	case 3:
		/* MOVQ */
		compiler->mode32 = 0;
		break;
#endif /* SLJIT_CONFIG_X86_64 */
	}

	if (use_vex) {
		if (opcode != MOVD_x_rm) {
			op = (opcode == 0x3a) ? (PINSRB_x_rm_i8 | VEX_OP_0F3A) : opcode;
			FAIL_IF(emit_vex_instruction(compiler, op | EX86_PREF_66 | EX86_SSE2_OP1 | VEX_SSE2_OPV, vreg, vreg, src, srcw));
		} else
			FAIL_IF(emit_vex_instruction(compiler, MOVD_x_rm | VEX_AUTO_W | EX86_PREF_66 | EX86_SSE2_OP1, vreg, 0, src, srcw));
	} else {
		inst = emit_x86_instruction(compiler, op | EX86_PREF_66 | EX86_SSE2_OP1, vreg, 0, src, srcw);
		FAIL_IF(!inst);
		inst[0] = GROUP_0F;
		inst[1] = opcode;

		if (op == 3) {
			SLJIT_ASSERT(opcode == 0x3a);
			inst[2] = PINSRB_x_rm_i8;
		}
	}

	if ((cpu_feature_list & CPU_FEATURE_AVX2) && use_vex && elem_size >= 2) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		op = VPBROADCASTD_x_xm;
#else /* !SLJIT_CONFIG_X86_32 */
		op = (elem_size == 3) ? VPBROADCASTQ_x_xm : VPBROADCASTD_x_xm;
#endif /* SLJIT_CONFIG_X86_32 */
		return emit_vex_instruction(compiler, op | ((reg_size == 5) ? VEX_256 : 0) | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, vreg, 0, vreg, 0);
	}

	SLJIT_ASSERT(reg_size == 4);

	if (opcode != MOVD_x_rm)
		FAIL_IF(emit_byte(compiler, 0));

	switch (elem_size) {
	case 0:
		if (use_vex) {
			FAIL_IF(emit_vex_instruction(compiler, PXOR_x_xm | EX86_PREF_66 | EX86_SSE2 | VEX_SSE2_OPV, TMP_FREG, TMP_FREG, TMP_FREG, 0));
			return emit_vex_instruction(compiler, PSHUFB_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2 | VEX_SSE2_OPV, vreg, vreg, TMP_FREG, 0);
		}
		FAIL_IF(emit_groupf(compiler, PXOR_x_xm | EX86_PREF_66 | EX86_SSE2, TMP_FREG, TMP_FREG, 0));
		return emit_groupf_ext(compiler, PSHUFB_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, vreg, TMP_FREG, 0);
	case 1:
		if (use_vex)
			FAIL_IF(emit_vex_instruction(compiler, PSHUFLW_x_xm | EX86_PREF_F2 | EX86_SSE2, vreg, 0, vreg, 0));
		else
			FAIL_IF(emit_groupf(compiler, PSHUFLW_x_xm | EX86_PREF_F2 | EX86_SSE2, vreg, vreg, 0));
		FAIL_IF(emit_byte(compiler, 0));
		/* fallthrough */
	default:
		if (use_vex)
			FAIL_IF(emit_vex_instruction(compiler, PSHUFD_x_xm | EX86_PREF_66 | EX86_SSE2, vreg, 0, vreg, 0));
		else
			FAIL_IF(emit_groupf(compiler, PSHUFD_x_xm | EX86_PREF_66 | EX86_SSE2, vreg, vreg, 0));
		return emit_byte(compiler, 0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	case 3:
		compiler->mode32 = 1;
		if (use_vex)
			FAIL_IF(emit_vex_instruction(compiler, PSHUFD_x_xm | EX86_PREF_66 | EX86_SSE2, vreg, 0, vreg, 0));
		else
			FAIL_IF(emit_groupf(compiler, PSHUFD_x_xm | EX86_PREF_66 | EX86_SSE2, vreg, vreg, 0));
		return emit_byte(compiler, 0x44);
#endif /* SLJIT_CONFIG_X86_64 */
	}
}

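/* Immediate splats are folded before emission: an 8/16-bit value is widened
   to 32 bits with the srcw |= srcw << 8 / << 16 shifts, so a single MOVD
   plus PSHUFD with a zero immediate (broadcast dword 0 to every lane)
   covers the 8/16/32-bit cases, and 0 / -1 degenerate to PXOR / PCMPEQD.
   The 0x44 immediate (binary 01 00 01 00) makes PSHUFD duplicate the low
   qword. */
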
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_mov(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 vreg, sljit_s32 lane_index,
	sljit_s32 srcdst, sljit_sw srcdstw)
{
	sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
	sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
	sljit_s32 use_vex = (cpu_feature_list & CPU_FEATURE_AVX) && (compiler->options & SLJIT_ENTER_USE_VEX);
	sljit_u8 *inst;
	sljit_u8 opcode = 0;
	sljit_uw op;
	sljit_s32 vreg_orig = vreg;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	sljit_s32 srcdst_is_ereg = 0;
	sljit_s32 srcdst_orig = 0;
	sljit_sw srcdstw_orig = 0;
#endif /* SLJIT_CONFIG_X86_32 */

	CHECK_ERROR();
	CHECK(check_sljit_emit_simd_lane_mov(compiler, type, vreg, lane_index, srcdst, srcdstw));

	ADJUST_LOCAL_OFFSET(srcdst, srcdstw);

	if (reg_size == 5) {
		if (!(cpu_feature_list & CPU_FEATURE_AVX2))
			return SLJIT_ERR_UNSUPPORTED;
		use_vex = 1;
	} else if (reg_size != 4)
		return SLJIT_ERR_UNSUPPORTED;

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	if ((type & SLJIT_SIMD_FLOAT) ? (elem_size < 2 || elem_size > 3) : elem_size > 2)
		return SLJIT_ERR_UNSUPPORTED;
#else /* !SLJIT_CONFIG_X86_32 */
	if (elem_size > 3 || ((type & SLJIT_SIMD_FLOAT) && elem_size < 2))
		return SLJIT_ERR_UNSUPPORTED;
#endif /* SLJIT_CONFIG_X86_32 */

	if (type & SLJIT_SIMD_TEST)
		return SLJIT_SUCCESS;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#else /* !SLJIT_CONFIG_X86_64 */
	if (!(type & SLJIT_SIMD_FLOAT)) {
		CHECK_EXTRA_REGS(srcdst, srcdstw, srcdst_is_ereg = 1);

		if ((type & SLJIT_SIMD_STORE) && ((srcdst_is_ereg && elem_size < 2) || (elem_size == 0 && (type & SLJIT_SIMD_LANE_SIGNED) && FAST_IS_REG(srcdst) && reg_map[srcdst] >= 4))) {
			srcdst_orig = srcdst;
			srcdstw_orig = srcdstw;
			srcdst = TMP_REG1;
			srcdstw = 0;
		}
	}
#endif /* SLJIT_CONFIG_X86_64 */

	if (type & SLJIT_SIMD_LANE_ZERO) {
		if (lane_index == 0) {
			if (!(type & SLJIT_SIMD_FLOAT)) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
				if (elem_size == 3) {
					compiler->mode32 = 0;
					elem_size = 2;
				}
#endif /* SLJIT_CONFIG_X86_64 */
				if (srcdst == SLJIT_IMM) {
					if (elem_size == 0)
						srcdstw = (sljit_u8)srcdstw;
					else if (elem_size == 1)
						srcdstw = (sljit_u16)srcdstw;

					EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, srcdstw);
					srcdst = TMP_REG1;
					srcdstw = 0;
					elem_size = 2;
				}

				if (elem_size == 2) {
					if (use_vex)
						return emit_vex_instruction(compiler, MOVD_x_rm | VEX_AUTO_W | EX86_PREF_66 | EX86_SSE2_OP1, vreg, 0, srcdst, srcdstw);
					return emit_groupf(compiler, MOVD_x_rm | EX86_PREF_66 | EX86_SSE2_OP1, vreg, srcdst, srcdstw);
				}
			} else if (srcdst & SLJIT_MEM) {
				SLJIT_ASSERT(elem_size == 2 || elem_size == 3);

				if (use_vex)
					return emit_vex_instruction(compiler, MOVSD_x_xm | (elem_size == 2 ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2, vreg, 0, srcdst, srcdstw);
				return emit_groupf(compiler, MOVSD_x_xm | (elem_size == 2 ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2, vreg, srcdst, srcdstw);
			} else if (elem_size == 3) {
				if (use_vex)
					return emit_vex_instruction(compiler, MOVQ_x_xm | EX86_PREF_F3 | EX86_SSE2, vreg, 0, srcdst, 0);
				return emit_groupf(compiler, MOVQ_x_xm | EX86_PREF_F3 | EX86_SSE2, vreg, srcdst, 0);
			} else if (use_vex) {
				FAIL_IF(emit_vex_instruction(compiler, XORPD_x_xm | EX86_SSE2 | VEX_SSE2_OPV, TMP_FREG, TMP_FREG, TMP_FREG, 0));
				return emit_vex_instruction(compiler, MOVSD_x_xm | EX86_PREF_F3 | EX86_SSE2 | VEX_SSE2_OPV, vreg, TMP_FREG, srcdst, 0);
			}
		}

		if (reg_size == 5 && lane_index >= (1 << (4 - elem_size))) {
			vreg = TMP_FREG;
			lane_index -= (1 << (4 - elem_size));
		} else if ((type & SLJIT_SIMD_FLOAT) && vreg == srcdst) {
			if (use_vex)
				FAIL_IF(emit_vex_instruction(compiler, MOVSD_x_xm | (elem_size == 2 ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2 | VEX_SSE2_OPV, TMP_FREG, TMP_FREG, srcdst, srcdstw));
			else
				FAIL_IF(emit_sse2_load(compiler, elem_size == 2, TMP_FREG, srcdst, srcdstw));
			srcdst = TMP_FREG;
			srcdstw = 0;
		}

		op = ((!(type & SLJIT_SIMD_FLOAT) || elem_size != 2) ? EX86_PREF_66 : 0)
			| ((type & SLJIT_SIMD_FLOAT) ? XORPD_x_xm : PXOR_x_xm) | EX86_SSE2;

		if (use_vex)
			FAIL_IF(emit_vex_instruction(compiler, op | (reg_size == 5 ? VEX_256 : 0) | VEX_SSE2_OPV, vreg, vreg, vreg, 0));
		else
			FAIL_IF(emit_groupf(compiler, op, vreg, vreg, 0));
	} else if (reg_size == 5 && lane_index >= (1 << (4 - elem_size))) {
		FAIL_IF(emit_vex_instruction(compiler, ((type & SLJIT_SIMD_FLOAT) ? VEXTRACTF128_x_ym : VEXTRACTI128_x_ym) | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2, vreg, 0, TMP_FREG, 0));
		FAIL_IF(emit_byte(compiler, 1));

		vreg = TMP_FREG;
		lane_index -= (1 << (4 - elem_size));
	}

	if (type & SLJIT_SIMD_FLOAT) {
		if (elem_size == 3) {
			if (srcdst & SLJIT_MEM) {
				if (type & SLJIT_SIMD_STORE)
					op = lane_index == 0 ? MOVLPD_m_x : MOVHPD_m_x;
				else
					op = lane_index == 0 ? MOVLPD_x_m : MOVHPD_x_m;

				/* VEX prefix clears upper bits of the target register. */
				if (use_vex && ((type & SLJIT_SIMD_STORE) || reg_size == 4 || vreg == TMP_FREG))
					FAIL_IF(emit_vex_instruction(compiler, op | EX86_PREF_66 | EX86_SSE2
						| ((type & SLJIT_SIMD_STORE) ? 0 : VEX_SSE2_OPV), vreg, (type & SLJIT_SIMD_STORE) ? 0 : vreg, srcdst, srcdstw));
				else
					FAIL_IF(emit_groupf(compiler, op | EX86_PREF_66 | EX86_SSE2, vreg, srcdst, srcdstw));

				/* In case of store, vreg is not TMP_FREG. */
			} else if (type & SLJIT_SIMD_STORE) {
				if (lane_index == 1) {
					if (use_vex)
						return emit_vex_instruction(compiler, MOVHLPS_x_x | EX86_SSE2 | VEX_SSE2_OPV, srcdst, srcdst, vreg, 0);
					return emit_groupf(compiler, MOVHLPS_x_x | EX86_SSE2, srcdst, vreg, 0);
				}
				if (use_vex)
					return emit_vex_instruction(compiler, MOVSD_x_xm | EX86_PREF_F2 | EX86_SSE2 | VEX_SSE2_OPV, srcdst, srcdst, vreg, 0);
				return emit_sse2_load(compiler, 0, srcdst, vreg, 0);
			} else if (use_vex && (reg_size == 4 || vreg == TMP_FREG)) {
				if (lane_index == 1)
					FAIL_IF(emit_vex_instruction(compiler, MOVLHPS_x_x | EX86_SSE2 | VEX_SSE2_OPV, vreg, vreg, srcdst, 0));
				else
					FAIL_IF(emit_vex_instruction(compiler, MOVSD_x_xm | EX86_PREF_F2 | EX86_SSE2 | VEX_SSE2_OPV, vreg, vreg, srcdst, 0));
			} else {
				if (lane_index == 1)
					FAIL_IF(emit_groupf(compiler, MOVLHPS_x_x | EX86_SSE2, vreg, srcdst, 0));
				else
					FAIL_IF(emit_sse2_load(compiler, 0, vreg, srcdst, 0));
			}
		} else if (type & SLJIT_SIMD_STORE) {
			if (lane_index == 0) {
				if (use_vex)
					return emit_vex_instruction(compiler, MOVSD_xm_x | EX86_PREF_F3 | EX86_SSE2 | ((srcdst & SLJIT_MEM) ? 0 : VEX_SSE2_OPV),
						vreg, ((srcdst & SLJIT_MEM) ? 0 : srcdst), srcdst, srcdstw);
				return emit_sse2_store(compiler, 1, srcdst, srcdstw, vreg);
			}

			if (srcdst & SLJIT_MEM) {
				if (use_vex)
					FAIL_IF(emit_vex_instruction(compiler, EXTRACTPS_x_xm | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2, vreg, 0, srcdst, srcdstw));
				else
					FAIL_IF(emit_groupf_ext(compiler, EXTRACTPS_x_xm | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2, vreg, srcdst, srcdstw));
				return emit_byte(compiler, U8(lane_index));
			}

			if (use_vex) {
				FAIL_IF(emit_vex_instruction(compiler, SHUFPS_x_xm | EX86_SSE2 | VEX_SSE2_OPV, srcdst, vreg, vreg, 0));
				return emit_byte(compiler, U8(lane_index));
			}

			if (srcdst == vreg)
				op = SHUFPS_x_xm | EX86_SSE2;
			else {
				switch (lane_index) {
				case 1:
					op = MOVSHDUP_x_xm | EX86_PREF_F3 | EX86_SSE2;
					break;
				case 2:
					op = MOVHLPS_x_x | EX86_SSE2;
					break;
				default:
					SLJIT_ASSERT(lane_index == 3);
					op = PSHUFD_x_xm | EX86_PREF_66 | EX86_SSE2;
					break;
				}
			}

			FAIL_IF(emit_groupf(compiler, op, srcdst, vreg, 0));

			op &= 0xff;
			if (op == SHUFPS_x_xm || op == PSHUFD_x_xm)
				return emit_byte(compiler, U8(lane_index));

			return SLJIT_SUCCESS;
		} else {
			if (lane_index != 0 || (srcdst & SLJIT_MEM)) {
				FAIL_IF(emit_groupf_ext(compiler, INSERTPS_x_xm | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2, vreg, srcdst, srcdstw));
				FAIL_IF(emit_byte(compiler, U8(lane_index << 4)));
			} else
				FAIL_IF(emit_sse2_store(compiler, 1, vreg, 0, srcdst));
		}

		if (vreg != TMP_FREG || (type & SLJIT_SIMD_STORE))
			return SLJIT_SUCCESS;

		SLJIT_ASSERT(reg_size == 5);

		if (type & SLJIT_SIMD_LANE_ZERO) {
			FAIL_IF(emit_vex_instruction(compiler, VPERMPD_y_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | VEX_W | EX86_SSE2, vreg_orig, 0, TMP_FREG, 0));
			return emit_byte(compiler, 0x4e);
		}

		FAIL_IF(emit_vex_instruction(compiler, VINSERTF128_y_y_xm | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2 | VEX_SSE2_OPV, vreg_orig, vreg_orig, TMP_FREG, 0));
		return emit_byte(compiler, 1);
	}

	if (srcdst == SLJIT_IMM) {
		EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, srcdstw);
		srcdst = TMP_REG1;
		srcdstw = 0;
	}

	op = 3;

	switch (elem_size) {
	case 0:
		opcode = (type & SLJIT_SIMD_STORE) ? PEXTRB_rm_x_i8 : PINSRB_x_rm_i8;
		break;
	case 1:
		if (!(type & SLJIT_SIMD_STORE)) {
			op = 2;
			opcode = PINSRW_x_rm_i8;
		} else
			opcode = PEXTRW_rm_x_i8;
		break;
	case 2:
		opcode = (type & SLJIT_SIMD_STORE) ? PEXTRD_rm_x_i8 : PINSRD_x_rm_i8;
		break;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	case 3:
		/* PINSRQ / PEXTRQ */
		opcode = (type & SLJIT_SIMD_STORE) ? PEXTRD_rm_x_i8 : PINSRD_x_rm_i8;
		compiler->mode32 = 0;
		break;
#endif /* SLJIT_CONFIG_X86_64 */
	}

	if (use_vex && (type & SLJIT_SIMD_STORE)) {
		op = opcode | ((op == 3) ? VEX_OP_0F3A : 0);
		FAIL_IF(emit_vex_instruction(compiler, op | EX86_PREF_66 | VEX_AUTO_W | EX86_SSE2_OP1 | VEX_SSE2_OPV, vreg, 0, srcdst, srcdstw));
	} else {
		inst = emit_x86_instruction(compiler, op | EX86_PREF_66 | EX86_SSE2_OP1, vreg, 0, srcdst, srcdstw);
		FAIL_IF(!inst);
		inst[0] = GROUP_0F;

		if (op == 3) {
			inst[1] = 0x3a;
			inst[2] = opcode;
		} else
			inst[1] = opcode;
	}

	FAIL_IF(emit_byte(compiler, U8(lane_index)));

	if (!(type & SLJIT_SIMD_LANE_SIGNED) || (srcdst & SLJIT_MEM)) {
		if (vreg == TMP_FREG && !(type & SLJIT_SIMD_STORE)) {
			SLJIT_ASSERT(reg_size == 5);

			if (type & SLJIT_SIMD_LANE_ZERO) {
				FAIL_IF(emit_vex_instruction(compiler, VPERMQ_y_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | VEX_W | EX86_SSE2, vreg_orig, 0, TMP_FREG, 0));
				return emit_byte(compiler, 0x4e);
			}

			FAIL_IF(emit_vex_instruction(compiler, VINSERTI128_y_y_xm | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2 | VEX_SSE2_OPV, vreg_orig, vreg_orig, TMP_FREG, 0));
			return emit_byte(compiler, 1);
		}

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (srcdst_orig & SLJIT_MEM)
			return emit_mov(compiler, srcdst_orig, srcdstw_orig, TMP_REG1, 0);
#endif /* SLJIT_CONFIG_X86_32 */
		return SLJIT_SUCCESS;
	}

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (elem_size >= 3)
		return SLJIT_SUCCESS;

	compiler->mode32 = (type & SLJIT_32);

	op = 2;

	if (elem_size == 0)
		op |= EX86_REX;

	if (elem_size == 2) {
		if (type & SLJIT_32)
			return SLJIT_SUCCESS;

		SLJIT_ASSERT(!(compiler->mode32));
		op = 1;
	}

	inst = emit_x86_instruction(compiler, op, srcdst, 0, srcdst, 0);
	FAIL_IF(!inst);

	if (op != 1) {
		inst[0] = GROUP_0F;
		inst[1] = U8((elem_size == 0) ? MOVSX_r_rm8 : MOVSX_r_rm16);
	} else
		inst[0] = MOVSXD_r_rm;
#else /* !SLJIT_CONFIG_X86_64 */
	if (elem_size >= 2)
		return SLJIT_SUCCESS;

	FAIL_IF(emit_groupf(compiler, (elem_size == 0) ? MOVSX_r_rm8 : MOVSX_r_rm16,
		(srcdst_orig != 0 && FAST_IS_REG(srcdst_orig)) ? srcdst_orig : srcdst, srcdst, 0));

	if (srcdst_orig & SLJIT_MEM)
		return emit_mov(compiler, srcdst_orig, srcdstw_orig, TMP_REG1, 0);
#endif /* SLJIT_CONFIG_X86_64 */
	return SLJIT_SUCCESS;
}

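/* Encoding note: op encodes the opcode length above. The SSE4.1 insert and
   extract forms (PINSRB/PEXTRB, PINSRD/PEXTRD, ...) live in the three-byte
   0F 3A opcode space (op == 3), while PINSRW predates SSE4.1 and uses the
   two-byte 0F C4 encoding (op == 2). */
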
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_replicate(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 vreg,
	sljit_s32 src, sljit_s32 src_lane_index)
{
	sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
	sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
	sljit_s32 use_vex = (cpu_feature_list & CPU_FEATURE_AVX) && (compiler->options & SLJIT_ENTER_USE_VEX);
	sljit_uw pref;
	sljit_u8 byte;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	sljit_s32 opcode3 = TMP_REG1;
#else /* !SLJIT_CONFIG_X86_32 */
	sljit_s32 opcode3 = SLJIT_S0;
#endif /* SLJIT_CONFIG_X86_32 */

	CHECK_ERROR();
	CHECK(check_sljit_emit_simd_lane_replicate(compiler, type, vreg, src, src_lane_index));

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif /* SLJIT_CONFIG_X86_64 */
	SLJIT_ASSERT(reg_map[opcode3] == 3);

	if (reg_size == 5) {
		if (!(cpu_feature_list & CPU_FEATURE_AVX2))
			return SLJIT_ERR_UNSUPPORTED;
		use_vex = 1;
	} else if (reg_size != 4)
		return SLJIT_ERR_UNSUPPORTED;

	if (type & SLJIT_SIMD_FLOAT) {
		pref = 0;
		byte = U8(src_lane_index);

		if (elem_size == 3) {
			if (type & SLJIT_SIMD_TEST)
				return SLJIT_SUCCESS;

			if (reg_size == 5) {
				if (src_lane_index == 0)
					return emit_vex_instruction(compiler, VBROADCASTSD_x_xm | VEX_256 | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, vreg, 0, src, 0);

				FAIL_IF(emit_vex_instruction(compiler, VPERMPD_y_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | VEX_W | EX86_SSE2, vreg, 0, src, 0));

				byte = U8(byte | (byte << 2));
				return emit_byte(compiler, U8(byte | (byte << 4)));
			}

			if (src_lane_index == 0) {
				if (use_vex)
					return emit_vex_instruction(compiler, MOVDDUP_x_xm | EX86_PREF_F2 | EX86_SSE2, vreg, 0, src, 0);
				return emit_groupf(compiler, MOVDDUP_x_xm | EX86_PREF_F2 | EX86_SSE2, vreg, src, 0);
			}

			/* Changes it to SHUFPD_x_xm. */
			pref = EX86_PREF_66;
		} else if (elem_size != 2)
			return SLJIT_ERR_UNSUPPORTED;
		else if (type & SLJIT_SIMD_TEST)
			return SLJIT_SUCCESS;

		if (reg_size == 5) {
			SLJIT_ASSERT(elem_size == 2);

			if (src_lane_index == 0)
				return emit_vex_instruction(compiler, VBROADCASTSS_x_xm | VEX_256 | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, vreg, 0, src, 0);

			FAIL_IF(emit_vex_instruction(compiler, VPERMPD_y_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | VEX_W | EX86_SSE2, vreg, 0, src, 0));

			byte = 0x44;
			if (src_lane_index >= 4) {
				byte = 0xee;
				src_lane_index -= 4;
			}

			FAIL_IF(emit_byte(compiler, byte));
			FAIL_IF(emit_vex_instruction(compiler, SHUFPS_x_xm | VEX_256 | pref | EX86_SSE2 | VEX_SSE2_OPV, vreg, vreg, vreg, 0));
			byte = U8(src_lane_index);
		} else if (use_vex) {
			FAIL_IF(emit_vex_instruction(compiler, SHUFPS_x_xm | pref | EX86_SSE2 | VEX_SSE2_OPV, vreg, src, src, 0));
		} else {
			if (vreg != src)
				FAIL_IF(emit_groupf(compiler, MOVAPS_x_xm | pref | EX86_SSE2, vreg, src, 0));

			FAIL_IF(emit_groupf(compiler, SHUFPS_x_xm | pref | EX86_SSE2, vreg, vreg, 0));
		}

		if (elem_size == 2) {
			byte = U8(byte | (byte << 2));
			byte = U8(byte | (byte << 4));
		} else
			byte = U8(byte | (byte << 1));

		return emit_byte(compiler, U8(byte));
	}

	if (type & SLJIT_SIMD_TEST)
		return SLJIT_SUCCESS;

	if (elem_size == 0) {
		if (reg_size == 5 && src_lane_index >= 16) {
			FAIL_IF(emit_vex_instruction(compiler, VPERMQ_y_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | VEX_W | EX86_SSE2, vreg, 0, src, 0));
			FAIL_IF(emit_byte(compiler, src_lane_index >= 24 ? 0xff : 0xaa));
			src_lane_index &= 0x7;
			src = vreg;
		}

		if (src_lane_index != 0 || (vreg != src && (!(cpu_feature_list & CPU_FEATURE_AVX2) || !use_vex))) {
			pref = 0;

			if ((src_lane_index & 0x3) == 0) {
				pref = EX86_PREF_66;
				byte = U8(src_lane_index >> 2);
			} else if (src_lane_index < 8 && (src_lane_index & 0x1) == 0) {
				pref = EX86_PREF_F2;
				byte = U8(src_lane_index >> 1);
			} else {
				if (!use_vex) {
					if (vreg != src)
						FAIL_IF(emit_groupf(compiler, MOVDQA_x_xm | EX86_PREF_66 | EX86_SSE2, vreg, src, 0));

					FAIL_IF(emit_groupf(compiler, PSRLDQ_x | EX86_PREF_66 | EX86_SSE2_OP2, opcode3, vreg, 0));
				} else
					FAIL_IF(emit_vex_instruction(compiler, PSRLDQ_x | EX86_PREF_66 | EX86_SSE2_OP2 | VEX_SSE2_OPV, opcode3, vreg, src, 0));

				FAIL_IF(emit_byte(compiler, U8(src_lane_index)));
			}

			if (pref != 0) {
				if (use_vex)
					FAIL_IF(emit_vex_instruction(compiler, PSHUFLW_x_xm | pref | EX86_SSE2, vreg, 0, src, 0));
				else
					FAIL_IF(emit_groupf(compiler, PSHUFLW_x_xm | pref | EX86_SSE2, vreg, src, 0));
				FAIL_IF(emit_byte(compiler, byte));
			}

			src = vreg;
		}

		if (use_vex && (cpu_feature_list & CPU_FEATURE_AVX2))
			return emit_vex_instruction(compiler, VPBROADCASTB_x_xm | (reg_size == 5 ? VEX_256 : 0) | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, vreg, 0, src, 0);

		SLJIT_ASSERT(reg_size == 4);
		FAIL_IF(emit_groupf(compiler, PXOR_x_xm | EX86_PREF_66 | EX86_SSE2, TMP_FREG, TMP_FREG, 0));
		return emit_groupf_ext(compiler, PSHUFB_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, vreg, TMP_FREG, 0);
	}

	if ((cpu_feature_list & CPU_FEATURE_AVX2) && use_vex && src_lane_index == 0 && elem_size <= 3) {
		switch (elem_size) {
		case 1:
			pref = VPBROADCASTW_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
			break;
		case 2:
			pref = VPBROADCASTD_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
			break;
		default:
			pref = VPBROADCASTQ_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
			break;
		}

		if (reg_size == 5)
			pref |= VEX_256;

		return emit_vex_instruction(compiler, pref, vreg, 0, src, 0);
	}

	if (reg_size == 5) {
		switch (elem_size) {
		case 1:
			byte = U8(src_lane_index & 0x3);
			src_lane_index >>= 2;
			pref = PSHUFLW_x_xm | VEX_256 | ((src_lane_index & 1) == 0 ? EX86_PREF_F2 : EX86_PREF_F3) | EX86_SSE2;
			break;
		case 2:
			byte = U8(src_lane_index & 0x3);
			src_lane_index >>= 1;
			pref = PSHUFD_x_xm | VEX_256 | EX86_PREF_66 | EX86_SSE2;
			break;
		case 3:
			pref = 0;
			break;
		default:
			FAIL_IF(emit_vex_instruction(compiler, VPERMQ_y_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | VEX_W | EX86_SSE2, vreg, 0, src, 0));
			return emit_byte(compiler, U8(src_lane_index == 0 ? 0x44 : 0xee));
		}

		if (pref != 0) {
			FAIL_IF(emit_vex_instruction(compiler, pref, vreg, 0, src, 0));
			byte = U8(byte | (byte << 2));
			FAIL_IF(emit_byte(compiler, U8(byte | (byte << 4))));

			if (src_lane_index == 0)
				return emit_vex_instruction(compiler, VPBROADCASTQ_x_xm | VEX_256 | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, vreg, 0, vreg, 0);

			src = vreg;
		}

		FAIL_IF(emit_vex_instruction(compiler, VPERMQ_y_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | VEX_W | EX86_SSE2, vreg, 0, src, 0));
		byte = U8(src_lane_index);
		byte = U8(byte | (byte << 2));
		return emit_byte(compiler, U8(byte | (byte << 4)));
	}

	switch (elem_size) {
	case 1:
		byte = U8(src_lane_index & 0x3);
		src_lane_index >>= 1;
		pref = (src_lane_index & 2) == 0 ? EX86_PREF_F2 : EX86_PREF_F3;

		if (use_vex)
			FAIL_IF(emit_vex_instruction(compiler, PSHUFLW_x_xm | pref | EX86_SSE2, vreg, 0, src, 0));
		else
			FAIL_IF(emit_groupf(compiler, PSHUFLW_x_xm | pref | EX86_SSE2, vreg, src, 0));
		byte = U8(byte | (byte << 2));
		FAIL_IF(emit_byte(compiler, U8(byte | (byte << 4))));

		if ((cpu_feature_list & CPU_FEATURE_AVX2) && use_vex && pref == EX86_PREF_F2)
			return emit_vex_instruction(compiler, VPBROADCASTD_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, vreg, 0, vreg, 0);

		src = vreg;
		/* fallthrough */
	case 2:
		byte = U8(src_lane_index);
		byte = U8(byte | (byte << 2));
		break;
	default:
		byte = U8(src_lane_index << 1);
		byte = U8(byte | (byte << 2) | 0x4);
		break;
	}

	if (use_vex)
		FAIL_IF(emit_vex_instruction(compiler, PSHUFD_x_xm | EX86_PREF_66 | EX86_SSE2, vreg, 0, src, 0));
	else
		FAIL_IF(emit_groupf(compiler, PSHUFD_x_xm | EX86_PREF_66 | EX86_SSE2, vreg, src, 0));
	return emit_byte(compiler, U8(byte | (byte << 4)));
}

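/* The PSHUFD/PSHUFLW immediate is built by replicating a 2-bit lane index
   into all four selector fields: byte |= byte << 2 followed by
   byte |= byte << 4. The opcode3 register is not a data operand: PSRLDQ is
   encoded as 66 0F 73 /3, so a register whose hardware encoding is 3 is
   passed to fill the ModRM reg field (asserted above via reg_map). */
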
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_extend(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 vreg,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
	sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
	sljit_s32 elem2_size = SLJIT_SIMD_GET_ELEM2_SIZE(type);
	sljit_s32 use_vex = (cpu_feature_list & CPU_FEATURE_AVX) && (compiler->options & SLJIT_ENTER_USE_VEX);
	sljit_u8 opcode;

	CHECK_ERROR();
	CHECK(check_sljit_emit_simd_extend(compiler, type, vreg, src, srcw));

	ADJUST_LOCAL_OFFSET(src, srcw);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif /* SLJIT_CONFIG_X86_64 */

	if (reg_size == 5) {
		if (!(cpu_feature_list & CPU_FEATURE_AVX2))
			return SLJIT_ERR_UNSUPPORTED;
		use_vex = 1;
	} else if (reg_size != 4)
		return SLJIT_ERR_UNSUPPORTED;

	if (type & SLJIT_SIMD_FLOAT) {
		if (elem_size != 2 || elem2_size != 3)
			return SLJIT_ERR_UNSUPPORTED;

		if (type & SLJIT_SIMD_TEST)
			return SLJIT_SUCCESS;

		if (use_vex)
			return emit_vex_instruction(compiler, CVTPS2PD_x_xm | ((reg_size == 5) ? VEX_256 : 0) | EX86_SSE2, vreg, 0, src, srcw);
		return emit_groupf(compiler, CVTPS2PD_x_xm | EX86_SSE2, vreg, src, srcw);
	}

	switch (elem_size) {
	case 0:
		if (elem2_size == 1)
			opcode = (type & SLJIT_SIMD_EXTEND_SIGNED) ? PMOVSXBW_x_xm : PMOVZXBW_x_xm;
		else if (elem2_size == 2)
			opcode = (type & SLJIT_SIMD_EXTEND_SIGNED) ? PMOVSXBD_x_xm : PMOVZXBD_x_xm;
		else if (elem2_size == 3)
			opcode = (type & SLJIT_SIMD_EXTEND_SIGNED) ? PMOVSXBQ_x_xm : PMOVZXBQ_x_xm;
		else
			return SLJIT_ERR_UNSUPPORTED;
		break;
	case 1:
		if (elem2_size == 2)
			opcode = (type & SLJIT_SIMD_EXTEND_SIGNED) ? PMOVSXWD_x_xm : PMOVZXWD_x_xm;
		else if (elem2_size == 3)
			opcode = (type & SLJIT_SIMD_EXTEND_SIGNED) ? PMOVSXWQ_x_xm : PMOVZXWQ_x_xm;
		else
			return SLJIT_ERR_UNSUPPORTED;
		break;
	case 2:
		if (elem2_size == 3)
			opcode = (type & SLJIT_SIMD_EXTEND_SIGNED) ? PMOVSXDQ_x_xm : PMOVZXDQ_x_xm;
		else
			return SLJIT_ERR_UNSUPPORTED;
		break;
	default:
		return SLJIT_ERR_UNSUPPORTED;
	}

	if (type & SLJIT_SIMD_TEST)
		return SLJIT_SUCCESS;

	if (use_vex)
		return emit_vex_instruction(compiler, opcode | ((reg_size == 5) ? VEX_256 : 0) | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, vreg, 0, src, srcw);
	return emit_groupf_ext(compiler, opcode | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, vreg, src, srcw);
}

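/* PMOVZX/PMOVSX widen from the low part of the source: the memory forms
   only read as many bytes as the narrow element count requires (e.g.
   PMOVZXBW xmm, m64 reads 8 bytes), so no alignment or over-read fixup is
   needed here. */
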
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_sign(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 vreg,
	sljit_s32 dst, sljit_sw dstw)
{
	sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
	sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
	sljit_s32 use_vex = (cpu_feature_list & CPU_FEATURE_AVX) && (compiler->options & SLJIT_ENTER_USE_VEX);
	sljit_s32 dst_r;
	sljit_uw op;
	sljit_u8 *inst;

	CHECK_ERROR();
	CHECK(check_sljit_emit_simd_sign(compiler, type, vreg, dst, dstw));

	ADJUST_LOCAL_OFFSET(dst, dstw);

	CHECK_EXTRA_REGS(dst, dstw, (void)0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif /* SLJIT_CONFIG_X86_64 */

	if (elem_size > 3 || ((type & SLJIT_SIMD_FLOAT) && elem_size < 2))
		return SLJIT_ERR_UNSUPPORTED;

	if (reg_size == 4) {
		if (type & SLJIT_SIMD_TEST)
			return SLJIT_SUCCESS;

		op = EX86_PREF_66 | EX86_SSE2_OP2;

		switch (elem_size) {
		case 1:
			if (use_vex)
				FAIL_IF(emit_vex_instruction(compiler, PACKSSWB_x_xm | EX86_PREF_66 | EX86_SSE2 | VEX_SSE2_OPV, TMP_FREG, vreg, vreg, 0));
			else
				FAIL_IF(emit_groupf(compiler, PACKSSWB_x_xm | EX86_PREF_66 | EX86_SSE2, TMP_FREG, vreg, 0));
			vreg = TMP_FREG;
			break;
		case 2:
			op = EX86_SSE2_OP2;
			break;
		}

		dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
		op |= (elem_size < 2) ? PMOVMSKB_r_x : MOVMSKPS_r_x;

		if (use_vex)
			FAIL_IF(emit_vex_instruction(compiler, op, dst_r, 0, vreg, 0));
		else
			FAIL_IF(emit_groupf(compiler, op, dst_r, vreg, 0));

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		compiler->mode32 = type & SLJIT_32;
#endif /* SLJIT_CONFIG_X86_64 */

		if (elem_size == 1) {
			inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 8, dst_r, 0);
			FAIL_IF(!inst);
			inst[1] |= SHR;
		}

		if (dst_r == TMP_REG1)
			return emit_mov(compiler, dst, dstw, TMP_REG1, 0);

		return SLJIT_SUCCESS;
	}

	if (reg_size != 5 || !(cpu_feature_list & CPU_FEATURE_AVX2))
		return SLJIT_ERR_UNSUPPORTED;

	if (type & SLJIT_SIMD_TEST)
		return SLJIT_SUCCESS;

	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

	if (elem_size == 1) {
		FAIL_IF(emit_vex_instruction(compiler, VEXTRACTI128_x_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2, vreg, 0, TMP_FREG, 0));
		FAIL_IF(emit_byte(compiler, 1));
		FAIL_IF(emit_vex_instruction(compiler, PACKSSWB_x_xm | VEX_256 | EX86_PREF_66 | EX86_SSE2 | VEX_SSE2_OPV, TMP_FREG, vreg, TMP_FREG, 0));
		FAIL_IF(emit_groupf(compiler, PMOVMSKB_r_x | EX86_PREF_66 | EX86_SSE2_OP2, dst_r, TMP_FREG, 0));
	} else {
		op = MOVMSKPS_r_x | VEX_256 | EX86_SSE2_OP2;

		if (elem_size == 0)
			op = PMOVMSKB_r_x | VEX_256 | EX86_PREF_66 | EX86_SSE2_OP2;
		else if (elem_size == 3)
			op |= EX86_PREF_66;

		FAIL_IF(emit_vex_instruction(compiler, op, dst_r, 0, vreg, 0));
	}

	if (dst_r == TMP_REG1) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		compiler->mode32 = type & SLJIT_32;
#endif /* SLJIT_CONFIG_X86_64 */
		return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
	}

	return SLJIT_SUCCESS;
}

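/* For 16-bit elements there is no word variant of PMOVMSKB, so the words
   are first narrowed with PACKSSWB, which preserves the sign bits. In the
   128-bit non-VEX path the low half of TMP_FREG is undefined after the
   pack, so the mask is shifted right by 8 to keep only the bits that came
   from vreg. */
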
static sljit_s32 emit_simd_mov(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 dst_vreg, sljit_s32 src_vreg)
{
	sljit_uw op = ((type & SLJIT_SIMD_FLOAT) ? MOVAPS_x_xm : MOVDQA_x_xm) | EX86_SSE2;

	SLJIT_ASSERT(SLJIT_SIMD_GET_REG_SIZE(type) == 4);

	if (!(type & SLJIT_SIMD_FLOAT) || SLJIT_SIMD_GET_ELEM_SIZE(type) == 3)
		op |= EX86_PREF_66;

	return emit_groupf(compiler, op, dst_vreg, src_vreg, 0);
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_op2(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 dst_vreg, sljit_s32 src1_vreg, sljit_s32 src2, sljit_sw src2w)
{
	sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
	sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
	sljit_s32 use_vex = (cpu_feature_list & CPU_FEATURE_AVX) && (compiler->options & SLJIT_ENTER_USE_VEX);
	sljit_uw op = 0;
	sljit_uw mov_op = 0;

	CHECK_ERROR();
	CHECK(check_sljit_emit_simd_op2(compiler, type, dst_vreg, src1_vreg, src2, src2w));
	ADJUST_LOCAL_OFFSET(src2, src2w);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif /* SLJIT_CONFIG_X86_64 */

	if (reg_size == 5) {
		if (!(cpu_feature_list & CPU_FEATURE_AVX2))
			return SLJIT_ERR_UNSUPPORTED;
	} else if (reg_size != 4)
		return SLJIT_ERR_UNSUPPORTED;

	if ((type & SLJIT_SIMD_FLOAT) && (elem_size < 2 || elem_size > 3))
		return SLJIT_ERR_UNSUPPORTED;

	switch (SLJIT_SIMD_GET_OPCODE(type)) {
	case SLJIT_SIMD_OP2_AND:
		op = (type & SLJIT_SIMD_FLOAT) ? ANDPD_x_xm : PAND_x_xm;

		if (!(type & SLJIT_SIMD_FLOAT) || elem_size == 3)
			op |= EX86_PREF_66;
		break;
	case SLJIT_SIMD_OP2_OR:
		op = (type & SLJIT_SIMD_FLOAT) ? ORPD_x_xm : POR_x_xm;

		if (!(type & SLJIT_SIMD_FLOAT) || elem_size == 3)
			op |= EX86_PREF_66;
		break;
	case SLJIT_SIMD_OP2_XOR:
		op = (type & SLJIT_SIMD_FLOAT) ? XORPD_x_xm : PXOR_x_xm;

		if (!(type & SLJIT_SIMD_FLOAT) || elem_size == 3)
			op |= EX86_PREF_66;
		break;

	case SLJIT_SIMD_OP2_SHUFFLE:
		if (reg_size != 4)
			return SLJIT_ERR_UNSUPPORTED;

		op = PSHUFB_x_xm | EX86_PREF_66 | VEX_OP_0F38;
		break;
	}

	if (type & SLJIT_SIMD_TEST)
		return SLJIT_SUCCESS;

	if ((src2 & SLJIT_MEM) && SLJIT_SIMD_GET_ELEM2_SIZE(type) < reg_size) {
		mov_op = ((type & SLJIT_SIMD_FLOAT) ? (MOVUPS_x_xm | (elem_size == 3 ? EX86_PREF_66 : 0)) : (MOVDQU_x_xm | EX86_PREF_F3)) | EX86_SSE2;
		if (use_vex)
			FAIL_IF(emit_vex_instruction(compiler, mov_op, TMP_FREG, 0, src2, src2w));
		else
			FAIL_IF(emit_groupf(compiler, mov_op, TMP_FREG, src2, src2w));

		src2 = TMP_FREG;
		src2w = 0;
	}

	if (reg_size == 5 || use_vex) {
		if (reg_size == 5)
			op |= VEX_256;

		return emit_vex_instruction(compiler, op | EX86_SSE2 | VEX_SSE2_OPV, dst_vreg, src1_vreg, src2, src2w);
	}

	if (dst_vreg != src1_vreg) {
		if (dst_vreg == src2) {
			if (SLJIT_SIMD_GET_OPCODE(type) == SLJIT_SIMD_OP2_SHUFFLE) {
				FAIL_IF(emit_simd_mov(compiler, type, TMP_FREG, src2));
				FAIL_IF(emit_simd_mov(compiler, type, dst_vreg, src1_vreg));
				src2 = TMP_FREG;
				src2w = 0;
			} else
				src2 = src1_vreg;
		} else
			FAIL_IF(emit_simd_mov(compiler, type, dst_vreg, src1_vreg));
	}

	if (op & (VEX_OP_0F38 | VEX_OP_0F3A))
		return emit_groupf_ext(compiler, op | EX86_SSE2, dst_vreg, src2, src2w);
	return emit_groupf(compiler, op | EX86_SSE2, dst_vreg, src2, src2w);
}

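/* Without VEX the SSE forms are destructive (dst is also the first source),
   so dst_vreg is preloaded from src1_vreg when they differ. When dst_vreg
   aliases src2, the AND/OR/XOR cases simply swap in src1_vreg as the second
   operand, which is valid because those operations are commutative; only
   PSHUFB needs the extra copy through TMP_FREG. */
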
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_load(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst_reg,
	sljit_s32 mem_reg)
{
	CHECK_ERROR();
	CHECK(check_sljit_emit_atomic_load(compiler, op, dst_reg, mem_reg));

	if ((op & SLJIT_ATOMIC_USE_LS) || GET_OPCODE(op) == SLJIT_MOV_S8 || GET_OPCODE(op) == SLJIT_MOV_S16 || GET_OPCODE(op) == SLJIT_MOV_S32)
		return SLJIT_ERR_UNSUPPORTED;

	if (op & SLJIT_ATOMIC_TEST)
		return SLJIT_SUCCESS;

	SLJIT_SKIP_CHECKS(compiler);
	return sljit_emit_op1(compiler, op & ~SLJIT_ATOMIC_USE_CAS, dst_reg, 0, SLJIT_MEM1(mem_reg), 0);
}

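/* On x86, naturally aligned loads up to the native word size are already
   atomic, so the load half of a CAS pair lowers to a plain MOV (the
   SLJIT_ATOMIC_USE_CAS flag is simply cleared before emitting op1). */
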
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_store(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 src_reg,
	sljit_s32 mem_reg,
	sljit_s32 temp_reg)
{
	sljit_uw pref;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	sljit_s32 saved_reg = TMP_REG1;
	sljit_s32 swap_tmp = 0;
	sljit_sw srcw = 0;
	sljit_sw tempw = 0;
#endif /* SLJIT_CONFIG_X86_32 */

	CHECK_ERROR();
	CHECK(check_sljit_emit_atomic_store(compiler, op, src_reg, mem_reg, temp_reg));
	CHECK_EXTRA_REGS(src_reg, srcw, (void)0);
	CHECK_EXTRA_REGS(temp_reg, tempw, (void)0);

	SLJIT_ASSERT(FAST_IS_REG(src_reg) || src_reg == SLJIT_MEM1(SLJIT_SP));
	SLJIT_ASSERT(FAST_IS_REG(temp_reg) || temp_reg == SLJIT_MEM1(SLJIT_SP));

	if ((op & SLJIT_ATOMIC_USE_LS) || GET_OPCODE(op) == SLJIT_MOV_S8 || GET_OPCODE(op) == SLJIT_MOV_S16 || GET_OPCODE(op) == SLJIT_MOV_S32)
		return SLJIT_ERR_UNSUPPORTED;

	if (op & SLJIT_ATOMIC_TEST)
		return SLJIT_SUCCESS;

	op = GET_OPCODE(op);

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	if (temp_reg == SLJIT_TMP_DEST_REG) {
		FAIL_IF(emit_byte(compiler, XCHG_EAX_r | reg_map[TMP_REG1]));

		if (src_reg == SLJIT_R0)
			src_reg = TMP_REG1;
		if (mem_reg == SLJIT_R0)
			mem_reg = TMP_REG1;

		temp_reg = SLJIT_R0;
		swap_tmp = 1;
	}

	/* Src is virtual register or its low byte is not accessible. */
	if ((src_reg & SLJIT_MEM) || (op == SLJIT_MOV_U8 && reg_map[src_reg] >= 4)) {
		SLJIT_ASSERT(src_reg != SLJIT_R1 && temp_reg != SLJIT_TMP_DEST_REG);

		if (swap_tmp) {
			saved_reg = (mem_reg != SLJIT_R1) ? SLJIT_R1 : SLJIT_R2;

			EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), 0, saved_reg, 0);
			EMIT_MOV(compiler, saved_reg, 0, src_reg, srcw);
		} else
			EMIT_MOV(compiler, TMP_REG1, 0, src_reg, srcw);

		src_reg = saved_reg;

		if (mem_reg == src_reg)
			mem_reg = saved_reg;
	}
#endif /* SLJIT_CONFIG_X86_32 */

	if (temp_reg != SLJIT_R0) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		compiler->mode32 = 0;

		EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_R0, 0);
		EMIT_MOV(compiler, SLJIT_R0, 0, temp_reg, 0);

		if (src_reg == SLJIT_R0)
			src_reg = TMP_REG2;
		if (mem_reg == SLJIT_R0)
			mem_reg = TMP_REG2;
#else /* !SLJIT_CONFIG_X86_64 */
		SLJIT_ASSERT(!swap_tmp);

		if (src_reg == TMP_REG1) {
			if (mem_reg == SLJIT_R0) {
				EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), 0, SLJIT_R1, 0);
				EMIT_MOV(compiler, SLJIT_R1, 0, SLJIT_R0, 0);
				EMIT_MOV(compiler, SLJIT_R0, 0, temp_reg, tempw);

				mem_reg = SLJIT_R1;
				saved_reg = SLJIT_R1;
			} else {
				EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), 0, SLJIT_R0, 0);
				EMIT_MOV(compiler, SLJIT_R0, 0, temp_reg, tempw);
				saved_reg = SLJIT_R0;
			}
		} else {
			EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_R0, 0);
			EMIT_MOV(compiler, SLJIT_R0, 0, temp_reg, tempw);

			if (src_reg == SLJIT_R0)
				src_reg = TMP_REG1;
			if (mem_reg == SLJIT_R0)
				mem_reg = TMP_REG1;
		}
#endif /* SLJIT_CONFIG_X86_64 */
	}

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = op != SLJIT_MOV && op != SLJIT_MOV_P;
#endif /* SLJIT_CONFIG_X86_64 */

	/* Lock prefix. */
	FAIL_IF(emit_byte(compiler, GROUP_LOCK));

	pref = 0;
	if (op == SLJIT_MOV_U16)
		pref = EX86_HALF_ARG | EX86_PREF_66;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (op == SLJIT_MOV_U8)
		pref = EX86_REX;
#endif /* SLJIT_CONFIG_X86_64 */

	FAIL_IF(emit_groupf(compiler, (op == SLJIT_MOV_U8 ? CMPXCHG_rm8_r : CMPXCHG_rm_r) | pref, src_reg, SLJIT_MEM1(mem_reg), 0));

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	if (swap_tmp) {
		SLJIT_ASSERT(temp_reg == SLJIT_R0);
		FAIL_IF(emit_byte(compiler, XCHG_EAX_r | reg_map[TMP_REG1]));

		if (saved_reg != TMP_REG1)
			return emit_mov(compiler, saved_reg, 0, SLJIT_MEM1(SLJIT_SP), 0);
		return SLJIT_SUCCESS;
	}
#endif /* SLJIT_CONFIG_X86_32 */

	if (temp_reg != SLJIT_R0) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		compiler->mode32 = 0;
		return emit_mov(compiler, SLJIT_R0, 0, TMP_REG2, 0);
#else /* !SLJIT_CONFIG_X86_64 */
		EMIT_MOV(compiler, SLJIT_R0, 0, (saved_reg == SLJIT_R0) ? SLJIT_MEM1(SLJIT_SP) : saved_reg, 0);
		if (saved_reg == SLJIT_R1)
			return emit_mov(compiler, SLJIT_R1, 0, SLJIT_MEM1(SLJIT_SP), 0);
#endif /* SLJIT_CONFIG_X86_64 */
	}
	return SLJIT_SUCCESS;
}

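/* The store is a LOCK CMPXCHG. CMPXCHG has an implicit operand: it compares
   EAX/RAX with the memory word and, on failure, loads the current value
   back into EAX/RAX. The register shuffling above exists to route temp_reg
   (the expected old value) into R0 while keeping src_reg and mem_reg
   intact, and to restore R0 afterwards. */
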
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_local_base(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw offset)
{
	CHECK_ERROR();
	CHECK(check_sljit_get_local_base(compiler, dst, dstw, offset));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	CHECK_EXTRA_REGS(dst, dstw, (void)0);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 0;
#endif

	ADJUST_LOCAL_OFFSET(SLJIT_MEM1(SLJIT_SP), offset);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (NOT_HALFWORD(offset)) {
		FAIL_IF(emit_load_imm64(compiler, TMP_REG1, offset));
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
		SLJIT_ASSERT(emit_lea_binary(compiler, dst, dstw, SLJIT_SP, 0, TMP_REG1, 0) != SLJIT_ERR_UNSUPPORTED);
		return compiler->error;
#else
		return emit_lea_binary(compiler, dst, dstw, SLJIT_SP, 0, TMP_REG1, 0);
#endif
	}
#endif

	if (offset != 0)
		return emit_lea_binary(compiler, dst, dstw, SLJIT_SP, 0, SLJIT_IMM, offset);
	return emit_mov(compiler, dst, dstw, SLJIT_SP, 0);
}

SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
{
	sljit_u8 *inst;
	struct sljit_const *const_;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	sljit_s32 reg;
#endif

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	CHECK_EXTRA_REGS(dst, dstw, (void)0);

	const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
	PTR_FAIL_IF(!const_);
	set_const(const_, compiler);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 0;
	reg = FAST_IS_REG(dst) ? dst : TMP_REG1;

	if (emit_load_imm64(compiler, reg, init_value))
		return NULL;
#else
	if (emit_mov(compiler, dst, dstw, SLJIT_IMM, init_value))
		return NULL;
#endif

	inst = (sljit_u8*)ensure_buf(compiler, 1);
	PTR_FAIL_IF(!inst);

	inst[0] = SLJIT_INST_CONST;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (dst & SLJIT_MEM)
		if (emit_mov(compiler, dst, dstw, TMP_REG1, 0))
			return NULL;
#endif

	return const_;
}

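/* Usage sketch (assuming the public sljit API, where sljit_get_const_addr
   returns the patchable address after code generation):

     struct sljit_const *c = sljit_emit_const(compiler, SLJIT_R0, 0, 123);
     ... generate the code, then later patch the embedded constant:
     sljit_set_const(sljit_get_const_addr(c), 456, executable_offset);
*/
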
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_mov_addr(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
	struct sljit_jump *jump;
	sljit_u8 *inst;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	sljit_s32 reg;
#endif /* SLJIT_CONFIG_X86_64 */

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_mov_addr(compiler, dst, dstw));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	CHECK_EXTRA_REGS(dst, dstw, (void)0);

	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
	PTR_FAIL_IF(!jump);
	set_mov_addr(jump, compiler, 0);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 0;
	reg = FAST_IS_REG(dst) ? dst : TMP_REG1;

	PTR_FAIL_IF(emit_load_imm64(compiler, reg, 0));
	jump->addr = compiler->size;

	if (reg_map[reg] >= 8)
		jump->flags |= MOV_ADDR_HI;
#else /* !SLJIT_CONFIG_X86_64 */
	PTR_FAIL_IF(emit_mov(compiler, dst, dstw, SLJIT_IMM, 0));
#endif /* SLJIT_CONFIG_X86_64 */

	inst = (sljit_u8*)ensure_buf(compiler, 1);
	PTR_FAIL_IF(!inst);

	inst[0] = SLJIT_INST_MOV_ADDR;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (dst & SLJIT_MEM)
		PTR_FAIL_IF(emit_mov(compiler, dst, dstw, TMP_REG1, 0));
#endif /* SLJIT_CONFIG_X86_64 */

	return jump;
}

SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
{
	SLJIT_UNUSED_ARG(executable_offset);

	SLJIT_UPDATE_WX_FLAGS((void*)addr, (void*)(addr + sizeof(sljit_uw)), 0);
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	sljit_unaligned_store_sw((void*)addr, (sljit_sw)(new_target - (addr + 4) - (sljit_uw)executable_offset));
#else
	sljit_unaligned_store_sw((void*)addr, (sljit_sw)new_target);
#endif
	SLJIT_UPDATE_WX_FLAGS((void*)addr, (void*)(addr + sizeof(sljit_uw)), 1);
}

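/* On x86-32 the stored field is a rel32 displacement, so the new value is
   computed relative to the end of the 4-byte field (addr + 4) and adjusted
   by the executable offset when the writable and executable mappings
   differ; on x86-64 patchable jumps go through a 64-bit immediate and the
   absolute target is stored instead. */
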
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
{
	SLJIT_UNUSED_ARG(executable_offset);

	SLJIT_UPDATE_WX_FLAGS((void*)addr, (void*)(addr + sizeof(sljit_sw)), 0);
	sljit_unaligned_store_sw((void*)addr, new_constant);
	SLJIT_UPDATE_WX_FLAGS((void*)addr, (void*)(addr + sizeof(sljit_sw)), 1);
}