/*
 * Stack-less Just-In-Time compiler
 *
 * Copyright Zoltan Herczeg ([email protected]). All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification, are
 * permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this list of
 * conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice, this list
 * of conditions and the following disclaimer in the documentation and/or other materials
 * provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
 * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
{
	return "x86" SLJIT_CPUINFO;
}

/*
   32b register indexes:
   0 - EAX
   1 - ECX
   2 - EDX
   3 - EBX
   4 - ESP
   5 - EBP
   6 - ESI
   7 - EDI
*/

/*
   64b register indexes:
   0 - RAX
   1 - RCX
   2 - RDX
   3 - RBX
   4 - RSP
   5 - RBP
   6 - RSI
   7 - RDI
   8 - R8 - a REX prefix is required from this point on
   9 - R9
   10 - R10
   11 - R11
   12 - R12
   13 - R13
   14 - R14
   15 - R15
*/

#define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
#define TMP_FREG (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)

static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 3] = {
	0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 5, 7, 6, 4, 3
};

static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2] = {
	0, 1, 2, 3, 4, 5, 6, 7, 0
};

#define CHECK_EXTRA_REGS(p, w, do) \
	if (p >= SLJIT_R3 && p <= SLJIT_S3) { \
		w = (2 * SSIZE_OF(sw)) + ((p) - SLJIT_R3) * SSIZE_OF(sw); \
		p = SLJIT_MEM1(SLJIT_SP); \
		do; \
	}
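
/* A minimal illustration of CHECK_EXTRA_REGS: on x86-32 the registers above
   SLJIT_R2/SLJIT_S2 have no hardware mapping (their reg_map entries are 0),
   so they live in fixed stack slots. For example, an access to SLJIT_R4 is
   rewritten to SLJIT_MEM1(SLJIT_SP) with
   w = (2 + (SLJIT_R4 - SLJIT_R3)) * SSIZE_OF(sw), i.e. the third machine-word
   slot above SLJIT_SP. */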

#else /* SLJIT_CONFIG_X86_32 */

#define TMP_REG2 (SLJIT_NUMBER_OF_REGISTERS + 3)

/* Note: r12 & 0x7 == 0b100, which is decoded as "SIB byte present".
   Note: avoid using r12 and r13 for memory addressing;
   therefore r12 is better used as a higher saved register. */
#ifndef _WIN64
/* Args: rdi(=7), rsi(=6), rdx(=2), rcx(=1), r8, r9. Scratches: rax(=0), r10, r11 */
static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 4] = {
	0, 0, 6, 7, 1, 8, 11, 10, 12, 5, 13, 14, 15, 3, 4, 2, 9
};
/* low-map. reg_map & 0x7. */
static const sljit_u8 reg_lmap[SLJIT_NUMBER_OF_REGISTERS + 4] = {
	0, 0, 6, 7, 1, 0, 3, 2, 4, 5, 5, 6, 7, 3, 4, 2, 1
};
#else
/* Args: rcx(=1), rdx(=2), r8, r9. Scratches: rax(=0), r10, r11 */
static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 4] = {
	0, 0, 2, 8, 1, 11, 12, 5, 13, 14, 15, 7, 6, 3, 4, 9, 10
};
/* low-map. reg_map & 0x7. */
static const sljit_u8 reg_lmap[SLJIT_NUMBER_OF_REGISTERS + 4] = {
	0, 0, 2, 0, 1, 3, 4, 5, 5, 6, 7, 7, 6, 3, 4, 1, 2
};
#endif

/* Args: xmm0-xmm3 */
static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2] = {
	0, 0, 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 4
};
/* low-map. freg_map & 0x7. */
static const sljit_u8 freg_lmap[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2] = {
	0, 0, 1, 2, 3, 5, 6, 7, 0, 1, 2, 3, 4, 5, 6, 7, 4
};

#define REX_W 0x48
#define REX_R 0x44
#define REX_X 0x42
#define REX_B 0x41
#define REX 0x40
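
/* A REX prefix is built by OR-ing these bits together. For example,
   "add r8, rax" (64-bit operand size, r8 in the ModR/M r/m field) encodes as
   REX_W | REX_B = 0x49, then ADD_rm_r (0x01), then ModR/M 0xc0:
   49 01 c0. */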

#ifndef _WIN64
#define HALFWORD_MAX 0x7fffffffl
#define HALFWORD_MIN -0x80000000l
#else
#define HALFWORD_MAX 0x7fffffffll
#define HALFWORD_MIN -0x80000000ll
#endif

#define IS_HALFWORD(x) ((x) <= HALFWORD_MAX && (x) >= HALFWORD_MIN)
#define NOT_HALFWORD(x) ((x) > HALFWORD_MAX || (x) < HALFWORD_MIN)

#define CHECK_EXTRA_REGS(p, w, do)

#endif /* SLJIT_CONFIG_X86_32 */

#define U8(v) ((sljit_u8)(v))

/* Size flags for emit_x86_instruction: */
#define EX86_BIN_INS ((sljit_uw)0x000010)
#define EX86_SHIFT_INS ((sljit_uw)0x000020)
#define EX86_BYTE_ARG ((sljit_uw)0x000040)
#define EX86_HALF_ARG ((sljit_uw)0x000080)
/* Size flags for both emit_x86_instruction and emit_vex_instruction: */
#define EX86_REX ((sljit_uw)0x000100)
#define EX86_NO_REXW ((sljit_uw)0x000200)
#define EX86_PREF_66 ((sljit_uw)0x000400)
#define EX86_PREF_F2 ((sljit_uw)0x000800)
#define EX86_PREF_F3 ((sljit_uw)0x001000)
#define EX86_SSE2_OP1 ((sljit_uw)0x002000)
#define EX86_SSE2_OP2 ((sljit_uw)0x004000)
#define EX86_SSE2 (EX86_SSE2_OP1 | EX86_SSE2_OP2)
#define EX86_VEX_EXT ((sljit_uw)0x008000)
/* Op flags for emit_vex_instruction: */
#define VEX_OP_0F38 ((sljit_uw)0x010000)
#define VEX_OP_0F3A ((sljit_uw)0x020000)
#define VEX_SSE2_OPV ((sljit_uw)0x040000)
#define VEX_AUTO_W ((sljit_uw)0x080000)
#define VEX_W ((sljit_uw)0x100000)
#define VEX_256 ((sljit_uw)0x200000)

#define EX86_SELECT_66(op) (((op) & SLJIT_32) ? 0 : EX86_PREF_66)
#define EX86_SELECT_F2_F3(op) (((op) & SLJIT_32) ? EX86_PREF_F3 : EX86_PREF_F2)

/* --------------------------------------------------------------------- */
/* Instruction forms */
/* --------------------------------------------------------------------- */

#define ADD (/* BINARY */ 0 << 3)
#define ADD_EAX_i32 0x05
#define ADD_r_rm 0x03
#define ADD_rm_r 0x01
#define ADDSD_x_xm 0x58
#define ADC (/* BINARY */ 2 << 3)
#define ADC_EAX_i32 0x15
#define ADC_r_rm 0x13
#define ADC_rm_r 0x11
#define AND (/* BINARY */ 4 << 3)
#define AND_EAX_i32 0x25
#define AND_r_rm 0x23
#define AND_rm_r 0x21
#define ANDPD_x_xm 0x54
#define BSR_r_rm (/* GROUP_0F */ 0xbd)
#define BSF_r_rm (/* GROUP_0F */ 0xbc)
#define BSWAP_r (/* GROUP_0F */ 0xc8)
#define CALL_i32 0xe8
#define CALL_rm (/* GROUP_FF */ 2 << 3)
#define CDQ 0x99
#define CMOVE_r_rm (/* GROUP_0F */ 0x44)
#define CMP (/* BINARY */ 7 << 3)
#define CMP_EAX_i32 0x3d
#define CMP_r_rm 0x3b
#define CMP_rm_r 0x39
#define CMPS_x_xm 0xc2
#define CMPXCHG_rm_r 0xb1
#define CMPXCHG_rm8_r 0xb0
#define CVTPD2PS_x_xm 0x5a
#define CVTPS2PD_x_xm 0x5a
#define CVTSI2SD_x_rm 0x2a
#define CVTTSD2SI_r_xm 0x2c
#define DIV (/* GROUP_F7 */ 6 << 3)
#define DIVSD_x_xm 0x5e
#define EXTRACTPS_x_xm 0x17
#define FLDS 0xd9
#define FLDL 0xdd
#define FSTPS 0xd9
#define FSTPD 0xdd
#define INSERTPS_x_xm 0x21
#define INT3 0xcc
#define IDIV (/* GROUP_F7 */ 7 << 3)
#define IMUL (/* GROUP_F7 */ 5 << 3)
#define IMUL_r_rm (/* GROUP_0F */ 0xaf)
#define IMUL_r_rm_i8 0x6b
#define IMUL_r_rm_i32 0x69
#define JL_i8 0x7c
#define JE_i8 0x74
#define JNC_i8 0x73
#define JNE_i8 0x75
#define JMP_i8 0xeb
#define JMP_i32 0xe9
#define JMP_rm (/* GROUP_FF */ 4 << 3)
#define LEA_r_m 0x8d
#define LOOP_i8 0xe2
#define LZCNT_r_rm (/* GROUP_F3 */ /* GROUP_0F */ 0xbd)
#define MOV_r_rm 0x8b
#define MOV_r_i32 0xb8
#define MOV_rm_r 0x89
#define MOV_rm_i32 0xc7
#define MOV_rm8_i8 0xc6
#define MOV_rm8_r8 0x88
#define MOVAPS_x_xm 0x28
#define MOVAPS_xm_x 0x29
#define MOVD_x_rm 0x6e
#define MOVD_rm_x 0x7e
#define MOVDDUP_x_xm 0x12
#define MOVDQA_x_xm 0x6f
#define MOVDQA_xm_x 0x7f
#define MOVDQU_x_xm 0x6f
#define MOVHLPS_x_x 0x12
#define MOVHPD_m_x 0x17
#define MOVHPD_x_m 0x16
#define MOVLHPS_x_x 0x16
#define MOVLPD_m_x 0x13
#define MOVLPD_x_m 0x12
#define MOVMSKPS_r_x (/* GROUP_0F */ 0x50)
#define MOVQ_x_xm (/* GROUP_0F */ 0x7e)
#define MOVSD_x_xm 0x10
#define MOVSD_xm_x 0x11
#define MOVSHDUP_x_xm 0x16
#define MOVSXD_r_rm 0x63
#define MOVSX_r_rm8 (/* GROUP_0F */ 0xbe)
#define MOVSX_r_rm16 (/* GROUP_0F */ 0xbf)
#define MOVUPS_x_xm 0x10
#define MOVZX_r_rm8 (/* GROUP_0F */ 0xb6)
#define MOVZX_r_rm16 (/* GROUP_0F */ 0xb7)
#define MUL (/* GROUP_F7 */ 4 << 3)
#define MULSD_x_xm 0x59
#define NEG_rm (/* GROUP_F7 */ 3 << 3)
#define NOP 0x90
#define NOT_rm (/* GROUP_F7 */ 2 << 3)
#define OR (/* BINARY */ 1 << 3)
#define OR_r_rm 0x0b
#define OR_EAX_i32 0x0d
#define OR_rm_r 0x09
#define OR_rm8_r8 0x08
#define ORPD_x_xm 0x56
#define PACKSSWB_x_xm (/* GROUP_0F */ 0x63)
#define PAND_x_xm 0xdb
#define PCMPEQD_x_xm 0x76
#define PINSRB_x_rm_i8 0x20
#define PINSRW_x_rm_i8 0xc4
#define PINSRD_x_rm_i8 0x22
#define PEXTRB_rm_x_i8 0x14
#define PEXTRW_rm_x_i8 0x15
#define PEXTRD_rm_x_i8 0x16
#define PMOVMSKB_r_x (/* GROUP_0F */ 0xd7)
#define PMOVSXBD_x_xm 0x21
#define PMOVSXBQ_x_xm 0x22
#define PMOVSXBW_x_xm 0x20
#define PMOVSXDQ_x_xm 0x25
#define PMOVSXWD_x_xm 0x23
#define PMOVSXWQ_x_xm 0x24
#define PMOVZXBD_x_xm 0x31
#define PMOVZXBQ_x_xm 0x32
#define PMOVZXBW_x_xm 0x30
#define PMOVZXDQ_x_xm 0x35
#define PMOVZXWD_x_xm 0x33
#define PMOVZXWQ_x_xm 0x34
#define POP_r 0x58
#define POP_rm 0x8f
#define POPF 0x9d
#define POR_x_xm 0xeb
#define PREFETCH 0x18
#define PSHUFB_x_xm 0x00
#define PSHUFD_x_xm 0x70
#define PSHUFLW_x_xm 0x70
#define PSRLDQ_x 0x73
#define PSLLD_x_i8 0x72
#define PSLLQ_x_i8 0x73
#define PUSH_i32 0x68
#define PUSH_r 0x50
#define PUSH_rm (/* GROUP_FF */ 6 << 3)
#define PUSHF 0x9c
#define PXOR_x_xm 0xef
#define ROL (/* SHIFT */ 0 << 3)
#define ROR (/* SHIFT */ 1 << 3)
#define RET_near 0xc3
#define RET_i16 0xc2
#define SBB (/* BINARY */ 3 << 3)
#define SBB_EAX_i32 0x1d
#define SBB_r_rm 0x1b
#define SBB_rm_r 0x19
#define SAR (/* SHIFT */ 7 << 3)
#define SHL (/* SHIFT */ 4 << 3)
#define SHLD (/* GROUP_0F */ 0xa5)
#define SHRD (/* GROUP_0F */ 0xad)
#define SHR (/* SHIFT */ 5 << 3)
#define SHUFPS_x_xm 0xc6
#define SUB (/* BINARY */ 5 << 3)
#define SUB_EAX_i32 0x2d
#define SUB_r_rm 0x2b
#define SUB_rm_r 0x29
#define SUBSD_x_xm 0x5c
#define TEST_EAX_i32 0xa9
#define TEST_rm_r 0x85
#define TZCNT_r_rm (/* GROUP_F3 */ /* GROUP_0F */ 0xbc)
#define UCOMISD_x_xm 0x2e
#define UNPCKLPD_x_xm 0x14
#define UNPCKLPS_x_xm 0x14
#define VBROADCASTSD_x_xm 0x19
#define VBROADCASTSS_x_xm 0x18
#define VEXTRACTF128_x_ym 0x19
#define VEXTRACTI128_x_ym 0x39
#define VINSERTF128_y_y_xm 0x18
#define VINSERTI128_y_y_xm 0x38
#define VPBROADCASTB_x_xm 0x78
#define VPBROADCASTD_x_xm 0x58
#define VPBROADCASTQ_x_xm 0x59
#define VPBROADCASTW_x_xm 0x79
#define VPERMPD_y_ym 0x01
#define VPERMQ_y_ym 0x00
#define XCHG_EAX_r 0x90
#define XCHG_r_rm 0x87
#define XOR (/* BINARY */ 6 << 3)
#define XOR_EAX_i32 0x35
#define XOR_r_rm 0x33
#define XOR_rm_r 0x31
#define XORPD_x_xm 0x57

#define GROUP_0F 0x0f
#define GROUP_66 0x66
#define GROUP_F3 0xf3
#define GROUP_F7 0xf7
#define GROUP_FF 0xff
#define GROUP_BINARY_81 0x81
#define GROUP_BINARY_83 0x83
#define GROUP_SHIFT_1 0xd1
#define GROUP_SHIFT_N 0xc1
#define GROUP_SHIFT_CL 0xd3
#define GROUP_LOCK 0xf0

#define MOD_REG 0xc0
#define MOD_DISP8 0x40

#define INC_SIZE(s) (*inst++ = U8(s), compiler->size += (s))

#define PUSH_REG(r) (*inst++ = U8(PUSH_r + (r)))
#define POP_REG(r) (*inst++ = U8(POP_r + (r)))
#define RET() (*inst++ = RET_near)
#define RET_I16(n) (*inst++ = RET_i16, *inst++ = U8(n), *inst++ = 0)
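
/* These helpers assume "inst" points into a buffer returned by ensure_buf()
   and that INC_SIZE() accounts for every byte written. A minimal sketch of
   the usual emit pattern (compare emit_byte() below):

	inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
	FAIL_IF(!inst);
	INC_SIZE(2);
	PUSH_REG(reg_map[reg]);
	RET();
*/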

#define SLJIT_INST_LABEL 255
#define SLJIT_INST_JUMP 254
#define SLJIT_INST_MOV_ADDR 253
#define SLJIT_INST_CONST 252

/* Multithreading does not affect these static variables, since they store
   built-in CPU features. Therefore they can be overwritten by different threads
   if they detect the CPU features at the same time. */
#define CPU_FEATURE_DETECTED 0x001
#if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
#define CPU_FEATURE_SSE2 0x002
#endif
#define CPU_FEATURE_SSE41 0x004
#define CPU_FEATURE_LZCNT 0x008
#define CPU_FEATURE_TZCNT 0x010
#define CPU_FEATURE_CMOV 0x020
#define CPU_FEATURE_AVX 0x040
#define CPU_FEATURE_AVX2 0x080
#define CPU_FEATURE_OSXSAVE 0x100

static sljit_u32 cpu_feature_list = 0;

#ifdef _WIN32_WCE
#include <cmnintrin.h>
#elif defined(_MSC_VER) && _MSC_VER >= 1400
#include <intrin.h>
#elif defined(__INTEL_COMPILER)
#include <cpuid.h>
#endif

#if (defined(_MSC_VER) && _MSC_VER >= 1400) || defined(__INTEL_COMPILER) \
	|| (defined(__INTEL_LLVM_COMPILER) && defined(__XSAVE__))
#include <immintrin.h>
#endif

/******************************************************/
/* Unaligned-store functions */
/******************************************************/

static SLJIT_INLINE void sljit_unaligned_store_s16(void *addr, sljit_s16 value)
{
	SLJIT_MEMCPY(addr, &value, sizeof(value));
}

static SLJIT_INLINE void sljit_unaligned_store_s32(void *addr, sljit_s32 value)
{
	SLJIT_MEMCPY(addr, &value, sizeof(value));
}

static SLJIT_INLINE void sljit_unaligned_store_sw(void *addr, sljit_sw value)
{
	SLJIT_MEMCPY(addr, &value, sizeof(value));
}

/******************************************************/
/* Utility functions */
/******************************************************/

static void execute_cpu_id(sljit_u32 info[4])
{
#if (defined(_MSC_VER) && _MSC_VER >= 1400) \
	|| (defined(__INTEL_COMPILER) && __INTEL_COMPILER == 2021 && __INTEL_COMPILER_UPDATE >= 7)

	__cpuidex((int*)info, (int)info[0], (int)info[2]);

#elif (defined(__INTEL_COMPILER) && __INTEL_COMPILER >= 1900)

	__get_cpuid_count(info[0], info[2], info, info + 1, info + 2, info + 3);

#elif (defined(_MSC_VER) || defined(__INTEL_COMPILER)) \
	&& (defined(SLJIT_CONFIG_X86_32) && SLJIT_CONFIG_X86_32)

	/* Intel syntax. */
	__asm {
		mov esi, info
		mov eax, [esi]
		mov ecx, [esi + 8]
		cpuid
		mov [esi], eax
		mov [esi + 4], ebx
		mov [esi + 8], ecx
		mov [esi + 12], edx
	}

#else

	__asm__ __volatile__ (
		"cpuid\n"
		: "=a" (info[0]), "=b" (info[1]), "=c" (info[2]), "=d" (info[3])
		: "0" (info[0]), "2" (info[2])
	);

#endif
}
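
/* execute_cpu_id() follows the CPUID register convention: on entry info[0]
   holds the leaf (EAX) and info[2] the subleaf (ECX); on return info[0..3]
   hold EAX, EBX, ECX and EDX. For example, leaf 7 / subleaf 0 is queried as:

	sljit_u32 info[4] = { 7, 0, 0, 0 };
	execute_cpu_id(info);
	// info[1] now holds the leaf-7 EBX feature bits.
*/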

static sljit_u32 execute_get_xcr0_low(void)
{
	sljit_u32 xcr0;

#if (defined(_MSC_VER) && _MSC_VER >= 1400) || defined(__INTEL_COMPILER) \
	|| (defined(__INTEL_LLVM_COMPILER) && defined(__XSAVE__))

	xcr0 = (sljit_u32)_xgetbv(0);

#elif defined(__TINYC__)

	__asm__ (
		"xorl %%ecx, %%ecx\n"
		".byte 0x0f\n"
		".byte 0x01\n"
		".byte 0xd0\n"
		: "=a" (xcr0)
		:
#if defined(SLJIT_CONFIG_X86_32) && SLJIT_CONFIG_X86_32
		: "ecx", "edx"
#else /* !SLJIT_CONFIG_X86_32 */
		: "rcx", "rdx"
#endif /* SLJIT_CONFIG_X86_32 */
	);

#elif (defined(__INTEL_LLVM_COMPILER) && __INTEL_LLVM_COMPILER < 20220100) \
	|| (defined(__clang__) && __clang_major__ < 14) \
	|| (defined(__GNUC__) && __GNUC__ < 3) \
	|| defined(__SUNPRO_C) || defined(__SUNPRO_CC)

	/* AT&T syntax. */
	__asm__ (
		"xorl %%ecx, %%ecx\n"
		"xgetbv\n"
		: "=a" (xcr0)
		:
#if defined(SLJIT_CONFIG_X86_32) && SLJIT_CONFIG_X86_32
		: "ecx", "edx"
#else /* !SLJIT_CONFIG_X86_32 */
		: "rcx", "rdx"
#endif /* SLJIT_CONFIG_X86_32 */
	);

#elif defined(_MSC_VER)

	/* Intel syntax. */
	__asm {
		xor ecx, ecx
		xgetbv
		mov xcr0, eax
	}

#else

	__asm__ (
		"xor{l %%ecx, %%ecx | ecx, ecx}\n"
		"xgetbv\n"
		: "=a" (xcr0)
		:
#if defined(SLJIT_CONFIG_X86_32) && SLJIT_CONFIG_X86_32
		: "ecx", "edx"
#else /* !SLJIT_CONFIG_X86_32 */
		: "rcx", "rdx"
#endif /* SLJIT_CONFIG_X86_32 */
	);

#endif
	return xcr0;
}
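
/* XCR0 bit 1 covers the XMM state and bit 2 the YMM state; get_cpu_features()
   below checks bit 2 (0x4) before trusting the CPUID AVX/AVX2 bits, since the
   OS must enable YMM state saving for AVX to be usable. */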

static void get_cpu_features(void)
{
	sljit_u32 feature_list = CPU_FEATURE_DETECTED;
	sljit_u32 info[4] = {0};
	sljit_u32 max_id;

	execute_cpu_id(info);
	max_id = info[0];

	if (max_id >= 7) {
		info[0] = 7;
		info[2] = 0;
		execute_cpu_id(info);

		if (info[1] & 0x8) /* EBX bit 3: BMI1 (tzcnt). */
			feature_list |= CPU_FEATURE_TZCNT;
		if (info[1] & 0x20) /* EBX bit 5: AVX2. */
			feature_list |= CPU_FEATURE_AVX2;
	}

	if (max_id >= 1) {
		info[0] = 1;
#if defined(SLJIT_CONFIG_X86_32) && SLJIT_CONFIG_X86_32
		/* Winchip 2 and Cyrix MII bugs */
		info[1] = info[2] = 0;
#endif
		execute_cpu_id(info);

		if (info[2] & 0x80000) /* ECX bit 19: SSE4.1. */
			feature_list |= CPU_FEATURE_SSE41;
		if (info[2] & 0x8000000) /* ECX bit 27: OSXSAVE. */
			feature_list |= CPU_FEATURE_OSXSAVE;
		if (info[2] & 0x10000000) /* ECX bit 28: AVX. */
			feature_list |= CPU_FEATURE_AVX;
#if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
		if (info[3] & 0x4000000) /* EDX bit 26: SSE2. */
			feature_list |= CPU_FEATURE_SSE2;
#endif
		if (info[3] & 0x8000) /* EDX bit 15: CMOV. */
			feature_list |= CPU_FEATURE_CMOV;
	}

	info[0] = 0x80000000;
	execute_cpu_id(info);
	max_id = info[0];

	if (max_id >= 0x80000001) {
		info[0] = 0x80000001;
		execute_cpu_id(info);

		if (info[2] & 0x20) /* ECX bit 5: LZCNT (ABM). */
			feature_list |= CPU_FEATURE_LZCNT;
	}

	if ((feature_list & CPU_FEATURE_OSXSAVE) && (execute_get_xcr0_low() & 0x4) == 0)
		feature_list &= ~(sljit_u32)(CPU_FEATURE_AVX | CPU_FEATURE_AVX2);

	cpu_feature_list = feature_list;
}

static sljit_u8 get_jump_code(sljit_uw type)
{
	switch (type) {
	case SLJIT_EQUAL:
	case SLJIT_ATOMIC_STORED:
	case SLJIT_F_EQUAL:
	case SLJIT_UNORDERED_OR_EQUAL:
		return 0x84 /* je */;

	case SLJIT_NOT_EQUAL:
	case SLJIT_ATOMIC_NOT_STORED:
	case SLJIT_F_NOT_EQUAL:
	case SLJIT_ORDERED_NOT_EQUAL:
		return 0x85 /* jne */;

	case SLJIT_LESS:
	case SLJIT_CARRY:
	case SLJIT_F_LESS:
	case SLJIT_UNORDERED_OR_LESS:
	case SLJIT_UNORDERED_OR_GREATER:
		return 0x82 /* jc */;

	case SLJIT_GREATER_EQUAL:
	case SLJIT_NOT_CARRY:
	case SLJIT_F_GREATER_EQUAL:
	case SLJIT_ORDERED_GREATER_EQUAL:
	case SLJIT_ORDERED_LESS_EQUAL:
		return 0x83 /* jae */;

	case SLJIT_GREATER:
	case SLJIT_F_GREATER:
	case SLJIT_ORDERED_LESS:
	case SLJIT_ORDERED_GREATER:
		return 0x87 /* jnbe */;

	case SLJIT_LESS_EQUAL:
	case SLJIT_F_LESS_EQUAL:
	case SLJIT_UNORDERED_OR_GREATER_EQUAL:
	case SLJIT_UNORDERED_OR_LESS_EQUAL:
		return 0x86 /* jbe */;

	case SLJIT_SIG_LESS:
		return 0x8c /* jl */;

	case SLJIT_SIG_GREATER_EQUAL:
		return 0x8d /* jnl */;

	case SLJIT_SIG_GREATER:
		return 0x8f /* jnle */;

	case SLJIT_SIG_LESS_EQUAL:
		return 0x8e /* jle */;

	case SLJIT_OVERFLOW:
		return 0x80 /* jo */;

	case SLJIT_NOT_OVERFLOW:
		return 0x81 /* jno */;

	case SLJIT_UNORDERED:
	case SLJIT_ORDERED_EQUAL: /* NaN. */
		return 0x8a /* jp */;

	case SLJIT_ORDERED:
	case SLJIT_UNORDERED_OR_NOT_EQUAL: /* Not NaN. */
		return 0x8b /* jpo */;
	}
	return 0;
}
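
/* The returned value is the second opcode byte of the near (rel32) form,
   0x0f 0x8y. The short form used by detect_near_jump_type() encodes the same
   condition as a single 0x7y opcode, i.e. the value above minus 0x10:
   for example "je" is 0f 84 rel32 near and 74 rel8 short. */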

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
static sljit_u8* detect_far_jump_type(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_sw executable_offset);
#else /* !SLJIT_CONFIG_X86_32 */
static sljit_u8* detect_far_jump_type(struct sljit_jump *jump, sljit_u8 *code_ptr);
static sljit_u8* generate_mov_addr_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_u8 *code, sljit_sw executable_offset);
#endif /* SLJIT_CONFIG_X86_32 */

static sljit_u8* detect_near_jump_type(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_u8 *code, sljit_sw executable_offset)
{
	sljit_uw type = jump->flags >> TYPE_SHIFT;
	sljit_s32 short_jump;
	sljit_uw label_addr;
	sljit_uw jump_addr;

	jump_addr = (sljit_uw)code_ptr;
	if (!(jump->flags & JUMP_ADDR)) {
		label_addr = (sljit_uw)(code + jump->u.label->size);

		if (jump->u.label->size > jump->addr)
			jump_addr = (sljit_uw)(code + jump->addr);
	} else
		label_addr = jump->u.target - (sljit_uw)executable_offset;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if ((sljit_sw)(label_addr - (jump_addr + 6)) > HALFWORD_MAX || (sljit_sw)(label_addr - (jump_addr + 5)) < HALFWORD_MIN)
		return detect_far_jump_type(jump, code_ptr);
#endif /* SLJIT_CONFIG_X86_64 */

	short_jump = (sljit_sw)(label_addr - (jump_addr + 2)) >= -0x80 && (sljit_sw)(label_addr - (jump_addr + 2)) <= 0x7f;

	if (type == SLJIT_JUMP) {
		if (short_jump)
			*code_ptr++ = JMP_i8;
		else
			*code_ptr++ = JMP_i32;
	} else if (type > SLJIT_JUMP) {
		short_jump = 0;
		*code_ptr++ = CALL_i32;
	} else if (short_jump) {
		*code_ptr++ = U8(get_jump_code(type) - 0x10);
	} else {
		*code_ptr++ = GROUP_0F;
		*code_ptr++ = get_jump_code(type);
	}

	jump->addr = (sljit_uw)code_ptr;

	if (short_jump) {
		jump->flags |= PATCH_MB;
		code_ptr += sizeof(sljit_s8);
	} else {
		jump->flags |= PATCH_MW;
		code_ptr += sizeof(sljit_s32);
	}

	return code_ptr;
}

static void generate_jump_or_mov_addr(struct sljit_jump *jump, sljit_sw executable_offset)
{
	sljit_uw flags = jump->flags;
	sljit_uw addr = (flags & JUMP_ADDR) ? jump->u.target : jump->u.label->u.addr;
	sljit_uw jump_addr = jump->addr;
	SLJIT_UNUSED_ARG(executable_offset);

	if (SLJIT_UNLIKELY(flags & JUMP_MOV_ADDR)) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		sljit_unaligned_store_sw((void*)(jump_addr - sizeof(sljit_sw)), (sljit_sw)addr);
#else /* SLJIT_CONFIG_X86_32 */
		if (flags & PATCH_MD) {
			SLJIT_ASSERT(addr > HALFWORD_MAX);
			sljit_unaligned_store_sw((void*)(jump_addr - sizeof(sljit_sw)), (sljit_sw)addr);
			return;
		}

		if (flags & PATCH_MW) {
			addr -= (sljit_uw)SLJIT_ADD_EXEC_OFFSET((sljit_u8*)jump_addr, executable_offset);
			SLJIT_ASSERT((sljit_sw)addr <= HALFWORD_MAX && (sljit_sw)addr >= HALFWORD_MIN);
		} else {
			SLJIT_ASSERT(addr <= HALFWORD_MAX);
		}
		sljit_unaligned_store_s32((void*)(jump_addr - sizeof(sljit_s32)), (sljit_s32)addr);
#endif /* !SLJIT_CONFIG_X86_32 */
		return;
	}

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (SLJIT_UNLIKELY(flags & PATCH_MD)) {
		SLJIT_ASSERT(!(flags & JUMP_ADDR));
		sljit_unaligned_store_sw((void*)jump_addr, (sljit_sw)addr);
		return;
	}
#endif /* SLJIT_CONFIG_X86_64 */

	addr -= (sljit_uw)SLJIT_ADD_EXEC_OFFSET((sljit_u8*)jump_addr, executable_offset);

	if (flags & PATCH_MB) {
		addr -= sizeof(sljit_s8);
		SLJIT_ASSERT((sljit_sw)addr <= 0x7f && (sljit_sw)addr >= -0x80);
		*(sljit_u8*)jump_addr = U8(addr);
		return;
	} else if (flags & PATCH_MW) {
		addr -= sizeof(sljit_s32);
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		sljit_unaligned_store_sw((void*)jump_addr, (sljit_sw)addr);
#else /* !SLJIT_CONFIG_X86_32 */
		SLJIT_ASSERT((sljit_sw)addr <= HALFWORD_MAX && (sljit_sw)addr >= HALFWORD_MIN);
		sljit_unaligned_store_s32((void*)jump_addr, (sljit_s32)addr);
#endif /* SLJIT_CONFIG_X86_32 */
	}
}

static sljit_u8 *process_extended_label(sljit_u8 *code_ptr, struct sljit_extended_label *ext_label)
{
	sljit_uw mask;
	sljit_u8 *ptr = code_ptr;

	SLJIT_ASSERT(ext_label->label.u.index == SLJIT_LABEL_ALIGNED);
	mask = ext_label->data;

	code_ptr = (sljit_u8*)(((sljit_uw)code_ptr + mask) & ~mask);

	while (ptr < code_ptr)
		*ptr++ = NOP;

	return code_ptr;
}
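
/* For example, with a mask of 15 a code_ptr of 0x1009 is rounded up to
   0x1010 and the seven-byte gap is filled with single-byte NOPs. */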

static void reduce_code_size(struct sljit_compiler *compiler)
{
	struct sljit_label *label;
	struct sljit_jump *jump;
	sljit_uw next_label_size;
	sljit_uw next_jump_addr;
	sljit_uw next_min_addr;
	sljit_uw size_reduce = 0;
	sljit_sw diff;
	sljit_uw type;
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
	sljit_uw size_reduce_max;
#endif /* SLJIT_DEBUG */

	label = compiler->labels;
	jump = compiler->jumps;

	next_label_size = SLJIT_GET_NEXT_SIZE(label);
	next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);

	while (1) {
		next_min_addr = next_label_size;
		if (next_jump_addr < next_min_addr)
			next_min_addr = next_jump_addr;

		if (next_min_addr == SLJIT_MAX_ADDRESS)
			break;

		if (next_min_addr == next_label_size) {
			label->size -= size_reduce;

			label = label->next;
			next_label_size = SLJIT_GET_NEXT_SIZE(label);
		}

		if (next_min_addr != next_jump_addr)
			continue;

		jump->addr -= size_reduce;
		if (!(jump->flags & JUMP_MOV_ADDR)) {
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
			size_reduce_max = size_reduce + (((jump->flags >> TYPE_SHIFT) < SLJIT_JUMP) ? CJUMP_MAX_SIZE : JUMP_MAX_SIZE);
#endif /* SLJIT_DEBUG */

			if (!(jump->flags & SLJIT_REWRITABLE_JUMP)) {
				if (jump->flags & JUMP_ADDR) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
					if (jump->u.target <= 0xffffffffl)
						size_reduce += sizeof(sljit_s32);
#endif /* SLJIT_CONFIG_X86_64 */
				} else {
					/* Unit size: instruction. */
					diff = (sljit_sw)jump->u.label->size - (sljit_sw)jump->addr;
					if (jump->u.label->size > jump->addr) {
						SLJIT_ASSERT(jump->u.label->size - size_reduce >= jump->addr);
						diff -= (sljit_sw)size_reduce;
					}
					type = jump->flags >> TYPE_SHIFT;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
					if (type == SLJIT_JUMP) {
						if (diff <= 0x7f + 2 && diff >= -0x80 + 2)
							size_reduce += JUMP_MAX_SIZE - 2;
						else if (diff <= HALFWORD_MAX + 5 && diff >= HALFWORD_MIN + 5)
							size_reduce += JUMP_MAX_SIZE - 5;
					} else if (type < SLJIT_JUMP) {
						if (diff <= 0x7f + 2 && diff >= -0x80 + 2)
							size_reduce += CJUMP_MAX_SIZE - 2;
						else if (diff <= HALFWORD_MAX + 6 && diff >= HALFWORD_MIN + 6)
							size_reduce += CJUMP_MAX_SIZE - 6;
					} else {
						if (diff <= HALFWORD_MAX + 5 && diff >= HALFWORD_MIN + 5)
							size_reduce += JUMP_MAX_SIZE - 5;
					}
#else /* !SLJIT_CONFIG_X86_64 */
					if (type == SLJIT_JUMP) {
						if (diff <= 0x7f + 2 && diff >= -0x80 + 2)
							size_reduce += JUMP_MAX_SIZE - 2;
					} else if (type < SLJIT_JUMP) {
						if (diff <= 0x7f + 2 && diff >= -0x80 + 2)
							size_reduce += CJUMP_MAX_SIZE - 2;
					}
#endif /* SLJIT_CONFIG_X86_64 */
				}
			}

#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
			jump->flags |= (size_reduce_max - size_reduce) << JUMP_SIZE_SHIFT;
#endif /* SLJIT_DEBUG */
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		} else {
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
			size_reduce_max = size_reduce + 10;
#endif /* SLJIT_DEBUG */

			if (!(jump->flags & JUMP_ADDR)) {
				diff = (sljit_sw)jump->u.label->size - (sljit_sw)(jump->addr - 3);

				if (diff <= HALFWORD_MAX && diff >= HALFWORD_MIN)
					size_reduce += 3;
			} else if (jump->u.target <= 0xffffffffl)
				size_reduce += (jump->flags & MOV_ADDR_HI) ? 4 : 5;

#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
			jump->flags |= (size_reduce_max - size_reduce) << JUMP_SIZE_SHIFT;
#endif /* SLJIT_DEBUG */
#endif /* SLJIT_CONFIG_X86_64 */
		}

		jump = jump->next;
		next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);
	}

	compiler->size -= size_reduce;
}
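
/* The thresholds above include the jump's own encoded size because diff is
   measured from the start of the jump instruction while the rel8/rel32
   displacement is relative to its end: a 2-byte short jump reaches
   [-0x80 + 2, 0x7f + 2], and an n-byte near form reaches
   [HALFWORD_MIN + n, HALFWORD_MAX + n] (n = 5 for jmp, 6 for conditional
   jumps). */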

SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler, sljit_s32 options, void *exec_allocator_data)
{
	struct sljit_memory_fragment *buf;
	sljit_u8 *code;
	sljit_u8 *code_ptr;
	sljit_u8 *buf_ptr;
	sljit_u8 *buf_end;
	sljit_u8 len;
	sljit_sw executable_offset;
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
	sljit_uw addr;
#endif /* SLJIT_DEBUG */

	struct sljit_label *label;
	struct sljit_jump *jump;
	struct sljit_const *const_;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_generate_code(compiler, options));

	reduce_code_size(compiler);

	/* Second code generation pass. */
	code = (sljit_u8*)allocate_executable_memory(compiler->size, options, exec_allocator_data, &executable_offset);
	PTR_FAIL_WITH_EXEC_IF(code);

	reverse_buf(compiler);
	buf = compiler->buf;

	code_ptr = code;
	label = compiler->labels;
	jump = compiler->jumps;
	const_ = compiler->consts;

	do {
		buf_ptr = buf->memory;
		buf_end = buf_ptr + buf->used_size;
		do {
			len = *buf_ptr++;
			SLJIT_ASSERT(len > 0);
			if (len < SLJIT_INST_CONST) {
				/* The code is already generated. */
				SLJIT_MEMCPY(code_ptr, buf_ptr, len);
				code_ptr += len;
				buf_ptr += len;
			} else {
				switch (len) {
				case SLJIT_INST_LABEL:
					if (label->u.index >= SLJIT_LABEL_ALIGNED)
						code_ptr = process_extended_label(code_ptr, (struct sljit_extended_label*)label);

					label->u.addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
					label->size = (sljit_uw)(code_ptr - code);
					label = label->next;
					break;
				case SLJIT_INST_JUMP:
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
					addr = (sljit_uw)code_ptr;
#endif /* SLJIT_DEBUG */
					if (!(jump->flags & SLJIT_REWRITABLE_JUMP))
						code_ptr = detect_near_jump_type(jump, code_ptr, code, executable_offset);
					else {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
						code_ptr = detect_far_jump_type(jump, code_ptr, executable_offset);
#else /* !SLJIT_CONFIG_X86_32 */
						code_ptr = detect_far_jump_type(jump, code_ptr);
#endif /* SLJIT_CONFIG_X86_32 */
					}

					SLJIT_ASSERT((sljit_uw)code_ptr - addr <= ((jump->flags >> JUMP_SIZE_SHIFT) & 0xff));
					jump = jump->next;
					break;
				case SLJIT_INST_MOV_ADDR:
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
					code_ptr = generate_mov_addr_code(jump, code_ptr, code, executable_offset);
#endif /* SLJIT_CONFIG_X86_64 */
					jump->addr = (sljit_uw)code_ptr;
					jump = jump->next;
					break;
				default:
					SLJIT_ASSERT(len == SLJIT_INST_CONST);
					const_->addr = (sljit_uw)code_ptr;
					const_ = const_->next;
					break;
				}
			}
		} while (buf_ptr < buf_end);

		SLJIT_ASSERT(buf_ptr == buf_end);
		buf = buf->next;
	} while (buf);

	SLJIT_ASSERT(!label);
	SLJIT_ASSERT(!jump);
	SLJIT_ASSERT(!const_);
	SLJIT_ASSERT(code_ptr <= code + compiler->size);

	jump = compiler->jumps;
	while (jump) {
		generate_jump_or_mov_addr(jump, executable_offset);
		jump = jump->next;
	}

	compiler->error = SLJIT_ERR_COMPILED;
	compiler->executable_offset = executable_offset;
	compiler->executable_size = (sljit_uw)(code_ptr - code);

	code = (sljit_u8*)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);

	SLJIT_UPDATE_WX_FLAGS(code, (sljit_u8*)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset), 1);
	return (void*)code;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
{
	switch (feature_type) {
	case SLJIT_HAS_FPU:
#ifdef SLJIT_IS_FPU_AVAILABLE
		return (SLJIT_IS_FPU_AVAILABLE) != 0;
#elif (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
		if (cpu_feature_list == 0)
			get_cpu_features();
		return (cpu_feature_list & CPU_FEATURE_SSE2) != 0;
#else /* SLJIT_DETECT_SSE2 */
		return 1;
#endif /* SLJIT_DETECT_SSE2 */

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	case SLJIT_HAS_VIRTUAL_REGISTERS:
		return 1;
#endif /* SLJIT_CONFIG_X86_32 */

	case SLJIT_HAS_CLZ:
		if (cpu_feature_list == 0)
			get_cpu_features();

		return (cpu_feature_list & CPU_FEATURE_LZCNT) ? 1 : 2;

	case SLJIT_HAS_CTZ:
		if (cpu_feature_list == 0)
			get_cpu_features();

		return (cpu_feature_list & CPU_FEATURE_TZCNT) ? 1 : 2;

	case SLJIT_HAS_CMOV:
		if (cpu_feature_list == 0)
			get_cpu_features();
		return (cpu_feature_list & CPU_FEATURE_CMOV) != 0;

	case SLJIT_HAS_REV:
	case SLJIT_HAS_ROT:
	case SLJIT_HAS_PREFETCH:
	case SLJIT_HAS_COPY_F32:
	case SLJIT_HAS_COPY_F64:
	case SLJIT_HAS_ATOMIC:
	case SLJIT_HAS_MEMORY_BARRIER:
		return 1;

#if !(defined SLJIT_IS_FPU_AVAILABLE) || SLJIT_IS_FPU_AVAILABLE
	case SLJIT_HAS_AVX:
		if (cpu_feature_list == 0)
			get_cpu_features();
		return (cpu_feature_list & CPU_FEATURE_AVX) != 0;
	case SLJIT_HAS_AVX2:
		if (cpu_feature_list == 0)
			get_cpu_features();
		return (cpu_feature_list & CPU_FEATURE_AVX2) != 0;
	case SLJIT_HAS_SIMD:
		if (cpu_feature_list == 0)
			get_cpu_features();
		return (cpu_feature_list & CPU_FEATURE_SSE41) != 0;
#endif /* SLJIT_IS_FPU_AVAILABLE */
	default:
		return 0;
	}
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type)
{
	switch (type) {
	case SLJIT_ORDERED_EQUAL:
	case SLJIT_UNORDERED_OR_NOT_EQUAL:
		return 2;
	}

	return 0;
}

/* --------------------------------------------------------------------- */
/* Operators */
/* --------------------------------------------------------------------- */

#define BINARY_OPCODE(opcode) (((opcode ## _EAX_i32) << 24) | ((opcode ## _r_rm) << 16) | ((opcode ## _rm_r) << 8) | (opcode))
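
/* For example, BINARY_OPCODE(ADD) packs 0x05 (ADD_EAX_i32), 0x03 (ADD_r_rm),
   0x01 (ADD_rm_r) and 0x00 (ADD's /0 group value) into 0x05030100. */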

#define BINARY_IMM32(op_imm, immw, arg, argw) \
	do { \
		inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, immw, arg, argw); \
		FAIL_IF(!inst); \
		*(inst + 1) |= (op_imm); \
	} while (0)

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)

#define BINARY_IMM(op_imm, op_mr, immw, arg, argw) \
	do { \
		if (IS_HALFWORD(immw) || compiler->mode32) { \
			BINARY_IMM32(op_imm, immw, arg, argw); \
		} \
		else { \
			FAIL_IF(emit_load_imm64(compiler, FAST_IS_REG(arg) ? TMP_REG2 : TMP_REG1, immw)); \
			inst = emit_x86_instruction(compiler, 1, FAST_IS_REG(arg) ? TMP_REG2 : TMP_REG1, 0, arg, argw); \
			FAIL_IF(!inst); \
			*inst = (op_mr); \
		} \
	} while (0)

#define BINARY_EAX_IMM(op_eax_imm, immw) \
	FAIL_IF(emit_do_imm32(compiler, (!compiler->mode32) ? REX_W : 0, (op_eax_imm), immw))

#else /* !SLJIT_CONFIG_X86_64 */

#define BINARY_IMM(op_imm, op_mr, immw, arg, argw) \
	BINARY_IMM32(op_imm, immw, arg, argw)

#define BINARY_EAX_IMM(op_eax_imm, immw) \
	FAIL_IF(emit_do_imm(compiler, (op_eax_imm), immw))

#endif /* SLJIT_CONFIG_X86_64 */

static sljit_s32 emit_byte(struct sljit_compiler *compiler, sljit_u8 byte)
{
	sljit_u8 *inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
	FAIL_IF(!inst);
	INC_SIZE(1);
	*inst = byte;
	return SLJIT_SUCCESS;
}

static sljit_s32 emit_mov(struct sljit_compiler *compiler,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw);

#define EMIT_MOV(compiler, dst, dstw, src, srcw) \
	FAIL_IF(emit_mov(compiler, dst, dstw, src, srcw));

static sljit_s32 emit_groupf(struct sljit_compiler *compiler,
	sljit_uw op,
	sljit_s32 dst, sljit_s32 src, sljit_sw srcw);

static sljit_s32 emit_groupf_ext(struct sljit_compiler *compiler,
	sljit_uw op,
	sljit_s32 dst, sljit_s32 src, sljit_sw srcw);

static SLJIT_INLINE sljit_s32 emit_sse2_store(struct sljit_compiler *compiler,
	sljit_s32 single, sljit_s32 dst, sljit_sw dstw, sljit_s32 src);

static SLJIT_INLINE sljit_s32 emit_sse2_load(struct sljit_compiler *compiler,
	sljit_s32 single, sljit_s32 dst, sljit_s32 src, sljit_sw srcw);

static sljit_s32 emit_cmp_binary(struct sljit_compiler *compiler,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w);

static sljit_s32 emit_cmov_generic(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 dst_reg,
	sljit_s32 src, sljit_sw srcw);

static SLJIT_INLINE sljit_s32 emit_endbranch(struct sljit_compiler *compiler)
{
#if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET)
	/* Emit endbr32/endbr64 when CET is enabled. */
	sljit_u8 *inst;
	inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
	FAIL_IF(!inst);
	INC_SIZE(4);
	inst[0] = GROUP_F3;
	inst[1] = GROUP_0F;
	inst[2] = 0x1e;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	inst[3] = 0xfb;
#else /* !SLJIT_CONFIG_X86_32 */
	inst[3] = 0xfa;
#endif /* SLJIT_CONFIG_X86_32 */
#else /* !SLJIT_CONFIG_X86_CET */
	SLJIT_UNUSED_ARG(compiler);
#endif /* SLJIT_CONFIG_X86_CET */
	return SLJIT_SUCCESS;
}

#if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET) && defined (__SHSTK__)

static SLJIT_INLINE sljit_s32 emit_rdssp(struct sljit_compiler *compiler, sljit_s32 reg)
{
	sljit_u8 *inst;
	sljit_s32 size;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	size = 5;
#else
	size = 4;
#endif

	inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!inst);
	INC_SIZE(size);
	*inst++ = GROUP_F3;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	*inst++ = REX_W | (reg_map[reg] <= 7 ? 0 : REX_B);
#endif
	inst[0] = GROUP_0F;
	inst[1] = 0x1e;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	inst[2] = U8(MOD_REG | (0x1 << 3) | reg_lmap[reg]);
#else
	inst[2] = U8(MOD_REG | (0x1 << 3) | reg_map[reg]);
#endif
	return SLJIT_SUCCESS;
}

static SLJIT_INLINE sljit_s32 emit_incssp(struct sljit_compiler *compiler, sljit_s32 reg)
{
	sljit_u8 *inst;
	sljit_s32 size;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	size = 5;
#else
	size = 4;
#endif

	inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!inst);
	INC_SIZE(size);
	*inst++ = GROUP_F3;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	*inst++ = REX_W | (reg_map[reg] <= 7 ? 0 : REX_B);
#endif
	inst[0] = GROUP_0F;
	inst[1] = 0xae;
	inst[2] = (0x3 << 6) | (0x5 << 3) | (reg_map[reg] & 0x7);
	return SLJIT_SUCCESS;
}

#endif /* SLJIT_CONFIG_X86_CET && __SHSTK__ */

static SLJIT_INLINE sljit_s32 cpu_has_shadow_stack(void)
{
#if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET) && defined (__SHSTK__)
	return _get_ssp() != 0;
#else /* !SLJIT_CONFIG_X86_CET || !__SHSTK__ */
	return 0;
#endif /* SLJIT_CONFIG_X86_CET && __SHSTK__ */
}

static SLJIT_INLINE sljit_s32 adjust_shadow_stack(struct sljit_compiler *compiler,
	sljit_s32 src, sljit_sw srcw)
{
#if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET) && defined (__SHSTK__)
	sljit_u8 *inst, *jz_after_cmp_inst;
	sljit_uw size_jz_after_cmp_inst;

	sljit_uw size_before_rdssp_inst = compiler->size;

	/* Generate "RDSSP TMP_REG1". */
	FAIL_IF(emit_rdssp(compiler, TMP_REG1));

	/* Load return address on shadow stack into TMP_REG1. */
	EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(TMP_REG1), 0);

	/* Compare return address against TMP_REG1. */
	FAIL_IF(emit_cmp_binary(compiler, TMP_REG1, 0, src, srcw));

	/* Generate JZ to skip the shadow stack adjustment when the shadow
	   stack matches the normal stack. */
	inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
	FAIL_IF(!inst);
	INC_SIZE(2);
	*inst++ = get_jump_code(SLJIT_EQUAL) - 0x10;
	size_jz_after_cmp_inst = compiler->size;
	jz_after_cmp_inst = inst;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	/* REX_W is not necessary. */
	compiler->mode32 = 1;
#endif
	/* Load 1 into TMP_REG1. */
	EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, 1);

	/* Generate "INCSSP TMP_REG1". */
	FAIL_IF(emit_incssp(compiler, TMP_REG1));

	/* Jump back to "RDSSP TMP_REG1" to check the shadow stack again. */
	inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
	FAIL_IF(!inst);
	INC_SIZE(2);
	inst[0] = JMP_i8;
	inst[1] = size_before_rdssp_inst - compiler->size;

	*jz_after_cmp_inst = compiler->size - size_jz_after_cmp_inst;
#else /* !SLJIT_CONFIG_X86_CET || !__SHSTK__ */
	SLJIT_UNUSED_ARG(compiler);
	SLJIT_UNUSED_ARG(src);
	SLJIT_UNUSED_ARG(srcw);
#endif /* SLJIT_CONFIG_X86_CET && __SHSTK__ */
	return SLJIT_SUCCESS;
}

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
#include "sljitNativeX86_32.c"
#else
#include "sljitNativeX86_64.c"
#endif

static sljit_s32 emit_mov(struct sljit_compiler *compiler,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_u8* inst;

	if (FAST_IS_REG(src)) {
		inst = emit_x86_instruction(compiler, 1, src, 0, dst, dstw);
		FAIL_IF(!inst);
		*inst = MOV_rm_r;
		return SLJIT_SUCCESS;
	}

	if (src == SLJIT_IMM) {
		if (FAST_IS_REG(dst)) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
			return emit_do_imm(compiler, MOV_r_i32 | reg_map[dst], srcw);
#else
			if (!compiler->mode32) {
				if (NOT_HALFWORD(srcw))
					return emit_load_imm64(compiler, dst, srcw);
			}
			else
				return emit_do_imm32(compiler, (reg_map[dst] >= 8) ? REX_B : 0, U8(MOV_r_i32 | reg_lmap[dst]), srcw);
#endif
		}
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (!compiler->mode32 && NOT_HALFWORD(srcw)) {
			/* Immediate-to-memory move. Only the SLJIT_MOV operation copies
			   an immediate directly into memory, so TMP_REG1 can be used. */
			FAIL_IF(emit_load_imm64(compiler, TMP_REG1, srcw));
			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
			FAIL_IF(!inst);
			*inst = MOV_rm_r;
			return SLJIT_SUCCESS;
		}
#endif
		inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, dstw);
		FAIL_IF(!inst);
		*inst = MOV_rm_i32;
		return SLJIT_SUCCESS;
	}
	if (FAST_IS_REG(dst)) {
		inst = emit_x86_instruction(compiler, 1, dst, 0, src, srcw);
		FAIL_IF(!inst);
		*inst = MOV_r_rm;
		return SLJIT_SUCCESS;
	}

	/* Memory-to-memory move. Only the SLJIT_MOV operation copies
	   data from memory to memory, so TMP_REG1 can be used. */
	inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src, srcw);
	FAIL_IF(!inst);
	*inst = MOV_r_rm;
	inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
	FAIL_IF(!inst);
	*inst = MOV_rm_r;
	return SLJIT_SUCCESS;
}

static sljit_s32 emit_cmov_generic(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 dst_reg,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_u8* inst;
	sljit_uw size;

	SLJIT_ASSERT(type >= SLJIT_EQUAL && type <= SLJIT_ORDERED_LESS_EQUAL);

	inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
	FAIL_IF(!inst);
	INC_SIZE(2);
	inst[0] = U8(get_jump_code((sljit_uw)type ^ 0x1) - 0x10);

	size = compiler->size;
	EMIT_MOV(compiler, dst_reg, 0, src, srcw);

	inst[1] = U8(compiler->size - size);
	return SLJIT_SUCCESS;
}
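
/* Without CMOV support this emits a conditional short jump over the move:
   the condition is inverted (type ^ 0x1), encoded in its 1-byte form
   (get_jump_code() - 0x10), and its rel8 displacement is patched to the size
   of the emitted MOV. */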

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
{
	sljit_u8 *inst;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	sljit_uw size;
#endif

	CHECK_ERROR();
	CHECK(check_sljit_emit_op0(compiler, op));

	switch (GET_OPCODE(op)) {
	case SLJIT_BREAKPOINT:
		return emit_byte(compiler, INT3);
	case SLJIT_NOP:
		return emit_byte(compiler, NOP);
	case SLJIT_LMUL_UW:
	case SLJIT_LMUL_SW:
	case SLJIT_DIVMOD_UW:
	case SLJIT_DIVMOD_SW:
	case SLJIT_DIV_UW:
	case SLJIT_DIV_SW:
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
#ifdef _WIN64
		SLJIT_ASSERT(
			reg_map[SLJIT_R0] == 0
			&& reg_map[SLJIT_R1] == 2
			&& reg_map[TMP_REG1] > 7);
#else
		SLJIT_ASSERT(
			reg_map[SLJIT_R0] == 0
			&& reg_map[SLJIT_R1] < 7
			&& reg_map[TMP_REG1] == 2);
#endif
		compiler->mode32 = op & SLJIT_32;
#endif
		SLJIT_COMPILE_ASSERT((SLJIT_DIVMOD_UW & 0x2) == 0 && SLJIT_DIV_UW - 0x2 == SLJIT_DIVMOD_UW, bad_div_opcode_assignments);

		op = GET_OPCODE(op);
		if ((op | 0x2) == SLJIT_DIV_UW) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) || defined(_WIN64)
			EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_R1, 0);
			inst = emit_x86_instruction(compiler, 1, SLJIT_R1, 0, SLJIT_R1, 0);
#else
			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, TMP_REG1, 0);
#endif
			FAIL_IF(!inst);
			*inst = XOR_r_rm;
		}

		if ((op | 0x2) == SLJIT_DIV_SW) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) || defined(_WIN64)
			EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_R1, 0);
#endif

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
			FAIL_IF(emit_byte(compiler, CDQ));
#else
			if (!compiler->mode32) {
				inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
				FAIL_IF(!inst);
				INC_SIZE(2);
				inst[0] = REX_W;
				inst[1] = CDQ;
			} else
				FAIL_IF(emit_byte(compiler, CDQ));
#endif
		}

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
		FAIL_IF(!inst);
		INC_SIZE(2);
		inst[0] = GROUP_F7;
		inst[1] = MOD_REG | ((op >= SLJIT_DIVMOD_UW) ? reg_map[TMP_REG1] : reg_map[SLJIT_R1]);
#else /* !SLJIT_CONFIG_X86_32 */
#ifdef _WIN64
		size = (!compiler->mode32 || op >= SLJIT_DIVMOD_UW) ? 3 : 2;
#else /* !_WIN64 */
		size = (!compiler->mode32) ? 3 : 2;
#endif /* _WIN64 */
		inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
		FAIL_IF(!inst);
		INC_SIZE(size);
#ifdef _WIN64
		if (!compiler->mode32)
			*inst++ = REX_W | ((op >= SLJIT_DIVMOD_UW) ? REX_B : 0);
		else if (op >= SLJIT_DIVMOD_UW)
			*inst++ = REX_B;
		inst[0] = GROUP_F7;
		inst[1] = MOD_REG | ((op >= SLJIT_DIVMOD_UW) ? reg_lmap[TMP_REG1] : reg_lmap[SLJIT_R1]);
#else /* !_WIN64 */
		if (!compiler->mode32)
			*inst++ = REX_W;
		inst[0] = GROUP_F7;
		inst[1] = MOD_REG | reg_map[SLJIT_R1];
#endif /* _WIN64 */
#endif /* SLJIT_CONFIG_X86_32 */
		switch (op) {
		case SLJIT_LMUL_UW:
			inst[1] |= MUL;
			break;
		case SLJIT_LMUL_SW:
			inst[1] |= IMUL;
			break;
		case SLJIT_DIVMOD_UW:
		case SLJIT_DIV_UW:
			inst[1] |= DIV;
			break;
		case SLJIT_DIVMOD_SW:
		case SLJIT_DIV_SW:
			inst[1] |= IDIV;
			break;
		}
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) && !defined(_WIN64)
		if (op <= SLJIT_DIVMOD_SW)
			EMIT_MOV(compiler, SLJIT_R1, 0, TMP_REG1, 0);
#else
		if (op >= SLJIT_DIV_UW)
			EMIT_MOV(compiler, SLJIT_R1, 0, TMP_REG1, 0);
#endif
		break;
	case SLJIT_MEMORY_BARRIER:
		inst = (sljit_u8*)ensure_buf(compiler, 1 + 3);
		FAIL_IF(!inst);
		INC_SIZE(3);
		inst[0] = GROUP_0F;
		inst[1] = 0xae;
		inst[2] = 0xf0;
		return SLJIT_SUCCESS;
	case SLJIT_ENDBR:
		return emit_endbranch(compiler);
	case SLJIT_SKIP_FRAMES_BEFORE_RETURN:
		return skip_frames_before_return(compiler);
	}

	return SLJIT_SUCCESS;
}

static sljit_s32 emit_mov_byte(struct sljit_compiler *compiler, sljit_s32 sign,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_u8* inst;
	sljit_s32 dst_r;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 0;
#endif

	if (src == SLJIT_IMM) {
		if (FAST_IS_REG(dst)) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
			return emit_do_imm(compiler, MOV_r_i32 | reg_map[dst], srcw);
#else
			inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, 0);
			FAIL_IF(!inst);
			*inst = MOV_rm_i32;
			return SLJIT_SUCCESS;
#endif
		}
		inst = emit_x86_instruction(compiler, 1 | EX86_BYTE_ARG | EX86_NO_REXW, SLJIT_IMM, srcw, dst, dstw);
		FAIL_IF(!inst);
		*inst = MOV_rm8_i8;
		return SLJIT_SUCCESS;
	}

	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

	if ((dst & SLJIT_MEM) && FAST_IS_REG(src)) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (reg_map[src] >= 4) {
			SLJIT_ASSERT(dst_r == TMP_REG1);
			EMIT_MOV(compiler, TMP_REG1, 0, src, 0);
		} else
			dst_r = src;
#else
		dst_r = src;
#endif
	} else {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (FAST_IS_REG(src) && reg_map[src] >= 4) {
			/* Both src and dst are registers. */
			SLJIT_ASSERT(FAST_IS_REG(dst));

			if (src == dst && !sign) {
				inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, 0xff, dst, 0);
				FAIL_IF(!inst);
				*(inst + 1) |= AND;
				return SLJIT_SUCCESS;
			}

			EMIT_MOV(compiler, TMP_REG1, 0, src, 0);
			src = TMP_REG1;
			srcw = 0;
		}
#endif /* SLJIT_CONFIG_X86_32 */

		/* src is either a memory operand or, on x86-32, a register with reg_map[src] < 4. */
		FAIL_IF(emit_groupf(compiler, sign ? MOVSX_r_rm8 : MOVZX_r_rm8, dst_r, src, srcw));
	}

	if (dst & SLJIT_MEM) {
		inst = emit_x86_instruction(compiler, 1 | EX86_REX | EX86_NO_REXW, dst_r, 0, dst, dstw);
		FAIL_IF(!inst);
		*inst = MOV_rm8_r8;
	}

	return SLJIT_SUCCESS;
}

static sljit_s32 emit_prefetch(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_u8* inst;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif

	inst = emit_x86_instruction(compiler, 2, 0, 0, src, srcw);
	FAIL_IF(!inst);
	inst[0] = GROUP_0F;
	inst[1] = PREFETCH;

	if (op == SLJIT_PREFETCH_L1)
		inst[2] |= (1 << 3);
	else if (op == SLJIT_PREFETCH_L2)
		inst[2] |= (2 << 3);
	else if (op == SLJIT_PREFETCH_L3)
		inst[2] |= (3 << 3);

	return SLJIT_SUCCESS;
}

static sljit_s32 emit_mov_half(struct sljit_compiler *compiler, sljit_s32 sign,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_u8* inst;
	sljit_s32 dst_r;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 0;
#endif

	if (src == SLJIT_IMM) {
		if (FAST_IS_REG(dst)) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
			return emit_do_imm(compiler, MOV_r_i32 | reg_map[dst], srcw);
#else
			inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, 0);
			FAIL_IF(!inst);
			*inst = MOV_rm_i32;
			return SLJIT_SUCCESS;
#endif
		}
		inst = emit_x86_instruction(compiler, 1 | EX86_HALF_ARG | EX86_NO_REXW | EX86_PREF_66, SLJIT_IMM, srcw, dst, dstw);
		FAIL_IF(!inst);
		*inst = MOV_rm_i32;
		return SLJIT_SUCCESS;
	}

	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

	if ((dst & SLJIT_MEM) && FAST_IS_REG(src))
		dst_r = src;
	else
		FAIL_IF(emit_groupf(compiler, sign ? MOVSX_r_rm16 : MOVZX_r_rm16, dst_r, src, srcw));

	if (dst & SLJIT_MEM) {
		inst = emit_x86_instruction(compiler, 1 | EX86_NO_REXW | EX86_PREF_66, dst_r, 0, dst, dstw);
		FAIL_IF(!inst);
		*inst = MOV_rm_r;
	}

	return SLJIT_SUCCESS;
}

static sljit_s32 emit_unary(struct sljit_compiler *compiler, sljit_u8 opcode,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_u8* inst;

	if (dst == src && dstw == srcw) {
		/* Same input and output */
		inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
		FAIL_IF(!inst);
		inst[0] = GROUP_F7;
		inst[1] |= opcode;
		return SLJIT_SUCCESS;
	}

	if (FAST_IS_REG(dst)) {
		EMIT_MOV(compiler, dst, 0, src, srcw);
		inst = emit_x86_instruction(compiler, 1, 0, 0, dst, 0);
		FAIL_IF(!inst);
		inst[0] = GROUP_F7;
		inst[1] |= opcode;
		return SLJIT_SUCCESS;
	}

	EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
	inst = emit_x86_instruction(compiler, 1, 0, 0, TMP_REG1, 0);
	FAIL_IF(!inst);
	inst[0] = GROUP_F7;
	inst[1] |= opcode;
	EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
	return SLJIT_SUCCESS;
}

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
static const sljit_sw emit_clz_arg = 32 + 31;
static const sljit_sw emit_ctz_arg = 32;
#endif

static sljit_s32 emit_clz_ctz(struct sljit_compiler *compiler, sljit_s32 is_clz,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_u8* inst;
	sljit_s32 dst_r;
	sljit_sw max;

	SLJIT_ASSERT(cpu_feature_list != 0);

	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

	if (is_clz ? (cpu_feature_list & CPU_FEATURE_LZCNT) : (cpu_feature_list & CPU_FEATURE_TZCNT)) {
		FAIL_IF(emit_groupf(compiler, (is_clz ? LZCNT_r_rm : TZCNT_r_rm) | EX86_PREF_F3, dst_r, src, srcw));

		if (dst & SLJIT_MEM)
			EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
		return SLJIT_SUCCESS;
	}

	FAIL_IF(emit_groupf(compiler, is_clz ? BSR_r_rm : BSF_r_rm, dst_r, src, srcw));

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	max = is_clz ? (32 + 31) : 32;

	if (cpu_feature_list & CPU_FEATURE_CMOV) {
		if (dst_r != TMP_REG1) {
			EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, max);
			inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG1, 0);
		}
		else
			inst = emit_x86_instruction(compiler, 2, dst_r, 0, SLJIT_MEM0(), is_clz ? (sljit_sw)&emit_clz_arg : (sljit_sw)&emit_ctz_arg);

		FAIL_IF(!inst);
		inst[0] = GROUP_0F;
		inst[1] = CMOVE_r_rm;
	}
	else
		FAIL_IF(emit_cmov_generic(compiler, SLJIT_EQUAL, dst_r, SLJIT_IMM, max));

	if (is_clz) {
		inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, 31, dst_r, 0);
		FAIL_IF(!inst);
		*(inst + 1) |= XOR;
	}
#else
	if (is_clz)
		max = compiler->mode32 ? (32 + 31) : (64 + 63);
	else
		max = compiler->mode32 ? 32 : 64;

	if (cpu_feature_list & CPU_FEATURE_CMOV) {
		EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_IMM, max);
		FAIL_IF(emit_groupf(compiler, CMOVE_r_rm, dst_r, TMP_REG2, 0));
	} else
		FAIL_IF(emit_cmov_generic(compiler, SLJIT_EQUAL, dst_r, SLJIT_IMM, max));

	if (is_clz) {
		inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, max >> 1, dst_r, 0);
		FAIL_IF(!inst);
		*(inst + 1) |= XOR;
	}
#endif

	if (dst & SLJIT_MEM)
		EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
	return SLJIT_SUCCESS;
}
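
/* BSR returns the index of the highest set bit, so for a nonzero 32-bit x,
   CLZ(x) = 31 - BSR(x) = 31 ^ BSR(x), which is why the result is XOR-ed with
   max >> 1 (31 or 63). The CMOV above handles x == 0 by loading 32 + 31
   (or 64 + 63), which the XOR turns into 32 (or 64). */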

static sljit_s32 emit_bswap(struct sljit_compiler *compiler,
	sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_u8 *inst;
	sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
	sljit_uw size;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	sljit_u8 rex = 0;
#else /* !SLJIT_CONFIG_X86_64 */
	sljit_s32 dst_is_ereg = op & SLJIT_32;
#endif /* SLJIT_CONFIG_X86_64 */

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (op == SLJIT_REV_U32 || op == SLJIT_REV_S32)
		compiler->mode32 = 1;
#else /* !SLJIT_CONFIG_X86_64 */
	op &= ~SLJIT_32;
#endif /* SLJIT_CONFIG_X86_64 */

	if (src != dst_r) {
		/* Only the lower 16 bits are read for eregs. */
		if (op == SLJIT_REV_U16 || op == SLJIT_REV_S16)
			FAIL_IF(emit_mov_half(compiler, 0, dst_r, 0, src, srcw));
		else
			EMIT_MOV(compiler, dst_r, 0, src, srcw);
	}

	size = 2;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (!compiler->mode32)
		rex = REX_W;

	if (reg_map[dst_r] >= 8)
		rex |= REX_B;

	if (rex != 0)
		size++;
#endif /* SLJIT_CONFIG_X86_64 */

	inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!inst);
	INC_SIZE(size);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (rex != 0)
		*inst++ = rex;

	inst[0] = GROUP_0F;
	inst[1] = BSWAP_r | reg_lmap[dst_r];
#else /* !SLJIT_CONFIG_X86_64 */
	inst[0] = GROUP_0F;
	inst[1] = BSWAP_r | reg_map[dst_r];
#endif /* SLJIT_CONFIG_X86_64 */

	if (op == SLJIT_REV_U16 || op == SLJIT_REV_S16) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		size = compiler->mode32 ? 16 : 48;
#else /* !SLJIT_CONFIG_X86_64 */
		size = 16;
#endif /* SLJIT_CONFIG_X86_64 */

		inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, (sljit_sw)size, dst_r, 0);
		FAIL_IF(!inst);
		if (op == SLJIT_REV_U16)
			inst[1] |= SHR;
		else
			inst[1] |= SAR;
	}

	if (dst & SLJIT_MEM) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (dst_is_ereg)
			op = SLJIT_REV;
#endif /* SLJIT_CONFIG_X86_32 */
		if (op == SLJIT_REV_U16 || op == SLJIT_REV_S16)
			return emit_mov_half(compiler, 0, dst, dstw, TMP_REG1, 0);

		return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
	}

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (op == SLJIT_REV_S32) {
		compiler->mode32 = 0;
		inst = emit_x86_instruction(compiler, 1, dst, 0, dst, 0);
		FAIL_IF(!inst);
		*inst = MOVSXD_r_rm;
	}
#endif /* SLJIT_CONFIG_X86_64 */

	return SLJIT_SUCCESS;
}
1887
1888
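/* Note: on x86-32 some virtual registers (eregs) are stack resident;
   CHECK_EXTRA_REGS rewrites such operands into SLJIT_MEM1(SLJIT_SP)
   accesses, and dst_is_ereg tracks when the result still has to be
   stored back through TMP_REG1 (see the assertion below). */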
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	sljit_s32 dst_is_ereg = 0;
#else /* !SLJIT_CONFIG_X86_32 */
	sljit_s32 op_flags = GET_ALL_FLAGS(op);
#endif /* SLJIT_CONFIG_X86_32 */

	CHECK_ERROR();
	CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
	ADJUST_LOCAL_OFFSET(dst, dstw);
	ADJUST_LOCAL_OFFSET(src, srcw);

	CHECK_EXTRA_REGS(dst, dstw, dst_is_ereg = 1);
	CHECK_EXTRA_REGS(src, srcw, (void)0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = op_flags & SLJIT_32;
#endif /* SLJIT_CONFIG_X86_64 */

	op = GET_OPCODE(op);

	if (op >= SLJIT_MOV && op <= SLJIT_MOV_P) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		compiler->mode32 = 0;
#endif /* SLJIT_CONFIG_X86_64 */

		if (FAST_IS_REG(src) && src == dst) {
			if (!TYPE_CAST_NEEDED(op))
				return SLJIT_SUCCESS;
		}

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (op_flags & SLJIT_32) {
			if (src & SLJIT_MEM) {
				if (op == SLJIT_MOV_S32)
					op = SLJIT_MOV_U32;
			}
			else if (src == SLJIT_IMM) {
				if (op == SLJIT_MOV_U32)
					op = SLJIT_MOV_S32;
			}
		}
#endif /* SLJIT_CONFIG_X86_64 */

		if (src == SLJIT_IMM) {
			switch (op) {
			case SLJIT_MOV_U8:
				srcw = (sljit_u8)srcw;
				break;
			case SLJIT_MOV_S8:
				srcw = (sljit_s8)srcw;
				break;
			case SLJIT_MOV_U16:
				srcw = (sljit_u16)srcw;
				break;
			case SLJIT_MOV_S16:
				srcw = (sljit_s16)srcw;
				break;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			case SLJIT_MOV_U32:
				srcw = (sljit_u32)srcw;
				break;
			case SLJIT_MOV_S32:
				srcw = (sljit_s32)srcw;
				break;
#endif /* SLJIT_CONFIG_X86_64 */
			}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
			if (SLJIT_UNLIKELY(dst_is_ereg))
				return emit_mov(compiler, dst, dstw, src, srcw);
#endif /* SLJIT_CONFIG_X86_32 */
		}

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (SLJIT_UNLIKELY(dst_is_ereg) && (!(op == SLJIT_MOV || op == SLJIT_MOV_U32 || op == SLJIT_MOV_S32 || op == SLJIT_MOV_P) || (src & SLJIT_MEM))) {
			SLJIT_ASSERT(dst == SLJIT_MEM1(SLJIT_SP));
			dst = TMP_REG1;
		}
#endif /* SLJIT_CONFIG_X86_32 */

		switch (op) {
		case SLJIT_MOV:
		case SLJIT_MOV_P:
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		case SLJIT_MOV_U32:
		case SLJIT_MOV_S32:
		case SLJIT_MOV32:
#endif /* SLJIT_CONFIG_X86_32 */
			EMIT_MOV(compiler, dst, dstw, src, srcw);
			break;
		case SLJIT_MOV_U8:
			FAIL_IF(emit_mov_byte(compiler, 0, dst, dstw, src, srcw));
			break;
		case SLJIT_MOV_S8:
			FAIL_IF(emit_mov_byte(compiler, 1, dst, dstw, src, srcw));
			break;
		case SLJIT_MOV_U16:
			FAIL_IF(emit_mov_half(compiler, 0, dst, dstw, src, srcw));
			break;
		case SLJIT_MOV_S16:
			FAIL_IF(emit_mov_half(compiler, 1, dst, dstw, src, srcw));
			break;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		case SLJIT_MOV_U32:
			FAIL_IF(emit_mov_int(compiler, 0, dst, dstw, src, srcw));
			break;
		case SLJIT_MOV_S32:
			FAIL_IF(emit_mov_int(compiler, 1, dst, dstw, src, srcw));
			break;
		case SLJIT_MOV32:
			compiler->mode32 = 1;
			EMIT_MOV(compiler, dst, dstw, src, srcw);
			compiler->mode32 = 0;
			break;
#endif /* SLJIT_CONFIG_X86_64 */
		}

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (SLJIT_UNLIKELY(dst_is_ereg) && dst == TMP_REG1)
			return emit_mov(compiler, SLJIT_MEM1(SLJIT_SP), dstw, TMP_REG1, 0);
#endif /* SLJIT_CONFIG_X86_32 */
		return SLJIT_SUCCESS;
	}

	switch (op) {
	case SLJIT_CLZ:
	case SLJIT_CTZ:
		return emit_clz_ctz(compiler, (op == SLJIT_CLZ), dst, dstw, src, srcw);
	case SLJIT_REV:
	case SLJIT_REV_U16:
	case SLJIT_REV_S16:
	case SLJIT_REV_U32:
	case SLJIT_REV_S32:
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (dst_is_ereg)
			op |= SLJIT_32;
#endif /* SLJIT_CONFIG_X86_32 */
		return emit_bswap(compiler, op, dst, dstw, src, srcw);
	}

	return SLJIT_SUCCESS;
}

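/* Emits a commutative binary operation (ADD/ADC/AND/OR/XOR). op_types packs
   the four opcode bytes used by the different encodings: from the most
   significant byte down, the EAX/immediate short form, the reg <- reg/mem
   form, the reg/mem <- reg form, and the opcode extension of the immediate
   group. */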
static sljit_s32 emit_cum_binary(struct sljit_compiler *compiler,
	sljit_u32 op_types,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_u8* inst;
	sljit_u8 op_eax_imm = U8(op_types >> 24);
	sljit_u8 op_rm = U8((op_types >> 16) & 0xff);
	sljit_u8 op_mr = U8((op_types >> 8) & 0xff);
	sljit_u8 op_imm = U8(op_types & 0xff);

	if (dst == src1 && dstw == src1w) {
		if (src2 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
#else
			if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128)) {
#endif
				BINARY_EAX_IMM(op_eax_imm, src2w);
			}
			else {
				BINARY_IMM(op_imm, op_mr, src2w, dst, dstw);
			}
		}
		else if (FAST_IS_REG(dst)) {
			inst = emit_x86_instruction(compiler, 1, dst, dstw, src2, src2w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
		else if (FAST_IS_REG(src2)) {
			/* Special exception for sljit_emit_op_flags. */
			inst = emit_x86_instruction(compiler, 1, src2, src2w, dst, dstw);
			FAIL_IF(!inst);
			*inst = op_mr;
		}
		else {
			EMIT_MOV(compiler, TMP_REG1, 0, src2, src2w);
			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
			FAIL_IF(!inst);
			*inst = op_mr;
		}
		return SLJIT_SUCCESS;
	}

	/* Only for cumulative operations. */
	if (dst == src2 && dstw == src2w) {
		if (src1 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			if ((dst == SLJIT_R0) && (src1w > 127 || src1w < -128) && (compiler->mode32 || IS_HALFWORD(src1w))) {
#else
			if ((dst == SLJIT_R0) && (src1w > 127 || src1w < -128)) {
#endif
				BINARY_EAX_IMM(op_eax_imm, src1w);
			}
			else {
				BINARY_IMM(op_imm, op_mr, src1w, dst, dstw);
			}
		}
		else if (FAST_IS_REG(dst)) {
			inst = emit_x86_instruction(compiler, 1, dst, dstw, src1, src1w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
		else if (FAST_IS_REG(src1)) {
			inst = emit_x86_instruction(compiler, 1, src1, src1w, dst, dstw);
			FAIL_IF(!inst);
			*inst = op_mr;
		}
		else {
			EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
			FAIL_IF(!inst);
			*inst = op_mr;
		}
		return SLJIT_SUCCESS;
	}

	/* General version. */
	if (FAST_IS_REG(dst)) {
		EMIT_MOV(compiler, dst, 0, src1, src1w);
		if (src2 == SLJIT_IMM) {
			BINARY_IMM(op_imm, op_mr, src2w, dst, 0);
		}
		else {
			inst = emit_x86_instruction(compiler, 1, dst, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
	}
	else {
		/* This version requires fewer memory writes. */
		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
		if (src2 == SLJIT_IMM) {
			BINARY_IMM(op_imm, op_mr, src2w, TMP_REG1, 0);
		}
		else {
			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
		EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
	}

	return SLJIT_SUCCESS;
}

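/* Same opcode packing as emit_cum_binary, but for SUB/SBB the operands
   cannot be swapped, so only the dst == src1 form has a shortcut. */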
static sljit_s32 emit_non_cum_binary(struct sljit_compiler *compiler,
	sljit_u32 op_types,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_u8* inst;
	sljit_u8 op_eax_imm = U8(op_types >> 24);
	sljit_u8 op_rm = U8((op_types >> 16) & 0xff);
	sljit_u8 op_mr = U8((op_types >> 8) & 0xff);
	sljit_u8 op_imm = U8(op_types & 0xff);

	if (dst == src1 && dstw == src1w) {
		if (src2 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
#else
			if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128)) {
#endif
				BINARY_EAX_IMM(op_eax_imm, src2w);
			}
			else {
				BINARY_IMM(op_imm, op_mr, src2w, dst, dstw);
			}
		}
		else if (FAST_IS_REG(dst)) {
			inst = emit_x86_instruction(compiler, 1, dst, dstw, src2, src2w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
		else if (FAST_IS_REG(src2)) {
			inst = emit_x86_instruction(compiler, 1, src2, src2w, dst, dstw);
			FAIL_IF(!inst);
			*inst = op_mr;
		}
		else {
			EMIT_MOV(compiler, TMP_REG1, 0, src2, src2w);
			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
			FAIL_IF(!inst);
			*inst = op_mr;
		}
		return SLJIT_SUCCESS;
	}

	/* General version. */
	if (FAST_IS_REG(dst) && dst != src2) {
		EMIT_MOV(compiler, dst, 0, src1, src1w);
		if (src2 == SLJIT_IMM) {
			BINARY_IMM(op_imm, op_mr, src2w, dst, 0);
		}
		else {
			inst = emit_x86_instruction(compiler, 1, dst, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
	}
	else {
		/* This version requires fewer memory writes. */
		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
		if (src2 == SLJIT_IMM) {
			BINARY_IMM(op_imm, op_mr, src2w, TMP_REG1, 0);
		}
		else {
			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
		EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
	}

	return SLJIT_SUCCESS;
}

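/* Emits a signed multiply using the three IMUL forms: reg <- reg/mem, and
   the immediate forms with an 8 bit or 32 bit immediate. Immediates outside
   the signed 32 bit range (64 bit mode only) are loaded into TMP_REG2 first. */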
static sljit_s32 emit_mul(struct sljit_compiler *compiler,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_u8* inst;
	sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

	/* Register destination. */
	if (dst_r == src1 && src2 != SLJIT_IMM) {
		FAIL_IF(emit_groupf(compiler, IMUL_r_rm, dst_r, src2, src2w));
	} else if (dst_r == src2 && src1 != SLJIT_IMM) {
		FAIL_IF(emit_groupf(compiler, IMUL_r_rm, dst_r, src1, src1w));
	} else if (src1 == SLJIT_IMM) {
		if (src2 == SLJIT_IMM) {
			EMIT_MOV(compiler, dst_r, 0, SLJIT_IMM, src2w);
			src2 = dst_r;
			src2w = 0;
		}

		if (src1w <= 127 && src1w >= -128) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = IMUL_r_rm_i8;

			FAIL_IF(emit_byte(compiler, U8(src1w)));
		}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		else {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = IMUL_r_rm_i32;
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
			FAIL_IF(!inst);
			INC_SIZE(4);
			sljit_unaligned_store_sw(inst, src1w);
		}
#else
		else if (IS_HALFWORD(src1w)) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = IMUL_r_rm_i32;
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
			FAIL_IF(!inst);
			INC_SIZE(4);
			sljit_unaligned_store_s32(inst, (sljit_s32)src1w);
		}
		else {
			if (dst_r != src2)
				EMIT_MOV(compiler, dst_r, 0, src2, src2w);
			FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src1w));
			FAIL_IF(emit_groupf(compiler, IMUL_r_rm, dst_r, TMP_REG2, 0));
		}
#endif
	}
	else if (src2 == SLJIT_IMM) {
		/* Note: src1 is NOT immediate. */

		if (src2w <= 127 && src2w >= -128) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
			FAIL_IF(!inst);
			*inst = IMUL_r_rm_i8;

			FAIL_IF(emit_byte(compiler, U8(src2w)));
		}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		else {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
			FAIL_IF(!inst);
			*inst = IMUL_r_rm_i32;

			inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
			FAIL_IF(!inst);
			INC_SIZE(4);
			sljit_unaligned_store_sw(inst, src2w);
		}
#else
		else if (IS_HALFWORD(src2w)) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
			FAIL_IF(!inst);
			*inst = IMUL_r_rm_i32;

			inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
			FAIL_IF(!inst);
			INC_SIZE(4);
			sljit_unaligned_store_s32(inst, (sljit_s32)src2w);
		} else {
			if (dst_r != src1)
				EMIT_MOV(compiler, dst_r, 0, src1, src1w);
			FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src2w));
			FAIL_IF(emit_groupf(compiler, IMUL_r_rm, dst_r, TMP_REG2, 0));
		}
#endif
	} else {
		/* Neither argument is immediate. */
		if (ADDRESSING_DEPENDS_ON(src2, dst_r))
			dst_r = TMP_REG1;
		EMIT_MOV(compiler, dst_r, 0, src1, src1w);
		FAIL_IF(emit_groupf(compiler, IMUL_r_rm, dst_r, src2, src2w));
	}

	if (dst & SLJIT_MEM)
		EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);

	return SLJIT_SUCCESS;
}

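/* Tries to emit an addition as LEA, which leaves the flags untouched;
   returns SLJIT_ERR_UNSUPPORTED when no reg+reg or reg+imm form applies
   and the caller must fall back to a normal ADD. */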
static sljit_s32 emit_lea_binary(struct sljit_compiler *compiler,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_u8* inst;
	sljit_s32 dst_r, done = 0;

	/* These cases are better handled the normal way. */
	if (dst == src1 && dstw == src1w)
		return SLJIT_ERR_UNSUPPORTED;
	if (dst == src2 && dstw == src2w)
		return SLJIT_ERR_UNSUPPORTED;

	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

	if (FAST_IS_REG(src1)) {
		if (FAST_IS_REG(src2)) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM2(src1, src2), 0);
			FAIL_IF(!inst);
			*inst = LEA_r_m;
			done = 1;
		}
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (src2 == SLJIT_IMM && (compiler->mode32 || IS_HALFWORD(src2w))) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src1), (sljit_s32)src2w);
#else
		if (src2 == SLJIT_IMM) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src1), src2w);
#endif
			FAIL_IF(!inst);
			*inst = LEA_r_m;
			done = 1;
		}
	}
	else if (FAST_IS_REG(src2)) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (src1 == SLJIT_IMM && (compiler->mode32 || IS_HALFWORD(src1w))) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src2), (sljit_s32)src1w);
#else
		if (src1 == SLJIT_IMM) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src2), src1w);
#endif
			FAIL_IF(!inst);
			*inst = LEA_r_m;
			done = 1;
		}
	}

	if (done) {
		if (dst_r == TMP_REG1)
			return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
		return SLJIT_SUCCESS;
	}
	return SLJIT_ERR_UNSUPPORTED;
}

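/* Emits CMP, preferring the short EAX/immediate encoding when src1 is
   SLJIT_R0 and the immediate does not fit in a signed byte. */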
static sljit_s32 emit_cmp_binary(struct sljit_compiler *compiler,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_u8* inst;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (src1 == SLJIT_R0 && src2 == SLJIT_IMM && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
#else
	if (src1 == SLJIT_R0 && src2 == SLJIT_IMM && (src2w > 127 || src2w < -128)) {
#endif
		BINARY_EAX_IMM(CMP_EAX_i32, src2w);
		return SLJIT_SUCCESS;
	}

	if (FAST_IS_REG(src1)) {
		if (src2 == SLJIT_IMM) {
			BINARY_IMM(CMP, CMP_rm_r, src2w, src1, 0);
		}
		else {
			inst = emit_x86_instruction(compiler, 1, src1, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = CMP_r_rm;
		}
		return SLJIT_SUCCESS;
	}

	if (FAST_IS_REG(src2) && src1 != SLJIT_IMM) {
		inst = emit_x86_instruction(compiler, 1, src2, 0, src1, src1w);
		FAIL_IF(!inst);
		*inst = CMP_rm_r;
		return SLJIT_SUCCESS;
	}

	if (src2 == SLJIT_IMM) {
		if (src1 == SLJIT_IMM) {
			EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
			src1 = TMP_REG1;
			src1w = 0;
		}
		BINARY_IMM(CMP, CMP_rm_r, src2w, src1, src1w);
	}
	else {
		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
		inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
		FAIL_IF(!inst);
		*inst = CMP_r_rm;
	}
	return SLJIT_SUCCESS;
}

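/* Emits TEST. TEST only has a reg/mem, reg encoding, but its flags are
   symmetric in the two operands, so both operand orders are tried before
   falling back to loading src1 into TMP_REG1. */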
static sljit_s32 emit_test_binary(struct sljit_compiler *compiler,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_u8* inst;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (src1 == SLJIT_R0 && src2 == SLJIT_IMM && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
#else
	if (src1 == SLJIT_R0 && src2 == SLJIT_IMM && (src2w > 127 || src2w < -128)) {
#endif
		BINARY_EAX_IMM(TEST_EAX_i32, src2w);
		return SLJIT_SUCCESS;
	}

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (src2 == SLJIT_R0 && src1 == SLJIT_IMM && (src1w > 127 || src1w < -128) && (compiler->mode32 || IS_HALFWORD(src1w))) {
#else
	if (src2 == SLJIT_R0 && src1 == SLJIT_IMM && (src1w > 127 || src1w < -128)) {
#endif
		BINARY_EAX_IMM(TEST_EAX_i32, src1w);
		return SLJIT_SUCCESS;
	}

	if (src1 != SLJIT_IMM) {
		if (src2 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			if (IS_HALFWORD(src2w) || compiler->mode32) {
				inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, src1, src1w);
				FAIL_IF(!inst);
				*inst = GROUP_F7;
			} else {
				FAIL_IF(emit_load_imm64(compiler, FAST_IS_REG(src1) ? TMP_REG2 : TMP_REG1, src2w));
				inst = emit_x86_instruction(compiler, 1, FAST_IS_REG(src1) ? TMP_REG2 : TMP_REG1, 0, src1, src1w);
				FAIL_IF(!inst);
				*inst = TEST_rm_r;
			}
#else
			inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, src1, src1w);
			FAIL_IF(!inst);
			*inst = GROUP_F7;
#endif
			return SLJIT_SUCCESS;
		}
		else if (FAST_IS_REG(src1)) {
			inst = emit_x86_instruction(compiler, 1, src1, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = TEST_rm_r;
			return SLJIT_SUCCESS;
		}
	}

	if (src2 != SLJIT_IMM) {
		if (src1 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			if (IS_HALFWORD(src1w) || compiler->mode32) {
				inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src1w, src2, src2w);
				FAIL_IF(!inst);
				*inst = GROUP_F7;
			}
			else {
				FAIL_IF(emit_load_imm64(compiler, TMP_REG1, src1w));
				inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
				FAIL_IF(!inst);
				*inst = TEST_rm_r;
			}
#else
			inst = emit_x86_instruction(compiler, 1, src1, src1w, src2, src2w);
			FAIL_IF(!inst);
			*inst = GROUP_F7;
#endif
			return SLJIT_SUCCESS;
		}
		else if (FAST_IS_REG(src2)) {
			inst = emit_x86_instruction(compiler, 1, src2, 0, src1, src1w);
			FAIL_IF(!inst);
			*inst = TEST_rm_r;
			return SLJIT_SUCCESS;
		}
	}

	EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
	if (src2 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (IS_HALFWORD(src2w) || compiler->mode32) {
			inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, TMP_REG1, 0);
			FAIL_IF(!inst);
			*inst = GROUP_F7;
		}
		else {
			FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src2w));
			inst = emit_x86_instruction(compiler, 1, TMP_REG2, 0, TMP_REG1, 0);
			FAIL_IF(!inst);
			*inst = TEST_rm_r;
		}
#else
		inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, TMP_REG1, 0);
		FAIL_IF(!inst);
		*inst = GROUP_F7;
#endif
	}
	else {
		inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
		FAIL_IF(!inst);
		*inst = TEST_rm_r;
	}
	return SLJIT_SUCCESS;
}

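/* Emits a shift/rotate. Variable shift counts must be in cl on x86, so when
   src2 is neither an immediate nor already SLJIT_PREF_SHIFT_REG (ecx), the
   current ecx value is saved in a temporary register or on the stack, ecx is
   loaded with the count, and the old value is restored afterwards. */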
static sljit_s32 emit_shift(struct sljit_compiler *compiler,
	sljit_u8 mode,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	sljit_s32 mode32;
#endif
	sljit_u8* inst;

	if (src2 == SLJIT_IMM || src2 == SLJIT_PREF_SHIFT_REG) {
		if (dst == src1 && dstw == src1w) {
			inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, dst, dstw);
			FAIL_IF(!inst);
			inst[1] |= mode;
			return SLJIT_SUCCESS;
		}
		if (dst == SLJIT_PREF_SHIFT_REG && src2 == SLJIT_PREF_SHIFT_REG) {
			EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
			inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
			FAIL_IF(!inst);
			inst[1] |= mode;
			EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
			return SLJIT_SUCCESS;
		}
		if (FAST_IS_REG(dst)) {
			EMIT_MOV(compiler, dst, 0, src1, src1w);
			inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, dst, 0);
			FAIL_IF(!inst);
			inst[1] |= mode;
			return SLJIT_SUCCESS;
		}

		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
		inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, TMP_REG1, 0);
		FAIL_IF(!inst);
		inst[1] |= mode;
		EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
		return SLJIT_SUCCESS;
	}

	if (dst == SLJIT_PREF_SHIFT_REG) {
		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
		inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
		FAIL_IF(!inst);
		inst[1] |= mode;
		return emit_mov(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
	}

	if (FAST_IS_REG(dst) && dst != src2 && dst != TMP_REG1 && !ADDRESSING_DEPENDS_ON(src2, dst)) {
		if (src1 != dst)
			EMIT_MOV(compiler, dst, 0, src1, src1w);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		mode32 = compiler->mode32;
		compiler->mode32 = 0;
#endif
		EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_PREF_SHIFT_REG, 0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		compiler->mode32 = mode32;
#endif
		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
		inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, dst, 0);
		FAIL_IF(!inst);
		inst[1] |= mode;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		compiler->mode32 = 0;
#endif
		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		compiler->mode32 = mode32;
#endif
		return SLJIT_SUCCESS;
	}

	/* This case is complex since ecx itself may be used for
	   addressing, which must be supported as well. */
	EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), 0, SLJIT_PREF_SHIFT_REG, 0);
#else /* !SLJIT_CONFIG_X86_32 */
	mode32 = compiler->mode32;
	compiler->mode32 = 0;
	EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_PREF_SHIFT_REG, 0);
	compiler->mode32 = mode32;
#endif /* SLJIT_CONFIG_X86_32 */

	EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
	inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
	FAIL_IF(!inst);
	inst[1] |= mode;

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, SLJIT_MEM1(SLJIT_SP), 0);
#else
	compiler->mode32 = 0;
	EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG2, 0);
	compiler->mode32 = mode32;
#endif /* SLJIT_CONFIG_X86_32 */

	if (dst != TMP_REG1)
		return emit_mov(compiler, dst, dstw, TMP_REG1, 0);

	return SLJIT_SUCCESS;
}

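/* Shift that must also produce flags. The CPU leaves the flags unchanged for
   a shift count of zero, so a zero immediate count is turned into a plain
   move (or an OR with 0 when flags are requested), and for variable counts
   the flags are produced by an explicit compare with 0, either on the source
   before the shift or on the register result after it. */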
static sljit_s32 emit_shift_with_flags(struct sljit_compiler *compiler,
	sljit_u8 mode, sljit_s32 set_flags,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	/* The CPU does not set flags if the shift count is 0. */
	if (src2 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		src2w &= compiler->mode32 ? 0x1f : 0x3f;
#else /* !SLJIT_CONFIG_X86_64 */
		src2w &= 0x1f;
#endif /* SLJIT_CONFIG_X86_64 */
		if (src2w != 0)
			return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w);

		if (!set_flags)
			return emit_mov(compiler, dst, dstw, src1, src1w);
		/* OR dst, src, 0 */
		return emit_cum_binary(compiler, BINARY_OPCODE(OR),
			dst, dstw, src1, src1w, SLJIT_IMM, 0);
	}

	if (!set_flags)
		return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w);

	if (!FAST_IS_REG(dst))
		FAIL_IF(emit_cmp_binary(compiler, src1, src1w, SLJIT_IMM, 0));

	FAIL_IF(emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w));

	if (FAST_IS_REG(dst))
		return emit_cmp_binary(compiler, dst, dstw, SLJIT_IMM, 0);
	return SLJIT_SUCCESS;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	CHECK_ERROR();
	CHECK(check_sljit_emit_op2(compiler, op, 0, dst, dstw, src1, src1w, src2, src2w));
	ADJUST_LOCAL_OFFSET(dst, dstw);
	ADJUST_LOCAL_OFFSET(src1, src1w);
	ADJUST_LOCAL_OFFSET(src2, src2w);

	CHECK_EXTRA_REGS(dst, dstw, (void)0);
	CHECK_EXTRA_REGS(src1, src1w, (void)0);
	CHECK_EXTRA_REGS(src2, src2w, (void)0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = op & SLJIT_32;
#endif

	switch (GET_OPCODE(op)) {
	case SLJIT_ADD:
		if (!HAS_FLAGS(op)) {
			if (emit_lea_binary(compiler, dst, dstw, src1, src1w, src2, src2w) != SLJIT_ERR_UNSUPPORTED)
				return compiler->error;
		}
		return emit_cum_binary(compiler, BINARY_OPCODE(ADD),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_ADDC:
		return emit_cum_binary(compiler, BINARY_OPCODE(ADC),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_SUB:
		if (src1 == SLJIT_IMM && src1w == 0)
			return emit_unary(compiler, NEG_rm, dst, dstw, src2, src2w);

		if (!HAS_FLAGS(op)) {
			if (src2 == SLJIT_IMM && emit_lea_binary(compiler, dst, dstw, src1, src1w, SLJIT_IMM, -src2w) != SLJIT_ERR_UNSUPPORTED)
				return compiler->error;
			if (FAST_IS_REG(dst) && src2 == dst) {
				FAIL_IF(emit_non_cum_binary(compiler, BINARY_OPCODE(SUB), dst, 0, dst, 0, src1, src1w));
				return emit_unary(compiler, NEG_rm, dst, 0, dst, 0);
			}
		}

		return emit_non_cum_binary(compiler, BINARY_OPCODE(SUB),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_SUBC:
		return emit_non_cum_binary(compiler, BINARY_OPCODE(SBB),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_MUL:
		return emit_mul(compiler, dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_AND:
		return emit_cum_binary(compiler, BINARY_OPCODE(AND),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_OR:
		return emit_cum_binary(compiler, BINARY_OPCODE(OR),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_XOR:
		if (!HAS_FLAGS(op)) {
			if (src2 == SLJIT_IMM && src2w == -1)
				return emit_unary(compiler, NOT_rm, dst, dstw, src1, src1w);
			if (src1 == SLJIT_IMM && src1w == -1)
				return emit_unary(compiler, NOT_rm, dst, dstw, src2, src2w);
		}

		return emit_cum_binary(compiler, BINARY_OPCODE(XOR),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_SHL:
	case SLJIT_MSHL:
		return emit_shift_with_flags(compiler, SHL, HAS_FLAGS(op),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_LSHR:
	case SLJIT_MLSHR:
		return emit_shift_with_flags(compiler, SHR, HAS_FLAGS(op),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_ASHR:
	case SLJIT_MASHR:
		return emit_shift_with_flags(compiler, SAR, HAS_FLAGS(op),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_ROTL:
		return emit_shift_with_flags(compiler, ROL, 0,
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_ROTR:
		return emit_shift_with_flags(compiler, ROR, 0,
			dst, dstw, src1, src1w, src2, src2w);
	}

	return SLJIT_SUCCESS;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_s32 opcode = GET_OPCODE(op);

	CHECK_ERROR();
	CHECK(check_sljit_emit_op2(compiler, op, 1, 0, 0, src1, src1w, src2, src2w));

	if (opcode != SLJIT_SUB && opcode != SLJIT_AND) {
		SLJIT_SKIP_CHECKS(compiler);
		return sljit_emit_op2(compiler, op, TMP_REG1, 0, src1, src1w, src2, src2w);
	}

	ADJUST_LOCAL_OFFSET(src1, src1w);
	ADJUST_LOCAL_OFFSET(src2, src2w);

	CHECK_EXTRA_REGS(src1, src1w, (void)0);
	CHECK_EXTRA_REGS(src2, src2w, (void)0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = op & SLJIT_32;
#endif

	if (opcode == SLJIT_SUB)
		return emit_cmp_binary(compiler, src1, src1w, src2, src2w);

	return emit_test_binary(compiler, src1, src1w, src2, src2w);
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2r(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst_reg,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_u8* inst;
	sljit_sw dstw = 0;

	CHECK_ERROR();
	CHECK(check_sljit_emit_op2r(compiler, op, dst_reg, src1, src1w, src2, src2w));
	ADJUST_LOCAL_OFFSET(src1, src1w);
	ADJUST_LOCAL_OFFSET(src2, src2w);

	CHECK_EXTRA_REGS(dst_reg, dstw, (void)0);
	CHECK_EXTRA_REGS(src1, src1w, (void)0);
	CHECK_EXTRA_REGS(src2, src2w, (void)0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = op & SLJIT_32;
#endif

	switch (GET_OPCODE(op)) {
	case SLJIT_MULADD:
		FAIL_IF(emit_mul(compiler, TMP_REG1, 0, src1, src1w, src2, src2w));
		inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst_reg, dstw);
		FAIL_IF(!inst);
		*inst = ADD_rm_r;
		return SLJIT_SUCCESS;
	}

	return SLJIT_SUCCESS;
}

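/* Shifts bits from src2_reg into src1_reg (a funnel shift) using SHLD/SHRD;
   when both sources are the same register this degenerates to a rotate.
   Most of the code below shuffles values so that a variable count ends up
   in ecx while the original ecx and any overlapping registers survive. */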
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst_reg,
	sljit_s32 src1_reg,
	sljit_s32 src2_reg,
	sljit_s32 src3, sljit_sw src3w)
{
	sljit_s32 is_rotate, is_left, move_src1;
	sljit_u8* inst;
	sljit_sw src1w = 0;
	sljit_sw dstw = 0;
	/* The whole register must be saved even for 32-bit operations. */
	sljit_u8 restore_ecx = 0;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	sljit_sw src2w = 0;
	sljit_s32 restore_sp4 = 0;
#endif /* SLJIT_CONFIG_X86_32 */

	CHECK_ERROR();
	CHECK(check_sljit_emit_shift_into(compiler, op, dst_reg, src1_reg, src2_reg, src3, src3w));
	ADJUST_LOCAL_OFFSET(src3, src3w);

	CHECK_EXTRA_REGS(dst_reg, dstw, (void)0);
	CHECK_EXTRA_REGS(src3, src3w, (void)0);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = op & SLJIT_32;
#endif /* SLJIT_CONFIG_X86_64 */

	if (src3 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		src3w &= 0x1f;
#else /* !SLJIT_CONFIG_X86_32 */
		src3w &= (op & SLJIT_32) ? 0x1f : 0x3f;
#endif /* SLJIT_CONFIG_X86_32 */

		if (src3w == 0)
			return SLJIT_SUCCESS;
	}

	is_left = (GET_OPCODE(op) == SLJIT_SHL || GET_OPCODE(op) == SLJIT_MSHL);

	is_rotate = (src1_reg == src2_reg);
	CHECK_EXTRA_REGS(src1_reg, src1w, (void)0);
	CHECK_EXTRA_REGS(src2_reg, src2w, (void)0);

	if (is_rotate)
		return emit_shift(compiler, is_left ? ROL : ROR, dst_reg, dstw, src1_reg, src1w, src3, src3w);

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	if (src2_reg & SLJIT_MEM) {
		EMIT_MOV(compiler, TMP_REG1, 0, src2_reg, src2w);
		src2_reg = TMP_REG1;
	}
#endif /* SLJIT_CONFIG_X86_32 */

	if (dst_reg == SLJIT_PREF_SHIFT_REG && src3 != SLJIT_IMM && (src3 != SLJIT_PREF_SHIFT_REG || src1_reg != SLJIT_PREF_SHIFT_REG)) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		EMIT_MOV(compiler, TMP_REG1, 0, src1_reg, src1w);
		src1_reg = TMP_REG1;
		src1w = 0;
#else /* !SLJIT_CONFIG_X86_64 */
		if (src2_reg != TMP_REG1) {
			EMIT_MOV(compiler, TMP_REG1, 0, src1_reg, src1w);
			src1_reg = TMP_REG1;
			src1w = 0;
		} else if ((src1_reg & SLJIT_MEM) || src1_reg == SLJIT_PREF_SHIFT_REG) {
			restore_sp4 = (src3 == SLJIT_R0) ? SLJIT_R1 : SLJIT_R0;
			EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), sizeof(sljit_s32), restore_sp4, 0);
			EMIT_MOV(compiler, restore_sp4, 0, src1_reg, src1w);
			src1_reg = restore_sp4;
			src1w = 0;
		} else {
			EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), sizeof(sljit_s32), src1_reg, 0);
			restore_sp4 = src1_reg;
		}
#endif /* SLJIT_CONFIG_X86_64 */

		if (src3 != SLJIT_PREF_SHIFT_REG)
			EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src3, src3w);
	} else {
		if (src2_reg == SLJIT_PREF_SHIFT_REG && src3 != SLJIT_IMM && src3 != SLJIT_PREF_SHIFT_REG) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			compiler->mode32 = 0;
#endif /* SLJIT_CONFIG_X86_64 */
			EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_PREF_SHIFT_REG, 0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			compiler->mode32 = op & SLJIT_32;
#endif /* SLJIT_CONFIG_X86_64 */
			src2_reg = TMP_REG1;
			restore_ecx = 1;
		}

		move_src1 = 0;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (dst_reg != src1_reg) {
			if (dst_reg != src3) {
				EMIT_MOV(compiler, dst_reg, 0, src1_reg, src1w);
				src1_reg = dst_reg;
				src1w = 0;
			} else
				move_src1 = 1;
		}
#else /* !SLJIT_CONFIG_X86_64 */
		if (dst_reg & SLJIT_MEM) {
			if (src2_reg != TMP_REG1) {
				EMIT_MOV(compiler, TMP_REG1, 0, src1_reg, src1w);
				src1_reg = TMP_REG1;
				src1w = 0;
			} else if ((src1_reg & SLJIT_MEM) || src1_reg == SLJIT_PREF_SHIFT_REG) {
				restore_sp4 = (src3 == SLJIT_R0) ? SLJIT_R1 : SLJIT_R0;
				EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), sizeof(sljit_s32), restore_sp4, 0);
				EMIT_MOV(compiler, restore_sp4, 0, src1_reg, src1w);
				src1_reg = restore_sp4;
				src1w = 0;
			} else {
				EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), sizeof(sljit_s32), src1_reg, 0);
				restore_sp4 = src1_reg;
			}
		} else if (dst_reg != src1_reg) {
			if (dst_reg != src3) {
				EMIT_MOV(compiler, dst_reg, 0, src1_reg, src1w);
				src1_reg = dst_reg;
				src1w = 0;
			} else
				move_src1 = 1;
		}
#endif /* SLJIT_CONFIG_X86_64 */

		if (src3 != SLJIT_IMM && src3 != SLJIT_PREF_SHIFT_REG) {
			if (!restore_ecx) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
				compiler->mode32 = 0;
				EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_PREF_SHIFT_REG, 0);
				compiler->mode32 = op & SLJIT_32;
				restore_ecx = 1;
#else /* !SLJIT_CONFIG_X86_64 */
				if (src1_reg != TMP_REG1 && src2_reg != TMP_REG1) {
					EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_PREF_SHIFT_REG, 0);
					restore_ecx = 1;
				} else {
					EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), 0, SLJIT_PREF_SHIFT_REG, 0);
					restore_ecx = 2;
				}
#endif /* SLJIT_CONFIG_X86_64 */
			}
			EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src3, src3w);
		}

		if (move_src1) {
			EMIT_MOV(compiler, dst_reg, 0, src1_reg, src1w);
			src1_reg = dst_reg;
			src1w = 0;
		}
	}

	inst = emit_x86_instruction(compiler, 2, src2_reg, 0, src1_reg, src1w);
	FAIL_IF(!inst);
	inst[0] = GROUP_0F;

	if (src3 == SLJIT_IMM) {
		inst[1] = U8((is_left ? SHLD : SHRD) - 1);

		/* The immediate argument is emitted separately. */
		FAIL_IF(emit_byte(compiler, U8(src3w)));
	} else
		inst[1] = U8(is_left ? SHLD : SHRD);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (restore_ecx) {
		compiler->mode32 = 0;
		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
	}

	if (src1_reg != dst_reg) {
		compiler->mode32 = op & SLJIT_32;
		return emit_mov(compiler, dst_reg, dstw, src1_reg, 0);
	}
#else /* !SLJIT_CONFIG_X86_64 */
	if (restore_ecx)
		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, restore_ecx == 1 ? TMP_REG1 : SLJIT_MEM1(SLJIT_SP), 0);

	if (src1_reg != dst_reg)
		EMIT_MOV(compiler, dst_reg, dstw, src1_reg, 0);

	if (restore_sp4)
		return emit_mov(compiler, restore_sp4, 0, SLJIT_MEM1(SLJIT_SP), sizeof(sljit_s32));
#endif /* SLJIT_CONFIG_X86_32 */

	return SLJIT_SUCCESS;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2_shift(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w,
	sljit_sw shift_arg)
{
	sljit_s32 dst_r;
	int use_lea = 0;
	sljit_u8* inst;

	CHECK_ERROR();
	CHECK(check_sljit_emit_op2_shift(compiler, op, dst, dstw, src1, src1w, src2, src2w, shift_arg));
	ADJUST_LOCAL_OFFSET(dst, dstw);
	ADJUST_LOCAL_OFFSET(src1, src1w);
	ADJUST_LOCAL_OFFSET(src2, src2w);

	shift_arg &= (sljit_sw)((sizeof(sljit_sw) * 8) - 1);

	if (src2 == SLJIT_IMM) {
		src2w = src2w << shift_arg;
		shift_arg = 0;
	}

	if (shift_arg == 0) {
		SLJIT_SKIP_CHECKS(compiler);
		return sljit_emit_op2(compiler, GET_OPCODE(op), dst, dstw, src1, src1w, src2, src2w);
	}

	CHECK_EXTRA_REGS(dst, dstw, (void)0);
	CHECK_EXTRA_REGS(src1, src1w, (void)0);
	CHECK_EXTRA_REGS(src2, src2w, (void)0);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 0;
#endif

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (shift_arg <= 3) {
		use_lea = 1;
		if (!FAST_IS_REG(src2)) {
			EMIT_MOV(compiler, TMP_REG1, 0, src2, src2w);
			src2 = TMP_REG1;
		}

		if (!FAST_IS_REG(src1)) {
			EMIT_MOV(compiler, src2 == TMP_REG1 ? TMP_REG2 : TMP_REG1, 0, src1, src1w);
			src1 = src2 == TMP_REG1 ? TMP_REG2 : TMP_REG1;
		}
	}
#else /* !SLJIT_CONFIG_X86_64 */
	if (shift_arg <= 3 && (FAST_IS_REG(src1) || (FAST_IS_REG(src2) && src2 != TMP_REG1))) {
		use_lea = 1;
		if (!FAST_IS_REG(src2)) {
			EMIT_MOV(compiler, TMP_REG1, 0, src2, src2w);
			src2 = TMP_REG1;
		}

		if (!FAST_IS_REG(src1)) {
			EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
			src1 = TMP_REG1;
		}
	}
#endif /* SLJIT_CONFIG_X86_64 */

	if (use_lea) {
		dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

		inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM2(src1, src2), shift_arg);
		FAIL_IF(!inst);
		*inst = LEA_r_m;

		if (!FAST_IS_REG(dst))
			return emit_mov(compiler, dst, dstw, dst_r, 0);

		return SLJIT_SUCCESS;
	}

	if ((op & SLJIT_SRC2_UNDEFINED) != 0 && FAST_IS_REG(src2) && src1 != src2)
		dst_r = src2;
	else {
		dst_r = FAST_IS_REG(dst) && (dst != src1) ? dst : TMP_REG1;

		if (src2 != dst_r) {
			EMIT_MOV(compiler, dst_r, 0, src2, src2w);
		}
	}

	inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, shift_arg, dst_r, 0);
	FAIL_IF(!inst);
	inst[1] |= SHL;

	if (dst == src1 && dstw == src1w) {
		inst = emit_x86_instruction(compiler, 1, dst_r, 0, dst, dstw);
		FAIL_IF(!inst);
		*inst = ADD_rm_r;
		return SLJIT_SUCCESS;
	}

	if (FAST_IS_REG(dst) && FAST_IS_REG(src1)) {
		inst = emit_x86_instruction(compiler, 1, dst, 0, SLJIT_MEM2(src1, dst_r), 0);
		FAIL_IF(!inst);
		*inst = LEA_r_m;
		return SLJIT_SUCCESS;
	}

	if (src1 == SLJIT_IMM) {
		BINARY_IMM(ADD, ADD_rm_r, src1w, dst_r, 0);
	} else {
		inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
		FAIL_IF(!inst);
		*inst = ADD_r_rm;
	}

	if (dst != dst_r)
		return emit_mov(compiler, dst, dstw, dst_r, 0);

	return SLJIT_SUCCESS;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 src, sljit_sw srcw)
{
	CHECK_ERROR();
	CHECK(check_sljit_emit_op_src(compiler, op, src, srcw));
	ADJUST_LOCAL_OFFSET(src, srcw);

	CHECK_EXTRA_REGS(src, srcw, (void)0);

	switch (op) {
	case SLJIT_FAST_RETURN:
		return emit_fast_return(compiler, src, srcw);
	case SLJIT_SKIP_FRAMES_BEFORE_FAST_RETURN:
		/* Don't adjust shadow stack if it isn't enabled. */
		if (!cpu_has_shadow_stack ())
			return SLJIT_SUCCESS;
		return adjust_shadow_stack(compiler, src, srcw);
	case SLJIT_PREFETCH_L1:
	case SLJIT_PREFETCH_L2:
	case SLJIT_PREFETCH_L3:
	case SLJIT_PREFETCH_ONCE:
		return emit_prefetch(compiler, op, src, srcw);
	}

	return SLJIT_SUCCESS;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_dst(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw)
{
	CHECK_ERROR();
	CHECK(check_sljit_emit_op_dst(compiler, op, dst, dstw));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	CHECK_EXTRA_REGS(dst, dstw, (void)0);

	switch (op) {
	case SLJIT_FAST_ENTER:
		return emit_fast_enter(compiler, dst, dstw);
	case SLJIT_GET_RETURN_ADDRESS:
		return sljit_emit_get_return_address(compiler, dst, dstw);
	}

	return SLJIT_SUCCESS;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 type, sljit_s32 reg)
{
	CHECK_REG_INDEX(check_sljit_get_register_index(type, reg));

	if (type == SLJIT_GP_REGISTER) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (reg >= SLJIT_R3 && reg <= SLJIT_R8)
			return -1;
#endif /* SLJIT_CONFIG_X86_32 */
		return reg_map[reg];
	}

	if (type != SLJIT_FLOAT_REGISTER && type != SLJIT_SIMD_REG_128 && type != SLJIT_SIMD_REG_256 && type != SLJIT_SIMD_REG_512)
		return -1;

	return freg_map[reg];
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
	void *instruction, sljit_u32 size)
{
	sljit_u8 *inst;

	CHECK_ERROR();
	CHECK(check_sljit_emit_op_custom(compiler, instruction, size));

	inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!inst);
	INC_SIZE(size);
	SLJIT_MEMCPY(inst, instruction, size);
	return SLJIT_SUCCESS;
}

/* --------------------------------------------------------------------- */
/*  Floating point operators                                             */
/* --------------------------------------------------------------------- */

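/* Constants for the SSE2 float negate/abs implementations: sign bit masks
   (for XORPD) and all-but-sign masks (for ANDPD), in single and double
   precision variants. Each constant occupies a 16 byte slot so it can be
   used as an aligned packed memory operand. */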
/* Alignment(3) + 4 * 16 bytes. */
static sljit_u32 sse2_data[3 + (4 * 4)];
static sljit_u32 *sse2_buffer;

static void init_compiler(void)
{
	get_cpu_features();

	/* Align to 16 bytes. */
	sse2_buffer = (sljit_u32*)(((sljit_uw)sse2_data + 15) & ~(sljit_uw)0xf);

	/* Single precision constants (each constant is 16 bytes long). */
	sse2_buffer[0] = 0x80000000;
	sse2_buffer[4] = 0x7fffffff;
	/* Double precision constants (each constant is 16 bytes long). */
	sse2_buffer[8] = 0;
	sse2_buffer[9] = 0x80000000;
	sse2_buffer[12] = 0xffffffff;
	sse2_buffer[13] = 0x7fffffff;
}

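/* Helper for two byte (0F xx) opcodes: the low byte of 'op' is the second
   opcode byte, while the remaining bits carry prefix/operand flags for
   emit_x86_instruction. */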
static sljit_s32 emit_groupf(struct sljit_compiler *compiler,
	sljit_uw op,
	sljit_s32 dst, sljit_s32 src, sljit_sw srcw)
{
	sljit_u8 *inst = emit_x86_instruction(compiler, 2 | (op & ~(sljit_uw)0xff), dst, 0, src, srcw);
	FAIL_IF(!inst);
	inst[0] = GROUP_0F;
	inst[1] = op & 0xff;
	return SLJIT_SUCCESS;
}

static sljit_s32 emit_groupf_ext(struct sljit_compiler *compiler,
	sljit_uw op,
	sljit_s32 dst, sljit_s32 src, sljit_sw srcw)
{
	sljit_u8 *inst;

	SLJIT_ASSERT((op & EX86_SSE2) && ((op & VEX_OP_0F38) || (op & VEX_OP_0F3A)));

	inst = emit_x86_instruction(compiler, 3 | (op & ~((sljit_uw)0xff | VEX_OP_0F38 | VEX_OP_0F3A)), dst, 0, src, srcw);
	FAIL_IF(!inst);
	inst[0] = GROUP_0F;
	inst[1] = U8((op & VEX_OP_0F38) ? 0x38 : 0x3A);
	inst[2] = op & 0xff;
	return SLJIT_SUCCESS;
}

static SLJIT_INLINE sljit_s32 emit_sse2_load(struct sljit_compiler *compiler,
	sljit_s32 single, sljit_s32 dst, sljit_s32 src, sljit_sw srcw)
{
	return emit_groupf(compiler, MOVSD_x_xm | (single ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2, dst, src, srcw);
}

static SLJIT_INLINE sljit_s32 emit_sse2_store(struct sljit_compiler *compiler,
	sljit_s32 single, sljit_s32 dst, sljit_sw dstw, sljit_s32 src)
{
	return emit_groupf(compiler, MOVSD_xm_x | (single ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2, src, dst, dstw);
}

static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_s32 dst_r;

	CHECK_EXTRA_REGS(dst, dstw, (void)0);
	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (GET_OPCODE(op) == SLJIT_CONV_SW_FROM_F64)
		compiler->mode32 = 0;
#endif

	FAIL_IF(emit_groupf(compiler, CVTTSD2SI_r_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2_OP2, dst_r, src, srcw));

	if (dst & SLJIT_MEM)
		return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
	return SLJIT_SUCCESS;
}

static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;

	CHECK_EXTRA_REGS(src, srcw, (void)0);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_SW)
		compiler->mode32 = 0;
#endif

	if (src == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
			srcw = (sljit_s32)srcw;
#endif
		EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
		src = TMP_REG1;
		srcw = 0;
	}

	FAIL_IF(emit_groupf(compiler, CVTSI2SD_x_rm | EX86_SELECT_F2_F3(op) | EX86_SSE2_OP1, dst_r, src, srcw));

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif
	if (dst_r == TMP_FREG)
		return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
	return SLJIT_SUCCESS;
}

static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	switch (GET_FLAG_TYPE(op)) {
	case SLJIT_ORDERED_EQUAL:
		/* Also: SLJIT_UNORDERED_OR_NOT_EQUAL */
		FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src1, src1w));
		FAIL_IF(emit_groupf(compiler, CMPS_x_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2, TMP_FREG, src2, src2w));

		/* EQ */
		FAIL_IF(emit_byte(compiler, 0));

		src1 = TMP_FREG;
		src2 = TMP_FREG;
		src2w = 0;
		break;

	case SLJIT_ORDERED_LESS:
	case SLJIT_UNORDERED_OR_GREATER:
		/* Also: SLJIT_UNORDERED_OR_GREATER_EQUAL, SLJIT_ORDERED_LESS_EQUAL */
		if (!FAST_IS_REG(src2)) {
			FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src2, src2w));
			src2 = TMP_FREG;
		}

		return emit_groupf(compiler, UCOMISD_x_xm | EX86_SELECT_66(op) | EX86_SSE2, src2, src1, src1w);
	}

	if (!FAST_IS_REG(src1)) {
		FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src1, src1w));
		src1 = TMP_FREG;
	}

	return emit_groupf(compiler, UCOMISD_x_xm | EX86_SELECT_66(op) | EX86_SSE2, src1, src2, src2w);
}

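/* Emits MOV_F64 / CONV_F64_FROM_F32 / NEG_F64 / ABS_F64. For a register
   destination, NEG/ABS builds its bit mask in a register rather than loading
   it from memory: PCMPEQD sets every bit, then a logical right shift by one
   yields the all-but-sign (ABS) mask, while a left shift by 31 or 63 yields
   the sign-bit (NEG) mask. */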
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_s32 dst_r;
	sljit_u8 *inst;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif

	CHECK_ERROR();
	SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);

	if (GET_OPCODE(op) == SLJIT_MOV_F64) {
		if (FAST_IS_REG(dst))
			return emit_sse2_load(compiler, op & SLJIT_32, dst, src, srcw);
		if (FAST_IS_REG(src))
			return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, src);
		FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src, srcw));
		return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
	}

	if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32) {
		dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;
		if (FAST_IS_REG(src)) {
			/* We overwrite the high bits of the source. From SLJIT's point of view,
			   this is not an issue.
			   Note: In SSE3, we could also use MOVDDUP and MOVSLDUP. */
			FAIL_IF(emit_groupf(compiler, UNPCKLPD_x_xm | ((op & SLJIT_32) ? EX86_PREF_66 : 0) | EX86_SSE2, src, src, 0));
		} else {
			FAIL_IF(emit_sse2_load(compiler, !(op & SLJIT_32), TMP_FREG, src, srcw));
			src = TMP_FREG;
		}

		FAIL_IF(emit_groupf(compiler, CVTPD2PS_x_xm | ((op & SLJIT_32) ? EX86_PREF_66 : 0) | EX86_SSE2, dst_r, src, 0));
		if (dst_r == TMP_FREG)
			return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
		return SLJIT_SUCCESS;
	}

	if (FAST_IS_REG(dst)) {
		dst_r = (dst == src) ? TMP_FREG : dst;

		if (src & SLJIT_MEM)
			FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src, srcw));

		FAIL_IF(emit_groupf(compiler, PCMPEQD_x_xm | EX86_PREF_66 | EX86_SSE2, dst_r, dst_r, 0));

		inst = emit_x86_instruction(compiler, 2 | EX86_PREF_66 | EX86_SSE2_OP2, 0, 0, dst_r, 0);
		FAIL_IF(!inst);
		inst[0] = GROUP_0F;
		/* Same as PSRLD_x / PSRLQ_x */
		inst[1] = (op & SLJIT_32) ? PSLLD_x_i8 : PSLLQ_x_i8;

		if (GET_OPCODE(op) == SLJIT_ABS_F64) {
			inst[2] |= 2 << 3;
			FAIL_IF(emit_byte(compiler, 1));
		} else {
			inst[2] |= 6 << 3;
			FAIL_IF(emit_byte(compiler, ((op & SLJIT_32) ? 31 : 63)));
		}

		if (dst_r != TMP_FREG)
			dst_r = (src & SLJIT_MEM) ? TMP_FREG : src;
		return emit_groupf(compiler, (GET_OPCODE(op) == SLJIT_NEG_F64 ? XORPD_x_xm : ANDPD_x_xm) | EX86_SSE2, dst, dst_r, 0);
	}

	FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src, srcw));

	switch (GET_OPCODE(op)) {
	case SLJIT_NEG_F64:
		FAIL_IF(emit_groupf(compiler, XORPD_x_xm | EX86_SELECT_66(op) | EX86_SSE2, TMP_FREG, SLJIT_MEM0(), (sljit_sw)((op & SLJIT_32) ? sse2_buffer : sse2_buffer + 8)));
		break;

	case SLJIT_ABS_F64:
		FAIL_IF(emit_groupf(compiler, ANDPD_x_xm | EX86_SELECT_66(op) | EX86_SSE2, TMP_FREG, SLJIT_MEM0(), (sljit_sw)((op & SLJIT_32) ? sse2_buffer + 4 : sse2_buffer + 12)));
		break;
	}

	return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_s32 dst_r;

	CHECK_ERROR();
	CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
	ADJUST_LOCAL_OFFSET(dst, dstw);
	ADJUST_LOCAL_OFFSET(src1, src1w);
	ADJUST_LOCAL_OFFSET(src2, src2w);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif

	if (FAST_IS_REG(dst)) {
		dst_r = dst;
		if (dst == src1)
			; /* Do nothing here. */
		else if (dst == src2 && (GET_OPCODE(op) == SLJIT_ADD_F64 || GET_OPCODE(op) == SLJIT_MUL_F64)) {
			/* Swap arguments. */
			src2 = src1;
			src2w = src1w;
		} else if (dst != src2)
			FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, dst_r, src1, src1w));
		else {
			dst_r = TMP_FREG;
			FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src1, src1w));
		}
	} else {
		dst_r = TMP_FREG;
		FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src1, src1w));
	}

	switch (GET_OPCODE(op)) {
	case SLJIT_ADD_F64:
		FAIL_IF(emit_groupf(compiler, ADDSD_x_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2, dst_r, src2, src2w));
		break;

	case SLJIT_SUB_F64:
		FAIL_IF(emit_groupf(compiler, SUBSD_x_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2, dst_r, src2, src2w));
		break;

	case SLJIT_MUL_F64:
		FAIL_IF(emit_groupf(compiler, MULSD_x_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2, dst_r, src2, src2w));
		break;

	case SLJIT_DIV_F64:
		FAIL_IF(emit_groupf(compiler, DIVSD_x_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2, dst_r, src2, src2w));
		break;
	}

	if (dst_r != dst)
		return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
	return SLJIT_SUCCESS;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2r(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst_freg,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_uw pref;

	CHECK_ERROR();
	CHECK(check_sljit_emit_fop2r(compiler, op, dst_freg, src1, src1w, src2, src2w));
	ADJUST_LOCAL_OFFSET(src1, src1w);
	ADJUST_LOCAL_OFFSET(src2, src2w);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif

	if (dst_freg == src1) {
		FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src2, src2w));
		pref = EX86_SELECT_66(op) | EX86_SSE2;
		FAIL_IF(emit_groupf(compiler, XORPD_x_xm | pref, TMP_FREG, src1, src1w));
		FAIL_IF(emit_groupf(compiler, ANDPD_x_xm | pref, TMP_FREG, SLJIT_MEM0(), (sljit_sw)((op & SLJIT_32) ? sse2_buffer : sse2_buffer + 8)));
		return emit_groupf(compiler, XORPD_x_xm | pref, dst_freg, TMP_FREG, 0);
	}

	if (src1 & SLJIT_MEM) {
		FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src1, src1w));
		src1 = TMP_FREG;
		src1w = 0;
	}

	if (dst_freg != src2)
		FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, dst_freg, src2, src2w));

	pref = EX86_SELECT_66(op) | EX86_SSE2;
	FAIL_IF(emit_groupf(compiler, XORPD_x_xm | pref, dst_freg, src1, src1w));
	FAIL_IF(emit_groupf(compiler, ANDPD_x_xm | pref, dst_freg, SLJIT_MEM0(), (sljit_sw)((op & SLJIT_32) ? sse2_buffer : sse2_buffer + 8)));
	return emit_groupf(compiler, XORPD_x_xm | pref, dst_freg, src1, src1w);
}

/* --------------------------------------------------------------------- */
/* Conditional instructions */
/* --------------------------------------------------------------------- */

SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler)
{
	sljit_u8 *inst;
	struct sljit_label *label;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_label(compiler));

	if (compiler->last_label && compiler->last_label->size == compiler->size)
		return compiler->last_label;

	label = (struct sljit_label*)ensure_abuf(compiler, sizeof(struct sljit_label));
	PTR_FAIL_IF(!label);
	set_label(label, compiler);

	inst = (sljit_u8*)ensure_buf(compiler, 1);
	PTR_FAIL_IF(!inst);
	inst[0] = SLJIT_INST_LABEL;

	return label;
}

SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_aligned_label(struct sljit_compiler *compiler,
	sljit_s32 alignment, struct sljit_read_only_buffer *buffers)
{
	sljit_uw mask, size;
	sljit_u8 *inst;
	struct sljit_label *label;
	struct sljit_label *next_label;
	struct sljit_extended_label *ext_label;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_aligned_label(compiler, alignment, buffers));

	sljit_reset_read_only_buffers(buffers);

	if (alignment <= SLJIT_LABEL_ALIGN_1) {
		SLJIT_SKIP_CHECKS(compiler);
		label = sljit_emit_label(compiler);
		PTR_FAIL_IF(!label);
	} else {
		/* The used space is filled with NOPs. */
		mask = ((sljit_uw)1 << alignment) - 1;
		compiler->size += mask;

		inst = (sljit_u8*)ensure_buf(compiler, 1);
		PTR_FAIL_IF(!inst);
		inst[0] = SLJIT_INST_LABEL;

		ext_label = (struct sljit_extended_label*)ensure_abuf(compiler, sizeof(struct sljit_extended_label));
		PTR_FAIL_IF(!ext_label);
		set_extended_label(ext_label, compiler, SLJIT_LABEL_ALIGNED, mask);
		label = &ext_label->label;
	}

	if (buffers == NULL)
		return label;

	next_label = label;

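	/* Note: each read-only buffer is tagged with a label (so its final
	   address can be resolved after code generation) and its contents are
	   reserved as NOP filler, emitted in batches of four bytes. */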
	while (1) {
		buffers->u.label = next_label;
		size = buffers->size;

		while (size >= 4) {
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
			PTR_FAIL_IF(!inst);
			INC_SIZE(4);
			inst[0] = NOP;
			inst[1] = NOP;
			inst[2] = NOP;
			inst[3] = NOP;
			size -= 4;
		}

		if (size > 0) {
			inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
			PTR_FAIL_IF(!inst);
			INC_SIZE(size);

			do {
				*inst++ = NOP;
			} while (--size != 0);
		}

		buffers = buffers->next;

		if (buffers == NULL)
			break;

		SLJIT_SKIP_CHECKS(compiler);
		next_label = sljit_emit_label(compiler);
		PTR_FAIL_IF(!next_label);
	}

	return label;
}

SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
{
	sljit_u8 *inst;
	struct sljit_jump *jump;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_jump(compiler, type));

	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
	PTR_FAIL_IF_NULL(jump);
	set_jump(jump, compiler, (sljit_u32)((type & SLJIT_REWRITABLE_JUMP) | ((type & 0xff) << TYPE_SHIFT)));
	type &= 0xff;

	jump->addr = compiler->size;
	/* Worst case size. */
	compiler->size += (type >= SLJIT_JUMP) ? JUMP_MAX_SIZE : CJUMP_MAX_SIZE;
	inst = (sljit_u8*)ensure_buf(compiler, 1);
	PTR_FAIL_IF_NULL(inst);

	inst[0] = SLJIT_INST_JUMP;
	return jump;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
{
	sljit_u8 *inst;
	struct sljit_jump *jump;

	CHECK_ERROR();
	CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
	ADJUST_LOCAL_OFFSET(src, srcw);

	CHECK_EXTRA_REGS(src, srcw, (void)0);

	if (src == SLJIT_IMM) {
		jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
		FAIL_IF_NULL(jump);
		set_jump(jump, compiler, (sljit_u32)(JUMP_ADDR | (type << TYPE_SHIFT)));
		jump->u.target = (sljit_uw)srcw;

		jump->addr = compiler->size;
		/* Worst case size. */
		compiler->size += JUMP_MAX_SIZE;
		inst = (sljit_u8*)ensure_buf(compiler, 1);
		FAIL_IF_NULL(inst);

		inst[0] = SLJIT_INST_JUMP;
	} else {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		/* REX_W is not necessary (src is not immediate). */
		compiler->mode32 = 1;
#endif
		inst = emit_x86_instruction(compiler, 1, 0, 0, src, srcw);
		FAIL_IF(!inst);
		inst[0] = GROUP_FF;
		inst[1] = U8(inst[1] | ((type >= SLJIT_FAST_CALL) ? CALL_rm : JMP_rm));
	}
	return SLJIT_SUCCESS;
}

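/* Note: SETcc writes an 8-bit register. On x86-64, registers with
   reg_map >= 4 need a REX prefix to select their low byte (without REX
   the same encoding addresses AH..BH), and on x86-32 only EAX/ECX/EDX/EBX
   have byte-addressable forms; this drives the size and prefix
   computations below. */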
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 type)
{
	sljit_u8 *inst;
	sljit_u8 cond_set;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	sljit_s32 reg;
	sljit_uw size;
#endif /* SLJIT_CONFIG_X86_64 */
	/* ADJUST_LOCAL_OFFSET and CHECK_EXTRA_REGS might overwrite these values. */
	sljit_s32 dst_save = dst;
	sljit_sw dstw_save = dstw;

	CHECK_ERROR();
	CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));

	ADJUST_LOCAL_OFFSET(dst, dstw);
	CHECK_EXTRA_REGS(dst, dstw, (void)0);

	/* setcc = jcc + 0x10. */
	cond_set = U8(get_jump_code((sljit_uw)type) + 0x10);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && FAST_IS_REG(dst)) {
		size = 3 + 2;
		if (reg_map[TMP_REG1] >= 4)
			size += 1 + 1;
		else if (reg_map[dst] >= 4)
			size++;

		inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
		FAIL_IF(!inst);
		INC_SIZE(size);
		/* Set low register to conditional flag. */
		if (reg_map[TMP_REG1] >= 4)
			*inst++ = (reg_map[TMP_REG1] <= 7) ? REX : REX_B;

		inst[0] = GROUP_0F;
		inst[1] = cond_set;
		inst[2] = MOD_REG | reg_lmap[TMP_REG1];
		inst += 3;

		if (reg_map[TMP_REG1] >= 4 || reg_map[dst] >= 4)
			*inst++ = U8(REX | (reg_map[TMP_REG1] <= 7 ? 0 : REX_R) | (reg_map[dst] <= 7 ? 0 : REX_B));

		inst[0] = OR_rm8_r8;
		inst[1] = U8(MOD_REG | (reg_lmap[TMP_REG1] << 3) | reg_lmap[dst]);
		return SLJIT_SUCCESS;
	}

	reg = (GET_OPCODE(op) < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG1;

	size = 3 + (reg_map[reg] >= 4) + 4;
	inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!inst);
	INC_SIZE(size);
	/* Set low register to conditional flag. */

	if (reg_map[reg] >= 4)
		*inst++ = (reg_map[reg] <= 7) ? REX : REX_B;

	inst[0] = GROUP_0F;
	inst[1] = cond_set;
	inst[2] = MOD_REG | reg_lmap[reg];

	inst[3] = REX_W | (reg_map[reg] <= 7 ? 0 : (REX_B | REX_R));
	/* The movzx instruction does not affect flags. */
	inst[4] = GROUP_0F;
	inst[5] = MOVZX_r_rm8;
	inst[6] = U8(MOD_REG | (reg_lmap[reg] << 3) | reg_lmap[reg]);

	if (reg != TMP_REG1)
		return SLJIT_SUCCESS;

	if (GET_OPCODE(op) < SLJIT_ADD) {
		compiler->mode32 = GET_OPCODE(op) != SLJIT_MOV;
		return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
	}

	SLJIT_SKIP_CHECKS(compiler);
	return sljit_emit_op2(compiler, op, dst_save, dstw_save, dst_save, dstw_save, TMP_REG1, 0);

#else /* !SLJIT_CONFIG_X86_64 */
	SLJIT_ASSERT(reg_map[TMP_REG1] < 4);

	/* The SLJIT_CONFIG_X86_32 code path starts here. */
	if (GET_OPCODE(op) < SLJIT_ADD && FAST_IS_REG(dst) && reg_map[dst] <= 4) {
		/* Low byte is accessible. */
		inst = (sljit_u8*)ensure_buf(compiler, 1 + 3 + 3);
		FAIL_IF(!inst);
		INC_SIZE(3 + 3);
		/* Set low byte to conditional flag. */
		inst[0] = GROUP_0F;
		inst[1] = cond_set;
		inst[2] = U8(MOD_REG | reg_map[dst]);

		inst[3] = GROUP_0F;
		inst[4] = MOVZX_r_rm8;
		inst[5] = U8(MOD_REG | (reg_map[dst] << 3) | reg_map[dst]);
		return SLJIT_SUCCESS;
	}

	if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && FAST_IS_REG(dst) && reg_map[dst] <= 4) {
		inst = (sljit_u8*)ensure_buf(compiler, 1 + 3 + 2);
		FAIL_IF(!inst);
		INC_SIZE(3 + 2);

		/* Set low byte to conditional flag. */
		inst[0] = GROUP_0F;
		inst[1] = cond_set;
		inst[2] = U8(MOD_REG | reg_map[TMP_REG1]);

		inst[3] = OR_rm8_r8;
		inst[4] = U8(MOD_REG | (reg_map[TMP_REG1] << 3) | reg_map[dst]);
		return SLJIT_SUCCESS;
	}

	inst = (sljit_u8*)ensure_buf(compiler, 1 + 3 + 3);
	FAIL_IF(!inst);
	INC_SIZE(3 + 3);
	/* Set low byte to conditional flag. */
	inst[0] = GROUP_0F;
	inst[1] = cond_set;
	inst[2] = U8(MOD_REG | reg_map[TMP_REG1]);

	inst[3] = GROUP_0F;
	inst[4] = MOVZX_r_rm8;
	inst[5] = U8(MOD_REG | (reg_map[TMP_REG1] << 3) | reg_map[TMP_REG1]);

	if (GET_OPCODE(op) < SLJIT_ADD)
		return emit_mov(compiler, dst, dstw, TMP_REG1, 0);

	SLJIT_SKIP_CHECKS(compiler);
	return sljit_emit_op2(compiler, op, dst_save, dstw_save, dst_save, dstw_save, TMP_REG1, 0);
#endif /* SLJIT_CONFIG_X86_64 */
}

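/* Note: fselect is lowered to a short forward branch: a Jcc with the
   inverted condition skips the load of src1, so the previously loaded
   src2_freg value is kept when the condition is false. */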
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fselect(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 dst_freg,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2_freg)
{
	sljit_u8* inst;
	sljit_uw size;

	CHECK_ERROR();
	CHECK(check_sljit_emit_fselect(compiler, type, dst_freg, src1, src1w, src2_freg));

	ADJUST_LOCAL_OFFSET(src1, src1w);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif /* SLJIT_CONFIG_X86_64 */

	if (dst_freg != src2_freg) {
		if (dst_freg == src1) {
			src1 = src2_freg;
			src1w = 0;
			type ^= 0x1;
		} else
			FAIL_IF(emit_sse2_load(compiler, type & SLJIT_32, dst_freg, src2_freg, 0));
	}

	inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
	FAIL_IF(!inst);
	INC_SIZE(2);
	inst[0] = U8(get_jump_code((sljit_uw)(type & ~SLJIT_32) ^ 0x1) - 0x10);

	size = compiler->size;
	FAIL_IF(emit_sse2_load(compiler, type & SLJIT_32, dst_freg, src1, src1w));

	inst[1] = U8(compiler->size - size);
	return SLJIT_SUCCESS;
}

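/* Note: for register-memory SIMD moves the alignment hint selects between
   the aligned (MOVAPS/MOVDQA) and unaligned (MOVUPS/MOVDQU) encodings;
   256-bit registers always take the VEX encoded form. */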
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_mov(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 vreg,
	sljit_s32 srcdst, sljit_sw srcdstw)
{
	sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
	sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
	sljit_s32 alignment = SLJIT_SIMD_GET_ELEM2_SIZE(type);
	sljit_uw op;

	CHECK_ERROR();
	CHECK(check_sljit_emit_simd_mov(compiler, type, vreg, srcdst, srcdstw));

	ADJUST_LOCAL_OFFSET(srcdst, srcdstw);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif /* SLJIT_CONFIG_X86_64 */

	switch (reg_size) {
	case 4:
		op = EX86_SSE2;
		break;
	case 5:
		if (!(cpu_feature_list & CPU_FEATURE_AVX2))
			return SLJIT_ERR_UNSUPPORTED;
		op = EX86_SSE2 | VEX_256;
		break;
	default:
		return SLJIT_ERR_UNSUPPORTED;
	}

	if (!(srcdst & SLJIT_MEM))
		alignment = reg_size;

	if (type & SLJIT_SIMD_FLOAT) {
		if (elem_size == 2 || elem_size == 3) {
			op |= alignment >= reg_size ? MOVAPS_x_xm : MOVUPS_x_xm;

			if (elem_size == 3)
				op |= EX86_PREF_66;

			if (type & SLJIT_SIMD_STORE)
				op += 1;
		} else
			return SLJIT_ERR_UNSUPPORTED;
	} else {
		op |= ((type & SLJIT_SIMD_STORE) ? MOVDQA_xm_x : MOVDQA_x_xm)
			| (alignment >= reg_size ? EX86_PREF_66 : EX86_PREF_F3);
	}

	if (type & SLJIT_SIMD_TEST)
		return SLJIT_SUCCESS;

	if ((op & VEX_256) || ((cpu_feature_list & CPU_FEATURE_AVX) && (compiler->options & SLJIT_ENTER_USE_VEX)))
		return emit_vex_instruction(compiler, op, vreg, 0, srcdst, srcdstw);

	return emit_groupf(compiler, op, vreg, srcdst, srcdstw);
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_replicate(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 vreg,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
	sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
	sljit_s32 use_vex = (cpu_feature_list & CPU_FEATURE_AVX) && (compiler->options & SLJIT_ENTER_USE_VEX);
	sljit_u8 *inst;
	sljit_u8 opcode = 0;
	sljit_uw op;

	CHECK_ERROR();
	CHECK(check_sljit_emit_simd_replicate(compiler, type, vreg, src, srcw));

	ADJUST_LOCAL_OFFSET(src, srcw);

	if (!(type & SLJIT_SIMD_FLOAT)) {
		CHECK_EXTRA_REGS(src, srcw, (void)0);
	}

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	if ((type & SLJIT_SIMD_FLOAT) ? (elem_size < 2 || elem_size > 3) : (elem_size > 2))
		return SLJIT_ERR_UNSUPPORTED;
#else /* !SLJIT_CONFIG_X86_32 */
	compiler->mode32 = 1;

	if (elem_size > 3 || ((type & SLJIT_SIMD_FLOAT) && elem_size < 2))
		return SLJIT_ERR_UNSUPPORTED;
#endif /* SLJIT_CONFIG_X86_32 */

	if (reg_size != 4 && (reg_size != 5 || !(cpu_feature_list & CPU_FEATURE_AVX2)))
		return SLJIT_ERR_UNSUPPORTED;

	if (type & SLJIT_SIMD_TEST)
		return SLJIT_SUCCESS;

	if (reg_size == 5)
		use_vex = 1;

	if (use_vex && src != SLJIT_IMM) {
		op = 0;

		switch (elem_size) {
		case 0:
			if (cpu_feature_list & CPU_FEATURE_AVX2)
				op = VPBROADCASTB_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
			break;
		case 1:
			if (cpu_feature_list & CPU_FEATURE_AVX2)
				op = VPBROADCASTW_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
			break;
		case 2:
			if (type & SLJIT_SIMD_FLOAT) {
				if ((cpu_feature_list & CPU_FEATURE_AVX2) || ((cpu_feature_list & CPU_FEATURE_AVX) && (src & SLJIT_MEM)))
					op = VBROADCASTSS_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
			} else if (cpu_feature_list & CPU_FEATURE_AVX2)
				op = VPBROADCASTD_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
			break;
		default:
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			if (!(type & SLJIT_SIMD_FLOAT)) {
				if (cpu_feature_list & CPU_FEATURE_AVX2)
					op = VPBROADCASTQ_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
				break;
			}
#endif /* SLJIT_CONFIG_X86_64 */

			if (reg_size == 5)
				op = VBROADCASTSD_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
			break;
		}

		if (op != 0) {
			if (!(src & SLJIT_MEM) && !(type & SLJIT_SIMD_FLOAT)) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
				if (elem_size >= 3)
					compiler->mode32 = 0;
#endif /* SLJIT_CONFIG_X86_64 */
				FAIL_IF(emit_vex_instruction(compiler, MOVD_x_rm | VEX_AUTO_W | EX86_PREF_66 | EX86_SSE2_OP1, vreg, 0, src, srcw));
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
				compiler->mode32 = 1;
#endif /* SLJIT_CONFIG_X86_64 */
				src = vreg;
				srcw = 0;
			}

			if (reg_size == 5)
				op |= VEX_256;

			return emit_vex_instruction(compiler, op, vreg, 0, src, srcw);
		}
	}

	if (type & SLJIT_SIMD_FLOAT) {
		if (src == SLJIT_IMM) {
			if (use_vex)
				return emit_vex_instruction(compiler, XORPD_x_xm | (reg_size == 5 ? VEX_256 : 0) | (elem_size == 3 ? EX86_PREF_66 : 0) | EX86_SSE2 | VEX_SSE2_OPV, vreg, vreg, vreg, 0);

			return emit_groupf(compiler, XORPD_x_xm | (elem_size == 3 ? EX86_PREF_66 : 0) | EX86_SSE2, vreg, vreg, 0);
		}

		SLJIT_ASSERT(reg_size == 4);

		if (use_vex) {
			if (elem_size == 3)
				return emit_vex_instruction(compiler, MOVDDUP_x_xm | EX86_PREF_F2 | EX86_SSE2, vreg, 0, src, srcw);

			SLJIT_ASSERT(!(src & SLJIT_MEM));
			FAIL_IF(emit_vex_instruction(compiler, SHUFPS_x_xm | EX86_SSE2 | VEX_SSE2_OPV, vreg, src, src, 0));
			return emit_byte(compiler, 0);
		}

		if (elem_size == 2 && vreg != src) {
			FAIL_IF(emit_sse2_load(compiler, 1, vreg, src, srcw));
			src = vreg;
			srcw = 0;
		}

		op = (elem_size == 2 ? SHUFPS_x_xm : MOVDDUP_x_xm) | (elem_size == 2 ? 0 : EX86_PREF_F2) | EX86_SSE2;
		FAIL_IF(emit_groupf(compiler, op, vreg, src, srcw));

		if (elem_size == 2)
			return emit_byte(compiler, 0);
		return SLJIT_SUCCESS;
	}

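	/* Note: 8 and 16 bit immediates are widened to 32 bits by repeating
	   the value, so a single 32-bit broadcast suffices; the all-zero and
	   all-one patterns are produced in place with PXOR/PCMPEQD instead of
	   loading a constant. */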
	if (src == SLJIT_IMM) {
		if (elem_size == 0) {
			srcw = (sljit_u8)srcw;
			srcw |= srcw << 8;
			srcw |= srcw << 16;
			elem_size = 2;
		} else if (elem_size == 1) {
			srcw = (sljit_u16)srcw;
			srcw |= srcw << 16;
			elem_size = 2;
		}

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (elem_size == 2 && (sljit_s32)srcw == -1)
			srcw = -1;
#endif /* SLJIT_CONFIG_X86_64 */

		if (srcw == 0 || srcw == -1) {
			if (use_vex)
				return emit_vex_instruction(compiler, (srcw == 0 ? PXOR_x_xm : PCMPEQD_x_xm) | (reg_size == 5 ? VEX_256 : 0) | EX86_PREF_66 | EX86_SSE2 | VEX_SSE2_OPV, vreg, vreg, vreg, 0);

			return emit_groupf(compiler, (srcw == 0 ? PXOR_x_xm : PCMPEQD_x_xm) | EX86_PREF_66 | EX86_SSE2, vreg, vreg, 0);
		}

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (elem_size == 3)
			FAIL_IF(emit_load_imm64(compiler, TMP_REG1, srcw));
		else
#endif /* SLJIT_CONFIG_X86_64 */
			EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, srcw);

		src = TMP_REG1;
		srcw = 0;

	}

	op = 2;
	opcode = MOVD_x_rm;

	switch (elem_size) {
	case 0:
		if (!FAST_IS_REG(src)) {
			opcode = 0x3a /* Prefix of PINSRB_x_rm_i8. */;
			op = 3;
		}
		break;
	case 1:
		if (!FAST_IS_REG(src))
			opcode = PINSRW_x_rm_i8;
		break;
	case 2:
		break;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	case 3:
		/* MOVQ */
		compiler->mode32 = 0;
		break;
#endif /* SLJIT_CONFIG_X86_64 */
	}

	if (use_vex) {
		if (opcode != MOVD_x_rm) {
			op = (opcode == 0x3a) ? (PINSRB_x_rm_i8 | VEX_OP_0F3A) : opcode;
			FAIL_IF(emit_vex_instruction(compiler, op | EX86_PREF_66 | EX86_SSE2_OP1 | VEX_SSE2_OPV, vreg, vreg, src, srcw));
		} else
			FAIL_IF(emit_vex_instruction(compiler, MOVD_x_rm | VEX_AUTO_W | EX86_PREF_66 | EX86_SSE2_OP1, vreg, 0, src, srcw));
	} else {
		inst = emit_x86_instruction(compiler, op | EX86_PREF_66 | EX86_SSE2_OP1, vreg, 0, src, srcw);
		FAIL_IF(!inst);
		inst[0] = GROUP_0F;
		inst[1] = opcode;

		if (op == 3) {
			SLJIT_ASSERT(opcode == 0x3a);
			inst[2] = PINSRB_x_rm_i8;
		}
	}

	if ((cpu_feature_list & CPU_FEATURE_AVX2) && use_vex && elem_size >= 2) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		op = VPBROADCASTD_x_xm;
#else /* !SLJIT_CONFIG_X86_32 */
		op = (elem_size == 3) ? VPBROADCASTQ_x_xm : VPBROADCASTD_x_xm;
#endif /* SLJIT_CONFIG_X86_32 */
		return emit_vex_instruction(compiler, op | ((reg_size == 5) ? VEX_256 : 0) | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, vreg, 0, vreg, 0);
	}

	SLJIT_ASSERT(reg_size == 4);

	if (opcode != MOVD_x_rm)
		FAIL_IF(emit_byte(compiler, 0));

	switch (elem_size) {
	case 0:
		if (use_vex) {
			FAIL_IF(emit_vex_instruction(compiler, PXOR_x_xm | EX86_PREF_66 | EX86_SSE2 | VEX_SSE2_OPV, TMP_FREG, TMP_FREG, TMP_FREG, 0));
			return emit_vex_instruction(compiler, PSHUFB_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2 | VEX_SSE2_OPV, vreg, vreg, TMP_FREG, 0);
		}
		FAIL_IF(emit_groupf(compiler, PXOR_x_xm | EX86_PREF_66 | EX86_SSE2, TMP_FREG, TMP_FREG, 0));
		return emit_groupf_ext(compiler, PSHUFB_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, vreg, TMP_FREG, 0);
	case 1:
		if (use_vex)
			FAIL_IF(emit_vex_instruction(compiler, PSHUFLW_x_xm | EX86_PREF_F2 | EX86_SSE2, vreg, 0, vreg, 0));
		else
			FAIL_IF(emit_groupf(compiler, PSHUFLW_x_xm | EX86_PREF_F2 | EX86_SSE2, vreg, vreg, 0));
		FAIL_IF(emit_byte(compiler, 0));
		SLJIT_FALLTHROUGH
	default:
		if (use_vex)
			FAIL_IF(emit_vex_instruction(compiler, PSHUFD_x_xm | EX86_PREF_66 | EX86_SSE2, vreg, 0, vreg, 0));
		else
			FAIL_IF(emit_groupf(compiler, PSHUFD_x_xm | EX86_PREF_66 | EX86_SSE2, vreg, vreg, 0));
		return emit_byte(compiler, 0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	case 3:
		compiler->mode32 = 1;
		if (use_vex)
			FAIL_IF(emit_vex_instruction(compiler, PSHUFD_x_xm | EX86_PREF_66 | EX86_SSE2, vreg, 0, vreg, 0));
		else
			FAIL_IF(emit_groupf(compiler, PSHUFD_x_xm | EX86_PREF_66 | EX86_SSE2, vreg, vreg, 0));
		return emit_byte(compiler, 0x44);
#endif /* SLJIT_CONFIG_X86_64 */
	}
}

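/* Note: lanes in the upper half of a 256-bit register have no direct
   128-bit encoding, so the high half is first extracted into TMP_FREG
   (or the result is built there) and then inserted back with
   VINSERTF128/VINSERTI128, or placed with a VPERM when the rest of the
   register is zeroed. */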
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_mov(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 vreg, sljit_s32 lane_index,
	sljit_s32 srcdst, sljit_sw srcdstw)
{
	sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
	sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
	sljit_s32 use_vex = (cpu_feature_list & CPU_FEATURE_AVX) && (compiler->options & SLJIT_ENTER_USE_VEX);
	sljit_u8 *inst;
	sljit_u8 opcode = 0;
	sljit_uw op;
	sljit_s32 vreg_orig = vreg;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	sljit_s32 srcdst_is_ereg = 0;
	sljit_s32 srcdst_orig = 0;
	sljit_sw srcdstw_orig = 0;
#endif /* SLJIT_CONFIG_X86_32 */

	CHECK_ERROR();
	CHECK(check_sljit_emit_simd_lane_mov(compiler, type, vreg, lane_index, srcdst, srcdstw));

	ADJUST_LOCAL_OFFSET(srcdst, srcdstw);

	if (reg_size == 5) {
		if (!(cpu_feature_list & CPU_FEATURE_AVX2))
			return SLJIT_ERR_UNSUPPORTED;
		use_vex = 1;
	} else if (reg_size != 4)
		return SLJIT_ERR_UNSUPPORTED;

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	if ((type & SLJIT_SIMD_FLOAT) ? (elem_size < 2 || elem_size > 3) : elem_size > 2)
		return SLJIT_ERR_UNSUPPORTED;
#else /* !SLJIT_CONFIG_X86_32 */
	if (elem_size > 3 || ((type & SLJIT_SIMD_FLOAT) && elem_size < 2))
		return SLJIT_ERR_UNSUPPORTED;
#endif /* SLJIT_CONFIG_X86_32 */

	if (type & SLJIT_SIMD_TEST)
		return SLJIT_SUCCESS;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#else /* !SLJIT_CONFIG_X86_64 */
	if (!(type & SLJIT_SIMD_FLOAT)) {
		CHECK_EXTRA_REGS(srcdst, srcdstw, srcdst_is_ereg = 1);

		if ((type & SLJIT_SIMD_STORE) && ((srcdst_is_ereg && elem_size < 2) || (elem_size == 0 && (type & SLJIT_SIMD_LANE_SIGNED) && FAST_IS_REG(srcdst) && reg_map[srcdst] >= 4))) {
			srcdst_orig = srcdst;
			srcdstw_orig = srcdstw;
			srcdst = TMP_REG1;
			srcdstw = 0;
		}
	}
#endif /* SLJIT_CONFIG_X86_64 */

	if (type & SLJIT_SIMD_LANE_ZERO) {
		if (lane_index == 0) {
			if (!(type & SLJIT_SIMD_FLOAT)) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
				if (elem_size == 3) {
					compiler->mode32 = 0;
					elem_size = 2;
				}
#endif /* SLJIT_CONFIG_X86_64 */
				if (srcdst == SLJIT_IMM) {
					if (elem_size == 0)
						srcdstw = (sljit_u8)srcdstw;
					else if (elem_size == 1)
						srcdstw = (sljit_u16)srcdstw;

					EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, srcdstw);
					srcdst = TMP_REG1;
					srcdstw = 0;
					elem_size = 2;
				}

				if (elem_size == 2) {
					if (use_vex)
						return emit_vex_instruction(compiler, MOVD_x_rm | VEX_AUTO_W | EX86_PREF_66 | EX86_SSE2_OP1, vreg, 0, srcdst, srcdstw);
					return emit_groupf(compiler, MOVD_x_rm | EX86_PREF_66 | EX86_SSE2_OP1, vreg, srcdst, srcdstw);
				}
			} else if (srcdst & SLJIT_MEM) {
				SLJIT_ASSERT(elem_size == 2 || elem_size == 3);

				if (use_vex)
					return emit_vex_instruction(compiler, MOVSD_x_xm | (elem_size == 2 ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2, vreg, 0, srcdst, srcdstw);
				return emit_groupf(compiler, MOVSD_x_xm | (elem_size == 2 ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2, vreg, srcdst, srcdstw);
			} else if (elem_size == 3) {
				if (use_vex)
					return emit_vex_instruction(compiler, MOVQ_x_xm | EX86_PREF_F3 | EX86_SSE2, vreg, 0, srcdst, 0);
				return emit_groupf(compiler, MOVQ_x_xm | EX86_PREF_F3 | EX86_SSE2, vreg, srcdst, 0);
			} else if (use_vex) {
				FAIL_IF(emit_vex_instruction(compiler, XORPD_x_xm | EX86_SSE2 | VEX_SSE2_OPV, TMP_FREG, TMP_FREG, TMP_FREG, 0));
				return emit_vex_instruction(compiler, MOVSD_x_xm | EX86_PREF_F3 | EX86_SSE2 | VEX_SSE2_OPV, vreg, TMP_FREG, srcdst, 0);
			}
		}

		if (reg_size == 5 && lane_index >= (1 << (4 - elem_size))) {
			vreg = TMP_FREG;
			lane_index -= (1 << (4 - elem_size));
		} else if ((type & SLJIT_SIMD_FLOAT) && vreg == srcdst) {
			if (use_vex)
				FAIL_IF(emit_vex_instruction(compiler, MOVSD_x_xm | (elem_size == 2 ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2 | VEX_SSE2_OPV, TMP_FREG, TMP_FREG, srcdst, srcdstw));
			else
				FAIL_IF(emit_sse2_load(compiler, elem_size == 2, TMP_FREG, srcdst, srcdstw));
			srcdst = TMP_FREG;
			srcdstw = 0;
		}

		op = ((!(type & SLJIT_SIMD_FLOAT) || elem_size != 2) ? EX86_PREF_66 : 0)
			| ((type & SLJIT_SIMD_FLOAT) ? XORPD_x_xm : PXOR_x_xm) | EX86_SSE2;

		if (use_vex)
			FAIL_IF(emit_vex_instruction(compiler, op | (reg_size == 5 ? VEX_256 : 0) | VEX_SSE2_OPV, vreg, vreg, vreg, 0));
		else
			FAIL_IF(emit_groupf(compiler, op, vreg, vreg, 0));
	} else if (reg_size == 5 && lane_index >= (1 << (4 - elem_size))) {
		FAIL_IF(emit_vex_instruction(compiler, ((type & SLJIT_SIMD_FLOAT) ? VEXTRACTF128_x_ym : VEXTRACTI128_x_ym) | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2, vreg, 0, TMP_FREG, 0));
		FAIL_IF(emit_byte(compiler, 1));

		vreg = TMP_FREG;
		lane_index -= (1 << (4 - elem_size));
	}

	if (type & SLJIT_SIMD_FLOAT) {
		if (elem_size == 3) {
			if (srcdst & SLJIT_MEM) {
				if (type & SLJIT_SIMD_STORE)
					op = lane_index == 0 ? MOVLPD_m_x : MOVHPD_m_x;
				else
					op = lane_index == 0 ? MOVLPD_x_m : MOVHPD_x_m;

				/* VEX prefix clears upper bits of the target register. */
				if (use_vex && ((type & SLJIT_SIMD_STORE) || reg_size == 4 || vreg == TMP_FREG))
					FAIL_IF(emit_vex_instruction(compiler, op | EX86_PREF_66 | EX86_SSE2
						| ((type & SLJIT_SIMD_STORE) ? 0 : VEX_SSE2_OPV), vreg, (type & SLJIT_SIMD_STORE) ? 0 : vreg, srcdst, srcdstw));
				else
					FAIL_IF(emit_groupf(compiler, op | EX86_PREF_66 | EX86_SSE2, vreg, srcdst, srcdstw));

				/* In case of store, vreg is not TMP_FREG. */
			} else if (type & SLJIT_SIMD_STORE) {
				if (lane_index == 1) {
					if (use_vex)
						return emit_vex_instruction(compiler, MOVHLPS_x_x | EX86_SSE2 | VEX_SSE2_OPV, srcdst, srcdst, vreg, 0);
					return emit_groupf(compiler, MOVHLPS_x_x | EX86_SSE2, srcdst, vreg, 0);
				}
				if (use_vex)
					return emit_vex_instruction(compiler, MOVSD_x_xm | EX86_PREF_F2 | EX86_SSE2 | VEX_SSE2_OPV, srcdst, srcdst, vreg, 0);
				return emit_sse2_load(compiler, 0, srcdst, vreg, 0);
			} else if (use_vex && (reg_size == 4 || vreg == TMP_FREG)) {
				if (lane_index == 1)
					FAIL_IF(emit_vex_instruction(compiler, MOVLHPS_x_x | EX86_SSE2 | VEX_SSE2_OPV, vreg, vreg, srcdst, 0));
				else
					FAIL_IF(emit_vex_instruction(compiler, MOVSD_x_xm | EX86_PREF_F2 | EX86_SSE2 | VEX_SSE2_OPV, vreg, vreg, srcdst, 0));
			} else {
				if (lane_index == 1)
					FAIL_IF(emit_groupf(compiler, MOVLHPS_x_x | EX86_SSE2, vreg, srcdst, 0));
				else
					FAIL_IF(emit_sse2_load(compiler, 0, vreg, srcdst, 0));
			}
		} else if (type & SLJIT_SIMD_STORE) {
			if (lane_index == 0) {
				if (use_vex)
					return emit_vex_instruction(compiler, MOVSD_xm_x | EX86_PREF_F3 | EX86_SSE2 | ((srcdst & SLJIT_MEM) ? 0 : VEX_SSE2_OPV),
						vreg, ((srcdst & SLJIT_MEM) ? 0 : srcdst), srcdst, srcdstw);
				return emit_sse2_store(compiler, 1, srcdst, srcdstw, vreg);
			}

			if (srcdst & SLJIT_MEM) {
				if (use_vex)
					FAIL_IF(emit_vex_instruction(compiler, EXTRACTPS_x_xm | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2, vreg, 0, srcdst, srcdstw));
				else
					FAIL_IF(emit_groupf_ext(compiler, EXTRACTPS_x_xm | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2, vreg, srcdst, srcdstw));
				return emit_byte(compiler, U8(lane_index));
			}

			if (use_vex) {
				FAIL_IF(emit_vex_instruction(compiler, SHUFPS_x_xm | EX86_SSE2 | VEX_SSE2_OPV, srcdst, vreg, vreg, 0));
				return emit_byte(compiler, U8(lane_index));
			}

			if (srcdst == vreg)
				op = SHUFPS_x_xm | EX86_SSE2;
			else {
				switch (lane_index) {
				case 1:
					op = MOVSHDUP_x_xm | EX86_PREF_F3 | EX86_SSE2;
					break;
				case 2:
					op = MOVHLPS_x_x | EX86_SSE2;
					break;
				default:
					SLJIT_ASSERT(lane_index == 3);
					op = PSHUFD_x_xm | EX86_PREF_66 | EX86_SSE2;
					break;
				}
			}

			FAIL_IF(emit_groupf(compiler, op, srcdst, vreg, 0));

			op &= 0xff;
			if (op == SHUFPS_x_xm || op == PSHUFD_x_xm)
				return emit_byte(compiler, U8(lane_index));

			return SLJIT_SUCCESS;
		} else {
			if (lane_index != 0 || (srcdst & SLJIT_MEM)) {
				FAIL_IF(emit_groupf_ext(compiler, INSERTPS_x_xm | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2, vreg, srcdst, srcdstw));
				FAIL_IF(emit_byte(compiler, U8(lane_index << 4)));
			} else
				FAIL_IF(emit_sse2_store(compiler, 1, vreg, 0, srcdst));
		}

		if (vreg != TMP_FREG || (type & SLJIT_SIMD_STORE))
			return SLJIT_SUCCESS;

		SLJIT_ASSERT(reg_size == 5);

		if (type & SLJIT_SIMD_LANE_ZERO) {
			FAIL_IF(emit_vex_instruction(compiler, VPERMPD_y_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | VEX_W | EX86_SSE2, vreg_orig, 0, TMP_FREG, 0));
			return emit_byte(compiler, 0x4e);
		}

		FAIL_IF(emit_vex_instruction(compiler, VINSERTF128_y_y_xm | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2 | VEX_SSE2_OPV, vreg_orig, vreg_orig, TMP_FREG, 0));
		return emit_byte(compiler, 1);
	}

	if (srcdst == SLJIT_IMM) {
		EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, srcdstw);
		srcdst = TMP_REG1;
		srcdstw = 0;
	}

	op = 3;

	switch (elem_size) {
	case 0:
		opcode = (type & SLJIT_SIMD_STORE) ? PEXTRB_rm_x_i8 : PINSRB_x_rm_i8;
		break;
	case 1:
		if (!(type & SLJIT_SIMD_STORE)) {
			op = 2;
			opcode = PINSRW_x_rm_i8;
		} else
			opcode = PEXTRW_rm_x_i8;
		break;
	case 2:
		opcode = (type & SLJIT_SIMD_STORE) ? PEXTRD_rm_x_i8 : PINSRD_x_rm_i8;
		break;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	case 3:
		/* PINSRQ / PEXTRQ */
		opcode = (type & SLJIT_SIMD_STORE) ? PEXTRD_rm_x_i8 : PINSRD_x_rm_i8;
		compiler->mode32 = 0;
		break;
#endif /* SLJIT_CONFIG_X86_64 */
	}

	if (use_vex && (type & SLJIT_SIMD_STORE)) {
		op = opcode | ((op == 3) ? VEX_OP_0F3A : 0);
		FAIL_IF(emit_vex_instruction(compiler, op | EX86_PREF_66 | VEX_AUTO_W | EX86_SSE2_OP1 | VEX_SSE2_OPV, vreg, 0, srcdst, srcdstw));
	} else {
		inst = emit_x86_instruction(compiler, op | EX86_PREF_66 | EX86_SSE2_OP1, vreg, 0, srcdst, srcdstw);
		FAIL_IF(!inst);
		inst[0] = GROUP_0F;

		if (op == 3) {
			inst[1] = 0x3a;
			inst[2] = opcode;
		} else
			inst[1] = opcode;
	}

	FAIL_IF(emit_byte(compiler, U8(lane_index)));

	if (!(type & SLJIT_SIMD_LANE_SIGNED) || (srcdst & SLJIT_MEM)) {
		if (vreg == TMP_FREG && !(type & SLJIT_SIMD_STORE)) {
			SLJIT_ASSERT(reg_size == 5);

			if (type & SLJIT_SIMD_LANE_ZERO) {
				FAIL_IF(emit_vex_instruction(compiler, VPERMQ_y_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | VEX_W | EX86_SSE2, vreg_orig, 0, TMP_FREG, 0));
				return emit_byte(compiler, 0x4e);
			}

			FAIL_IF(emit_vex_instruction(compiler, VINSERTI128_y_y_xm | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2 | VEX_SSE2_OPV, vreg_orig, vreg_orig, TMP_FREG, 0));
			return emit_byte(compiler, 1);
		}

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (srcdst_orig & SLJIT_MEM)
			return emit_mov(compiler, srcdst_orig, srcdstw_orig, TMP_REG1, 0);
#endif /* SLJIT_CONFIG_X86_32 */
		return SLJIT_SUCCESS;
	}

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (elem_size >= 3)
		return SLJIT_SUCCESS;

	compiler->mode32 = (type & SLJIT_32);

	op = 2;

	if (elem_size == 0)
		op |= EX86_REX;

	if (elem_size == 2) {
		if (type & SLJIT_32)
			return SLJIT_SUCCESS;

		SLJIT_ASSERT(!(compiler->mode32));
		op = 1;
	}

	inst = emit_x86_instruction(compiler, op, srcdst, 0, srcdst, 0);
	FAIL_IF(!inst);

	if (op != 1) {
		inst[0] = GROUP_0F;
		inst[1] = U8((elem_size == 0) ? MOVSX_r_rm8 : MOVSX_r_rm16);
	} else
		inst[0] = MOVSXD_r_rm;
#else /* !SLJIT_CONFIG_X86_64 */
	if (elem_size >= 2)
		return SLJIT_SUCCESS;

	FAIL_IF(emit_groupf(compiler, (elem_size == 0) ? MOVSX_r_rm8 : MOVSX_r_rm16,
		(srcdst_orig != 0 && FAST_IS_REG(srcdst_orig)) ? srcdst_orig : srcdst, srcdst, 0));

	if (srcdst_orig & SLJIT_MEM)
		return emit_mov(compiler, srcdst_orig, srcdstw_orig, TMP_REG1, 0);
#endif /* SLJIT_CONFIG_X86_64 */
	return SLJIT_SUCCESS;
}

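/* Note: lane replication. Floating point lanes are broadcast with
   SHUFPS/SHUFPD or MOVDDUP; integer lanes are moved into position first
   when necessary (PSHUFLW/PSRLDQ) and then spread with PSHUFD, or with an
   AVX2 VPBROADCAST/VPERMQ when the VEX forms are available. */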
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_replicate(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 vreg,
	sljit_s32 src, sljit_s32 src_lane_index)
{
	sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
	sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
	sljit_s32 use_vex = (cpu_feature_list & CPU_FEATURE_AVX) && (compiler->options & SLJIT_ENTER_USE_VEX);
	sljit_uw pref;
	sljit_u8 byte;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	sljit_s32 opcode3 = TMP_REG1;
#else /* !SLJIT_CONFIG_X86_32 */
	sljit_s32 opcode3 = SLJIT_S0;
#endif /* SLJIT_CONFIG_X86_32 */

	CHECK_ERROR();
	CHECK(check_sljit_emit_simd_lane_replicate(compiler, type, vreg, src, src_lane_index));

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif /* SLJIT_CONFIG_X86_64 */
	SLJIT_ASSERT(reg_map[opcode3] == 3);

	if (reg_size == 5) {
		if (!(cpu_feature_list & CPU_FEATURE_AVX2))
			return SLJIT_ERR_UNSUPPORTED;
		use_vex = 1;
	} else if (reg_size != 4)
		return SLJIT_ERR_UNSUPPORTED;

	if (type & SLJIT_SIMD_FLOAT) {
		pref = 0;
		byte = U8(src_lane_index);

		if (elem_size == 3) {
			if (type & SLJIT_SIMD_TEST)
				return SLJIT_SUCCESS;

			if (reg_size == 5) {
				if (src_lane_index == 0)
					return emit_vex_instruction(compiler, VBROADCASTSD_x_xm | VEX_256 | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, vreg, 0, src, 0);

				FAIL_IF(emit_vex_instruction(compiler, VPERMPD_y_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | VEX_W | EX86_SSE2, vreg, 0, src, 0));

				byte = U8(byte | (byte << 2));
				return emit_byte(compiler, U8(byte | (byte << 4)));
			}

			if (src_lane_index == 0) {
				if (use_vex)
					return emit_vex_instruction(compiler, MOVDDUP_x_xm | EX86_PREF_F2 | EX86_SSE2, vreg, 0, src, 0);
				return emit_groupf(compiler, MOVDDUP_x_xm | EX86_PREF_F2 | EX86_SSE2, vreg, src, 0);
			}

			/* Changes it to SHUFPD_x_xm. */
			pref = EX86_PREF_66;
		} else if (elem_size != 2)
			return SLJIT_ERR_UNSUPPORTED;
		else if (type & SLJIT_SIMD_TEST)
			return SLJIT_SUCCESS;

		if (reg_size == 5) {
			SLJIT_ASSERT(elem_size == 2);

			if (src_lane_index == 0)
				return emit_vex_instruction(compiler, VBROADCASTSS_x_xm | VEX_256 | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, vreg, 0, src, 0);

			FAIL_IF(emit_vex_instruction(compiler, VPERMPD_y_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | VEX_W | EX86_SSE2, vreg, 0, src, 0));

			byte = 0x44;
			if (src_lane_index >= 4) {
				byte = 0xee;
				src_lane_index -= 4;
			}

			FAIL_IF(emit_byte(compiler, byte));
			FAIL_IF(emit_vex_instruction(compiler, SHUFPS_x_xm | VEX_256 | pref | EX86_SSE2 | VEX_SSE2_OPV, vreg, vreg, vreg, 0));
			byte = U8(src_lane_index);
		} else if (use_vex) {
			FAIL_IF(emit_vex_instruction(compiler, SHUFPS_x_xm | pref | EX86_SSE2 | VEX_SSE2_OPV, vreg, src, src, 0));
		} else {
			if (vreg != src)
				FAIL_IF(emit_groupf(compiler, MOVAPS_x_xm | pref | EX86_SSE2, vreg, src, 0));

			FAIL_IF(emit_groupf(compiler, SHUFPS_x_xm | pref | EX86_SSE2, vreg, vreg, 0));
		}

		if (elem_size == 2) {
			byte = U8(byte | (byte << 2));
			byte = U8(byte | (byte << 4));
		} else
			byte = U8(byte | (byte << 1));

		return emit_byte(compiler, U8(byte));
	}

	if (type & SLJIT_SIMD_TEST)
		return SLJIT_SUCCESS;

	if (elem_size == 0) {
		if (reg_size == 5 && src_lane_index >= 16) {
			FAIL_IF(emit_vex_instruction(compiler, VPERMQ_y_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | VEX_W | EX86_SSE2, vreg, 0, src, 0));
			FAIL_IF(emit_byte(compiler, src_lane_index >= 24 ? 0xff : 0xaa));
			src_lane_index &= 0x7;
			src = vreg;
		}

		if (src_lane_index != 0 || (vreg != src && (!(cpu_feature_list & CPU_FEATURE_AVX2) || !use_vex))) {
			pref = 0;

			if ((src_lane_index & 0x3) == 0) {
				pref = EX86_PREF_66;
				byte = U8(src_lane_index >> 2);
			} else if (src_lane_index < 8 && (src_lane_index & 0x1) == 0) {
				pref = EX86_PREF_F2;
				byte = U8(src_lane_index >> 1);
			} else {
				if (!use_vex) {
					if (vreg != src)
						FAIL_IF(emit_groupf(compiler, MOVDQA_x_xm | EX86_PREF_66 | EX86_SSE2, vreg, src, 0));

					FAIL_IF(emit_groupf(compiler, PSRLDQ_x | EX86_PREF_66 | EX86_SSE2_OP2, opcode3, vreg, 0));
				} else
					FAIL_IF(emit_vex_instruction(compiler, PSRLDQ_x | EX86_PREF_66 | EX86_SSE2_OP2 | VEX_SSE2_OPV, opcode3, vreg, src, 0));

				FAIL_IF(emit_byte(compiler, U8(src_lane_index)));
			}

			if (pref != 0) {
				if (use_vex)
					FAIL_IF(emit_vex_instruction(compiler, PSHUFLW_x_xm | pref | EX86_SSE2, vreg, 0, src, 0));
				else
					FAIL_IF(emit_groupf(compiler, PSHUFLW_x_xm | pref | EX86_SSE2, vreg, src, 0));
				FAIL_IF(emit_byte(compiler, byte));
			}

			src = vreg;
		}

		if (use_vex && (cpu_feature_list & CPU_FEATURE_AVX2))
			return emit_vex_instruction(compiler, VPBROADCASTB_x_xm | (reg_size == 5 ? VEX_256 : 0) | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, vreg, 0, src, 0);

		SLJIT_ASSERT(reg_size == 4);
		FAIL_IF(emit_groupf(compiler, PXOR_x_xm | EX86_PREF_66 | EX86_SSE2, TMP_FREG, TMP_FREG, 0));
		return emit_groupf_ext(compiler, PSHUFB_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, vreg, TMP_FREG, 0);
	}

	if ((cpu_feature_list & CPU_FEATURE_AVX2) && use_vex && src_lane_index == 0 && elem_size <= 3) {
		switch (elem_size) {
		case 1:
			pref = VPBROADCASTW_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
			break;
		case 2:
			pref = VPBROADCASTD_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
			break;
		default:
			pref = VPBROADCASTQ_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
			break;
		}

		if (reg_size == 5)
			pref |= VEX_256;

		return emit_vex_instruction(compiler, pref, vreg, 0, src, 0);
	}

	if (reg_size == 5) {
		switch (elem_size) {
		case 1:
			byte = U8(src_lane_index & 0x3);
			src_lane_index >>= 2;
			pref = PSHUFLW_x_xm | VEX_256 | ((src_lane_index & 1) == 0 ? EX86_PREF_F2 : EX86_PREF_F3) | EX86_SSE2;
			break;
		case 2:
			byte = U8(src_lane_index & 0x3);
			src_lane_index >>= 1;
			pref = PSHUFD_x_xm | VEX_256 | EX86_PREF_66 | EX86_SSE2;
			break;
		case 3:
			pref = 0;
			break;
		default:
			FAIL_IF(emit_vex_instruction(compiler, VPERMQ_y_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | VEX_W | EX86_SSE2, vreg, 0, src, 0));
			return emit_byte(compiler, U8(src_lane_index == 0 ? 0x44 : 0xee));
		}

		if (pref != 0) {
			FAIL_IF(emit_vex_instruction(compiler, pref, vreg, 0, src, 0));
			byte = U8(byte | (byte << 2));
			FAIL_IF(emit_byte(compiler, U8(byte | (byte << 4))));

			if (src_lane_index == 0)
				return emit_vex_instruction(compiler, VPBROADCASTQ_x_xm | VEX_256 | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, vreg, 0, vreg, 0);

			src = vreg;
		}

		FAIL_IF(emit_vex_instruction(compiler, VPERMQ_y_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | VEX_W | EX86_SSE2, vreg, 0, src, 0));
		byte = U8(src_lane_index);
		byte = U8(byte | (byte << 2));
		return emit_byte(compiler, U8(byte | (byte << 4)));
	}

	switch (elem_size) {
	case 1:
		byte = U8(src_lane_index & 0x3);
		src_lane_index >>= 1;
		pref = (src_lane_index & 2) == 0 ? EX86_PREF_F2 : EX86_PREF_F3;

		if (use_vex)
			FAIL_IF(emit_vex_instruction(compiler, PSHUFLW_x_xm | pref | EX86_SSE2, vreg, 0, src, 0));
		else
			FAIL_IF(emit_groupf(compiler, PSHUFLW_x_xm | pref | EX86_SSE2, vreg, src, 0));
		byte = U8(byte | (byte << 2));
		FAIL_IF(emit_byte(compiler, U8(byte | (byte << 4))));

		if ((cpu_feature_list & CPU_FEATURE_AVX2) && use_vex && pref == EX86_PREF_F2)
			return emit_vex_instruction(compiler, VPBROADCASTD_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, vreg, 0, vreg, 0);

		src = vreg;
		SLJIT_FALLTHROUGH
	case 2:
		byte = U8(src_lane_index);
		byte = U8(byte | (byte << 2));
		break;
	default:
		byte = U8(src_lane_index << 1);
		byte = U8(byte | (byte << 2) | 0x4);
		break;
	}

	if (use_vex)
		FAIL_IF(emit_vex_instruction(compiler, PSHUFD_x_xm | EX86_PREF_66 | EX86_SSE2, vreg, 0, src, 0));
	else
		FAIL_IF(emit_groupf(compiler, PSHUFD_x_xm | EX86_PREF_66 | EX86_SSE2, vreg, src, 0));
	return emit_byte(compiler, U8(byte | (byte << 4)));
}

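/* Note: widening loads map directly onto the PMOVSX/PMOVZX family for
   integers and CVTPS2PD for floats; only the extensions those
   instructions provide are supported. */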
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_extend(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 vreg,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
	sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
	sljit_s32 elem2_size = SLJIT_SIMD_GET_ELEM2_SIZE(type);
	sljit_s32 use_vex = (cpu_feature_list & CPU_FEATURE_AVX) && (compiler->options & SLJIT_ENTER_USE_VEX);
	sljit_u8 opcode;

	CHECK_ERROR();
	CHECK(check_sljit_emit_simd_extend(compiler, type, vreg, src, srcw));

	ADJUST_LOCAL_OFFSET(src, srcw);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif /* SLJIT_CONFIG_X86_64 */

	if (reg_size == 5) {
		if (!(cpu_feature_list & CPU_FEATURE_AVX2))
			return SLJIT_ERR_UNSUPPORTED;
		use_vex = 1;
	} else if (reg_size != 4)
		return SLJIT_ERR_UNSUPPORTED;

	if (type & SLJIT_SIMD_FLOAT) {
		if (elem_size != 2 || elem2_size != 3)
			return SLJIT_ERR_UNSUPPORTED;

		if (type & SLJIT_SIMD_TEST)
			return SLJIT_SUCCESS;

		if (use_vex)
			return emit_vex_instruction(compiler, CVTPS2PD_x_xm | ((reg_size == 5) ? VEX_256 : 0) | EX86_SSE2, vreg, 0, src, srcw);
		return emit_groupf(compiler, CVTPS2PD_x_xm | EX86_SSE2, vreg, src, srcw);
	}

	switch (elem_size) {
	case 0:
		if (elem2_size == 1)
			opcode = (type & SLJIT_SIMD_EXTEND_SIGNED) ? PMOVSXBW_x_xm : PMOVZXBW_x_xm;
		else if (elem2_size == 2)
			opcode = (type & SLJIT_SIMD_EXTEND_SIGNED) ? PMOVSXBD_x_xm : PMOVZXBD_x_xm;
		else if (elem2_size == 3)
			opcode = (type & SLJIT_SIMD_EXTEND_SIGNED) ? PMOVSXBQ_x_xm : PMOVZXBQ_x_xm;
		else
			return SLJIT_ERR_UNSUPPORTED;
		break;
	case 1:
		if (elem2_size == 2)
			opcode = (type & SLJIT_SIMD_EXTEND_SIGNED) ? PMOVSXWD_x_xm : PMOVZXWD_x_xm;
		else if (elem2_size == 3)
			opcode = (type & SLJIT_SIMD_EXTEND_SIGNED) ? PMOVSXWQ_x_xm : PMOVZXWQ_x_xm;
		else
			return SLJIT_ERR_UNSUPPORTED;
		break;
	case 2:
		if (elem2_size == 3)
			opcode = (type & SLJIT_SIMD_EXTEND_SIGNED) ? PMOVSXDQ_x_xm : PMOVZXDQ_x_xm;
		else
			return SLJIT_ERR_UNSUPPORTED;
		break;
	default:
		return SLJIT_ERR_UNSUPPORTED;
	}

	if (type & SLJIT_SIMD_TEST)
		return SLJIT_SUCCESS;

	if (use_vex)
		return emit_vex_instruction(compiler, opcode | ((reg_size == 5) ? VEX_256 : 0) | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, vreg, 0, src, srcw);
	return emit_groupf_ext(compiler, opcode | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, vreg, src, srcw);
}

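/* Note: the sign-bit vector is collected with PMOVMSKB/MOVMSKPS/MOVMSKPD.
   There is no 16-bit variant, so 16-bit elements are first narrowed with
   PACKSSWB and the duplicated high byte of the mask is shifted out. */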
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_sign(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 vreg,
	sljit_s32 dst, sljit_sw dstw)
{
	sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
	sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
	sljit_s32 use_vex = (cpu_feature_list & CPU_FEATURE_AVX) && (compiler->options & SLJIT_ENTER_USE_VEX);
	sljit_s32 dst_r;
	sljit_uw op;
	sljit_u8 *inst;

	CHECK_ERROR();
	CHECK(check_sljit_emit_simd_sign(compiler, type, vreg, dst, dstw));

	ADJUST_LOCAL_OFFSET(dst, dstw);

	CHECK_EXTRA_REGS(dst, dstw, (void)0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif /* SLJIT_CONFIG_X86_64 */

	if (elem_size > 3 || ((type & SLJIT_SIMD_FLOAT) && elem_size < 2))
		return SLJIT_ERR_UNSUPPORTED;

	if (reg_size == 4) {
		if (type & SLJIT_SIMD_TEST)
			return SLJIT_SUCCESS;

		op = EX86_PREF_66 | EX86_SSE2_OP2;

		switch (elem_size) {
		case 1:
			if (use_vex)
				FAIL_IF(emit_vex_instruction(compiler, PACKSSWB_x_xm | EX86_PREF_66 | EX86_SSE2 | VEX_SSE2_OPV, TMP_FREG, vreg, vreg, 0));
			else
				FAIL_IF(emit_groupf(compiler, PACKSSWB_x_xm | EX86_PREF_66 | EX86_SSE2, TMP_FREG, vreg, 0));
			vreg = TMP_FREG;
			break;
		case 2:
			op = EX86_SSE2_OP2;
			break;
		}

		dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
		op |= (elem_size < 2) ? PMOVMSKB_r_x : MOVMSKPS_r_x;

		if (use_vex)
			FAIL_IF(emit_vex_instruction(compiler, op, dst_r, 0, vreg, 0));
		else
			FAIL_IF(emit_groupf(compiler, op, dst_r, vreg, 0));

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		compiler->mode32 = type & SLJIT_32;
#endif /* SLJIT_CONFIG_X86_64 */

		if (elem_size == 1) {
			inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 8, dst_r, 0);
			FAIL_IF(!inst);
			inst[1] |= SHR;
		}

		if (dst_r == TMP_REG1)
			return emit_mov(compiler, dst, dstw, TMP_REG1, 0);

		return SLJIT_SUCCESS;
	}

	if (reg_size != 5 || !(cpu_feature_list & CPU_FEATURE_AVX2))
		return SLJIT_ERR_UNSUPPORTED;

	if (type & SLJIT_SIMD_TEST)
		return SLJIT_SUCCESS;

	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

	if (elem_size == 1) {
		FAIL_IF(emit_vex_instruction(compiler, VEXTRACTI128_x_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2, vreg, 0, TMP_FREG, 0));
		FAIL_IF(emit_byte(compiler, 1));
		FAIL_IF(emit_vex_instruction(compiler, PACKSSWB_x_xm | VEX_256 | EX86_PREF_66 | EX86_SSE2 | VEX_SSE2_OPV, TMP_FREG, vreg, TMP_FREG, 0));
		FAIL_IF(emit_groupf(compiler, PMOVMSKB_r_x | EX86_PREF_66 | EX86_SSE2_OP2, dst_r, TMP_FREG, 0));
	} else {
		op = MOVMSKPS_r_x | VEX_256 | EX86_SSE2_OP2;

		if (elem_size == 0)
			op = PMOVMSKB_r_x | VEX_256 | EX86_PREF_66 | EX86_SSE2_OP2;
		else if (elem_size == 3)
			op |= EX86_PREF_66;

		FAIL_IF(emit_vex_instruction(compiler, op, dst_r, 0, vreg, 0));
	}

	if (dst_r == TMP_REG1) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		compiler->mode32 = type & SLJIT_32;
#endif /* SLJIT_CONFIG_X86_64 */
		return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
	}

	return SLJIT_SUCCESS;
}

static sljit_s32 emit_simd_mov(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 dst_vreg, sljit_s32 src_vreg)
{
	sljit_uw op = ((type & SLJIT_SIMD_FLOAT) ? MOVAPS_x_xm : MOVDQA_x_xm) | EX86_SSE2;

	SLJIT_ASSERT(SLJIT_SIMD_GET_REG_SIZE(type) == 4);

	if (!(type & SLJIT_SIMD_FLOAT) || SLJIT_SIMD_GET_ELEM_SIZE(type) == 3)
		op |= EX86_PREF_66;

	return emit_groupf(compiler, op, dst_vreg, src_vreg, 0);
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_op2(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 dst_vreg, sljit_s32 src1_vreg, sljit_s32 src2, sljit_sw src2w)
{
	sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
	sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
	sljit_s32 use_vex = (cpu_feature_list & CPU_FEATURE_AVX) && (compiler->options & SLJIT_ENTER_USE_VEX);
	sljit_uw op = 0;
	sljit_uw mov_op = 0;

	CHECK_ERROR();
	CHECK(check_sljit_emit_simd_op2(compiler, type, dst_vreg, src1_vreg, src2, src2w));
	ADJUST_LOCAL_OFFSET(src2, src2w);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif /* SLJIT_CONFIG_X86_64 */

	if (reg_size == 5) {
		if (!(cpu_feature_list & CPU_FEATURE_AVX2))
			return SLJIT_ERR_UNSUPPORTED;
	} else if (reg_size != 4)
		return SLJIT_ERR_UNSUPPORTED;

	if ((type & SLJIT_SIMD_FLOAT) && (elem_size < 2 || elem_size > 3))
		return SLJIT_ERR_UNSUPPORTED;

	switch (SLJIT_SIMD_GET_OPCODE(type)) {
	case SLJIT_SIMD_OP2_AND:
		op = (type & SLJIT_SIMD_FLOAT) ? ANDPD_x_xm : PAND_x_xm;

		if (!(type & SLJIT_SIMD_FLOAT) || elem_size == 3)
			op |= EX86_PREF_66;
		break;
	case SLJIT_SIMD_OP2_OR:
		op = (type & SLJIT_SIMD_FLOAT) ? ORPD_x_xm : POR_x_xm;

		if (!(type & SLJIT_SIMD_FLOAT) || elem_size == 3)
			op |= EX86_PREF_66;
		break;
	case SLJIT_SIMD_OP2_XOR:
		op = (type & SLJIT_SIMD_FLOAT) ? XORPD_x_xm : PXOR_x_xm;

		if (!(type & SLJIT_SIMD_FLOAT) || elem_size == 3)
			op |= EX86_PREF_66;
		break;

	case SLJIT_SIMD_OP2_SHUFFLE:
		if (reg_size != 4)
			return SLJIT_ERR_UNSUPPORTED;

		op = PSHUFB_x_xm | EX86_PREF_66 | VEX_OP_0F38;
		break;
	}

	if (type & SLJIT_SIMD_TEST)
		return SLJIT_SUCCESS;

	if ((src2 & SLJIT_MEM) && SLJIT_SIMD_GET_ELEM2_SIZE(type) < reg_size) {
		mov_op = ((type & SLJIT_SIMD_FLOAT) ? (MOVUPS_x_xm | (elem_size == 3 ? EX86_PREF_66 : 0)) : (MOVDQU_x_xm | EX86_PREF_F3)) | EX86_SSE2;
		if (use_vex)
			FAIL_IF(emit_vex_instruction(compiler, mov_op, TMP_FREG, 0, src2, src2w));
		else
			FAIL_IF(emit_groupf(compiler, mov_op, TMP_FREG, src2, src2w));

		src2 = TMP_FREG;
		src2w = 0;
	}

	if (reg_size == 5 || use_vex) {
		if (reg_size == 5)
			op |= VEX_256;

		return emit_vex_instruction(compiler, op | EX86_SSE2 | VEX_SSE2_OPV, dst_vreg, src1_vreg, src2, src2w);
	}

	if (dst_vreg != src1_vreg) {
		if (dst_vreg == src2) {
			if (SLJIT_SIMD_GET_OPCODE(type) == SLJIT_SIMD_OP2_SHUFFLE) {
				FAIL_IF(emit_simd_mov(compiler, type, TMP_FREG, src2));
				FAIL_IF(emit_simd_mov(compiler, type, dst_vreg, src1_vreg));
				src2 = TMP_FREG;
				src2w = 0;
			} else
				src2 = src1_vreg;
		} else
			FAIL_IF(emit_simd_mov(compiler, type, dst_vreg, src1_vreg));
	}

	if (op & (VEX_OP_0F38 | VEX_OP_0F3A))
		return emit_groupf_ext(compiler, op | EX86_SSE2, dst_vreg, src2, src2w);
	return emit_groupf(compiler, op | EX86_SSE2, dst_vreg, src2, src2w);
}

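/* Note: an ordinary MOV from memory is already atomic on x86, so the
   CAS-based atomic load is lowered to a plain sljit_emit_op1 load; only
   the unsigned/untyped MOV opcodes are accepted. */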
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_load(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst_reg,
	sljit_s32 mem_reg)
{
	CHECK_ERROR();
	CHECK(check_sljit_emit_atomic_load(compiler, op, dst_reg, mem_reg));

	if ((op & SLJIT_ATOMIC_USE_LS) || GET_OPCODE(op) == SLJIT_MOV_S8 || GET_OPCODE(op) == SLJIT_MOV_S16 || GET_OPCODE(op) == SLJIT_MOV_S32)
		return SLJIT_ERR_UNSUPPORTED;

	if (op & SLJIT_ATOMIC_TEST)
		return SLJIT_SUCCESS;

	SLJIT_SKIP_CHECKS(compiler);
	return sljit_emit_op1(compiler, op & ~SLJIT_ATOMIC_USE_CAS, dst_reg, 0, SLJIT_MEM1(mem_reg), 0);
}

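/* Note: the store is implemented with LOCK CMPXCHG, which compares
   against and returns the old value in EAX/RAX; the register shuffling
   below therefore moves temp_reg into SLJIT_R0 (saving the clobbered
   registers) before the locked instruction is emitted. */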
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_store(struct sljit_compiler *compiler, sljit_s32 op,
5074
sljit_s32 src_reg,
5075
sljit_s32 mem_reg,
5076
sljit_s32 temp_reg)
5077
{
5078
sljit_uw pref;
5079
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
5080
sljit_s32 saved_reg = TMP_REG1;
5081
sljit_s32 swap_tmp = 0;
5082
sljit_sw srcw = 0;
5083
sljit_sw tempw = 0;
5084
#endif /* SLJIT_CONFIG_X86_32 */
5085
5086
CHECK_ERROR();
5087
CHECK(check_sljit_emit_atomic_store(compiler, op, src_reg, mem_reg, temp_reg));
5088
CHECK_EXTRA_REGS(src_reg, srcw, (void)0);
5089
CHECK_EXTRA_REGS(temp_reg, tempw, (void)0);
5090
5091
SLJIT_ASSERT(FAST_IS_REG(src_reg) || src_reg == SLJIT_MEM1(SLJIT_SP));
5092
SLJIT_ASSERT(FAST_IS_REG(temp_reg) || temp_reg == SLJIT_MEM1(SLJIT_SP));
5093
5094
if ((op & SLJIT_ATOMIC_USE_LS) || GET_OPCODE(op) == SLJIT_MOV_S8 || GET_OPCODE(op) == SLJIT_MOV_S16 || GET_OPCODE(op) == SLJIT_MOV_S32)
5095
return SLJIT_ERR_UNSUPPORTED;
5096
5097
if (op & SLJIT_ATOMIC_TEST)
5098
return SLJIT_SUCCESS;
5099
5100
op = GET_OPCODE(op);
5101
5102
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
5103
if (temp_reg == SLJIT_TMP_DEST_REG) {
5104
FAIL_IF(emit_byte(compiler, XCHG_EAX_r | reg_map[TMP_REG1]));
5105
5106
if (src_reg == SLJIT_R0)
5107
src_reg = TMP_REG1;
5108
if (mem_reg == SLJIT_R0)
5109
mem_reg = TMP_REG1;
5110
5111
temp_reg = SLJIT_R0;
5112
swap_tmp = 1;
5113
}
5114
5115
/* Src is virtual register or its low byte is not accessible. */
5116
if ((src_reg & SLJIT_MEM) || (op == SLJIT_MOV_U8 && reg_map[src_reg] >= 4)) {
5117
SLJIT_ASSERT(src_reg != SLJIT_R1 && temp_reg != SLJIT_TMP_DEST_REG);
5118
5119
if (swap_tmp) {
5120
saved_reg = (mem_reg != SLJIT_R1) ? SLJIT_R1 : SLJIT_R2;
5121
5122
EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), 0, saved_reg, 0);
5123
EMIT_MOV(compiler, saved_reg, 0, src_reg, srcw);
5124
} else
5125
EMIT_MOV(compiler, TMP_REG1, 0, src_reg, srcw);
5126
5127
src_reg = saved_reg;
5128
5129
if (mem_reg == src_reg)
5130
mem_reg = saved_reg;
5131
}
5132
#endif /* SLJIT_CONFIG_X86_32 */
5133
5134
if (temp_reg != SLJIT_R0) {
5135
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
5136
compiler->mode32 = 0;
5137
5138
EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_R0, 0);
5139
EMIT_MOV(compiler, SLJIT_R0, 0, temp_reg, 0);
5140
5141
if (src_reg == SLJIT_R0)
5142
src_reg = TMP_REG2;
5143
if (mem_reg == SLJIT_R0)
5144
mem_reg = TMP_REG2;
5145
#else /* !SLJIT_CONFIG_X86_64 */
5146
SLJIT_ASSERT(!swap_tmp);
5147
5148
if (src_reg == TMP_REG1) {
5149
if (mem_reg == SLJIT_R0) {
5150
EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), 0, SLJIT_R1, 0);
5151
EMIT_MOV(compiler, SLJIT_R1, 0, SLJIT_R0, 0);
5152
EMIT_MOV(compiler, SLJIT_R0, 0, temp_reg, tempw);
5153
5154
mem_reg = SLJIT_R1;
5155
saved_reg = SLJIT_R1;
5156
} else {
5157
EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), 0, SLJIT_R0, 0);
5158
EMIT_MOV(compiler, SLJIT_R0, 0, temp_reg, tempw);
5159
saved_reg = SLJIT_R0;
5160
}
5161
} else {
5162
EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_R0, 0);
5163
EMIT_MOV(compiler, SLJIT_R0, 0, temp_reg, tempw);
5164
5165
if (src_reg == SLJIT_R0)
5166
src_reg = TMP_REG1;
5167
if (mem_reg == SLJIT_R0)
5168
mem_reg = TMP_REG1;
5169
}
5170
#endif /* SLJIT_CONFIG_X86_64 */
5171
}
5172
5173
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
5174
compiler->mode32 = op != SLJIT_MOV && op != SLJIT_MOV_P;
5175
#endif /* SLJIT_CONFIG_X86_64 */
5176
5177
	/* Lock prefix. */
	FAIL_IF(emit_byte(compiler, GROUP_LOCK));

	pref = 0;
	if (op == SLJIT_MOV_U16)
		pref = EX86_HALF_ARG | EX86_PREF_66;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (op == SLJIT_MOV_U8)
		pref = EX86_REX;
#endif /* SLJIT_CONFIG_X86_64 */

	FAIL_IF(emit_groupf(compiler, (op == SLJIT_MOV_U8 ? CMPXCHG_rm8_r : CMPXCHG_rm_r) | pref, src_reg, SLJIT_MEM1(mem_reg), 0));

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	if (swap_tmp) {
		SLJIT_ASSERT(temp_reg == SLJIT_R0);
		FAIL_IF(emit_byte(compiler, XCHG_EAX_r | reg_map[TMP_REG1]));

		if (saved_reg != TMP_REG1)
			return emit_mov(compiler, saved_reg, 0, SLJIT_MEM1(SLJIT_SP), 0);
		return SLJIT_SUCCESS;
	}
#endif /* SLJIT_CONFIG_X86_32 */

	if (temp_reg != SLJIT_R0) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		compiler->mode32 = 0;
		return emit_mov(compiler, SLJIT_R0, 0, TMP_REG2, 0);
#else /* !SLJIT_CONFIG_X86_64 */
		EMIT_MOV(compiler, SLJIT_R0, 0, (saved_reg == SLJIT_R0) ? SLJIT_MEM1(SLJIT_SP) : saved_reg, 0);
		if (saved_reg == SLJIT_R1)
			return emit_mov(compiler, SLJIT_R1, 0, SLJIT_MEM1(SLJIT_SP), 0);
#endif /* SLJIT_CONFIG_X86_64 */
	}
	return SLJIT_SUCCESS;
}

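/* A minimal caller-side sketch (not part of this file): a compare-and-swap
   loop built from the atomic load/store pair. The register roles are
   assumptions chosen for the example: SLJIT_R0 receives the loaded
   (expected) value, SLJIT_R1 holds the address, SLJIT_R2 the new value.

     sljit_emit_atomic_load(compiler, SLJIT_MOV32, SLJIT_R0, SLJIT_R1);
     ...compute the new value in SLJIT_R2...
     sljit_emit_atomic_store(compiler, SLJIT_MOV32 | SLJIT_SET_ATOMIC_STORED,
         SLJIT_R2, SLJIT_R1, SLJIT_R0);
     jump = sljit_emit_jump(compiler, SLJIT_ATOMIC_NOT_STORED);

   On x86 the store lowers to LOCK CMPXCHG, which is why temp_reg has to
   end up in EAX/RAX before the instruction is emitted. */
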
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_local_base(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw offset)
{
	CHECK_ERROR();
	CHECK(check_sljit_get_local_base(compiler, dst, dstw, offset));
	ADJUST_LOCAL_OFFSET(dst, dstw);
	ADJUST_LOCAL_OFFSET(SLJIT_MEM1(SLJIT_SP), offset);

	CHECK_EXTRA_REGS(dst, dstw, (void)0);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 0;
#endif

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (NOT_HALFWORD(offset)) {
		FAIL_IF(emit_load_imm64(compiler, TMP_REG1, offset));
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
		SLJIT_ASSERT(emit_lea_binary(compiler, dst, dstw, SLJIT_SP, 0, TMP_REG1, 0) != SLJIT_ERR_UNSUPPORTED);
		return compiler->error;
#else
		return emit_lea_binary(compiler, dst, dstw, SLJIT_SP, 0, TMP_REG1, 0);
#endif
	}
#endif

	if (offset != 0)
		return emit_lea_binary(compiler, dst, dstw, SLJIT_SP, 0, SLJIT_IMM, offset);
	return emit_mov(compiler, dst, dstw, SLJIT_SP, 0);
}

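/* Usage sketch (assumed caller-side code, offset chosen for illustration):
   materialize the address of a local at offset 16 in the current stack
   frame into SLJIT_R0, which on x86 normally becomes a single LEA from
   SLJIT_SP:

     sljit_get_local_base(compiler, SLJIT_R0, 0, 16);

   Offsets that do not fit in a signed 32-bit immediate take the
   emit_load_imm64 + LEA path above on x86-64. */
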
SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_sw init_value)
{
	sljit_u8 *inst;
	struct sljit_const *const_;
	sljit_s32 reg;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	sljit_s32 dst_is_ereg = 0;
#endif /* SLJIT_CONFIG_X86_32 */

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_const(compiler, op, dst, dstw, init_value));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	CHECK_EXTRA_REGS(dst, dstw, dst_is_ereg = 1);

	const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
	PTR_FAIL_IF(!const_);
	set_const(const_, compiler);

	switch (GET_OPCODE(op)) {
	case SLJIT_MOV_U8:
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		compiler->mode32 = (op & SLJIT_32);
#endif /* SLJIT_CONFIG_X86_64 */

		if ((init_value & 0x100) != 0)
			init_value = init_value | -(sljit_sw)0x100;
		else
			init_value = (sljit_u8)init_value;

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (dst_is_ereg) {
			if (emit_mov(compiler, dst, dstw, SLJIT_IMM, (sljit_s32)init_value))
				return NULL;
			dst = 0;
			break;
		}
#endif /* SLJIT_CONFIG_X86_32 */

		reg = FAST_IS_REG(dst) ? dst : TMP_REG1;

		if (emit_mov(compiler, reg, 0, SLJIT_IMM, init_value))
			return NULL;
		break;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	case SLJIT_MOV:
		compiler->mode32 = 0;
		reg = FAST_IS_REG(dst) ? dst : TMP_REG1;

		if (emit_load_imm64(compiler, reg, init_value))
			return NULL;
		break;
#endif /* SLJIT_CONFIG_X86_64 */
	default:
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		compiler->mode32 = (op == SLJIT_MOV32);
#endif /* SLJIT_CONFIG_X86_64 */

		if (emit_mov(compiler, dst, dstw, SLJIT_IMM, (sljit_s32)init_value))
			return NULL;
		dst = 0;
		break;
	}

	inst = (sljit_u8*)ensure_buf(compiler, 1);
	PTR_FAIL_IF(!inst);

	inst[0] = SLJIT_INST_CONST;

	if (dst & SLJIT_MEM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (op == SLJIT_MOV) {
			if (emit_mov(compiler, dst, dstw, TMP_REG1, 0))
				return NULL;
			return const_;
		}
#endif

		if (emit_mov_byte(compiler, 0, dst, dstw, TMP_REG1, 0))
			return NULL;
	}

	return const_;
}

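/* Usage sketch (assumed caller-side code; variable names are illustrative):
   create a patchable 32-bit constant, then rewrite it after the code has
   been generated:

     struct sljit_const *c = sljit_emit_const(compiler, SLJIT_MOV32, SLJIT_R0, 0, 42);
     ...
     code = sljit_generate_code(compiler, 0, NULL);
     sljit_set_const(sljit_get_const_addr(c), SLJIT_MOV32, 1234,
         sljit_get_executable_offset(compiler));

   The recorded address points just past the immediate, which is why
   sljit_set_const below subtracts the immediate size from addr. */
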
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_op_addr(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw)
{
	struct sljit_jump *jump;
	sljit_u8 *inst;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	sljit_s32 reg;
#endif /* SLJIT_CONFIG_X86_64 */
	SLJIT_UNUSED_ARG(op);

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_op_addr(compiler, op, dst, dstw));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	CHECK_EXTRA_REGS(dst, dstw, (void)0);

	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
	PTR_FAIL_IF(!jump);
	set_mov_addr(jump, compiler, 0);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 0;
	if (dst & SLJIT_MEM)
		reg = TMP_REG1;
	else
		reg = (op != SLJIT_ADD_ABS_ADDR) ? dst : TMP_REG2;

	PTR_FAIL_IF(emit_load_imm64(compiler, reg, 0));
	jump->addr = compiler->size;

	if (reg_map[reg] >= 8)
		jump->flags |= MOV_ADDR_HI;
#else /* !SLJIT_CONFIG_X86_64 */
	if (op == SLJIT_ADD_ABS_ADDR) {
		if (dst != SLJIT_R0) {
			/* Must not be a signed byte argument. */
			inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, 0x100, dst, dstw);
			PTR_FAIL_IF(!inst);
			*(inst + 1) |= ADD;
		} else
			PTR_FAIL_IF(emit_do_imm(compiler, ADD_EAX_i32, 0));
	} else {
		PTR_FAIL_IF(emit_mov(compiler, dst, dstw, SLJIT_IMM, 0));
	}
#endif /* SLJIT_CONFIG_X86_64 */

	inst = (sljit_u8*)ensure_buf(compiler, 1);
	PTR_FAIL_IF(!inst);

	inst[0] = SLJIT_INST_MOV_ADDR;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (op == SLJIT_ADD_ABS_ADDR) {
		inst = emit_x86_instruction(compiler, 1, reg, 0, dst, dstw);
		PTR_FAIL_IF(!inst);
		*inst = ADD_rm_r;
	} else if (dst & SLJIT_MEM)
		PTR_FAIL_IF(emit_mov(compiler, dst, dstw, TMP_REG1, 0));
#endif /* SLJIT_CONFIG_X86_64 */

	return jump;
}

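/* Usage sketch (assumed caller-side code): load the absolute address of a
   label into a register; the value is patched at code generation time:

     struct sljit_jump *mov_addr = sljit_emit_op_addr(compiler, SLJIT_MOV_ADDR, SLJIT_R0, 0);
     ...
     sljit_set_label(mov_addr, label);

   SLJIT_ADD_ABS_ADDR instead adds the resolved address to the existing
   value of the destination. */
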
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
{
	SLJIT_UNUSED_ARG(executable_offset);

	SLJIT_UPDATE_WX_FLAGS((void*)addr, (void*)(addr + sizeof(sljit_uw)), 0);
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	sljit_unaligned_store_sw((void*)addr, (sljit_sw)(new_target - (addr + 4) - (sljit_uw)executable_offset));
#else
	sljit_unaligned_store_sw((void*)addr, (sljit_sw)new_target);
#endif
	SLJIT_UPDATE_WX_FLAGS((void*)addr, (void*)(addr + sizeof(sljit_uw)), 1);
}

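/* Note: on x86-32 the patched value is a rel32 displacement, taken
   relative to the end of the 32-bit field (addr + 4) and adjusted by the
   executable offset; on x86-64 the target is stored as the absolute
   64-bit immediate of the preceding MOV. */
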
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_s32 op, sljit_sw new_constant, sljit_sw executable_offset)
{
	void *start_addr;
	SLJIT_UNUSED_ARG(executable_offset);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (op == SLJIT_MOV) {
		start_addr = (void*)(addr - sizeof(sljit_sw));
		SLJIT_UPDATE_WX_FLAGS(start_addr, (void*)addr, 0);
		sljit_unaligned_store_sw(start_addr, new_constant);
		SLJIT_UPDATE_WX_FLAGS(start_addr, (void*)addr, 1);
		return;
	}
#endif

	start_addr = (void*)(addr - sizeof(sljit_s32));

	if ((op | SLJIT_32) == SLJIT_MOV32_U8) {
		if ((new_constant & 0x100) != 0)
			new_constant = new_constant | -(sljit_sw)0x100;
		else
			new_constant = (sljit_u8)new_constant;
	}

	SLJIT_UPDATE_WX_FLAGS(start_addr, (void*)addr, 0);
	sljit_unaligned_store_s32(start_addr, (sljit_s32)new_constant);
	SLJIT_UPDATE_WX_FLAGS(start_addr, (void*)addr, 1);
}