Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
Roblox
GitHub Repository: Roblox/luau
Path: blob/master/CodeGen/src/IrBuilder.cpp
2725 views
1
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
2
#include "Luau/IrBuilder.h"
3
4
#include "Luau/Bytecode.h"
5
#include "Luau/BytecodeAnalysis.h"
6
#include "Luau/IrData.h"
7
#include "Luau/IrUtils.h"
8
9
#include "IrTranslation.h"
10
11
#include "lapi.h"
12
13
#include <string.h>
14
15
LUAU_FASTFLAG(LuauCodegenBlockSafeEnv)
16
LUAU_FASTFLAG(LuauCodegenSetBlockEntryState3)
17
18
namespace Luau
19
{
20
namespace CodeGen
21
{
22
23
// Sentinel stored in instIndexToBlock for bytecode positions that have no IR block starting at them
constexpr unsigned kNoAssociatedBlockIndex = ~0u;
24
25
// Host hooks are used during bytecode translation/analysis.
// The constantMap initializer is presumably the dense map's 'empty key' sentinel
// (a Tag constant with an all-ones payload no real constant uses) — confirm against DenseHashMap.
IrBuilder::IrBuilder(const HostIrHooks& hostHooks)
    : hostHooks(hostHooks)
    , constantMap({IrConstKind::Tag, ~0ull})
{
}
30
31
static bool hasTypedParameters(const BytecodeTypeInfo& typeInfo)
32
{
33
for (auto el : typeInfo.argumentTypes)
34
{
35
if (el != LBC_TYPE_ANY)
36
return true;
37
}
38
39
return false;
40
}
41
42
// Emits tag checks at function entry for every argument with a static bytecode
// type. Optional arguments branch on nil first and only verify the tag on the
// non-nil path; a failed check exits back to the VM at kVmExitEntryGuardPc.
// All blocks emitted here are flagged as entry argument check blocks when
// LuauCodegenSetBlockEntryState3 is enabled.
static void buildArgumentTypeChecks(IrBuilder& build, IrOp entry)
{
    const BytecodeTypeInfo& typeInfo = FFlag::LuauCodegenSetBlockEntryState3 ? build.function.bcOriginalTypeInfo : build.function.bcTypeInfo;
    CODEGEN_ASSERT(hasTypedParameters(typeInfo));

    if (FFlag::LuauCodegenSetBlockEntryState3)
        build.function.blockOp(entry).flags |= kBlockFlagEntryArgCheck;

    for (size_t i = 0; i < typeInfo.argumentTypes.size(); i++)
    {
        uint8_t et = typeInfo.argumentTypes[i];

        // Split the encoded type into the tag itself and the optionality bit
        uint8_t tag = et & ~LBC_TYPE_OPTIONAL_BIT;
        uint8_t optional = et & LBC_TYPE_OPTIONAL_BIT;

        if (tag == LBC_TYPE_ANY)
            continue;

        IrOp load = build.inst(IrCmd::LOAD_TAG, build.vmReg(uint8_t(i)));

        IrOp nextCheck;
        if (optional)
        {
            // nil passes immediately; any other tag falls through to the real check
            nextCheck = build.block(IrBlockKind::Internal);
            IrOp fallbackCheck = build.block(IrBlockKind::Internal);

            build.inst(IrCmd::JUMP_EQ_TAG, load, build.constTag(LUA_TNIL), nextCheck, fallbackCheck);

            build.beginBlock(fallbackCheck);

            if (FFlag::LuauCodegenSetBlockEntryState3)
                build.function.blockOp(fallbackCheck).flags |= kBlockFlagEntryArgCheck;
        }

        switch (tag)
        {
        case LBC_TYPE_NIL:
            build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TNIL), build.vmExit(kVmExitEntryGuardPc));
            break;
        case LBC_TYPE_BOOLEAN:
            build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TBOOLEAN), build.vmExit(kVmExitEntryGuardPc));
            break;
        case LBC_TYPE_NUMBER:
            build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TNUMBER), build.vmExit(kVmExitEntryGuardPc));
            break;
        case LBC_TYPE_INTEGER:
            build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TINTEGER), build.vmExit(kVmExitEntryGuardPc));
            break;
        case LBC_TYPE_STRING:
            build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TSTRING), build.vmExit(kVmExitEntryGuardPc));
            break;
        case LBC_TYPE_TABLE:
            build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TTABLE), build.vmExit(kVmExitEntryGuardPc));
            break;
        case LBC_TYPE_FUNCTION:
            build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TFUNCTION), build.vmExit(kVmExitEntryGuardPc));
            break;
        case LBC_TYPE_THREAD:
            build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TTHREAD), build.vmExit(kVmExitEntryGuardPc));
            break;
        case LBC_TYPE_USERDATA:
            build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TUSERDATA), build.vmExit(kVmExitEntryGuardPc));
            break;
        case LBC_TYPE_VECTOR:
            build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TVECTOR), build.vmExit(kVmExitEntryGuardPc));
            break;
        case LBC_TYPE_BUFFER:
            build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TBUFFER), build.vmExit(kVmExitEntryGuardPc));
            break;
        default:
            // Named userdata types only guarantee the base userdata tag here
            if (tag >= LBC_TYPE_TAGGED_USERDATA_BASE && tag < LBC_TYPE_TAGGED_USERDATA_END)
            {
                build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TUSERDATA), build.vmExit(kVmExitEntryGuardPc));
            }
            else
            {
                CODEGEN_ASSERT(!"unknown argument type tag");
            }
            break;
        }

        if (optional)
        {
            build.inst(IrCmd::JUMP, nextCheck);

            build.beginBlock(nextCheck);

            if (FFlag::LuauCodegenSetBlockEntryState3)
                build.function.blockOp(nextCheck).flags |= kBlockFlagEntryArgCheck;
        }
    }

    // If the last argument is optional, we can skip creating a new internal block since one will already have been created.
    if (!(typeInfo.argumentTypes.back() & LBC_TYPE_OPTIONAL_BIT))
    {
        IrOp next = build.block(IrBlockKind::Internal);
        build.inst(IrCmd::JUMP, next);

        build.beginBlock(next);
    }
}
143
144
// Translates the whole function 'proto' from Luau bytecode into IR:
// reserves blocks for bytecode control flow, optionally emits entry argument
// type checks, then walks every bytecode instruction and lowers it into the
// currently active block, finally recomputing instruction use counts.
void IrBuilder::buildFunctionIr(Proto* proto)
{
    function.proto = proto;
    function.variadic = proto->is_vararg != 0;

    loadBytecodeTypeInfo(function);

    // Reserve entry block
    bool generateTypeChecks = hasTypedParameters(function.bcTypeInfo);
    IrOp entry = generateTypeChecks ? block(IrBlockKind::Internal) : IrOp{};

    // Rebuild original control flow blocks
    rebuildBytecodeBasicBlocks(proto);

    // Infer register tags in bytecode
    analyzeBytecodeTypes(function, hostHooks);

    // One mapping entry per bytecode instruction, filled in during translation below
    function.bcMapping.resize(proto->sizecode, {~0u, ~0u});

    if (generateTypeChecks)
    {
        beginBlock(entry);

        buildArgumentTypeChecks(*this, entry);

        // After the checks pass, fall through into the block for instruction 0
        inst(IrCmd::JUMP, blockAtInst(0));
    }
    else
    {
        entry = blockAtInst(0);
    }

    function.entryBlock = entry.index;

    // Translate all instructions to IR inside blocks
    for (int i = 0; i < proto->sizecode;)
    {
        const Instruction* pc = &proto->code[i];
        LuauOpcode op = LuauOpcode(LUAU_INSN_OP(*pc));

        int nexti = i + getOpLength(op);
        CODEGEN_ASSERT(nexti <= proto->sizecode);

        function.bcMapping[i] = {uint32_t(function.instructions.size()), ~0u};

        // Begin new block at this instruction if it was in the bytecode or requested during translation
        if (instIndexToBlock[i] != kNoAssociatedBlockIndex)
        {
            if (FFlag::LuauCodegenBlockSafeEnv)
            {
                IrOp block = blockAtInst(i);

                beginBlock(block);

                // Remember which bytecode position this block starts at
                function.blockOp(block).startpc = uint32_t(i);
            }
            else
            {
                beginBlock(blockAtInst(i));
            }
        }

        // Numeric for loops require additional processing to maintain loop stack
        // Notably, this must be performed even when the block is dead so that we maintain the pairing FORNPREP-FORNLOOP
        if (int(op) == LOP_FORNPREP)
            beforeInstForNPrep(*this, pc, i);

        // We skip dead bytecode instructions when they appear after block was already terminated
        if (!inTerminatedBlock)
        {
            if (interruptRequested)
            {
                interruptRequested = false;
                inst(IrCmd::INTERRUPT, constUint(i));
            }

            translateInst(op, pc, i);

            // A translation may request resuming at a later bytecode position
            // (e.g. when a comparison was fused with the LOADB pair that follows it)
            if (cmdSkipTarget != -1)
            {
                nexti = cmdSkipTarget;
                cmdSkipTarget = -1;
            }
        }

        // See above for FORNPREP..FORNLOOP processing
        if (int(op) == LOP_FORNLOOP)
            afterInstForNLoop(*this, pc);

        i = nexti;
        CODEGEN_ASSERT(i <= proto->sizecode);

        // If we are going into a new block at the next instruction and it's a fallthrough, jump has to be placed to mark block termination
        if (i < int(instIndexToBlock.size()) && instIndexToBlock[i] != kNoAssociatedBlockIndex)
        {
            if (!isBlockTerminator(function.instructions.back().cmd))
                inst(IrCmd::JUMP, blockAtInst(i));
        }
    }

    // Now that all has been generated, compute use counts
    updateUseCounts(function);
}
247
248
// Scans the bytecode once to locate every jump target, then allocates one
// bytecode-level IR block per target (plus one for the function entry) and
// records the instruction-index -> block-index mapping in instIndexToBlock.
void IrBuilder::rebuildBytecodeBasicBlocks(Proto* proto)
{
    instIndexToBlock.resize(proto->sizecode, kNoAssociatedBlockIndex);

    // One flag per instruction: set when some instruction can jump here
    std::vector<uint8_t> jumpTargets(proto->sizecode, 0);

    int pos = 0;

    while (pos < proto->sizecode)
    {
        const Instruction* insn = &proto->code[pos];
        LuauOpcode opcode = LuauOpcode(LUAU_INSN_OP(*insn));

        int jumpDest = getJumpTarget(*insn, uint32_t(pos));

        // Fast-call 'jumps' are handled inline and do not begin a new block
        if (jumpDest >= 0 && !isFastCall(opcode))
            jumpTargets[jumpDest] = true;

        pos += getOpLength(opcode);
        CODEGEN_ASSERT(pos <= proto->sizecode);
    }

    // Bytecode blocks are created at bytecode jump targets and the start of a function
    jumpTargets[0] = true;

    for (int index = 0; index < proto->sizecode; index++)
    {
        if (!jumpTargets[index])
            continue;

        IrOp created = block(IrBlockKind::Bytecode);
        instIndexToBlock[index] = created.index;
    }

    buildBytecodeBlocks(function, jumpTargets);
}
283
284
// Detects the canonical compiler pattern that materializes a boolean from a
// comparison between values:
//   LOP_JUMP** Lx
//   [aux]
//   LOADB Rx, 0 +1
//   Lx: LOADB Rx, 1
static bool isDirectCompare(Proto* proto, const Instruction* pc, int i)
{
    // Both LOADB instructions must exist and the jump must land on the second one
    if (i + 3 >= proto->sizecode || LUAU_INSN_D(*pc) != 2)
        return false;

    const Instruction first = pc[2];
    const Instruction second = pc[3];

    if (LUAU_INSN_OP(first) != LOP_LOADB || LUAU_INSN_OP(second) != LOP_LOADB)
        return false;

    bool sameTarget = LUAU_INSN_A(first) == LUAU_INSN_A(second);
    bool zeroAndOne = LUAU_INSN_B(first) == 0 && LUAU_INSN_B(second) == 1;
    bool correctJumps = LUAU_INSN_C(first) == 1 && LUAU_INSN_C(second) == 0;

    return sameTarget && zeroAndOne && correctJumps;
}
308
309
// Lowers a single bytecode instruction at position 'i' into IR in the active
// block. Most opcodes delegate to a translateInst* helper; a few (CALL, RETURN,
// SETLIST, vararg/closure fallbacks) are emitted inline. Comparison opcodes
// that match the isDirectCompare pattern are fused with the LOADB pair that
// follows, in which case cmdSkipTarget is set so the caller skips ahead.
void IrBuilder::translateInst(LuauOpcode op, const Instruction* pc, int i)
{
    switch (int(op))
    {
    case LOP_NOP:
        break;
    case LOP_LOADNIL:
        translateInstLoadNil(*this, pc);
        break;
    case LOP_LOADB:
        translateInstLoadB(*this, pc, i);
        break;
    case LOP_LOADN:
        translateInstLoadN(*this, pc);
        break;
    case LOP_LOADK:
        translateInstLoadK(*this, pc);
        break;
    case LOP_LOADKX:
        translateInstLoadKX(*this, pc);
        break;
    case LOP_MOVE:
        translateInstMove(*this, pc);
        break;
    case LOP_GETGLOBAL:
        translateInstGetGlobal(*this, pc, i);
        break;
    case LOP_SETGLOBAL:
        translateInstSetGlobal(*this, pc, i);
        break;
    case LOP_CALL:
        inst(IrCmd::INTERRUPT, constUint(i));
        inst(IrCmd::SET_SAVEDPC, constUint(i + 1));

        inst(IrCmd::CALL, vmReg(LUAU_INSN_A(*pc)), constInt(LUAU_INSN_B(*pc) - 1), constInt(LUAU_INSN_C(*pc) - 1));

        // A CALL that follows a fast call rejoins the fast call's continuation block
        if (activeFastcallFallback)
        {
            inst(IrCmd::JUMP, fastcallFallbackReturn);

            beginBlock(fastcallFallbackReturn);

            activeFastcallFallback = false;
        }
        break;
    case LOP_RETURN:
        inst(IrCmd::INTERRUPT, constUint(i));

        inst(IrCmd::RETURN, vmReg(LUAU_INSN_A(*pc)), constInt(LUAU_INSN_B(*pc) - 1));
        break;
    case LOP_GETTABLE:
        translateInstGetTable(*this, pc, i);
        break;
    case LOP_SETTABLE:
        translateInstSetTable(*this, pc, i);
        break;
    case LOP_GETTABLEKS:
        translateInstGetTableKS(*this, pc, i);
        break;
    case LOP_SETTABLEKS:
        translateInstSetTableKS(*this, pc, i);
        break;
    case LOP_GETTABLEN:
        translateInstGetTableN(*this, pc, i);
        break;
    case LOP_SETTABLEN:
        translateInstSetTableN(*this, pc, i);
        break;
    case LOP_JUMP:
        translateInstJump(*this, pc, i);
        break;
    case LOP_JUMPBACK:
        translateInstJumpBack(*this, pc, i);
        break;
    case LOP_JUMPIF:
        translateInstJumpIf(*this, pc, i, /* not_ */ false);
        break;
    case LOP_JUMPIFNOT:
        translateInstJumpIf(*this, pc, i, /* not_ */ true);
        break;
    case LOP_JUMPIFEQ:
        if (isDirectCompare(function.proto, pc, i))
        {
            translateInstJumpIfEqShortcut(*this, pc, i, /* not_ */ false);

            // We complete the current instruction and the first LOADB, but we do not skip the second LOADB
            // This is because the second LOADB was a jump target so there is a block prepared to handle it
            cmdSkipTarget = i + 3;
            break;
        }

        translateInstJumpIfEq(*this, pc, i, /* not_ */ false);
        break;
    case LOP_JUMPIFLE:
        translateInstJumpIfCond(*this, pc, i, IrCondition::LessEqual);
        break;
    case LOP_JUMPIFLT:
        translateInstJumpIfCond(*this, pc, i, IrCondition::Less);
        break;
    case LOP_JUMPIFNOTEQ:
        if (isDirectCompare(function.proto, pc, i))
        {
            translateInstJumpIfEqShortcut(*this, pc, i, /* not_ */ true);

            // We complete the current instruction and the first LOADB, but we do not skip the second LOADB
            // This is because the second LOADB was a jump target so there is a block prepared to handle it
            cmdSkipTarget = i + 3;
            break;
        }

        translateInstJumpIfEq(*this, pc, i, /* not_ */ true);
        break;
    case LOP_JUMPIFNOTLE:
        translateInstJumpIfCond(*this, pc, i, IrCondition::NotLessEqual);
        break;
    case LOP_JUMPIFNOTLT:
        translateInstJumpIfCond(*this, pc, i, IrCondition::NotLess);
        break;
    case LOP_JUMPX:
        translateInstJumpX(*this, pc, i);
        break;
    case LOP_JUMPXEQKNIL:
        if (isDirectCompare(function.proto, pc, i))
        {
            translateInstJumpxEqNilShortcut(*this, pc, i);

            // We complete the current instruction and the first LOADB, but we do not skip the second LOADB
            // This is because the second LOADB was a jump target so there is a block prepared to handle it
            cmdSkipTarget = i + 3;
            break;
        }

        translateInstJumpxEqNil(*this, pc, i);
        break;
    case LOP_JUMPXEQKB:
        if (isDirectCompare(function.proto, pc, i))
        {
            translateInstJumpxEqBShortcut(*this, pc, i);

            // We complete the current instruction and the first LOADB, but we do not skip the second LOADB
            // This is because the second LOADB was a jump target so there is a block prepared to handle it
            cmdSkipTarget = i + 3;
            break;
        }

        translateInstJumpxEqB(*this, pc, i);
        break;
    case LOP_JUMPXEQKN:
        if (isDirectCompare(function.proto, pc, i))
        {
            translateInstJumpxEqNShortcut(*this, pc, i);

            // We complete the current instruction and the first LOADB, but we do not skip the second LOADB
            // This is because the second LOADB was a jump target so there is a block prepared to handle it
            cmdSkipTarget = i + 3;
            break;
        }

        translateInstJumpxEqN(*this, pc, i);
        break;
    case LOP_JUMPXEQKS:
        if (isDirectCompare(function.proto, pc, i))
        {
            translateInstJumpxEqSShortcut(*this, pc, i);

            // We complete the current instruction and the first LOADB, but we do not skip the second LOADB
            // This is because the second LOADB was a jump target so there is a block prepared to handle it
            cmdSkipTarget = i + 3;
            break;
        }

        translateInstJumpxEqS(*this, pc, i);
        break;
    case LOP_ADD:
        translateInstBinary(*this, pc, i, TM_ADD);
        break;
    case LOP_SUB:
        translateInstBinary(*this, pc, i, TM_SUB);
        break;
    case LOP_MUL:
        translateInstBinary(*this, pc, i, TM_MUL);
        break;
    case LOP_DIV:
        translateInstBinary(*this, pc, i, TM_DIV);
        break;
    case LOP_IDIV:
        translateInstBinary(*this, pc, i, TM_IDIV);
        break;
    case LOP_MOD:
        translateInstBinary(*this, pc, i, TM_MOD);
        break;
    case LOP_POW:
        translateInstBinary(*this, pc, i, TM_POW);
        break;
    case LOP_ADDK:
        translateInstBinaryK(*this, pc, i, TM_ADD);
        break;
    case LOP_SUBK:
        translateInstBinaryK(*this, pc, i, TM_SUB);
        break;
    case LOP_MULK:
        translateInstBinaryK(*this, pc, i, TM_MUL);
        break;
    case LOP_DIVK:
        translateInstBinaryK(*this, pc, i, TM_DIV);
        break;
    case LOP_IDIVK:
        translateInstBinaryK(*this, pc, i, TM_IDIV);
        break;
    case LOP_MODK:
        translateInstBinaryK(*this, pc, i, TM_MOD);
        break;
    case LOP_POWK:
        translateInstBinaryK(*this, pc, i, TM_POW);
        break;
    case LOP_SUBRK:
        translateInstBinaryRK(*this, pc, i, TM_SUB);
        break;
    case LOP_DIVRK:
        translateInstBinaryRK(*this, pc, i, TM_DIV);
        break;
    case LOP_NOT:
        translateInstNot(*this, pc);
        break;
    case LOP_MINUS:
        translateInstMinus(*this, pc, i);
        break;
    case LOP_LENGTH:
        translateInstLength(*this, pc, i);
        break;
    case LOP_NEWTABLE:
        translateInstNewTable(*this, pc, i);
        break;
    case LOP_DUPTABLE:
        translateInstDupTable(*this, pc, i);
        break;
    case LOP_SETLIST:
        inst(
            IrCmd::SETLIST, constUint(i), vmReg(LUAU_INSN_A(*pc)), vmReg(LUAU_INSN_B(*pc)), constInt(LUAU_INSN_C(*pc) - 1), constUint(pc[1]), undef()
        );
        break;
    case LOP_GETUPVAL:
        translateInstGetUpval(*this, pc, i);
        break;
    case LOP_SETUPVAL:
        translateInstSetUpval(*this, pc, i);
        break;
    case LOP_CLOSEUPVALS:
        translateInstCloseUpvals(*this, pc);
        break;
    case LOP_FASTCALL:
        handleFastcallFallback(translateFastCallN(*this, pc, i, false, 0, {}, {}), pc, i);
        break;
    case LOP_FASTCALL1:
        handleFastcallFallback(translateFastCallN(*this, pc, i, true, 1, undef(), undef()), pc, i);
        break;
    case LOP_FASTCALL2:
        handleFastcallFallback(translateFastCallN(*this, pc, i, true, 2, vmReg(pc[1]), undef()), pc, i);
        break;
    case LOP_FASTCALL2K:
        handleFastcallFallback(translateFastCallN(*this, pc, i, true, 2, vmConst(pc[1]), undef()), pc, i);
        break;
    case LOP_FASTCALL3:
        // Aux word packs two register operands into its low two bytes
        handleFastcallFallback(translateFastCallN(*this, pc, i, true, 3, vmReg(pc[1] & 0xff), vmReg((pc[1] >> 8) & 0xff)), pc, i);
        break;
    case LOP_FORNPREP:
        translateInstForNPrep(*this, pc, i);
        break;
    case LOP_FORNLOOP:
        translateInstForNLoop(*this, pc, i);
        break;
    case LOP_FORGLOOP:
    {
        int aux = int(pc[1]);

        // We have a translation for ipairs-style traversal, general loop iteration is still too complex
        if (aux < 0)
        {
            translateInstForGLoopIpairs(*this, pc, i);
        }
        else
        {
            int ra = LUAU_INSN_A(*pc);

            IrOp loopRepeat = blockAtInst(i + 1 + LUAU_INSN_D(*pc));
            IrOp loopExit = blockAtInst(i + getOpLength(LuauOpcode(LOP_FORGLOOP)));
            IrOp fallback = fallbackBlock(i);

            inst(IrCmd::INTERRUPT, constUint(i));
            // Fast path requires the iterator register to hold nil; otherwise take the fallback
            loadAndCheckTag(vmReg(ra), LUA_TNIL, fallback);

            inst(IrCmd::FORGLOOP, vmReg(ra), constInt(aux), loopRepeat, loopExit);

            beginBlock(fallback);
            inst(IrCmd::SET_SAVEDPC, constUint(i + 1));
            inst(IrCmd::FORGLOOP_FALLBACK, vmReg(ra), constInt(aux), loopRepeat, loopExit);

            beginBlock(loopExit);
        }
        break;
    }
    case LOP_FORGPREP_NEXT:
        translateInstForGPrepNext(*this, pc, i);
        break;
    case LOP_FORGPREP_INEXT:
        translateInstForGPrepInext(*this, pc, i);
        break;
    case LOP_AND:
        translateInstAndX(*this, pc, i, vmReg(LUAU_INSN_C(*pc)));
        break;
    case LOP_ANDK:
        translateInstAndX(*this, pc, i, vmConst(LUAU_INSN_C(*pc)));
        break;
    case LOP_OR:
        translateInstOrX(*this, pc, i, vmReg(LUAU_INSN_C(*pc)));
        break;
    case LOP_ORK:
        translateInstOrX(*this, pc, i, vmConst(LUAU_INSN_C(*pc)));
        break;
    case LOP_COVERAGE:
        inst(IrCmd::COVERAGE, constUint(i));
        break;
    case LOP_GETIMPORT:
        translateInstGetImport(*this, pc, i);
        break;
    case LOP_CONCAT:
        translateInstConcat(*this, pc, i);
        break;
    case LOP_CAPTURE:
        translateInstCapture(*this, pc, i);
        break;
    case LOP_NAMECALL:
        // A successful namecall translation also consumes the paired CALL that follows
        if (translateInstNamecall(*this, pc, i))
            cmdSkipTarget = i + 3;
        break;
    case LOP_PREPVARARGS:
        inst(IrCmd::FALLBACK_PREPVARARGS, constUint(i), constInt(LUAU_INSN_A(*pc)));
        break;
    case LOP_GETVARARGS:
        inst(IrCmd::FALLBACK_GETVARARGS, constUint(i), vmReg(LUAU_INSN_A(*pc)), constInt(LUAU_INSN_B(*pc) - 1));
        break;
    case LOP_NEWCLOSURE:
        translateInstNewClosure(*this, pc, i);
        break;
    case LOP_DUPCLOSURE:
        inst(IrCmd::FALLBACK_DUPCLOSURE, constUint(i), vmReg(LUAU_INSN_A(*pc)), vmConst(LUAU_INSN_D(*pc)));
        break;
    case LOP_FORGPREP:
    {
        IrOp loopStart = blockAtInst(i + 1 + LUAU_INSN_D(*pc));

        inst(IrCmd::FALLBACK_FORGPREP, constUint(i), vmReg(LUAU_INSN_A(*pc)), loopStart);
        break;
    }
    default:
        CODEGEN_ASSERT(!"Unknown instruction");
    }
}
667
668
// Finishes translation of a fast call: when a fallback block was produced, the
// fast path jumps over it to the continuation and translation resumes in the
// fallback block; when the call was translated completely (undef), the covered
// bytecode is simply skipped via cmdSkipTarget.
void IrBuilder::handleFastcallFallback(IrOp fallbackOrUndef, const Instruction* pc, int i)
{
    // Position right after the bytecode the fast call covers
    int continuation = i + LUAU_INSN_C(*pc) + 2;

    if (fallbackOrUndef.kind == IrOpKind::Undef)
    {
        cmdSkipTarget = continuation;
        return;
    }

    IrOp resumeAt = blockAtInst(continuation);
    inst(IrCmd::JUMP, resumeAt);
    beginBlock(fallbackOrUndef);

    activeFastcallFallback = true;
    fastcallFallbackReturn = resumeAt;
}
686
687
// True when the referenced block is an internal (builder-created) block.
bool IrBuilder::isInternalBlock(IrOp block)
{
    return function.blocks[block.index].kind == IrBlockKind::Internal;
}
693
694
// Makes 'block' the active block; subsequent inst() calls append into it.
// The block's start is pinned to the current end of the instruction stream.
void IrBuilder::beginBlock(IrOp block)
{
    uint32_t firstInstIdx = uint32_t(function.instructions.size());
    IrBlock& opened = function.blocks[block.index];

    activeBlockIdx = block.index;

    // A block may only be opened once, at the position recorded for it
    CODEGEN_ASSERT(opened.start == ~0u || opened.start == firstInstIdx);

    opened.start = firstInstIdx;
    opened.sortkey = firstInstIdx;

    inTerminatedBlock = false;
}
706
707
// Loads the tag at 'loc' and emits a guard that exits to 'fallback' when it
// does not match the expected tag.
void IrBuilder::loadAndCheckTag(IrOp loc, uint8_t tag, IrOp fallback)
{
    IrOp actualTag = inst(IrCmd::LOAD_TAG, loc);

    inst(IrCmd::CHECK_TAG, actualTag, constTag(tag), fallback);
}
711
712
void IrBuilder::checkSafeEnv(int pcpos)
713
{
714
IrBlock& active = function.blocks[activeBlockIdx];
715
716
// If the block start is associated with a bytecode position, we can perform an early safeenv check
717
if (active.startpc != kBlockNoStartPc)
718
{
719
// If the block hasn't cleared the safeenv flag yet, we can still set it at block entry
720
if ((active.flags & kBlockFlagSafeEnvClear) == 0)
721
active.flags |= kBlockFlagSafeEnvCheck;
722
}
723
724
inst(IrCmd::CHECK_SAFE_ENV, vmExit(pcpos));
725
}
726
727
// Clones the instructions of the given source blocks into the currently active
// block, rewriting operands so the clones reference cloned definitions rather
// than the originals. When 'removeCurrentTerminator' is set and the active
// block is already terminated, its terminator is killed first so appending can
// continue.
void IrBuilder::clone(std::vector<uint32_t> sourceIdxs, bool removeCurrentTerminator)
{
    // Maps original instruction index -> index of its clone
    DenseHashMap<uint32_t, uint32_t> instRedir{~0u};

    auto redirect = [&instRedir](IrOp& op)
    {
        if (op.kind == IrOpKind::Inst)
        {
            if (const uint32_t* newIndex = instRedir.find(op.index))
                op.index = *newIndex;
            else
                CODEGEN_ASSERT(!"Values can only be used if they are defined in the same block");
        }
    };

    for (uint32_t sourceIdx : sourceIdxs)
    {
        const IrBlock& source = function.blocks[sourceIdx];

        if (removeCurrentTerminator && inTerminatedBlock)
        {
            IrBlock& active = function.blocks[activeBlockIdx];
            IrInst& term = function.instructions[active.finish];

            kill(function, term);
            inTerminatedBlock = false;
        }

        // Implicit safe environment checks become materialized as real ones
        if ((source.flags & kBlockFlagSafeEnvCheck) != 0)
        {
            CODEGEN_ASSERT(source.startpc != kBlockNoStartPc);
            inst(IrCmd::CHECK_SAFE_ENV, vmExit(source.startpc));
        }

        for (uint32_t index = source.start; index <= source.finish; index++)
        {
            CODEGEN_ASSERT(index < function.instructions.size());
            // Copy by value: inst() below appends to function.instructions and
            // may invalidate references into it
            IrInst clone = function.instructions[index];

            // Skip pseudo instructions to make clone more compact, but validate that they have no users
            if (isPseudo(clone.cmd))
            {
                CODEGEN_ASSERT(clone.useCount == 0);
                continue;
            }

            for (auto& op : clone.ops)
                redirect(op);

            for (auto& op : clone.ops)
                addUse(function, op);

            // Instructions that referenced the original will have to be adjusted to use the clone
            instRedir[index] = uint32_t(function.instructions.size());

            // Reconstruct the fresh clone
            inst(clone.cmd, clone.ops);
        }
    }
}
788
789
// Placeholder operand for an intentionally-unused argument slot.
IrOp IrBuilder::undef()
{
    IrOp result{IrOpKind::Undef, 0};
    return result;
}
793
794
// Interned signed integer constant operand.
IrOp IrBuilder::constInt(int value)
{
    IrConst c;
    c.kind = IrConstKind::Int;
    c.valueInt = value;
    return constAny(c, uint64_t(value));
}
801
802
// Interned unsigned integer constant operand.
IrOp IrBuilder::constUint(unsigned value)
{
    IrConst c;
    c.kind = IrConstKind::Uint;
    c.valueUint = value;
    return constAny(c, uint64_t(value));
}
809
810
// Interned import-id constant operand (stored in the unsigned payload).
IrOp IrBuilder::constImport(unsigned value)
{
    IrConst c;
    c.kind = IrConstKind::Import;
    c.valueUint = value;
    return constAny(c, uint64_t(value));
}
817
818
// Interned double constant operand; the raw bit pattern serves as the dedup key
// so that e.g. +0.0 and -0.0 remain distinct constants.
IrOp IrBuilder::constDouble(double value)
{
    IrConst c;
    c.kind = IrConstKind::Double;
    c.valueDouble = value;

    uint64_t bitKey;
    static_assert(sizeof(bitKey) == sizeof(value), "Expecting double to be 64-bit");
    memcpy(&bitKey, &value, sizeof(value));

    return constAny(c, bitKey);
}
830
831
// Interned Lua value tag constant operand.
IrOp IrBuilder::constTag(uint8_t value)
{
    IrConst c;
    c.kind = IrConstKind::Tag;
    c.valueTag = value;
    return constAny(c, uint64_t(value));
}
838
839
// Deduplicating constant interner: returns the existing constant with the same
// (kind, key) pair when present, otherwise appends a new entry and caches it.
IrOp IrBuilder::constAny(IrConst constant, uint64_t asCommonKey)
{
    ConstantKey lookup{constant.kind, asCommonKey};

    if (uint32_t* existing = constantMap.find(lookup))
        return IrOp{IrOpKind::Constant, *existing};

    uint32_t freshIndex = uint32_t(function.constants.size());
    function.constants.push_back(constant);
    constantMap[lookup] = freshIndex;

    return IrOp{IrOpKind::Constant, freshIndex};
}
853
854
// Wraps a condition code as an instruction operand.
IrOp IrBuilder::cond(IrCondition cond)
{
    IrOp result{IrOpKind::Condition, uint32_t(cond)};
    return result;
}
858
859
// Convenience overloads that forward a fixed number of operands to the
// initializer_list form of inst() below.
IrOp IrBuilder::inst(IrCmd cmd)
{
    return inst(cmd, {});
}

IrOp IrBuilder::inst(IrCmd cmd, IrOp a)
{
    return inst(cmd, {a});
}

IrOp IrBuilder::inst(IrCmd cmd, IrOp a, IrOp b)
{
    return inst(cmd, {a, b});
}

IrOp IrBuilder::inst(IrCmd cmd, IrOp a, IrOp b, IrOp c)
{
    return inst(cmd, {a, b, c});
}

IrOp IrBuilder::inst(IrCmd cmd, IrOp a, IrOp b, IrOp c, IrOp d)
{
    return inst(cmd, {a, b, c, d});
}

IrOp IrBuilder::inst(IrCmd cmd, IrOp a, IrOp b, IrOp c, IrOp d, IrOp e)
{
    return inst(cmd, {a, b, c, d, e});
}

IrOp IrBuilder::inst(IrCmd cmd, IrOp a, IrOp b, IrOp c, IrOp d, IrOp e, IrOp f)
{
    return inst(cmd, {a, b, c, d, e, f});
}

IrOp IrBuilder::inst(IrCmd cmd, IrOp a, IrOp b, IrOp c, IrOp d, IrOp e, IrOp f, IrOp g)
{
    return inst(cmd, {a, b, c, d, e, f, g});
}
898
899
// Appends a new instruction to the stream and returns an operand referencing
// it. Terminator commands close the active block; commands that can clear the
// safe environment flag the active block accordingly (behind a fast flag).
// NOTE(review): this body is duplicated in the IrOps overload below — keep the
// two in sync.
IrOp IrBuilder::inst(IrCmd cmd, std::initializer_list<IrOp> ops)
{
    uint32_t index = uint32_t(function.instructions.size());
    function.instructions.push_back({cmd, ops});

    // Appending into an already-terminated block is a builder misuse
    CODEGEN_ASSERT(!inTerminatedBlock);

    if (isBlockTerminator(cmd))
    {
        function.blocks[activeBlockIdx].finish = index;
        inTerminatedBlock = true;
    }

    if (FFlag::LuauCodegenBlockSafeEnv && canInvalidateSafeEnv(cmd))
    {
        // Mark that block has instruction with this flag
        function.blocks[activeBlockIdx].flags |= kBlockFlagSafeEnvClear;
    }

    return {IrOpKind::Inst, index};
}
920
921
// Same as the initializer_list overload above, but takes an operand array —
// used when re-emitting existing instructions (see clone). Keep in sync with
// the overload above.
IrOp IrBuilder::inst(IrCmd cmd, const IrOps& ops)
{
    uint32_t index = uint32_t(function.instructions.size());
    function.instructions.push_back({cmd, ops});

    // Appending into an already-terminated block is a builder misuse
    CODEGEN_ASSERT(!inTerminatedBlock);

    if (isBlockTerminator(cmd))
    {
        function.blocks[activeBlockIdx].finish = index;
        inTerminatedBlock = true;
    }

    if (FFlag::LuauCodegenBlockSafeEnv && canInvalidateSafeEnv(cmd))
    {
        // Mark that block has instruction with this flag
        function.blocks[activeBlockIdx].flags |= kBlockFlagSafeEnvClear;
    }

    return {IrOpKind::Inst, index};
}
942
943
// Creates a new (empty) block of the requested kind and returns an operand
// referencing it. Internal blocks requested while a fastcall fallback is being
// translated are demoted to fallback blocks so they stay on the cold path.
IrOp IrBuilder::block(IrBlockKind kind)
{
    CODEGEN_ASSERT(kind != IrBlockKind::Fallback && "fallbackBlock must be used for fallback block creation");

    IrBlockKind actualKind = (kind == IrBlockKind::Internal && activeFastcallFallback) ? IrBlockKind::Fallback : kind;

    uint32_t newIndex = uint32_t(function.blocks.size());
    function.blocks.push_back(IrBlock{actualKind});
    return IrOp{IrOpKind::Block, newIndex};
}
954
955
// Returns the block associated with a bytecode position, creating an internal
// block on demand when the position had none reserved during CFG rebuild.
IrOp IrBuilder::blockAtInst(uint32_t index)
{
    uint32_t existing = instIndexToBlock[index];

    if (existing != kNoAssociatedBlockIndex)
        return IrOp{IrOpKind::Block, existing};

    IrOp created = block(IrBlockKind::Internal);
    function.blockOp(created).startpc = index;

    return created;
}
967
968
// Creates a cold-path fallback block tied to the given bytecode position.
IrOp IrBuilder::fallbackBlock(uint32_t pcpos)
{
    uint32_t newIndex = uint32_t(function.blocks.size());

    function.blocks.push_back(IrBlock{IrBlockKind::Fallback});
    CODEGEN_ASSERT(newIndex != 0 && "IR cannot start with a fallback block");

    function.blocks.back().startpc = pcpos;
    return IrOp{IrOpKind::Block, newIndex};
}
977
978
// Operand referencing a VM register (stack slot) by index.
IrOp IrBuilder::vmReg(uint8_t index)
{
    return IrOp{IrOpKind::VmReg, index};
}

// Operand referencing an entry of the function's constant table.
IrOp IrBuilder::vmConst(uint32_t index)
{
    return IrOp{IrOpKind::VmConst, index};
}

// Operand referencing an upvalue of the current closure.
IrOp IrBuilder::vmUpvalue(uint8_t index)
{
    return IrOp{IrOpKind::VmUpvalue, index};
}

// Operand describing an exit back to the VM at the given bytecode position.
IrOp IrBuilder::vmExit(uint32_t pcpos)
{
    return IrOp{IrOpKind::VmExit, pcpos};
}
997
998
} // namespace CodeGen
999
} // namespace Luau
1000
1001