// NOTE: web-viewer page chrome removed during extraction.
// Origin: Roblox/luau repository, tests/ConformanceIrHooks.h
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#pragma once

#include "Luau/IrBuilder.h"

#include <cstring>
// nullptr-terminated list of userdata type names used by the conformance tests
static const char* kUserdataRunTypes[] = {"extra", "color", "vec2", "mat3", "vertex", nullptr};

// Indices of the entries in kUserdataRunTypes above; these act as the
// userdata bytecode type indices in the hooks below
constexpr uint8_t kUserdataExtra = 0;
constexpr uint8_t kUserdataColor = 1;
constexpr uint8_t kUserdataVec2 = 2;
constexpr uint8_t kUserdataMat3 = 3;
constexpr uint8_t kUserdataVertex = 4;

// Userdata tags can be different from userdata bytecode type indices
constexpr uint8_t kTagVec2 = 12;
constexpr uint8_t kTagVertex = 13;
// Host-side layout of the 'vec2' userdata; the IR hooks below read and write
// its fields through offsetof(Vec2, ...), so the layout must stay in sync
struct Vec2
{
    float x;
    float y;
};
// Host-side layout of the 'vertex' userdata; fields are accessed through
// offsetof(Vertex, ...) in the IR hooks below
struct Vertex
{
    float pos[3];
    float normal[3];
    float uv[2];
};
// Compares an explicit-length member name against a NUL-terminated literal.
// Uses memcmp bounded by memberLength so that `member` is never read past
// its stated length and does not itself have to be NUL-terminated (the
// original strcmp-based comparison required termination of `member`).
inline bool compareMemberName(const char* member, size_t memberLength, const char* str)
{
    return memberLength == strlen(str) && memcmp(member, str, memberLength) == 0;
}
inline uint8_t typeToUserdataIndex(uint8_t type)
37
{
38
// Underflow will push the type into a value that is not comparable to any kUserdata* constants
39
return type - LBC_TYPE_TAGGED_USERDATA_BASE;
40
}
41
42
inline uint8_t userdataIndexToType(uint8_t userdataIndex)
43
{
44
return LBC_TYPE_TAGGED_USERDATA_BASE + userdataIndex;
45
}
46
47
inline uint8_t vectorAccessBytecodeType(const char* member, size_t memberLength)
48
{
49
using namespace Luau::CodeGen;
50
51
if (compareMemberName(member, memberLength, "Magnitude"))
52
return LBC_TYPE_NUMBER;
53
54
if (compareMemberName(member, memberLength, "Unit"))
55
return LBC_TYPE_VECTOR;
56
57
return LBC_TYPE_ANY;
58
}
59
60
// IR hook for built-in vector field reads: emits custom IR for v.Magnitude
// and v.Unit, writing the result into resultReg. Returns true when IR was
// emitted for `member`; returning false defers to the generic access path.
// `pcpos` is unused here because none of the emitted IR needs a VM exit.
inline bool vectorAccess(Luau::CodeGen::IrBuilder& build, const char* member, size_t memberLength, int resultReg, int sourceReg, int pcpos)
{
    using namespace Luau::CodeGen;

    if (compareMemberName(member, memberLength, "Magnitude"))
    {
        // Vector components are floats at offsets 0/4/8 of the source register
        IrOp x = build.inst(IrCmd::LOAD_FLOAT, build.vmReg(sourceReg), build.constInt(0));
        IrOp y = build.inst(IrCmd::LOAD_FLOAT, build.vmReg(sourceReg), build.constInt(4));
        IrOp z = build.inst(IrCmd::LOAD_FLOAT, build.vmReg(sourceReg), build.constInt(8));

        // Intentionally not using DOT_VEC to check other kind of math compared to vector.magnitude
        IrOp x2 = build.inst(IrCmd::MUL_FLOAT, x, x);
        IrOp y2 = build.inst(IrCmd::MUL_FLOAT, y, y);
        IrOp z2 = build.inst(IrCmd::MUL_FLOAT, z, z);

        IrOp sum = build.inst(IrCmd::ADD_FLOAT, build.inst(IrCmd::ADD_FLOAT, x2, y2), z2);

        IrOp mag = build.inst(IrCmd::SQRT_FLOAT, sum);

        // Result is a Lua number: convert float -> number and store value + tag
        build.inst(IrCmd::STORE_DOUBLE, build.vmReg(resultReg), build.inst(IrCmd::FLOAT_TO_NUM, mag));
        build.inst(IrCmd::STORE_TAG, build.vmReg(resultReg), build.constTag(LUA_TNUMBER));

        return true;
    }

    if (compareMemberName(member, memberLength, "Unit"))
    {
        IrOp x = build.inst(IrCmd::LOAD_FLOAT, build.vmReg(sourceReg), build.constInt(0));
        IrOp y = build.inst(IrCmd::LOAD_FLOAT, build.vmReg(sourceReg), build.constInt(4));
        IrOp z = build.inst(IrCmd::LOAD_FLOAT, build.vmReg(sourceReg), build.constInt(8));

        // Intentionally not using DOT_VEC to check other kind of math compared to vector.normalize
        IrOp x2 = build.inst(IrCmd::MUL_FLOAT, x, x);
        IrOp y2 = build.inst(IrCmd::MUL_FLOAT, y, y);
        IrOp z2 = build.inst(IrCmd::MUL_FLOAT, z, z);

        IrOp sum = build.inst(IrCmd::ADD_FLOAT, build.inst(IrCmd::ADD_FLOAT, x2, y2), z2);

        IrOp mag = build.inst(IrCmd::SQRT_FLOAT, sum);
        // Double literal (not 1.0f) for consistency with constDouble and the
        // equivalent reciprocal in userdataAccess; the value is unchanged
        IrOp inv = build.inst(IrCmd::DIV_FLOAT, build.constDouble(1.0), mag);

        IrOp xr = build.inst(IrCmd::MUL_FLOAT, x, inv);
        IrOp yr = build.inst(IrCmd::MUL_FLOAT, y, inv);
        IrOp zr = build.inst(IrCmd::MUL_FLOAT, z, inv);

        build.inst(IrCmd::STORE_VECTOR, build.vmReg(resultReg), xr, yr, zr);
        build.inst(IrCmd::STORE_TAG, build.vmReg(resultReg), build.constTag(LUA_TVECTOR));

        return true;
    }

    return false;
}
inline uint8_t vectorNamecallBytecodeType(const char* member, size_t memberLength)
115
{
116
if (compareMemberName(member, memberLength, "Dot"))
117
return LBC_TYPE_NUMBER;
118
119
if (compareMemberName(member, memberLength, "Cross"))
120
return LBC_TYPE_VECTOR;
121
122
return LBC_TYPE_ANY;
123
}
124
125
// IR hook for built-in vector method calls: emits custom IR for v:Dot(w)
// and v:Cross(w). Only handles the exact arity params == 2 (self + one
// argument) with at most one result; everything else falls through to the
// generic call path by returning false. The argument at argResReg + 2 is
// tag-checked as a vector, exiting to the VM at pcpos on mismatch. The
// result is written to argResReg (the call's result slot).
inline bool vectorNamecall(
    Luau::CodeGen::IrBuilder& build,
    const char* member,
    size_t memberLength,
    int argResReg,
    int sourceReg,
    int params,
    int results,
    int pcpos
)
{
    using namespace Luau::CodeGen;

    if (compareMemberName(member, memberLength, "Dot") && params == 2 && results <= 1)
    {
        // Guard: argument must be a vector, otherwise exit to the interpreter
        build.loadAndCheckTag(build.vmReg(argResReg + 2), LUA_TVECTOR, build.vmExit(pcpos));

        // Intentionally not using DOT_VEC to check other kind of math compared to vector.dot
        IrOp x1 = build.inst(IrCmd::LOAD_FLOAT, build.vmReg(sourceReg), build.constInt(0));
        IrOp x2 = build.inst(IrCmd::LOAD_FLOAT, build.vmReg(argResReg + 2), build.constInt(0));

        IrOp xx = build.inst(IrCmd::MUL_FLOAT, x1, x2);

        IrOp y1 = build.inst(IrCmd::LOAD_FLOAT, build.vmReg(sourceReg), build.constInt(4));
        IrOp y2 = build.inst(IrCmd::LOAD_FLOAT, build.vmReg(argResReg + 2), build.constInt(4));

        IrOp yy = build.inst(IrCmd::MUL_FLOAT, y1, y2);

        IrOp z1 = build.inst(IrCmd::LOAD_FLOAT, build.vmReg(sourceReg), build.constInt(8));
        IrOp z2 = build.inst(IrCmd::LOAD_FLOAT, build.vmReg(argResReg + 2), build.constInt(8));

        IrOp zz = build.inst(IrCmd::MUL_FLOAT, z1, z2);

        // dot = x1*x2 + y1*y2 + z1*z2, stored as a Lua number
        IrOp sum = build.inst(IrCmd::ADD_FLOAT, build.inst(IrCmd::ADD_FLOAT, xx, yy), zz);

        build.inst(IrCmd::STORE_DOUBLE, build.vmReg(argResReg), build.inst(IrCmd::FLOAT_TO_NUM, sum));
        build.inst(IrCmd::STORE_TAG, build.vmReg(argResReg), build.constTag(LUA_TNUMBER));

        // If the function is called in multi-return context, stack has to be adjusted
        if (results == LUA_MULTRET)
            build.inst(IrCmd::ADJUST_STACK_TO_REG, build.vmReg(argResReg), build.constInt(1));

        return true;
    }

    if (compareMemberName(member, memberLength, "Cross") && params == 2 && results <= 1)
    {
        // Guard: argument must be a vector, otherwise exit to the interpreter
        build.loadAndCheckTag(build.vmReg(argResReg + 2), LUA_TVECTOR, build.vmExit(pcpos));

        IrOp x1 = build.inst(IrCmd::LOAD_FLOAT, build.vmReg(sourceReg), build.constInt(0));
        IrOp x2 = build.inst(IrCmd::LOAD_FLOAT, build.vmReg(argResReg + 2), build.constInt(0));

        IrOp y1 = build.inst(IrCmd::LOAD_FLOAT, build.vmReg(sourceReg), build.constInt(4));
        IrOp y2 = build.inst(IrCmd::LOAD_FLOAT, build.vmReg(argResReg + 2), build.constInt(4));

        IrOp z1 = build.inst(IrCmd::LOAD_FLOAT, build.vmReg(sourceReg), build.constInt(8));
        IrOp z2 = build.inst(IrCmd::LOAD_FLOAT, build.vmReg(argResReg + 2), build.constInt(8));

        // Standard cross product: r = (y1*z2 - z1*y2, z1*x2 - x1*z2, x1*y2 - y1*x2)
        IrOp y1z2 = build.inst(IrCmd::MUL_FLOAT, y1, z2);
        IrOp z1y2 = build.inst(IrCmd::MUL_FLOAT, z1, y2);
        IrOp xr = build.inst(IrCmd::SUB_FLOAT, y1z2, z1y2);

        IrOp z1x2 = build.inst(IrCmd::MUL_FLOAT, z1, x2);
        IrOp x1z2 = build.inst(IrCmd::MUL_FLOAT, x1, z2);
        IrOp yr = build.inst(IrCmd::SUB_FLOAT, z1x2, x1z2);

        IrOp x1y2 = build.inst(IrCmd::MUL_FLOAT, x1, y2);
        IrOp y1x2 = build.inst(IrCmd::MUL_FLOAT, y1, x2);
        IrOp zr = build.inst(IrCmd::SUB_FLOAT, x1y2, y1x2);

        build.inst(IrCmd::STORE_VECTOR, build.vmReg(argResReg), xr, yr, zr);
        build.inst(IrCmd::STORE_TAG, build.vmReg(argResReg), build.constTag(LUA_TVECTOR));

        // If the function is called in multi-return context, stack has to be adjusted
        if (results == LUA_MULTRET)
            build.inst(IrCmd::ADJUST_STACK_TO_REG, build.vmReg(argResReg), build.constInt(1));

        return true;
    }

    return false;
}
inline uint8_t userdataAccessBytecodeType(uint8_t type, const char* member, size_t memberLength)
209
{
210
switch (typeToUserdataIndex(type))
211
{
212
case kUserdataColor:
213
if (compareMemberName(member, memberLength, "R"))
214
return LBC_TYPE_NUMBER;
215
216
if (compareMemberName(member, memberLength, "G"))
217
return LBC_TYPE_NUMBER;
218
219
if (compareMemberName(member, memberLength, "B"))
220
return LBC_TYPE_NUMBER;
221
break;
222
case kUserdataVec2:
223
if (compareMemberName(member, memberLength, "X"))
224
return LBC_TYPE_NUMBER;
225
226
if (compareMemberName(member, memberLength, "Y"))
227
return LBC_TYPE_NUMBER;
228
229
if (compareMemberName(member, memberLength, "Magnitude"))
230
return LBC_TYPE_NUMBER;
231
232
if (compareMemberName(member, memberLength, "Unit"))
233
return userdataIndexToType(kUserdataVec2);
234
break;
235
case kUserdataMat3:
236
if (compareMemberName(member, memberLength, "Row1"))
237
return LBC_TYPE_VECTOR;
238
239
if (compareMemberName(member, memberLength, "Row2"))
240
return LBC_TYPE_VECTOR;
241
242
if (compareMemberName(member, memberLength, "Row3"))
243
return LBC_TYPE_VECTOR;
244
break;
245
case kUserdataVertex:
246
if (compareMemberName(member, memberLength, "pos"))
247
return LBC_TYPE_VECTOR;
248
249
if (compareMemberName(member, memberLength, "normal"))
250
return LBC_TYPE_VECTOR;
251
252
if (compareMemberName(member, memberLength, "uv"))
253
return userdataIndexToType(kUserdataVec2);
254
break;
255
}
256
257
return LBC_TYPE_ANY;
258
}
259
260
// IR hook for userdata member reads: emits custom IR for the vec2 and vertex
// members that userdataAccessBytecodeType declares as specially typed.
// Each branch follows the same pattern: load the userdata pointer from
// sourceReg, verify the runtime userdata tag (exiting to the VM at pcpos on
// mismatch), read fields via BUFFER_READF32 at offsetof(...) offsets, and
// store the result (value + tag) into resultReg. Returns true when IR was
// emitted; false defers to the generic access path.
inline bool userdataAccess(
    Luau::CodeGen::IrBuilder& build,
    uint8_t type,
    const char* member,
    size_t memberLength,
    int resultReg,
    int sourceReg,
    int pcpos
)
{
    using namespace Luau::CodeGen;

    switch (typeToUserdataIndex(type))
    {
    case kUserdataColor:
        // Color members are typed but have no fast-path IR here
        break;
    case kUserdataVec2:
        if (compareMemberName(member, memberLength, "X"))
        {
            IrOp udata = build.inst(IrCmd::LOAD_POINTER, build.vmReg(sourceReg));
            build.inst(IrCmd::CHECK_USERDATA_TAG, udata, build.constInt(kTagVec2), build.vmExit(pcpos));

            IrOp value = build.inst(IrCmd::BUFFER_READF32, udata, build.constInt(offsetof(Vec2, x)), build.constTag(LUA_TUSERDATA));

            value = build.inst(IrCmd::FLOAT_TO_NUM, value);

            build.inst(IrCmd::STORE_DOUBLE, build.vmReg(resultReg), value);
            build.inst(IrCmd::STORE_TAG, build.vmReg(resultReg), build.constTag(LUA_TNUMBER));
            return true;
        }

        if (compareMemberName(member, memberLength, "Y"))
        {
            IrOp udata = build.inst(IrCmd::LOAD_POINTER, build.vmReg(sourceReg));
            build.inst(IrCmd::CHECK_USERDATA_TAG, udata, build.constInt(kTagVec2), build.vmExit(pcpos));

            IrOp value = build.inst(IrCmd::BUFFER_READF32, udata, build.constInt(offsetof(Vec2, y)), build.constTag(LUA_TUSERDATA));

            value = build.inst(IrCmd::FLOAT_TO_NUM, value);

            build.inst(IrCmd::STORE_DOUBLE, build.vmReg(resultReg), value);
            build.inst(IrCmd::STORE_TAG, build.vmReg(resultReg), build.constTag(LUA_TNUMBER));
            return true;
        }

        if (compareMemberName(member, memberLength, "Magnitude"))
        {
            IrOp udata = build.inst(IrCmd::LOAD_POINTER, build.vmReg(sourceReg));
            build.inst(IrCmd::CHECK_USERDATA_TAG, udata, build.constInt(kTagVec2), build.vmExit(pcpos));

            IrOp x = build.inst(IrCmd::BUFFER_READF32, udata, build.constInt(offsetof(Vec2, x)), build.constTag(LUA_TUSERDATA));
            IrOp y = build.inst(IrCmd::BUFFER_READF32, udata, build.constInt(offsetof(Vec2, y)), build.constTag(LUA_TUSERDATA));

            // 2D magnitude: sqrt(x*x + y*y)
            IrOp x2 = build.inst(IrCmd::MUL_FLOAT, x, x);
            IrOp y2 = build.inst(IrCmd::MUL_FLOAT, y, y);

            IrOp sum = build.inst(IrCmd::ADD_FLOAT, x2, y2);

            IrOp mag = build.inst(IrCmd::SQRT_FLOAT, sum);

            build.inst(IrCmd::STORE_DOUBLE, build.vmReg(resultReg), build.inst(IrCmd::FLOAT_TO_NUM, mag));
            build.inst(IrCmd::STORE_TAG, build.vmReg(resultReg), build.constTag(LUA_TNUMBER));
            return true;
        }

        if (compareMemberName(member, memberLength, "Unit"))
        {
            IrOp udata = build.inst(IrCmd::LOAD_POINTER, build.vmReg(sourceReg));
            build.inst(IrCmd::CHECK_USERDATA_TAG, udata, build.constInt(kTagVec2), build.vmExit(pcpos));

            IrOp x = build.inst(IrCmd::BUFFER_READF32, udata, build.constInt(offsetof(Vec2, x)), build.constTag(LUA_TUSERDATA));
            IrOp y = build.inst(IrCmd::BUFFER_READF32, udata, build.constInt(offsetof(Vec2, y)), build.constTag(LUA_TUSERDATA));

            IrOp x2 = build.inst(IrCmd::MUL_FLOAT, x, x);
            IrOp y2 = build.inst(IrCmd::MUL_FLOAT, y, y);

            IrOp sum = build.inst(IrCmd::ADD_FLOAT, x2, y2);

            IrOp mag = build.inst(IrCmd::SQRT_FLOAT, sum);
            IrOp inv = build.inst(IrCmd::DIV_FLOAT, build.constDouble(1.0), mag);

            IrOp xr = build.inst(IrCmd::MUL_FLOAT, x, inv);
            IrOp yr = build.inst(IrCmd::MUL_FLOAT, y, inv);

            // Result is a freshly allocated vec2 userdata; CHECK_GC before NEW_USERDATA
            build.inst(IrCmd::CHECK_GC);
            IrOp udatar = build.inst(IrCmd::NEW_USERDATA, build.constInt(sizeof(Vec2)), build.constInt(kTagVec2));

            build.inst(IrCmd::BUFFER_WRITEF32, udatar, build.constInt(offsetof(Vec2, x)), xr, build.constTag(LUA_TUSERDATA));
            build.inst(IrCmd::BUFFER_WRITEF32, udatar, build.constInt(offsetof(Vec2, y)), yr, build.constTag(LUA_TUSERDATA));

            build.inst(IrCmd::STORE_POINTER, build.vmReg(resultReg), udatar);
            build.inst(IrCmd::STORE_TAG, build.vmReg(resultReg), build.constTag(LUA_TUSERDATA));
            return true;
        }
        break;
    case kUserdataMat3:
        // Mat3 members are typed but have no fast-path IR here
        break;
    case kUserdataVertex:
        if (compareMemberName(member, memberLength, "pos"))
        {
            IrOp udata = build.inst(IrCmd::LOAD_POINTER, build.vmReg(sourceReg));
            build.inst(IrCmd::CHECK_USERDATA_TAG, udata, build.constInt(kTagVertex), build.vmExit(pcpos));

            IrOp x = build.inst(IrCmd::BUFFER_READF32, udata, build.constInt(offsetof(Vertex, pos[0])), build.constTag(LUA_TUSERDATA));
            IrOp y = build.inst(IrCmd::BUFFER_READF32, udata, build.constInt(offsetof(Vertex, pos[1])), build.constTag(LUA_TUSERDATA));
            IrOp z = build.inst(IrCmd::BUFFER_READF32, udata, build.constInt(offsetof(Vertex, pos[2])), build.constTag(LUA_TUSERDATA));

            // pos is exposed as a built-in vector value, not a new userdata
            build.inst(IrCmd::STORE_VECTOR, build.vmReg(resultReg), x, y, z);
            build.inst(IrCmd::STORE_TAG, build.vmReg(resultReg), build.constTag(LUA_TVECTOR));
            return true;
        }

        if (compareMemberName(member, memberLength, "normal"))
        {
            IrOp udata = build.inst(IrCmd::LOAD_POINTER, build.vmReg(sourceReg));
            build.inst(IrCmd::CHECK_USERDATA_TAG, udata, build.constInt(kTagVertex), build.vmExit(pcpos));

            IrOp x = build.inst(IrCmd::BUFFER_READF32, udata, build.constInt(offsetof(Vertex, normal[0])), build.constTag(LUA_TUSERDATA));
            IrOp y = build.inst(IrCmd::BUFFER_READF32, udata, build.constInt(offsetof(Vertex, normal[1])), build.constTag(LUA_TUSERDATA));
            IrOp z = build.inst(IrCmd::BUFFER_READF32, udata, build.constInt(offsetof(Vertex, normal[2])), build.constTag(LUA_TUSERDATA));

            build.inst(IrCmd::STORE_VECTOR, build.vmReg(resultReg), x, y, z);
            build.inst(IrCmd::STORE_TAG, build.vmReg(resultReg), build.constTag(LUA_TVECTOR));
            return true;
        }

        if (compareMemberName(member, memberLength, "uv"))
        {
            IrOp udata = build.inst(IrCmd::LOAD_POINTER, build.vmReg(sourceReg));
            build.inst(IrCmd::CHECK_USERDATA_TAG, udata, build.constInt(kTagVertex), build.vmExit(pcpos));

            IrOp x = build.inst(IrCmd::BUFFER_READF32, udata, build.constInt(offsetof(Vertex, uv[0])), build.constTag(LUA_TUSERDATA));
            IrOp y = build.inst(IrCmd::BUFFER_READF32, udata, build.constInt(offsetof(Vertex, uv[1])), build.constTag(LUA_TUSERDATA));

            // uv is exposed as a new vec2 userdata; CHECK_GC before NEW_USERDATA
            build.inst(IrCmd::CHECK_GC);
            IrOp result = build.inst(IrCmd::NEW_USERDATA, build.constInt(sizeof(Vec2)), build.constInt(kTagVec2));

            build.inst(IrCmd::BUFFER_WRITEF32, result, build.constInt(offsetof(Vec2, x)), x, build.constTag(LUA_TUSERDATA));
            build.inst(IrCmd::BUFFER_WRITEF32, result, build.constInt(offsetof(Vec2, y)), y, build.constTag(LUA_TUSERDATA));

            build.inst(IrCmd::STORE_POINTER, build.vmReg(resultReg), result);
            build.inst(IrCmd::STORE_TAG, build.vmReg(resultReg), build.constTag(LUA_TUSERDATA));
            return true;
        }
        break;
    }

    return false;
}
inline uint8_t userdataMetamethodBytecodeType(uint8_t lhsTy, uint8_t rhsTy, Luau::CodeGen::HostMetamethod method)
411
{
412
switch (method)
413
{
414
case Luau::CodeGen::HostMetamethod::Add:
415
case Luau::CodeGen::HostMetamethod::Sub:
416
case Luau::CodeGen::HostMetamethod::Mul:
417
case Luau::CodeGen::HostMetamethod::Div:
418
if (typeToUserdataIndex(lhsTy) == kUserdataVec2 || typeToUserdataIndex(rhsTy) == kUserdataVec2)
419
return userdataIndexToType(kUserdataVec2);
420
break;
421
case Luau::CodeGen::HostMetamethod::Minus:
422
if (typeToUserdataIndex(lhsTy) == kUserdataVec2)
423
return userdataIndexToType(kUserdataVec2);
424
break;
425
default:
426
break;
427
}
428
429
return LBC_TYPE_ANY;
430
}
431
432
// IR hook for userdata metamethods: emits custom IR for vec2 + vec2, vec2 *
// vec2 (component-wise), and unary minus on vec2. Each branch tag-checks the
// operand TValues as userdata and verifies their runtime userdata tag
// (exiting to the VM at pcpos on mismatch), computes component-wise results,
// and stores a freshly allocated vec2 userdata into resultReg. Returns true
// when IR was emitted; false defers to the generic metamethod path.
// NOTE(review): userdataMetamethodBytecodeType also declares Sub/Div as
// producing vec2, but those fall through to the generic path here — appears
// intentional for test coverage; confirm against the conformance tests.
inline bool userdataMetamethod(
    Luau::CodeGen::IrBuilder& build,
    uint8_t lhsTy,
    uint8_t rhsTy,
    int resultReg,
    Luau::CodeGen::IrOp lhs,
    Luau::CodeGen::IrOp rhs,
    Luau::CodeGen::HostMetamethod method,
    int pcpos
)
{
    using namespace Luau::CodeGen;

    switch (method)
    {
    case Luau::CodeGen::HostMetamethod::Add:
        if (typeToUserdataIndex(lhsTy) == kUserdataVec2 && typeToUserdataIndex(rhsTy) == kUserdataVec2)
        {
            // Guards: both operands must be vec2 userdata at runtime
            build.loadAndCheckTag(lhs, LUA_TUSERDATA, build.vmExit(pcpos));
            build.loadAndCheckTag(rhs, LUA_TUSERDATA, build.vmExit(pcpos));

            IrOp udata1 = build.inst(IrCmd::LOAD_POINTER, lhs);
            build.inst(IrCmd::CHECK_USERDATA_TAG, udata1, build.constInt(kTagVec2), build.vmExit(pcpos));

            IrOp udata2 = build.inst(IrCmd::LOAD_POINTER, rhs);
            build.inst(IrCmd::CHECK_USERDATA_TAG, udata2, build.constInt(kTagVec2), build.vmExit(pcpos));

            IrOp x1 = build.inst(IrCmd::BUFFER_READF32, udata1, build.constInt(offsetof(Vec2, x)), build.constTag(LUA_TUSERDATA));
            IrOp x2 = build.inst(IrCmd::BUFFER_READF32, udata2, build.constInt(offsetof(Vec2, x)), build.constTag(LUA_TUSERDATA));

            IrOp mx = build.inst(IrCmd::ADD_FLOAT, x1, x2);

            IrOp y1 = build.inst(IrCmd::BUFFER_READF32, udata1, build.constInt(offsetof(Vec2, y)), build.constTag(LUA_TUSERDATA));
            IrOp y2 = build.inst(IrCmd::BUFFER_READF32, udata2, build.constInt(offsetof(Vec2, y)), build.constTag(LUA_TUSERDATA));

            IrOp my = build.inst(IrCmd::ADD_FLOAT, y1, y2);

            // Allocate the result vec2 userdata; CHECK_GC before NEW_USERDATA
            build.inst(IrCmd::CHECK_GC);
            IrOp udatar = build.inst(IrCmd::NEW_USERDATA, build.constInt(sizeof(Vec2)), build.constInt(kTagVec2));

            build.inst(IrCmd::BUFFER_WRITEF32, udatar, build.constInt(offsetof(Vec2, x)), mx, build.constTag(LUA_TUSERDATA));
            build.inst(IrCmd::BUFFER_WRITEF32, udatar, build.constInt(offsetof(Vec2, y)), my, build.constTag(LUA_TUSERDATA));

            build.inst(IrCmd::STORE_POINTER, build.vmReg(resultReg), udatar);
            build.inst(IrCmd::STORE_TAG, build.vmReg(resultReg), build.constTag(LUA_TUSERDATA));

            return true;
        }
        break;
    case Luau::CodeGen::HostMetamethod::Mul:
        if (typeToUserdataIndex(lhsTy) == kUserdataVec2 && typeToUserdataIndex(rhsTy) == kUserdataVec2)
        {
            build.loadAndCheckTag(lhs, LUA_TUSERDATA, build.vmExit(pcpos));
            build.loadAndCheckTag(rhs, LUA_TUSERDATA, build.vmExit(pcpos));

            IrOp udata1 = build.inst(IrCmd::LOAD_POINTER, lhs);
            build.inst(IrCmd::CHECK_USERDATA_TAG, udata1, build.constInt(kTagVec2), build.vmExit(pcpos));

            IrOp udata2 = build.inst(IrCmd::LOAD_POINTER, rhs);
            build.inst(IrCmd::CHECK_USERDATA_TAG, udata2, build.constInt(kTagVec2), build.vmExit(pcpos));

            IrOp x1 = build.inst(IrCmd::BUFFER_READF32, udata1, build.constInt(offsetof(Vec2, x)), build.constTag(LUA_TUSERDATA));
            IrOp x2 = build.inst(IrCmd::BUFFER_READF32, udata2, build.constInt(offsetof(Vec2, x)), build.constTag(LUA_TUSERDATA));

            // Component-wise multiply (not dot product)
            IrOp mx = build.inst(IrCmd::MUL_FLOAT, x1, x2);

            IrOp y1 = build.inst(IrCmd::BUFFER_READF32, udata1, build.constInt(offsetof(Vec2, y)), build.constTag(LUA_TUSERDATA));
            IrOp y2 = build.inst(IrCmd::BUFFER_READF32, udata2, build.constInt(offsetof(Vec2, y)), build.constTag(LUA_TUSERDATA));

            IrOp my = build.inst(IrCmd::MUL_FLOAT, y1, y2);

            build.inst(IrCmd::CHECK_GC);
            IrOp udatar = build.inst(IrCmd::NEW_USERDATA, build.constInt(sizeof(Vec2)), build.constInt(kTagVec2));

            build.inst(IrCmd::BUFFER_WRITEF32, udatar, build.constInt(offsetof(Vec2, x)), mx, build.constTag(LUA_TUSERDATA));
            build.inst(IrCmd::BUFFER_WRITEF32, udatar, build.constInt(offsetof(Vec2, y)), my, build.constTag(LUA_TUSERDATA));

            build.inst(IrCmd::STORE_POINTER, build.vmReg(resultReg), udatar);
            build.inst(IrCmd::STORE_TAG, build.vmReg(resultReg), build.constTag(LUA_TUSERDATA));

            return true;
        }
        break;
    case Luau::CodeGen::HostMetamethod::Minus:
        // Unary minus only inspects the left operand
        if (typeToUserdataIndex(lhsTy) == kUserdataVec2)
        {
            build.loadAndCheckTag(lhs, LUA_TUSERDATA, build.vmExit(pcpos));

            IrOp udata1 = build.inst(IrCmd::LOAD_POINTER, lhs);
            build.inst(IrCmd::CHECK_USERDATA_TAG, udata1, build.constInt(kTagVec2), build.vmExit(pcpos));

            IrOp x = build.inst(IrCmd::BUFFER_READF32, udata1, build.constInt(offsetof(Vec2, x)), build.constTag(LUA_TUSERDATA));
            IrOp y = build.inst(IrCmd::BUFFER_READF32, udata1, build.constInt(offsetof(Vec2, y)), build.constTag(LUA_TUSERDATA));

            IrOp mx = build.inst(IrCmd::UNM_FLOAT, x);
            IrOp my = build.inst(IrCmd::UNM_FLOAT, y);

            build.inst(IrCmd::CHECK_GC);
            IrOp udatar = build.inst(IrCmd::NEW_USERDATA, build.constInt(sizeof(Vec2)), build.constInt(kTagVec2));

            build.inst(IrCmd::BUFFER_WRITEF32, udatar, build.constInt(offsetof(Vec2, x)), mx, build.constTag(LUA_TUSERDATA));
            build.inst(IrCmd::BUFFER_WRITEF32, udatar, build.constInt(offsetof(Vec2, y)), my, build.constTag(LUA_TUSERDATA));

            build.inst(IrCmd::STORE_POINTER, build.vmReg(resultReg), udatar);
            build.inst(IrCmd::STORE_TAG, build.vmReg(resultReg), build.constTag(LUA_TUSERDATA));

            return true;
        }
        break;
    default:
        break;
    }

    return false;
}
inline uint8_t userdataNamecallBytecodeType(uint8_t type, const char* member, size_t memberLength)
549
{
550
switch (typeToUserdataIndex(type))
551
{
552
case kUserdataColor:
553
break;
554
case kUserdataVec2:
555
if (compareMemberName(member, memberLength, "Dot"))
556
return LBC_TYPE_NUMBER;
557
558
if (compareMemberName(member, memberLength, "Min"))
559
return userdataIndexToType(kUserdataVec2);
560
break;
561
case kUserdataMat3:
562
break;
563
}
564
565
return LBC_TYPE_ANY;
566
}
567
568
// IR hook for userdata method calls: emits custom IR for vec2:Dot(other) and
// vec2:Min(other). Both branches tag-check the argument at argResReg + 2 as
// userdata and verify the vec2 runtime tag on self and argument (exiting to
// the VM at pcpos on mismatch); the result is written to argResReg. Returns
// true when IR was emitted; false defers to the generic call path.
// NOTE(review): unlike vectorNamecall, `params` and `results` are not
// checked before emitting the fast path here — confirm callers guarantee
// the two-argument, single-result shape.
inline bool userdataNamecall(
    Luau::CodeGen::IrBuilder& build,
    uint8_t type,
    const char* member,
    size_t memberLength,
    int argResReg,
    int sourceReg,
    int params,
    int results,
    int pcpos
)
{
    using namespace Luau::CodeGen;

    switch (typeToUserdataIndex(type))
    {
    case kUserdataColor:
        break;
    case kUserdataVec2:
        if (compareMemberName(member, memberLength, "Dot"))
        {
            IrOp udata1 = build.inst(IrCmd::LOAD_POINTER, build.vmReg(sourceReg));
            build.inst(IrCmd::CHECK_USERDATA_TAG, udata1, build.constInt(kTagVec2), build.vmExit(pcpos));

            build.loadAndCheckTag(build.vmReg(argResReg + 2), LUA_TUSERDATA, build.vmExit(pcpos));

            IrOp udata2 = build.inst(IrCmd::LOAD_POINTER, build.vmReg(argResReg + 2));
            build.inst(IrCmd::CHECK_USERDATA_TAG, udata2, build.constInt(kTagVec2), build.vmExit(pcpos));

            IrOp x1 = build.inst(IrCmd::BUFFER_READF32, udata1, build.constInt(offsetof(Vec2, x)), build.constTag(LUA_TUSERDATA));
            IrOp x2 = build.inst(IrCmd::BUFFER_READF32, udata2, build.constInt(offsetof(Vec2, x)), build.constTag(LUA_TUSERDATA));

            // This variant does the math in double precision (MUL_NUM/ADD_NUM)
            x1 = build.inst(IrCmd::FLOAT_TO_NUM, x1);
            x2 = build.inst(IrCmd::FLOAT_TO_NUM, x2);

            IrOp xx = build.inst(IrCmd::MUL_NUM, x1, x2);

            IrOp y1 = build.inst(IrCmd::BUFFER_READF32, udata1, build.constInt(offsetof(Vec2, y)), build.constTag(LUA_TUSERDATA));
            IrOp y2 = build.inst(IrCmd::BUFFER_READF32, udata2, build.constInt(offsetof(Vec2, y)), build.constTag(LUA_TUSERDATA));

            y1 = build.inst(IrCmd::FLOAT_TO_NUM, y1);
            y2 = build.inst(IrCmd::FLOAT_TO_NUM, y2);

            IrOp yy = build.inst(IrCmd::MUL_NUM, y1, y2);

            IrOp sum = build.inst(IrCmd::ADD_NUM, xx, yy);

            build.inst(IrCmd::STORE_DOUBLE, build.vmReg(argResReg), sum);
            build.inst(IrCmd::STORE_TAG, build.vmReg(argResReg), build.constTag(LUA_TNUMBER));

            // If the function is called in multi-return context, stack has to be adjusted
            if (results == LUA_MULTRET)
                build.inst(IrCmd::ADJUST_STACK_TO_REG, build.vmReg(argResReg), build.constInt(1));

            return true;
        }

        if (compareMemberName(member, memberLength, "Min"))
        {
            IrOp udata1 = build.inst(IrCmd::LOAD_POINTER, build.vmReg(sourceReg));
            build.inst(IrCmd::CHECK_USERDATA_TAG, udata1, build.constInt(kTagVec2), build.vmExit(pcpos));

            build.loadAndCheckTag(build.vmReg(argResReg + 2), LUA_TUSERDATA, build.vmExit(pcpos));

            IrOp udata2 = build.inst(IrCmd::LOAD_POINTER, build.vmReg(argResReg + 2));
            build.inst(IrCmd::CHECK_USERDATA_TAG, udata2, build.constInt(kTagVec2), build.vmExit(pcpos));

            IrOp x1 = build.inst(IrCmd::BUFFER_READF32, udata1, build.constInt(offsetof(Vec2, x)), build.constTag(LUA_TUSERDATA));
            IrOp x2 = build.inst(IrCmd::BUFFER_READF32, udata2, build.constInt(offsetof(Vec2, x)), build.constTag(LUA_TUSERDATA));

            // Component-wise min in double precision, then back to float storage
            x1 = build.inst(IrCmd::FLOAT_TO_NUM, x1);
            x2 = build.inst(IrCmd::FLOAT_TO_NUM, x2);

            IrOp mx = build.inst(IrCmd::MIN_NUM, x1, x2);

            IrOp y1 = build.inst(IrCmd::BUFFER_READF32, udata1, build.constInt(offsetof(Vec2, y)), build.constTag(LUA_TUSERDATA));
            IrOp y2 = build.inst(IrCmd::BUFFER_READF32, udata2, build.constInt(offsetof(Vec2, y)), build.constTag(LUA_TUSERDATA));

            y1 = build.inst(IrCmd::FLOAT_TO_NUM, y1);
            y2 = build.inst(IrCmd::FLOAT_TO_NUM, y2);

            IrOp my = build.inst(IrCmd::MIN_NUM, y1, y2);

            mx = build.inst(IrCmd::NUM_TO_FLOAT, mx);
            my = build.inst(IrCmd::NUM_TO_FLOAT, my);

            // Allocate the result vec2 userdata; CHECK_GC before NEW_USERDATA
            build.inst(IrCmd::CHECK_GC);
            IrOp udatar = build.inst(IrCmd::NEW_USERDATA, build.constInt(sizeof(Vec2)), build.constInt(kTagVec2));

            build.inst(IrCmd::BUFFER_WRITEF32, udatar, build.constInt(offsetof(Vec2, x)), mx, build.constTag(LUA_TUSERDATA));
            build.inst(IrCmd::BUFFER_WRITEF32, udatar, build.constInt(offsetof(Vec2, y)), my, build.constTag(LUA_TUSERDATA));

            build.inst(IrCmd::STORE_POINTER, build.vmReg(argResReg), udatar);
            build.inst(IrCmd::STORE_TAG, build.vmReg(argResReg), build.constTag(LUA_TUSERDATA));

            // If the function is called in multi-return context, stack has to be adjusted
            if (results == LUA_MULTRET)
                build.inst(IrCmd::ADJUST_STACK_TO_REG, build.vmReg(argResReg), build.constInt(1));

            return true;
        }
        break;
    case kUserdataMat3:
        break;
    }

    return false;
}