#pragma once
#include "Luau/AssemblyBuilderX64.h"
#include "EmitCommon.h"
#include "lobject.h"
#include "ltm.h"
namespace Luau
{
namespace CodeGen
{
enum class IrCondition : uint8_t;
struct IrOp;
namespace X64
{
struct IrRegAllocX64;
// Entry points of generated functions are aligned to this many bytes.
inline constexpr uint32_t kFunctionAlignment = 32;

// Non-volatile registers with fixed roles for the duration of generated code.
// rBase and rConstants are used by the luauReg*/luauConstant* helpers below as
// base pointers into TValue arrays; the exact contents of rState and
// rNativeContext are established by the out-of-line emit helpers
// (NOTE(review): names suggest lua_State* and a native context struct — confirm
// against the prologue implementation).
inline constexpr RegisterX64 rState = r15;
inline constexpr RegisterX64 rBase = r14;
inline constexpr RegisterX64 rNativeContext = r13;
inline constexpr RegisterX64 rConstants = r12;

// Number of reserved qword stack locals (see sClosure/sCode/sTemporarySlot).
inline constexpr unsigned kExtraLocals = 3;
// Qword spill slot counts; the old and new sizes coexist while the switch-over
// from the deprecated allocation is in progress.
inline constexpr unsigned kSpillSlots_DEPRECATED = 13;
inline constexpr unsigned kSpillSlots_NEW = 12;
static_assert((kExtraLocals + kSpillSlots_DEPRECATED) * 8 % 16 == 0, "locals have to preserve 16 byte alignment");
static_assert(kSpillSlots_NEW <= kSpillSlots_DEPRECATED, "new spill slot allocation cannot exceed deprecated one");
static_assert(kSpillSlots_NEW % 2 == 0, "spill slots have to be sized in 16 byte TValue chunks, for valid extra register spill-over");
// Extra qword spill slots backed by storage the Luau global state provides.
inline constexpr unsigned kExtraSpillSlots = 64;
static_assert(kExtraSpillSlots * 8 <= LUA_EXECUTION_CALLBACK_STORAGE, "can't use more extra slots than Luau global state provides");
// On Windows, xmm registers starting at xmm6 are non-volatile (callee-saved);
// SystemV has no non-volatile xmm registers (see getNonVolXmmStorageSize).
inline constexpr uint8_t kWindowsFirstNonVolXmmReg = 6;
// How many xmm registers the allocator may use on each ABI.
inline constexpr uint8_t kWindowsUsableXmmRegs = 10;
inline constexpr uint8_t kSystemVUsableXmmRegs = 16;
// Returns how many xmm registers the register allocator may hand out for the
// given ABI. SystemV exposes all 16; Windows is limited because registers from
// xmm6 up are callee-saved there.
inline uint8_t getXmmRegisterCount(ABIX64 abi)
{
    if (abi == ABIX64::SystemV)
        return kSystemVUsableXmmRegs;

    return kWindowsUsableXmmRegs;
}
// Extra 8 bytes included in the frame size; presumably keeps rsp 16-byte
// aligned given the return address pushed by the call — confirm against the
// prologue implementation.
inline constexpr unsigned kStackAlign = 8;
// Storage for the kExtraLocals reserved qword locals.
inline constexpr unsigned kStackLocalStorage = 8 * kExtraLocals;
// Storage for the spill slots (sized for the larger, deprecated count).
inline constexpr unsigned kStackSpillStorage = 8 * kSpillSlots_DEPRECATED;
// Room for two extra qword stack arguments when calling out.
inline constexpr unsigned kStackExtraArgumentStorage = 2 * 8;
// Home ("shadow") space for the four register arguments, per the Windows x64
// calling convention.
inline constexpr unsigned kStackRegHomeStorage = 4 * 8;
// Computes how many bytes of stack are needed to preserve non-volatile xmm
// registers for the given ABI and register budget.
inline unsigned getNonVolXmmStorageSize(ABIX64 abi, uint8_t xmmRegCount)
{
    // SystemV has no non-volatile xmm registers, and on Windows nothing needs
    // saving unless the allocator can reach past the last volatile register.
    if (abi == ABIX64::SystemV || xmmRegCount <= kWindowsFirstNonVolXmmReg)
        return 0;

    CODEGEN_ASSERT(xmmRegCount <= 16);

    // One 16-byte slot per non-volatile register that may be clobbered.
    return (xmmRegCount - kWindowsFirstNonVolXmmReg) * 16;
}
// Reserved locals live right above the outgoing-argument and reg-home areas.
inline constexpr unsigned kStackOffsetToLocals = kStackExtraArgumentStorage + kStackRegHomeStorage;
// Spill slots follow immediately after the reserved locals.
inline constexpr unsigned kStackOffsetToSpillSlots = kStackOffsetToLocals + kStackLocalStorage;
// Total stack frame size for generated code: the fixed layout (extra call
// arguments + reg home space + locals + spill slots) plus the ABI-dependent
// xmm save area and the trailing alignment padding.
inline unsigned getFullStackSize(ABIX64 abi, uint8_t xmmRegCount)
{
    unsigned fixedSize = kStackOffsetToSpillSlots + kStackSpillStorage + kStackAlign;
    unsigned xmmSaveSize = getNonVolXmmStorageSize(abi, xmmRegCount);

    return fixedSize + xmmSaveSize;
}
// Named operands for the three reserved qword stack locals (names describe
// their intended contents) and for the start of the spill area.
inline constexpr OperandX64 sClosure = qword[rsp + kStackOffsetToLocals + 0];
inline constexpr OperandX64 sCode = qword[rsp + kStackOffsetToLocals + 8];
inline constexpr OperandX64 sTemporarySlot = addr[rsp + kStackOffsetToLocals + 16];
inline constexpr OperandX64 sSpillArea = addr[rsp + kStackOffsetToSpillSlots];
// Whole TValue of VM stack register 'ri' as a 16-byte (xmmword) operand.
inline OperandX64 luauReg(int ri)
{
    return xmmword[rBase + ri * sizeof(TValue)];
}
// Size-less address operand for VM stack register 'ri'.
inline OperandX64 luauRegAddress(int ri)
{
    return addr[rBase + ri * sizeof(TValue)];
}
// 64-bit 'value' field of VM stack register 'ri'.
inline OperandX64 luauRegValue(int ri)
{
    return qword[rBase + ri * sizeof(TValue) + offsetof(TValue, value)];
}
// 32-bit type tag ('tt') of VM stack register 'ri'.
inline OperandX64 luauRegTag(int ri)
{
    return dword[rBase + ri * sizeof(TValue) + offsetof(TValue, tt)];
}
// 32-bit 'extra' field of VM stack register 'ri'.
inline OperandX64 luauRegExtra(int ri)
{
    return dword[rBase + ri * sizeof(TValue) + offsetof(TValue, extra)];
}
// 'value' field of VM stack register 'ri' viewed as a 32-bit integer.
inline OperandX64 luauRegValueInt(int ri)
{
    return dword[rBase + ri * sizeof(TValue) + offsetof(TValue, value)];
}
// Float component 'index' of the vector stored in VM stack register 'ri'.
inline OperandX64 luauRegValueVector(int ri, int index)
{
    return dword[rBase + ri * sizeof(TValue) + offsetof(TValue, value) + (sizeof(float) * index)];
}
// Whole TValue of constant 'ki' as a 16-byte (xmmword) operand.
inline OperandX64 luauConstant(int ki)
{
    return xmmword[rConstants + ki * sizeof(TValue)];
}
// Size-less address operand for constant 'ki'.
inline OperandX64 luauConstantAddress(int ki)
{
    return addr[rConstants + ki * sizeof(TValue)];
}
// 32-bit type tag ('tt') of constant 'ki'.
inline OperandX64 luauConstantTag(int ki)
{
    return dword[rConstants + ki * sizeof(TValue) + offsetof(TValue, tt)];
}
// 64-bit 'value' field of constant 'ki'.
inline OperandX64 luauConstantValue(int ki)
{
    return qword[rConstants + ki * sizeof(TValue) + offsetof(TValue, value)];
}
// 64-bit value of the key of the hash-table node pointed to by 'node'.
inline OperandX64 luauNodeKeyValue(RegisterX64 node)
{
    return qword[node + offsetof(LuaNode, key) + offsetof(TKey, value)];
}
// Dword holding the node key's tag; the tag is bit-packed with the 'next'
// offset, hence the dedicated offset constant from EmitCommon.h.
inline OperandX64 luauNodeKeyTag(RegisterX64 node)
{
    return dword[node + offsetof(LuaNode, key) + kOffsetOfTKeyTagNext];
}
// Emits a copy of a full 16-byte TValue from memory operand 'op' into VM stack
// register 'ri', staging it through xmm register 'tmp'.
inline void setLuauReg(AssemblyBuilderX64& build, RegisterX64 tmp, int ri, OperandX64 op)
{
    // Only memory sources are supported; a register source wouldn't need tmp.
    CODEGEN_ASSERT(op.cat == CategoryX64::mem);
    build.vmovups(tmp, op);
    build.vmovups(luauReg(ri), tmp);
}
// Emits a jump to 'label' taken when the tag of VM register 'ri' equals 'tag'.
inline void jumpIfTagIs(AssemblyBuilderX64& build, int ri, lua_Type tag, Label& label)
{
    build.cmp(luauRegTag(ri), tag);
    build.jcc(ConditionX64::Equal, label);
}
// Emits a jump to 'label' taken when the tag of VM register 'ri' differs from 'tag'.
inline void jumpIfTagIsNot(AssemblyBuilderX64& build, int ri, lua_Type tag, Label& label)
{
    build.cmp(luauRegTag(ri), tag);
    build.jcc(ConditionX64::NotEqual, label);
}
// Emits a check of VM register 'ri' that jumps to 'target' when the value is
// falsy (nil or boolean false). Truthy values either jump to 'fallthrough'
// (any non-boolean, non-nil value) or fall out of the emitted sequence
// (boolean true), so 'fallthrough' should be bound immediately after this call.
inline void jumpIfFalsy(AssemblyBuilderX64& build, int ri, Label& target, Label& fallthrough)
{
    jumpIfTagIs(build, ri, LUA_TNIL, target);             // nil is falsy
    jumpIfTagIsNot(build, ri, LUA_TBOOLEAN, fallthrough); // not nil or boolean => truthy
    build.cmp(luauRegValueInt(ri), 0);                    // boolean: falsy iff stored int is 0
    build.jcc(ConditionX64::Equal, target);
}
// Emits a check of VM register 'ri' that jumps to 'target' when the value is
// truthy (anything but nil or boolean false). Falsy values either jump to
// 'fallthrough' (nil) or fall out of the emitted sequence (boolean false), so
// 'fallthrough' should be bound immediately after this call.
inline void jumpIfTruthy(AssemblyBuilderX64& build, int ri, Label& target, Label& fallthrough)
{
    jumpIfTagIs(build, ri, LUA_TNIL, fallthrough);  // nil is falsy
    jumpIfTagIsNot(build, ri, LUA_TBOOLEAN, target); // not nil or boolean => truthy
    build.cmp(luauRegValueInt(ri), 0);               // boolean: truthy iff stored int is non-0
    build.jcc(ConditionX64::NotEqual, target);
}
// ---- Out-of-line helpers (implemented in the matching .cpp file) ----
// Summaries below are inferred from names and signatures; consult the
// implementations for exact register usage and emitted sequences.

// Numeric comparison of 'lhs' and 'rhs' followed by a jump to 'label' when
// 'cond' holds; 'tmp' is scratch, 'floatPrecision' selects single- vs
// double-precision comparison (per name).
void jumpOnNumberCmp(AssemblyBuilderX64& build, RegisterX64 tmp, OperandX64 lhs, OperandX64 rhs, IrCondition cond, Label& label, bool floatPrecision);
// Maps an integer IrCondition onto the corresponding x64 condition code.
ConditionX64 getConditionInt(IrCondition cond);
// Loads into 'node' the table node for the slot cached at instruction 'pcpos'.
void getTableNodeAtCachedSlot(AssemblyBuilderX64& build, RegisterX64 tmp, RegisterX64 node, RegisterX64 table, int pcpos);
// Converts the number in 'numd' to an integer index in 'numi', jumping to
// 'label' when it cannot be used as an index (per name).
void convertNumberToIndexOrJump(AssemblyBuilderX64& build, RegisterX64 tmp, RegisterX64 numd, RegisterX64 numi, Label& label);
// Runtime-call fallbacks for arithmetic, length, and table reads/writes.
void callArithHelper(IrRegAllocX64& regs, AssemblyBuilderX64& build, int ra, OperandX64 b, OperandX64 c, TMS tm);
void callLengthHelper(IrRegAllocX64& regs, AssemblyBuilderX64& build, int ra, int rb);
void callGetTable(IrRegAllocX64& regs, AssemblyBuilderX64& build, int rb, OperandX64 c, int ra);
void callSetTable(IrRegAllocX64& regs, AssemblyBuilderX64& build, int rb, OperandX64 c, int ra);
// GC write-barrier condition checks and barrier calls; '_DEPRECATED' variants
// are kept alongside the new signatures during a transition.
void checkObjectBarrierConditions(AssemblyBuilderX64& build, RegisterX64 tmp, RegisterX64 object, RegisterX64 ra, IrOp raOp, int ratag, Label& skip);
void checkObjectBarrierConditions_DEPRECATED(AssemblyBuilderX64& build, RegisterX64 tmp, RegisterX64 object, IrOp ra, int ratag, Label& skip);
void callBarrierObject(IrRegAllocX64& regs, AssemblyBuilderX64& build, RegisterX64 object, IrOp objectOp, RegisterX64 ra, IrOp raOp, int ratag);
void callBarrierObject_DEPRECATED(IrRegAllocX64& regs, AssemblyBuilderX64& build, RegisterX64 object, IrOp objectOp, IrOp ra, int ratag);
void callBarrierTableFast(IrRegAllocX64& regs, AssemblyBuilderX64& build, RegisterX64 table, IrOp tableOp);
// Emits a call performing a GC step (per name).
void callStepGc(IrRegAllocX64& regs, AssemblyBuilderX64& build);
// Prologue/epilogue and VM-interaction sequences.
void emitClearNativeFlag(AssemblyBuilderX64& build);
void emitExit(AssemblyBuilderX64& build, bool continueInVm);
void emitUpdateBase(AssemblyBuilderX64& build);
void emitInterrupt(AssemblyBuilderX64& build);
void emitFallback(IrRegAllocX64& regs, AssemblyBuilderX64& build, int offset, int pcpos);
void emitUpdatePcForExit(AssemblyBuilderX64& build);
void emitReturn(AssemblyBuilderX64& build, ModuleHelpers& helpers);
}
}
}