GitHub Repository: hrydgard/ppsspp
Path: blob/master/Core/MIPS/x86/X64IRCompLoadStore.cpp

// Copyright (c) 2023- PPSSPP Project.

// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, version 2.0 or later versions.

// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License 2.0 for more details.

// A copy of the GPL 2.0 should have been included with the program.
// If not, see http://www.gnu.org/licenses/

// Official git repository and contact information can be found at
// https://github.com/hrydgard/ppsspp and http://www.ppsspp.org/.

#include "ppsspp_config.h"
#if PPSSPP_ARCH(X86) || PPSSPP_ARCH(AMD64)

#include "Core/MemMap.h"
#include "Core/MIPS/x86/X64IRJit.h"
#include "Core/MIPS/x86/X64IRRegCache.h"

// This file contains compilation for load/store instructions.
//
// All functions should have CONDITIONAL_DISABLE, so we can narrow things down to a file quickly.
// Currently known non-working ones should have DISABLE. No flags because that's in IR already.

// #define CONDITIONAL_DISABLE { CompIR_Generic(inst); return; }
#define CONDITIONAL_DISABLE {}
#define DISABLE { CompIR_Generic(inst); return; }
#define INVALIDOP { _assert_msg_(false, "Invalid IR inst %d", (int)inst.op); CompIR_Generic(inst); return; }

namespace MIPSComp {

using namespace Gen;
using namespace X64IRJitConstants;

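// Builds an x86 operand for the guest address src1 + constant, choosing the
// cheapest encoding available. All of the load/store handlers below funnel
// their address computation through here.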
Gen::OpArg X64JitBackend::PrepareSrc1Address(IRInst inst) {
	const IRMeta *m = GetIRMeta(inst.op);

	bool src1IsPointer = regs_.IsGPRMappedAsPointer(inst.src1);
	bool readsFromSrc1 = inst.src1 == inst.src3 && (m->flags & (IRFLAG_SRC3 | IRFLAG_SRC3DST)) != 0;
	// If it's about to be clobbered, don't waste time pointerifying. Use displacement.
	bool clobbersSrc1 = !readsFromSrc1 && regs_.IsGPRClobbered(inst.src1);

	int32_t disp = (int32_t)inst.constant;
	// It can't be this negative, must be a constant address with the top bit set.
	if ((disp & 0xC0000000) == 0x80000000) {
		disp = inst.constant & 0x7FFFFFFF;
	}
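	// Worked example (hypothetical constant): a guest address like 0x88010100 has
	// bit 31 set (a PSP kernel/mirror address), so the int32_t cast above makes
	// disp hugely negative; clearing bit 31 recovers 0x08010100, a valid positive
	// displacement into emulated memory.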

#ifdef MASKED_PSP_MEMORY
	if (disp > 0)
		disp &= Memory::MEMVIEW32_MASK;
#endif

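	// Three addressing strategies follow, cheapest viable one first:
	//  1. src1 is the zero register: the address is a pure constant displacement.
	//  2. src1 is (or can cheaply become) a host pointer: one MDisp covers it.
	//  3. Otherwise map src1 normally and use base+index addressing (or explicit
	//     masking under MASKED_PSP_MEMORY).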
	OpArg addrArg;
	if (inst.src1 == MIPS_REG_ZERO) {
#ifdef MASKED_PSP_MEMORY
		disp &= Memory::MEMVIEW32_MASK;
#endif
#if PPSSPP_ARCH(AMD64)
		addrArg = MDisp(MEMBASEREG, disp & 0x7FFFFFFF);
#else
		addrArg = M(Memory::base + disp);
#endif
	} else if ((jo.cachePointers || src1IsPointer) && !readsFromSrc1 && (!clobbersSrc1 || src1IsPointer)) {
		X64Reg src1 = regs_.MapGPRAsPointer(inst.src1);
		addrArg = MDisp(src1, disp);
	} else {
		regs_.MapGPR(inst.src1);
#ifdef MASKED_PSP_MEMORY
		LEA(PTRBITS, SCRATCH1, MDisp(regs_.RX(inst.src1), disp));
		AND(PTRBITS, R(SCRATCH1), Imm32(Memory::MEMVIEW32_MASK));
		addrArg = MDisp(SCRATCH1, (intptr_t)Memory::base);
#else
#if PPSSPP_ARCH(AMD64)
		addrArg = MComplex(MEMBASEREG, regs_.RX(inst.src1), SCALE_1, disp);
#else
		addrArg = MDisp(regs_.RX(inst.src1), Memory::base + disp);
#endif
#endif
	}

	return addrArg;
}

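// Store32Conditional implements the MIPS sc instruction: the store only happens
// if IRREG_LLBIT is still set from a matching Load32Linked (ll, handled in
// CompIR_Load below), and dest receives 1 on success or 0 on failure. The bit
// is simply tracked in an ordinary guest register (IRREG_LLBIT).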
void X64JitBackend::CompIR_CondStore(IRInst inst) {
	CONDITIONAL_DISABLE;
	if (inst.op != IROp::Store32Conditional)
		INVALIDOP;

	regs_.SpillLockGPR(IRREG_LLBIT, inst.src3, inst.src1);
	OpArg addrArg = PrepareSrc1Address(inst);
	OpArg valueArg = R(regs_.MapGPR(inst.src3, MIPSMap::INIT));

	regs_.MapGPR(IRREG_LLBIT, MIPSMap::INIT);

	// TODO: Safe memory? Or enough to have crash handler + validate?

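	// TEST reg,reg sets ZF exactly when LLBIT is zero, so J_CC(CC_Z) skips the
	// store (and lands on the "dest = 0" failure path below) if the link is lost.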
	TEST(32, regs_.R(IRREG_LLBIT), regs_.R(IRREG_LLBIT));
	FixupBranch condFailed = J_CC(CC_Z);
	MOV(32, addrArg, valueArg);

	if (inst.dest != MIPS_REG_ZERO) {
		MOV(32, regs_.R(inst.dest), Imm32(1));
		FixupBranch finish = J();

		SetJumpTarget(condFailed);
		XOR(32, regs_.R(inst.dest), regs_.R(inst.dest));
		SetJumpTarget(finish);
	} else {
		SetJumpTarget(condFailed);
	}
}

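// Scalar float load/store are a single MOVSS each; PrepareSrc1Address has
// already folded the guest base and displacement into addrArg.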
void X64JitBackend::CompIR_FLoad(IRInst inst) {
	CONDITIONAL_DISABLE;

	OpArg addrArg = PrepareSrc1Address(inst);

	switch (inst.op) {
	case IROp::LoadFloat:
		regs_.MapFPR(inst.dest, MIPSMap::NOINIT);
		MOVSS(regs_.FX(inst.dest), addrArg);
		break;

	default:
		INVALIDOP;
		break;
	}
}

void X64JitBackend::CompIR_FStore(IRInst inst) {
	CONDITIONAL_DISABLE;

	OpArg addrArg = PrepareSrc1Address(inst);

	switch (inst.op) {
	case IROp::StoreFloat:
		regs_.MapFPR(inst.src3);
		MOVSS(addrArg, regs_.FX(inst.src3));
		break;

	default:
		INVALIDOP;
		break;
	}
}

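// Integer loads map directly onto x86: MOVZX for the zero-extending variants
// (MIPS lbu/lhu), MOVSX for the sign-extending ones (lb/lh), and a plain MOV
// for 32-bit.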
void X64JitBackend::CompIR_Load(IRInst inst) {
	CONDITIONAL_DISABLE;

	regs_.SpillLockGPR(inst.dest, inst.src1);
	OpArg addrArg = PrepareSrc1Address(inst);
	// With NOINIT, MapReg won't subtract MEMBASEREG even if dest == src1.
	regs_.MapGPR(inst.dest, MIPSMap::NOINIT);

	// TODO: Safe memory? Or enough to have crash handler + validate?

	switch (inst.op) {
	case IROp::Load8:
		MOVZX(32, 8, regs_.RX(inst.dest), addrArg);
		break;

	case IROp::Load8Ext:
		MOVSX(32, 8, regs_.RX(inst.dest), addrArg);
		break;

	case IROp::Load16:
		MOVZX(32, 16, regs_.RX(inst.dest), addrArg);
		break;

	case IROp::Load16Ext:
		MOVSX(32, 16, regs_.RX(inst.dest), addrArg);
		break;

	case IROp::Load32:
		MOV(32, regs_.R(inst.dest), addrArg);
		break;

	case IROp::Load32Linked:
		if (inst.dest != MIPS_REG_ZERO)
			MOV(32, regs_.R(inst.dest), addrArg);
		regs_.SetGPRImm(IRREG_LLBIT, 1);
		break;

	default:
		INVALIDOP;
		break;
	}
}

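// Load32Left/Load32Right are the MIPS unaligned-load pair (lwl/lwr). An IR
// pass normally splits them into simpler ops before they reach this backend,
// so here we just punt to the generic path.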
void X64JitBackend::CompIR_LoadShift(IRInst inst) {
	CONDITIONAL_DISABLE;

	switch (inst.op) {
	case IROp::Load32Left:
	case IROp::Load32Right:
		// Should not happen if the pass to split is active.
		DISABLE;
		break;

	default:
		INVALIDOP;
		break;
	}
}

void X64JitBackend::CompIR_Store(IRInst inst) {
	CONDITIONAL_DISABLE;

	regs_.SpillLockGPR(inst.src3, inst.src1);
	OpArg addrArg = PrepareSrc1Address(inst);

	// i386 can only use certain regs for 8-bit operations.
	X64Map valueFlags = inst.op == IROp::Store8 ? X64Map::LOW_SUBREG : X64Map::NONE;
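	// (In 32-bit mode only EAX/EBX/ECX/EDX expose low byte subregisters, AL..DL,
	// so Store8 constrains the value to one of those; x86-64 with REX prefixes
	// has no such restriction.)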

	OpArg valueArg;
	X64Reg valueReg = regs_.TryMapTempImm(inst.src3, valueFlags);
	if (valueReg != INVALID_REG) {
		valueArg = R(valueReg);
	} else if (regs_.IsGPRImm(inst.src3)) {
		u32 imm = regs_.GetGPRImm(inst.src3);
		switch (inst.op) {
		case IROp::Store8: valueArg = Imm8((u8)imm); break;
		case IROp::Store16: valueArg = Imm16((u16)imm); break;
		case IROp::Store32: valueArg = Imm32(imm); break;
		default:
			INVALIDOP;
			break;
		}
	} else {
		valueArg = R(regs_.MapGPR(inst.src3, MIPSMap::INIT | valueFlags));
	}

	// TODO: Safe memory? Or enough to have crash handler + validate?

	switch (inst.op) {
	case IROp::Store8:
		MOV(8, addrArg, valueArg);
		break;

	case IROp::Store16:
		MOV(16, addrArg, valueArg);
		break;

	case IROp::Store32:
		MOV(32, addrArg, valueArg);
		break;

	default:
		INVALIDOP;
		break;
	}
}

void X64JitBackend::CompIR_StoreShift(IRInst inst) {
	CONDITIONAL_DISABLE;

	switch (inst.op) {
	case IROp::Store32Left:
	case IROp::Store32Right:
		// Should not happen if the pass to split is active.
		DISABLE;
		break;

	default:
		INVALIDOP;
		break;
	}
}

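// Vec4 transfers use MOVUPS, the unaligned form, presumably because a guest
// address is not guaranteed to translate to a 16-byte-aligned host address.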
void X64JitBackend::CompIR_VecLoad(IRInst inst) {
	CONDITIONAL_DISABLE;

	OpArg addrArg = PrepareSrc1Address(inst);

	switch (inst.op) {
	case IROp::LoadVec4:
		regs_.MapVec4(inst.dest, MIPSMap::NOINIT);
		MOVUPS(regs_.FX(inst.dest), addrArg);
		break;

	default:
		INVALIDOP;
		break;
	}
}

void X64JitBackend::CompIR_VecStore(IRInst inst) {
	CONDITIONAL_DISABLE;

	OpArg addrArg = PrepareSrc1Address(inst);

	switch (inst.op) {
	case IROp::StoreVec4:
		regs_.MapVec4(inst.src3);
		MOVUPS(addrArg, regs_.FX(inst.src3));
		break;

	default:
		INVALIDOP;
		break;
	}
}

} // namespace MIPSComp

#endif