CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutSign UpSign In
hrydgard

CoCalc provides the best real-time collaborative environment for Jupyter Notebooks, LaTeX documents, and SageMath, scalable from individual users to large groups and classes!

GitHub Repository: hrydgard/ppsspp
Path: blob/master/Core/MIPS/RiscV/RiscVCompLoadStore.cpp
Views: 1401
1
// Copyright (c) 2023- PPSSPP Project.
2
3
// This program is free software: you can redistribute it and/or modify
4
// it under the terms of the GNU General Public License as published by
5
// the Free Software Foundation, version 2.0 or later versions.
6
7
// This program is distributed in the hope that it will be useful,
8
// but WITHOUT ANY WARRANTY; without even the implied warranty of
9
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10
// GNU General Public License 2.0 for more details.
11
12
// A copy of the GPL 2.0 should have been included with the program.
13
// If not, see http://www.gnu.org/licenses/
14
15
// Official git repository and contact information can be found at
16
// https://github.com/hrydgard/ppsspp and http://www.ppsspp.org/.
17
18
#include "Core/MemMap.h"
19
#include "Core/MIPS/RiscV/RiscVJit.h"
20
#include "Core/MIPS/RiscV/RiscVRegCache.h"
21
22
// This file contains compilation for load/store instructions.
23
//
24
// All functions should have CONDITIONAL_DISABLE, so we can narrow things down to a file quickly.
25
// Currently known non working ones should have DISABLE. No flags because that's in IR already.
26
27
// #define CONDITIONAL_DISABLE { CompIR_Generic(inst); return; }
28
#define CONDITIONAL_DISABLE {}
29
#define DISABLE { CompIR_Generic(inst); return; }
30
#define INVALIDOP { _assert_msg_(false, "Invalid IR inst %d", (int)inst.op); CompIR_Generic(inst); return; }
31
32
namespace MIPSComp {
33
34
using namespace RiscVGen;
35
using namespace RiscVJitConstants;
36
37
// Computes SCRATCH1 = MEMBASEREG + zero-extended/masked 32-bit guest address in src1,
// i.e. a host pointer for the guest address held in src1.
void RiscVJitBackend::SetScratch1ToSrc1Address(IRReg src1) {
	regs_.MapGPR(src1);
#ifdef MASKED_PSP_MEMORY
	// Masked mode: clear the top two bits of the 32-bit address (shift up,
	// then logical shift back down, using 32-bit W ops) before adding the base.
	SLLIW(SCRATCH1, regs_.R(src1), 2);
	SRLIW(SCRATCH1, SCRATCH1, 2);
	ADD(SCRATCH1, SCRATCH1, MEMBASEREG);
#else
	// Clear the top bits to be safe.
	if (cpu_info.RiscV_Zba) {
		// Zba's ADD.UW zero-extends src1 from 32 bits and adds in one instruction.
		ADD_UW(SCRATCH1, regs_.R(src1), MEMBASEREG);
	} else {
		_assert_(XLEN == 64);
		// Manual zero-extension: shift the low 32 bits up and logically back down.
		SLLI(SCRATCH1, regs_.R(src1), 32);
		SRLI(SCRATCH1, SCRATCH1, 32);
		ADD(SCRATCH1, SCRATCH1, MEMBASEREG);
	}
#endif
}
// Folds `constant` into a RISC-V 12-bit signed load/store immediate when possible.
// If every offset in [constant, constant + range] fits in [-2048, 2047], returns
// `constant` unchanged for use as the instruction immediate.  Otherwise, emits code
// to add the constant into SCRATCH1 (updating *reg to SCRATCH1) and returns 0.
// Clobbers SCRATCH1/SCRATCH2 in that case.
int32_t RiscVJitBackend::AdjustForAddressOffset(RiscVGen::RiscVReg *reg, int32_t constant, int32_t range) {
	if (constant < -2048 || constant + range > 2047) {
#ifdef MASKED_PSP_MEMORY
		if (constant > 0)
			constant &= Memory::MEMVIEW32_MASK;
#endif
		// It can't be this negative, must be a constant with top bit set.
		if ((constant & 0xC0000000) == 0x80000000) {
			if (cpu_info.RiscV_Zba) {
				// ADD.UW zero-extends SCRATCH2 from 32 bits, so a plain signed LI is fine.
				LI(SCRATCH2, constant);
				ADD_UW(SCRATCH1, SCRATCH2, *reg);
			} else {
				// Load as an unsigned 32-bit value so the top bit doesn't
				// sign-extend into the upper half of the 64-bit add.
				LI(SCRATCH2, (uint32_t)constant);
				ADD(SCRATCH1, *reg, SCRATCH2);
			}
		} else {
			LI(SCRATCH2, constant);
			ADD(SCRATCH1, *reg, SCRATCH2);
		}
		*reg = SCRATCH1;
		return 0;
	}
	return constant;
}
// Compiles integer loads: 8/16/32-bit, zero- or sign-extending, plus Load32Linked (ll).
void RiscVJitBackend::CompIR_Load(IRInst inst) {
	CONDITIONAL_DISABLE;

	regs_.SpillLockGPR(inst.dest, inst.src1);
	RiscVReg addrReg = INVALID_REG;
	if (inst.src1 == MIPS_REG_ZERO) {
		// Constant-address load: start at the membase register.
		// This will get changed by AdjustForAddressOffset.
		addrReg = MEMBASEREG;
#ifdef MASKED_PSP_MEMORY
		inst.constant &= Memory::MEMVIEW32_MASK;
#endif
	} else if (jo.cachePointers || regs_.IsGPRMappedAsPointer(inst.src1)) {
		addrReg = regs_.MapGPRAsPointer(inst.src1);
	} else {
		// Materialize membase + src1 into SCRATCH1.
		SetScratch1ToSrc1Address(inst.src1);
		addrReg = SCRATCH1;
	}
	// With NOINIT, MapReg won't subtract MEMBASEREG even if dest == src1.
	regs_.MapGPR(inst.dest, MIPSMap::NOINIT);
	regs_.MarkGPRDirty(inst.dest, true);

	s32 imm = AdjustForAddressOffset(&addrReg, inst.constant);

	// TODO: Safe memory? Or enough to have crash handler + validate?

	switch (inst.op) {
	case IROp::Load8:
		// Plain loads zero-extend (LBU/LHU); the "Ext" variants sign-extend (LB/LH).
		LBU(regs_.R(inst.dest), addrReg, imm);
		break;

	case IROp::Load8Ext:
		LB(regs_.R(inst.dest), addrReg, imm);
		break;

	case IROp::Load16:
		LHU(regs_.R(inst.dest), addrReg, imm);
		break;

	case IROp::Load16Ext:
		LH(regs_.R(inst.dest), addrReg, imm);
		break;

	case IROp::Load32:
		LW(regs_.R(inst.dest), addrReg, imm);
		break;

	case IROp::Load32Linked:
		// Skip the actual load when the result is discarded, but still set
		// the LL bit so a following Store32Conditional can succeed.
		if (inst.dest != MIPS_REG_ZERO)
			LW(regs_.R(inst.dest), addrReg, imm);
		regs_.SetGPRImm(IRREG_LLBIT, 1);
		break;

	default:
		INVALIDOP;
		break;
	}
}
// Unaligned loads (lwl/lwr).  An IR pass is expected to split these into
// simpler ops, so hitting this falls back to the generic interpreter path.
void RiscVJitBackend::CompIR_LoadShift(IRInst inst) {
	CONDITIONAL_DISABLE;

	if (inst.op == IROp::Load32Left || inst.op == IROp::Load32Right) {
		// Should not happen if the pass to split is active.
		DISABLE;
	} else {
		INVALIDOP;
	}
}
// Compiles LoadFloat: a single 32-bit float load from guest memory into an FPU reg.
void RiscVJitBackend::CompIR_FLoad(IRInst inst) {
	CONDITIONAL_DISABLE;

	RiscVReg addr = INVALID_REG;
	if (inst.src1 == MIPS_REG_ZERO) {
		// Constant address: start from membase; AdjustForAddressOffset
		// will fold the constant in below.
		addr = MEMBASEREG;
#ifdef MASKED_PSP_MEMORY
		inst.constant &= Memory::MEMVIEW32_MASK;
#endif
	} else if (jo.cachePointers || regs_.IsGPRMappedAsPointer(inst.src1)) {
		addr = regs_.MapGPRAsPointer(inst.src1);
	} else {
		SetScratch1ToSrc1Address(inst.src1);
		addr = SCRATCH1;
	}

	s32 offset = AdjustForAddressOffset(&addr, inst.constant);

	// TODO: Safe memory? Or enough to have crash handler + validate?

	if (inst.op == IROp::LoadFloat) {
		regs_.MapFPR(inst.dest, MIPSMap::NOINIT);
		FL(32, regs_.F(inst.dest), addr, offset);
	} else {
		INVALIDOP;
	}
}
// Compiles LoadVec4: four consecutive 32-bit floats (16 bytes) into FPU regs.
void RiscVJitBackend::CompIR_VecLoad(IRInst inst) {
	CONDITIONAL_DISABLE;

	RiscVReg addrReg = INVALID_REG;
	if (inst.src1 == MIPS_REG_ZERO) {
		// This will get changed by AdjustForAddressOffset.
		addrReg = MEMBASEREG;
#ifdef MASKED_PSP_MEMORY
		inst.constant &= Memory::MEMVIEW32_MASK;
#endif
	} else if (jo.cachePointers || regs_.IsGPRMappedAsPointer(inst.src1)) {
		addrReg = regs_.MapGPRAsPointer(inst.src1);
	} else {
		SetScratch1ToSrc1Address(inst.src1);
		addrReg = SCRATCH1;
	}

	// We need to be able to address the whole 16 bytes, so offset of 12.
	s32 imm = AdjustForAddressOffset(&addrReg, inst.constant, 12);

	// TODO: Safe memory? Or enough to have crash handler + validate?

	switch (inst.op) {
	case IROp::LoadVec4:
		// One scalar FL per lane, at imm + 0/4/8/12.
		for (int i = 0; i < 4; ++i) {
			// Spilling is okay.
			regs_.MapFPR(inst.dest + i, MIPSMap::NOINIT);
			FL(32, regs_.F(inst.dest + i), addrReg, imm + 4 * i);
		}
		break;

	default:
		INVALIDOP;
		break;
	}
}
// Compiles integer stores: 8/16/32-bit value in src3 stored to [src1 + constant].
void RiscVJitBackend::CompIR_Store(IRInst inst) {
	CONDITIONAL_DISABLE;

	regs_.SpillLockGPR(inst.src3, inst.src1);
	RiscVReg addrReg = INVALID_REG;
	if (inst.src1 == MIPS_REG_ZERO) {
		// This will get changed by AdjustForAddressOffset.
		addrReg = MEMBASEREG;
#ifdef MASKED_PSP_MEMORY
		inst.constant &= Memory::MEMVIEW32_MASK;
#endif
	} else if ((jo.cachePointers || regs_.IsGPRMappedAsPointer(inst.src1)) && inst.src3 != inst.src1) {
		// NOTE: when src3 == src1, the reg is needed as a plain value below,
		// so the pointer mapping is skipped — presumably to avoid the two
		// mappings conflicting; confirm against RiscVRegCache semantics.
		addrReg = regs_.MapGPRAsPointer(inst.src1);
	} else {
		addrReg = SCRATCH1;
	}
	// If src3 holds a small known immediate, this may hand back a temp reg
	// holding that value without fully mapping src3.
	RiscVReg valueReg = regs_.TryMapTempImm(inst.src3);
	if (valueReg == INVALID_REG)
		valueReg = regs_.MapGPR(inst.src3);

	s32 imm = AdjustForAddressOffset(&addrReg, inst.constant);

	// TODO: Safe memory? Or enough to have crash handler + validate?

	switch (inst.op) {
	case IROp::Store8:
		SB(valueReg, addrReg, imm);
		break;

	case IROp::Store16:
		SH(valueReg, addrReg, imm);
		break;

	case IROp::Store32:
		SW(valueReg, addrReg, imm);
		break;

	default:
		INVALIDOP;
		break;
	}
}
// Compiles Store32Conditional (sc): stores src3 only if IRREG_LLBIT is still set,
// then writes 1 (success) or 0 (failure) into dest unless dest is the zero reg.
void RiscVJitBackend::CompIR_CondStore(IRInst inst) {
	CONDITIONAL_DISABLE;
	if (inst.op != IROp::Store32Conditional)
		INVALIDOP;

	regs_.SpillLockGPR(IRREG_LLBIT, inst.src3, inst.src1);
	RiscVReg addrReg = INVALID_REG;
	if (inst.src1 == MIPS_REG_ZERO) {
		// This will get changed by AdjustForAddressOffset.
		addrReg = MEMBASEREG;
#ifdef MASKED_PSP_MEMORY
		inst.constant &= Memory::MEMVIEW32_MASK;
#endif
	} else if ((jo.cachePointers || regs_.IsGPRMappedAsPointer(inst.src1)) && inst.src3 != inst.src1) {
		addrReg = regs_.MapGPRAsPointer(inst.src1);
	} else {
		SetScratch1ToSrc1Address(inst.src1);
		addrReg = SCRATCH1;
	}
	// src3 is mapped DIRTY when dest is written — presumably because dest and
	// src3 can be the same MIPS reg (sc's rt is both value and result); confirm.
	regs_.MapGPR(inst.src3, inst.dest == MIPS_REG_ZERO ? MIPSMap::INIT : MIPSMap::DIRTY);
	regs_.MapGPR(IRREG_LLBIT);

	s32 imm = AdjustForAddressOffset(&addrReg, inst.constant);

	// TODO: Safe memory? Or enough to have crash handler + validate?

	// Skip the store entirely if the LL bit was cleared.
	FixupBranch condFailed = BEQ(regs_.R(IRREG_LLBIT), R_ZERO);
	SW(regs_.R(inst.src3), addrReg, imm);

	if (inst.dest != MIPS_REG_ZERO) {
		// Success path sets dest = 1 and jumps over the failure path's dest = 0.
		LI(regs_.R(inst.dest), 1);
		FixupBranch finish = J();

		SetJumpTarget(condFailed);
		LI(regs_.R(inst.dest), 0);
		SetJumpTarget(finish);
	} else {
		SetJumpTarget(condFailed);
	}
}
// Unaligned stores (swl/swr).  An IR pass is expected to split these into
// simpler ops, so hitting this falls back to the generic interpreter path.
void RiscVJitBackend::CompIR_StoreShift(IRInst inst) {
	CONDITIONAL_DISABLE;

	if (inst.op == IROp::Store32Left || inst.op == IROp::Store32Right) {
		// Should not happen if the pass to split is active.
		DISABLE;
	} else {
		INVALIDOP;
	}
}
// Compiles StoreFloat: a single 32-bit float stored from an FPU reg to guest memory.
void RiscVJitBackend::CompIR_FStore(IRInst inst) {
	CONDITIONAL_DISABLE;

	RiscVReg addr = INVALID_REG;
	if (inst.src1 == MIPS_REG_ZERO) {
		// Constant address: start from membase; AdjustForAddressOffset
		// will fold the constant in below.
		addr = MEMBASEREG;
#ifdef MASKED_PSP_MEMORY
		inst.constant &= Memory::MEMVIEW32_MASK;
#endif
	} else if (jo.cachePointers || regs_.IsGPRMappedAsPointer(inst.src1)) {
		addr = regs_.MapGPRAsPointer(inst.src1);
	} else {
		SetScratch1ToSrc1Address(inst.src1);
		addr = SCRATCH1;
	}

	s32 offset = AdjustForAddressOffset(&addr, inst.constant);

	// TODO: Safe memory? Or enough to have crash handler + validate?

	if (inst.op == IROp::StoreFloat) {
		regs_.MapFPR(inst.src3);
		FS(32, regs_.F(inst.src3), addr, offset);
	} else {
		INVALIDOP;
	}
}
// Compiles StoreVec4: four consecutive 32-bit floats (16 bytes) from FPU regs to memory.
void RiscVJitBackend::CompIR_VecStore(IRInst inst) {
	CONDITIONAL_DISABLE;

	RiscVReg addrReg = INVALID_REG;
	if (inst.src1 == MIPS_REG_ZERO) {
		// This will get changed by AdjustForAddressOffset.
		addrReg = MEMBASEREG;
#ifdef MASKED_PSP_MEMORY
		inst.constant &= Memory::MEMVIEW32_MASK;
#endif
	} else if (jo.cachePointers || regs_.IsGPRMappedAsPointer(inst.src1)) {
		addrReg = regs_.MapGPRAsPointer(inst.src1);
	} else {
		SetScratch1ToSrc1Address(inst.src1);
		addrReg = SCRATCH1;
	}

	// We need to be able to address the whole 16 bytes, so offset of 12.
	s32 imm = AdjustForAddressOffset(&addrReg, inst.constant, 12);

	// TODO: Safe memory? Or enough to have crash handler + validate?

	switch (inst.op) {
	case IROp::StoreVec4:
		// One scalar FS per lane, at imm + 0/4/8/12.
		for (int i = 0; i < 4; ++i) {
			// Spilling is okay, though not ideal.
			regs_.MapFPR(inst.src3 + i);
			FS(32, regs_.F(inst.src3 + i), addrReg, imm + 4 * i);
		}
		break;

	default:
		INVALIDOP;
		break;
	}
}
} // namespace MIPSComp
397
398