GitHub Repository: hrydgard/ppsspp
Path: blob/master/Core/MemMap.cpp
// Copyright (C) 2003 Dolphin Project / 2012 PPSSPP Project

// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, version 2.0 or later versions.

// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License 2.0 for more details.

// A copy of the GPL 2.0 should have been included with the program.
// If not, see http://www.gnu.org/licenses/

// Official git repository and contact information can be found at
// https://github.com/hrydgard/ppsspp and http://www.ppsspp.org/.

#include "ppsspp_config.h"

#if PPSSPP_PLATFORM(UWP)
#include "Common/CommonWindows.h"
#endif

#include <algorithm>
#include <mutex>

#include "Common/Common.h"
#include "Common/MemoryUtil.h"
#include "Common/MemArena.h"
#include "Common/Serialize/Serializer.h"
#include "Common/Serialize/SerializeFuncs.h"

#include "Core/Core.h"
#include "Core/Config.h"
#include "Core/ConfigValues.h"
#include "Core/Debugger/SymbolMap.h"
#include "Core/Debugger/MemBlockInfo.h"
#include "Core/HDRemaster.h"
#include "Core/HLE/HLE.h"
#include "Core/HLE/ReplaceTables.h"
#include "Core/MemMap.h"
#include "Core/MemFault.h"
#include "Core/MIPS/MIPS.h"
#include "Core/MIPS/JitCommon/JitBlockCache.h"
#include "Core/MIPS/JitCommon/JitCommon.h"
#include "Common/Thread/ParallelLoop.h"

namespace Memory {

// The base pointer to the auto-mirrored arena.
u8* base = nullptr;

// The MemArena class
MemArena g_arena;
// ==============

u8 *m_pPhysicalScratchPad;
u8 *m_pUncachedScratchPad;
// 64-bit: Pointers to high-mem mirrors
// 32-bit: Same as above
u8 *m_pPhysicalRAM[3];
u8 *m_pUncachedRAM[3];
u8 *m_pKernelRAM[3];	// RAM mirrored up to "kernel space". Fully accessible at all times currently.
// Technically starts at 0xA0000000, which we don't properly support (but we don't really support kernel code).
// This matches how we handle 32-bit masking.
u8 *m_pUncachedKernelRAM[3];

// VRAM is mirrored 4 times. The second and fourth mirrors are swizzled.
// In practice, a game accessing the mirrors is most likely deswizzling the depth buffer.
u8 *m_pPhysicalVRAM[4];
u8 *m_pUncachedVRAM[4];

// Holds the ending address of the PSP's user space.
// Required for HD Remasters to work properly.
// This replaces RAM_NORMAL_SIZE at runtime.
u32 g_MemorySize;
// Used to store the PSP model on game startup.
u32 g_PSPModel;

std::recursive_mutex g_shutdownLock;

// We don't declare the IO region in here since it's handled by other means.
static MemoryView views[] =
{
	{&m_pPhysicalScratchPad,  0x00010000, SCRATCHPAD_SIZE, 0},
	{&m_pUncachedScratchPad,  0x40010000, SCRATCHPAD_SIZE, MV_MIRROR_PREVIOUS},
	{&m_pPhysicalVRAM[0],     0x04000000, 0x00200000, 0},
	{&m_pPhysicalVRAM[1],     0x04200000, 0x00200000, MV_MIRROR_PREVIOUS},
	{&m_pPhysicalVRAM[2],     0x04400000, 0x00200000, MV_MIRROR_PREVIOUS},
	{&m_pPhysicalVRAM[3],     0x04600000, 0x00200000, MV_MIRROR_PREVIOUS},
	{&m_pUncachedVRAM[0],     0x44000000, 0x00200000, MV_MIRROR_PREVIOUS},
	{&m_pUncachedVRAM[1],     0x44200000, 0x00200000, MV_MIRROR_PREVIOUS},
	{&m_pUncachedVRAM[2],     0x44400000, 0x00200000, MV_MIRROR_PREVIOUS},
	{&m_pUncachedVRAM[3],     0x44600000, 0x00200000, MV_MIRROR_PREVIOUS},
	{&m_pPhysicalRAM[0],      0x08000000, g_MemorySize, MV_IS_PRIMARY_RAM},	// only from 0x08800000 is it usable (last 24 megs)
	{&m_pUncachedRAM[0],      0x48000000, g_MemorySize, MV_MIRROR_PREVIOUS | MV_IS_PRIMARY_RAM},
	{&m_pKernelRAM[0],        0x88000000, g_MemorySize, MV_MIRROR_PREVIOUS | MV_IS_PRIMARY_RAM | MV_KERNEL},
	{&m_pUncachedKernelRAM[0],0xC8000000, g_MemorySize, MV_MIRROR_PREVIOUS | MV_IS_PRIMARY_RAM | MV_KERNEL},
	// Starts at memory + 31 MB.
	{&m_pPhysicalRAM[1],      0x09F00000, g_MemorySize, MV_IS_EXTRA1_RAM},
	{&m_pUncachedRAM[1],      0x49F00000, g_MemorySize, MV_MIRROR_PREVIOUS | MV_IS_EXTRA1_RAM},
	{&m_pKernelRAM[1],        0x89F00000, g_MemorySize, MV_MIRROR_PREVIOUS | MV_IS_EXTRA1_RAM | MV_KERNEL},
	{&m_pUncachedKernelRAM[1],0xC9F00000, g_MemorySize, MV_MIRROR_PREVIOUS | MV_IS_EXTRA1_RAM | MV_KERNEL},
	// Starts at memory + 31 * 2 MB.
	{&m_pPhysicalRAM[2],      0x0BE00000, g_MemorySize, MV_IS_EXTRA2_RAM},
	{&m_pUncachedRAM[2],      0x4BE00000, g_MemorySize, MV_MIRROR_PREVIOUS | MV_IS_EXTRA2_RAM},
	{&m_pKernelRAM[2],        0x8BE00000, g_MemorySize, MV_MIRROR_PREVIOUS | MV_IS_EXTRA2_RAM | MV_KERNEL},
	{&m_pUncachedKernelRAM[2],0xCBE00000, g_MemorySize, MV_MIRROR_PREVIOUS | MV_IS_EXTRA2_RAM | MV_KERNEL},

	// TODO: There are a few swizzled mirrors of VRAM, not sure about the best way to
	// implement those.
};

static const int num_views = sizeof(views) / sizeof(MemoryView);

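// For quick reference, the guest address map the table above produces is roughly:
//   0x00010000            scratchpad (16 KB)
//   0x04000000-0x047FFFFF VRAM (2 MB, mirrored four times)
//   0x08000000-...        main RAM (user space from 0x08800000)
//   0x4xxxxxxx            uncached mirrors of the above
//   0x8xxxxxxx/0xCxxxxxxx kernel-space mirrors of RAM
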
inline static bool CanIgnoreView(const MemoryView &view) {
#ifdef MASKED_PSP_MEMORY
	// Basically, 32-bit platforms can ignore views that are masked out anyway.
	return (view.flags & MV_MIRROR_PREVIOUS) && (view.virtual_address & ~MEMVIEW32_MASK) != 0;
#else
	return false;
#endif
}

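// Rough example of what the masking above relies on: with MASKED_PSP_MEMORY, guest addresses
// are masked with MEMVIEW32_MASK before use, so a mirrored address such as 0x44000000 lands on
// the same host pages as 0x04000000 and the mirror view itself never has to be mapped.
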
#if PPSSPP_PLATFORM(IOS) && PPSSPP_ARCH(64BIT)
#define SKIP(a_flags, b_flags) \
	if ((b_flags) & MV_KERNEL) \
		continue;
#else
#define SKIP(a_flags, b_flags) \
	;
#endif

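// On 64-bit iOS, SKIP drops every MV_KERNEL view, presumably because the reachable virtual
// address range there is too small to also hold the 0x88000000/0xC8000000 mirrors. Everywhere
// else it expands to a lone ';' and all views are considered.
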
static bool Memory_TryBase(u32 flags) {
	// OK, we know where to find free space. Now grab it!
	// We just mimic the popular BAT setup.

	size_t position = 0;
	size_t last_position = 0;

	// Zero all the pointers to be sure.
	for (int i = 0; i < num_views; i++) {
		if (views[i].out_ptr)
			*views[i].out_ptr = 0;
	}

	int i;
	for (i = 0; i < num_views; i++) {
		const MemoryView &view = views[i];
		if (view.size == 0)
			continue;
		SKIP(flags, view.flags);

		if (view.flags & MV_MIRROR_PREVIOUS) {
			position = last_position;
		}
#ifndef MASKED_PSP_MEMORY
		*view.out_ptr = (u8*)g_arena.CreateView(
			position, view.size, base + view.virtual_address);
		if (!*view.out_ptr) {
			DEBUG_LOG(Log::MemMap, "Failed at view %d", i);
			goto bail;
		}
#else
		if (CanIgnoreView(view)) {
			// This is handled by address masking in 32-bit, no view needs to be created.
			*view.out_ptr = *views[i - 1].out_ptr;
		} else {
			*view.out_ptr = (u8*)g_arena.CreateView(
				position, view.size, base + (view.virtual_address & MEMVIEW32_MASK));
			if (!*view.out_ptr) {
				DEBUG_LOG(Log::MemMap, "Failed at view %d", i);
				goto bail;
			}
		}
#endif
		last_position = position;
		position += g_arena.roundup(view.size);
	}

	return true;
bail:
	// Argh! ERROR! Free what we grabbed so far so we can try again.
	for (int j = 0; j <= i; j++) {
		if (views[j].size == 0)
			continue;
		SKIP(flags, views[j].flags);
		if (*views[j].out_ptr) {
			if (!CanIgnoreView(views[j])) {
				g_arena.ReleaseView(0, *views[j].out_ptr, views[j].size);
			}
			*views[j].out_ptr = NULL;
		}
	}
	return false;
}

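// Memory_TryBase() is all-or-nothing: if any view fails to map, it unwinds whatever it already
// mapped and returns false, which lets MemoryMap_Setup() below simply retry with a different
// base address while probing.
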
bool MemoryMap_Setup(u32 flags) {
#if PPSSPP_PLATFORM(UWP)
	// We reserve the memory, then simply commit in TryBase.
	base = (u8*)VirtualAllocFromApp(0, 0x10000000, MEM_RESERVE, PAGE_READWRITE);
#else

	// Figure out how much memory we need to allocate in total.
	size_t total_mem = 0;
	for (int i = 0; i < num_views; i++) {
		if (views[i].size == 0)
			continue;
		SKIP(flags, views[i].flags);
		if (!CanIgnoreView(views[i]))
			total_mem += g_arena.roundup(views[i].size);
	}

	// Grab some pagefile backed memory out of the void ...
	if (!g_arena.GrabMemSpace(total_mem)) {
		// It'll already have logged.
		return false;
	}
#endif

#if !PPSSPP_PLATFORM(ANDROID)
	if (g_arena.NeedsProbing()) {
		int base_attempts = 0;
#if PPSSPP_PLATFORM(WINDOWS) && PPSSPP_ARCH(32BIT)
		// Try a whole range of possible bases. Return once we get a valid one.
		uintptr_t max_base_addr = 0x7FFF0000 - 0x10000000;
		uintptr_t min_base_addr = 0x01000000;
		uintptr_t stride = 0x400000;
#elif PPSSPP_ARCH(ARM64) && PPSSPP_PLATFORM(IOS)
		// iOS
		uintptr_t max_base_addr = 0x1FFFF0000ULL - 0x80000000ULL;
		uintptr_t min_base_addr = 0x100000000ULL;
		uintptr_t stride = 0x800000;
#else
		uintptr_t max_base_addr = 0;
		uintptr_t min_base_addr = 0;
		uintptr_t stride = 0;
		ERROR_LOG(Log::MemMap, "MemoryMap_Setup: Hit a wrong path, should not be needed on this platform.");
		return false;
#endif
		for (uintptr_t base_addr = min_base_addr; base_addr < max_base_addr; base_addr += stride) {
			base_attempts++;
			base = (u8 *)base_addr;
			if (Memory_TryBase(flags)) {
				INFO_LOG(Log::MemMap, "Found valid memory base at %p after %i tries.", base, base_attempts);
				return true;
			}
		}
		ERROR_LOG(Log::MemMap, "MemoryMap_Setup: Failed finding a memory base.");
		return false;
	}
	else
#endif
	{
#if !PPSSPP_PLATFORM(UWP)
		base = g_arena.Find4GBBase();
		if (!base) {
			return false;
		}
#endif
	}

	// Should return true...
	return Memory_TryBase(flags);
}

void MemoryMap_Shutdown(u32 flags) {
	size_t position = 0;
	size_t last_position = 0;

	for (int i = 0; i < num_views; i++) {
		if (views[i].size == 0)
			continue;
		SKIP(flags, views[i].flags);

		if (views[i].flags & MV_MIRROR_PREVIOUS) {
			position = last_position;
		}

		if (*views[i].out_ptr)
			g_arena.ReleaseView(position, *views[i].out_ptr, views[i].size);
		*views[i].out_ptr = nullptr;

		last_position = position;
		position += g_arena.roundup(views[i].size);
	}
	g_arena.ReleaseSpace();

#if PPSSPP_PLATFORM(UWP)
	VirtualFree(base, 0, MEM_RELEASE);
#endif
}

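// Init() below splits g_MemorySize across up to three views of at most 31 MB each, since some
// mobile platforms cap a single mapping below 32 MB. As a worked (hypothetical) example, a 64 MB
// configuration becomes 31 MB at 0x08000000, 31 MB at 0x09F00000 and the remaining 2 MB at
// 0x0BE00000.
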
bool Init() {
	// On some 32-bit platforms (like Android, iOS, etc.), you can only map < 32 megs at a time.
	const static int MAX_MMAP_SIZE = 31 * 1024 * 1024;
	_dbg_assert_msg_(g_MemorySize <= MAX_MMAP_SIZE * 3, "ACK - too much memory for three mmap views.");
	for (size_t i = 0; i < ARRAY_SIZE(views); i++) {
		if (views[i].flags & MV_IS_PRIMARY_RAM)
			views[i].size = std::min((int)g_MemorySize, MAX_MMAP_SIZE);
		if (views[i].flags & MV_IS_EXTRA1_RAM)
			views[i].size = std::min(std::max((int)g_MemorySize - MAX_MMAP_SIZE, 0), MAX_MMAP_SIZE);
		if (views[i].flags & MV_IS_EXTRA2_RAM)
			views[i].size = std::min(std::max((int)g_MemorySize - MAX_MMAP_SIZE * 2, 0), MAX_MMAP_SIZE);
	}

	int flags = 0;
	if (!MemoryMap_Setup(flags)) {
		return false;
	}

	INFO_LOG(Log::MemMap, "Memory system initialized. Base at %p (RAM @ %p, uncached @ %p)",
		base, m_pPhysicalRAM, m_pUncachedRAM);

	MemFault_Init();
	return true;
}

void Reinit() {
	_assert_msg_(PSP_IsInited(), "Cannot reinit during startup/shutdown");
	Core_NotifyLifecycle(CoreLifecycle::MEMORY_REINITING);
	Shutdown();
	Init();
	Core_NotifyLifecycle(CoreLifecycle::MEMORY_REINITED);
}

static void DoMemoryVoid(PointerWrap &p, uint32_t start, uint32_t size) {
	uint8_t *d = GetPointerWrite(start);
	uint8_t *&storage = *p.ptr;

	// We only handle aligned data and sizes.
	if ((size & 0x3F) != 0 || ((uintptr_t)d & 0x3F) != 0)
		return p.DoVoid(d, size);

	switch (p.mode) {
	case PointerWrap::MODE_READ:
		ParallelMemcpy(&g_threadManager, d, storage, size);
		break;
	case PointerWrap::MODE_WRITE:
		ParallelMemcpy(&g_threadManager, storage, d, size);
		break;
	case PointerWrap::MODE_MEASURE:
		// Nothing to do here.
		break;
	case PointerWrap::MODE_VERIFY:
		ParallelRangeLoop(&g_threadManager, [&](int l, int h) {
			for (int i = l; i < h; i++)
				_dbg_assert_msg_(d[i] == storage[i], "Savestate verification failure: %d (0x%X) (at %p) != %d (0x%X) (at %p).\n", d[i], d[i], &d[i], storage[i], storage[i], &storage[i]);
		}, 0, size, 128);
		break;
	case PointerWrap::MODE_NOOP:
		break;
	}
	storage += size;
}

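// DoMemoryVoid() streams a raw block of guest memory into or out of a savestate. The parallel
// fast path is only taken when both the size and the host pointer are 64-byte aligned; anything
// else falls back to the plain PointerWrap::DoVoid() path.
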
void DoState(PointerWrap &p) {
	auto s = p.Section("Memory", 1, 3);
	if (!s)
		return;

	if (s < 2) {
		if (!g_RemasterMode)
			g_MemorySize = RAM_NORMAL_SIZE;
		g_PSPModel = PSP_MODEL_FAT;
	} else if (s == 2) {
		// In version 2, we determine memory size based on PSP model.
		u32 oldMemorySize = g_MemorySize;
		Do(p, g_PSPModel);
		p.DoMarker("PSPModel");
		if (!g_RemasterMode) {
			g_MemorySize = g_PSPModel == PSP_MODEL_FAT ? RAM_NORMAL_SIZE : RAM_DOUBLE_SIZE;
			if (oldMemorySize < g_MemorySize) {
				Reinit();
			}
		}
	} else {
		// In version 3, we started just saving the memory size directly.
		// It's no longer based strictly on the PSP model.
		u32 oldMemorySize = g_MemorySize;
		Do(p, g_PSPModel);
		p.DoMarker("PSPModel");
		Do(p, g_MemorySize);
		if (oldMemorySize != g_MemorySize) {
			Reinit();
		}
	}

	DoMemoryVoid(p, PSP_GetKernelMemoryBase(), g_MemorySize);
	p.DoMarker("RAM");

	DoMemoryVoid(p, PSP_GetVidMemBase(), VRAM_SIZE);
	p.DoMarker("VRAM");
	DoArray(p, m_pPhysicalScratchPad, SCRATCHPAD_SIZE);
	p.DoMarker("ScratchPad");
}

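// Savestate note: the "Memory" section version controls how g_MemorySize is recovered (v1: fixed
// defaults, v2: derived from the PSP model, v3: stored explicitly), after which RAM, VRAM and the
// scratchpad contents are serialized in that order.
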
void Shutdown() {
	std::lock_guard<std::recursive_mutex> guard(g_shutdownLock);
	u32 flags = 0;
	MemoryMap_Shutdown(flags);
	base = nullptr;
	DEBUG_LOG(Log::MemMap, "Memory system shut down.");
}

bool IsActive() {
	return base != nullptr;
}

// Wanting to avoid include pollution, MemMap.h is included a lot.
MemoryInitedLock::MemoryInitedLock()
{
	g_shutdownLock.lock();
}
MemoryInitedLock::~MemoryInitedLock()
{
	g_shutdownLock.unlock();
}

MemoryInitedLock Lock()
{
	return MemoryInitedLock();
}

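// Sketch of intended usage (hypothetical caller): hold the RAII lock while touching guest memory
// from another thread so Shutdown() cannot tear the mapping down mid-access:
//
//   auto guard = Memory::Lock();
//   if (Memory::IsActive()) {
//       // safe to use Memory:: accessors here
//   }
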
__forceinline static Opcode Read_Instruction(u32 address, bool resolveReplacements, Opcode inst)
{
	if (!MIPS_IS_EMUHACK(inst.encoding)) {
		return inst;
	}

	// No mutex on jit access here, but we assume the caller has locked, if necessary.
	if (MIPS_IS_RUNBLOCK(inst.encoding) && MIPSComp::jit) {
		inst = MIPSComp::jit->GetOriginalOp(inst);
		if (resolveReplacements && MIPS_IS_REPLACEMENT(inst)) {
			u32 op;
			if (GetReplacedOpAt(address, &op)) {
				if (MIPS_IS_EMUHACK(op)) {
					ERROR_LOG(Log::MemMap, "WTF 1");
					return Opcode(op);
				} else {
					return Opcode(op);
				}
			} else {
				ERROR_LOG(Log::MemMap, "Replacement, but no replacement op? %08x", inst.encoding);
			}
		}
		return inst;
	} else if (resolveReplacements && MIPS_IS_REPLACEMENT(inst.encoding)) {
		u32 op;
		if (GetReplacedOpAt(address, &op)) {
			if (MIPS_IS_EMUHACK(op)) {
				ERROR_LOG(Log::MemMap, "WTF 2");
				return Opcode(op);
			} else {
				return Opcode(op);
			}
		} else {
			return inst;
		}
	} else {
		return inst;
	}
}

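// The public Read_Instruction variants below all funnel through the helper above: when a word
// has been overwritten with an emuhack (a JIT run-block marker or an HLE replacement stub), the
// helper recovers the original MIPS opcode so callers such as the debugger see real instructions.
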
Opcode Read_Instruction(u32 address, bool resolveReplacements)
{
	Opcode inst = Opcode(Read_U32(address));
	return Read_Instruction(address, resolveReplacements, inst);
}

Opcode ReadUnchecked_Instruction(u32 address, bool resolveReplacements)
{
	Opcode inst = Opcode(ReadUnchecked_U32(address));
	return Read_Instruction(address, resolveReplacements, inst);
}

Opcode Read_Opcode_JIT(u32 address)
{
	Opcode inst = Opcode(Read_U32(address));
	// No mutex around jit access here, but we assume the caller has one, if necessary.
	if (MIPS_IS_RUNBLOCK(inst.encoding) && MIPSComp::jit) {
		return MIPSComp::jit->GetOriginalOp(inst);
	} else {
		return inst;
	}
}

// WARNING! No checks!
// We assume that _Address is cached
void Write_Opcode_JIT(const u32 _Address, const Opcode& _Value)
{
	Memory::WriteUnchecked_U32(_Value.encoding, _Address);
}

void Memset(const u32 _Address, const u8 _iValue, const u32 _iLength, const char *tag) {
	if (IsValidRange(_Address, _iLength)) {
		uint8_t *ptr = GetPointerWriteUnchecked(_Address);
		memset(ptr, _iValue, _iLength);
	} else {
		for (size_t i = 0; i < _iLength; i++)
			Write_U8(_iValue, (u32)(_Address + i));
	}

	NotifyMemInfo(MemBlockFlags::WRITE, _Address, _iLength, tag, strlen(tag));
}

} // namespace

void PSPPointerNotifyRW(int rw, uint32_t ptr, uint32_t bytes, const char *tag, size_t tagLen) {
	if (MemBlockInfoDetailed(bytes)) {
		if (rw & 1)
			NotifyMemInfo(MemBlockFlags::WRITE, ptr, bytes, tag, tagLen);
		if (rw & 2)
			NotifyMemInfo(MemBlockFlags::READ, ptr, bytes, tag, tagLen);
	}
}