GitHub Repository: freebsd/freebsd-src
Path: blob/main/contrib/llvm-project/llvm/lib/ExecutionEngine/RuntimeDyld/Targets/RuntimeDyldMachOAArch64.h
//===-- RuntimeDyldMachOAArch64.h -- MachO/AArch64 specific code. -*- C++ -*-=//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIB_EXECUTIONENGINE_RUNTIMEDYLD_TARGETS_RUNTIMEDYLDMACHOAARCH64_H
#define LLVM_LIB_EXECUTIONENGINE_RUNTIMEDYLD_TARGETS_RUNTIMEDYLDMACHOAARCH64_H

#include "../RuntimeDyldMachO.h"
#include "llvm/Support/Endian.h"

#define DEBUG_TYPE "dyld"

namespace llvm {

class RuntimeDyldMachOAArch64
    : public RuntimeDyldMachOCRTPBase<RuntimeDyldMachOAArch64> {
public:

  typedef uint64_t TargetPtrT;

  RuntimeDyldMachOAArch64(RuntimeDyld::MemoryManager &MM,
                          JITSymbolResolver &Resolver)
      : RuntimeDyldMachOCRTPBase(MM, Resolver) {}

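  // The only stubs this class emits are GOT entries: one 64-bit pointer per
  // target symbol, filled in via an absolute ARM64_RELOC_UNSIGNED relocation
  // (see processGOTRelocation below). Hence the 8-byte stub size and 8-byte
  // stub alignment.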
  unsigned getMaxStubSize() const override { return 8; }

  Align getStubAlignment() override { return Align(8); }

  /// Extract the addend encoded in the instruction / memory location.
  Expected<int64_t> decodeAddend(const RelocationEntry &RE) const {
    const SectionEntry &Section = Sections[RE.SectionID];
    uint8_t *LocalAddress = Section.getAddressWithOffset(RE.Offset);
    unsigned NumBytes = 1 << RE.Size;
    int64_t Addend = 0;
    // Verify that the relocation has the correct size and alignment.
    switch (RE.RelType) {
    default: {
      std::string ErrMsg;
      {
        raw_string_ostream ErrStream(ErrMsg);
        ErrStream << "Unsupported relocation type: "
                  << getRelocName(RE.RelType);
      }
      return make_error<StringError>(std::move(ErrMsg),
                                     inconvertibleErrorCode());
    }
    case MachO::ARM64_RELOC_POINTER_TO_GOT:
    case MachO::ARM64_RELOC_UNSIGNED: {
      if (NumBytes != 4 && NumBytes != 8) {
        std::string ErrMsg;
        {
          raw_string_ostream ErrStream(ErrMsg);
          ErrStream << "Invalid relocation size for relocation "
                    << getRelocName(RE.RelType);
        }
        return make_error<StringError>(std::move(ErrMsg),
                                       inconvertibleErrorCode());
      }
      break;
    }
    case MachO::ARM64_RELOC_BRANCH26:
    case MachO::ARM64_RELOC_PAGE21:
    case MachO::ARM64_RELOC_PAGEOFF12:
    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
    case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12:
      assert(NumBytes == 4 && "Invalid relocation size.");
      assert((((uintptr_t)LocalAddress & 0x3) == 0) &&
             "Instruction address is not aligned to 4 bytes.");
      break;
    }

    switch (RE.RelType) {
    default:
      llvm_unreachable("Unsupported relocation type!");
    case MachO::ARM64_RELOC_POINTER_TO_GOT:
    case MachO::ARM64_RELOC_UNSIGNED:
      // This could be an unaligned memory location.
      if (NumBytes == 4)
        Addend = *reinterpret_cast<support::ulittle32_t *>(LocalAddress);
      else
        Addend = *reinterpret_cast<support::ulittle64_t *>(LocalAddress);
      break;
    case MachO::ARM64_RELOC_BRANCH26: {
      // Verify that the relocation points to a B/BL instruction.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      assert(((*p & 0xFC000000) == 0x14000000 ||
              (*p & 0xFC000000) == 0x94000000) &&
             "Expected branch instruction.");

      // Get the 26 bit addend encoded in the branch instruction and sign-extend
      // to 64 bit. The lower 2 bits are always zeros and are therefore implicit
      // (<< 2).
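      // For example, the word 0x94000010 encodes "bl #+0x40": its imm26 field
      // is 0x10, so the decoded addend is 0x10 << 2 = 64 bytes.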
      Addend = (*p & 0x03FFFFFF) << 2;
      Addend = SignExtend64(Addend, 28);
      break;
    }
    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
    case MachO::ARM64_RELOC_PAGE21: {
      // Verify that the relocation points to the expected adrp instruction.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      assert((*p & 0x9F000000) == 0x90000000 && "Expected adrp instruction.");

      // Get the 21 bit addend encoded in the adrp instruction and sign-extend
      // to 64 bit. The lower 12 bits (4096 byte page) are always zeros and are
      // therefore implicit (<< 12).
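      // In the ADRP encoding, immlo occupies bits 30:29 and immhi bits 23:5;
      // together they form a signed 21-bit page count, giving a reach of
      // +/- 4 GiB once scaled by the 4096-byte page size.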
      Addend = ((*p & 0x60000000) >> 29) | ((*p & 0x01FFFFE0) >> 3) << 12;
      Addend = SignExtend64(Addend, 33);
      break;
    }
    case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12: {
      // Verify that the relocation points to one of the expected load / store
      // instructions.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      (void)p;
      assert((*p & 0x3B000000) == 0x39000000 &&
             "Only expected load / store instructions.");
      [[fallthrough]];
    }
    case MachO::ARM64_RELOC_PAGEOFF12: {
      // Verify that the relocation points to one of the expected load / store
      // or add / sub instructions.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      assert((((*p & 0x3B000000) == 0x39000000) ||
              ((*p & 0x11C00000) == 0x11000000)) &&
             "Expected load / store or add/sub instruction.");

      // Get the 12 bit addend encoded in the instruction.
      Addend = (*p & 0x003FFC00) >> 10;

      // Check which instruction we are decoding to obtain the implicit shift
      // factor of the instruction.
      int ImplicitShift = 0;
      if ((*p & 0x3B000000) == 0x39000000) { // << load / store
        // For load / store instructions the size is encoded in bits 31:30.
        ImplicitShift = ((*p >> 30) & 0x3);
        if (ImplicitShift == 0) {
          // Check if this is a vector op to get the correct shift value.
          if ((*p & 0x04800000) == 0x04800000)
            ImplicitShift = 4;
        }
      }
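      // For example, "ldr x0, [x1, #16]" is a 64-bit load (size bits 31:30 are
      // 0b11), so it stores imm12 = 2 and the byte addend is 2 << 3 = 16.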
      // Compensate for implicit shift.
      Addend <<= ImplicitShift;
      break;
    }
    }
    return Addend;
  }

  /// Encode the addend in the instruction / memory location.
  void encodeAddend(uint8_t *LocalAddress, unsigned NumBytes,
                    MachO::RelocationInfoType RelType, int64_t Addend) const {
    // Verify that the relocation has the correct alignment.
    switch (RelType) {
    default:
      llvm_unreachable("Unsupported relocation type!");
    case MachO::ARM64_RELOC_POINTER_TO_GOT:
    case MachO::ARM64_RELOC_UNSIGNED:
      assert((NumBytes == 4 || NumBytes == 8) && "Invalid relocation size.");
      break;
    case MachO::ARM64_RELOC_BRANCH26:
    case MachO::ARM64_RELOC_PAGE21:
    case MachO::ARM64_RELOC_PAGEOFF12:
    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
    case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12:
      assert(NumBytes == 4 && "Invalid relocation size.");
      assert((((uintptr_t)LocalAddress & 0x3) == 0) &&
             "Instruction address is not aligned to 4 bytes.");
      break;
    }

    switch (RelType) {
    default:
      llvm_unreachable("Unsupported relocation type!");
    case MachO::ARM64_RELOC_POINTER_TO_GOT:
    case MachO::ARM64_RELOC_UNSIGNED:
      // This could be an unaligned memory location.
      if (NumBytes == 4)
        *reinterpret_cast<support::ulittle32_t *>(LocalAddress) = Addend;
      else
        *reinterpret_cast<support::ulittle64_t *>(LocalAddress) = Addend;
      break;
    case MachO::ARM64_RELOC_BRANCH26: {
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      // Verify that the relocation points to the expected branch instruction.
      assert(((*p & 0xFC000000) == 0x14000000 ||
              (*p & 0xFC000000) == 0x94000000) &&
             "Expected branch instruction.");

      // Verify addend value.
      assert((Addend & 0x3) == 0 && "Branch target is not aligned");
      assert(isInt<28>(Addend) && "Branch target is out of range.");

      // Encode the addend as 26 bit immediate in the branch instruction.
      *p = (*p & 0xFC000000) | ((uint32_t)(Addend >> 2) & 0x03FFFFFF);
      break;
    }
    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
    case MachO::ARM64_RELOC_PAGE21: {
      // Verify that the relocation points to the expected adrp instruction.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      assert((*p & 0x9F000000) == 0x90000000 && "Expected adrp instruction.");

      // Check that the addend fits into 21 bits (+ 12 lower bits).
      assert((Addend & 0xFFF) == 0 && "ADRP target is not page aligned.");
      assert(isInt<33>(Addend) && "Invalid page reloc value.");

      // Encode the addend into the instruction.
      uint32_t ImmLoValue = ((uint64_t)Addend << 17) & 0x60000000;
      uint32_t ImmHiValue = ((uint64_t)Addend >> 9) & 0x00FFFFE0;
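      // Addend bits 13:12 land in immlo (bits 30:29) and bits 32:14 in immhi
      // (bits 23:5). For example, Addend = 0x5000 (5 pages) yields immlo = 1
      // and immhi = 1, i.e. a page count of 0b101.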
      *p = (*p & 0x9F00001F) | ImmHiValue | ImmLoValue;
      break;
    }
    case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12: {
      // Verify that the relocation points to one of the expected load / store
      // instructions.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      assert((*p & 0x3B000000) == 0x39000000 &&
             "Only expected load / store instructions.");
      (void)p;
      [[fallthrough]];
    }
    case MachO::ARM64_RELOC_PAGEOFF12: {
      // Verify that the relocation points to one of the expected load / store
      // or add / sub instructions.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      assert((((*p & 0x3B000000) == 0x39000000) ||
              ((*p & 0x11C00000) == 0x11000000)) &&
             "Expected load / store or add/sub instruction.");

      // Check which instruction we are decoding to obtain the implicit shift
      // factor of the instruction and verify alignment.
      int ImplicitShift = 0;
      if ((*p & 0x3B000000) == 0x39000000) { // << load / store
        // For load / store instructions the size is encoded in bits 31:30.
        ImplicitShift = ((*p >> 30) & 0x3);
        switch (ImplicitShift) {
        case 0:
          // Check if this is a vector op to get the correct shift value.
          if ((*p & 0x04800000) == 0x04800000) {
            ImplicitShift = 4;
            assert(((Addend & 0xF) == 0) &&
                   "128-bit LDR/STR not 16-byte aligned.");
          }
          break;
        case 1:
          assert(((Addend & 0x1) == 0) && "16-bit LDR/STR not 2-byte aligned.");
          break;
        case 2:
          assert(((Addend & 0x3) == 0) && "32-bit LDR/STR not 4-byte aligned.");
          break;
        case 3:
          assert(((Addend & 0x7) == 0) && "64-bit LDR/STR not 8-byte aligned.");
          break;
        }
      }
      // Compensate for implicit shift.
      Addend >>= ImplicitShift;
      assert(isUInt<12>(Addend) && "Addend cannot be encoded.");

      // Encode the addend into the instruction.
      *p = (*p & 0xFFC003FF) | ((uint32_t)(Addend << 10) & 0x003FFC00);
      break;
    }
    }
  }

  Expected<relocation_iterator>
  processRelocationRef(unsigned SectionID, relocation_iterator RelI,
                       const ObjectFile &BaseObjT,
                       ObjSectionToIDMap &ObjSectionToID,
                       StubMap &Stubs) override {
    const MachOObjectFile &Obj =
        static_cast<const MachOObjectFile &>(BaseObjT);
    MachO::any_relocation_info RelInfo =
        Obj.getRelocation(RelI->getRawDataRefImpl());

    if (Obj.isRelocationScattered(RelInfo))
      return make_error<RuntimeDyldError>("Scattered relocations not supported "
                                          "for MachO AArch64");

    // ARM64 has an ARM64_RELOC_ADDEND relocation type that carries an explicit
    // addend for the following relocation. If found: (1) store the associated
    // addend, (2) consume the next relocation, and (3) use the stored addend to
    // override the addend.
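    // For example, "adrp x0, _foo@PAGE + 8" is typically emitted as an
    // ARM64_RELOC_ADDEND with value 8 immediately followed by the
    // ARM64_RELOC_PAGE21 for _foo, with the instruction's immediate left zero.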
    int64_t ExplicitAddend = 0;
    if (Obj.getAnyRelocationType(RelInfo) == MachO::ARM64_RELOC_ADDEND) {
      assert(!Obj.getPlainRelocationExternal(RelInfo));
      assert(!Obj.getAnyRelocationPCRel(RelInfo));
      assert(Obj.getAnyRelocationLength(RelInfo) == 2);
      int64_t RawAddend = Obj.getPlainRelocationSymbolNum(RelInfo);
      // Sign-extend the 24-bit addend to 64 bits.
      ExplicitAddend = SignExtend64(RawAddend, 24);
      ++RelI;
      RelInfo = Obj.getRelocation(RelI->getRawDataRefImpl());
    }

    if (Obj.getAnyRelocationType(RelInfo) == MachO::ARM64_RELOC_SUBTRACTOR)
      return processSubtractRelocation(SectionID, RelI, Obj, ObjSectionToID);

    RelocationEntry RE(getRelocationEntry(SectionID, Obj, RelI));

    if (RE.RelType == MachO::ARM64_RELOC_POINTER_TO_GOT) {
      bool Valid =
          (RE.Size == 2 && RE.IsPCRel) || (RE.Size == 3 && !RE.IsPCRel);
      if (!Valid)
        return make_error<StringError>("ARM64_RELOC_POINTER_TO_GOT supports "
                                       "32-bit pc-rel or 64-bit absolute only",
                                       inconvertibleErrorCode());
    }

    if (auto Addend = decodeAddend(RE))
      RE.Addend = *Addend;
    else
      return Addend.takeError();

    assert((ExplicitAddend == 0 || RE.Addend == 0) && "Relocation has "
           "ARM64_RELOC_ADDEND and embedded addend in the instruction.");
    if (ExplicitAddend)
      RE.Addend = ExplicitAddend;

    RelocationValueRef Value;
    if (auto ValueOrErr = getRelocationValueRef(Obj, RelI, RE, ObjSectionToID))
      Value = *ValueOrErr;
    else
      return ValueOrErr.takeError();

    bool IsExtern = Obj.getPlainRelocationExternal(RelInfo);
    if (RE.RelType == MachO::ARM64_RELOC_POINTER_TO_GOT) {
      // We'll take care of the offset in processGOTRelocation.
      Value.Offset = 0;
    } else if (!IsExtern && RE.IsPCRel)
      makeValueAddendPCRel(Value, RelI, 1 << RE.Size);

    RE.Addend = Value.Offset;

    if (RE.RelType == MachO::ARM64_RELOC_GOT_LOAD_PAGE21 ||
        RE.RelType == MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12 ||
        RE.RelType == MachO::ARM64_RELOC_POINTER_TO_GOT)
      processGOTRelocation(RE, Value, Stubs);
    else {
      if (Value.SymbolName)
        addRelocationForSymbol(RE, Value.SymbolName);
      else
        addRelocationForSection(RE, Value.SectionID);
    }

    return ++RelI;
  }

  void resolveRelocation(const RelocationEntry &RE, uint64_t Value) override {
    LLVM_DEBUG(dumpRelocationToResolve(RE, Value));

    const SectionEntry &Section = Sections[RE.SectionID];
    uint8_t *LocalAddress = Section.getAddressWithOffset(RE.Offset);
    MachO::RelocationInfoType RelType =
        static_cast<MachO::RelocationInfoType>(RE.RelType);

    switch (RelType) {
    default:
      llvm_unreachable("Invalid relocation type!");
    case MachO::ARM64_RELOC_UNSIGNED: {
      assert(!RE.IsPCRel && "PCRel and ARM64_RELOC_UNSIGNED not supported");
      // Mask in the target value a byte at a time (we don't have an alignment
      // guarantee for the target address, so this is safest).
      if (RE.Size < 2)
        llvm_unreachable("Invalid size for ARM64_RELOC_UNSIGNED");

      encodeAddend(LocalAddress, 1 << RE.Size, RelType, Value + RE.Addend);
      break;
    }

    case MachO::ARM64_RELOC_POINTER_TO_GOT: {
      assert(((RE.Size == 2 && RE.IsPCRel) || (RE.Size == 3 && !RE.IsPCRel)) &&
             "ARM64_RELOC_POINTER_TO_GOT only supports 32-bit pc-rel or 64-bit "
             "absolute");
      // Addend is the GOT entry address and RE.Offset the target of the
      // relocation.
      uint64_t Result =
          RE.IsPCRel ? (RE.Addend - RE.Offset) : (Value + RE.Addend);
      encodeAddend(LocalAddress, 1 << RE.Size, RelType, Result);
      break;
    }

    case MachO::ARM64_RELOC_BRANCH26: {
      assert(RE.IsPCRel && "not PCRel and ARM64_RELOC_BRANCH26 not supported");
      // Check if branch is in range.
      uint64_t FinalAddress = Section.getLoadAddressWithOffset(RE.Offset);
      int64_t PCRelVal = Value - FinalAddress + RE.Addend;
      encodeAddend(LocalAddress, /*Size=*/4, RelType, PCRelVal);
      break;
    }
    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
    case MachO::ARM64_RELOC_PAGE21: {
      assert(RE.IsPCRel && "not PCRel and ARM64_RELOC_PAGE21 not supported");
      // Adjust for PC-relative relocation and offset.
      uint64_t FinalAddress = Section.getLoadAddressWithOffset(RE.Offset);
      int64_t PCRelVal =
          ((Value + RE.Addend) & (-4096)) - (FinalAddress & (-4096));
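      // For example, with the adrp at 0x100001000 and the target + addend at
      // 0x100005008, PCRelVal is 0x100005000 - 0x100001000 = 0x4000 (4 pages).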
      encodeAddend(LocalAddress, /*Size=*/4, RelType, PCRelVal);
      break;
    }
    case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12:
    case MachO::ARM64_RELOC_PAGEOFF12: {
      assert(!RE.IsPCRel && "PCRel and ARM64_RELOC_PAGEOFF12 not supported");
      // Add the offset from the symbol.
      Value += RE.Addend;
      // Mask out the page address and only use the lower 12 bits.
      Value &= 0xFFF;
      encodeAddend(LocalAddress, /*Size=*/4, RelType, Value);
      break;
    }
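    // ARM64_RELOC_SUBTRACTOR implements label-difference values such as
    // ".quad _a - _b": the stored result is SectionA's base minus SectionB's
    // base plus the addend recorded by processSubtractRelocation.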
    case MachO::ARM64_RELOC_SUBTRACTOR: {
      uint64_t SectionABase = Sections[RE.Sections.SectionA].getLoadAddress();
      uint64_t SectionBBase = Sections[RE.Sections.SectionB].getLoadAddress();
      assert((Value == SectionABase || Value == SectionBBase) &&
             "Unexpected SUBTRACTOR relocation value.");
      Value = SectionABase - SectionBBase + RE.Addend;
      writeBytesUnaligned(Value, LocalAddress, 1 << RE.Size);
      break;
    }

    case MachO::ARM64_RELOC_TLVP_LOAD_PAGE21:
    case MachO::ARM64_RELOC_TLVP_LOAD_PAGEOFF12:
      llvm_unreachable("Relocation type not yet implemented!");
    case MachO::ARM64_RELOC_ADDEND:
      llvm_unreachable("ARM64_RELOC_ADDEND should have been handled by "
                       "processRelocationRef!");
    }
  }

  Error finalizeSection(const ObjectFile &Obj, unsigned SectionID,
                        const SectionRef &Section) {
    return Error::success();
  }

private:
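  // Creates (or reuses) an 8-byte GOT entry for Value in the section's stub
  // area, records an absolute ARM64_RELOC_UNSIGNED relocation to fill it with
  // the target address, and redirects the original relocation to that entry.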
  void processGOTRelocation(const RelocationEntry &RE,
                            RelocationValueRef &Value, StubMap &Stubs) {
    assert((RE.RelType == MachO::ARM64_RELOC_POINTER_TO_GOT &&
            (RE.Size == 2 || RE.Size == 3)) ||
           RE.Size == 2);
    SectionEntry &Section = Sections[RE.SectionID];
    StubMap::const_iterator i = Stubs.find(Value);
    int64_t Offset;
    if (i != Stubs.end())
      Offset = static_cast<int64_t>(i->second);
    else {
      // FIXME: There must be a better way to do this than to check and fix the
      // alignment every time!!!
      uintptr_t BaseAddress = uintptr_t(Section.getAddress());
      uintptr_t StubAlignment = getStubAlignment().value();
      uintptr_t StubAddress =
          (BaseAddress + Section.getStubOffset() + StubAlignment - 1) &
          -StubAlignment;
      unsigned StubOffset = StubAddress - BaseAddress;
      Stubs[Value] = StubOffset;
      assert(isAligned(getStubAlignment(), StubAddress) &&
             "GOT entry not aligned");
      RelocationEntry GOTRE(RE.SectionID, StubOffset,
                            MachO::ARM64_RELOC_UNSIGNED, Value.Offset,
                            /*IsPCRel=*/false, /*Size=*/3);
      if (Value.SymbolName)
        addRelocationForSymbol(GOTRE, Value.SymbolName);
      else
        addRelocationForSection(GOTRE, Value.SectionID);
      Section.advanceStubOffset(getMaxStubSize());
      Offset = static_cast<int64_t>(StubOffset);
    }
    RelocationEntry TargetRE(RE.SectionID, RE.Offset, RE.RelType, Offset,
                             RE.IsPCRel, RE.Size);
    addRelocationForSection(TargetRE, RE.SectionID);
  }

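  // A SUBTRACTOR relocation is always paired with the UNSIGNED relocation
  // that follows it: the first names the subtrahend symbol, the second the
  // minuend, and the relocated location initially holds the extra addend.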
  Expected<relocation_iterator>
  processSubtractRelocation(unsigned SectionID, relocation_iterator RelI,
                            const ObjectFile &BaseObjT,
                            ObjSectionToIDMap &ObjSectionToID) {
    const MachOObjectFile &Obj =
        static_cast<const MachOObjectFile&>(BaseObjT);
    MachO::any_relocation_info RE =
        Obj.getRelocation(RelI->getRawDataRefImpl());

    unsigned Size = Obj.getAnyRelocationLength(RE);
    uint64_t Offset = RelI->getOffset();
    uint8_t *LocalAddress = Sections[SectionID].getAddressWithOffset(Offset);
    unsigned NumBytes = 1 << Size;

    Expected<StringRef> SubtrahendNameOrErr = RelI->getSymbol()->getName();
    if (!SubtrahendNameOrErr)
      return SubtrahendNameOrErr.takeError();
    auto SubtrahendI = GlobalSymbolTable.find(*SubtrahendNameOrErr);
    unsigned SectionBID = SubtrahendI->second.getSectionID();
    uint64_t SectionBOffset = SubtrahendI->second.getOffset();
    int64_t Addend =
        SignExtend64(readBytesUnaligned(LocalAddress, NumBytes), NumBytes * 8);

    ++RelI;
    Expected<StringRef> MinuendNameOrErr = RelI->getSymbol()->getName();
    if (!MinuendNameOrErr)
      return MinuendNameOrErr.takeError();
    auto MinuendI = GlobalSymbolTable.find(*MinuendNameOrErr);
    unsigned SectionAID = MinuendI->second.getSectionID();
    uint64_t SectionAOffset = MinuendI->second.getOffset();

    RelocationEntry R(SectionID, Offset, MachO::ARM64_RELOC_SUBTRACTOR, (uint64_t)Addend,
                      SectionAID, SectionAOffset, SectionBID, SectionBOffset,
                      false, Size);

    addRelocationForSection(R, SectionAID);

    return ++RelI;
  }

  static const char *getRelocName(uint32_t RelocType) {
    switch (RelocType) {
      case MachO::ARM64_RELOC_UNSIGNED: return "ARM64_RELOC_UNSIGNED";
      case MachO::ARM64_RELOC_SUBTRACTOR: return "ARM64_RELOC_SUBTRACTOR";
      case MachO::ARM64_RELOC_BRANCH26: return "ARM64_RELOC_BRANCH26";
      case MachO::ARM64_RELOC_PAGE21: return "ARM64_RELOC_PAGE21";
      case MachO::ARM64_RELOC_PAGEOFF12: return "ARM64_RELOC_PAGEOFF12";
      case MachO::ARM64_RELOC_GOT_LOAD_PAGE21: return "ARM64_RELOC_GOT_LOAD_PAGE21";
      case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12: return "ARM64_RELOC_GOT_LOAD_PAGEOFF12";
      case MachO::ARM64_RELOC_POINTER_TO_GOT: return "ARM64_RELOC_POINTER_TO_GOT";
      case MachO::ARM64_RELOC_TLVP_LOAD_PAGE21: return "ARM64_RELOC_TLVP_LOAD_PAGE21";
      case MachO::ARM64_RELOC_TLVP_LOAD_PAGEOFF12: return "ARM64_RELOC_TLVP_LOAD_PAGEOFF12";
      case MachO::ARM64_RELOC_ADDEND: return "ARM64_RELOC_ADDEND";
    }
    return "Unrecognized arm64 relocation type";
  }

};
}

#undef DEBUG_TYPE

#endif