GitHub Repository: freebsd/freebsd-src
Path: blob/main/contrib/llvm-project/clang/lib/AST/Interp/Program.cpp
//===--- Program.cpp - Bytecode for the constexpr VM ------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "Program.h"
#include "Context.h"
#include "Function.h"
#include "Integral.h"
#include "Opcode.h"
#include "PrimType.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h"

using namespace clang;
using namespace clang::interp;

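// Interns a native pointer and returns its index, reusing the existing
// index if the pointer was seen before.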
unsigned Program::getOrCreateNativePointer(const void *Ptr) {
  auto It = NativePointerIndices.find(Ptr);
  if (It != NativePointerIndices.end())
    return It->second;

  unsigned Idx = NativePointers.size();
  NativePointers.push_back(Ptr);
  NativePointerIndices[Ptr] = Idx;
  return Idx;
}

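// Returns the native pointer stored at the given index.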
const void *Program::getNativePointer(unsigned Idx) {
  return NativePointers[Idx];
}

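// Creates a global block for a string literal, including the null
// terminator, and returns its index in Globals.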
unsigned Program::createGlobalString(const StringLiteral *S) {
  const size_t CharWidth = S->getCharByteWidth();
  const size_t BitWidth = CharWidth * Ctx.getCharBit();

  PrimType CharType;
  switch (CharWidth) {
  case 1:
    CharType = PT_Sint8;
    break;
  case 2:
    CharType = PT_Uint16;
    break;
  case 4:
    CharType = PT_Uint32;
    break;
  default:
    llvm_unreachable("unsupported character width");
  }

  // Create a descriptor for the string.
  Descriptor *Desc =
      allocateDescriptor(S, CharType, Descriptor::GlobalMD, S->getLength() + 1,
                         /*isConst=*/true,
                         /*isTemporary=*/false,
                         /*isMutable=*/false);

  // Allocate storage for the string.
  // The byte length does not include the null terminator.
  unsigned I = Globals.size();
  unsigned Sz = Desc->getAllocSize();
  auto *G = new (Allocator, Sz) Global(Ctx.getEvalID(), Desc, /*isStatic=*/true,
                                       /*isExtern=*/false);
  G->block()->invokeCtor();

  new (G->block()->rawData()) InlineDescriptor(Desc);
  Globals.push_back(G);

  // Construct the string in storage.
  const Pointer Ptr(G->block());
  for (unsigned I = 0, N = S->getLength(); I <= N; ++I) {
    Pointer Field = Ptr.atIndex(I).narrow();
    const uint32_t CodePoint = I == N ? 0 : S->getCodeUnit(I);
    switch (CharType) {
    case PT_Sint8: {
      using T = PrimConv<PT_Sint8>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      Field.initialize();
      break;
    }
    case PT_Uint16: {
      using T = PrimConv<PT_Uint16>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      Field.initialize();
      break;
    }
    case PT_Uint32: {
      using T = PrimConv<PT_Uint32>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      Field.initialize();
      break;
    }
    default:
      llvm_unreachable("unsupported character type");
    }
  }
  return I;
}

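// Returns a Pointer to the storage block of the global at the given index.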
Pointer Program::getPtrGlobal(unsigned Idx) const {
  assert(Idx < Globals.size());
  return Pointer(Globals[Idx]->block());
}

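// Looks up the global index recorded for a declaration; if only a previous
// redeclaration was evaluated, its index is cached for this declaration too.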
std::optional<unsigned> Program::getGlobal(const ValueDecl *VD) {
  if (auto It = GlobalIndices.find(VD); It != GlobalIndices.end())
    return It->second;

  // Find any previous declarations which were already evaluated.
  std::optional<unsigned> Index;
  for (const Decl *P = VD->getPreviousDecl(); P; P = P->getPreviousDecl()) {
    if (auto It = GlobalIndices.find(P); It != GlobalIndices.end()) {
      Index = It->second;
      break;
    }
  }

  // Map the decl to the existing index.
  if (Index)
    GlobalIndices[VD] = *Index;

  return std::nullopt;
}

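// Returns the global index recorded for an expression, if any.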
std::optional<unsigned> Program::getGlobal(const Expr *E) {
  if (auto It = GlobalIndices.find(E); It != GlobalIndices.end())
    return It->second;
  return std::nullopt;
}

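// Returns the global index for a declaration, creating and registering a
// new global on first use.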
std::optional<unsigned> Program::getOrCreateGlobal(const ValueDecl *VD,
                                                   const Expr *Init) {
  if (auto Idx = getGlobal(VD))
    return Idx;

  if (auto Idx = createGlobal(VD, Init)) {
    GlobalIndices[VD] = *Idx;
    return Idx;
  }
  return std::nullopt;
}

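// Returns the index of a dummy global block for the given declaration,
// creating and caching one on first use.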
std::optional<unsigned> Program::getOrCreateDummy(const ValueDecl *VD) {
  // Dedup blocks since they are immutable and pointers cannot be compared.
  if (auto It = DummyVariables.find(VD); It != DummyVariables.end())
    return It->second;

  QualType QT = VD->getType();
  if (const auto *RT = QT->getAs<ReferenceType>())
    QT = RT->getPointeeType();

  Descriptor *Desc;
  if (std::optional<PrimType> T = Ctx.classify(QT))
    Desc = createDescriptor(VD, *T, std::nullopt, true, false);
  else
    Desc = createDescriptor(VD, QT.getTypePtr(), std::nullopt, true, false);
  if (!Desc)
    Desc = allocateDescriptor(VD);

  assert(Desc);
  Desc->makeDummy();

  assert(Desc->isDummy());

  // Allocate a block for storage.
  unsigned I = Globals.size();

  auto *G = new (Allocator, Desc->getAllocSize())
      Global(Ctx.getEvalID(), getCurrentDecl(), Desc, /*IsStatic=*/true,
             /*IsExtern=*/false);
  G->block()->invokeCtor();

  Globals.push_back(G);
  DummyVariables[VD] = I;
  return I;
}

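// Creates a global for a declaration and maps the declaration and all of
// its previous declarations to the new index.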
std::optional<unsigned> Program::createGlobal(const ValueDecl *VD,
                                              const Expr *Init) {
  bool IsStatic, IsExtern;
  if (const auto *Var = dyn_cast<VarDecl>(VD)) {
    IsStatic = Context::shouldBeGloballyIndexed(VD);
    IsExtern = Var->hasExternalStorage();
  } else if (isa<UnnamedGlobalConstantDecl, MSGuidDecl,
                 TemplateParamObjectDecl>(VD)) {
    IsStatic = true;
    IsExtern = false;
  } else {
    IsStatic = false;
    IsExtern = true;
  }
  if (auto Idx = createGlobal(VD, VD->getType(), IsStatic, IsExtern, Init)) {
    for (const Decl *P = VD; P; P = P->getPreviousDecl())
      GlobalIndices[P] = *Idx;
    return *Idx;
  }
  return std::nullopt;
}

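// Returns the global for an expression, creating one if none exists yet.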
std::optional<unsigned> Program::createGlobal(const Expr *E) {
  if (auto Idx = getGlobal(E))
    return Idx;
  if (auto Idx = createGlobal(E, E->getType(), /*isStatic=*/true,
                              /*isExtern=*/false)) {
    GlobalIndices[E] = *Idx;
    return *Idx;
  }
  return std::nullopt;
}

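// Creates the descriptor and storage block for a global and appends it to
// Globals; returns std::nullopt if no descriptor could be created.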
std::optional<unsigned> Program::createGlobal(const DeclTy &D, QualType Ty,
                                              bool IsStatic, bool IsExtern,
                                              const Expr *Init) {
  // Create a descriptor for the global.
  Descriptor *Desc;
  const bool IsConst = Ty.isConstQualified();
  const bool IsTemporary = D.dyn_cast<const Expr *>();
  if (std::optional<PrimType> T = Ctx.classify(Ty))
    Desc = createDescriptor(D, *T, Descriptor::GlobalMD, IsConst, IsTemporary);
  else
    Desc = createDescriptor(D, Ty.getTypePtr(), Descriptor::GlobalMD, IsConst,
                            IsTemporary);

  if (!Desc)
    return std::nullopt;

  // Allocate a block for storage.
  unsigned I = Globals.size();

  auto *G = new (Allocator, Desc->getAllocSize())
      Global(Ctx.getEvalID(), getCurrentDecl(), Desc, IsStatic, IsExtern);
  G->block()->invokeCtor();

  // Initialize InlineDescriptor fields.
  auto *GD = new (G->block()->rawData()) GlobalInlineDescriptor();
  if (!Init)
    GD->InitState = GlobalInitState::NoInitializer;
  Globals.push_back(G);

  return I;
}

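// Returns the bytecode function for the canonical declaration, or nullptr
// if none has been created yet.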
Function *Program::getFunction(const FunctionDecl *F) {
  F = F->getCanonicalDecl();
  assert(F);
  auto It = Funcs.find(F);
  return It == Funcs.end() ? nullptr : It->second.get();
}

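// Returns the interpreter's Record layout for a record declaration,
// building it (base classes, virtual bases and fields) on first use.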
Record *Program::getOrCreateRecord(const RecordDecl *RD) {
  // Use the actual definition as a key.
  RD = RD->getDefinition();
  if (!RD)
    return nullptr;

  if (!RD->isCompleteDefinition())
    return nullptr;

  // Deduplicate records.
  if (auto It = Records.find(RD); It != Records.end())
    return It->second;

  // We insert nullptr now and replace that later, so recursive calls
  // to this function with the same RecordDecl don't run into
  // infinite recursion.
  Records.insert({RD, nullptr});

  // Number of bytes required by fields and base classes.
  unsigned BaseSize = 0;
  // Number of bytes required by virtual base.
  unsigned VirtSize = 0;

  // Helper to get a base descriptor.
  auto GetBaseDesc = [this](const RecordDecl *BD,
                            const Record *BR) -> const Descriptor * {
    if (!BR)
      return nullptr;
    return allocateDescriptor(BD, BR, std::nullopt, /*isConst=*/false,
                              /*isTemporary=*/false,
                              /*isMutable=*/false);
  };

  // Reserve space for base classes.
  Record::BaseList Bases;
  Record::VirtualBaseList VirtBases;
  if (const auto *CD = dyn_cast<CXXRecordDecl>(RD)) {
    for (const CXXBaseSpecifier &Spec : CD->bases()) {
      if (Spec.isVirtual())
        continue;

      // In error cases, the base might not be a RecordType.
      const auto *RT = Spec.getType()->getAs<RecordType>();
      if (!RT)
        return nullptr;
      const RecordDecl *BD = RT->getDecl();
      const Record *BR = getOrCreateRecord(BD);

      const Descriptor *Desc = GetBaseDesc(BD, BR);
      if (!Desc)
        return nullptr;

      BaseSize += align(sizeof(InlineDescriptor));
      Bases.push_back({BD, BaseSize, Desc, BR});
      BaseSize += align(BR->getSize());
    }

    for (const CXXBaseSpecifier &Spec : CD->vbases()) {
      const auto *RT = Spec.getType()->getAs<RecordType>();
      if (!RT)
        return nullptr;

      const RecordDecl *BD = RT->getDecl();
      const Record *BR = getOrCreateRecord(BD);

      const Descriptor *Desc = GetBaseDesc(BD, BR);
      if (!Desc)
        return nullptr;

      VirtSize += align(sizeof(InlineDescriptor));
      VirtBases.push_back({BD, VirtSize, Desc, BR});
      VirtSize += align(BR->getSize());
    }
  }

  // Reserve space for fields.
  Record::FieldList Fields;
  for (const FieldDecl *FD : RD->fields()) {
    // Note that we DO create fields and descriptors
    // for unnamed bitfields here, even though we later ignore
    // them everywhere. That's so the FieldDecl's getFieldIndex() matches.

    // Reserve space for the field's descriptor and the offset.
    BaseSize += align(sizeof(InlineDescriptor));

    // Classify the field and add its metadata.
    QualType FT = FD->getType();
    const bool IsConst = FT.isConstQualified();
    const bool IsMutable = FD->isMutable();
    const Descriptor *Desc;
    if (std::optional<PrimType> T = Ctx.classify(FT)) {
      Desc = createDescriptor(FD, *T, std::nullopt, IsConst,
                              /*isTemporary=*/false, IsMutable);
    } else {
      Desc = createDescriptor(FD, FT.getTypePtr(), std::nullopt, IsConst,
                              /*isTemporary=*/false, IsMutable);
    }
    if (!Desc)
      return nullptr;
    Fields.push_back({FD, BaseSize, Desc});
    BaseSize += align(Desc->getAllocSize());
  }

  Record *R = new (Allocator) Record(RD, std::move(Bases), std::move(Fields),
                                     std::move(VirtBases), VirtSize, BaseSize);
  Records[RD] = R;
  return R;
}

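// Creates a descriptor for a composite type: records, arrays of known or
// unknown bounds, atomic, complex and vector types. Returns nullptr for
// unsupported types.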
Descriptor *Program::createDescriptor(const DeclTy &D, const Type *Ty,
                                      Descriptor::MetadataSize MDSize,
                                      bool IsConst, bool IsTemporary,
                                      bool IsMutable, const Expr *Init) {

  // Classes and structures.
  if (const auto *RT = Ty->getAs<RecordType>()) {
    if (const auto *Record = getOrCreateRecord(RT->getDecl()))
      return allocateDescriptor(D, Record, MDSize, IsConst, IsTemporary,
                                IsMutable);
  }

  // Arrays.
  if (const auto ArrayType = Ty->getAsArrayTypeUnsafe()) {
    QualType ElemTy = ArrayType->getElementType();
    // Array of well-known bounds.
    if (auto CAT = dyn_cast<ConstantArrayType>(ArrayType)) {
      size_t NumElems = CAT->getZExtSize();
      if (std::optional<PrimType> T = Ctx.classify(ElemTy)) {
        // Arrays of primitives.
        unsigned ElemSize = primSize(*T);
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems) {
          return {};
        }
        return allocateDescriptor(D, *T, MDSize, NumElems, IsConst, IsTemporary,
                                  IsMutable);
      } else {
        // Arrays of composites. In this case, the array is a list of pointers,
        // followed by the actual elements.
        const Descriptor *ElemDesc = createDescriptor(
            D, ElemTy.getTypePtr(), std::nullopt, IsConst, IsTemporary);
        if (!ElemDesc)
          return nullptr;
        unsigned ElemSize =
            ElemDesc->getAllocSize() + sizeof(InlineDescriptor);
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems)
          return {};
        return allocateDescriptor(D, ElemDesc, MDSize, NumElems, IsConst,
                                  IsTemporary, IsMutable);
      }
    }

    // Array of unknown bounds - cannot be accessed and pointer arithmetic
    // is forbidden on pointers to such objects.
    if (isa<IncompleteArrayType>(ArrayType) ||
        isa<VariableArrayType>(ArrayType)) {
      if (std::optional<PrimType> T = Ctx.classify(ElemTy)) {
        return allocateDescriptor(D, *T, MDSize, IsTemporary,
                                  Descriptor::UnknownSize{});
      } else {
        const Descriptor *Desc = createDescriptor(D, ElemTy.getTypePtr(),
                                                  MDSize, IsConst, IsTemporary);
        if (!Desc)
          return nullptr;
        return allocateDescriptor(D, Desc, MDSize, IsTemporary,
                                  Descriptor::UnknownSize{});
      }
    }
  }

  // Atomic types.
  if (const auto *AT = Ty->getAs<AtomicType>()) {
    const Type *InnerTy = AT->getValueType().getTypePtr();
    return createDescriptor(D, InnerTy, MDSize, IsConst, IsTemporary,
                            IsMutable);
  }

  // Complex types - represented as arrays of elements.
  if (const auto *CT = Ty->getAs<ComplexType>()) {
    PrimType ElemTy = *Ctx.classify(CT->getElementType());
    return allocateDescriptor(D, ElemTy, MDSize, 2, IsConst, IsTemporary,
                              IsMutable);
  }

  // Same with vector types.
  if (const auto *VT = Ty->getAs<VectorType>()) {
    PrimType ElemTy = *Ctx.classify(VT->getElementType());
    return allocateDescriptor(D, ElemTy, MDSize, VT->getNumElements(), IsConst,
                              IsTemporary, IsMutable);
  }

  return nullptr;
}