GitHub Repository: freebsd/freebsd-src
Path: blob/main/contrib/llvm-project/clang/lib/Sema/AnalysisBasedWarnings.cpp
//=== AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis ------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines analysis_warnings::[Policy,Executor].
// Together they are used by Sema to issue warnings based on inexpensive
// static analysis algorithms in libAnalysis.
//
//===----------------------------------------------------------------------===//

#include "clang/Sema/AnalysisBasedWarnings.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/EvaluatedExprVisitor.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/ExprObjC.h"
#include "clang/AST/OperationKinds.h"
#include "clang/AST/ParentMap.h"
#include "clang/AST/RecursiveASTVisitor.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/AST/Type.h"
#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
#include "clang/Analysis/Analyses/CalledOnceCheck.h"
#include "clang/Analysis/Analyses/Consumed.h"
#include "clang/Analysis/Analyses/ReachableCode.h"
#include "clang/Analysis/Analyses/ThreadSafety.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/Analysis/Analyses/UnsafeBufferUsage.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/CFGStmtMap.h"
#include "clang/Basic/Diagnostic.h"
#include "clang/Basic/DiagnosticSema.h"
#include "clang/Basic/SourceLocation.h"
#include "clang/Basic/SourceManager.h"
#include "clang/Lex/Preprocessor.h"
#include "clang/Sema/ScopeInfo.h"
#include "clang/Sema/SemaInternal.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/ADT/STLFunctionalExtras.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/Support/Casting.h"
#include <algorithm>
#include <deque>
#include <iterator>
#include <optional>

using namespace clang;

//===----------------------------------------------------------------------===//
// Unreachable code analysis.
//===----------------------------------------------------------------------===//

namespace {
class UnreachableCodeHandler : public reachable_code::Callback {
  Sema &S;
  SourceRange PreviousSilenceableCondVal;

public:
  UnreachableCodeHandler(Sema &s) : S(s) {}

  void HandleUnreachable(reachable_code::UnreachableKind UK, SourceLocation L,
                         SourceRange SilenceableCondVal, SourceRange R1,
                         SourceRange R2, bool HasFallThroughAttr) override {
    // If the diagnosed code is `[[fallthrough]];` and
    // `-Wunreachable-code-fallthrough` is enabled, suppress the `code will
    // never be executed` warning to avoid generating the diagnostic twice.
    if (HasFallThroughAttr &&
        !S.getDiagnostics().isIgnored(diag::warn_unreachable_fallthrough_attr,
                                      SourceLocation()))
      return;

    // Avoid reporting multiple unreachable code diagnostics that are
    // triggered by the same conditional value.
    if (PreviousSilenceableCondVal.isValid() &&
        SilenceableCondVal.isValid() &&
        PreviousSilenceableCondVal == SilenceableCondVal)
      return;
    PreviousSilenceableCondVal = SilenceableCondVal;

    unsigned diag = diag::warn_unreachable;
    switch (UK) {
    case reachable_code::UK_Break:
      diag = diag::warn_unreachable_break;
      break;
    case reachable_code::UK_Return:
      diag = diag::warn_unreachable_return;
      break;
    case reachable_code::UK_Loop_Increment:
      diag = diag::warn_unreachable_loop_increment;
      break;
    case reachable_code::UK_Other:
      break;
    }

    S.Diag(L, diag) << R1 << R2;

    SourceLocation Open = SilenceableCondVal.getBegin();
    if (Open.isValid()) {
      SourceLocation Close = SilenceableCondVal.getEnd();
      Close = S.getLocForEndOfToken(Close);
      if (Close.isValid()) {
        S.Diag(Open, diag::note_unreachable_silence)
            << FixItHint::CreateInsertion(Open, "/* DISABLES CODE */ (")
            << FixItHint::CreateInsertion(Close, ")");
      }
    }
  }
};
} // anonymous namespace

/// CheckUnreachable - Check for unreachable code.
static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
  // As a heuristic, prune all diagnostics not in the main file. Currently
  // the majority of warnings in headers are false positives. These
  // are largely caused by configuration state, e.g. preprocessor-defined
  // code, etc.
  //
  // Note that this is also a performance optimization. Analyzing
  // headers many times can be expensive.
  if (!S.getSourceManager().isInMainFile(AC.getDecl()->getBeginLoc()))
    return;

  UnreachableCodeHandler UC(S);
  reachable_code::FindUnreachableCode(AC, S.getPreprocessor(), UC);
}
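
// For illustration only (hypothetical user code, not part of this file): the
// handler above flags dead statements and, when the cause is a constant
// condition, offers the parenthesizing fixit to silence the warning:
//
//   if (0) doWork();                      // warning: code will never be executed
//   if (/* DISABLES CODE */ (0)) doWork();  // silenced via the fixit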

namespace {
/// Warn on logical operator errors in CFGBuilder
class LogicalErrorHandler : public CFGCallback {
  Sema &S;

public:
  LogicalErrorHandler(Sema &S) : S(S) {}

  static bool HasMacroID(const Expr *E) {
    if (E->getExprLoc().isMacroID())
      return true;

    // Recurse to children.
    for (const Stmt *SubStmt : E->children())
      if (const Expr *SubExpr = dyn_cast_or_null<Expr>(SubStmt))
        if (HasMacroID(SubExpr))
          return true;

    return false;
  }

  void logicAlwaysTrue(const BinaryOperator *B, bool isAlwaysTrue) override {
    if (HasMacroID(B))
      return;

    unsigned DiagID = isAlwaysTrue
                          ? diag::warn_tautological_negation_or_compare
                          : diag::warn_tautological_negation_and_compare;
    SourceRange DiagRange = B->getSourceRange();
    S.Diag(B->getExprLoc(), DiagID) << DiagRange;
  }

  void compareAlwaysTrue(const BinaryOperator *B, bool isAlwaysTrue) override {
    if (HasMacroID(B))
      return;

    SourceRange DiagRange = B->getSourceRange();
    S.Diag(B->getExprLoc(), diag::warn_tautological_overlap_comparison)
        << DiagRange << isAlwaysTrue;
  }

  void compareBitwiseEquality(const BinaryOperator *B,
                              bool isAlwaysTrue) override {
    if (HasMacroID(B))
      return;

    SourceRange DiagRange = B->getSourceRange();
    S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_always)
        << DiagRange << isAlwaysTrue;
  }

  void compareBitwiseOr(const BinaryOperator *B) override {
    if (HasMacroID(B))
      return;

    SourceRange DiagRange = B->getSourceRange();
    S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_or) << DiagRange;
  }

  static bool hasActiveDiagnostics(DiagnosticsEngine &Diags,
                                   SourceLocation Loc) {
    return !Diags.isIgnored(diag::warn_tautological_overlap_comparison, Loc) ||
           !Diags.isIgnored(diag::warn_comparison_bitwise_or, Loc) ||
           !Diags.isIgnored(diag::warn_tautological_negation_and_compare, Loc);
  }
};
} // anonymous namespace
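
// For illustration only (hypothetical user code, not part of this file):
// compareAlwaysTrue fires on overlapping range checks such as
//
//   if (x < 10 || x > 5) { ... }   // always true; the author likely meant &&
//
// and compareBitwiseEquality on patterns like (x & 8) == 4, which can never
// hold for any value of x.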

//===----------------------------------------------------------------------===//
// Check for infinite self-recursion in functions
//===----------------------------------------------------------------------===//

// Returns true if the function is called anywhere within the CFGBlock.
// For member functions, the additional condition of being called through
// the 'this' pointer is required.
static bool hasRecursiveCallInPath(const FunctionDecl *FD, CFGBlock &Block) {
  // Process all the Stmt's in this block to find any calls to FD.
  for (const auto &B : Block) {
    if (B.getKind() != CFGElement::Statement)
      continue;

    const CallExpr *CE = dyn_cast<CallExpr>(B.getAs<CFGStmt>()->getStmt());
    if (!CE || !CE->getCalleeDecl() ||
        CE->getCalleeDecl()->getCanonicalDecl() != FD)
      continue;

    // Skip function calls which are qualified with a templated class.
    if (const DeclRefExpr *DRE =
            dyn_cast<DeclRefExpr>(CE->getCallee()->IgnoreParenImpCasts())) {
      if (NestedNameSpecifier *NNS = DRE->getQualifier()) {
        if (NNS->getKind() == NestedNameSpecifier::TypeSpec &&
            isa<TemplateSpecializationType>(NNS->getAsType())) {
          continue;
        }
      }
    }

    const CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(CE);
    if (!MCE || isa<CXXThisExpr>(MCE->getImplicitObjectArgument()) ||
        !MCE->getMethodDecl()->isVirtual())
      return true;
  }
  return false;
}

// Returns true if every path from the entry block passes through a call to FD.
static bool checkForRecursiveFunctionCall(const FunctionDecl *FD, CFG *cfg) {
  llvm::SmallPtrSet<CFGBlock *, 16> Visited;
  llvm::SmallVector<CFGBlock *, 16> WorkList;
  // Keep track of whether we found at least one recursive path.
  bool foundRecursion = false;

  const unsigned ExitID = cfg->getExit().getBlockID();

  // Seed the work list with the entry block.
  WorkList.push_back(&cfg->getEntry());

  while (!WorkList.empty()) {
    CFGBlock *Block = WorkList.pop_back_val();

    for (auto I = Block->succ_begin(), E = Block->succ_end(); I != E; ++I) {
      if (CFGBlock *SuccBlock = *I) {
        if (!Visited.insert(SuccBlock).second)
          continue;

        // Found a path to the exit node without a recursive call.
        if (ExitID == SuccBlock->getBlockID())
          return false;

        // If the successor block contains a recursive call, end analysis
        // there.
        if (hasRecursiveCallInPath(FD, *SuccBlock)) {
          foundRecursion = true;
          continue;
        }

        WorkList.push_back(SuccBlock);
      }
    }
  }
  return foundRecursion;
}

static void checkRecursiveFunction(Sema &S, const FunctionDecl *FD,
                                   const Stmt *Body, AnalysisDeclContext &AC) {
  FD = FD->getCanonicalDecl();

  // Only run on non-templated functions and non-templated members of
  // templated classes.
  if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate &&
      FD->getTemplatedKind() != FunctionDecl::TK_MemberSpecialization)
    return;

  CFG *cfg = AC.getCFG();
  if (!cfg) return;

  // If the exit block is unreachable, skip processing the function.
  if (cfg->getExit().pred_empty())
    return;

  // Emit diagnostic if a recursive function call is detected for all paths.
  if (checkForRecursiveFunctionCall(FD, cfg))
    S.Diag(Body->getBeginLoc(), diag::warn_infinite_recursive_function);
}
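
// For illustration only (hypothetical user code, not part of this file):
// every path through the function below calls itself again, so the check
// reports -Winfinite-recursion:
//
//   int countdown(int n) { return countdown(n - 1); }
//
// A function with a reachable non-recursive path (e.g. a base case that
// returns before the recursive call) is not flagged.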

//===----------------------------------------------------------------------===//
// Check for throw in a non-throwing function.
//===----------------------------------------------------------------------===//

/// Determine whether an exception thrown by E, unwinding from ThrowBlock,
/// can reach ExitBlock.
static bool throwEscapes(Sema &S, const CXXThrowExpr *E, CFGBlock &ThrowBlock,
                         CFG *Body) {
  SmallVector<CFGBlock *, 16> Stack;
  llvm::BitVector Queued(Body->getNumBlockIDs());

  Stack.push_back(&ThrowBlock);
  Queued[ThrowBlock.getBlockID()] = true;

  while (!Stack.empty()) {
    CFGBlock &UnwindBlock = *Stack.back();
    Stack.pop_back();

    for (auto &Succ : UnwindBlock.succs()) {
      if (!Succ.isReachable() || Queued[Succ->getBlockID()])
        continue;

      if (Succ->getBlockID() == Body->getExit().getBlockID())
        return true;

      if (auto *Catch =
              dyn_cast_or_null<CXXCatchStmt>(Succ->getLabel())) {
        QualType Caught = Catch->getCaughtType();
        if (Caught.isNull() || // catch (...) catches everything
            !E->getSubExpr() || // throw; is considered caught by any handler
            S.handlerCanCatch(Caught, E->getSubExpr()->getType()))
          // Exception doesn't escape via this path.
          break;
      } else {
        Stack.push_back(Succ);
        Queued[Succ->getBlockID()] = true;
      }
    }
  }

  return false;
}

static void visitReachableThrows(
    CFG *BodyCFG,
    llvm::function_ref<void(const CXXThrowExpr *, CFGBlock &)> Visit) {
  llvm::BitVector Reachable(BodyCFG->getNumBlockIDs());
  clang::reachable_code::ScanReachableFromBlock(&BodyCFG->getEntry(), Reachable);
  for (CFGBlock *B : *BodyCFG) {
    if (!Reachable[B->getBlockID()])
      continue;
    for (CFGElement &E : *B) {
      std::optional<CFGStmt> S = E.getAs<CFGStmt>();
      if (!S)
        continue;
      if (auto *Throw = dyn_cast<CXXThrowExpr>(S->getStmt()))
        Visit(Throw, *B);
    }
  }
}

static void EmitDiagForCXXThrowInNonThrowingFunc(Sema &S, SourceLocation OpLoc,
                                                 const FunctionDecl *FD) {
  if (!S.getSourceManager().isInSystemHeader(OpLoc) &&
      FD->getTypeSourceInfo()) {
    S.Diag(OpLoc, diag::warn_throw_in_noexcept_func) << FD;
    if (S.getLangOpts().CPlusPlus11 &&
        (isa<CXXDestructorDecl>(FD) ||
         FD->getDeclName().getCXXOverloadedOperator() == OO_Delete ||
         FD->getDeclName().getCXXOverloadedOperator() == OO_Array_Delete)) {
      if (const auto *Ty = FD->getTypeSourceInfo()->getType()->
              getAs<FunctionProtoType>())
        S.Diag(FD->getLocation(), diag::note_throw_in_dtor)
            << !isa<CXXDestructorDecl>(FD) << !Ty->hasExceptionSpec()
            << FD->getExceptionSpecSourceRange();
    } else
      S.Diag(FD->getLocation(), diag::note_throw_in_function)
          << FD->getExceptionSpecSourceRange();
  }
}

static void checkThrowInNonThrowingFunc(Sema &S, const FunctionDecl *FD,
                                        AnalysisDeclContext &AC) {
  CFG *BodyCFG = AC.getCFG();
  if (!BodyCFG)
    return;
  if (BodyCFG->getExit().pred_empty())
    return;
  visitReachableThrows(BodyCFG, [&](const CXXThrowExpr *Throw, CFGBlock &Block) {
    if (throwEscapes(S, Throw, Block, BodyCFG))
      EmitDiagForCXXThrowInNonThrowingFunc(S, Throw->getThrowLoc(), FD);
  });
}
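
// For illustration only (hypothetical user code, not part of this file):
// a throw that can unwind out of a noexcept function is flagged, while one
// caught inside the function is not:
//
//   void f() noexcept { throw 1; }                          // warning
//   void g() noexcept { try { throw 1; } catch (...) {} }   // no warning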

static bool isNoexcept(const FunctionDecl *FD) {
  const auto *FPT = FD->getType()->castAs<FunctionProtoType>();
  if (FPT->isNothrow() || FD->hasAttr<NoThrowAttr>())
    return true;
  return false;
}

//===----------------------------------------------------------------------===//
// Check for missing return value.
//===----------------------------------------------------------------------===//

enum ControlFlowKind {
  UnknownFallThrough,
  NeverFallThrough,
  MaybeFallThrough,
  AlwaysFallThrough,
  NeverFallThroughOrReturn
};

/// CheckFallThrough - Check that we don't fall off the end of a
/// Statement that should return a value.
///
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
/// MaybeFallThrough iff we might or might not fall off the end,
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
/// return. We assume NeverFallThrough iff we never fall off the end of the
/// statement but we may return. We assume that functions not marked noreturn
/// will return.
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
  CFG *cfg = AC.getCFG();
  if (!cfg) return UnknownFallThrough;

  // The CFG leaves in dead things, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
                                                          live);

  bool AddEHEdges = AC.getAddEHEdges();
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
    // When there are things remaining dead, and we didn't add EH edges
    // from CallExprs to the catch clauses, we have to go back and
    // mark them as live.
    for (const auto *B : *cfg) {
      if (!live[B->getBlockID()]) {
        if (B->pred_begin() == B->pred_end()) {
          const Stmt *Term = B->getTerminatorStmt();
          if (isa_and_nonnull<CXXTryStmt>(Term))
            // When not adding EH edges from calls, catch clauses
            // can otherwise seem dead. Avoid noting them as dead.
            count += reachable_code::ScanReachableFromBlock(B, live);
          continue;
        }
      }
    }

  // Now that we know what is live, we check the live predecessors of the exit
  // block and look for fall through paths, being careful to ignore normal
  // returns, and exceptional paths.
  bool HasLiveReturn = false;
  bool HasFakeEdge = false;
  bool HasPlainEdge = false;
  bool HasAbnormalEdge = false;

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  for (CFGBlock::filtered_pred_iterator I =
           cfg->getExit().filtered_pred_start_end(FO);
       I.hasMore(); ++I) {
    const CFGBlock &B = **I;
    if (!live[B.getBlockID()])
      continue;

    // Skip blocks which contain an element marked as no-return. They don't
    // represent actually viable edges into the exit block, so mark them as
    // abnormal.
    if (B.hasNoReturnElement()) {
      HasAbnormalEdge = true;
      continue;
    }

    // Destructors can appear after the 'return' in the CFG. This is
    // normal. We need to look past the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();

    for ( ; ri != re ; ++ri)
      if (ri->getAs<CFGStmt>())
        break;

    // No more CFGElements in the block?
    if (ri == re) {
      const Stmt *Term = B.getTerminatorStmt();
      if (Term && (isa<CXXTryStmt>(Term) || isa<ObjCAtTryStmt>(Term))) {
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    CFGStmt CS = ri->castAs<CFGStmt>();
    const Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(S) || isa<CoreturnStmt>(S)) {
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<MSAsmStmt>(S)) {
      // TODO: Verify this is correct.
      HasFakeEdge = true;
      HasLiveReturn = true;
      continue;
    }
    if (isa<CXXTryStmt>(S)) {
      HasAbnormalEdge = true;
      continue;
    }
    if (!llvm::is_contained(B.succs(), &cfg->getExit())) {
      HasAbnormalEdge = true;
      continue;
    }

    HasPlainEdge = true;
  }
  if (!HasPlainEdge) {
    if (HasLiveReturn)
      return NeverFallThrough;
    return NeverFallThroughOrReturn;
  }
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that are not marked
  // noreturn but don't return. If people would like this warning to be more
  // accurate, such functions should be marked as noreturn.
  return AlwaysFallThrough;
}
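
// For illustration only (hypothetical user code, not part of this file):
//
//   int f(bool b) { if (b) return 1; }   // MaybeFallThrough: falls off the
//                                        // end when b is false
//   int g() {}                           // AlwaysFallThrough
//
// Both would be diagnosed by the -Wreturn-type machinery driven below.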

namespace {

struct CheckFallThroughDiagnostics {
  unsigned diag_MaybeFallThrough_HasNoReturn;
  unsigned diag_MaybeFallThrough_ReturnsNonVoid;
  unsigned diag_AlwaysFallThrough_HasNoReturn;
  unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
  unsigned diag_NeverFallThroughOrReturn;
  enum { Function, Block, Lambda, Coroutine } funMode;
  SourceLocation FuncLoc;

  static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_MaybeFallThrough_HasNoReturn =
        diag::warn_falloff_noreturn_function;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
        diag::warn_maybe_falloff_nonvoid_function;
    D.diag_AlwaysFallThrough_HasNoReturn =
        diag::warn_falloff_noreturn_function;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
        diag::warn_falloff_nonvoid_function;

    // Don't suggest that virtual functions be marked "noreturn", since they
    // might be overridden by non-noreturn functions.
    bool isVirtualMethod = false;
    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
      isVirtualMethod = Method->isVirtual();

    // Don't suggest that template instantiations be marked "noreturn"
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
      isTemplateInstantiation = Function->isTemplateInstantiation();

    if (!isVirtualMethod && !isTemplateInstantiation)
      D.diag_NeverFallThroughOrReturn =
          diag::warn_suggest_noreturn_function;
    else
      D.diag_NeverFallThroughOrReturn = 0;

    D.funMode = Function;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForCoroutine(const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_MaybeFallThrough_HasNoReturn = 0;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
        diag::warn_maybe_falloff_nonvoid_coroutine;
    D.diag_AlwaysFallThrough_HasNoReturn = 0;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
        diag::warn_falloff_nonvoid_coroutine;
    D.diag_NeverFallThroughOrReturn = 0;
    D.funMode = Coroutine;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForBlock() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
        diag::err_noreturn_block_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
        diag::err_maybe_falloff_nonvoid_block;
    D.diag_AlwaysFallThrough_HasNoReturn =
        diag::err_noreturn_block_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
        diag::err_falloff_nonvoid_block;
    D.diag_NeverFallThroughOrReturn = 0;
    D.funMode = Block;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForLambda() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
        diag::err_noreturn_lambda_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
        diag::warn_maybe_falloff_nonvoid_lambda;
    D.diag_AlwaysFallThrough_HasNoReturn =
        diag::err_noreturn_lambda_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
        diag::warn_falloff_nonvoid_lambda;
    D.diag_NeverFallThroughOrReturn = 0;
    D.funMode = Lambda;
    return D;
  }

  bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
                        bool HasNoReturn) const {
    if (funMode == Function) {
      return (ReturnsVoid ||
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_function,
                          FuncLoc)) &&
             (!HasNoReturn ||
              D.isIgnored(diag::warn_noreturn_function_has_return_expr,
                          FuncLoc)) &&
             (!ReturnsVoid ||
              D.isIgnored(diag::warn_suggest_noreturn_block, FuncLoc));
    }
    if (funMode == Coroutine) {
      return (ReturnsVoid ||
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_function, FuncLoc) ||
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_coroutine,
                          FuncLoc)) &&
             (!HasNoReturn);
    }
    // For blocks / lambdas.
    return ReturnsVoid && !HasNoReturn;
  }
};

} // anonymous namespace

/// CheckFallThroughForBody - Check that we don't fall off the end of a
/// function that should return a value. Check that we don't fall off the end
/// of a noreturn function. We assume that functions and blocks not marked
/// noreturn will return.
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
                                    QualType BlockType,
                                    const CheckFallThroughDiagnostics &CD,
                                    AnalysisDeclContext &AC,
                                    sema::FunctionScopeInfo *FSI) {

  bool ReturnsVoid = false;
  bool HasNoReturn = false;
  bool IsCoroutine = FSI->isCoroutine();

  if (const auto *FD = dyn_cast<FunctionDecl>(D)) {
    if (const auto *CBody = dyn_cast<CoroutineBodyStmt>(Body))
      ReturnsVoid = CBody->getFallthroughHandler() != nullptr;
    else
      ReturnsVoid = FD->getReturnType()->isVoidType();
    HasNoReturn = FD->isNoReturn();
  }
  else if (const auto *MD = dyn_cast<ObjCMethodDecl>(D)) {
    ReturnsVoid = MD->getReturnType()->isVoidType();
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
  }
  else if (isa<BlockDecl>(D)) {
    if (const FunctionType *FT =
            BlockType->getPointeeType()->getAs<FunctionType>()) {
      if (FT->getReturnType()->isVoidType())
        ReturnsVoid = true;
      if (FT->getNoReturnAttr())
        HasNoReturn = true;
    }
  }

  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Short circuit for compilation speed.
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
    return;
  SourceLocation LBrace = Body->getBeginLoc(), RBrace = Body->getEndLoc();
  auto EmitDiag = [&](SourceLocation Loc, unsigned DiagID) {
    if (IsCoroutine)
      S.Diag(Loc, DiagID) << FSI->CoroutinePromise->getType();
    else
      S.Diag(Loc, DiagID);
  };

  // cpu_dispatch functions permit empty function bodies for ICC compatibility.
  if (D->getAsFunction() && D->getAsFunction()->isCPUDispatchMultiVersion())
    return;

  // Either in a function body compound statement, or a function-try-block.
  switch (CheckFallThrough(AC)) {
  case UnknownFallThrough:
    break;

  case MaybeFallThrough:
    if (HasNoReturn)
      EmitDiag(RBrace, CD.diag_MaybeFallThrough_HasNoReturn);
    else if (!ReturnsVoid)
      EmitDiag(RBrace, CD.diag_MaybeFallThrough_ReturnsNonVoid);
    break;
  case AlwaysFallThrough:
    if (HasNoReturn)
      EmitDiag(RBrace, CD.diag_AlwaysFallThrough_HasNoReturn);
    else if (!ReturnsVoid)
      EmitDiag(RBrace, CD.diag_AlwaysFallThrough_ReturnsNonVoid);
    break;
  case NeverFallThroughOrReturn:
    if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
      if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
        S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 0 << FD;
      } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
        S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 1 << MD;
      } else {
        S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn);
      }
    }
    break;
  case NeverFallThrough:
    break;
  }
}

//===----------------------------------------------------------------------===//
// -Wuninitialized
//===----------------------------------------------------------------------===//

namespace {
/// ContainsReference - A visitor class to search for references to
/// a particular declaration (the needle) within any evaluated component of an
/// expression (recursively).
class ContainsReference : public ConstEvaluatedExprVisitor<ContainsReference> {
  bool FoundReference;
  const DeclRefExpr *Needle;

public:
  typedef ConstEvaluatedExprVisitor<ContainsReference> Inherited;

  ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
      : Inherited(Context), FoundReference(false), Needle(Needle) {}

  void VisitExpr(const Expr *E) {
    // Stop evaluating if we already have a reference.
    if (FoundReference)
      return;

    Inherited::VisitExpr(E);
  }

  void VisitDeclRefExpr(const DeclRefExpr *E) {
    if (E == Needle)
      FoundReference = true;
    else
      Inherited::VisitDeclRefExpr(E);
  }

  bool doesContainReference() const { return FoundReference; }
};
} // anonymous namespace

static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
  QualType VariableTy = VD->getType().getCanonicalType();
  if (VariableTy->isBlockPointerType() &&
      !VD->hasAttr<BlocksAttr>()) {
    S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization)
        << VD->getDeclName()
        << FixItHint::CreateInsertion(VD->getLocation(), "__block ");
    return true;
  }

  // Don't issue a fixit if there is already an initializer.
  if (VD->getInit())
    return false;

  // Don't suggest a fixit inside macros.
  if (VD->getEndLoc().isMacroID())
    return false;

  SourceLocation Loc = S.getLocForEndOfToken(VD->getEndLoc());

  // Suggest possible initialization (if any).
  std::string Init = S.getFixItZeroInitializerForType(VariableTy, Loc);
  if (Init.empty())
    return false;

  S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
      << FixItHint::CreateInsertion(Loc, Init);
  return true;
}
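
// For illustration only (hypothetical user code, not part of this file):
// for a use of an uninitialized 'int x;', the note above suggests inserting a
// zero initializer, i.e. rewriting the declaration to 'int x = 0;'. For a
// block pointer without the BlocksAttr it instead suggests adding '__block'.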

/// Create a fixit to remove an if-like statement, on the assumption that its
/// condition is CondVal.
static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
                          const Stmt *Else, bool CondVal,
                          FixItHint &Fixit1, FixItHint &Fixit2) {
  if (CondVal) {
    // If condition is always true, remove all but the 'then'.
    Fixit1 = FixItHint::CreateRemoval(
        CharSourceRange::getCharRange(If->getBeginLoc(), Then->getBeginLoc()));
    if (Else) {
      SourceLocation ElseKwLoc = S.getLocForEndOfToken(Then->getEndLoc());
      Fixit2 =
          FixItHint::CreateRemoval(SourceRange(ElseKwLoc, Else->getEndLoc()));
    }
  } else {
    // If condition is always false, remove all but the 'else'.
    if (Else)
      Fixit1 = FixItHint::CreateRemoval(CharSourceRange::getCharRange(
          If->getBeginLoc(), Else->getBeginLoc()));
    else
      Fixit1 = FixItHint::CreateRemoval(If->getSourceRange());
  }
}

/// DiagUninitUse -- Helper function to produce a diagnostic for an
/// uninitialized use of a variable.
static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
                          bool IsCapturedByBlock) {
  bool Diagnosed = false;

  switch (Use.getKind()) {
  case UninitUse::Always:
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
    return;

  case UninitUse::AfterDecl:
  case UninitUse::AfterCall:
    S.Diag(VD->getLocation(), diag::warn_sometimes_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << (Use.getKind() == UninitUse::AfterDecl ? 4 : 5)
        << const_cast<DeclContext*>(VD->getLexicalDeclContext())
        << VD->getSourceRange();
    S.Diag(Use.getUser()->getBeginLoc(), diag::note_uninit_var_use)
        << IsCapturedByBlock << Use.getUser()->getSourceRange();
    return;

  case UninitUse::Maybe:
  case UninitUse::Sometimes:
    // Carry on to report sometimes-uninitialized branches, if possible,
    // or a 'may be used uninitialized' diagnostic otherwise.
    break;
  }

  // Diagnose each branch which leads to a sometimes-uninitialized use.
  for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
       I != E; ++I) {
    assert(Use.getKind() == UninitUse::Sometimes);

    const Expr *User = Use.getUser();
    const Stmt *Term = I->Terminator;

    // Information used when building the diagnostic.
    unsigned DiagKind;
    StringRef Str;
    SourceRange Range;

    // FixIts to suppress the diagnostic by removing the dead condition.
    // For all binary terminators, branch 0 is taken if the condition is true,
    // and branch 1 is taken if the condition is false.
    int RemoveDiagKind = -1;
    const char *FixitStr =
        S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false")
                                  : (I->Output ? "1" : "0");
    FixItHint Fixit1, Fixit2;

    switch (Term ? Term->getStmtClass() : Stmt::DeclStmtClass) {
    default:
      // Don't know how to report this. Just fall back to 'may be used
      // uninitialized'. FIXME: Can this happen?
      continue;

    // "condition is true / condition is false".
    case Stmt::IfStmtClass: {
      const IfStmt *IS = cast<IfStmt>(Term);
      DiagKind = 0;
      Str = "if";
      Range = IS->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, IS, IS->getThen(), IS->getElse(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::ConditionalOperatorClass: {
      const ConditionalOperator *CO = cast<ConditionalOperator>(Term);
      DiagKind = 0;
      Str = "?:";
      Range = CO->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::BinaryOperatorClass: {
      const BinaryOperator *BO = cast<BinaryOperator>(Term);
      if (!BO->isLogicalOp())
        continue;
      DiagKind = 0;
      Str = BO->getOpcodeStr();
      Range = BO->getLHS()->getSourceRange();
      RemoveDiagKind = 0;
      if ((BO->getOpcode() == BO_LAnd && I->Output) ||
          (BO->getOpcode() == BO_LOr && !I->Output))
        // true && y -> y, false || y -> y.
        Fixit1 = FixItHint::CreateRemoval(
            SourceRange(BO->getBeginLoc(), BO->getOperatorLoc()));
      else
        // false && y -> false, true || y -> true.
        Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr);
      break;
    }

    // "loop is entered / loop is exited".
    case Stmt::WhileStmtClass:
      DiagKind = 1;
      Str = "while";
      Range = cast<WhileStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;
    case Stmt::ForStmtClass:
      DiagKind = 1;
      Str = "for";
      Range = cast<ForStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      if (I->Output)
        Fixit1 = FixItHint::CreateRemoval(Range);
      else
        Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;
    case Stmt::CXXForRangeStmtClass:
      if (I->Output == 1) {
        // The use occurs if a range-based for loop's body never executes.
        // That may be impossible, and there's no syntactic fix for this,
        // so treat it as a 'may be uninitialized' case.
        continue;
      }
      DiagKind = 1;
      Str = "for";
      Range = cast<CXXForRangeStmt>(Term)->getRangeInit()->getSourceRange();
      break;

    // "condition is true / loop is exited".
    case Stmt::DoStmtClass:
      DiagKind = 2;
      Str = "do";
      Range = cast<DoStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;

    // "switch case is taken".
    case Stmt::CaseStmtClass:
      DiagKind = 3;
      Str = "case";
      Range = cast<CaseStmt>(Term)->getLHS()->getSourceRange();
      break;
    case Stmt::DefaultStmtClass:
      DiagKind = 3;
      Str = "default";
      Range = cast<DefaultStmt>(Term)->getDefaultLoc();
      break;
    }

    S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock << DiagKind
        << Str << I->Output << Range;
    S.Diag(User->getBeginLoc(), diag::note_uninit_var_use)
        << IsCapturedByBlock << User->getSourceRange();
    if (RemoveDiagKind != -1)
      S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond)
          << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;

    Diagnosed = true;
  }

  if (!Diagnosed)
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_maybe_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
}

/// Diagnose uninitialized const reference usages.
static bool DiagnoseUninitializedConstRefUse(Sema &S, const VarDecl *VD,
                                             const UninitUse &Use) {
  S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_const_reference)
      << VD->getDeclName() << Use.getUser()->getSourceRange();
  return true;
}

/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
/// uninitialized variable. This manages the different forms of diagnostic
/// emitted for particular types of uses. Returns true if the use was diagnosed
/// as a warning. If a particular use is one we omit warnings for, returns
/// false.
static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
                                     const UninitUse &Use,
                                     bool alwaysReportSelfInit = false) {
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Use.getUser())) {
    // Inspect the initializer of the variable declaration which is
    // being referenced prior to its initialization. We emit
    // specialized diagnostics for self-initialization, and we
    // specifically avoid warning about self references which take the
    // form of:
    //
    //   int x = x;
    //
    // This is used to indicate to GCC that 'x' is intentionally left
    // uninitialized. Proven code paths which access 'x' in
    // an uninitialized state after this will still warn.
    if (const Expr *Initializer = VD->getInit()) {
      if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
        return false;

      ContainsReference CR(S.Context, DRE);
      CR.Visit(Initializer);
      if (CR.doesContainReference()) {
        S.Diag(DRE->getBeginLoc(), diag::warn_uninit_self_reference_in_init)
            << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
        return true;
      }
    }

    DiagUninitUse(S, VD, Use, false);
  } else {
    const BlockExpr *BE = cast<BlockExpr>(Use.getUser());
    if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
      S.Diag(BE->getBeginLoc(),
             diag::warn_uninit_byref_blockvar_captured_by_block)
          << VD->getDeclName()
          << VD->getType().getQualifiers().hasObjCLifetime();
    else
      DiagUninitUse(S, VD, Use, true);
  }

  // Report where the variable was declared when the use wasn't within
  // the initializer of that declaration & we didn't already suggest
  // an initialization fixit.
  if (!SuggestInitializationFixit(S, VD))
    S.Diag(VD->getBeginLoc(), diag::note_var_declared_here)
        << VD->getDeclName();

  return true;
}

namespace {
class FallthroughMapper : public RecursiveASTVisitor<FallthroughMapper> {
public:
  FallthroughMapper(Sema &S)
      : FoundSwitchStatements(false),
        S(S) {
  }

  bool foundSwitchStatements() const { return FoundSwitchStatements; }

  void markFallthroughVisited(const AttributedStmt *Stmt) {
    bool Found = FallthroughStmts.erase(Stmt);
    assert(Found);
    (void)Found;
  }

  typedef llvm::SmallPtrSet<const AttributedStmt*, 8> AttrStmts;

  const AttrStmts &getFallthroughStmts() const {
    return FallthroughStmts;
  }

  void fillReachableBlocks(CFG *Cfg) {
    assert(ReachableBlocks.empty() && "ReachableBlocks already filled");
    std::deque<const CFGBlock *> BlockQueue;

    ReachableBlocks.insert(&Cfg->getEntry());
    BlockQueue.push_back(&Cfg->getEntry());
    // Mark all case blocks reachable to avoid problems with switching on
    // constants, covered enums, etc.
    // These blocks can contain fall-through annotations, and we don't want to
    // issue a warn_fallthrough_attr_unreachable for them.
    for (const auto *B : *Cfg) {
      const Stmt *L = B->getLabel();
      if (isa_and_nonnull<SwitchCase>(L) && ReachableBlocks.insert(B).second)
        BlockQueue.push_back(B);
    }

    while (!BlockQueue.empty()) {
      const CFGBlock *P = BlockQueue.front();
      BlockQueue.pop_front();
      for (const CFGBlock *B : P->succs()) {
        if (B && ReachableBlocks.insert(B).second)
          BlockQueue.push_back(B);
      }
    }
  }

  bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt,
                                 bool IsTemplateInstantiation) {
    assert(!ReachableBlocks.empty() && "ReachableBlocks empty");

    int UnannotatedCnt = 0;
    AnnotatedCnt = 0;

    std::deque<const CFGBlock*> BlockQueue(B.pred_begin(), B.pred_end());
    while (!BlockQueue.empty()) {
      const CFGBlock *P = BlockQueue.front();
      BlockQueue.pop_front();
      if (!P) continue;

      const Stmt *Term = P->getTerminatorStmt();
      if (isa_and_nonnull<SwitchStmt>(Term))
        continue; // Switch statement, good.

      const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(P->getLabel());
      if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
        continue; // Previous case label has no statements, good.

      const LabelStmt *L = dyn_cast_or_null<LabelStmt>(P->getLabel());
      if (L && L->getSubStmt() == B.getLabel() && P->begin() == P->end())
        continue; // Case label is preceded with a normal label, good.

      if (!ReachableBlocks.count(P)) {
        for (const CFGElement &Elem : llvm::reverse(*P)) {
          if (std::optional<CFGStmt> CS = Elem.getAs<CFGStmt>()) {
            if (const AttributedStmt *AS = asFallThroughAttr(CS->getStmt())) {
              // Don't issue a warning for an unreachable fallthrough
              // attribute in template instantiations as it may not be
              // unreachable in all instantiations of the template.
              if (!IsTemplateInstantiation)
                S.Diag(AS->getBeginLoc(),
                       diag::warn_unreachable_fallthrough_attr);
              markFallthroughVisited(AS);
              ++AnnotatedCnt;
              break;
            }
            // Don't care about other unreachable statements.
          }
        }
        // If there are no unreachable statements, this may be a special
        // case in CFG:
        // case X: {
        //   A a;  // A has a destructor.
        //   break;
        // }
        // // <<<< This place is represented by a 'hanging' CFG block.
        // case Y:
        continue;
      }

      const Stmt *LastStmt = getLastStmt(*P);
      if (const AttributedStmt *AS = asFallThroughAttr(LastStmt)) {
        markFallthroughVisited(AS);
        ++AnnotatedCnt;
        continue; // Fallthrough annotation, good.
      }

      if (!LastStmt) { // This block contains no executable statements.
        // Traverse its predecessors.
        std::copy(P->pred_begin(), P->pred_end(),
                  std::back_inserter(BlockQueue));
        continue;
      }

      ++UnannotatedCnt;
    }
    return !!UnannotatedCnt;
  }

  // RecursiveASTVisitor setup.
  bool shouldWalkTypesOfTypeLocs() const { return false; }

  bool VisitAttributedStmt(AttributedStmt *S) {
    if (asFallThroughAttr(S))
      FallthroughStmts.insert(S);
    return true;
  }

  bool VisitSwitchStmt(SwitchStmt *S) {
    FoundSwitchStatements = true;
    return true;
  }

  // We don't want to traverse local type declarations. We analyze their
  // methods separately.
  bool TraverseDecl(Decl *D) { return true; }

  // We analyze lambda bodies separately. Skip them here.
  bool TraverseLambdaExpr(LambdaExpr *LE) {
    // Traverse the captures, but not the body.
    for (const auto C : zip(LE->captures(), LE->capture_inits()))
      TraverseLambdaCapture(LE, &std::get<0>(C), std::get<1>(C));
    return true;
  }

private:

  static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
    if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(S)) {
      if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
        return AS;
    }
    return nullptr;
  }

  static const Stmt *getLastStmt(const CFGBlock &B) {
    if (const Stmt *Term = B.getTerminatorStmt())
      return Term;
    for (const CFGElement &Elem : llvm::reverse(B))
      if (std::optional<CFGStmt> CS = Elem.getAs<CFGStmt>())
        return CS->getStmt();
    // Workaround to detect a statement thrown out by CFGBuilder:
    //   case X: {} case Y:
    //   case X: ; case Y:
    if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(B.getLabel()))
      if (!isa<SwitchCase>(SW->getSubStmt()))
        return SW->getSubStmt();

    return nullptr;
  }

  bool FoundSwitchStatements;
  AttrStmts FallthroughStmts;
  Sema &S;
  llvm::SmallPtrSet<const CFGBlock *, 16> ReachableBlocks;
};
} // anonymous namespace

static StringRef getFallthroughAttrSpelling(Preprocessor &PP,
                                            SourceLocation Loc) {
  TokenValue FallthroughTokens[] = {
    tok::l_square, tok::l_square,
    PP.getIdentifierInfo("fallthrough"),
    tok::r_square, tok::r_square
  };

  TokenValue ClangFallthroughTokens[] = {
    tok::l_square, tok::l_square, PP.getIdentifierInfo("clang"),
    tok::coloncolon, PP.getIdentifierInfo("fallthrough"),
    tok::r_square, tok::r_square
  };

  bool PreferClangAttr = !PP.getLangOpts().CPlusPlus17 && !PP.getLangOpts().C23;

  StringRef MacroName;
  if (PreferClangAttr)
    MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
  if (MacroName.empty())
    MacroName = PP.getLastMacroWithSpelling(Loc, FallthroughTokens);
  if (MacroName.empty() && !PreferClangAttr)
    MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
  if (MacroName.empty()) {
    if (!PreferClangAttr)
      MacroName = "[[fallthrough]]";
    else if (PP.getLangOpts().CPlusPlus)
      MacroName = "[[clang::fallthrough]]";
    else
      MacroName = "__attribute__((fallthrough))";
  }
  return MacroName;
}

static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
                                            bool PerFunction) {
  FallthroughMapper FM(S);
  FM.TraverseStmt(AC.getBody());

  if (!FM.foundSwitchStatements())
    return;

  if (PerFunction && FM.getFallthroughStmts().empty())
    return;

  CFG *Cfg = AC.getCFG();

  if (!Cfg)
    return;

  FM.fillReachableBlocks(Cfg);

  for (const CFGBlock *B : llvm::reverse(*Cfg)) {
    const Stmt *Label = B->getLabel();

    if (!isa_and_nonnull<SwitchCase>(Label))
      continue;

    int AnnotatedCnt;

    bool IsTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(AC.getDecl()))
      IsTemplateInstantiation = Function->isTemplateInstantiation();
    if (!FM.checkFallThroughIntoBlock(*B, AnnotatedCnt,
                                      IsTemplateInstantiation))
      continue;

    S.Diag(Label->getBeginLoc(),
           PerFunction ? diag::warn_unannotated_fallthrough_per_function
                       : diag::warn_unannotated_fallthrough);

    if (!AnnotatedCnt) {
      SourceLocation L = Label->getBeginLoc();
      if (L.isMacroID())
        continue;

      const Stmt *Term = B->getTerminatorStmt();
      // Skip empty cases.
      while (B->empty() && !Term && B->succ_size() == 1) {
        B = *B->succ_begin();
        Term = B->getTerminatorStmt();
      }
      if (!(B->empty() && isa_and_nonnull<BreakStmt>(Term))) {
        Preprocessor &PP = S.getPreprocessor();
        StringRef AnnotationSpelling = getFallthroughAttrSpelling(PP, L);
        SmallString<64> TextToInsert(AnnotationSpelling);
        TextToInsert += "; ";
        S.Diag(L, diag::note_insert_fallthrough_fixit)
            << AnnotationSpelling
            << FixItHint::CreateInsertion(L, TextToInsert);
      }
      S.Diag(L, diag::note_insert_break_fixit)
          << FixItHint::CreateInsertion(L, "break; ");
    }
  }

  for (const auto *F : FM.getFallthroughStmts())
    S.Diag(F->getBeginLoc(), diag::err_fallthrough_attr_invalid_placement);
}
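
// For illustration only (hypothetical user code, not part of this file):
// under -Wimplicit-fallthrough, an unannotated fall-through between case
// labels is flagged, and the fixits above suggest either an annotation or a
// break:
//
//   switch (n) {
//   case 0:
//     ++n;              // warning: unannotated fall-through
//   case 1:
//     ++n;
//     [[fallthrough]];  // explicitly annotated, no warning
//   case 2:
//     break;
//   }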

static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM,
                     const Stmt *S) {
  assert(S);

  do {
    switch (S->getStmtClass()) {
    case Stmt::ForStmtClass:
    case Stmt::WhileStmtClass:
    case Stmt::CXXForRangeStmtClass:
    case Stmt::ObjCForCollectionStmtClass:
      return true;
    case Stmt::DoStmtClass: {
      Expr::EvalResult Result;
      if (!cast<DoStmt>(S)->getCond()->EvaluateAsInt(Result, Ctx))
        return true;
      return Result.Val.getInt().getBoolValue();
    }
    default:
      break;
    }
  } while ((S = PM.getParent(S)));

  return false;
}

static void diagnoseRepeatedUseOfWeak(Sema &S,
                                      const sema::FunctionScopeInfo *CurFn,
                                      const Decl *D,
                                      const ParentMap &PM) {
  typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy;
  typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap;
  typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector;
  typedef std::pair<const Stmt *, WeakObjectUseMap::const_iterator>
      StmtUsesPair;

  ASTContext &Ctx = S.getASTContext();

  const WeakObjectUseMap &WeakMap = CurFn->getWeakObjectUses();

  // Extract all weak objects that are referenced more than once.
  SmallVector<StmtUsesPair, 8> UsesByStmt;
  for (WeakObjectUseMap::const_iterator I = WeakMap.begin(), E = WeakMap.end();
       I != E; ++I) {
    const WeakUseVector &Uses = I->second;

    // Find the first read of the weak object.
    WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
    for ( ; UI != UE; ++UI) {
      if (UI->isUnsafe())
        break;
    }

    // If there were only writes to this object, don't warn.
    if (UI == UE)
      continue;

    // If there was only one read, followed by any number of writes, and the
    // read is not within a loop, don't warn. Additionally, don't warn in a
    // loop if the base object is a local variable -- local variables are often
    // changed in loops.
    if (UI == Uses.begin()) {
      WeakUseVector::const_iterator UI2 = UI;
      for (++UI2; UI2 != UE; ++UI2)
        if (UI2->isUnsafe())
          break;

      if (UI2 == UE) {
        if (!isInLoop(Ctx, PM, UI->getUseExpr()))
          continue;

        const WeakObjectProfileTy &Profile = I->first;
        if (!Profile.isExactProfile())
          continue;

        const NamedDecl *Base = Profile.getBase();
        if (!Base)
          Base = Profile.getProperty();
        assert(Base && "A profile always has a base or property.");

        if (const VarDecl *BaseVar = dyn_cast<VarDecl>(Base))
          if (BaseVar->hasLocalStorage() && !isa<ParmVarDecl>(Base))
            continue;
      }
    }

    UsesByStmt.push_back(StmtUsesPair(UI->getUseExpr(), I));
  }

  if (UsesByStmt.empty())
    return;

  // Sort by first use so that we emit the warnings in a deterministic order.
  SourceManager &SM = S.getSourceManager();
  llvm::sort(UsesByStmt,
             [&SM](const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
               return SM.isBeforeInTranslationUnit(LHS.first->getBeginLoc(),
                                                   RHS.first->getBeginLoc());
             });

  // Classify the current code body for better warning text.
  // This enum should stay in sync with the cases in
  // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
  // FIXME: Should we use a common classification enum and the same set of
  // possibilities all throughout Sema?
  enum {
    Function,
    Method,
    Block,
    Lambda
  } FunctionKind;

  if (isa<sema::BlockScopeInfo>(CurFn))
    FunctionKind = Block;
  else if (isa<sema::LambdaScopeInfo>(CurFn))
    FunctionKind = Lambda;
  else if (isa<ObjCMethodDecl>(D))
    FunctionKind = Method;
  else
    FunctionKind = Function;

  // Iterate through the sorted problems and emit warnings for each.
  for (const auto &P : UsesByStmt) {
    const Stmt *FirstRead = P.first;
    const WeakObjectProfileTy &Key = P.second->first;
    const WeakUseVector &Uses = P.second->second;

    // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy
    // may not contain enough information to determine that these are different
    // properties. We can only be 100% sure of a repeated use in certain cases,
    // and we adjust the diagnostic kind accordingly so that the less certain
    // case can be turned off if it is too noisy.
    unsigned DiagKind;
    if (Key.isExactProfile())
      DiagKind = diag::warn_arc_repeated_use_of_weak;
    else
      DiagKind = diag::warn_arc_possible_repeated_use_of_weak;

    // Classify the weak object being accessed for better warning text.
    // This enum should stay in sync with the cases in
    // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
    enum {
      Variable,
      Property,
      ImplicitProperty,
      Ivar
    } ObjectKind;

    const NamedDecl *KeyProp = Key.getProperty();
    if (isa<VarDecl>(KeyProp))
      ObjectKind = Variable;
    else if (isa<ObjCPropertyDecl>(KeyProp))
      ObjectKind = Property;
    else if (isa<ObjCMethodDecl>(KeyProp))
      ObjectKind = ImplicitProperty;
    else if (isa<ObjCIvarDecl>(KeyProp))
      ObjectKind = Ivar;
    else
      llvm_unreachable("Unexpected weak object kind!");

    // Do not warn about IBOutlet weak property receivers being set to null
    // since they are typically only used from the main thread.
    if (const ObjCPropertyDecl *Prop = dyn_cast<ObjCPropertyDecl>(KeyProp))
      if (Prop->hasAttr<IBOutletAttr>())
        continue;

    // Show the first time the object was read.
    S.Diag(FirstRead->getBeginLoc(), DiagKind)
        << int(ObjectKind) << KeyProp << int(FunctionKind)
        << FirstRead->getSourceRange();

    // Print all the other accesses as notes.
    for (const auto &Use : Uses) {
      if (Use.getUseExpr() == FirstRead)
        continue;
      S.Diag(Use.getUseExpr()->getBeginLoc(),
             diag::note_arc_weak_also_accessed_here)
          << Use.getUseExpr()->getSourceRange();
    }
  }
}
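
// For illustration only (hypothetical Objective-C user code, not part of this
// file): reading a __weak property twice can observe two different values if
// the referent is deallocated in between, which is what the check above
// reports:
//
//   [self.weakDelegate prepare];   // first read
//   [self.weakDelegate fire];      // warning: weak property accessed multiple
//                                  // times; read it once into a strong local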
1528
1529
namespace clang {
1530
namespace {
1531
typedef SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
1532
typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
1533
typedef std::list<DelayedDiag> DiagList;
1534
1535
struct SortDiagBySourceLocation {
1536
SourceManager &SM;
1537
SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
1538
1539
bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
1540
// Although this call will be slow, this is only called when outputting
1541
// multiple warnings.
1542
return SM.isBeforeInTranslationUnit(left.first.first, right.first.first);
1543
}
1544
};
1545
} // anonymous namespace
1546
} // namespace clang
1547
1548
namespace {
1549
class UninitValsDiagReporter : public UninitVariablesHandler {
1550
Sema &S;
1551
typedef SmallVector<UninitUse, 2> UsesVec;
1552
typedef llvm::PointerIntPair<UsesVec *, 1, bool> MappedType;
1553
// Prefer using MapVector to DenseMap, so that iteration order will be
1554
// the same as insertion order. This is needed to obtain a deterministic
1555
// order of diagnostics when calling flushDiagnostics().
1556
typedef llvm::MapVector<const VarDecl *, MappedType> UsesMap;
1557
UsesMap uses;
1558
UsesMap constRefUses;
1559
1560
public:
1561
UninitValsDiagReporter(Sema &S) : S(S) {}
1562
~UninitValsDiagReporter() override { flushDiagnostics(); }
1563
1564
MappedType &getUses(UsesMap &um, const VarDecl *vd) {
1565
MappedType &V = um[vd];
1566
if (!V.getPointer())
1567
V.setPointer(new UsesVec());
1568
return V;
1569
}

  void handleUseOfUninitVariable(const VarDecl *vd,
                                 const UninitUse &use) override {
    getUses(uses, vd).getPointer()->push_back(use);
  }

  void handleConstRefUseOfUninitVariable(const VarDecl *vd,
                                         const UninitUse &use) override {
    getUses(constRefUses, vd).getPointer()->push_back(use);
  }

  void handleSelfInit(const VarDecl *vd) override {
    getUses(uses, vd).setInt(true);
    getUses(constRefUses, vd).setInt(true);
  }

  void flushDiagnostics() {
    for (const auto &P : uses) {
      const VarDecl *vd = P.first;
      const MappedType &V = P.second;

      UsesVec *vec = V.getPointer();
      bool hasSelfInit = V.getInt();

      // Specially handle the case where we have uses of an uninitialized
      // variable, but the root cause is an idiomatic self-init. We want
      // to report the diagnostic at the self-init since that is the root
      // cause.
      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
        DiagnoseUninitializedUse(S, vd,
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
                                           /* isAlwaysUninit */ true),
                                 /* alwaysReportSelfInit */ true);
      else {
        // Sort the uses by their SourceLocations. While not strictly
        // guaranteed to produce them in line/column order, this will provide
        // a stable ordering.
        llvm::sort(*vec, [](const UninitUse &a, const UninitUse &b) {
          // Prefer a more confident report over a less confident one.
          if (a.getKind() != b.getKind())
            return a.getKind() > b.getKind();
          return a.getUser()->getBeginLoc() < b.getUser()->getBeginLoc();
        });

        for (const auto &U : *vec) {
          // If we have self-init, downgrade all uses to 'may be uninitialized'.
          UninitUse Use = hasSelfInit ? UninitUse(U.getUser(), false) : U;

          if (DiagnoseUninitializedUse(S, vd, Use))
            // Skip further diagnostics for this variable. We try to warn only
            // on the first point at which a variable is used uninitialized.
            break;
        }
      }

      // Release the uses vector.
      delete vec;
    }

    uses.clear();

    // Flush all diagnostics for const reference uses.
    for (const auto &P : constRefUses) {
      const VarDecl *vd = P.first;
      const MappedType &V = P.second;

      UsesVec *vec = V.getPointer();
      bool hasSelfInit = V.getInt();

      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
        DiagnoseUninitializedUse(S, vd,
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
                                           /* isAlwaysUninit */ true),
                                 /* alwaysReportSelfInit */ true);
      else {
        for (const auto &U : *vec) {
          if (DiagnoseUninitializedConstRefUse(S, vd, U))
            break;
        }
      }

      // Release the uses vector.
      delete vec;
    }

    constRefUses.clear();
  }

private:
  static bool hasAlwaysUninitializedUse(const UsesVec *vec) {
    return llvm::any_of(*vec, [](const UninitUse &U) {
      return U.getKind() == UninitUse::Always ||
             U.getKind() == UninitUse::AfterCall ||
             U.getKind() == UninitUse::AfterDecl;
    });
  }
};
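
// Illustrative sketch (editorial, not part of the original source): the
// reporter above drives the -Wuninitialized family of diagnostics. Assuming
// hypothetical helpers `use` and `read`, code like the following exercises
// all three handlers:
//
//   void use(int);
//   void read(const int &);
//   void f() {
//     int x = x;  // handleSelfInit: idiomatic self-initialization
//     int y;
//     use(y);     // handleUseOfUninitVariable
//     int z;
//     read(z);    // handleConstRefUseOfUninitVariable
//   }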

/// Inter-procedural data for the called-once checker.
class CalledOnceInterProceduralData {
public:
  // Add the delayed warning for the given block.
  void addDelayedWarning(const BlockDecl *Block,
                         PartialDiagnosticAt &&Warning) {
    DelayedBlockWarnings[Block].emplace_back(std::move(Warning));
  }
  // Report all of the warnings we've gathered for the given block.
  void flushWarnings(const BlockDecl *Block, Sema &S) {
    for (const PartialDiagnosticAt &Delayed : DelayedBlockWarnings[Block])
      S.Diag(Delayed.first, Delayed.second);

    discardWarnings(Block);
  }
  // Discard all of the warnings we've gathered for the given block.
  void discardWarnings(const BlockDecl *Block) {
    DelayedBlockWarnings.erase(Block);
  }

private:
  using DelayedDiagnostics = SmallVector<PartialDiagnosticAt, 2>;
  llvm::DenseMap<const BlockDecl *, DelayedDiagnostics> DelayedBlockWarnings;
};

class CalledOnceCheckReporter : public CalledOnceCheckHandler {
public:
  CalledOnceCheckReporter(Sema &S, CalledOnceInterProceduralData &Data)
      : S(S), Data(Data) {}
  void handleDoubleCall(const ParmVarDecl *Parameter, const Expr *Call,
                        const Expr *PrevCall, bool IsCompletionHandler,
                        bool Poised) override {
    auto DiagToReport = IsCompletionHandler
                            ? diag::warn_completion_handler_called_twice
                            : diag::warn_called_once_gets_called_twice;
    S.Diag(Call->getBeginLoc(), DiagToReport) << Parameter;
    S.Diag(PrevCall->getBeginLoc(), diag::note_called_once_gets_called_twice)
        << Poised;
  }

  void handleNeverCalled(const ParmVarDecl *Parameter,
                         bool IsCompletionHandler) override {
    auto DiagToReport = IsCompletionHandler
                            ? diag::warn_completion_handler_never_called
                            : diag::warn_called_once_never_called;
    S.Diag(Parameter->getBeginLoc(), DiagToReport)
        << Parameter << /* Captured */ false;
  }

  void handleNeverCalled(const ParmVarDecl *Parameter, const Decl *Function,
                         const Stmt *Where, NeverCalledReason Reason,
                         bool IsCalledDirectly,
                         bool IsCompletionHandler) override {
    auto DiagToReport = IsCompletionHandler
                            ? diag::warn_completion_handler_never_called_when
                            : diag::warn_called_once_never_called_when;
    PartialDiagnosticAt Warning(Where->getBeginLoc(), S.PDiag(DiagToReport)
                                                          << Parameter
                                                          << IsCalledDirectly
                                                          << (unsigned)Reason);

    if (const auto *Block = dyn_cast<BlockDecl>(Function)) {
      // We shouldn't report these warnings on blocks immediately.
      Data.addDelayedWarning(Block, std::move(Warning));
    } else {
      S.Diag(Warning.first, Warning.second);
    }
  }

  void handleCapturedNeverCalled(const ParmVarDecl *Parameter,
                                 const Decl *Where,
                                 bool IsCompletionHandler) override {
    auto DiagToReport = IsCompletionHandler
                            ? diag::warn_completion_handler_never_called
                            : diag::warn_called_once_never_called;
    S.Diag(Where->getBeginLoc(), DiagToReport)
        << Parameter << /* Captured */ true;
  }

  void
  handleBlockThatIsGuaranteedToBeCalledOnce(const BlockDecl *Block) override {
    Data.flushWarnings(Block, S);
  }

  void handleBlockWithNoGuarantees(const BlockDecl *Block) override {
    Data.discardWarnings(Block);
  }

private:
  Sema &S;
  CalledOnceInterProceduralData &Data;
};
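
// Illustrative sketch (editorial, not part of the original source): the
// called-once checker targets parameters annotated with the `called_once`
// attribute (and, by convention, completion-handler parameters). A
// hypothetical block-based example:
//
//   void schedule(void (^callback)(void) __attribute__((called_once))) {
//     callback();
//     callback();  // handleDoubleCall: the parameter is invoked twice
//   }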

constexpr unsigned CalledOnceWarnings[] = {
    diag::warn_called_once_never_called,
    diag::warn_called_once_never_called_when,
    diag::warn_called_once_gets_called_twice};

constexpr unsigned CompletionHandlerWarnings[]{
    diag::warn_completion_handler_never_called,
    diag::warn_completion_handler_never_called_when,
    diag::warn_completion_handler_called_twice};

bool shouldAnalyzeCalledOnceImpl(llvm::ArrayRef<unsigned> DiagIDs,
                                 const DiagnosticsEngine &Diags,
                                 SourceLocation At) {
  return llvm::any_of(DiagIDs, [&Diags, At](unsigned DiagID) {
    return !Diags.isIgnored(DiagID, At);
  });
}

bool shouldAnalyzeCalledOnceConventions(const DiagnosticsEngine &Diags,
                                        SourceLocation At) {
  return shouldAnalyzeCalledOnceImpl(CompletionHandlerWarnings, Diags, At);
}

bool shouldAnalyzeCalledOnceParameters(const DiagnosticsEngine &Diags,
                                       SourceLocation At) {
  return shouldAnalyzeCalledOnceImpl(CalledOnceWarnings, Diags, At) ||
         shouldAnalyzeCalledOnceConventions(Diags, At);
}
} // anonymous namespace

//===----------------------------------------------------------------------===//
// -Wthread-safety
//===----------------------------------------------------------------------===//
namespace clang {
namespace threadSafety {
namespace {
class ThreadSafetyReporter : public clang::threadSafety::ThreadSafetyHandler {
  Sema &S;
  DiagList Warnings;
  SourceLocation FunLocation, FunEndLocation;

  const FunctionDecl *CurrentFunction;
  bool Verbose;

  OptionalNotes getNotes() const {
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      return OptionalNotes(1, FNote);
    }
    return OptionalNotes();
  }

  OptionalNotes getNotes(const PartialDiagnosticAt &Note) const {
    OptionalNotes ONS(1, Note);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      ONS.push_back(std::move(FNote));
    }
    return ONS;
  }

  OptionalNotes getNotes(const PartialDiagnosticAt &Note1,
                         const PartialDiagnosticAt &Note2) const {
    OptionalNotes ONS;
    ONS.push_back(Note1);
    ONS.push_back(Note2);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      ONS.push_back(std::move(FNote));
    }
    return ONS;
  }

  OptionalNotes makeLockedHereNote(SourceLocation LocLocked, StringRef Kind) {
    return LocLocked.isValid()
               ? getNotes(PartialDiagnosticAt(
                     LocLocked, S.PDiag(diag::note_locked_here) << Kind))
               : getNotes();
  }

  OptionalNotes makeUnlockedHereNote(SourceLocation LocUnlocked,
                                     StringRef Kind) {
    return LocUnlocked.isValid()
               ? getNotes(PartialDiagnosticAt(
                     LocUnlocked, S.PDiag(diag::note_unlocked_here) << Kind))
               : getNotes();
  }

public:
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
      : S(S), FunLocation(FL), FunEndLocation(FEL),
        CurrentFunction(nullptr), Verbose(false) {}

  void setVerbose(bool b) { Verbose = b; }

  /// Emit all buffered diagnostics in order of source location.
  /// We need to output diagnostics produced while iterating through
  /// the lockset in deterministic order, so this function orders diagnostics
  /// and outputs them.
  void emitDiagnostics() {
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
    for (const auto &Diag : Warnings) {
      S.Diag(Diag.first.first, Diag.first.second);
      for (const auto &Note : Diag.second)
        S.Diag(Note.first, Note.second);
    }
  }

  void handleInvalidLockExp(SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_cannot_resolve_lock)
                                         << Loc);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleUnmatchedUnlock(StringRef Kind, Name LockName, SourceLocation Loc,
                             SourceLocation LocPreviousUnlock) override {
    if (Loc.isInvalid())
      Loc = FunLocation;
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_unlock_but_no_lock)
                                         << Kind << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeUnlockedHereNote(LocPreviousUnlock, Kind));
  }

  void handleIncorrectUnlockKind(StringRef Kind, Name LockName,
                                 LockKind Expected, LockKind Received,
                                 SourceLocation LocLocked,
                                 SourceLocation LocUnlock) override {
    if (LocUnlock.isInvalid())
      LocUnlock = FunLocation;
    PartialDiagnosticAt Warning(
        LocUnlock, S.PDiag(diag::warn_unlock_kind_mismatch)
                       << Kind << LockName << Received << Expected);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  void handleDoubleLock(StringRef Kind, Name LockName,
                        SourceLocation LocLocked,
                        SourceLocation LocDoubleLock) override {
    if (LocDoubleLock.isInvalid())
      LocDoubleLock = FunLocation;
    PartialDiagnosticAt Warning(LocDoubleLock, S.PDiag(diag::warn_double_lock)
                                                   << Kind << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }
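
  // Illustrative sketch (editorial, not part of the original source): with a
  // hypothetical capability-annotated `Mutex mu`, a sequence like
  //
  //   mu.Lock();
  //   mu.Lock();  // handleDoubleLock: 'mu' is already held
  //
  // reports warn_double_lock at the second acquisition, with a "locked here"
  // note at the first one when that location is valid.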

  void handleMutexHeldEndOfScope(StringRef Kind, Name LockName,
                                 SourceLocation LocLocked,
                                 SourceLocation LocEndOfScope,
                                 LockErrorKind LEK) override {
    unsigned DiagID = 0;
    switch (LEK) {
    case LEK_LockedSomePredecessors:
      DiagID = diag::warn_lock_some_predecessors;
      break;
    case LEK_LockedSomeLoopIterations:
      DiagID = diag::warn_expecting_lock_held_on_loop;
      break;
    case LEK_LockedAtEndOfFunction:
      DiagID = diag::warn_no_unlock;
      break;
    case LEK_NotLockedAtEndOfFunction:
      DiagID = diag::warn_expecting_locked;
      break;
    }
    if (LocEndOfScope.isInvalid())
      LocEndOfScope = FunEndLocation;

    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << Kind
                                                               << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  void handleExclusiveAndShared(StringRef Kind, Name LockName,
                                SourceLocation Loc1,
                                SourceLocation Loc2) override {
    PartialDiagnosticAt Warning(Loc1,
                                S.PDiag(diag::warn_lock_exclusive_and_shared)
                                    << Kind << LockName);
    PartialDiagnosticAt Note(Loc2,
                             S.PDiag(diag::note_lock_exclusive_and_shared)
                                 << Kind << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes(Note));
  }

  void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK,
                         AccessKind AK, SourceLocation Loc) override {
    assert((POK == POK_VarAccess || POK == POK_VarDereference) &&
           "Only works for variables");
    unsigned DiagID = POK == POK_VarAccess
                          ? diag::warn_variable_requires_any_lock
                          : diag::warn_var_deref_requires_any_lock;
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(DiagID) << D << getLockKindFromAccessKind(AK));
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleMutexNotHeld(StringRef Kind, const NamedDecl *D,
                          ProtectedOperationKind POK, Name LockName,
                          LockKind LK, SourceLocation Loc,
                          Name *PossibleMatch) override {
    unsigned DiagID = 0;
    if (PossibleMatch) {
      switch (POK) {
      case POK_VarAccess:
        DiagID = diag::warn_variable_requires_lock_precise;
        break;
      case POK_VarDereference:
        DiagID = diag::warn_var_deref_requires_lock_precise;
        break;
      case POK_FunctionCall:
        DiagID = diag::warn_fun_requires_lock_precise;
        break;
      case POK_PassByRef:
        DiagID = diag::warn_guarded_pass_by_reference;
        break;
      case POK_PtPassByRef:
        DiagID = diag::warn_pt_guarded_pass_by_reference;
        break;
      case POK_ReturnByRef:
        DiagID = diag::warn_guarded_return_by_reference;
        break;
      case POK_PtReturnByRef:
        DiagID = diag::warn_pt_guarded_return_by_reference;
        break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind << D
                                                       << LockName << LK);
      PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match)
                                        << *PossibleMatch);
      if (Verbose && POK == POK_VarAccess) {
        PartialDiagnosticAt VNote(D->getLocation(),
                                  S.PDiag(diag::note_guarded_by_declared_here)
                                      << D->getDeclName());
        Warnings.emplace_back(std::move(Warning), getNotes(Note, VNote));
      } else
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
    } else {
      switch (POK) {
      case POK_VarAccess:
        DiagID = diag::warn_variable_requires_lock;
        break;
      case POK_VarDereference:
        DiagID = diag::warn_var_deref_requires_lock;
        break;
      case POK_FunctionCall:
        DiagID = diag::warn_fun_requires_lock;
        break;
      case POK_PassByRef:
        DiagID = diag::warn_guarded_pass_by_reference;
        break;
      case POK_PtPassByRef:
        DiagID = diag::warn_pt_guarded_pass_by_reference;
        break;
      case POK_ReturnByRef:
        DiagID = diag::warn_guarded_return_by_reference;
        break;
      case POK_PtReturnByRef:
        DiagID = diag::warn_pt_guarded_return_by_reference;
        break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind << D
                                                       << LockName << LK);
      if (Verbose && POK == POK_VarAccess) {
        PartialDiagnosticAt Note(D->getLocation(),
                                 S.PDiag(diag::note_guarded_by_declared_here));
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
      } else
        Warnings.emplace_back(std::move(Warning), getNotes());
    }
  }
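
  // Illustrative sketch (editorial, not part of the original source): a
  // hypothetical guarded_by example that reaches the POK_VarAccess case above:
  //
  //   Mutex mu;
  //   int data __attribute__((guarded_by(mu)));
  //   void f() { data = 1; }  // handleMutexNotHeld:
  //                           //   warn_variable_requires_lock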

  void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(diag::warn_acquire_requires_negative_cap)
                 << Kind << LockName << Neg);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleNegativeNotHeld(const NamedDecl *D, Name LockName,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(diag::warn_fun_requires_negative_cap) << D << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_fun_excludes_mutex)
                                         << Kind << FunName << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleLockAcquiredBefore(StringRef Kind, Name L1Name, Name L2Name,
                                SourceLocation Loc) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(diag::warn_acquired_before) << Kind << L1Name << L2Name);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleBeforeAfterCycle(Name L1Name, SourceLocation Loc) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(diag::warn_acquired_before_after_cycle) << L1Name);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void enterFunction(const FunctionDecl *FD) override {
    CurrentFunction = FD;
  }

  void leaveFunction(const FunctionDecl *FD) override {
    CurrentFunction = nullptr;
  }
};
} // anonymous namespace
} // namespace threadSafety
} // namespace clang

//===----------------------------------------------------------------------===//
// -Wconsumed
//===----------------------------------------------------------------------===//

namespace clang {
namespace consumed {
namespace {
class ConsumedWarningsHandler : public ConsumedWarningsHandlerBase {
  Sema &S;
  DiagList Warnings;

public:
  ConsumedWarningsHandler(Sema &S) : S(S) {}

  void emitDiagnostics() override {
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
    for (const auto &Diag : Warnings) {
      S.Diag(Diag.first.first, Diag.first.second);
      for (const auto &Note : Diag.second)
        S.Diag(Note.first, Note.second);
    }
  }

  void warnLoopStateMismatch(SourceLocation Loc,
                             StringRef VariableName) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_loop_state_mismatch)
                                         << VariableName);

    Warnings.emplace_back(std::move(Warning), OptionalNotes());
  }

  void warnParamReturnTypestateMismatch(SourceLocation Loc,
                                        StringRef VariableName,
                                        StringRef ExpectedState,
                                        StringRef ObservedState) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(diag::warn_param_return_typestate_mismatch)
                 << VariableName << ExpectedState << ObservedState);

    Warnings.emplace_back(std::move(Warning), OptionalNotes());
  }

  void warnParamTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
                                  StringRef ObservedState) override {
    PartialDiagnosticAt Warning(Loc,
                                S.PDiag(diag::warn_param_typestate_mismatch)
                                    << ExpectedState << ObservedState);

    Warnings.emplace_back(std::move(Warning), OptionalNotes());
  }

  void warnReturnTypestateForUnconsumableType(SourceLocation Loc,
                                              StringRef TypeName) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(diag::warn_return_typestate_for_unconsumable_type)
                 << TypeName);

    Warnings.emplace_back(std::move(Warning), OptionalNotes());
  }

  void warnReturnTypestateMismatch(SourceLocation Loc,
                                   StringRef ExpectedState,
                                   StringRef ObservedState) override {
    PartialDiagnosticAt Warning(Loc,
                                S.PDiag(diag::warn_return_typestate_mismatch)
                                    << ExpectedState << ObservedState);

    Warnings.emplace_back(std::move(Warning), OptionalNotes());
  }

  void warnUseOfTempInInvalidState(StringRef MethodName, StringRef State,
                                   SourceLocation Loc) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(diag::warn_use_of_temp_in_invalid_state)
                 << MethodName << State);

    Warnings.emplace_back(std::move(Warning), OptionalNotes());
  }

  void warnUseInInvalidState(StringRef MethodName, StringRef VariableName,
                             StringRef State, SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_use_in_invalid_state)
                                         << MethodName << VariableName
                                         << State);

    Warnings.emplace_back(std::move(Warning), OptionalNotes());
  }
};
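
// Illustrative sketch (editorial, not part of the original source):
// consumed-analysis diagnostics are driven by the consumable/typestate
// attributes, e.g.
//
//   class __attribute__((consumable(unconsumed))) File {
//   public:
//     void read() __attribute__((callable_when("unconsumed")));
//     void close() __attribute__((set_typestate(consumed)));
//   };
//   void f(File f) { f.close(); f.read(); }  // warnUseInInvalidState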
} // anonymous namespace
} // namespace consumed
} // namespace clang

//===----------------------------------------------------------------------===//
// Unsafe buffer usage analysis.
//===----------------------------------------------------------------------===//

namespace {
class UnsafeBufferUsageReporter : public UnsafeBufferUsageHandler {
  Sema &S;
  bool SuggestSuggestions; // Recommend -fsafe-buffer-usage-suggestions?

  // Lists as a string the names of variables in `VarGroupForVD` except for
  // `VD` itself:
  std::string listVariableGroupAsString(
      const VarDecl *VD,
      const ArrayRef<const VarDecl *> &VarGroupForVD) const {
    if (VarGroupForVD.size() <= 1)
      return "";

    std::vector<StringRef> VarNames;
    auto PutInQuotes = [](StringRef S) -> std::string {
      return "'" + S.str() + "'";
    };

    for (auto *V : VarGroupForVD) {
      if (V == VD)
        continue;
      VarNames.push_back(V->getName());
    }
    if (VarNames.size() == 1) {
      return PutInQuotes(VarNames[0]);
    }
    if (VarNames.size() == 2) {
      return PutInQuotes(VarNames[0]) + " and " + PutInQuotes(VarNames[1]);
    }
    assert(VarGroupForVD.size() > 3);
    // We need to print the last two names as "..., X, and Y":
    const unsigned N = VarNames.size() - 2;
    std::string AllVars = "";

    for (unsigned I = 0; I < N; ++I)
      AllVars.append(PutInQuotes(VarNames[I]) + ", ");
    AllVars.append(PutInQuotes(VarNames[N]) + ", and " +
                   PutInQuotes(VarNames[N + 1]));
    return AllVars;
  }
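
  // For reference (editorial note derived from the logic above): excluding
  // `VD` itself, one remaining name yields "'b'", two yield "'b' and 'c'",
  // and larger groups yield an Oxford-comma list like "'b', 'c', and 'd'".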

public:
  UnsafeBufferUsageReporter(Sema &S, bool SuggestSuggestions)
      : S(S), SuggestSuggestions(SuggestSuggestions) {}

  void handleUnsafeOperation(const Stmt *Operation, bool IsRelatedToDecl,
                             ASTContext &Ctx) override {
    SourceLocation Loc;
    SourceRange Range;
    unsigned MsgParam = 0;
    if (const auto *ASE = dyn_cast<ArraySubscriptExpr>(Operation)) {
      Loc = ASE->getBase()->getExprLoc();
      Range = ASE->getBase()->getSourceRange();
      MsgParam = 2;
    } else if (const auto *BO = dyn_cast<BinaryOperator>(Operation)) {
      BinaryOperator::Opcode Op = BO->getOpcode();
      if (Op == BO_Add || Op == BO_AddAssign || Op == BO_Sub ||
          Op == BO_SubAssign) {
        if (BO->getRHS()->getType()->isIntegerType()) {
          Loc = BO->getLHS()->getExprLoc();
          Range = BO->getLHS()->getSourceRange();
        } else {
          Loc = BO->getRHS()->getExprLoc();
          Range = BO->getRHS()->getSourceRange();
        }
        MsgParam = 1;
      }
    } else if (const auto *UO = dyn_cast<UnaryOperator>(Operation)) {
      UnaryOperator::Opcode Op = UO->getOpcode();
      if (Op == UO_PreInc || Op == UO_PreDec || Op == UO_PostInc ||
          Op == UO_PostDec) {
        Loc = UO->getSubExpr()->getExprLoc();
        Range = UO->getSubExpr()->getSourceRange();
        MsgParam = 1;
      }
    } else {
      if (isa<CallExpr>(Operation) || isa<CXXConstructExpr>(Operation)) {
        // note_unsafe_buffer_operation doesn't have this mode yet.
        assert(!IsRelatedToDecl && "Not implemented yet!");
        MsgParam = 3;
      } else if (const auto *ECE = dyn_cast<ExplicitCastExpr>(Operation)) {
        QualType destType = ECE->getType();
        if (!isa<PointerType>(destType))
          return;

        const uint64_t dSize =
            Ctx.getTypeSize(destType.getTypePtr()->getPointeeType());

        QualType srcType = ECE->getSubExpr()->getType();
        const uint64_t sSize =
            Ctx.getTypeSize(srcType.getTypePtr()->getPointeeType());
        if (sSize >= dSize)
          return;

        MsgParam = 4;
      }
      Loc = Operation->getBeginLoc();
      Range = Operation->getSourceRange();
    }
    if (IsRelatedToDecl) {
      assert(!SuggestSuggestions &&
             "Variables blamed for unsafe buffer usage without suggestions!");
      S.Diag(Loc, diag::note_unsafe_buffer_operation) << MsgParam << Range;
    } else {
      S.Diag(Loc, diag::warn_unsafe_buffer_operation) << MsgParam << Range;
      if (SuggestSuggestions) {
        S.Diag(Loc, diag::note_safe_buffer_usage_suggestions_disabled);
      }
    }
  }
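
  // Illustrative sketch (editorial, not part of the original source):
  // operations the handler above classifies, for a hypothetical `int *p`
  // and `size_t i`:
  //
  //   p[i];           // ArraySubscriptExpr    -> MsgParam = 2
  //   p + i; p += i;  // pointer add/sub       -> MsgParam = 1
  //   p++; --p;       // increment/decrement   -> MsgParam = 1
  //   (long *)p;      // pointee-widening cast -> MsgParam = 4
  //                   //   (on typical 64-bit targets, long is wider than int)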

  void handleUnsafeOperationInContainer(const Stmt *Operation,
                                        bool IsRelatedToDecl,
                                        ASTContext &Ctx) override {
    SourceLocation Loc;
    SourceRange Range;
    unsigned MsgParam = 0;

    // This function only handles SpanTwoParamConstructorGadget so far, which
    // always gives a CXXConstructExpr.
    const auto *CtorExpr = cast<CXXConstructExpr>(Operation);
    Loc = CtorExpr->getLocation();

    S.Diag(Loc, diag::warn_unsafe_buffer_usage_in_container);
    if (IsRelatedToDecl) {
      assert(!SuggestSuggestions &&
             "Variables blamed for unsafe buffer usage without suggestions!");
      S.Diag(Loc, diag::note_unsafe_buffer_operation) << MsgParam << Range;
    }
  }

  void handleUnsafeVariableGroup(const VarDecl *Variable,
                                 const VariableGroupsManager &VarGrpMgr,
                                 FixItList &&Fixes, const Decl *D,
                                 const FixitStrategy &VarTargetTypes) override {
    assert(!SuggestSuggestions &&
           "Unsafe buffer usage fixits displayed without suggestions!");
    S.Diag(Variable->getLocation(), diag::warn_unsafe_buffer_variable)
        << Variable << (Variable->getType()->isPointerType() ? 0 : 1)
        << Variable->getSourceRange();
    if (!Fixes.empty()) {
      assert(isa<NamedDecl>(D) &&
             "Fix-its are generated only for `NamedDecl`s");
      const NamedDecl *ND = cast<NamedDecl>(D);
      bool BriefMsg = false;
      // If the variable group involves parameters, the diagnostic message
      // will NOT explain how the variables are grouped, as the reason is
      // non-trivial and irrelevant to users' experience:
      const auto VarGroupForVD = VarGrpMgr.getGroupOfVar(Variable, &BriefMsg);
      unsigned FixItStrategy = 0;
      switch (VarTargetTypes.lookup(Variable)) {
      case clang::FixitStrategy::Kind::Span:
        FixItStrategy = 0;
        break;
      case clang::FixitStrategy::Kind::Array:
        FixItStrategy = 1;
        break;
      default:
        assert(false && "We support only std::span and std::array");
      }

      const auto &FD =
          S.Diag(Variable->getLocation(),
                 BriefMsg ? diag::note_unsafe_buffer_variable_fixit_together
                          : diag::note_unsafe_buffer_variable_fixit_group);

      FD << Variable << FixItStrategy;
      FD << listVariableGroupAsString(Variable, VarGroupForVD)
         << (VarGroupForVD.size() > 1) << ND;
      for (const auto &F : Fixes) {
        FD << F;
      }
    }

#ifndef NDEBUG
    if (areDebugNotesRequested())
      for (const DebugNote &Note : DebugNotesByVar[Variable])
        S.Diag(Note.first, diag::note_safe_buffer_debug_mode) << Note.second;
#endif
  }

  bool isSafeBufferOptOut(const SourceLocation &Loc) const override {
    return S.PP.isSafeBufferOptOut(S.getSourceManager(), Loc);
  }

  bool ignoreUnsafeBufferInContainer(
      const SourceLocation &Loc) const override {
    return S.Diags.isIgnored(diag::warn_unsafe_buffer_usage_in_container, Loc);
  }

  // Returns the text representation of the clang::unsafe_buffer_usage
  // attribute. `WSSuffix` holds customized "white-space"s, e.g., newline or
  // whitespace characters.
  std::string
  getUnsafeBufferUsageAttributeTextAt(SourceLocation Loc,
                                      StringRef WSSuffix = "") const override {
    Preprocessor &PP = S.getPreprocessor();
    TokenValue ClangUnsafeBufferUsageTokens[] = {
        tok::l_square,
        tok::l_square,
        PP.getIdentifierInfo("clang"),
        tok::coloncolon,
        PP.getIdentifierInfo("unsafe_buffer_usage"),
        tok::r_square,
        tok::r_square};

    StringRef MacroName;

    // The returned macro is guaranteed not to be function-like:
    MacroName = PP.getLastMacroWithSpelling(Loc, ClangUnsafeBufferUsageTokens);
    if (MacroName.empty())
      MacroName = "[[clang::unsafe_buffer_usage]]";
    return MacroName.str() + WSSuffix.str();
  }
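
  // Illustrative sketch (editorial, not part of the original source): if the
  // translation unit defines, say,
  //
  //   #define UNSAFE_BUFFER_USAGE [[clang::unsafe_buffer_usage]]
  //
  // and that macro spelling is visible at `Loc`, the function above returns
  // "UNSAFE_BUFFER_USAGE" (plus `WSSuffix`); otherwise it falls back to the
  // raw attribute spelling.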
};
} // namespace

//===----------------------------------------------------------------------===//
// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
// warnings on a function, method, or block.
//===----------------------------------------------------------------------===//

sema::AnalysisBasedWarnings::Policy::Policy() {
  enableCheckFallThrough = 1;
  enableCheckUnreachable = 0;
  enableThreadSafetyAnalysis = 0;
  enableConsumedAnalysis = 0;
}

/// InterProceduralData aims to be a storage for whatever data should be
/// passed between analyses of different functions.
///
/// At the moment, its primary goal is to make the information gathered during
/// the analysis of the blocks available during the analysis of the enclosing
/// function. This is important due to the fact that blocks are analyzed before
/// the enclosing function is even parsed fully, so it is not viable to access
/// anything in the outer scope while analyzing the block. On the other hand,
/// re-building CFG for blocks and re-analyzing them when we do have all the
/// information (i.e. during the analysis of the enclosing function) seems to
/// be ill-designed.
class sema::AnalysisBasedWarnings::InterProceduralData {
public:
  // It is important to analyze blocks within functions because it's a very
  // common pattern to capture completion handler parameters by blocks.
  CalledOnceInterProceduralData CalledOnceData;
};

static unsigned isEnabled(DiagnosticsEngine &D, unsigned diag) {
  return (unsigned)!D.isIgnored(diag, SourceLocation());
}

sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
    : S(s), IPData(std::make_unique<InterProceduralData>()),
      NumFunctionsAnalyzed(0), NumFunctionsWithBadCFGs(0), NumCFGBlocks(0),
      MaxCFGBlocksPerFunction(0), NumUninitAnalysisFunctions(0),
      NumUninitAnalysisVariables(0), MaxUninitAnalysisVariablesPerFunction(0),
      NumUninitAnalysisBlockVisits(0),
      MaxUninitAnalysisBlockVisitsPerFunction(0) {

  using namespace diag;
  DiagnosticsEngine &D = S.getDiagnostics();

  DefaultPolicy.enableCheckUnreachable =
      isEnabled(D, warn_unreachable) || isEnabled(D, warn_unreachable_break) ||
      isEnabled(D, warn_unreachable_return) ||
      isEnabled(D, warn_unreachable_loop_increment);

  DefaultPolicy.enableThreadSafetyAnalysis = isEnabled(D, warn_double_lock);

  DefaultPolicy.enableConsumedAnalysis =
      isEnabled(D, warn_use_in_invalid_state);
}

// We need this here for unique_ptr with a forward-declared class.
sema::AnalysisBasedWarnings::~AnalysisBasedWarnings() = default;

static void flushDiagnostics(Sema &S, const sema::FunctionScopeInfo *fscope) {
  for (const auto &D : fscope->PossiblyUnreachableDiags)
    S.Diag(D.Loc, D.PD);
}

// An AST Visitor that calls a callback function on each callable DEFINITION
// that is NOT in a dependent context:
class CallableVisitor : public RecursiveASTVisitor<CallableVisitor> {
private:
  llvm::function_ref<void(const Decl *)> Callback;

public:
  CallableVisitor(llvm::function_ref<void(const Decl *)> Callback)
      : Callback(Callback) {}

  bool VisitFunctionDecl(FunctionDecl *Node) {
    if (cast<DeclContext>(Node)->isDependentContext())
      return true; // Don't analyze dependent decls.
    // `FunctionDecl->hasBody()` returns true if the function has a body
    // defined somewhere. But we want to know if this `Node` has a body
    // child. So we use `doesThisDeclarationHaveABody`:
    if (Node->doesThisDeclarationHaveABody())
      Callback(Node);
    return true;
  }

  bool VisitBlockDecl(BlockDecl *Node) {
    if (cast<DeclContext>(Node)->isDependentContext())
      return true; // Don't analyze dependent decls.
    Callback(Node);
    return true;
  }

  bool VisitObjCMethodDecl(ObjCMethodDecl *Node) {
    if (cast<DeclContext>(Node)->isDependentContext())
      return true; // Don't analyze dependent decls.
    if (Node->hasBody())
      Callback(Node);
    return true;
  }

  bool VisitLambdaExpr(LambdaExpr *Node) {
    return VisitFunctionDecl(Node->getCallOperator());
  }

  bool shouldVisitTemplateInstantiations() const { return true; }
  bool shouldVisitImplicitCode() const { return false; }
};
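
// Illustrative sketch (editorial, not part of the original source): the
// visitor is driven with a callback, e.g.
//
//   CallableVisitor([](const Decl *D) { /* analyze D */ })
//       .TraverseTranslationUnitDecl(TU);
//
// which is how IssueWarnings() below uses it.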

void clang::sema::AnalysisBasedWarnings::IssueWarnings(
    TranslationUnitDecl *TU) {
  if (!TU)
    return; // This is unexpected, give up quietly.

  DiagnosticsEngine &Diags = S.getDiagnostics();

  if (S.hasUncompilableErrorOccurred() || Diags.getIgnoreAllWarnings())
    // Exit if there are uncompilable errors or if all warnings are ignored:
    return;

  DiagnosticOptions &DiagOpts = Diags.getDiagnosticOptions();

  // UnsafeBufferUsage analysis settings.
  bool UnsafeBufferUsageCanEmitSuggestions = S.getLangOpts().CPlusPlus20;
  bool UnsafeBufferUsageShouldEmitSuggestions = // Should != Can.
      UnsafeBufferUsageCanEmitSuggestions &&
      DiagOpts.ShowSafeBufferUsageSuggestions;
  bool UnsafeBufferUsageShouldSuggestSuggestions =
      UnsafeBufferUsageCanEmitSuggestions &&
      !DiagOpts.ShowSafeBufferUsageSuggestions;
  UnsafeBufferUsageReporter R(S, UnsafeBufferUsageShouldSuggestSuggestions);

  // The callback function that performs analyses:
  auto CallAnalyzers = [&](const Decl *Node) -> void {
    // Perform unsafe buffer usage analysis:
    if (!Diags.isIgnored(diag::warn_unsafe_buffer_operation,
                         Node->getBeginLoc()) ||
        !Diags.isIgnored(diag::warn_unsafe_buffer_variable,
                         Node->getBeginLoc()) ||
        !Diags.isIgnored(diag::warn_unsafe_buffer_usage_in_container,
                         Node->getBeginLoc())) {
      clang::checkUnsafeBufferUsage(Node, R,
                                    UnsafeBufferUsageShouldEmitSuggestions);
    }

    // More analyses ...
  };
  // Emit per-function analysis-based warnings that require whole-TU
  // reasoning. Check if any of them is enabled at all before scanning the
  // AST:
  if (!Diags.isIgnored(diag::warn_unsafe_buffer_operation, SourceLocation()) ||
      !Diags.isIgnored(diag::warn_unsafe_buffer_variable, SourceLocation()) ||
      !Diags.isIgnored(diag::warn_unsafe_buffer_usage_in_container,
                       SourceLocation())) {
    CallableVisitor(CallAnalyzers).TraverseTranslationUnitDecl(TU);
  }
}

void clang::sema::AnalysisBasedWarnings::IssueWarnings(
    sema::AnalysisBasedWarnings::Policy P, sema::FunctionScopeInfo *fscope,
    const Decl *D, QualType BlockType) {

  // We avoid doing analysis-based warnings when there are errors for
  // two reasons:
  // (1) The CFGs often can't be constructed (if the body is invalid), so
  //     don't bother trying.
  // (2) The code already has problems; running the analysis just takes more
  //     time.
  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Do not do any analysis if we are going to just ignore them.
  if (Diags.getIgnoreAllWarnings() ||
      (Diags.getSuppressSystemWarnings() &&
       S.SourceMgr.isInSystemHeader(D->getLocation())))
    return;

  // For code in dependent contexts, we'll do this at instantiation time.
  if (cast<DeclContext>(D)->isDependentContext())
    return;

  if (S.hasUncompilableErrorOccurred()) {
    // Flush out any possibly unreachable diagnostics.
    flushDiagnostics(S, fscope);
    return;
  }

  const Stmt *Body = D->getBody();
  assert(Body);

  // Construct the analysis context with the specified CFG build options.
  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ nullptr, D);

  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
  // explosion for destructors that can result and the compile time hit.
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
  AC.getCFGBuildOptions().AddEHEdges = false;
  AC.getCFGBuildOptions().AddInitializers = true;
  AC.getCFGBuildOptions().AddImplicitDtors = true;
  AC.getCFGBuildOptions().AddTemporaryDtors = true;
  AC.getCFGBuildOptions().AddCXXNewAllocator = false;
  AC.getCFGBuildOptions().AddCXXDefaultInitExprInCtors = true;

  // Force that certain expressions appear as CFGElements in the CFG. This
  // is used to speed up various analyses.
  // FIXME: This isn't the right factoring. This is here for initial
  // prototyping, but we need a way for analyses to say what expressions they
  // expect to always be CFGElements and then fill in the BuildOptions
  // appropriately. This is essentially a layering violation.
  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis ||
      P.enableConsumedAnalysis) {
    // Unreachable code analysis and thread safety require a linearized CFG.
    AC.getCFGBuildOptions().setAllAlwaysAdd();
  } else {
    AC.getCFGBuildOptions()
        .setAlwaysAdd(Stmt::BinaryOperatorClass)
        .setAlwaysAdd(Stmt::CompoundAssignOperatorClass)
        .setAlwaysAdd(Stmt::BlockExprClass)
        .setAlwaysAdd(Stmt::CStyleCastExprClass)
        .setAlwaysAdd(Stmt::DeclRefExprClass)
        .setAlwaysAdd(Stmt::ImplicitCastExprClass)
        .setAlwaysAdd(Stmt::UnaryOperatorClass);
  }

  // Install the logical handler.
  std::optional<LogicalErrorHandler> LEH;
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
    LEH.emplace(S);
    AC.getCFGBuildOptions().Observer = &*LEH;
  }

  // Emit delayed diagnostics.
  if (!fscope->PossiblyUnreachableDiags.empty()) {
    bool analyzed = false;

    // Register the expressions with the CFGBuilder.
    for (const auto &D : fscope->PossiblyUnreachableDiags) {
      for (const Stmt *S : D.Stmts)
        AC.registerForcedBlockExpression(S);
    }

    if (AC.getCFG()) {
      analyzed = true;
      for (const auto &D : fscope->PossiblyUnreachableDiags) {
        bool AllReachable = true;
        for (const Stmt *S : D.Stmts) {
          const CFGBlock *block = AC.getBlockForRegisteredExpression(S);
          CFGReverseBlockReachabilityAnalysis *cra =
              AC.getCFGReachablityAnalysis();
          // FIXME: We should be able to assert that block is non-null, but
          // the CFG analysis can skip potentially-evaluated expressions in
          // edge cases; see test/Sema/vla-2.c.
          if (block && cra) {
            // Can this block be reached from the entrance?
            if (!cra->isReachable(&AC.getCFG()->getEntry(), block)) {
              AllReachable = false;
              break;
            }
          }
          // If we cannot map to a basic block, assume the statement is
          // reachable.
        }

        if (AllReachable)
          S.Diag(D.Loc, D.PD);
      }
    }

    if (!analyzed)
      flushDiagnostics(S, fscope);
  }

  // Warning: check for missing 'return'.
  if (P.enableCheckFallThrough) {
    const CheckFallThroughDiagnostics &CD =
        (isa<BlockDecl>(D)
             ? CheckFallThroughDiagnostics::MakeForBlock()
             : (isa<CXXMethodDecl>(D) &&
                cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call &&
                cast<CXXMethodDecl>(D)->getParent()->isLambda())
                   ? CheckFallThroughDiagnostics::MakeForLambda()
                   : (fscope->isCoroutine()
                          ? CheckFallThroughDiagnostics::MakeForCoroutine(D)
                          : CheckFallThroughDiagnostics::MakeForFunction(D)));
    CheckFallThroughForBody(S, D, Body, BlockType, CD, AC, fscope);
  }

  // Warning: check for unreachable code.
  if (P.enableCheckUnreachable) {
    // Only check for unreachable code on non-template instantiations.
    // Different template instantiations can effectively change the
    // control-flow and it is very difficult to prove that a snippet of code
    // in a template is unreachable for all instantiations.
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
      isTemplateInstantiation = Function->isTemplateInstantiation();
    if (!isTemplateInstantiation)
      CheckUnreachable(S, AC);
  }

  // Check for thread safety violations.
  if (P.enableThreadSafetyAnalysis) {
    SourceLocation FL = AC.getDecl()->getLocation();
    SourceLocation FEL = AC.getDecl()->getEndLoc();
    threadSafety::ThreadSafetyReporter Reporter(S, FL, FEL);
    if (!Diags.isIgnored(diag::warn_thread_safety_beta, D->getBeginLoc()))
      Reporter.setIssueBetaWarnings(true);
    if (!Diags.isIgnored(diag::warn_thread_safety_verbose, D->getBeginLoc()))
      Reporter.setVerbose(true);

    threadSafety::runThreadSafetyAnalysis(AC, Reporter,
                                          &S.ThreadSafetyDeclCache);
    Reporter.emitDiagnostics();
  }

  // Check for violations of consumed properties.
  if (P.enableConsumedAnalysis) {
    consumed::ConsumedWarningsHandler WarningHandler(S);
    consumed::ConsumedAnalyzer Analyzer(WarningHandler);
    Analyzer.run(AC);
  }

  if (!Diags.isIgnored(diag::warn_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_sometimes_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_maybe_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_uninit_const_reference, D->getBeginLoc())) {
    if (CFG *cfg = AC.getCFG()) {
      UninitValsDiagReporter reporter(S);
      UninitVariablesAnalysisStats stats;
      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
                                        reporter, stats);

      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
        ++NumUninitAnalysisFunctions;
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
        MaxUninitAnalysisVariablesPerFunction =
            std::max(MaxUninitAnalysisVariablesPerFunction,
                     stats.NumVariablesAnalyzed);
        MaxUninitAnalysisBlockVisitsPerFunction =
            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
                     stats.NumBlockVisits);
      }
    }
  }

  // Check for violations of "called once" parameter properties.
  if (S.getLangOpts().ObjC && !S.getLangOpts().CPlusPlus &&
      shouldAnalyzeCalledOnceParameters(Diags, D->getBeginLoc())) {
    if (AC.getCFG()) {
      CalledOnceCheckReporter Reporter(S, IPData->CalledOnceData);
      checkCalledOnceParameters(
          AC, Reporter,
          shouldAnalyzeCalledOnceConventions(Diags, D->getBeginLoc()));
    }
  }

  bool FallThroughDiagFull =
      !Diags.isIgnored(diag::warn_unannotated_fallthrough, D->getBeginLoc());
  bool FallThroughDiagPerFunction = !Diags.isIgnored(
      diag::warn_unannotated_fallthrough_per_function, D->getBeginLoc());
  if (FallThroughDiagFull || FallThroughDiagPerFunction ||
      fscope->HasFallthroughStmt) {
    DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull);
  }

  if (S.getLangOpts().ObjCWeak &&
      !Diags.isIgnored(diag::warn_arc_repeated_use_of_weak, D->getBeginLoc()))
    diagnoseRepeatedUseOfWeak(S, fscope, D, AC.getParentMap());

  // Check for infinite self-recursion in functions.
  if (!Diags.isIgnored(diag::warn_infinite_recursive_function,
                       D->getBeginLoc())) {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      checkRecursiveFunction(S, FD, Body, AC);
    }
  }

  // Check for throw out of a non-throwing function.
  if (!Diags.isIgnored(diag::warn_throw_in_noexcept_func, D->getBeginLoc()))
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D))
      if (S.getLangOpts().CPlusPlus && !fscope->isCoroutine() && isNoexcept(FD))
        checkThrowInNonThrowingFunc(S, FD, AC);
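
  // For example (editorial illustration, not from the original source), a
  // definition like
  //
  //   void f() noexcept { throw 42; }
  //
  // would be flagged here by checkThrowInNonThrowingFunc.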

  // If none of the previous checks caused a CFG build, trigger one here
  // for the logical error handler.
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
    AC.getCFG();
  }

  // Collect statistics about the CFG if it was built.
  if (S.CollectStats && AC.isCFGBuilt()) {
    ++NumFunctionsAnalyzed;
    if (CFG *cfg = AC.getCFG()) {
      // If we successfully built a CFG for this context, record some more
      // detailed information about it.
      NumCFGBlocks += cfg->getNumBlockIDs();
      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
                                         cfg->getNumBlockIDs());
    } else {
      ++NumFunctionsWithBadCFGs;
    }
  }
}

void clang::sema::AnalysisBasedWarnings::PrintStats() const {
  llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";

  unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
  unsigned AvgCFGBlocksPerFunction =
      !NumCFGsBuilt ? 0 : NumCFGBlocks / NumCFGsBuilt;
  llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
               << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
               << "  " << NumCFGBlocks << " CFG blocks built.\n"
               << "  " << AvgCFGBlocksPerFunction
               << " average CFG blocks per function.\n"
               << "  " << MaxCFGBlocksPerFunction
               << " max CFG blocks per function.\n";

  unsigned AvgUninitVariablesPerFunction =
      !NumUninitAnalysisFunctions
          ? 0
          : NumUninitAnalysisVariables / NumUninitAnalysisFunctions;
  unsigned AvgUninitBlockVisitsPerFunction =
      !NumUninitAnalysisFunctions
          ? 0
          : NumUninitAnalysisBlockVisits / NumUninitAnalysisFunctions;
  llvm::errs() << NumUninitAnalysisFunctions
               << " functions analyzed for uninitialized variables\n"
               << "  " << NumUninitAnalysisVariables
               << " variables analyzed.\n"
               << "  " << AvgUninitVariablesPerFunction
               << " average variables per function.\n"
               << "  " << MaxUninitAnalysisVariablesPerFunction
               << " max variables per function.\n"
               << "  " << NumUninitAnalysisBlockVisits << " block visits.\n"
               << "  " << AvgUninitBlockVisitsPerFunction
               << " average block visits per function.\n"
               << "  " << MaxUninitAnalysisBlockVisitsPerFunction
               << " max block visits per function.\n";
}