#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/PackedVector.h"
#include "llvm/ADT/SmallBitVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Support/Casting.h"
// ...

using namespace clang;

#define DEBUG_LOGGING 0
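// This is Clang's flow-sensitive analysis for uses of uninitialized local
// variables, the machinery behind -Wuninitialized and
// -Wsometimes-uninitialized. Illustrative example of the kind of code it
// flags (not part of this file):
//
//   int x;
//   if (flag)
//     x = 1;
//   use(x);   // 'x' may be uninitialized when 'flag' is false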
class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;

public:
  DeclToIndex() = default;
  // ...
  unsigned size() const { return map.size(); }
  // ...
};

void DeclToIndex::computeMap(const DeclContext &dc) {
  // ... iterate the declarations in 'dc', numbering each tracked VarDecl:
  for ( ; I != E; ++I) {
    // ...
  }
}

// DeclToIndex's lookup member finds a declaration's index (or None):
  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
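// Each tracked variable gets a two-bit dataflow value (Unknown, Initialized,
// Uninitialized, MayUninitialized), packed into the ValueVector declared
// below. Merging block states at CFG joins (mergeIntoScratch) amounts to a
// bitwise OR of those encodings, so Initialized merged with Uninitialized
// yields MayUninitialized.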
using ValueVector = llvm::PackedVector<Value, 2, llvm::SmallBitVector>;

class CFGBlockValues {
  // ... (the CFG, one ValueVector per block in 'vals', and a 'scratch' vector)
  DeclToIndex declToIndex;

public:
  CFGBlockValues(const CFG &cfg);

  unsigned getNumEntries() const { return declToIndex.size(); }

  void computeSetOfDeclarations(const DeclContext &dc);

  ValueVector &getValueVector(const CFGBlock *block) {
    return vals[block->getBlockID()];
  }

  void setAllScratchValues(Value V);
  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  // ...
  ValueVector::reference operator[](const VarDecl *vd);

  Value getValue(const CFGBlock *block, const CFGBlock *dstBlock,
                 const VarDecl *vd) {
    const Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
    assert(idx.hasValue());
    return getValueVector(block)[idx.getValue()];
  }
};
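// CFGBlockValues keeps one packed ValueVector per CFG block plus a scratch
// vector. A block's transfer function runs against scratch, and
// updateValueVectorWithScratch() writes scratch back into the block's vector,
// returning whether anything changed; that "changed" bit drives the fixpoint
// loop in the analysis driver further below.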
CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {}

void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  unsigned decls = declToIndex.size();
  scratch.resize(decls);
  // ... resize 'vals' to one vector per CFG block, then:
  for (auto &val : vals)
    val.resize(decls);
}
#if DEBUG_LOGGING
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {
  // ...
  for (const auto &i : bv)
    llvm::errs() << ' ' << i;
  llvm::errs() << " : " << num << '\n';
}
#endif
void CFGBlockValues::setAllScratchValues(Value V) {
  for (unsigned I = 0, E = scratch.size(); I != E; ++I)
    scratch[I] = V;
}

void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
  if (isFirst)
    scratch = source;
  else
    scratch |= source;
}

bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block);
  bool changed = (dst != scratch);
  // ... copy scratch into dst if it changed.
#if DEBUG_LOGGING
  printVector(block, scratch, 0);
#endif
  return changed;
}
void CFGBlockValues::resetScratch() {
  scratch.reset();
}

ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  const Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
  assert(idx.hasValue());
  return scratch[idx.getValue()];
}
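// The transfer functions below read and update the scratch vector through
// operator[], indexed by the tracked VarDecl; declToIndex supplies the
// variable's two-bit slot.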
class FindVarResult {
  const VarDecl *vd;
  const DeclRefExpr *dr;

public:
  FindVarResult(const VarDecl *vd, const DeclRefExpr *dr) : vd(vd), dr(dr) {}

  const DeclRefExpr *getDeclRefExpr() const { return dr; }
  const VarDecl *getDecl() const { return vd; }
};

// Peel off no-op parens/casts (including lvalue bit-casts).
static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
  while (Ex) {
    Ex = Ex->IgnoreParenNoopCasts(C);
    if (const auto *CE = dyn_cast<CastExpr>(Ex)) {
      if (CE->getCastKind() == CK_LValueBitCast) {
        Ex = CE->getSubExpr();
        continue;
      }
    }
    break;
  }
  return Ex;
}

// If the expression is (after stripping casts) a reference to a tracked
// local variable, return that variable and its DeclRefExpr.
static FindVarResult findVar(const Expr *E, const DeclContext *DC) {
  if (const auto *DRE =
          dyn_cast<DeclRefExpr>(stripCasts(DC->getParentASTContext(), E)))
    if (const auto *VD = dyn_cast<VarDecl>(DRE->getDecl()))
      if (isTrackedVar(VD, DC))
        return FindVarResult(VD, DRE);
  return FindVarResult(nullptr, nullptr);
}
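// ClassifyRefs is a pre-pass over the function body that decides, for each
// DeclRefExpr, whether it is an initialization, a use, a self-initialization
// (int x = x;), a use through a const reference, or something to ignore.
// TransferFunctions consults this classification while walking the CFG.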
class ClassifyRefs : public StmtVisitor<ClassifyRefs> {
  // ... (an enum Class with Init, Use, SelfInit, ConstRefUse and Ignore,
  // the visitor entry points, and a helper isTrackedVar(VD))
  const DeclContext *DC;
  llvm::DenseMap<const DeclRefExpr *, Class> Classification;

  void classify(const Expr *E, Class C);

public:
  Class get(const DeclRefExpr *DRE) const {
    llvm::DenseMap<const DeclRefExpr*, Class>::const_iterator I
        = Classification.find(DRE);
    if (I != Classification.end())
      return I->second;

    const auto *VD = dyn_cast<VarDecl>(DRE->getDecl());
    if (!VD || !isTrackedVar(VD))
      return Ignore;
    return Init;
  }
};

// getSelfInitExpr: detect direct self-initialization such as 'int x = x;'.
// ...
  if (DRE && DRE->getDecl() == VD)
    return DRE;
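// classify() pushes a classification down through expressions that merely
// forward a variable (conditional operators, opaque values, member accesses
// on a local aggregate, pointer-to-member and comma operators) and records it
// on the underlying DeclRefExpr; if the same reference is classified more
// than once, std::max keeps the stronger classification.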
void ClassifyRefs::classify(const Expr *E, Class C) {
  // The result of a ?: could also be an lvalue.
  E = E->IgnoreParens();
  if (const auto *CO = dyn_cast<ConditionalOperator>(E)) {
    classify(CO->getTrueExpr(), C);
    classify(CO->getFalseExpr(), C);
    return;
  }

  if (const auto *BCO = dyn_cast<BinaryConditionalOperator>(E)) {
    classify(BCO->getFalseExpr(), C);
    return;
  }

  if (const auto *OVE = dyn_cast<OpaqueValueExpr>(E)) {
    classify(OVE->getSourceExpr(), C);
    return;
  }

  if (const auto *ME = dyn_cast<MemberExpr>(E)) {
    if (const auto *VD = dyn_cast<VarDecl>(ME->getMemberDecl())) {
      if (!VD->isStaticDataMember())
        classify(ME->getBase(), C);
    }
    return;
  }

  if (const auto *BO = dyn_cast<BinaryOperator>(E)) {
    switch (BO->getOpcode()) {
    case BO_PtrMemD:
    case BO_PtrMemI:
      classify(BO->getLHS(), C);
      return;
    case BO_Comma:
      classify(BO->getRHS(), C);
      return;
    default:
      return;
    }
  }

  FindVarResult Var = findVar(E, DC);
  if (const DeclRefExpr *DRE = Var.getDeclRefExpr())
    Classification[DRE] = std::max(Classification[DRE], C);
}
void ClassifyRefs::VisitDeclStmt(DeclStmt *DS) {
  for (auto *DI : DS->decls()) {
    auto *VD = dyn_cast<VarDecl>(DI);
    if (VD && isTrackedVar(VD))
      if (const DeclRefExpr *DRE = getSelfInitExpr(VD))
        Classification[DRE] = SelfInit;
  }
}

void ClassifyRefs::VisitBinaryOperator(BinaryOperator *BO) {
  // ...
  if (BO->isCompoundAssignmentOp())
    classify(BO->getLHS(), Use);
  else if (BO->getOpcode() == BO_Assign || BO->getOpcode() == BO_Comma)
    classify(BO->getLHS(), Ignore);
}

void ClassifyRefs::VisitOMPExecutableDirective(OMPExecutableDirective *ED) {
  for (Stmt *S : OMPExecutableDirective::used_clauses_children(ED->clauses()))
    classify(cast<Expr>(S), Use);
}
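// For a plain assignment the LHS reference is deliberately ignored here:
// TransferFunctions treats the assignment itself as the initialization point.
// A compound assignment such as 'x += 1' both reads and writes 'x', so its
// LHS is classified as a use.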
static bool hasTrivialBody(CallExpr *CE) {
  if (FunctionDecl *FD = CE->getDirectCallee()) {
    if (FunctionTemplateDecl *FTD = FD->getPrimaryTemplate())
      return FTD->getTemplatedDecl()->hasTrivialBody();
    return FD->hasTrivialBody();
  }
  return false;
}

void ClassifyRefs::VisitCallExpr(CallExpr *CE) {
  // ... std::move's argument is a use:
      classify(CE->getArg(0), Use);
  // ... for each argument of an ordinary call:
    if ((*I)->isGLValue()) {
      if ((*I)->getType().isConstQualified())
        classify((*I), isTrivialBody ? Ignore : ConstRefUse);
    } // ... else, for an argument passed through a pointer-to-const:
      const auto *UO = dyn_cast<UnaryOperator>(Ex);
      // ...
      classify(Ex, Ignore);
}
void ClassifyRefs::VisitCastExpr(CastExpr *CE) {
  if (CE->getCastKind() == CK_LValueToRValue)
    classify(CE->getSubExpr(), Use);
  else if (const auto *CSE = dyn_cast<CStyleCastExpr>(CE)) {
    if (CSE->getType()->isVoidType()) {
      // ...
      classify(CSE->getSubExpr(), Ignore);
    }
  }
}
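// The cast-to-void case squelches the warning for the common idiom of
// deliberately touching a variable, e.g. (illustrative):
//
//   int x;
//   (void)x;   // no uninitialized-use warning for this load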
class TransferFunctions : public StmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;
  // ...
  const ClassifyRefs &classification;
  // ...

public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    const CFGBlock *block, AnalysisDeclContext &ac,
                    const ClassifyRefs &classification,
                    UninitVariablesHandler &handler)
      : vals(vals), cfg(cfg), block(block), ac(ac),
        classification(classification), objCNoRet(ac.getASTContext()),
        handler(handler) {}

  // ...
  void reportConstRefUse(const Expr *ex, const VarDecl *vd);
  // ...
};
// In TransferFunctions::getUninitUse(ex, vd, v), which decides whether a use
// is always or only sometimes uninitialized, and on which branches:
  if (Use.getKind() == UninitUse::Always)
    return Use;

  // Search backwards through the CFG from the use, looking for predecessor
  // blocks the variable can reach while still uninitialized.
  Queue.push_back(block);
  // ...
  while (!Queue.empty()) {
    const CFGBlock *B = Queue.pop_back_val();
    // ...
      Use.setUninitAfterCall();
    // ...
    Value AtPredExit = vals.getValue(Pred, B, vd);
    // ...
      Use.setUninitAfterDecl();
    // ...
    if (const auto *as = dyn_cast_or_null<GCCAsmStmt>(term.getStmt())) {
      // An "asm goto" may jump to one of its labels without writing its
      // outputs; if 'vd' is such an output and 'B' is one of the targets,
      // the variable is uninitialized after its declaration on that edge.
      if (as->isAsmGoto() &&
          llvm::any_of(as->outputs(), [&](const Expr *output) {
            return vd == findVar(output).getDecl() &&
                   llvm::any_of(as->labels(),
                                [&](const AddrLabelExpr *label) {
                                  return label->getLabel()->getStmt() == B->Label &&
                                         /* ... */;
                                });
          }))
        Use.setUninitAfterDecl();
    }
    // ...
    unsigned &SV = SuccsVisited[Pred->getBlockID()];
    // ...
      Queue.push_back(Pred);
  }

  // Mark the branches on which the variable is used while uninitialized.
  for (const auto *Block : cfg) {
    // ...
    const Stmt *Term = Block->getTerminatorStmt();
    // ...
    for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
                                       E = Block->succ_end(); I != E; ++I) {
      // ...
      if (isa<SwitchStmt>(Term)) {
        // ...
        Use.addUninitBranch(Branch);
      } else {
        // ...
        Use.addUninitBranch(Branch);
      }
    }
  }
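// Once the backwards search has decided the use is only sometimes
// uninitialized, the loop above records, per CFG terminator, which branch
// (e.g. which arm of an 'if' or which 'case' of a switch) leads to the
// uninitialized use; the -Wsometimes-uninitialized diagnostics use this to
// phrase their "when the condition is true/false" notes.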
void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {
  // If the variable is (possibly) uninitialized here, hand the use to the
  // handler together with the UninitUse built by getUninitUse().
  // ...
}

void TransferFunctions::reportConstRefUse(const Expr *ex, const VarDecl *vd) {
  // Same, but only for values that are always uninitialized when passed by
  // const reference.
  // ...
}

// In VisitObjCForCollectionStmt, the loop element is treated as initialized:
  if (const auto *DS = dyn_cast<DeclStmt>(FS->getElement())) {
    // ...
  }
void TransferFunctions::VisitOMPExecutableDirective(
    OMPExecutableDirective *ED) {
  for (Stmt *S : OMPExecutableDirective::used_clauses_children(ED->clauses())) {
    assert(S && "Expected non-null used-in-clause child.");
    // ...
  }
  // ...
}

void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  const BlockDecl *bd = be->getBlockDecl();
  for (const auto &I : bd->captures()) {
    const VarDecl *vd = I.getVariable();
    // ... by-reference captures count as initializations; by-value captures
    // are (potentially uninitialized) uses.
  }
}
void TransferFunctions::VisitCallExpr(CallExpr *ce) {
  if (Decl *Callee = ce->getCalleeDecl()) {
    if (Callee->hasAttr<ReturnsTwiceAttr>()) {
      // A callee that can return twice (setjmp, vfork) is conservatively
      // assumed to initialize every tracked variable.
      vals.setAllScratchValues(Initialized);
    } else if (Callee->hasAttr<AnalyzerNoReturnAttr>()) {
      // An analyzer-noreturn callee normally does not return; reset the state
      // to Unknown to suppress false positives on the path after the call.
      vals.setAllScratchValues(Unknown);
    }
  }
}
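// Both attributes force a conservative state: a returns-twice callee is
// assumed to leave every tracked variable initialized, while an
// analyzer-noreturn callee resets everything to Unknown so that code on the
// (normally unreachable) path after the call does not produce noise.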
void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
  switch (classification.get(dr)) {
  case ClassifyRefs::Ignore:
    break;
  case ClassifyRefs::Use:
    reportUse(dr, cast<VarDecl>(dr->getDecl()));
    break;
  case ClassifyRefs::Init:
    vals[cast<VarDecl>(dr->getDecl())] = Initialized;
    break;
  case ClassifyRefs::SelfInit:
    handler.handleSelfInit(cast<VarDecl>(dr->getDecl()));
    break;
  case ClassifyRefs::ConstRefUse:
    reportConstRefUse(dr, cast<VarDecl>(dr->getDecl()));
    break;
  }
}
// In TransferFunctions::VisitBinaryOperator, a plain assignment initializes
// the variable named on its left-hand side:
    if (const VarDecl *VD = Var.getDecl())
      vals[VD] = Initialized;
void TransferFunctions::VisitDeclStmt(DeclStmt *DS) {
  for (auto *DI : DS->decls()) {
    auto *VD = dyn_cast<VarDecl>(DI);
    if (VD && isTrackedVar(VD)) {
      if (getSelfInitExpr(VD)) {
        // 'int x = x;' deliberately leaves x uninitialized.
        vals[VD] = Uninitialized;
      } else if (VD->getInit()) {
        vals[VD] = Initialized;
      } else {
        vals[VD] = Uninitialized;
      }
    }
  }
}
void TransferFunctions::VisitGCCAsmStmt(GCCAsmStmt *as) {
  // Outputs of an "asm goto" may not be written on the indirect branch; strip
  // casts and unary operators to find the underlying variable:
  // ...
    while (const auto *UO = dyn_cast<UnaryOperator>(Ex))
      Ex = stripCasts(C, UO->getSubExpr());
  // ...
}

// In VisitObjCMessageExpr, a message to an implicitly no-returning method
// resets all tracked values:
  vals.setAllScratchValues(Unknown);
static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisDeclContext &ac, CFGBlockValues &vals,
                       const ClassifyRefs &classification,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler &handler) {
  wasAnalyzed[block->getBlockID()] = true;
  vals.resetScratch();
  // Merge in the values of already-analyzed predecessor blocks.
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    // ...
      vals.mergeIntoScratch(vals.getValueVector(pred), isFirst);
    // ...
  }
  // Apply the transfer functions over the block's statements.
  TransferFunctions tf(vals, cfg, block, ac, classification, handler);
  for (const auto &I : *block) {
    if (Optional<CFGStmt> cs = I.getAs<CFGStmt>())
      tf.Visit(const_cast<Stmt *>(cs->getStmt()));
  }
  CFGTerminator terminator = block->getTerminator();
  if (auto *as = dyn_cast_or_null<GCCAsmStmt>(terminator.getStmt()))
    if (as->isAsmGoto())
      tf.Visit(as);
  return vals.updateValueVectorWithScratch(block);
}
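// runOnBlock computes a block's output state: start from the merged input of
// its already-analyzed predecessors, apply the statement-level transfer
// functions, then write the scratch vector back. Its return value (did the
// block's values change?) drives the worklist iteration below.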
struct PruneBlocksHandler : public UninitVariablesHandler {
  /// Records which CFGBlocks had a potential use of an uninitialized variable.
  llvm::BitVector hadUse;

  /// Whether any CFGBlock had such a use.
  bool hadAnyUse = false;

  /// The block currently being analyzed.
  unsigned currentBlock = 0;

  PruneBlocksHandler(unsigned numBlocks) : hadUse(numBlocks, false) {}

  ~PruneBlocksHandler() override = default;

  void handleUseOfUninitVariable(const VarDecl *vd,
                                 const UninitUse &use) override {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }

  void handleConstRefUseOfUninitVariable(const VarDecl *vd,
                                         const UninitUse &use) override {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }

  void handleSelfInit(const VarDecl *vd) override {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }
};
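// PruneBlocksHandler is used only during the fixpoint phase: it records which
// blocks produced a potential uninitialized use, so the final reporting pass
// re-runs the transfer functions with the real diagnostic handler only on
// those blocks.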
void clang::runUninitializedVariablesAnalysis(
    const DeclContext &dc, const CFG &cfg, AnalysisDeclContext &ac,
    UninitVariablesHandler &handler, UninitVariablesAnalysisStats &stats) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;
  // ...

  // Precompute which expressions are uses and which are initializations.
  ClassifyRefs classification(ac);
  cfg.VisitBlockStmts(classification);

  // Mark all tracked variables uninitialized at the entry block.
  const CFGBlock &entry = cfg.getEntry();
  ValueVector &vec = vals.getValueVector(&entry);
  const unsigned n = vals.getNumEntries();
  for (unsigned j = 0; j < n; ++j) {
    vec[j] = Uninitialized;
  }

  // Iterate a forward-dataflow worklist until the per-block values settle.
  // ...
    bool changed = runOnBlock(block, cfg, ac, vals,
                              classification, wasAnalyzed, PBH);
    // ...
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  // ...

  // Run through the blocks once more, reporting diagnostics through the real
  // handler, but only for blocks that had a potential uninitialized use.
  for (const auto *block : cfg)
    if (PBH.hadUse[block->getBlockID()])
      runOnBlock(block, cfg, ac, vals, classification, wasAnalyzed, handler);
}
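// The driver therefore makes two passes: a worklist-driven fixpoint pass that
// uses PruneBlocksHandler just to collect "this block had a use" bits, and a
// second pass that replays the transfer functions with the client's handler
// to emit the actual diagnostics for the flagged blocks.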
UninitVariablesHandler::~UninitVariablesHandler() = default;