#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/PackedVector.h"
#include "llvm/ADT/SmallBitVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Support/Casting.h"

using namespace clang;

#define DEBUG_LOGGING 0
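
// DeclToIndex maps each tracked VarDecl to a dense index that is used to
// address the per-block value vectors. The DenseMap below is its core state.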
  llvm::DenseMap<const VarDecl *, unsigned> map;

  DeclToIndex() = default;

  unsigned size() const { return map.size(); }

  std::optional<unsigned> getValueIndex(const VarDecl *d) const;
void DeclToIndex::computeMap(const DeclContext &dc) {

  for ( ; I != E; ++I) {
std::optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
using ValueVector = llvm::PackedVector<Value, 2, llvm::SmallBitVector>;
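
// CFGBlockValues owns one ValueVector per CFG block plus a shared scratch
// vector that is used while merging predecessor states for the current block.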
class CFGBlockValues {
  DeclToIndex declToIndex;

  CFGBlockValues(const CFG &cfg);

  unsigned getNumEntries() const { return declToIndex.size(); }

  void computeSetOfDeclarations(const DeclContext &dc);

  ValueVector &getValueVector(const CFGBlock *block) {
    return vals[block->getBlockID()];
  }

  void setAllScratchValues(Value V);
  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  ValueVector::reference operator[](const VarDecl *vd);

  Value getValue(const CFGBlock *block, const CFGBlock *dstBlock,
                 const VarDecl *vd) {
    std::optional<unsigned> idx = declToIndex.getValueIndex(vd);
    return getValueVector(block)[*idx];
  }
};
CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {}
void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  unsigned decls = declToIndex.size();
  scratch.resize(decls);

  for (auto &val : vals)
    val.resize(decls);
}
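
// Debug-only helper: dump a block's value vector to stderr.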
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {
  for (const auto &i : bv)
    llvm::errs() << ' ' << i;
  llvm::errs() << " : " << num << '\n';
}
void CFGBlockValues::setAllScratchValues(Value V) {
  for (unsigned I = 0, E = scratch.size(); I != E; ++I)
    scratch[I] = V;
}
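
// Merge a predecessor's values into the scratch vector: the first source
// initializes scratch, subsequent sources are combined into it.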
void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block);
  bool changed = (dst != scratch);
  if (changed)
    dst = scratch;
  printVector(block, scratch, 0);
  return changed;
}
void CFGBlockValues::resetScratch() {
  scratch.reset();
}
ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  return scratch[*declToIndex.getValueIndex(vd)];
}
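
// FindVarResult pairs the variable a lookup resolved to (if any) with the
// DeclRefExpr that referenced it.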
class FindVarResult {
  const DeclRefExpr *getDeclRefExpr() const { return dr; }
  const VarDecl *getDecl() const { return vd; }
    if (const auto *CE = dyn_cast<CastExpr>(Ex)) {
      if (CE->getCastKind() == CK_LValueBitCast) {
        Ex = CE->getSubExpr();
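
// findVar(): if E (after stripping casts) is a reference to a single local
// variable, return that variable and its DeclRefExpr.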
  if (const auto *DRE =
          dyn_cast<DeclRefExpr>(stripCasts(DC->getParentASTContext(), E)))
    if (const auto *VD = dyn_cast<VarDecl>(DRE->getDecl()))
      return FindVarResult(VD, DRE);
  return FindVarResult(nullptr, nullptr);
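
// ClassifyRefs walks the function body once and records, for each DeclRefExpr
// of a tracked variable, whether it is an initialization (Init), a use (Use),
// a self-initialization (SelfInit), a use through a const reference
// (ConstRefUse), or something to ignore. TransferFunctions consults this map.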
class ClassifyRefs : public StmtVisitor<ClassifyRefs> {

  llvm::DenseMap<const DeclRefExpr *, Class> Classification;
  void classify(const Expr *E, Class C);
    llvm::DenseMap<const DeclRefExpr*, Class>::const_iterator I
        = Classification.find(DRE);
    if (I != Classification.end())
      return I->second;

    const auto *VD = dyn_cast<VarDecl>(DRE->getDecl());
    if (DRE && DRE->getDecl() == VD)
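
// classify() pushes the classification C through the syntactic forms that can
// wrap a variable reference (conditional operators, opaque values, member
// accesses of non-static members, and certain binary operators) until it
// reaches a DeclRefExpr, where the strongest classification wins.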
void ClassifyRefs::classify(const Expr *E, Class C) {

  if (const auto *CO = dyn_cast<ConditionalOperator>(E)) {
    classify(CO->getTrueExpr(), C);
    classify(CO->getFalseExpr(), C);
    return;
  }

  if (const auto *BCO = dyn_cast<BinaryConditionalOperator>(E)) {
    classify(BCO->getFalseExpr(), C);
    return;
  }

  if (const auto *OVE = dyn_cast<OpaqueValueExpr>(E)) {
    classify(OVE->getSourceExpr(), C);
    return;
  }

  if (const auto *ME = dyn_cast<MemberExpr>(E)) {
    if (const auto *VD = dyn_cast<VarDecl>(ME->getMemberDecl())) {
      if (!VD->isStaticDataMember())
        classify(ME->getBase(), C);
    }
    return;
  }

  if (const auto *BO = dyn_cast<BinaryOperator>(E)) {
    switch (BO->getOpcode()) {

      classify(BO->getLHS(), C);

      classify(BO->getRHS(), C);
    }
  }

  FindVarResult Var = findVar(E, DC);
  if (const DeclRefExpr *DRE = Var.getDeclRefExpr())
    Classification[DRE] = std::max(Classification[DRE], C);
}
void ClassifyRefs::VisitDeclStmt(DeclStmt *DS) {
  for (auto *DI : DS->decls()) {
    auto *VD = dyn_cast<VarDecl>(DI);
    if (VD && isTrackedVar(VD))
      if (const DeclRefExpr *DRE = getSelfInitExpr(VD))
        Classification[DRE] = SelfInit;
  }
}
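
// The LHS of a compound assignment is read, so it counts as a Use; the LHS of
// a plain assignment or comma is Ignored here because the assignment itself is
// treated as the initialization by TransferFunctions.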
  if (BO->isCompoundAssignmentOp())
    classify(BO->getLHS(), Use);
  else if (BO->getOpcode() == BO_Assign || BO->getOpcode() == BO_Comma)
    classify(BO->getLHS(), Ignore);
  for (Stmt *S : OMPExecutableDirective::used_clauses_children(ED->clauses()))
    classify(cast<Expr>(S), Use);
    return FTD->getTemplatedDecl()->hasTrivialBody();
  return FD->hasTrivialBody();
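
// VisitCallExpr: an argument to std::move counts as a use. Arguments passed by
// const reference are ConstRefUse (unless the callee body is trivial), while
// values passed by pointer-to-const are neither assumed initialized by the
// call nor counted as uses.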
void ClassifyRefs::VisitCallExpr(CallExpr *CE) {

      classify(CE->getArg(0), Use);

    if ((*I)->isGLValue()) {
      if ((*I)->getType().isConstQualified())
        classify((*I), isTrivialBody ? Ignore : ConstRefUse);

      const auto *UO = dyn_cast<UnaryOperator>(Ex);

      classify(Ex, Ignore);
void ClassifyRefs::VisitCastExpr(CastExpr *CE) {
  if (CE->getCastKind() == CK_LValueToRValue)
    classify(CE->getSubExpr(), Use);
  else if (const auto *CSE = dyn_cast<CStyleCastExpr>(CE)) {
    if (CSE->getType()->isVoidType()) {
      // Squelch any detected load of an uninitialized value if we cast it to
      // void, e.g. (void) x;.
      classify(CSE->getSubExpr(), Ignore);
    }
  }
}
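
// TransferFunctions implements the per-statement transfer function for one CFG
// block: it updates the scratch value vector and reports uninitialized uses
// through the UninitVariablesHandler.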
class TransferFunctions : public StmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;

  const ClassifyRefs &classification;

  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    const CFGBlock *block, AnalysisDeclContext &ac,
                    const ClassifyRefs &classification,
                    UninitVariablesHandler &handler)
      : vals(vals), cfg(cfg), block(block), ac(ac),
        classification(classification), objCNoRet(ac.getASTContext()),
        handler(handler) {}

  void reportConstRefUse(const Expr *ex, const VarDecl *vd);
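
// Building the UninitUse report: when the variable is not uninitialized on
// every path, walk backwards through the CFG to find where the value may still
// be uninitialized (after a call, after the declaration, or via an asm goto
// label) and record the offending branches.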
  if (Use.getKind() == UninitUse::Always)

  Queue.push_back(block);

  while (!Queue.empty()) {
    const CFGBlock *B = Queue.pop_back_val();

      Use.setUninitAfterCall();

      Value AtPredExit = vals.getValue(Pred, B, vd);

        Use.setUninitAfterDecl();

      if (const auto *as = dyn_cast_or_null<GCCAsmStmt>(term.getStmt())) {
        if (as->isAsmGoto() &&
            llvm::any_of(as->outputs(), [&](const Expr *output) {
              return vd == findVar(output).getDecl() &&
                     llvm::any_of(as->labels(),
                                  [&](const AddrLabelExpr *label) {
                       return label->getLabel()->getStmt() == B->Label &&

          Use.setUninitAfterDecl();

      unsigned &SV = SuccsVisited[Pred->getBlockID()];

        Queue.push_back(Pred);
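
// Scan the frontier: for each block that inevitably leads to the use, record
// the branch on which the variable is still uninitialized. Switch terminators
// are reported via the matching case label.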
  for (const auto *Block : cfg) {

    const Stmt *Term = Block->getTerminatorStmt();

      for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
           E = Block->succ_end(); I != E; ++I) {

          if (isa<SwitchStmt>(Term)) {

            Use.addUninitBranch(Branch);

            Use.addUninitBranch(Branch);
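
// reportUse() and reportConstRefUse() check the variable's current value in
// the scratch vector and forward an UninitUse report to the handler when the
// value is (possibly or always) uninitialized.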
void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {

void TransferFunctions::reportConstRefUse(const Expr *ex, const VarDecl *vd) {
  if (const auto *DS = dyn_cast<DeclStmt>(FS->getElement())) {
void TransferFunctions::VisitOMPExecutableDirective(
    OMPExecutableDirective *ED) {
  for (Stmt *S :
       OMPExecutableDirective::used_clauses_children(ED->clauses())) {
    assert(S && "Expected non-null used-in-clause child.");
void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  const BlockDecl *bd = be->getBlockDecl();
  for (const auto &I : bd->captures()) {
    const VarDecl *vd = I.getVariable();
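
// Calls to returns_twice functions (such as setjmp or vfork) conservatively
// mark every tracked variable as initialized; calls to analyzer_noreturn
// functions set everything to Unknown to suppress downstream false positives.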
void TransferFunctions::VisitCallExpr(CallExpr *ce) {

    if (Callee->hasAttr<ReturnsTwiceAttr>()) {

    else if (Callee->hasAttr<AnalyzerNoReturnAttr>()) {

      vals.setAllScratchValues(Unknown);
void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
  switch (classification.get(dr)) {
  case ClassifyRefs::Ignore:
    break;
  case ClassifyRefs::Use:
    reportUse(dr, cast<VarDecl>(dr->getDecl()));
    break;
  case ClassifyRefs::Init:
    vals[cast<VarDecl>(dr->getDecl())] = Initialized;
    break;
  case ClassifyRefs::SelfInit:
    handler.handleSelfInit(cast<VarDecl>(dr->getDecl()));
    break;
  case ClassifyRefs::ConstRefUse:
    reportConstRefUse(dr, cast<VarDecl>(dr->getDecl()));
    break;
  }
}
  if (const VarDecl *VD = Var.getDecl())
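
// VisitDeclStmt: a tracked variable with an initializer becomes Initialized
// (a self-initializer such as 'int x = x;' is reported instead); a declaration
// without an initializer makes the variable Uninitialized.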
void TransferFunctions::VisitDeclStmt(DeclStmt *DS) {
  for (auto *DI : DS->decls()) {
    auto *VD = dyn_cast<VarDecl>(DI);

      }
      else if (VD->getInit()) {
void TransferFunctions::VisitGCCAsmStmt(GCCAsmStmt *as) {

  while (const auto *UO = dyn_cast<UnaryOperator>(Ex))

    vals.setAllScratchValues(Unknown);
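
// runOnBlock(): merge the value vectors of the block's predecessors into the
// scratch vector, run TransferFunctions over the block's statements, and
// return whether the block's stored values changed.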
                       const ClassifyRefs &classification,
                       llvm::BitVector &wasAnalyzed,

  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    vals.mergeIntoScratch(vals.getValueVector(pred), isFirst);

  TransferFunctions tf(vals, cfg, block, ac, classification, handler);
  for (const auto &I : *block) {
    if (std::optional<CFGStmt> cs = I.getAs<CFGStmt>())
      tf.Visit(const_cast<Stmt *>(cs->getStmt()));
  }

  if (auto *as = dyn_cast_or_null<GCCAsmStmt>(terminator.getStmt()))

  return vals.updateValueVectorWithScratch(block);
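
// PruneBlocksHandler is a pass-one handler that only records which blocks saw
// a potential uninitialized use, so that the reporting pass can revisit just
// those blocks.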
  llvm::BitVector hadUse;

  bool hadAnyUse = false;

  unsigned currentBlock = 0;

  PruneBlocksHandler(unsigned numBlocks) : hadUse(numBlocks, false) {}

  ~PruneBlocksHandler() override = default;

    hadUse[currentBlock] = true;

    hadUse[currentBlock] = true;

    hadUse[currentBlock] = true;
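
// Driver: compute the set of tracked declarations, classify all references,
// mark every tracked variable Uninitialized in the entry block, iterate
// runOnBlock over a worklist until a fixed point is reached, and finally make
// a reporting pass over the blocks flagged by PruneBlocksHandler.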
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;

  ClassifyRefs classification(ac);

  ValueVector &vec = vals.getValueVector(&entry);
  const unsigned n = vals.getNumEntries();
  for (unsigned j = 0; j < n; ++j) {

    bool changed = runOnBlock(block, cfg, ac, vals,
                              classification, wasAnalyzed, PBH);
    if (changed || !previouslyVisited[block->getBlockID()])

    previouslyVisited[block->getBlockID()] = true;

  for (const auto *block : cfg)
    runOnBlock(block, cfg, ac, vals, classification, wasAnalyzed, handler);
UninitVariablesHandler::~UninitVariablesHandler() = default;