39#include "llvm/ADT/DenseMap.h"
40#include "llvm/ADT/ImmutableMap.h"
41#include "llvm/ADT/STLExtras.h"
42#include "llvm/ADT/ScopeExit.h"
43#include "llvm/ADT/SmallVector.h"
44#include "llvm/ADT/StringRef.h"
45#include "llvm/Support/Allocator.h"
46#include "llvm/Support/ErrorHandling.h"
47#include "llvm/Support/TrailingObjects.h"
48#include "llvm/Support/raw_ostream.h"
67 const Expr *DeclExp, StringRef Kind) {
// A small set of capability expressions with de-duplicated insertion.
// NOTE(review): extraction artifact — original line numbers are fused into the
// tokens and interior lines are elided; code left byte-identical.
81class CapExprSet :
public SmallVector<CapabilityExpr, 4> {
// Push CapE only if no equivalent entry is already present (uses llvm::none_of).
84 void push_back_nodup(
const CapabilityExpr &CapE) {
85 if (llvm::none_of(*
this, [=](
const CapabilityExpr &CapE2) {
// Interior of the FactEntry hierarchy base: a capability that is known to be
// held at a program point, tagged with how it was acquired.
// NOTE(review): the enclosing class header and several members are elided by
// the extraction; code left byte-identical.
101 enum FactEntryKind { Lockable, ScopedLockable };
// Discriminator for LLVM-style RTTI (see classof in the subclasses below).
112 const FactEntryKind Kind : 8;
118 SourceKind Source : 8;
// Where the capability was acquired (used for diagnostics).
121 SourceLocation AcquireLoc;
124 ~FactEntry() =
default;
127 FactEntry(FactEntryKind FK,
const CapabilityExpr &CE,
LockKind LK,
128 SourceLocation Loc, SourceKind Src)
129 : CapabilityExpr(CE), Kind(FK), LKind(LK), Source(Src), AcquireLoc(Loc) {}
132 SourceLocation loc()
const {
return AcquireLoc; }
133 FactEntryKind getFactEntryKind()
const {
return Kind; }
// Predicates over the acquisition source (Asserted/Declared/Managed).
135 bool asserted()
const {
return Source == Asserted; }
136 bool declared()
const {
return Source == Declared; }
137 bool managed()
const {
return Source == Managed; }
// Pure-virtual hooks implemented by Lockable/ScopedLockable entries below.
140 handleRemovalFromIntersection(
const FactSet &FSet, FactManager &FactMan,
142 ThreadSafetyHandler &Handler)
const = 0;
143 virtual void handleLock(FactSet &FSet, FactManager &FactMan,
144 const FactEntry &entry,
145 ThreadSafetyHandler &Handler)
const = 0;
146 virtual void handleUnlock(FactSet &FSet, FactManager &FactMan,
147 const CapabilityExpr &Cp, SourceLocation UnlockLoc,
149 ThreadSafetyHandler &Handler)
const = 0;
// Compact index of a FactEntry inside FactManager::Facts.
157using FactID =
unsigned short;
// Owns all FactEntry objects; entries live in a BumpPtrAllocator and are never
// individually destroyed (hence the trivially-destructible static_assert).
163 llvm::BumpPtrAllocator &Alloc;
164 std::vector<const FactEntry *> Facts;
167 FactManager(llvm::BumpPtrAllocator &Alloc) : Alloc(Alloc) {}
169 template <
typename T,
typename... ArgTypes>
170 T *createFact(ArgTypes &&...Args) {
171 static_assert(std::is_trivially_destructible_v<T>);
172 return T::create(Alloc, std::forward<ArgTypes>(Args)...);
// Register an entry and return its index; asserts FactID does not overflow.
175 FactID newFact(
const FactEntry *Entry) {
176 Facts.push_back(Entry);
177 assert(Facts.size() - 1 <= std::numeric_limits<FactID>::max() &&
178 "FactID space exhausted");
179 return static_cast<unsigned short>(Facts.size() - 1);
182 const FactEntry &operator[](FactID F)
const {
return *Facts[F]; }
// Interior of FactSet: the set of capabilities held at one program point,
// stored as FactIDs into a shared FactManager.
// NOTE(review): the class header and several statements are elided by the
// extraction; code left byte-identical.
194 using FactVec = SmallVector<FactID, 4>;
199 using iterator = FactVec::iterator;
200 using const_iterator = FactVec::const_iterator;
202 iterator begin() {
return FactIDs.begin(); }
203 const_iterator begin()
const {
return FactIDs.begin(); }
205 iterator end() {
return FactIDs.end(); }
206 const_iterator end()
const {
return FactIDs.end(); }
208 bool isEmpty()
const {
return FactIDs.size() == 0; }
// Empty modulo negative facts: scans entries and ignores negative() ones.
211 bool isEmpty(FactManager &FactMan)
const {
212 for (
const auto FID : *
this) {
213 if (!FactMan[FID].negative())
219 void addLockByID(FactID ID) { FactIDs.push_back(ID); }
221 FactID addLock(FactManager &FM,
const FactEntry *Entry) {
222 FactID F = FM.newFact(Entry);
223 FactIDs.push_back(F);
// Remove a matching lock via swap-with-last; checks the last slot separately.
227 bool removeLock(FactManager& FM,
const CapabilityExpr &CapE) {
228 unsigned n = FactIDs.size();
232 for (
unsigned i = 0; i < n-1; ++i) {
233 if (FM[FactIDs[i]].
matches(CapE)) {
234 FactIDs[i] = FactIDs[n-1];
239 if (FM[FactIDs[n-1]].
matches(CapE)) {
246 std::optional<FactID> replaceLock(FactManager &FM, iterator It,
247 const FactEntry *Entry) {
250 FactID F = FM.newFact(Entry);
255 std::optional<FactID> replaceLock(FactManager &FM,
const CapabilityExpr &CapE,
256 const FactEntry *Entry) {
257 return replaceLock(FM, findLockIter(FM, CapE), Entry);
260 iterator findLockIter(FactManager &FM,
const CapabilityExpr &CapE) {
261 return llvm::find_if(*
this,
262 [&](FactID ID) {
return FM[
ID].matches(CapE); });
265 const FactEntry *findLock(FactManager &FM,
const CapabilityExpr &CapE)
const {
267 llvm::find_if(*
this, [&](FactID ID) {
return FM[
ID].matches(CapE); });
268 return I != end() ? &FM[*I] :
nullptr;
// Like findLock but uses matchesUniv (universal-capability matching).
271 const FactEntry *findLockUniv(FactManager &FM,
272 const CapabilityExpr &CapE)
const {
273 auto I = llvm::find_if(
274 *
this, [&](FactID ID) ->
bool {
return FM[
ID].matchesUniv(CapE); });
275 return I != end() ? &FM[*I] :
nullptr;
278 const FactEntry *findPartialMatch(FactManager &FM,
279 const CapabilityExpr &CapE)
const {
280 auto I = llvm::find_if(*
this, [&](FactID ID) ->
bool {
281 return FM[
ID].partiallyMatches(CapE);
283 return I != end() ? &FM[*I] :
nullptr;
// True if some held fact's valueDecl() is exactly Vd.
286 bool containsMutexDecl(FactManager &FM,
const ValueDecl* Vd)
const {
287 auto I = llvm::find_if(
288 *
this, [&](FactID ID) ->
bool {
return FM[
ID].valueDecl() == Vd; });
293class ThreadSafetyAnalyzer;
// Fragments of the acquired-before graph bookkeeping (BeforeInfo and the maps
// keyed by ValueDecl). NOTE(review): declarations are elided mid-signature by
// the extraction; code left byte-identical.
308 BeforeInfo() =
default;
309 BeforeInfo(BeforeInfo &&) =
default;
313 llvm::DenseMap<const ValueDecl *, std::unique_ptr<BeforeInfo>>;
314 using CycleMap = llvm::DenseMap<const ValueDecl *, bool>;
320 ThreadSafetyAnalyzer& Analyzer);
323 ThreadSafetyAnalyzer &Analyzer);
327 ThreadSafetyAnalyzer& Analyzer,
340class LocalVariableMap;
// Context maps a local variable to an index into LocalVariableMap's
// VarDefinitions table.
342using LocalVarContext = llvm::ImmutableMap<const NamedDecl *, unsigned>;
345enum CFGBlockSide { CBS_Entry, CBS_Exit };
// Per-CFG-block analysis state: entry/exit fact sets (elided here), variable
// contexts, and source locations for diagnostics.
358 LocalVarContext EntryContext;
361 LocalVarContext ExitContext;
364 SourceLocation EntryLoc;
367 SourceLocation ExitLoc;
373 bool Reachable =
false;
375 const FactSet &getSet(CFGBlockSide Side)
const {
376 return Side == CBS_Entry ? EntrySet : ExitSet;
379 SourceLocation getLocation(CFGBlockSide Side)
const {
380 return Side == CBS_Entry ? EntryLoc : ExitLoc;
384 CFGBlockInfo(LocalVarContext EmptyCtx)
385 : EntryContext(EmptyCtx), ExitContext(EmptyCtx) {}
388 static CFGBlockInfo getEmptyBlockInfo(LocalVariableMap &M);
// Tracks SSA-like definitions of local variables across the CFG so that
// expressions mentioning locals (e.g. trylock results) can be resolved.
// NOTE(review): extraction artifact — original line numbers are fused into the
// tokens and many interior lines are elided; code left byte-identical.
404class LocalVariableMap {
406 using Context = LocalVarContext;
// One definition of a variable: either an initializing expression (Exp) or a
// reference (DirectRef/CanonicalRef) to an earlier VarDefinition slot.
412 struct VarDefinition {
414 friend class LocalVariableMap;
417 const NamedDecl *Dec;
420 const Expr *Exp =
nullptr;
423 unsigned DirectRef = 0;
426 unsigned CanonicalRef = 0;
// A definition with no Exp is a reference to another definition.
431 bool isReference()
const {
return !Exp; }
// Resetting both refs to 0 marks the definition as "undefined".
433 void invalidateRef() { DirectRef = CanonicalRef = 0; }
437 VarDefinition(
const NamedDecl *D,
const Expr *E, Context
C)
438 : Dec(D), Exp(E), Ctx(
C) {}
441 VarDefinition(
const NamedDecl *D,
unsigned DirectRef,
unsigned CanonicalRef,
443 : Dec(D), DirectRef(DirectRef), CanonicalRef(CanonicalRef), Ctx(
C) {}
447 Context::Factory ContextFactory;
448 std::vector<VarDefinition> VarDefinitions;
449 std::vector<std::pair<const Stmt *, Context>> SavedContexts;
// Slot 0 is a sentinel "undefined" definition.
454 VarDefinitions.push_back(VarDefinition(
nullptr, 0, 0, getEmptyContext()));
458 const VarDefinition* lookup(
const NamedDecl *D, Context Ctx) {
459 const unsigned *i = Ctx.lookup(D);
462 assert(*i < VarDefinitions.size());
463 return &VarDefinitions[*i];
// Resolve D to its defining expression, following reference chains and
// updating Ctx to the context the expression was defined in.
469 const Expr* lookupExpr(
const NamedDecl *D, Context &Ctx) {
470 const unsigned *P = Ctx.lookup(D);
476 if (VarDefinitions[i].Exp) {
477 Ctx = VarDefinitions[i].Ctx;
478 return VarDefinitions[i].Exp;
480 i = VarDefinitions[i].DirectRef;
485 Context getEmptyContext() {
return ContextFactory.getEmptyMap(); }
// Walk SavedContexts in visitation order, advancing CtxIndex as statements
// are encountered.
490 Context getNextContext(
unsigned &CtxIndex,
const Stmt *S, Context
C) {
491 if (SavedContexts[CtxIndex+1].first == S) {
493 Context
Result = SavedContexts[CtxIndex].second;
// Debug helpers: print a definition / the whole map / a context to errs().
499 void dumpVarDefinitionName(
unsigned i) {
501 llvm::errs() <<
"Undefined";
504 const NamedDecl *
Dec = VarDefinitions[i].Dec;
506 llvm::errs() <<
"<<NULL>>";
509 Dec->printName(llvm::errs());
510 llvm::errs() <<
"." << i <<
" " << ((
const void*) Dec);
515 for (
unsigned i = 1, e = VarDefinitions.size(); i < e; ++i) {
516 const Expr *Exp = VarDefinitions[i].Exp;
517 unsigned Ref = VarDefinitions[i].DirectRef;
519 dumpVarDefinitionName(i);
520 llvm::errs() <<
" = ";
521 if (Exp) Exp->
dump();
523 dumpVarDefinitionName(Ref);
524 llvm::errs() <<
"\n";
530 void dumpContext(Context
C) {
531 for (Context::iterator I =
C.begin(), E =
C.end(); I != E; ++I) {
532 const NamedDecl *D = I.getKey();
534 llvm::errs() <<
" -> ";
535 dumpVarDefinitionName(I.getData());
536 llvm::errs() <<
"\n";
541 void traverseCFG(CFG *CFGraph,
const PostOrderCFGView *SortedGraph,
542 std::vector<CFGBlockInfo> &BlockInfo);
545 friend class VarMapBuilder;
// Follow reference chains until a non-reference definition is reached.
548 unsigned getCanonicalDefinitionID(
unsigned ID)
const {
549 while (ID > 0 && VarDefinitions[ID].isReference())
550 ID = VarDefinitions[
ID].CanonicalRef;
555 unsigned getContextIndex() {
return SavedContexts.size()-1; }
558 void saveContext(
const Stmt *S, Context
C) {
559 SavedContexts.push_back(std::make_pair(S,
C));
// Add a brand-new definition for D (must not already be in Ctx).
564 Context addDefinition(
const NamedDecl *D,
const Expr *Exp, Context Ctx) {
565 assert(!Ctx.contains(D));
566 unsigned newID = VarDefinitions.size();
567 Context NewCtx = ContextFactory.add(Ctx, D, newID);
568 VarDefinitions.push_back(VarDefinition(D, Exp, Ctx));
// Add a reference definition pointing at slot Ref (canonicalized).
573 Context addReference(
const NamedDecl *D,
unsigned Ref, Context Ctx) {
574 unsigned newID = VarDefinitions.size();
575 Context NewCtx = ContextFactory.add(Ctx, D, newID);
576 VarDefinitions.push_back(
577 VarDefinition(D, Ref, getCanonicalDefinitionID(Ref), Ctx));
// Rebind D to a new definition if it already has one in Ctx.
583 Context updateDefinition(
const NamedDecl *D, Expr *Exp, Context Ctx) {
584 if (Ctx.contains(D)) {
585 unsigned newID = VarDefinitions.size();
586 Context NewCtx = ContextFactory.remove(Ctx, D);
587 NewCtx = ContextFactory.add(NewCtx, D, newID);
588 VarDefinitions.push_back(VarDefinition(D, Exp, Ctx));
// Rebind D to the sentinel "undefined" slot 0.
596 Context clearDefinition(
const NamedDecl *D, Context Ctx) {
597 Context NewCtx = Ctx;
598 if (NewCtx.contains(D)) {
599 NewCtx = ContextFactory.remove(NewCtx, D);
600 NewCtx = ContextFactory.add(NewCtx, D, 0);
606 Context removeDefinition(
const NamedDecl *D, Context Ctx) {
607 Context NewCtx = Ctx;
608 if (NewCtx.contains(D)) {
609 NewCtx = ContextFactory.remove(NewCtx, D);
614 Context intersectContexts(Context C1, Context C2);
615 Context createReferenceContext(Context
C);
616 void intersectBackEdge(Context C1, Context C2);
// Build a CFGBlockInfo whose entry/exit contexts are M's empty context.
622CFGBlockInfo CFGBlockInfo::getEmptyBlockInfo(LocalVariableMap &M) {
623 return CFGBlockInfo(M.getEmptyContext());
// Statement visitor that populates a LocalVariableMap while walking a CFG
// block: declarations, assignments, and calls that may invalidate locals.
629class VarMapBuilder :
public ConstStmtVisitor<VarMapBuilder> {
631 LocalVariableMap* VMap;
632 LocalVariableMap::Context Ctx;
634 VarMapBuilder(LocalVariableMap *VM, LocalVariableMap::Context
C)
635 : VMap(VM), Ctx(
C) {}
637 void VisitDeclStmt(
const DeclStmt *S);
638 void VisitBinaryOperator(
const BinaryOperator *BO);
639 void VisitCallExpr(
const CallExpr *CE);
// Record definitions for variables declared with an initializer; only
// trivially-typed variables are tracked (isTrivialType check).
// NOTE(review): interior lines are elided by the extraction.
645void VarMapBuilder::VisitDeclStmt(
const DeclStmt *S) {
646 bool modifiedCtx =
false;
648 for (
const auto *D : DGrp) {
649 if (
const auto *VD = dyn_cast_or_null<VarDecl>(D)) {
650 const Expr *E = VD->getInit();
653 QualType
T = VD->getType();
654 if (
T.isTrivialType(VD->getASTContext())) {
655 Ctx = VMap->addDefinition(VD, E, Ctx);
// Snapshot the (possibly) updated context at this statement.
661 VMap->saveContext(S, Ctx);
// On assignment to a tracked local (DeclRefExpr LHS), rebind it to the RHS;
// otherwise clear its definition so stale values are not used.
665void VarMapBuilder::VisitBinaryOperator(
const BinaryOperator *BO) {
672 if (
const auto *DRE = dyn_cast<DeclRefExpr>(LHSExp)) {
673 const ValueDecl *VDec = DRE->getDecl();
674 if (Ctx.lookup(VDec)) {
676 Ctx = VMap->updateDefinition(VDec, BO->
getRHS(), Ctx);
679 Ctx = VMap->clearDefinition(VDec, Ctx);
680 VMap->saveContext(BO, Ctx);
// Conservatively clear definitions of locals whose address (or reference) is
// passed to a call, since the callee may mutate them. Special-cases
// "bind"/"bind_front" by identifier name.
686void VarMapBuilder::VisitCallExpr(
const CallExpr *CE) {
696 if (II->isStr(
"bind") || II->isStr(
"bind_front"))
702 for (
unsigned Idx = 0; Idx < CE->
getNumArgs(); ++Idx) {
708 QualType ParamType = PVD->
getType();
711 const ValueDecl *VDec =
nullptr;
// Argument passed by reference: the DeclRefExpr itself names the local.
714 if (
const auto *DRE = dyn_cast<DeclRefExpr>(Arg))
715 VDec = DRE->getDecl();
// Argument passed by pointer: look through &x.
719 if (
const auto *UO = dyn_cast<UnaryOperator>(Arg)) {
720 if (UO->getOpcode() == UO_AddrOf) {
721 const Expr *SubE = UO->getSubExpr()->IgnoreParenCasts();
722 if (
const auto *DRE = dyn_cast<DeclRefExpr>(SubE))
723 VDec = DRE->getDecl();
728 if (VDec && Ctx.lookup(VDec)) {
729 Ctx = VMap->clearDefinition(VDec, Ctx);
730 VMap->saveContext(CE, Ctx);
// Merge two contexts at a CFG join: keep a binding only when both sides agree
// on its canonical definition.
738LocalVariableMap::Context
739LocalVariableMap::intersectContexts(Context C1, Context C2) {
741 for (
const auto &P : C1) {
742 const NamedDecl *
Dec = P.first;
743 const unsigned *I2 = C2.lookup(Dec);
747 }
else if (getCanonicalDefinitionID(P.second) !=
748 getCanonicalDefinitionID(*I2)) {
// Build a fresh context in which every binding of C becomes a reference
// definition (used for back-edge handling before the loop body is analyzed).
760LocalVariableMap::Context LocalVariableMap::createReferenceContext(Context
C) {
761 Context
Result = getEmptyContext();
762 for (
const auto &P :
C)
// At a loop back-edge, invalidate reference definitions in C1 that the loop
// body (C2) dropped or rebound to a different canonical definition.
770void LocalVariableMap::intersectBackEdge(Context C1, Context C2) {
771 for (
const auto &P : C1) {
772 const unsigned I1 = P.second;
773 VarDefinition *VDef = &VarDefinitions[I1];
774 assert(VDef->isReference());
776 const unsigned *I2 = C2.lookup(P.first);
779 VDef->invalidateRef();
785 if (VDef->CanonicalRef != getCanonicalDefinitionID(*I2))
786 VDef->invalidateRef();
// Single post-order pass over the CFG: compute each block's entry context by
// intersecting predecessor exits, run VarMapBuilder over its statements, and
// reconcile loop back-edges afterwards.
// NOTE(review): loop headers and several statements are elided mid-line by the
// extraction; code left byte-identical.
827void LocalVariableMap::traverseCFG(CFG *CFGraph,
828 const PostOrderCFGView *SortedGraph,
829 std::vector<CFGBlockInfo> &BlockInfo) {
830 PostOrderCFGView::CFGBlockSet VisitedBlocks(CFGraph);
832 for (
const auto *CurrBlock : *SortedGraph) {
833 unsigned CurrBlockID = CurrBlock->getBlockID();
834 CFGBlockInfo *CurrBlockInfo = &BlockInfo[CurrBlockID];
836 VisitedBlocks.insert(CurrBlock);
839 bool HasBackEdges =
false;
// Predecessors not yet visited are back-edges (handled after the pass).
842 PE = CurrBlock->pred_end(); PI != PE; ++PI) {
844 if (*PI ==
nullptr || !VisitedBlocks.alreadySet(*PI)) {
849 unsigned PrevBlockID = (*PI)->getBlockID();
850 CFGBlockInfo *PrevBlockInfo = &BlockInfo[PrevBlockID];
// First predecessor seeds the entry context; later ones are intersected.
853 CurrBlockInfo->EntryContext = PrevBlockInfo->ExitContext;
857 CurrBlockInfo->EntryContext =
858 intersectContexts(CurrBlockInfo->EntryContext,
859 PrevBlockInfo->ExitContext);
// With back-edges, convert the entry context to reference definitions.
866 CurrBlockInfo->EntryContext =
867 createReferenceContext(CurrBlockInfo->EntryContext);
870 saveContext(
nullptr, CurrBlockInfo->EntryContext);
871 CurrBlockInfo->EntryIndex = getContextIndex();
874 VarMapBuilder VMapBuilder(
this, CurrBlockInfo->EntryContext);
875 for (
const auto &BI : *CurrBlock) {
876 switch (BI.getKind()) {
878 CFGStmt CS = BI.castAs<CFGStmt>();
879 VMapBuilder.Visit(CS.
getStmt());
886 CurrBlockInfo->ExitContext = VMapBuilder.Ctx;
// Reconcile back-edge successors (loop heads) against this exit context.
890 SE = CurrBlock->succ_end(); SI != SE; ++SI) {
892 if (*SI ==
nullptr || !VisitedBlocks.alreadySet(*SI))
895 CFGBlock *FirstLoopBlock = *SI;
896 Context LoopBegin = BlockInfo[FirstLoopBlock->
getBlockID()].EntryContext;
897 Context LoopEnd = CurrBlockInfo->ExitContext;
898 intersectBackEdge(LoopBegin, LoopEnd);
904 saveContext(
nullptr, BlockInfo[exitID].ExitContext);
// Fragment of a pass that assigns Entry/Exit source locations to every CFG
// block, falling back to neighbor blocks when a block has no statements.
// NOTE(review): the function's opening signature is elided by the extraction —
// name assumed from context; code left byte-identical.
911 std::vector<CFGBlockInfo> &BlockInfo) {
912 for (
const auto *CurrBlock : *SortedGraph) {
913 CFGBlockInfo *CurrBlockInfo = &BlockInfo[CurrBlock->getBlockID()];
// Prefer the terminator's location for both entry and exit.
917 if (
const Stmt *S = CurrBlock->getTerminatorStmt()) {
918 CurrBlockInfo->EntryLoc = CurrBlockInfo->ExitLoc = S->
getBeginLoc();
// Otherwise scan backwards for the last statement's location...
921 BE = CurrBlock->rend(); BI != BE; ++BI) {
923 if (std::optional<CFGStmt> CS = BI->getAs<
CFGStmt>()) {
924 CurrBlockInfo->ExitLoc = CS->getStmt()->getBeginLoc();
930 if (CurrBlockInfo->ExitLoc.
isValid()) {
// ...and forwards for the first statement's location.
933 for (
const auto &BI : *CurrBlock) {
935 if (std::optional<CFGStmt> CS = BI.getAs<
CFGStmt>()) {
936 CurrBlockInfo->EntryLoc = CS->getStmt()->getBeginLoc();
// Empty block with a single predecessor: inherit its exit location.
940 }
else if (CurrBlock->pred_size() == 1 && *CurrBlock->pred_begin() &&
941 CurrBlock != &CFGraph->
getExit()) {
944 CurrBlockInfo->EntryLoc = CurrBlockInfo->ExitLoc =
945 BlockInfo[(*CurrBlock->pred_begin())->getBlockID()].ExitLoc;
946 }
else if (CurrBlock->succ_size() == 1 && *CurrBlock->succ_begin()) {
949 CurrBlockInfo->EntryLoc = CurrBlockInfo->ExitLoc =
950 BlockInfo[(*CurrBlock->succ_begin())->getBlockID()].EntryLoc;
// FactEntry for a plain (possibly reentrant) capability; tracks a reentrancy
// depth so re-acquires and releases pair up.
// NOTE(review): several member bodies are elided mid-line by the extraction;
// code left byte-identical.
957class LockableFactEntry final :
public FactEntry {
962 unsigned int ReentrancyDepth = 0;
964 LockableFactEntry(
const CapabilityExpr &CE,
LockKind LK, SourceLocation Loc,
966 : FactEntry(Lockable, CE, LK, Loc, Src) {}
// Factory functions allocate from the analyzer's BumpPtrAllocator.
969 static LockableFactEntry *
create(llvm::BumpPtrAllocator &Alloc,
970 const LockableFactEntry &
Other) {
974 static LockableFactEntry *
create(llvm::BumpPtrAllocator &Alloc,
975 const CapabilityExpr &CE,
LockKind LK,
977 SourceKind Src = Acquired) {
978 return new (
Alloc) LockableFactEntry(CE, LK, Loc, Src);
981 unsigned int getReentrancyDepth()
const {
return ReentrancyDepth; }
984 handleRemovalFromIntersection(
const FactSet &FSet, FactManager &FactMan,
986 ThreadSafetyHandler &Handler)
const override {
987 if (!asserted() && !negative() && !isUniversal()) {
// Re-acquire: bump the reentrancy depth instead of double-lock diagnostics.
993 void handleLock(FactSet &FSet, FactManager &FactMan,
const FactEntry &entry,
994 ThreadSafetyHandler &Handler)
const override {
995 if (
const FactEntry *RFact = tryReenter(FactMan, entry.kind())) {
997 FSet.replaceLock(FactMan, entry, RFact);
// Release: drop one reentrancy level, or remove the fact entirely.
1004 void handleUnlock(FactSet &FSet, FactManager &FactMan,
1005 const CapabilityExpr &Cp, SourceLocation UnlockLoc,
1007 ThreadSafetyHandler &Handler)
const override {
1008 FSet.removeLock(FactMan, Cp);
1010 if (
const FactEntry *RFact = leaveReentrant(FactMan)) {
1012 FSet.addLock(FactMan, RFact);
1014 FSet.addLock(FactMan, FactMan.createFact<LockableFactEntry>(
// Returns a new fact with depth+1, or null when reentry is not possible.
1021 const FactEntry *tryReenter(FactManager &FactMan,
1025 if (
kind() != ReenterKind)
1027 auto *NewFact = FactMan.createFact<LockableFactEntry>(*this);
1028 NewFact->ReentrancyDepth++;
// Returns a new fact with depth-1, or null at depth 0.
1034 const FactEntry *leaveReentrant(FactManager &FactMan)
const {
1035 if (!ReentrancyDepth)
1037 assert(reentrant());
1038 auto *NewFact = FactMan.createFact<LockableFactEntry>(*this);
1039 NewFact->ReentrancyDepth--;
1043 static bool classof(
const FactEntry *A) {
1044 return A->getFactEntryKind() == Lockable;
// How a scoped lockable relates to one of its underlying capabilities:
// acquired by the guard, or released (exclusively/shared) by it.
1048enum UnderlyingCapabilityKind {
1051 UCK_ReleasedExclusive,
1054struct UnderlyingCapability {
1056 UnderlyingCapabilityKind Kind;
// FactEntry for a scope-based guard object (e.g. a RAII lock). The managed
// underlying capabilities are stored inline via llvm::TrailingObjects.
// NOTE(review): several lines are elided mid-statement by the extraction;
// code left byte-identical.
1059class ScopedLockableFactEntry final
1061 private llvm::TrailingObjects<ScopedLockableFactEntry,
1062 UnderlyingCapability> {
1063 friend TrailingObjects;
// Fixed capacity is chosen at create() time; addManaged fills slots in order.
1066 const unsigned ManagedCapacity;
1067 unsigned ManagedSize = 0;
1069 ScopedLockableFactEntry(
const CapabilityExpr &CE, SourceLocation Loc,
1070 SourceKind Src,
unsigned ManagedCapacity)
1071 : FactEntry(ScopedLockable, CE,
LK_Exclusive, Loc, Src),
1072 ManagedCapacity(ManagedCapacity) {}
1074 void addManaged(
const CapabilityExpr &M, UnderlyingCapabilityKind UCK) {
1075 assert(ManagedSize < ManagedCapacity);
1076 new (getTrailingObjects() + ManagedSize) UnderlyingCapability{M, UCK};
1080 ArrayRef<UnderlyingCapability> getManaged()
const {
1081 return getTrailingObjects(ManagedSize);
// Allocates the entry plus trailing storage for ManagedCapacity capabilities.
1085 static ScopedLockableFactEntry *
create(llvm::BumpPtrAllocator &Alloc,
1086 const CapabilityExpr &CE,
1087 SourceLocation Loc, SourceKind Src,
1088 unsigned ManagedCapacity) {
1090 Alloc.Allocate(totalSizeToAlloc<UnderlyingCapability>(ManagedCapacity),
1091 alignof(ScopedLockableFactEntry));
1092 return new (
Storage) ScopedLockableFactEntry(CE, Loc, Src, ManagedCapacity);
1095 CapExprSet getUnderlyingMutexes()
const {
1096 CapExprSet UnderlyingMutexesSet;
1097 for (
const UnderlyingCapability &UnderlyingMutex : getManaged())
1098 UnderlyingMutexesSet.push_back(UnderlyingMutex.Cap);
1099 return UnderlyingMutexesSet;
// Record the guard's relationship to one underlying capability.
1106 void addLock(
const CapabilityExpr &M) { addManaged(M, UCK_Acquired); }
1108 void addExclusiveUnlock(
const CapabilityExpr &M) {
1109 addManaged(M, UCK_ReleasedExclusive);
1112 void addSharedUnlock(
const CapabilityExpr &M) {
1113 addManaged(M, UCK_ReleasedShared);
// Warn when an underlying capability's held/released state at a CFG join
// disagrees with what the guard implies.
1118 handleRemovalFromIntersection(
const FactSet &FSet, FactManager &FactMan,
1120 ThreadSafetyHandler &Handler)
const override {
1124 for (
const auto &UnderlyingMutex : getManaged()) {
1125 const auto *Entry = FSet.findLock(FactMan, UnderlyingMutex.Cap);
1126 if ((UnderlyingMutex.Kind == UCK_Acquired && Entry) ||
1127 (UnderlyingMutex.Kind != UCK_Acquired && !Entry)) {
1131 UnderlyingMutex.Cap.toString(), loc(),
// Relocking the guard re-acquires acquired capabilities and releases the
// released ones.
1137 void handleLock(FactSet &FSet, FactManager &FactMan,
const FactEntry &entry,
1138 ThreadSafetyHandler &Handler)
const override {
1139 for (
const auto &UnderlyingMutex : getManaged()) {
1140 if (UnderlyingMutex.Kind == UCK_Acquired)
1141 lock(FSet, FactMan, UnderlyingMutex.Cap, entry.kind(), entry.loc(),
1144 unlock(FSet, FactMan, UnderlyingMutex.Cap, entry.loc(), &Handler);
// Unlocking (or destroying) the guard does the inverse; diagnostics are
// suppressed when the guard itself is being fully removed.
1148 void handleUnlock(FactSet &FSet, FactManager &FactMan,
1149 const CapabilityExpr &Cp, SourceLocation UnlockLoc,
1151 ThreadSafetyHandler &Handler)
const override {
1152 assert(!Cp.
negative() &&
"Managing object cannot be negative.");
1153 for (
const auto &UnderlyingMutex : getManaged()) {
1156 ThreadSafetyHandler *TSHandler = FullyRemove ?
nullptr : &Handler;
1157 if (UnderlyingMutex.Kind == UCK_Acquired) {
1158 unlock(FSet, FactMan, UnderlyingMutex.Cap, UnlockLoc, TSHandler);
1160 LockKind kind = UnderlyingMutex.Kind == UCK_ReleasedShared
1163 lock(FSet, FactMan, UnderlyingMutex.Cap, kind, UnlockLoc, TSHandler);
1167 FSet.removeLock(FactMan, Cp);
1170 static bool classof(
const FactEntry *A) {
1171 return A->getFactEntryKind() == ScopedLockable;
// Helper: acquire Cp in FSet, reentering if already held; clears !Cp.
1175 void lock(FactSet &FSet, FactManager &FactMan,
const CapabilityExpr &Cp,
1177 ThreadSafetyHandler *Handler)
const {
1178 if (
const auto It = FSet.findLockIter(FactMan, Cp); It != FSet.end()) {
1180 if (
const FactEntry *RFact = Fact.tryReenter(FactMan, kind)) {
1182 FSet.replaceLock(FactMan, It, RFact);
1183 }
else if (Handler) {
1187 FSet.removeLock(FactMan, !Cp);
1188 FSet.addLock(FactMan, FactMan.createFact<LockableFactEntry>(Cp, kind, loc,
// Helper: release Cp in FSet, leaving one reentrancy level if present and
// adding the negative fact !Cp when fully released.
1193 void unlock(FactSet &FSet, FactManager &FactMan,
const CapabilityExpr &Cp,
1194 SourceLocation loc, ThreadSafetyHandler *Handler)
const {
1195 if (
const auto It = FSet.findLockIter(FactMan, Cp); It != FSet.end()) {
1197 if (
const FactEntry *RFact = Fact.leaveReentrant(FactMan)) {
1199 FSet.replaceLock(FactMan, It, RFact);
1205 FactMan.createFact<LockableFactEntry>(!Cp,
LK_Exclusive, loc));
1206 }
else if (Handler) {
1207 SourceLocation PrevLoc;
1208 if (
const FactEntry *Neg = FSet.findLock(FactMan, !Cp))
1209 PrevLoc =
Neg->loc();
// Top-level driver of the analysis: owns the allocator, SExpr builder, the
// local-variable map, per-block info, and the diagnostic handler.
// NOTE(review): many declarations are split mid-signature by the extraction;
// code left byte-identical.
1216class ThreadSafetyAnalyzer {
1217 friend class BuildLockset;
1218 friend class threadSafety::BeforeSet;
1220 llvm::BumpPtrAllocator Bpa;
1221 threadSafety::til::MemRegionRef Arena;
1222 threadSafety::SExprBuilder SxBuilder;
1224 ThreadSafetyHandler &Handler;
1225 const FunctionDecl *CurrentFunction;
1226 LocalVariableMap LocalVarMap;
1228 llvm::SmallDenseMap<const Expr *, til::LiteralPtr *> ConstructedObjects;
1229 FactManager FactMan;
1230 std::vector<CFGBlockInfo> BlockInfo;
1232 BeforeSet *GlobalBeforeSet;
1235 ThreadSafetyAnalyzer(ThreadSafetyHandler &H, BeforeSet *Bset)
1236 : Arena(&Bpa), SxBuilder(Arena), Handler(H), FactMan(Bpa),
1237 GlobalBeforeSet(Bset) {}
1239 bool inCurrentScope(
const CapabilityExpr &CapE);
1241 void addLock(FactSet &FSet,
const FactEntry *Entry,
bool ReqAttr =
false);
1242 void removeLock(FactSet &FSet,
const CapabilityExpr &CapE,
1243 SourceLocation UnlockLoc,
bool FullyRemove,
LockKind Kind);
// Collect capability expressions named by a lock attribute (two overloads:
// plain, and branch-sensitive for trylock edges).
1245 template <
typename AttrType>
1246 void getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
const Expr *Exp,
1247 const NamedDecl *D, til::SExpr *
Self =
nullptr);
1249 template <
class AttrType>
1250 void getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
const Expr *Exp,
1252 const CFGBlock *PredBlock,
const CFGBlock *CurrBlock,
1253 Expr *BrE,
bool Neg);
1255 const CallExpr* getTrylockCallExpr(
const Stmt *
Cond, LocalVarContext
C,
1258 void getEdgeLockset(FactSet &
Result,
const FactSet &ExitSet,
1259 const CFGBlock* PredBlock,
1260 const CFGBlock *CurrBlock);
1262 bool join(
const FactEntry &A,
const FactEntry &B, SourceLocation JoinLoc,
1265 void intersectAndWarn(FactSet &EntrySet,
const FactSet &ExitSet,
1269 void intersectAndWarn(FactSet &EntrySet,
const FactSet &ExitSet,
1271 intersectAndWarn(EntrySet, ExitSet, JoinLoc, LEK, LEK);
1274 void runAnalysis(AnalysisDeclContext &AC);
1276 void warnIfMutexNotHeld(
const FactSet &FSet,
const NamedDecl *D,
1277 const Expr *Exp,
AccessKind AK, Expr *MutexExp,
1279 SourceLocation Loc);
1280 void warnIfMutexHeld(
const FactSet &FSet,
const NamedDecl *D,
const Expr *Exp,
1281 Expr *MutexExp, til::SExpr *
Self, SourceLocation Loc);
1283 void checkAccess(
const FactSet &FSet,
const Expr *Exp,
AccessKind AK,
1285 void checkPtAccess(
const FactSet &FSet,
const Expr *Exp,
AccessKind AK,
// Fragment of the pass that builds the acquired-before graph from
// AcquiredBefore / AcquiredAfter attributes on a declaration.
// NOTE(review): the function's opening signature is elided by the extraction;
// code left byte-identical.
1293 ThreadSafetyAnalyzer& Analyzer) {
1295 BeforeInfo *Info =
nullptr;
// Create the BeforeInfo slot for Vd lazily.
1299 std::unique_ptr<BeforeInfo> &InfoPtr = BMap[Vd];
1301 InfoPtr.reset(
new BeforeInfo());
1302 Info = InfoPtr.get();
1305 for (
const auto *At : Vd->
attrs()) {
1306 switch (At->getKind()) {
// acquired_before(m): record Vd -> m edges directly.
1307 case attr::AcquiredBefore: {
1311 for (
const auto *Arg : A->args()) {
1313 Analyzer.SxBuilder.translateAttrExpr(Arg,
nullptr);
1315 Info->Vect.push_back(Cpvd);
1316 const auto It = BMap.find(Cpvd);
1317 if (It == BMap.end())
// acquired_after(m): record the reverse edge m -> Vd.
1323 case attr::AcquiredAfter: {
1327 for (
const auto *Arg : A->args()) {
1329 Analyzer.SxBuilder.translateAttrExpr(Arg,
nullptr);
1333 ArgInfo->Vect.push_back(Vd);
// Look up (building on first use) the BeforeInfo for a declaration.
1346BeforeSet::BeforeInfo *
1348 ThreadSafetyAnalyzer &Analyzer) {
1349 auto It = BMap.find(Vd);
1350 BeforeInfo *Info =
nullptr;
1351 if (It == BMap.end())
1354 Info = It->second.get();
1355 assert(Info &&
"BMap contained nullptr?");
// Fragment of the DFS over the acquired-before graph: warns when a lock that
// must be acquired earlier is already held, and detects cycles via the
// Visited marker (1 = on stack, 2 = cycle reported).
// NOTE(review): the function's opening is elided by the extraction; code left
// byte-identical.
1361 const FactSet& FSet,
1362 ThreadSafetyAnalyzer& Analyzer,
1374 if (Info->Visited == 1)
1377 if (Info->Visited == 2)
1380 if (Info->Vect.empty())
1383 InfoVect.push_back(Info);
1385 for (
const auto *Vdb : Info->Vect) {
// Vdb must be acquired before Vd; holding it now is an ordering violation.
1387 if (FSet.containsMutexDecl(Analyzer.FactMan, Vdb)) {
1388 StringRef L1 = StartVd->
getName();
1389 StringRef L2 = Vdb->getName();
1390 Analyzer.Handler.handleLockAcquiredBefore(CapKind, L1, L2, Loc);
// Report each before/after cycle only once per declaration.
1394 if (CycMap.try_emplace(Vd,
true).second) {
1396 Analyzer.Handler.handleBeforeAfterCycle(L1, Vd->
getLocation());
1406 for (
auto *Info : InfoVect)
// Resolve an expression to the ValueDecl it names, looking through implicit
// casts, DeclRefExprs, and MemberExprs.
1412 if (
const auto *CE = dyn_cast<ImplicitCastExpr>(Exp))
1415 if (
const auto *DR = dyn_cast<DeclRefExpr>(Exp))
1416 return DR->getDecl();
1418 if (
const auto *ME = dyn_cast<MemberExpr>(Exp))
1419 return ME->getMemberDecl();
// Decide whether a capability expression refers to something visible from the
// function currently being analyzed (a local literal, or a member of the
// current method's class).
1424bool ThreadSafetyAnalyzer::inCurrentScope(
const CapabilityExpr &CapE) {
1425 const threadSafety::til::SExpr *SExp = CapE.
sexpr();
1426 assert(SExp &&
"Null expressions should be ignored");
1428 if (
const auto *LP = dyn_cast<til::LiteralPtr>(SExp)) {
1429 const ValueDecl *VD = LP->clangDecl();
// Member projections only count inside a method of the owning class.
1441 if (
const auto *P = dyn_cast<til::Project>(SExp)) {
1442 if (!isa_and_nonnull<CXXMethodDecl>(CurrentFunction))
1444 const ValueDecl *VD = P->clangDecl();
// Add a capability to FSet: clears a matching negative fact, emits the
// relevant diagnostics, and defers double-lock handling to the existing
// entry's handleLock.
// NOTE(review): interior lines are elided by the extraction.
1453void ThreadSafetyAnalyzer::addLock(FactSet &FSet,
const FactEntry *Entry,
1455 if (Entry->shouldIgnore())
1458 if (!ReqAttr && !Entry->negative()) {
// Acquiring Cp cancels the negative fact !Cp.
1460 CapabilityExpr NegC = !*Entry;
1461 const FactEntry *Nen = FSet.findLock(FactMan, NegC);
1463 FSet.removeLock(FactMan, NegC);
1466 if (inCurrentScope(*Entry) && !Entry->asserted() && !Entry->reentrant())
1473 if (!Entry->asserted() && !Entry->declared()) {
1475 Entry->loc(), Entry->getKind());
// Already held: let the existing fact decide (reentrancy vs. double-lock).
1478 if (
const FactEntry *Cp = FSet.findLock(FactMan, *Entry)) {
1479 if (!Entry->asserted())
1480 Cp->handleLock(FSet, FactMan, *Entry, Handler);
1482 FSet.addLock(FactMan, Entry);
// Release a capability from FSet, diagnosing unlock-of-unheld and
// wrong-kind (shared vs. exclusive) releases.
1488void ThreadSafetyAnalyzer::removeLock(FactSet &FSet,
const CapabilityExpr &Cp,
1489 SourceLocation UnlockLoc,
1490 bool FullyRemove,
LockKind ReceivedKind) {
1494 const FactEntry *LDat = FSet.findLock(FactMan, Cp);
1496 SourceLocation PrevLoc;
1497 if (
const FactEntry *Neg = FSet.findLock(FactMan, !Cp))
1498 PrevLoc =
Neg->loc();
// Generic unlock matches any kind; otherwise kinds must agree.
1506 if (ReceivedKind !=
LK_Generic && LDat->kind() != ReceivedKind) {
1508 ReceivedKind, LDat->loc(), UnlockLoc);
1511 LDat->handleUnlock(FSet, FactMan, Cp, UnlockLoc, FullyRemove, Handler);
// Translate a lock attribute's arguments into capability expressions; with no
// arguments the attribute refers to the receiver object itself.
1516template <
typename AttrType>
1517void ThreadSafetyAnalyzer::getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
1518 const Expr *Exp,
const NamedDecl *D,
1520 if (Attr->args_size() == 0) {
1529 Mtxs.push_back_nodup(Cp);
1533 for (
const auto *Arg : Attr->args()) {
1541 Mtxs.push_back_nodup(Cp);
// Branch-sensitive overload for trylock attributes: only add the mutexes on
// the CFG edge corresponding to the attribute's success value (BrE), flipped
// when the condition was negated.
1548template <
class AttrType>
1549void ThreadSafetyAnalyzer::getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
1550 const Expr *Exp,
const NamedDecl *D,
1551 const CFGBlock *PredBlock,
1552 const CFGBlock *CurrBlock,
1553 Expr *BrE,
bool Neg) {
// Decode the success value from a bool or integer literal.
1555 bool branch =
false;
1556 if (
const auto *BLE = dyn_cast_or_null<CXXBoolLiteralExpr>(BrE))
1557 branch = BLE->getValue();
1558 else if (
const auto *ILE = dyn_cast_or_null<IntegerLiteral>(BrE))
1559 branch = ILE->getValue().getBoolValue();
// Successor 0 is the true branch, successor 1 the false branch.
1561 int branchnum = branch ? 0 : 1;
1563 branchnum = !branchnum;
1568 SE = PredBlock->
succ_end(); SI != SE && i < 2; ++SI, ++i) {
1569 if (*SI == CurrBlock && i == branchnum)
1570 getMutexIDs(Mtxs, Attr, Exp, D);
1578 }
else if (
const auto *BLE = dyn_cast<CXXBoolLiteralExpr>(E)) {
1579 TCond = BLE->getValue();
1581 }
else if (
const auto *ILE = dyn_cast<IntegerLiteral>(E)) {
1582 TCond = ILE->getValue().getBoolValue();
1584 }
else if (
auto *CE = dyn_cast<ImplicitCastExpr>(E))
// Walk a branch condition down to the trylock call it tests, tracking whether
// the result is negated (!, ==/!= against a constant, ?:, &&/||), and
// resolving locals through the LocalVariableMap.
// NOTE(review): several branches are elided by the extraction; code left
// byte-identical.
1592const CallExpr* ThreadSafetyAnalyzer::getTrylockCallExpr(
const Stmt *
Cond,
1598 if (
const auto *CallExp = dyn_cast<CallExpr>(
Cond)) {
// __builtin_expect(x, c) is transparent: recurse into its first argument.
1599 if (CallExp->getBuiltinCallee() == Builtin::BI__builtin_expect)
1600 return getTrylockCallExpr(CallExp->getArg(0),
C, Negate);
1603 else if (
const auto *PE = dyn_cast<ParenExpr>(
Cond))
1604 return getTrylockCallExpr(PE->getSubExpr(),
C, Negate);
1605 else if (
const auto *CE = dyn_cast<ImplicitCastExpr>(
Cond))
1606 return getTrylockCallExpr(CE->getSubExpr(),
C, Negate);
1607 else if (
const auto *FE = dyn_cast<FullExpr>(
Cond))
1608 return getTrylockCallExpr(FE->getSubExpr(),
C, Negate);
// A local variable holding the trylock result: chase its definition.
1609 else if (
const auto *DRE = dyn_cast<DeclRefExpr>(
Cond)) {
1610 const Expr *E = LocalVarMap.lookupExpr(DRE->getDecl(),
C);
1611 return getTrylockCallExpr(E,
C, Negate);
1613 else if (
const auto *UOP = dyn_cast<UnaryOperator>(
Cond)) {
1614 if (UOP->getOpcode() == UO_LNot) {
1616 return getTrylockCallExpr(UOP->getSubExpr(),
C, Negate);
// x == c / x != c: fold the constant into Negate and recurse on the other
// operand.
1620 else if (
const auto *BOP = dyn_cast<BinaryOperator>(
Cond)) {
1621 if (BOP->getOpcode() == BO_EQ || BOP->getOpcode() == BO_NE) {
1622 if (BOP->getOpcode() == BO_NE)
1627 if (!TCond) Negate = !Negate;
1628 return getTrylockCallExpr(BOP->getLHS(),
C, Negate);
1632 if (!TCond) Negate = !Negate;
1633 return getTrylockCallExpr(BOP->getRHS(),
C, Negate);
// a && b / a || b: only the right-hand side decides this edge.
1637 if (BOP->getOpcode() == BO_LAnd) {
1639 return getTrylockCallExpr(BOP->getRHS(),
C, Negate);
1641 if (BOP->getOpcode() == BO_LOr)
1642 return getTrylockCallExpr(BOP->getRHS(),
C, Negate);
1644 }
else if (
const auto *COP = dyn_cast<ConditionalOperator>(
Cond)) {
// cond ? true : false (or inverted) is just cond (possibly negated).
1648 if (TCond && !FCond)
1649 return getTrylockCallExpr(COP->getCond(),
C, Negate);
1650 if (!TCond && FCond) {
1652 return getTrylockCallExpr(COP->getCond(),
C, Negate);
// Compute the lockset on the edge PredBlock -> CurrBlock: if the edge is the
// success branch of a trylock call, add the capabilities the trylock
// acquires.
// NOTE(review): interior lines are elided by the extraction; code left
// byte-identical.
1662void ThreadSafetyAnalyzer::getEdgeLockset(FactSet&
Result,
1663 const FactSet &ExitSet,
1664 const CFGBlock *PredBlock,
1665 const CFGBlock *CurrBlock) {
1673 bool Negate =
false;
1674 const CFGBlockInfo *PredBlockInfo = &BlockInfo[PredBlock->
getBlockID()];
1675 const LocalVarContext &LVarCtx = PredBlockInfo->ExitContext;
// Lookup callback resolving locals in the predecessor's exit context.
1680 [
this, Ctx = LVarCtx](
const NamedDecl *D)
mutable ->
const Expr * {
1681 return LocalVarMap.lookupExpr(D, Ctx);
1684 auto Cleanup = llvm::make_scope_exit(
1687 const auto *Exp = getTrylockCallExpr(
Cond, LVarCtx, Negate);
1691 auto *FunDecl = dyn_cast_or_null<NamedDecl>(Exp->getCalleeDecl());
1692 if (!FunDecl || !FunDecl->hasAttr<TryAcquireCapabilityAttr>())
1695 CapExprSet ExclusiveLocksToAdd;
1696 CapExprSet SharedLocksToAdd;
// Collect the capabilities named by each try_acquire attribute on the edge
// matching the attribute's success value.
1699 for (
const auto *Attr : FunDecl->specific_attrs<TryAcquireCapabilityAttr>())
1700 getMutexIDs(Attr->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, Attr,
1701 Exp, FunDecl, PredBlock, CurrBlock, Attr->getSuccessValue(),
1705 SourceLocation Loc = Exp->getExprLoc();
1706 for (
const auto &ExclusiveLockToAdd : ExclusiveLocksToAdd)
1707 addLock(
Result, FactMan.createFact<LockableFactEntry>(ExclusiveLockToAdd,
1709 for (
const auto &SharedLockToAdd : SharedLocksToAdd)
1710 addLock(
Result, FactMan.createFact<LockableFactEntry>(SharedLockToAdd,
// Statement visitor that updates the current lockset (FSet) while walking a
// CFG block, delegating checks to the owning ThreadSafetyAnalyzer.
// NOTE(review): several members are elided by the extraction; code left
// byte-identical.
1721class BuildLockset :
public ConstStmtVisitor<BuildLockset> {
1722 friend class ThreadSafetyAnalyzer;
1724 ThreadSafetyAnalyzer *Analyzer;
1727 const FactSet &FunctionExitFSet;
1728 LocalVariableMap::Context LVarCtx;
// Advance the local-variable context to the one saved for statement S.
1732 void updateLocalVarMapCtx(
const Stmt *S) {
1734 LVarCtx = Analyzer->LocalVarMap.getNextContext(CtxIndex, S, LVarCtx);
1739 [
this, Ctx = LVarCtx](
const NamedDecl *D)
mutable ->
const Expr * {
1740 return Analyzer->LocalVarMap.lookupExpr(D, Ctx);
// Convenience wrappers that forward to the analyzer with the current FSet.
1746 void checkAccess(
const Expr *Exp,
AccessKind AK,
1748 Analyzer->checkAccess(FSet, Exp, AK, POK);
1750 void checkPtAccess(
const Expr *Exp,
AccessKind AK,
1752 Analyzer->checkPtAccess(FSet, Exp, AK, POK);
1755 void handleCall(
const Expr *Exp,
const NamedDecl *D,
1756 til::SExpr *
Self =
nullptr,
1757 SourceLocation Loc = SourceLocation());
1758 void examineArguments(
const FunctionDecl *FD,
1761 bool SkipFirstParam =
false);
1764 BuildLockset(ThreadSafetyAnalyzer *Anlzr, CFGBlockInfo &Info,
1765 const FactSet &FunctionExitFSet)
1766 : ConstStmtVisitor<BuildLockset>(), Analyzer(Anlzr), FSet(Info.EntrySet),
1767 FunctionExitFSet(FunctionExitFSet), LVarCtx(Info.EntryContext),
1768 CtxIndex(Info.EntryIndex) {
1769 updateLocalVarMapCtx(
nullptr);
1774 void VisitUnaryOperator(
const UnaryOperator *UO);
1775 void VisitBinaryOperator(
const BinaryOperator *BO);
1776 void VisitCastExpr(
const CastExpr *CE);
1777 void VisitCallExpr(
const CallExpr *Exp);
1778 void VisitCXXConstructExpr(
const CXXConstructExpr *Exp);
1779 void VisitDeclStmt(
const DeclStmt *S);
1780 void VisitMaterializeTemporaryExpr(
const MaterializeTemporaryExpr *Exp);
1781 void VisitReturnStmt(
const ReturnStmt *S);
1788void ThreadSafetyAnalyzer::warnIfMutexNotHeld(
1789 const FactSet &FSet,
const NamedDecl *D,
const Expr *Exp,
AccessKind AK,
1791 SourceLocation Loc) {
1803 const FactEntry *LDat = FSet.findLock(FactMan, !Cp);
1806 (!Cp).toString(), Loc);
1812 if (!inCurrentScope(Cp))
1816 LDat = FSet.findLock(FactMan, Cp);
1823 const FactEntry *LDat = FSet.findLockUniv(FactMan, Cp);
1824 bool NoError =
true;
1827 LDat = FSet.findPartialMatch(FactMan, Cp);
1830 std::string PartMatchStr = LDat->toString();
1831 StringRef PartMatchName(PartMatchStr);
1841 if (NoError && LDat && !LDat->isAtLeast(LK)) {
1847void ThreadSafetyAnalyzer::warnIfMutexHeld(
const FactSet &FSet,
1848 const NamedDecl *D,
const Expr *Exp,
1849 Expr *MutexExp, til::SExpr *
Self,
1850 SourceLocation Loc) {
1859 const FactEntry *LDat = FSet.findLock(FactMan, Cp);
1871void ThreadSafetyAnalyzer::checkAccess(
const FactSet &FSet,
const Expr *Exp,
1880 while (
const auto *DRE = dyn_cast<DeclRefExpr>(Exp)) {
1881 const auto *VD = dyn_cast<VarDecl>(DRE->getDecl()->getCanonicalDecl());
1883 if (
const auto *E = VD->getInit()) {
1894 if (
const auto *UO = dyn_cast<UnaryOperator>(Exp)) {
1896 if (UO->getOpcode() == UO_Deref)
1897 checkPtAccess(FSet, UO->getSubExpr(), AK, POK);
1901 if (
const auto *BO = dyn_cast<BinaryOperator>(Exp)) {
1904 return checkAccess(FSet, BO->
getLHS(), AK, POK);
1906 return checkPtAccess(FSet, BO->
getLHS(), AK, POK);
1912 if (
const auto *AE = dyn_cast<ArraySubscriptExpr>(Exp)) {
1913 checkPtAccess(FSet, AE->getLHS(), AK, POK);
1917 if (
const auto *ME = dyn_cast<MemberExpr>(Exp)) {
1919 checkPtAccess(FSet, ME->getBase(), AK, POK);
1921 checkAccess(FSet, ME->getBase(), AK, POK);
1928 if (D->
hasAttr<GuardedVarAttr>() && FSet.isEmpty(FactMan)) {
1933 warnIfMutexNotHeld(FSet, D, Exp, AK, I->getArg(), POK,
nullptr, Loc);
1938void ThreadSafetyAnalyzer::checkPtAccess(
const FactSet &FSet,
const Expr *Exp,
1944 if (
const auto *PE = dyn_cast<ParenExpr>(Exp)) {
1945 Exp = PE->getSubExpr();
1948 if (
const auto *CE = dyn_cast<CastExpr>(Exp)) {
1949 if (CE->getCastKind() == CK_ArrayToPointerDecay) {
1952 checkAccess(FSet, CE->getSubExpr(), AK, POK);
1955 Exp = CE->getSubExpr();
1961 if (
const auto *UO = dyn_cast<UnaryOperator>(Exp)) {
1962 if (UO->getOpcode() == UO_AddrOf) {
1965 checkAccess(FSet, UO->getSubExpr(), AK, POK);
1993 if (D->
hasAttr<PtGuardedVarAttr>() && FSet.isEmpty(FactMan))
1997 warnIfMutexNotHeld(FSet, D, Exp, AK, I->getArg(), PtPOK,
nullptr,
2016void BuildLockset::handleCall(
const Expr *Exp,
const NamedDecl *D,
2017 til::SExpr *
Self, SourceLocation Loc) {
2018 CapExprSet ExclusiveLocksToAdd, SharedLocksToAdd;
2019 CapExprSet ExclusiveLocksToRemove, SharedLocksToRemove, GenericLocksToRemove;
2020 CapExprSet ScopedReqsAndExcludes;
2028 til::LiteralPtr *Placeholder =
2030 [[maybe_unused]]
auto inserted =
2031 Analyzer->ConstructedObjects.insert({Exp, Placeholder});
2032 assert(inserted.second &&
"Are we visiting the same expression again?");
2035 if (TagT->getDecl()->getMostRecentDecl()->hasAttr<ScopedLockableAttr>())
2036 Scp = CapabilityExpr(Placeholder, Exp->
getType(),
false);
2043 for(
const Attr *At : D->
attrs()) {
2044 switch (At->getKind()) {
2047 case attr::AcquireCapability: {
2049 Analyzer->getMutexIDs(A->isShared() ? SharedLocksToAdd
2050 : ExclusiveLocksToAdd,
2058 case attr::AssertCapability: {
2060 CapExprSet AssertLocks;
2061 Analyzer->getMutexIDs(AssertLocks, A, Exp, D,
Self);
2062 for (
const auto &AssertLock : AssertLocks)
2064 FSet, Analyzer->FactMan.createFact<LockableFactEntry>(
2066 Loc, FactEntry::Asserted));
2072 case attr::ReleaseCapability: {
2075 Analyzer->getMutexIDs(GenericLocksToRemove, A, Exp, D,
Self);
2076 else if (A->isShared())
2077 Analyzer->getMutexIDs(SharedLocksToRemove, A, Exp, D,
Self);
2079 Analyzer->getMutexIDs(ExclusiveLocksToRemove, A, Exp, D,
Self);
2083 case attr::RequiresCapability: {
2085 for (
auto *Arg : A->args()) {
2086 Analyzer->warnIfMutexNotHeld(FSet, D, Exp,
2091 Analyzer->getMutexIDs(ScopedReqsAndExcludes, A, Exp, D,
Self);
2096 case attr::LocksExcluded: {
2098 for (
auto *Arg : A->args()) {
2099 Analyzer->warnIfMutexHeld(FSet, D, Exp, Arg,
Self, Loc);
2102 Analyzer->getMutexIDs(ScopedReqsAndExcludes, A, Exp, D,
Self);
2113 std::optional<CallExpr::const_arg_range> Args;
2115 if (
const auto *CE = dyn_cast<CallExpr>(Exp))
2116 Args = CE->arguments();
2117 else if (
const auto *CE = dyn_cast<CXXConstructExpr>(Exp))
2118 Args = CE->arguments();
2120 llvm_unreachable(
"Unknown call kind");
2122 const auto *CalledFunction = dyn_cast<FunctionDecl>(D);
2123 if (CalledFunction && Args.has_value()) {
2124 for (
auto [Param, Arg] : zip(CalledFunction->parameters(), *Args)) {
2125 CapExprSet DeclaredLocks;
2126 for (
const Attr *At : Param->attrs()) {
2127 switch (At->getKind()) {
2128 case attr::AcquireCapability: {
2130 Analyzer->getMutexIDs(A->isShared() ? SharedLocksToAdd
2131 : ExclusiveLocksToAdd,
2133 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2137 case attr::ReleaseCapability: {
2140 Analyzer->getMutexIDs(GenericLocksToRemove, A, Exp, D,
Self);
2141 else if (A->isShared())
2142 Analyzer->getMutexIDs(SharedLocksToRemove, A, Exp, D,
Self);
2144 Analyzer->getMutexIDs(ExclusiveLocksToRemove, A, Exp, D,
Self);
2145 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2149 case attr::RequiresCapability: {
2151 for (
auto *Arg : A->args())
2152 Analyzer->warnIfMutexNotHeld(FSet, D, Exp,
2155 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2159 case attr::LocksExcluded: {
2161 for (
auto *Arg : A->args())
2162 Analyzer->warnIfMutexHeld(FSet, D, Exp, Arg,
Self, Loc);
2163 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2171 if (DeclaredLocks.empty())
2173 CapabilityExpr Cp(Analyzer->SxBuilder.
translate(Arg,
nullptr),
2174 StringRef(
"mutex"),
false,
false);
2175 if (
const auto *CBTE = dyn_cast<CXXBindTemporaryExpr>(Arg->
IgnoreCasts());
2177 if (
auto Object = Analyzer->ConstructedObjects.find(CBTE->getSubExpr());
2178 Object != Analyzer->ConstructedObjects.end())
2179 Cp = CapabilityExpr(
Object->second, StringRef(
"mutex"),
false,
2182 const FactEntry *Fact = FSet.findLock(Analyzer->FactMan, Cp);
2190 for (
const auto &[a,
b] :
2191 zip_longest(DeclaredLocks, Scope->getUnderlyingMutexes())) {
2192 if (!a.has_value()) {
2195 b.value().getKind(),
b.value().toString());
2196 }
else if (!
b.has_value()) {
2199 a.value().getKind(), a.value().toString());
2200 }
else if (!a.value().equals(
b.value())) {
2203 a.value().getKind(), a.value().toString(),
b.value().toString());
2212 for (
const auto &M : ExclusiveLocksToRemove)
2213 Analyzer->removeLock(FSet, M, Loc, Dtor,
LK_Exclusive);
2214 for (
const auto &M : SharedLocksToRemove)
2215 Analyzer->removeLock(FSet, M, Loc, Dtor,
LK_Shared);
2216 for (
const auto &M : GenericLocksToRemove)
2217 Analyzer->removeLock(FSet, M, Loc, Dtor,
LK_Generic);
2220 FactEntry::SourceKind Source =
2221 !Scp.
shouldIgnore() ? FactEntry::Managed : FactEntry::Acquired;
2222 for (
const auto &M : ExclusiveLocksToAdd)
2223 Analyzer->addLock(FSet, Analyzer->FactMan.createFact<LockableFactEntry>(
2225 for (
const auto &M : SharedLocksToAdd)
2226 Analyzer->addLock(FSet, Analyzer->FactMan.createFact<LockableFactEntry>(
2231 auto *ScopedEntry = Analyzer->FactMan.createFact<ScopedLockableFactEntry>(
2232 Scp, Loc, FactEntry::Acquired,
2233 ExclusiveLocksToAdd.size() + SharedLocksToAdd.size() +
2234 ScopedReqsAndExcludes.size() + ExclusiveLocksToRemove.size() +
2235 SharedLocksToRemove.size());
2236 for (
const auto &M : ExclusiveLocksToAdd)
2237 ScopedEntry->addLock(M);
2238 for (
const auto &M : SharedLocksToAdd)
2239 ScopedEntry->addLock(M);
2240 for (
const auto &M : ScopedReqsAndExcludes)
2241 ScopedEntry->addLock(M);
2242 for (
const auto &M : ExclusiveLocksToRemove)
2243 ScopedEntry->addExclusiveUnlock(M);
2244 for (
const auto &M : SharedLocksToRemove)
2245 ScopedEntry->addSharedUnlock(M);
2246 Analyzer->addLock(FSet, ScopedEntry);
2253void BuildLockset::VisitUnaryOperator(
const UnaryOperator *UO) {
2269void BuildLockset::VisitBinaryOperator(
const BinaryOperator *BO) {
2273 updateLocalVarMapCtx(BO);
2280void BuildLockset::VisitCastExpr(
const CastExpr *CE) {
2286void BuildLockset::examineArguments(
const FunctionDecl *FD,
2289 bool SkipFirstParam) {
2299 if (FD->
hasAttr<NoThreadSafetyAnalysisAttr>())
2302 const ArrayRef<ParmVarDecl *> Params = FD->
parameters();
2303 auto Param = Params.begin();
2308 for (
auto Arg = ArgBegin; Param != Params.end() && Arg != ArgEnd;
2310 QualType Qt = (*Param)->getType();
2318void BuildLockset::VisitCallExpr(
const CallExpr *Exp) {
2319 updateLocalVarMapCtx(Exp);
2321 if (
const auto *CE = dyn_cast<CXXMemberCallExpr>(Exp)) {
2322 const auto *ME = dyn_cast<MemberExpr>(CE->getCallee());
2324 const CXXMethodDecl *MD = CE->getMethodDecl();
2327 if (ME->isArrow()) {
2329 checkPtAccess(CE->getImplicitObjectArgument(),
AK_Read);
2332 checkAccess(CE->getImplicitObjectArgument(),
AK_Read);
2336 examineArguments(CE->getDirectCallee(), CE->arg_begin(), CE->arg_end());
2337 }
else if (
const auto *OE = dyn_cast<CXXOperatorCallExpr>(Exp)) {
2345 case OO_PercentEqual:
2349 case OO_LessLessEqual:
2350 case OO_GreaterGreaterEqual:
2351 checkAccess(OE->getArg(1),
AK_Read);
2361 if (!(OEop == OO_Star && OE->getNumArgs() > 1)) {
2363 checkPtAccess(OE->getArg(0),
AK_Read);
2368 const Expr *Obj = OE->getArg(0);
2373 const FunctionDecl *FD = OE->getDirectCallee();
2374 examineArguments(FD, std::next(OE->arg_begin()), OE->arg_end(),
2383 auto *D = dyn_cast_or_null<NamedDecl>(Exp->
getCalleeDecl());
2389void BuildLockset::VisitCXXConstructExpr(
const CXXConstructExpr *Exp) {
2392 const Expr* Source = Exp->
getArg(0);
2402 if (
auto *CE = dyn_cast<CastExpr>(E))
2405 if (
auto *CE = dyn_cast<CastExpr>(E))
2406 if (CE->
getCastKind() == CK_ConstructorConversion ||
2409 if (
auto *BTE = dyn_cast<CXXBindTemporaryExpr>(E))
2410 E = BTE->getSubExpr();
2414void BuildLockset::VisitDeclStmt(
const DeclStmt *S) {
2415 updateLocalVarMapCtx(S);
2418 if (
auto *VD = dyn_cast_or_null<VarDecl>(D)) {
2419 const Expr *E = VD->getInit();
2425 if (
auto *EWC = dyn_cast<ExprWithCleanups>(E))
2429 if (
auto Object = Analyzer->ConstructedObjects.find(E);
2430 Object != Analyzer->ConstructedObjects.end()) {
2431 Object->second->setClangDecl(VD);
2432 Analyzer->ConstructedObjects.erase(Object);
2438void BuildLockset::VisitMaterializeTemporaryExpr(
2439 const MaterializeTemporaryExpr *Exp) {
2441 if (
auto Object = Analyzer->ConstructedObjects.find(
2443 Object != Analyzer->ConstructedObjects.end()) {
2444 Object->second->setClangDecl(ExtD);
2445 Analyzer->ConstructedObjects.erase(Object);
2450void BuildLockset::VisitReturnStmt(
const ReturnStmt *S) {
2451 if (Analyzer->CurrentFunction ==
nullptr)
2459 const QualType ReturnType =
2462 Analyzer->checkAccess(
2463 FunctionExitFSet, RetVal,
2467 Analyzer->checkPtAccess(
2468 FunctionExitFSet, RetVal,
2478bool ThreadSafetyAnalyzer::join(
const FactEntry &A,
const FactEntry &B,
2479 SourceLocation JoinLoc,
2483 unsigned int ReentrancyDepthA = 0;
2484 unsigned int ReentrancyDepthB = 0;
2486 if (
const auto *LFE = dyn_cast<LockableFactEntry>(&A))
2487 ReentrancyDepthA = LFE->getReentrancyDepth();
2488 if (
const auto *LFE = dyn_cast<LockableFactEntry>(&B))
2489 ReentrancyDepthB = LFE->getReentrancyDepth();
2491 if (ReentrancyDepthA != ReentrancyDepthB) {
2497 return CanModify && ReentrancyDepthA < ReentrancyDepthB;
2498 }
else if (A.kind() != B.kind()) {
2501 if ((A.managed() || A.asserted()) && (B.managed() || B.asserted())) {
2503 bool ShouldTakeB = B.kind() ==
LK_Shared;
2504 if (CanModify || !ShouldTakeB)
2513 return CanModify && A.asserted() && !B.asserted();
2531void ThreadSafetyAnalyzer::intersectAndWarn(FactSet &EntrySet,
2532 const FactSet &ExitSet,
2533 SourceLocation JoinLoc,
2536 FactSet EntrySetOrig = EntrySet;
2539 for (
const auto &Fact : ExitSet) {
2540 const FactEntry &ExitFact = FactMan[Fact];
2542 FactSet::iterator EntryIt = EntrySet.findLockIter(FactMan, ExitFact);
2543 if (EntryIt != EntrySet.end()) {
2544 if (join(FactMan[*EntryIt], ExitFact, JoinLoc, EntryLEK))
2547 ExitFact.handleRemovalFromIntersection(ExitSet, FactMan, JoinLoc,
2553 for (
const auto &Fact : EntrySetOrig) {
2554 const FactEntry *EntryFact = &FactMan[Fact];
2555 const FactEntry *ExitFact = ExitSet.findLock(FactMan, *EntryFact);
2560 EntryFact->handleRemovalFromIntersection(EntrySetOrig, FactMan, JoinLoc,
2563 EntrySet.removeLock(FactMan, *EntryFact);
2576 if (std::optional<CFGStmt> S =
Last.getAs<
CFGStmt>()) {
2588void ThreadSafetyAnalyzer::runAnalysis(AnalysisDeclContext &AC) {
2591 threadSafety::CFGWalker walker;
2592 if (!walker.
init(AC))
2599 const NamedDecl *D = walker.
getDecl();
2600 CurrentFunction = dyn_cast<FunctionDecl>(D);
2602 if (D->
hasAttr<NoThreadSafetyAnalysisAttr>())
2617 CFGBlockInfo::getEmptyBlockInfo(LocalVarMap));
2623 PostOrderCFGView::CFGBlockSet VisitedBlocks(CFGraph);
2629 Initial.Reachable =
true;
2632 LocalVarMap.traverseCFG(CFGraph, SortedGraph, BlockInfo);
2637 CapExprSet ExclusiveLocksAcquired;
2638 CapExprSet SharedLocksAcquired;
2639 CapExprSet LocksReleased;
2644 if (!SortedGraph->
empty()) {
2646 FactSet &InitialLockset = Initial.EntrySet;
2648 CapExprSet ExclusiveLocksToAdd;
2649 CapExprSet SharedLocksToAdd;
2652 for (
const auto *Attr : D->
attrs()) {
2653 Loc = Attr->getLocation();
2654 if (
const auto *A = dyn_cast<RequiresCapabilityAttr>(Attr)) {
2655 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2657 }
else if (
const auto *A = dyn_cast<ReleaseCapabilityAttr>(Attr)) {
2660 if (A->args_size() == 0)
2662 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2664 getMutexIDs(LocksReleased, A,
nullptr, D);
2665 }
else if (
const auto *A = dyn_cast<AcquireCapabilityAttr>(Attr)) {
2666 if (A->args_size() == 0)
2668 getMutexIDs(A->isShared() ? SharedLocksAcquired
2669 : ExclusiveLocksAcquired,
2676 ArrayRef<ParmVarDecl *> Params;
2677 if (CurrentFunction)
2679 else if (
auto CurrentMethod = dyn_cast<ObjCMethodDecl>(D))
2680 Params = CurrentMethod->getCanonicalDecl()->parameters();
2682 llvm_unreachable(
"Unknown function kind");
2683 for (
const ParmVarDecl *Param : Params) {
2684 CapExprSet UnderlyingLocks;
2685 for (
const auto *Attr : Param->attrs()) {
2686 Loc = Attr->getLocation();
2687 if (
const auto *A = dyn_cast<ReleaseCapabilityAttr>(Attr)) {
2688 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2690 getMutexIDs(LocksReleased, A,
nullptr, Param);
2691 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2692 }
else if (
const auto *A = dyn_cast<RequiresCapabilityAttr>(Attr)) {
2693 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2695 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2696 }
else if (
const auto *A = dyn_cast<AcquireCapabilityAttr>(Attr)) {
2697 getMutexIDs(A->isShared() ? SharedLocksAcquired
2698 : ExclusiveLocksAcquired,
2700 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2701 }
else if (
const auto *A = dyn_cast<LocksExcludedAttr>(Attr)) {
2702 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2705 if (UnderlyingLocks.empty())
2710 auto *ScopedEntry = FactMan.createFact<ScopedLockableFactEntry>(
2711 Cp, Param->getLocation(), FactEntry::Declared,
2712 UnderlyingLocks.size());
2713 for (
const CapabilityExpr &M : UnderlyingLocks)
2714 ScopedEntry->addLock(M);
2715 addLock(InitialLockset, ScopedEntry,
true);
2719 for (
const auto &Mu : ExclusiveLocksToAdd) {
2720 const auto *Entry = FactMan.createFact<LockableFactEntry>(
2722 addLock(InitialLockset, Entry,
true);
2724 for (
const auto &Mu : SharedLocksToAdd) {
2725 const auto *Entry = FactMan.createFact<LockableFactEntry>(
2726 Mu,
LK_Shared, Loc, FactEntry::Declared);
2727 addLock(InitialLockset, Entry,
true);
2733 FactSet ExpectedFunctionExitSet = Initial.EntrySet;
2739 for (
const auto &Lock : ExclusiveLocksAcquired)
2740 ExpectedFunctionExitSet.addLock(
2741 FactMan, FactMan.createFact<LockableFactEntry>(Lock,
LK_Exclusive,
2743 for (
const auto &Lock : SharedLocksAcquired)
2744 ExpectedFunctionExitSet.addLock(
2745 FactMan, FactMan.createFact<LockableFactEntry>(Lock,
LK_Shared,
2747 for (
const auto &Lock : LocksReleased)
2748 ExpectedFunctionExitSet.removeLock(FactMan, Lock);
2750 for (
const auto *CurrBlock : *SortedGraph) {
2751 unsigned CurrBlockID = CurrBlock->
getBlockID();
2752 CFGBlockInfo *CurrBlockInfo = &BlockInfo[CurrBlockID];
2755 VisitedBlocks.insert(CurrBlock);
2770 bool LocksetInitialized =
false;
2772 PE = CurrBlock->
pred_end(); PI != PE; ++PI) {
2774 if (*PI ==
nullptr || !VisitedBlocks.alreadySet(*PI))
2777 unsigned PrevBlockID = (*PI)->getBlockID();
2778 CFGBlockInfo *PrevBlockInfo = &BlockInfo[PrevBlockID];
2785 CurrBlockInfo->Reachable =
true;
2787 FactSet PrevLockset;
2788 getEdgeLockset(PrevLockset, PrevBlockInfo->ExitSet, *PI, CurrBlock);
2790 if (!LocksetInitialized) {
2791 CurrBlockInfo->EntrySet = PrevLockset;
2792 LocksetInitialized =
true;
2798 CurrBlockInfo->EntrySet, PrevLockset, CurrBlockInfo->EntryLoc,
2799 isa_and_nonnull<ContinueStmt>((*PI)->getTerminatorStmt())
2806 if (!CurrBlockInfo->Reachable)
2809 BuildLockset LocksetBuilder(
this, *CurrBlockInfo, ExpectedFunctionExitSet);
2812 for (
const auto &BI : *CurrBlock) {
2813 switch (BI.getKind()) {
2815 CFGStmt CS = BI.castAs<CFGStmt>();
2816 LocksetBuilder.Visit(CS.
getStmt());
2821 CFGAutomaticObjDtor AD = BI.castAs<CFGAutomaticObjDtor>();
2823 if (!DD->hasAttrs())
2826 LocksetBuilder.handleCall(
2834 const CFGCleanupFunction &
CF = BI.castAs<CFGCleanupFunction>();
2835 LocksetBuilder.handleCall(
2836 nullptr,
CF.getFunctionDecl(),
2838 CF.getVarDecl()->getLocation());
2843 auto TD = BI.castAs<CFGTemporaryDtor>();
2847 if (
auto Object = ConstructedObjects.find(
2848 TD.getBindTemporaryExpr()->getSubExpr());
2849 Object != ConstructedObjects.end()) {
2853 LocksetBuilder.handleCall(
nullptr, DD,
Object->second,
2854 TD.getBindTemporaryExpr()->getEndLoc());
2855 ConstructedObjects.erase(Object);
2863 CurrBlockInfo->ExitSet = LocksetBuilder.FSet;
2870 SE = CurrBlock->succ_end(); SI != SE; ++SI) {
2872 if (*SI ==
nullptr || !VisitedBlocks.alreadySet(*SI))
2875 CFGBlock *FirstLoopBlock = *SI;
2876 CFGBlockInfo *PreLoop = &BlockInfo[FirstLoopBlock->
getBlockID()];
2877 CFGBlockInfo *LoopEnd = &BlockInfo[CurrBlockID];
2878 intersectAndWarn(PreLoop->EntrySet, LoopEnd->ExitSet, PreLoop->EntryLoc,
2884 if (!Final.Reachable)
2888 intersectAndWarn(ExpectedFunctionExitSet, Final.ExitSet, Final.ExitLoc,
2904 ThreadSafetyAnalyzer Analyzer(Handler, *BSet);
2905 Analyzer.runAnalysis(AC);
2919 llvm_unreachable(
"Unknown AccessKind");
This file defines AnalysisDeclContext, a class that manages the analysis context data for context sen...
Defines enum values for all the target-independent builtin functions.
static void dump(llvm::raw_ostream &OS, StringRef FunctionName, ArrayRef< CounterExpression > Expressions, ArrayRef< CounterMappingRegion > Regions)
static Decl::Kind getKind(const Decl *D)
Defines the C++ Decl subclasses, other than those for templates (found in DeclTemplate....
Defines the clang::Expr interface and subclasses for C++ expressions.
Forward-declares and imports various common LLVM datatypes that clang wants to use unqualified.
Defines an enumeration for C++ overloaded operators.
static std::string toString(const clang::SanitizerSet &Sanitizers)
Produce a string containing comma-separated names of sanitizers in Sanitizers set.
Defines the clang::SourceLocation class and associated facilities.
Defines various enumerations that describe declaration and type specifiers.
static void warnInvalidLock(ThreadSafetyHandler &Handler, const Expr *MutexExp, const NamedDecl *D, const Expr *DeclExp, StringRef Kind)
Issue a warning about an invalid lock expression.
static bool getStaticBooleanValue(Expr *E, bool &TCond)
static bool neverReturns(const CFGBlock *B)
static void findBlockLocations(CFG *CFGraph, const PostOrderCFGView *SortedGraph, std::vector< CFGBlockInfo > &BlockInfo)
Find the appropriate source locations to use when producing diagnostics for each block in the CFG.
static const ValueDecl * getValueDecl(const Expr *Exp)
Gets the value decl pointer from DeclRefExprs or MemberExprs.
static const Expr * UnpackConstruction(const Expr *E)
C Language Family Type Representation.
AnalysisDeclContext contains the context data for the function, method or block under analysis.
ASTContext & getASTContext() const
static bool isAssignmentOp(Opcode Opc)
const VarDecl * getVarDecl() const
const Stmt * getTriggerStmt() const
Represents a single basic block in a source-level CFG.
bool hasNoReturnElement() const
ElementList::const_reverse_iterator const_reverse_iterator
succ_iterator succ_begin()
Stmt * getTerminatorStmt()
AdjacentBlocks::const_iterator const_pred_iterator
pred_iterator pred_begin()
unsigned getBlockID() const
Stmt * getTerminatorCondition(bool StripParens=true)
AdjacentBlocks::const_iterator const_succ_iterator
Represents a top-level expression in a basic block.
const CXXDestructorDecl * getDestructorDecl(ASTContext &astContext) const
const Stmt * getStmt() const
Represents a source-level, intra-procedural CFG that represents the control-flow of a Stmt.
unsigned getNumBlockIDs() const
Returns the total number of BlockIDs allocated (which start at 0).
Expr * getArg(unsigned Arg)
Return the specified argument.
CXXConstructorDecl * getConstructor() const
Get the constructor that this expression will (ultimately) call.
bool isCopyConstructor(unsigned &TypeQuals) const
Whether this constructor is a copy constructor (C++ [class.copy]p2, which can be used to copy the cla...
Expr * getArg(unsigned Arg)
getArg - Return the specified argument.
ConstExprIterator const_arg_iterator
FunctionDecl * getDirectCallee()
If the callee is a FunctionDecl, return it. Otherwise return null.
unsigned getNumArgs() const
getNumArgs - Return the number of actual arguments to this call.
CastKind getCastKind() const
const DeclGroupRef getDeclGroup() const
SourceLocation getBeginLoc() const LLVM_READONLY
llvm::iterator_range< specific_attr_iterator< T > > specific_attrs() const
SourceLocation getLocation() const
bool isDefinedOutsideFunctionOrMethod() const
isDefinedOutsideFunctionOrMethod - This predicate returns true if this scoped decl is defined outside...
DeclContext * getDeclContext()
This represents one expression.
Expr * IgnoreParenCasts() LLVM_READONLY
Skip past any parentheses and casts which might surround this expression until reaching a fixed point...
Expr * IgnoreParenImpCasts() LLVM_READONLY
Skip past any parentheses and implicit casts which might surround this expression until reaching a fi...
Expr * IgnoreImplicit() LLVM_READONLY
Skip past any implicit AST nodes which might surround this expression until reaching a fixed point.
Expr * IgnoreParens() LLVM_READONLY
Skip past any parentheses which might surround this expression until reaching a fixed point.
Expr * IgnoreCasts() LLVM_READONLY
Skip past any casts which might surround this expression until reaching a fixed point.
SourceLocation getExprLoc() const LLVM_READONLY
getExprLoc - Return the preferred location for the arrow when diagnosing a problem with a generic exp...
const ParmVarDecl * getParamDecl(unsigned i) const
QualType getReturnType() const
ArrayRef< ParmVarDecl * > parameters() const
FunctionDecl * getCanonicalDecl() override
Retrieves the "canonical" declaration of the given declaration.
unsigned getNumParams() const
Return the number of parameters this function must have based on its FunctionType.
Expr * getSubExpr() const
Retrieve the temporary-generating subexpression whose value will be materialized into a glvalue.
ValueDecl * getExtendingDecl()
Get the declaration which triggered the lifetime-extension of this temporary, if any.
This represents a decl that may have a name.
IdentifierInfo * getIdentifier() const
Get the identifier that names this declaration, if there is one.
StringRef getName() const
Get the name of identifier for this declaration as a StringRef.
std::string getNameAsString() const
Get a human-readable name for the declaration, even if it is one of the special kinds of names (C++ c...
virtual void printName(raw_ostream &OS, const PrintingPolicy &Policy) const
Pretty-print the unqualified name of this declaration.
QualType getCanonicalType() const
bool isConstQualified() const
Determine whether this type is const-qualified.
Encodes a location in the source.
bool isValid() const
Return true if this is a valid SourceLocation object.
Stmt - This represents one statement.
SourceLocation getEndLoc() const LLVM_READONLY
void dump() const
Dumps the specified AST fragment and all subtrees to llvm::errs().
bool isPointerType() const
bool isReferenceType() const
QualType getPointeeType() const
If this is a pointer, ObjC object pointer, or block pointer, this returns the respective pointee.
bool isLValueReferenceType() const
const T * getAs() const
Member-template getAs<specific type>.
Expr * getSubExpr() const
Represent the declaration of a variable (in which case it is an lvalue) a function (in which case it ...
void checkBeforeAfter(const ValueDecl *Vd, const FactSet &FSet, ThreadSafetyAnalyzer &Analyzer, SourceLocation Loc, StringRef CapKind)
Warn if any mutexes in FSet are in the acquired_before set of Vd.
BeforeInfo * insertAttrExprs(const ValueDecl *Vd, ThreadSafetyAnalyzer &Analyzer)
Process acquired_before and acquired_after attributes on Vd.
BeforeInfo * getBeforeInfoForDecl(const ValueDecl *Vd, ThreadSafetyAnalyzer &Analyzer)
const PostOrderCFGView * getSortedGraph() const
const NamedDecl * getDecl() const
bool init(AnalysisDeclContext &AC)
const CFG * getGraph() const
bool shouldIgnore() const
bool equals(const CapabilityExpr &other) const
const til::SExpr * sexpr() const
std::string toString() const
const ValueDecl * valueDecl() const
StringRef getKind() const
CapabilityExpr translateAttrExpr(const Expr *AttrExp, const NamedDecl *D, const Expr *DeclExp, til::SExpr *Self=nullptr)
Translate a clang expression in an attribute to a til::SExpr.
void setLookupLocalVarExpr(std::function< const Expr *(const NamedDecl *)> F)
til::SExpr * translate(const Stmt *S, CallingContext *Ctx)
til::LiteralPtr * createThisPlaceholder()
til::SExpr * translateVariable(const VarDecl *VD, CallingContext *Ctx)
Handler class for thread safety warnings.
virtual ~ThreadSafetyHandler()
virtual void handleExpectMoreUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc, Name ScopeName, StringRef Kind, Name Expected)
Warn when we get fewer underlying mutexes than expected.
virtual void handleInvalidLockExp(SourceLocation Loc)
Warn about lock expressions which fail to resolve to lockable objects.
virtual void handleUnmatchedUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc, Name ScopeName, StringRef Kind, Name Expected, Name Actual)
Warn when an actual underlying mutex of a scoped lockable does not match the expected.
virtual void handleExpectFewerUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc, Name ScopeName, StringRef Kind, Name Actual)
Warn when we get more underlying mutexes than expected.
virtual void enterFunction(const FunctionDecl *FD)
Called by the analysis when starting analysis of a function.
virtual void handleIncorrectUnlockKind(StringRef Kind, Name LockName, LockKind Expected, LockKind Received, SourceLocation LocLocked, SourceLocation LocUnlock)
Warn about an unlock function call that attempts to unlock a lock with the incorrect lock kind.
virtual void handleMutexHeldEndOfScope(StringRef Kind, Name LockName, SourceLocation LocLocked, SourceLocation LocEndOfScope, LockErrorKind LEK, bool ReentrancyMismatch=false)
Warn about situations where a mutex is sometimes held and sometimes not.
virtual void leaveFunction(const FunctionDecl *FD)
Called by the analysis when finishing analysis of a function.
virtual void handleExclusiveAndShared(StringRef Kind, Name LockName, SourceLocation Loc1, SourceLocation Loc2)
Warn when a mutex is held exclusively and shared at the same point.
virtual void handleMutexNotHeld(StringRef Kind, const NamedDecl *D, ProtectedOperationKind POK, Name LockName, LockKind LK, SourceLocation Loc, Name *PossibleMatch=nullptr)
Warn when a protected operation occurs while the specific mutex protecting the operation is not locke...
virtual void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName, SourceLocation Loc)
Warn when a function is called while an excluded mutex is locked.
virtual void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK, AccessKind AK, SourceLocation Loc)
Warn when a protected operation occurs while no locks are held.
virtual void handleUnmatchedUnlock(StringRef Kind, Name LockName, SourceLocation Loc, SourceLocation LocPreviousUnlock)
Warn about unlock function calls that do not have a prior matching lock expression.
virtual void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg, SourceLocation Loc)
Warn when acquiring a lock that the negative capability is not held.
virtual void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked, SourceLocation LocDoubleLock)
Warn about lock function calls for locks which are already held.
internal::Matcher< T > traverse(TraversalKind TK, const internal::Matcher< T > &InnerMatcher)
Causes all nested matchers to be matched with the specified traversal kind.
unsigned kind
All of the diagnostics that can be emitted by the frontend.
@ CF
Indicates that the tracked object is a CF object.
bool Alloc(InterpState &S, CodePtr OpPC, const Descriptor *Desc)
bool Dec(InterpState &S, CodePtr OpPC, bool CanOverflow)
1) Pops a pointer from the stack 2) Load the value from the pointer 3) Writes the value decreased by ...
bool Neg(InterpState &S, CodePtr OpPC)
std::unique_ptr< DiagnosticConsumer > create(StringRef OutputFile, DiagnosticOptions &DiagOpts, bool MergeChildRecords=false)
Returns a DiagnosticConsumer that serializes diagnostics to a bitcode file.
bool matches(const til::SExpr *E1, const til::SExpr *E2)
LockKind getLockKindFromAccessKind(AccessKind AK)
Helper function that returns a LockKind required for the given level of access.
LockErrorKind
This enum distinguishes between different situations where we warn due to inconsistent locking.
@ LEK_NotLockedAtEndOfFunction
Expecting a capability to be held at the end of function.
@ LEK_LockedSomePredecessors
A capability is locked in some but not all predecessors of a CFGBlock.
@ LEK_LockedAtEndOfFunction
A capability is still locked at the end of a function.
@ LEK_LockedSomeLoopIterations
A capability is locked for some but not all loop iterations.
void threadSafetyCleanup(BeforeSet *Cache)
AccessKind
This enum distinguishes between different ways to access (read or write) a variable.
@ AK_Written
Writing a variable.
@ AK_Read
Reading a variable.
LockKind
This enum distinguishes between different kinds of lock actions.
@ LK_Shared
Shared/reader lock of a mutex.
@ LK_Exclusive
Exclusive/writer lock of a mutex.
@ LK_Generic
Can be either Shared or Exclusive.
void runThreadSafetyAnalysis(AnalysisDeclContext &AC, ThreadSafetyHandler &Handler, BeforeSet **Bset)
Check a function's CFG for thread-safety violations.
ProtectedOperationKind
This enum distinguishes between different kinds of operations that may need to be protected by locks.
@ POK_PtPassByRef
Passing a pt-guarded variable by reference.
@ POK_PassPointer
Passing pointer to a guarded variable.
@ POK_VarDereference
Dereferencing a variable (e.g. p in *p = 5;)
@ POK_PassByRef
Passing a guarded variable by reference.
@ POK_ReturnByRef
Returning a guarded variable by reference.
@ POK_PtPassPointer
Passing a pt-guarded pointer.
@ POK_PtReturnPointer
Returning a pt-guarded pointer.
@ POK_VarAccess
Reading or writing a variable (e.g. x in x = 5;)
@ POK_FunctionCall
Making a function call (e.g. foo())
@ POK_ReturnPointer
Returning pointer to a guarded variable.
@ POK_PtReturnByRef
Returning a pt-guarded variable by reference.
The JSON file list parser is used to communicate input to InstallAPI.
OverloadedOperatorKind
Enumeration specifying the different kinds of C++ overloaded operators.
bool isa(CodeGen::Address addr)
@ Self
'self' clause, allowed on Compute and Combined Constructs, plus 'update'.
nullptr
This class represents a compute construct, representing a 'Kind' of 'parallel', 'serial',...
static bool classof(const Stmt *T)
@ Result
The result type of a method or function.
const FunctionProtoType * T
U cast(CodeGen::Address addr)
@ Other
Other implicit parameter.
int const char * function