39#include "llvm/ADT/DenseMap.h"
40#include "llvm/ADT/ImmutableMap.h"
41#include "llvm/ADT/STLExtras.h"
42#include "llvm/ADT/ScopeExit.h"
43#include "llvm/ADT/SmallVector.h"
44#include "llvm/ADT/StringRef.h"
45#include "llvm/Support/Allocator.h"
46#include "llvm/Support/Casting.h"
47#include "llvm/Support/ErrorHandling.h"
48#include "llvm/Support/TrailingObjects.h"
49#include "llvm/Support/raw_ostream.h"
68 const Expr *DeclExp, StringRef Kind) {
// CapExprSet: a SmallVector of CapabilityExpr offering duplicate-free
// insertion — push_back_nodup appends only when llvm::none_of finds no
// equivalent entry already present.
// NOTE(review): fragmentary paste — the lambda body, the push_back call,
// and the class closer are elided here; stray original line numbers are
// embedded in the text.
82class CapExprSet :
public SmallVector<CapabilityExpr, 4> {
85 void push_back_nodup(
const CapabilityExpr &CapE) {
86 if (llvm::none_of(*
this, [=](
const CapabilityExpr &CapE2) {
102 enum FactEntryKind { Lockable, ScopedLockable };
113 const FactEntryKind Kind : 8;
119 SourceKind Source : 8;
122 SourceLocation AcquireLoc;
125 ~FactEntry() =
default;
128 FactEntry(FactEntryKind FK,
const CapabilityExpr &CE,
LockKind LK,
129 SourceLocation Loc, SourceKind Src)
130 : CapabilityExpr(CE), Kind(FK), LKind(LK), Source(Src), AcquireLoc(Loc) {}
133 SourceLocation loc()
const {
return AcquireLoc; }
134 FactEntryKind getFactEntryKind()
const {
return Kind; }
136 bool asserted()
const {
return Source == Asserted; }
137 bool declared()
const {
return Source == Declared; }
138 bool managed()
const {
return Source == Managed; }
141 handleRemovalFromIntersection(
const FactSet &FSet, FactManager &FactMan,
143 ThreadSafetyHandler &Handler)
const = 0;
144 virtual void handleLock(FactSet &FSet, FactManager &FactMan,
145 const FactEntry &entry,
146 ThreadSafetyHandler &Handler)
const = 0;
147 virtual void handleUnlock(FactSet &FSet, FactManager &FactMan,
148 const CapabilityExpr &Cp, SourceLocation UnlockLoc,
150 ThreadSafetyHandler &Handler)
const = 0;
// FactID: a compact index into FactManager's fact table. unsigned short
// keeps FactSet vectors small; newFact asserts the table never outgrows it.
158using FactID =
unsigned short;
// FactManager: owns all FactEntry objects for one analysis. Entries are
// bump-allocated (see createFact) and referenced everywhere by FactID.
// NOTE(review): fragmentary paste — interior lines are elided.
164 llvm::BumpPtrAllocator &Alloc;
165 std::vector<const FactEntry *> Facts;
168 FactManager(llvm::BumpPtrAllocator &Alloc) : Alloc(Alloc) {}
// createFact forwards to T::create on the shared allocator. The
// static_assert guards the BumpPtrAllocator contract: destructors are
// never run, so T must be trivially destructible.
170 template <
typename T,
typename... ArgTypes>
171 T *createFact(ArgTypes &&...Args) {
172 static_assert(std::is_trivially_destructible_v<T>);
173 return T::create(Alloc, std::forward<ArgTypes>(Args)...);
// newFact registers an entry and returns its index as a FactID,
// asserting that the index still fits in unsigned short.
176 FactID newFact(
const FactEntry *Entry) {
177 Facts.push_back(Entry);
178 assert(Facts.size() - 1 <= std::numeric_limits<FactID>::max() &&
179 "FactID space exhausted");
180 return static_cast<unsigned short>(Facts.size() - 1);
// Indexing a FactManager by FactID yields the stored entry.
183 const FactEntry &operator[](FactID F)
const {
return *Facts[F]; }
195 using FactVec = SmallVector<FactID, 4>;
200 using iterator = FactVec::iterator;
201 using const_iterator = FactVec::const_iterator;
203 iterator begin() {
return FactIDs.begin(); }
204 const_iterator begin()
const {
return FactIDs.begin(); }
206 iterator end() {
return FactIDs.end(); }
207 const_iterator end()
const {
return FactIDs.end(); }
209 bool isEmpty()
const {
return FactIDs.size() == 0; }
212 bool isEmpty(FactManager &FactMan)
const {
213 for (
const auto FID : *
this) {
214 if (!FactMan[FID].negative())
220 void addLockByID(FactID ID) { FactIDs.push_back(ID); }
222 FactID addLock(FactManager &FM,
const FactEntry *Entry) {
223 FactID F = FM.newFact(Entry);
224 FactIDs.push_back(F);
228 bool removeLock(FactManager& FM,
const CapabilityExpr &CapE) {
229 unsigned n = FactIDs.size();
233 for (
unsigned i = 0; i < n-1; ++i) {
234 if (FM[FactIDs[i]].
matches(CapE)) {
235 FactIDs[i] = FactIDs[n-1];
240 if (FM[FactIDs[n-1]].
matches(CapE)) {
247 std::optional<FactID> replaceLock(FactManager &FM, iterator It,
248 const FactEntry *Entry) {
251 FactID F = FM.newFact(Entry);
256 std::optional<FactID> replaceLock(FactManager &FM,
const CapabilityExpr &CapE,
257 const FactEntry *Entry) {
258 return replaceLock(FM, findLockIter(FM, CapE), Entry);
261 iterator findLockIter(FactManager &FM,
const CapabilityExpr &CapE) {
262 return llvm::find_if(*
this,
263 [&](FactID ID) {
return FM[
ID].matches(CapE); });
266 const FactEntry *findLock(FactManager &FM,
const CapabilityExpr &CapE)
const {
268 llvm::find_if(*
this, [&](FactID ID) {
return FM[
ID].matches(CapE); });
269 return I != end() ? &FM[*I] :
nullptr;
272 const FactEntry *findLockUniv(FactManager &FM,
273 const CapabilityExpr &CapE)
const {
274 auto I = llvm::find_if(
275 *
this, [&](FactID ID) ->
bool {
return FM[
ID].matchesUniv(CapE); });
276 return I != end() ? &FM[*I] :
nullptr;
279 const FactEntry *findPartialMatch(FactManager &FM,
280 const CapabilityExpr &CapE)
const {
281 auto I = llvm::find_if(*
this, [&](FactID ID) ->
bool {
282 return FM[
ID].partiallyMatches(CapE);
284 return I != end() ? &FM[*I] :
nullptr;
287 bool containsMutexDecl(FactManager &FM,
const ValueDecl* Vd)
const {
288 auto I = llvm::find_if(
289 *
this, [&](FactID ID) ->
bool {
return FM[
ID].valueDecl() == Vd; });
294class ThreadSafetyAnalyzer;
309 BeforeInfo() =
default;
310 BeforeInfo(BeforeInfo &&) =
default;
314 llvm::DenseMap<const ValueDecl *, std::unique_ptr<BeforeInfo>>;
315 using CycleMap = llvm::DenseMap<const ValueDecl *, bool>;
321 ThreadSafetyAnalyzer& Analyzer);
324 ThreadSafetyAnalyzer &Analyzer);
328 ThreadSafetyAnalyzer& Analyzer,
341class LocalVariableMap;
343using LocalVarContext = llvm::ImmutableMap<const NamedDecl *, unsigned>;
346enum CFGBlockSide { CBS_Entry, CBS_Exit };
359 LocalVarContext EntryContext;
362 LocalVarContext ExitContext;
365 SourceLocation EntryLoc;
368 SourceLocation ExitLoc;
374 bool Reachable =
false;
376 const FactSet &getSet(CFGBlockSide Side)
const {
377 return Side == CBS_Entry ? EntrySet : ExitSet;
380 SourceLocation getLocation(CFGBlockSide Side)
const {
381 return Side == CBS_Entry ? EntryLoc : ExitLoc;
385 CFGBlockInfo(LocalVarContext EmptyCtx)
386 : EntryContext(EmptyCtx), ExitContext(EmptyCtx) {}
389 static CFGBlockInfo getEmptyBlockInfo(LocalVariableMap &M);
405class LocalVariableMap {
407 using Context = LocalVarContext;
413 struct VarDefinition {
415 friend class LocalVariableMap;
418 const NamedDecl *Dec;
421 const Expr *Exp =
nullptr;
424 unsigned DirectRef = 0;
427 unsigned CanonicalRef = 0;
432 bool isReference()
const {
return !Exp; }
434 void invalidateRef() { DirectRef = CanonicalRef = 0; }
438 VarDefinition(
const NamedDecl *D,
const Expr *E, Context
C)
439 : Dec(D), Exp(E), Ctx(
C) {}
442 VarDefinition(
const NamedDecl *D,
unsigned DirectRef,
unsigned CanonicalRef,
444 : Dec(D), DirectRef(DirectRef), CanonicalRef(CanonicalRef), Ctx(
C) {}
448 Context::Factory ContextFactory;
449 std::vector<VarDefinition> VarDefinitions;
450 std::vector<std::pair<const Stmt *, Context>> SavedContexts;
455 VarDefinitions.push_back(VarDefinition(
nullptr, 0, 0, getEmptyContext()));
459 const VarDefinition* lookup(
const NamedDecl *D, Context Ctx) {
460 const unsigned *i = Ctx.lookup(D);
463 assert(*i < VarDefinitions.size());
464 return &VarDefinitions[*i];
470 const Expr* lookupExpr(
const NamedDecl *D, Context &Ctx) {
471 const unsigned *P = Ctx.lookup(D);
477 if (VarDefinitions[i].Exp) {
478 Ctx = VarDefinitions[i].Ctx;
479 return VarDefinitions[i].Exp;
481 i = VarDefinitions[i].DirectRef;
486 Context getEmptyContext() {
return ContextFactory.getEmptyMap(); }
491 Context getNextContext(
unsigned &CtxIndex,
const Stmt *S, Context
C) {
492 if (SavedContexts[CtxIndex+1].first == S) {
494 Context
Result = SavedContexts[CtxIndex].second;
500 void dumpVarDefinitionName(
unsigned i) {
502 llvm::errs() <<
"Undefined";
505 const NamedDecl *
Dec = VarDefinitions[i].Dec;
507 llvm::errs() <<
"<<NULL>>";
510 Dec->printName(llvm::errs());
511 llvm::errs() <<
"." << i <<
" " << ((
const void*) Dec);
516 for (
unsigned i = 1, e = VarDefinitions.size(); i < e; ++i) {
517 const Expr *Exp = VarDefinitions[i].Exp;
518 unsigned Ref = VarDefinitions[i].DirectRef;
520 dumpVarDefinitionName(i);
521 llvm::errs() <<
" = ";
522 if (Exp) Exp->
dump();
524 dumpVarDefinitionName(Ref);
525 llvm::errs() <<
"\n";
531 void dumpContext(Context
C) {
532 for (Context::iterator I =
C.begin(), E =
C.end(); I != E; ++I) {
533 const NamedDecl *D = I.getKey();
535 llvm::errs() <<
" -> ";
536 dumpVarDefinitionName(I.getData());
537 llvm::errs() <<
"\n";
542 void traverseCFG(CFG *CFGraph,
const PostOrderCFGView *SortedGraph,
543 std::vector<CFGBlockInfo> &BlockInfo);
546 friend class VarMapBuilder;
549 unsigned getCanonicalDefinitionID(
unsigned ID)
const {
550 while (ID > 0 && VarDefinitions[ID].isReference())
551 ID = VarDefinitions[
ID].CanonicalRef;
556 unsigned getContextIndex() {
return SavedContexts.size()-1; }
559 void saveContext(
const Stmt *S, Context
C) {
560 SavedContexts.push_back(std::make_pair(S,
C));
565 Context addDefinition(
const NamedDecl *D,
const Expr *Exp, Context Ctx) {
566 assert(!Ctx.contains(D));
567 unsigned newID = VarDefinitions.size();
568 Context NewCtx = ContextFactory.add(Ctx, D, newID);
569 VarDefinitions.push_back(VarDefinition(D, Exp, Ctx));
574 Context addReference(
const NamedDecl *D,
unsigned Ref, Context Ctx) {
575 unsigned newID = VarDefinitions.size();
576 Context NewCtx = ContextFactory.add(Ctx, D, newID);
577 VarDefinitions.push_back(
578 VarDefinition(D, Ref, getCanonicalDefinitionID(Ref), Ctx));
584 Context updateDefinition(
const NamedDecl *D, Expr *Exp, Context Ctx) {
585 if (Ctx.contains(D)) {
586 unsigned newID = VarDefinitions.size();
587 Context NewCtx = ContextFactory.remove(Ctx, D);
588 NewCtx = ContextFactory.add(NewCtx, D, newID);
589 VarDefinitions.push_back(VarDefinition(D, Exp, Ctx));
597 Context clearDefinition(
const NamedDecl *D, Context Ctx) {
598 Context NewCtx = Ctx;
599 if (NewCtx.contains(D)) {
600 NewCtx = ContextFactory.remove(NewCtx, D);
601 NewCtx = ContextFactory.add(NewCtx, D, 0);
607 Context removeDefinition(
const NamedDecl *D, Context Ctx) {
608 Context NewCtx = Ctx;
609 if (NewCtx.contains(D)) {
610 NewCtx = ContextFactory.remove(NewCtx, D);
615 Context intersectContexts(Context C1, Context C2);
616 Context createReferenceContext(Context
C);
617 void intersectBackEdge(Context C1, Context C2);
// Build a CFGBlockInfo whose entry/exit variable contexts are both the
// map's empty context. (Closing brace elided in this paste.)
623CFGBlockInfo CFGBlockInfo::getEmptyBlockInfo(LocalVariableMap &M) {
624 return CFGBlockInfo(M.getEmptyContext());
630class VarMapBuilder :
public ConstStmtVisitor<VarMapBuilder> {
632 LocalVariableMap* VMap;
633 LocalVariableMap::Context Ctx;
635 VarMapBuilder(LocalVariableMap *VM, LocalVariableMap::Context
C)
636 : VMap(VM), Ctx(
C) {}
638 void VisitDeclStmt(
const DeclStmt *S);
639 void VisitBinaryOperator(
const BinaryOperator *BO);
640 void VisitCallExpr(
const CallExpr *CE);
// Record definitions introduced by a declaration statement: for each
// VarDecl in the group, its initializer is added to the variable map, and
// the context is snapshotted against S at the end.
// NOTE(review): fragmentary paste — DGrp's initialization, the guards
// around addDefinition, and the use of modifiedCtx/T are elided.
646void VarMapBuilder::VisitDeclStmt(
const DeclStmt *S) {
647 bool modifiedCtx =
false;
649 for (
const auto *D : DGrp) {
650 if (
const auto *VD = dyn_cast_or_null<VarDecl>(D)) {
651 const Expr *E = VD->getInit();
654 QualType T = VD->getType();
656 Ctx = VMap->addDefinition(VD, E, Ctx);
662 VMap->saveContext(S, Ctx);
// Track assignments: when the LHS is a DeclRefExpr to a variable already
// in the context, update its definition to the RHS; otherwise (elided
// branch) the definition is cleared. The context is snapshotted at BO.
// NOTE(review): fragmentary paste — LHSExp's initialization and the
// assignment-opcode guard are elided.
666void VarMapBuilder::VisitBinaryOperator(
const BinaryOperator *BO) {
673 if (
const auto *DRE = dyn_cast<DeclRefExpr>(LHSExp)) {
674 const ValueDecl *VDec = DRE->getDecl();
675 if (Ctx.lookup(VDec)) {
677 Ctx = VMap->updateDefinition(VDec, BO->
getRHS(), Ctx);
680 Ctx = VMap->clearDefinition(VDec, Ctx);
681 VMap->saveContext(BO, Ctx);
// Conservatively invalidate variable definitions that a call may mutate:
// arguments passed by address (&x) or bound by reference have their
// tracked definition cleared. "bind"/"bind_front" callees get special
// handling (elided). Context is snapshotted at CE.
// NOTE(review): fragmentary paste — II/PVD/Arg initialization and the
// by-reference-parameter test are elided.
687void VarMapBuilder::VisitCallExpr(
const CallExpr *CE) {
697 if (II->isStr(
"bind") || II->isStr(
"bind_front"))
703 for (
unsigned Idx = 0; Idx < CE->
getNumArgs(); ++Idx) {
709 QualType ParamType = PVD->
getType();
712 const ValueDecl *VDec =
nullptr;
715 if (
const auto *DRE = dyn_cast<DeclRefExpr>(Arg))
716 VDec = DRE->getDecl();
// Address-of a variable: the callee may write through the pointer,
// so the variable's definition is dropped below.
720 if (
const auto *UO = dyn_cast<UnaryOperator>(Arg)) {
721 if (UO->getOpcode() == UO_AddrOf) {
722 const Expr *SubE = UO->getSubExpr()->IgnoreParenCasts();
723 if (
const auto *DRE = dyn_cast<DeclRefExpr>(SubE))
724 VDec = DRE->getDecl();
730 Ctx = VMap->clearDefinition(VDec, Ctx);
734 VMap->saveContext(CE, Ctx);
// Merge two variable contexts at a CFG join point: walk C1, look each
// declaration up in C2, and (in elided code) keep only entries whose
// canonical definitions agree in both predecessors.
// NOTE(review): fragmentary paste — the result accumulator and the
// branch bodies are elided.
740LocalVariableMap::Context
741LocalVariableMap::intersectContexts(Context C1, Context C2) {
743 for (
const auto &P : C1) {
744 const NamedDecl *
Dec = P.first;
745 const unsigned *I2 = C2.lookup(Dec);
749 }
else if (getCanonicalDefinitionID(P.second) !=
750 getCanonicalDefinitionID(*I2)) {
// Build a fresh context that mirrors C entry-for-entry; presumably each
// mapping becomes a reference definition for back-edge handling (the loop
// body that would show this is elided — TODO confirm against full source).
762LocalVariableMap::Context LocalVariableMap::createReferenceContext(Context
C) {
763 Context
Result = getEmptyContext();
764 for (
const auto &P :
C)
// Reconcile a loop back edge: C1 holds the reference definitions created
// at loop entry, C2 the definitions at the loop's end. Any reference whose
// variable is missing from C2, or whose canonical definition changed in
// the loop body, is invalidated (DirectRef/CanonicalRef zeroed).
// NOTE(review): fragmentary paste — interior lines are elided.
772void LocalVariableMap::intersectBackEdge(Context C1, Context C2) {
773 for (
const auto &P : C1) {
774 const unsigned I1 = P.second;
775 VarDefinition *VDef = &VarDefinitions[I1];
776 assert(VDef->isReference());
778 const unsigned *I2 = C2.lookup(P.first);
781 VDef->invalidateRef();
787 if (VDef->CanonicalRef != getCanonicalDefinitionID(*I2))
788 VDef->invalidateRef();
// Walk the CFG in topological (post-order view) order, computing per-block
// entry/exit variable contexts: entry contexts are formed by intersecting
// all visited predecessors' exit contexts, blocks with back edges get a
// reference context, statements are replayed via VarMapBuilder, and loop
// back edges are reconciled with intersectBackEdge afterwards.
// NOTE(review): fragmentary paste — loop headers, guards, and several
// statements (including exitID's definition) are elided.
829void LocalVariableMap::traverseCFG(CFG *CFGraph,
830 const PostOrderCFGView *SortedGraph,
831 std::vector<CFGBlockInfo> &BlockInfo) {
832 PostOrderCFGView::CFGBlockSet VisitedBlocks(CFGraph);
834 for (
const auto *CurrBlock : *SortedGraph) {
835 unsigned CurrBlockID = CurrBlock->getBlockID();
836 CFGBlockInfo *CurrBlockInfo = &BlockInfo[CurrBlockID];
838 VisitedBlocks.insert(CurrBlock);
// Unvisited predecessors are back edges; they are skipped here and
// reconciled after the block's statements are processed.
841 bool HasBackEdges =
false;
844 PE = CurrBlock->pred_end(); PI != PE; ++PI) {
846 if (*PI ==
nullptr || !VisitedBlocks.alreadySet(*PI)) {
851 unsigned PrevBlockID = (*PI)->getBlockID();
852 CFGBlockInfo *PrevBlockInfo = &BlockInfo[PrevBlockID];
// First predecessor seeds the entry context; later ones intersect.
855 CurrBlockInfo->EntryContext = PrevBlockInfo->ExitContext;
859 CurrBlockInfo->EntryContext =
860 intersectContexts(CurrBlockInfo->EntryContext,
861 PrevBlockInfo->ExitContext);
868 CurrBlockInfo->EntryContext =
869 createReferenceContext(CurrBlockInfo->EntryContext);
872 saveContext(
nullptr, CurrBlockInfo->EntryContext);
873 CurrBlockInfo->EntryIndex = getContextIndex();
// Replay the block's statements to derive the exit context.
876 VarMapBuilder VMapBuilder(
this, CurrBlockInfo->EntryContext);
877 for (
const auto &BI : *CurrBlock) {
878 switch (BI.getKind()) {
880 CFGStmt CS = BI.castAs<CFGStmt>();
881 VMapBuilder.Visit(CS.
getStmt());
888 CurrBlockInfo->ExitContext = VMapBuilder.Ctx;
// Successors not yet visited are loop heads: intersect their entry
// context with this block's exit context across the back edge.
892 SE = CurrBlock->succ_end(); SI != SE; ++SI) {
894 if (*SI ==
nullptr || !VisitedBlocks.alreadySet(*SI))
897 CFGBlock *FirstLoopBlock = *SI;
898 Context LoopBegin = BlockInfo[FirstLoopBlock->
getBlockID()].EntryContext;
899 Context LoopEnd = CurrBlockInfo->ExitContext;
900 intersectBackEdge(LoopBegin, LoopEnd);
906 saveContext(
nullptr, BlockInfo[exitID].ExitContext);
913 std::vector<CFGBlockInfo> &BlockInfo) {
914 for (
const auto *CurrBlock : *SortedGraph) {
915 CFGBlockInfo *CurrBlockInfo = &BlockInfo[CurrBlock->getBlockID()];
919 if (
const Stmt *S = CurrBlock->getTerminatorStmt()) {
920 CurrBlockInfo->EntryLoc = CurrBlockInfo->ExitLoc = S->
getBeginLoc();
923 BE = CurrBlock->rend(); BI != BE; ++BI) {
925 if (std::optional<CFGStmt> CS = BI->getAs<
CFGStmt>()) {
926 CurrBlockInfo->ExitLoc = CS->getStmt()->getBeginLoc();
932 if (CurrBlockInfo->ExitLoc.
isValid()) {
935 for (
const auto &BI : *CurrBlock) {
937 if (std::optional<CFGStmt> CS = BI.getAs<
CFGStmt>()) {
938 CurrBlockInfo->EntryLoc = CS->getStmt()->getBeginLoc();
942 }
else if (CurrBlock->pred_size() == 1 && *CurrBlock->pred_begin() &&
943 CurrBlock != &CFGraph->
getExit()) {
946 CurrBlockInfo->EntryLoc = CurrBlockInfo->ExitLoc =
947 BlockInfo[(*CurrBlock->pred_begin())->getBlockID()].ExitLoc;
948 }
else if (CurrBlock->succ_size() == 1 && *CurrBlock->succ_begin()) {
951 CurrBlockInfo->EntryLoc = CurrBlockInfo->ExitLoc =
952 BlockInfo[(*CurrBlock->succ_begin())->getBlockID()].EntryLoc;
959class LockableFactEntry final :
public FactEntry {
964 unsigned int ReentrancyDepth = 0;
966 LockableFactEntry(
const CapabilityExpr &CE,
LockKind LK, SourceLocation Loc,
968 : FactEntry(Lockable, CE, LK, Loc, Src) {}
971 static LockableFactEntry *
create(llvm::BumpPtrAllocator &Alloc,
972 const LockableFactEntry &
Other) {
976 static LockableFactEntry *
create(llvm::BumpPtrAllocator &Alloc,
977 const CapabilityExpr &CE,
LockKind LK,
979 SourceKind Src = Acquired) {
980 return new (
Alloc) LockableFactEntry(CE, LK, Loc, Src);
983 unsigned int getReentrancyDepth()
const {
return ReentrancyDepth; }
986 handleRemovalFromIntersection(
const FactSet &FSet, FactManager &FactMan,
988 ThreadSafetyHandler &Handler)
const override {
989 if (!asserted() && !negative() && !isUniversal()) {
995 void handleLock(FactSet &FSet, FactManager &FactMan,
const FactEntry &entry,
996 ThreadSafetyHandler &Handler)
const override {
997 if (
const FactEntry *RFact = tryReenter(FactMan, entry.kind())) {
999 FSet.replaceLock(FactMan, entry, RFact);
1006 void handleUnlock(FactSet &FSet, FactManager &FactMan,
1007 const CapabilityExpr &Cp, SourceLocation UnlockLoc,
1009 ThreadSafetyHandler &Handler)
const override {
1010 FSet.removeLock(FactMan, Cp);
1012 if (
const FactEntry *RFact = leaveReentrant(FactMan)) {
1014 FSet.addLock(FactMan, RFact);
1016 FSet.addLock(FactMan, FactMan.createFact<LockableFactEntry>(
1023 const FactEntry *tryReenter(FactManager &FactMan,
1027 if (
kind() != ReenterKind)
1029 auto *NewFact = FactMan.createFact<LockableFactEntry>(*this);
1030 NewFact->ReentrancyDepth++;
1036 const FactEntry *leaveReentrant(FactManager &FactMan)
const {
1037 if (!ReentrancyDepth)
1039 assert(reentrant());
1040 auto *NewFact = FactMan.createFact<LockableFactEntry>(*this);
1041 NewFact->ReentrancyDepth--;
1045 static bool classof(
const FactEntry *A) {
1046 return A->getFactEntryKind() == Lockable;
1050enum UnderlyingCapabilityKind {
1053 UCK_ReleasedExclusive,
1056struct UnderlyingCapability {
1058 UnderlyingCapabilityKind Kind;
1061class ScopedLockableFactEntry final
1063 private llvm::TrailingObjects<ScopedLockableFactEntry,
1064 UnderlyingCapability> {
1065 friend TrailingObjects;
1068 const unsigned ManagedCapacity;
1069 unsigned ManagedSize = 0;
1071 ScopedLockableFactEntry(
const CapabilityExpr &CE, SourceLocation Loc,
1072 SourceKind Src,
unsigned ManagedCapacity)
1073 : FactEntry(ScopedLockable, CE,
LK_Exclusive, Loc, Src),
1074 ManagedCapacity(ManagedCapacity) {}
1076 void addManaged(
const CapabilityExpr &M, UnderlyingCapabilityKind UCK) {
1077 assert(ManagedSize < ManagedCapacity);
1078 new (getTrailingObjects() + ManagedSize) UnderlyingCapability{M, UCK};
1082 ArrayRef<UnderlyingCapability> getManaged()
const {
1083 return getTrailingObjects(ManagedSize);
1087 static ScopedLockableFactEntry *
create(llvm::BumpPtrAllocator &Alloc,
1088 const CapabilityExpr &CE,
1089 SourceLocation Loc, SourceKind Src,
1090 unsigned ManagedCapacity) {
1092 Alloc.Allocate(totalSizeToAlloc<UnderlyingCapability>(ManagedCapacity),
1093 alignof(ScopedLockableFactEntry));
1094 return new (
Storage) ScopedLockableFactEntry(CE, Loc, Src, ManagedCapacity);
1097 CapExprSet getUnderlyingMutexes()
const {
1098 CapExprSet UnderlyingMutexesSet;
1099 for (
const UnderlyingCapability &UnderlyingMutex : getManaged())
1100 UnderlyingMutexesSet.push_back(UnderlyingMutex.Cap);
1101 return UnderlyingMutexesSet;
1108 void addLock(
const CapabilityExpr &M) { addManaged(M, UCK_Acquired); }
1110 void addExclusiveUnlock(
const CapabilityExpr &M) {
1111 addManaged(M, UCK_ReleasedExclusive);
1114 void addSharedUnlock(
const CapabilityExpr &M) {
1115 addManaged(M, UCK_ReleasedShared);
1120 handleRemovalFromIntersection(
const FactSet &FSet, FactManager &FactMan,
1122 ThreadSafetyHandler &Handler)
const override {
1126 for (
const auto &UnderlyingMutex : getManaged()) {
1127 const auto *Entry = FSet.findLock(FactMan, UnderlyingMutex.Cap);
1128 if ((UnderlyingMutex.Kind == UCK_Acquired && Entry) ||
1129 (UnderlyingMutex.Kind != UCK_Acquired && !Entry)) {
1133 UnderlyingMutex.Cap.toString(), loc(),
1139 void handleLock(FactSet &FSet, FactManager &FactMan,
const FactEntry &entry,
1140 ThreadSafetyHandler &Handler)
const override {
1141 for (
const auto &UnderlyingMutex : getManaged()) {
1142 if (UnderlyingMutex.Kind == UCK_Acquired)
1143 lock(FSet, FactMan, UnderlyingMutex.Cap, entry.kind(), entry.loc(),
1146 unlock(FSet, FactMan, UnderlyingMutex.Cap, entry.loc(), &Handler);
1150 void handleUnlock(FactSet &FSet, FactManager &FactMan,
1151 const CapabilityExpr &Cp, SourceLocation UnlockLoc,
1153 ThreadSafetyHandler &Handler)
const override {
1154 assert(!Cp.
negative() &&
"Managing object cannot be negative.");
1155 for (
const auto &UnderlyingMutex : getManaged()) {
1158 ThreadSafetyHandler *TSHandler = FullyRemove ?
nullptr : &Handler;
1159 if (UnderlyingMutex.Kind == UCK_Acquired) {
1160 unlock(FSet, FactMan, UnderlyingMutex.Cap, UnlockLoc, TSHandler);
1162 LockKind kind = UnderlyingMutex.Kind == UCK_ReleasedShared
1165 lock(FSet, FactMan, UnderlyingMutex.Cap,
kind, UnlockLoc, TSHandler);
1169 FSet.removeLock(FactMan, Cp);
1172 static bool classof(
const FactEntry *A) {
1173 return A->getFactEntryKind() == ScopedLockable;
1177 void lock(FactSet &FSet, FactManager &FactMan,
const CapabilityExpr &Cp,
1179 ThreadSafetyHandler *Handler)
const {
1180 if (
const auto It = FSet.findLockIter(FactMan, Cp); It != FSet.end()) {
1182 if (
const FactEntry *RFact = Fact.tryReenter(FactMan,
kind)) {
1184 FSet.replaceLock(FactMan, It, RFact);
1185 }
else if (Handler) {
1189 FSet.removeLock(FactMan, !Cp);
1190 FSet.addLock(FactMan, FactMan.createFact<LockableFactEntry>(Cp,
kind, loc,
1195 void unlock(FactSet &FSet, FactManager &FactMan,
const CapabilityExpr &Cp,
1196 SourceLocation loc, ThreadSafetyHandler *Handler)
const {
1197 if (
const auto It = FSet.findLockIter(FactMan, Cp); It != FSet.end()) {
1199 if (
const FactEntry *RFact = Fact.leaveReentrant(FactMan)) {
1201 FSet.replaceLock(FactMan, It, RFact);
1207 FactMan.createFact<LockableFactEntry>(!Cp,
LK_Exclusive, loc));
1208 }
else if (Handler) {
1209 SourceLocation PrevLoc;
1210 if (
const FactEntry *Neg = FSet.findLock(FactMan, !Cp))
1211 PrevLoc =
Neg->loc();
1218class ThreadSafetyAnalyzer {
1219 friend class BuildLockset;
1220 friend class threadSafety::BeforeSet;
1222 llvm::BumpPtrAllocator Bpa;
1223 threadSafety::til::MemRegionRef Arena;
1224 threadSafety::SExprBuilder SxBuilder;
1226 ThreadSafetyHandler &Handler;
1227 const FunctionDecl *CurrentFunction;
1228 LocalVariableMap LocalVarMap;
1230 llvm::SmallDenseMap<const Expr *, til::LiteralPtr *> ConstructedObjects;
1231 FactManager FactMan;
1232 std::vector<CFGBlockInfo> BlockInfo;
1234 BeforeSet *GlobalBeforeSet;
1237 ThreadSafetyAnalyzer(ThreadSafetyHandler &H, BeforeSet *Bset)
1238 : Arena(&Bpa), SxBuilder(Arena), Handler(H), FactMan(Bpa),
1239 GlobalBeforeSet(Bset) {}
1241 bool inCurrentScope(
const CapabilityExpr &CapE);
1243 void addLock(FactSet &FSet,
const FactEntry *Entry,
bool ReqAttr =
false);
1244 void removeLock(FactSet &FSet,
const CapabilityExpr &CapE,
1245 SourceLocation UnlockLoc,
bool FullyRemove,
LockKind Kind);
1247 template <
typename AttrType>
1248 void getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
const Expr *Exp,
1249 const NamedDecl *D, til::SExpr *
Self =
nullptr);
1251 template <
class AttrType>
1252 void getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
const Expr *Exp,
1254 const CFGBlock *PredBlock,
const CFGBlock *CurrBlock,
1255 Expr *BrE,
bool Neg);
1257 const CallExpr* getTrylockCallExpr(
const Stmt *
Cond, LocalVarContext
C,
1260 void getEdgeLockset(FactSet &
Result,
const FactSet &ExitSet,
1261 const CFGBlock* PredBlock,
1262 const CFGBlock *CurrBlock);
1264 bool join(
const FactEntry &A,
const FactEntry &B, SourceLocation JoinLoc,
1267 void intersectAndWarn(FactSet &EntrySet,
const FactSet &ExitSet,
1271 void intersectAndWarn(FactSet &EntrySet,
const FactSet &ExitSet,
1273 intersectAndWarn(EntrySet, ExitSet, JoinLoc, LEK, LEK);
1276 void runAnalysis(AnalysisDeclContext &AC);
1278 void warnIfMutexNotHeld(
const FactSet &FSet,
const NamedDecl *D,
1279 const Expr *Exp,
AccessKind AK, Expr *MutexExp,
1281 SourceLocation Loc);
1282 void warnIfAnyMutexNotHeldForRead(
const FactSet &FSet,
const NamedDecl *D,
1284 llvm::ArrayRef<Expr *> Args,
1286 SourceLocation Loc);
1287 void warnIfMutexHeld(
const FactSet &FSet,
const NamedDecl *D,
const Expr *Exp,
1288 Expr *MutexExp, til::SExpr *
Self, SourceLocation Loc);
1290 void checkAccess(
const FactSet &FSet,
const Expr *Exp,
AccessKind AK,
1292 void checkPtAccess(
const FactSet &FSet,
const Expr *Exp,
AccessKind AK,
1300 ThreadSafetyAnalyzer& Analyzer) {
1302 BeforeInfo *Info =
nullptr;
1306 std::unique_ptr<BeforeInfo> &InfoPtr = BMap[Vd];
1308 InfoPtr.reset(
new BeforeInfo());
1309 Info = InfoPtr.get();
1312 for (
const auto *At : Vd->
attrs()) {
1313 switch (At->getKind()) {
1314 case attr::AcquiredBefore: {
1318 for (
const auto *Arg : A->args()) {
1320 Analyzer.SxBuilder.translateAttrExpr(Arg,
nullptr);
1322 Info->Vect.push_back(Cpvd);
1323 const auto It = BMap.find(Cpvd);
1324 if (It == BMap.end())
1330 case attr::AcquiredAfter: {
1334 for (
const auto *Arg : A->args()) {
1336 Analyzer.SxBuilder.translateAttrExpr(Arg,
nullptr);
1340 ArgInfo->Vect.push_back(Vd);
1353BeforeSet::BeforeInfo *
1355 ThreadSafetyAnalyzer &Analyzer) {
1356 auto It = BMap.find(Vd);
1357 BeforeInfo *Info =
nullptr;
1358 if (It == BMap.end())
1361 Info = It->second.get();
1362 assert(Info &&
"BMap contained nullptr?");
1368 const FactSet& FSet,
1369 ThreadSafetyAnalyzer& Analyzer,
1381 if (Info->Visited == 1)
1384 if (Info->Visited == 2)
1387 if (Info->Vect.empty())
1390 InfoVect.push_back(Info);
1392 for (
const auto *Vdb : Info->Vect) {
1394 if (FSet.containsMutexDecl(Analyzer.FactMan, Vdb)) {
1395 StringRef L1 = StartVd->
getName();
1396 StringRef L2 = Vdb->getName();
1397 Analyzer.Handler.handleLockAcquiredBefore(CapKind, L1, L2, Loc);
1401 if (CycMap.try_emplace(Vd,
true).second) {
1403 Analyzer.Handler.handleBeforeAfterCycle(L1, Vd->
getLocation());
1413 for (
auto *Info : InfoVect)
1419 if (
const auto *CE = dyn_cast<ImplicitCastExpr>(Exp))
1422 if (
const auto *DR = dyn_cast<DeclRefExpr>(Exp))
1423 return DR->getDecl();
1425 if (
const auto *ME = dyn_cast<MemberExpr>(Exp))
1426 return ME->getMemberDecl();
// Decide whether a capability expression names something visible from the
// function being analyzed: a til::LiteralPtr is checked via its clangDecl,
// and a til::Project (member access) only counts inside a CXXMethodDecl.
// NOTE(review): fragmentary paste — the decision logic on VD in both
// branches and the final return are elided.
1431bool ThreadSafetyAnalyzer::inCurrentScope(
const CapabilityExpr &CapE) {
1432 const threadSafety::til::SExpr *SExp = CapE.
sexpr();
1433 assert(SExp &&
"Null expressions should be ignored");
1435 if (
const auto *LP = dyn_cast<til::LiteralPtr>(SExp)) {
1436 const ValueDecl *VD = LP->clangDecl();
1448 if (
const auto *P = dyn_cast<til::Project>(SExp)) {
1449 if (!isa_and_nonnull<CXXMethodDecl>(CurrentFunction))
1451 const ValueDecl *VD = P->clangDecl();
// Add Entry to the lockset. Unless the entry came from a requires
// attribute (ReqAttr) or is itself negative, any matching negative
// capability (!M) is first removed; diagnostics (elided) fire for
// double-acquire and for acquiring without a prior negative fact. If the
// capability is already held, the existing fact decides how the re-lock
// is handled (e.g. reentrancy) via handleLock.
// NOTE(review): fragmentary paste — the diagnostic calls and several
// guards are elided.
1460void ThreadSafetyAnalyzer::addLock(FactSet &FSet,
const FactEntry *Entry,
1462 if (Entry->shouldIgnore())
1465 if (!ReqAttr && !Entry->negative()) {
1467 CapabilityExpr NegC = !*Entry;
1468 const FactEntry *Nen = FSet.findLock(FactMan, NegC);
1470 FSet.removeLock(FactMan, NegC);
1473 if (inCurrentScope(*Entry) && !Entry->asserted() && !Entry->reentrant())
1480 if (!Entry->asserted() && !Entry->declared()) {
1482 Entry->loc(), Entry->getKind());
1485 if (
const FactEntry *Cp = FSet.findLock(FactMan, *Entry)) {
1486 if (!Entry->asserted())
1487 Cp->handleLock(FSet, FactMan, *Entry, Handler);
1489 FSet.addLock(FactMan, Entry);
// Remove a capability from the lockset at an unlock site. If the fact is
// not held, the previous release location is recovered from the negative
// capability for the diagnostic (elided). A mismatch between the declared
// unlock kind and the held kind (when not LK_Generic) is reported, then
// the fact's own handleUnlock performs the removal (scoped locks may
// release underlying mutexes, reentrant locks may just decrement).
1495void ThreadSafetyAnalyzer::removeLock(FactSet &FSet,
const CapabilityExpr &Cp,
1496 SourceLocation UnlockLoc,
1497 bool FullyRemove,
LockKind ReceivedKind) {
1501 const FactEntry *LDat = FSet.findLock(FactMan, Cp);
1503 SourceLocation PrevLoc;
1504 if (
const FactEntry *Neg = FSet.findLock(FactMan, !Cp))
1505 PrevLoc =
Neg->loc();
1513 if (ReceivedKind !=
LK_Generic && LDat->kind() != ReceivedKind) {
1515 ReceivedKind, LDat->loc(), UnlockLoc);
1518 LDat->handleUnlock(FSet, FactMan, Cp, UnlockLoc, FullyRemove, Handler);
// Collect the capability expressions named by a thread-safety attribute
// into Mtxs (duplicate-free). An attribute with no arguments refers to
// the receiver itself (elided); otherwise each argument is translated
// (elided) and appended.
// NOTE(review): fragmentary paste — Cp's construction and validation are
// elided; the Self parameter from the declaration is not visible here.
1523template <
typename AttrType>
1524void ThreadSafetyAnalyzer::getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
1525 const Expr *Exp,
const NamedDecl *D,
1527 if (Attr->args_size() == 0) {
1536 Mtxs.push_back_nodup(Cp);
1540 for (
const auto *Arg : Attr->args()) {
1548 Mtxs.push_back_nodup(Cp);
// Branch-sensitive overload: only harvest the attribute's capabilities on
// the CFG edge that corresponds to the trylock's success value. BrE (a
// bool or integer literal from the attribute) selects which successor —
// index 0 for the true branch, 1 for the false branch, flipped when the
// condition was negated (Neg; the flip site at 1570 shows the elided
// guard) — and the simple overload does the actual collection.
1555template <
class AttrType>
1556void ThreadSafetyAnalyzer::getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
1557 const Expr *Exp,
const NamedDecl *D,
1558 const CFGBlock *PredBlock,
1559 const CFGBlock *CurrBlock,
1560 Expr *BrE,
bool Neg) {
1562 bool branch =
false;
1563 if (
const auto *BLE = dyn_cast_or_null<CXXBoolLiteralExpr>(BrE))
1564 branch = BLE->getValue();
1565 else if (
const auto *ILE = dyn_cast_or_null<IntegerLiteral>(BrE))
1566 branch = ILE->getValue().getBoolValue();
1568 int branchnum = branch ? 0 : 1;
1570 branchnum = !branchnum;
// Walk the (at most two) successors; the one matching CurrBlock at the
// selected index receives the locks.
1575 SE = PredBlock->
succ_end(); SI != SE && i < 2; ++SI, ++i) {
1576 if (*SI == CurrBlock && i == branchnum)
1577 getMutexIDs(Mtxs, Attr, Exp, D);
1585 }
else if (
const auto *BLE = dyn_cast<CXXBoolLiteralExpr>(E)) {
1586 TCond = BLE->getValue();
1588 }
else if (
const auto *ILE = dyn_cast<IntegerLiteral>(E)) {
1589 TCond = ILE->getValue().getBoolValue();
1591 }
else if (
auto *CE = dyn_cast<ImplicitCastExpr>(E))
// Peel a branch condition down to the trylock CallExpr that produced it,
// tracking in Negate whether the truth sense was inverted along the way.
// Handles: __builtin_expect wrappers, parens, implicit casts, full-exprs,
// variables (resolved through the local variable map), logical not,
// ==/!= against boolean constants, &&/|| short-circuit forms, and
// conditional operators with constant arms.
// NOTE(review): fragmentary paste — the initial null/Cond checks, the
// Negate flips for UO_LNot and BO_NE, TCond/FCond computation, and the
// fallback return are elided.
1599const CallExpr* ThreadSafetyAnalyzer::getTrylockCallExpr(
const Stmt *
Cond,
1605 if (
const auto *CallExp = dyn_cast<CallExpr>(
Cond)) {
1606 if (CallExp->getBuiltinCallee() == Builtin::BI__builtin_expect)
1607 return getTrylockCallExpr(CallExp->getArg(0),
C, Negate);
1610 else if (
const auto *PE = dyn_cast<ParenExpr>(
Cond))
1611 return getTrylockCallExpr(PE->getSubExpr(),
C, Negate);
1612 else if (
const auto *CE = dyn_cast<ImplicitCastExpr>(
Cond))
1613 return getTrylockCallExpr(CE->getSubExpr(),
C, Negate);
1614 else if (
const auto *FE = dyn_cast<FullExpr>(
Cond))
1615 return getTrylockCallExpr(FE->getSubExpr(),
C, Negate);
1616 else if (
const auto *DRE = dyn_cast<DeclRefExpr>(
Cond)) {
// A variable condition: chase its recorded definition, if any.
1617 const Expr *E = LocalVarMap.lookupExpr(DRE->getDecl(),
C);
1618 return getTrylockCallExpr(E,
C, Negate);
1620 else if (
const auto *UOP = dyn_cast<UnaryOperator>(
Cond)) {
1621 if (UOP->getOpcode() == UO_LNot) {
1623 return getTrylockCallExpr(UOP->getSubExpr(),
C, Negate);
1627 else if (
const auto *BOP = dyn_cast<BinaryOperator>(
Cond)) {
// x == const / x != const: recurse into the non-constant side with
// the sense adjusted by the constant's truth value.
1628 if (BOP->getOpcode() == BO_EQ || BOP->getOpcode() == BO_NE) {
1629 if (BOP->getOpcode() == BO_NE)
1634 if (!TCond) Negate = !Negate;
1635 return getTrylockCallExpr(BOP->getLHS(),
C, Negate);
1639 if (!TCond) Negate = !Negate;
1640 return getTrylockCallExpr(BOP->getRHS(),
C, Negate);
1644 if (BOP->getOpcode() == BO_LAnd) {
1646 return getTrylockCallExpr(BOP->getRHS(),
C, Negate);
1648 if (BOP->getOpcode() == BO_LOr)
1649 return getTrylockCallExpr(BOP->getRHS(),
C, Negate);
1651 }
else if (
const auto *COP = dyn_cast<ConditionalOperator>(
Cond)) {
// c ? true : false (or the inverse) reduces to the condition itself.
1655 if (TCond && !FCond)
1656 return getTrylockCallExpr(COP->getCond(),
C, Negate);
1657 if (!TCond && FCond) {
1659 return getTrylockCallExpr(COP->getCond(),
C, Negate);
// Compute the lockset on a CFG edge PredBlock -> CurrBlock: starting from
// the predecessor's exit set, find the trylock call guarding the branch
// (via getTrylockCallExpr, using the predecessor's exit variable context)
// and, if the callee carries TryAcquireCapability attributes, add the
// exclusive/shared capabilities that the taken branch implies.
// NOTE(review): fragmentary paste — Result's initialization from ExitSet,
// Cond's derivation from the terminator, the scope_exit body, and the
// fact-entry source/kind arguments to createFact are elided.
1669void ThreadSafetyAnalyzer::getEdgeLockset(FactSet&
Result,
1670 const FactSet &ExitSet,
1671 const CFGBlock *PredBlock,
1672 const CFGBlock *CurrBlock) {
1680 bool Negate =
false;
1681 const CFGBlockInfo *PredBlockInfo = &BlockInfo[PredBlock->
getBlockID()];
1682 const LocalVarContext &LVarCtx = PredBlockInfo->ExitContext;
// Lookup callback: resolve variable references against the predecessor's
// exit context while analyzing the branch condition.
1687 [
this, Ctx = LVarCtx](
const NamedDecl *D)
mutable ->
const Expr * {
1688 return LocalVarMap.lookupExpr(D, Ctx);
1691 llvm::scope_exit Cleanup(
1694 const auto *Exp = getTrylockCallExpr(
Cond, LVarCtx, Negate);
1698 auto *FunDecl = dyn_cast_or_null<NamedDecl>(Exp->getCalleeDecl());
1699 if (!FunDecl || !FunDecl->hasAttr<TryAcquireCapabilityAttr>())
1702 CapExprSet ExclusiveLocksToAdd;
1703 CapExprSet SharedLocksToAdd;
// Route each attribute's capabilities by its shared/exclusive flag; the
// branch-sensitive getMutexIDs overload filters by the success value.
1706 for (
const auto *Attr : FunDecl->specific_attrs<TryAcquireCapabilityAttr>())
1707 getMutexIDs(Attr->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, Attr,
1708 Exp, FunDecl, PredBlock, CurrBlock, Attr->getSuccessValue(),
1712 SourceLocation Loc = Exp->getExprLoc();
1713 for (
const auto &ExclusiveLockToAdd : ExclusiveLocksToAdd)
1714 addLock(
Result, FactMan.createFact<LockableFactEntry>(ExclusiveLockToAdd,
1716 for (
const auto &SharedLockToAdd : SharedLocksToAdd)
1717 addLock(
Result, FactMan.createFact<LockableFactEntry>(SharedLockToAdd,
1728class BuildLockset :
public ConstStmtVisitor<BuildLockset> {
1729 friend class ThreadSafetyAnalyzer;
1731 ThreadSafetyAnalyzer *Analyzer;
1734 const FactSet &FunctionExitFSet;
1735 LocalVariableMap::Context LVarCtx;
1740 void updateLocalVarMapCtx(
const Stmt *S) {
1742 LVarCtx = Analyzer->LocalVarMap.getNextContext(CtxIndex, S, LVarCtx);
1747 [
this, Ctx = LVarCtx](
const NamedDecl *D)
mutable ->
const Expr * {
1748 return Analyzer->LocalVarMap.lookupExpr(D, Ctx);
1754 void checkAccess(
const Expr *Exp,
AccessKind AK,
1756 Analyzer->checkAccess(FSet, Exp, AK, POK);
1758 void checkPtAccess(
const Expr *Exp,
AccessKind AK,
1760 Analyzer->checkPtAccess(FSet, Exp, AK, POK);
1763 void handleCall(
const Expr *Exp,
const NamedDecl *D,
1764 til::SExpr *
Self =
nullptr,
1765 SourceLocation Loc = SourceLocation());
1766 void examineArguments(
const FunctionDecl *FD,
1769 bool SkipFirstParam =
false);
1772 BuildLockset(ThreadSafetyAnalyzer *Anlzr, CFGBlockInfo &Info,
1773 const FactSet &FunctionExitFSet)
1774 : ConstStmtVisitor<BuildLockset>(), Analyzer(Anlzr), FSet(Info.EntrySet),
1775 FunctionExitFSet(FunctionExitFSet), LVarCtx(Info.EntryContext),
1776 CtxIndex(Info.EntryIndex) {
1777 updateLocalVarMapCtx(
nullptr);
1781 BuildLockset(
const BuildLockset &) =
delete;
1782 BuildLockset &operator=(
const BuildLockset &) =
delete;
1784 void VisitUnaryOperator(
const UnaryOperator *UO);
1785 void VisitBinaryOperator(
const BinaryOperator *BO);
1786 void VisitCastExpr(
const CastExpr *CE);
1787 void VisitCallExpr(
const CallExpr *Exp);
1788 void VisitCXXConstructExpr(
const CXXConstructExpr *Exp);
1789 void VisitDeclStmt(
const DeclStmt *S);
1790 void VisitMaterializeTemporaryExpr(
const MaterializeTemporaryExpr *Exp);
1791 void VisitReturnStmt(
const ReturnStmt *S);
1798void ThreadSafetyAnalyzer::warnIfMutexNotHeld(
1799 const FactSet &FSet,
const NamedDecl *D,
const Expr *Exp,
AccessKind AK,
1801 SourceLocation Loc) {
1813 const FactEntry *LDat = FSet.findLock(FactMan, !Cp);
1816 (!Cp).toString(), Loc);
1822 if (!inCurrentScope(Cp))
1826 LDat = FSet.findLock(FactMan, Cp);
1833 const FactEntry *LDat = FSet.findLockUniv(FactMan, Cp);
1834 bool NoError =
true;
1837 LDat = FSet.findPartialMatch(FactMan, Cp);
1840 std::string PartMatchStr = LDat->toString();
1841 StringRef PartMatchName(PartMatchStr);
1851 if (NoError && LDat && !LDat->isAtLeast(LK)) {
1856void ThreadSafetyAnalyzer::warnIfAnyMutexNotHeldForRead(
1857 const FactSet &FSet,
const NamedDecl *D,
const Expr *Exp,
1859 SourceLocation Loc) {
1860 SmallVector<CapabilityExpr, 2> Caps;
1861 for (
auto *Arg : Args) {
1869 const FactEntry *LDat = FSet.findLockUniv(FactMan, Cp);
1879 SmallVector<std::string, 2> NameStorage;
1880 SmallVector<StringRef, 2> Names;
1881 for (
const auto &Cp : Caps) {
1882 NameStorage.push_back(Cp.
toString());
1883 Names.push_back(NameStorage.back());
1889void ThreadSafetyAnalyzer::warnIfMutexHeld(
const FactSet &FSet,
1890 const NamedDecl *D,
const Expr *Exp,
1891 Expr *MutexExp, til::SExpr *
Self,
1892 SourceLocation Loc) {
1901 const FactEntry *LDat = FSet.findLock(FactMan, Cp);
1913void ThreadSafetyAnalyzer::checkAccess(
const FactSet &FSet,
const Expr *Exp,
1922 while (
const auto *DRE = dyn_cast<DeclRefExpr>(Exp)) {
1923 const auto *VD = dyn_cast<VarDecl>(DRE->getDecl()->getCanonicalDecl());
1925 if (
const auto *E = VD->getInit()) {
1936 if (
const auto *UO = dyn_cast<UnaryOperator>(Exp)) {
1938 if (UO->getOpcode() == UO_Deref)
1939 checkPtAccess(FSet, UO->getSubExpr(), AK, POK);
1943 if (
const auto *BO = dyn_cast<BinaryOperator>(Exp)) {
1946 return checkAccess(FSet, BO->
getLHS(), AK, POK);
1948 return checkPtAccess(FSet, BO->
getLHS(), AK, POK);
1954 if (
const auto *AE = dyn_cast<ArraySubscriptExpr>(Exp)) {
1955 checkPtAccess(FSet, AE->getLHS(), AK, POK);
1959 if (
const auto *ME = dyn_cast<MemberExpr>(Exp)) {
1961 checkPtAccess(FSet, ME->getBase(), AK, POK);
1963 checkAccess(FSet, ME->getBase(), AK, POK);
1970 if (D->
hasAttr<GuardedVarAttr>() && FSet.isEmpty(FactMan)) {
1975 if (AK ==
AK_Written || I->args_size() == 1) {
1978 for (
auto *Arg : I->args())
1979 warnIfMutexNotHeld(FSet, D, Exp, AK, Arg, POK,
nullptr, Loc);
1983 warnIfAnyMutexNotHeldForRead(FSet, D, Exp, I->args(), POK, Loc);
1990void ThreadSafetyAnalyzer::checkPtAccess(
const FactSet &FSet,
const Expr *Exp,
1996 if (
const auto *PE = dyn_cast<ParenExpr>(Exp)) {
1997 Exp = PE->getSubExpr();
2000 if (
const auto *CE = dyn_cast<CastExpr>(Exp)) {
2001 if (CE->getCastKind() == CK_ArrayToPointerDecay) {
2004 checkAccess(FSet, CE->getSubExpr(), AK, POK);
2007 Exp = CE->getSubExpr();
2013 if (
const auto *UO = dyn_cast<UnaryOperator>(Exp)) {
2014 if (UO->getOpcode() == UO_AddrOf) {
2017 checkAccess(FSet, UO->getSubExpr(), AK, POK);
2045 if (D->
hasAttr<PtGuardedVarAttr>() && FSet.isEmpty(FactMan))
2049 if (AK ==
AK_Written || I->args_size() == 1) {
2052 for (
auto *Arg : I->args())
2053 warnIfMutexNotHeld(FSet, D, Exp, AK, Arg, PtPOK,
nullptr,
2058 warnIfAnyMutexNotHeldForRead(FSet, D, Exp, I->args(), PtPOK,
2079void BuildLockset::handleCall(
const Expr *Exp,
const NamedDecl *D,
2080 til::SExpr *
Self, SourceLocation Loc) {
2081 CapExprSet ExclusiveLocksToAdd, SharedLocksToAdd;
2082 CapExprSet ExclusiveLocksToRemove, SharedLocksToRemove, GenericLocksToRemove;
2083 CapExprSet ScopedReqsAndExcludes;
2091 til::LiteralPtr *Placeholder =
2093 [[maybe_unused]]
auto inserted =
2094 Analyzer->ConstructedObjects.insert({Exp, Placeholder});
2095 assert(inserted.second &&
"Are we visiting the same expression again?");
2098 if (TagT->getDecl()->getMostRecentDecl()->hasAttr<ScopedLockableAttr>())
2099 Scp = CapabilityExpr(Placeholder, Exp->
getType(),
false);
2106 for(
const Attr *At : D->
attrs()) {
2107 switch (At->getKind()) {
2110 case attr::AcquireCapability: {
2112 Analyzer->getMutexIDs(A->isShared() ? SharedLocksToAdd
2113 : ExclusiveLocksToAdd,
2121 case attr::AssertCapability: {
2123 CapExprSet AssertLocks;
2124 Analyzer->getMutexIDs(AssertLocks, A, Exp, D,
Self);
2125 for (
const auto &AssertLock : AssertLocks)
2127 FSet, Analyzer->FactMan.createFact<LockableFactEntry>(
2129 Loc, FactEntry::Asserted));
2135 case attr::ReleaseCapability: {
2138 Analyzer->getMutexIDs(GenericLocksToRemove, A, Exp, D,
Self);
2139 else if (A->isShared())
2140 Analyzer->getMutexIDs(SharedLocksToRemove, A, Exp, D,
Self);
2142 Analyzer->getMutexIDs(ExclusiveLocksToRemove, A, Exp, D,
Self);
2146 case attr::RequiresCapability: {
2148 for (
auto *Arg : A->args()) {
2149 Analyzer->warnIfMutexNotHeld(FSet, D, Exp,
2154 Analyzer->getMutexIDs(ScopedReqsAndExcludes, A, Exp, D,
Self);
2159 case attr::LocksExcluded: {
2161 for (
auto *Arg : A->args()) {
2162 Analyzer->warnIfMutexHeld(FSet, D, Exp, Arg,
Self, Loc);
2165 Analyzer->getMutexIDs(ScopedReqsAndExcludes, A, Exp, D,
Self);
2176 std::optional<CallExpr::const_arg_range> Args;
2178 if (
const auto *CE = dyn_cast<CallExpr>(Exp))
2179 Args = CE->arguments();
2180 else if (
const auto *CE = dyn_cast<CXXConstructExpr>(Exp))
2181 Args = CE->arguments();
2183 llvm_unreachable(
"Unknown call kind");
2185 const auto *CalledFunction = dyn_cast<FunctionDecl>(D);
2186 if (CalledFunction && Args.has_value()) {
2187 for (
auto [Param, Arg] : zip(CalledFunction->parameters(), *Args)) {
2188 CapExprSet DeclaredLocks;
2189 for (
const Attr *At : Param->attrs()) {
2190 switch (At->getKind()) {
2191 case attr::AcquireCapability: {
2193 Analyzer->getMutexIDs(A->isShared() ? SharedLocksToAdd
2194 : ExclusiveLocksToAdd,
2196 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2200 case attr::ReleaseCapability: {
2203 Analyzer->getMutexIDs(GenericLocksToRemove, A, Exp, D,
Self);
2204 else if (A->isShared())
2205 Analyzer->getMutexIDs(SharedLocksToRemove, A, Exp, D,
Self);
2207 Analyzer->getMutexIDs(ExclusiveLocksToRemove, A, Exp, D,
Self);
2208 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2212 case attr::RequiresCapability: {
2214 for (
auto *Arg : A->args())
2215 Analyzer->warnIfMutexNotHeld(FSet, D, Exp,
2218 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2222 case attr::LocksExcluded: {
2224 for (
auto *Arg : A->args())
2225 Analyzer->warnIfMutexHeld(FSet, D, Exp, Arg,
Self, Loc);
2226 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2234 if (DeclaredLocks.empty())
2236 CapabilityExpr Cp(Analyzer->SxBuilder.
translate(Arg,
nullptr),
2237 StringRef(
"mutex"),
false,
false);
2238 if (
const auto *CBTE = dyn_cast<CXXBindTemporaryExpr>(Arg->
IgnoreCasts());
2240 if (
auto Object = Analyzer->ConstructedObjects.find(CBTE->getSubExpr());
2241 Object != Analyzer->ConstructedObjects.end())
2242 Cp = CapabilityExpr(
Object->second, StringRef(
"mutex"),
false,
2245 const FactEntry *Fact = FSet.findLock(Analyzer->FactMan, Cp);
2253 for (
const auto &[a,
b] :
2254 zip_longest(DeclaredLocks, Scope->getUnderlyingMutexes())) {
2255 if (!a.has_value()) {
2258 b.value().getKind(),
b.value().toString());
2259 }
else if (!
b.has_value()) {
2262 a.value().getKind(), a.value().toString());
2263 }
else if (!a.value().equals(
b.value())) {
2266 a.value().getKind(), a.value().toString(),
b.value().toString());
2275 for (
const auto &M : ExclusiveLocksToRemove)
2276 Analyzer->removeLock(FSet, M, Loc, Dtor,
LK_Exclusive);
2277 for (
const auto &M : SharedLocksToRemove)
2278 Analyzer->removeLock(FSet, M, Loc, Dtor,
LK_Shared);
2279 for (
const auto &M : GenericLocksToRemove)
2280 Analyzer->removeLock(FSet, M, Loc, Dtor,
LK_Generic);
2283 FactEntry::SourceKind Source =
2284 !Scp.
shouldIgnore() ? FactEntry::Managed : FactEntry::Acquired;
2285 for (
const auto &M : ExclusiveLocksToAdd)
2286 Analyzer->addLock(FSet, Analyzer->FactMan.createFact<LockableFactEntry>(
2288 for (
const auto &M : SharedLocksToAdd)
2289 Analyzer->addLock(FSet, Analyzer->FactMan.createFact<LockableFactEntry>(
2294 auto *ScopedEntry = Analyzer->FactMan.createFact<ScopedLockableFactEntry>(
2295 Scp, Loc, FactEntry::Acquired,
2296 ExclusiveLocksToAdd.size() + SharedLocksToAdd.size() +
2297 ScopedReqsAndExcludes.size() + ExclusiveLocksToRemove.size() +
2298 SharedLocksToRemove.size());
2299 for (
const auto &M : ExclusiveLocksToAdd)
2300 ScopedEntry->addLock(M);
2301 for (
const auto &M : SharedLocksToAdd)
2302 ScopedEntry->addLock(M);
2303 for (
const auto &M : ScopedReqsAndExcludes)
2304 ScopedEntry->addLock(M);
2305 for (
const auto &M : ExclusiveLocksToRemove)
2306 ScopedEntry->addExclusiveUnlock(M);
2307 for (
const auto &M : SharedLocksToRemove)
2308 ScopedEntry->addSharedUnlock(M);
2309 Analyzer->addLock(FSet, ScopedEntry);
2316void BuildLockset::VisitUnaryOperator(
const UnaryOperator *UO) {
2332void BuildLockset::VisitBinaryOperator(
const BinaryOperator *BO) {
2336 updateLocalVarMapCtx(BO);
2342void BuildLockset::VisitCastExpr(
const CastExpr *CE) {
2348void BuildLockset::examineArguments(
const FunctionDecl *FD,
2351 bool SkipFirstParam) {
2361 if (FD->
hasAttr<NoThreadSafetyAnalysisAttr>())
2364 const ArrayRef<ParmVarDecl *> Params = FD->
parameters();
2365 auto Param = Params.begin();
2370 for (
auto Arg = ArgBegin; Param != Params.end() && Arg != ArgEnd;
2372 QualType Qt = (*Param)->getType();
2380void BuildLockset::VisitCallExpr(
const CallExpr *Exp) {
2381 if (
const auto *CE = dyn_cast<CXXMemberCallExpr>(Exp)) {
2382 const auto *ME = dyn_cast<MemberExpr>(CE->getCallee());
2384 const CXXMethodDecl *MD = CE->getMethodDecl();
2387 if (ME->isArrow()) {
2389 checkPtAccess(CE->getImplicitObjectArgument(),
AK_Read);
2392 checkAccess(CE->getImplicitObjectArgument(),
AK_Read);
2396 examineArguments(CE->getDirectCallee(), CE->arg_begin(), CE->arg_end());
2397 }
else if (
const auto *OE = dyn_cast<CXXOperatorCallExpr>(Exp)) {
2405 case OO_PercentEqual:
2409 case OO_LessLessEqual:
2410 case OO_GreaterGreaterEqual:
2411 checkAccess(OE->getArg(1),
AK_Read);
2421 if (!(OEop == OO_Star && OE->getNumArgs() > 1)) {
2423 checkPtAccess(OE->getArg(0),
AK_Read);
2428 const Expr *Obj = OE->getArg(0);
2433 const FunctionDecl *FD = OE->getDirectCallee();
2434 examineArguments(FD, std::next(OE->arg_begin()), OE->arg_end(),
2443 auto *D = dyn_cast_or_null<NamedDecl>(Exp->
getCalleeDecl());
2446 updateLocalVarMapCtx(Exp);
2449void BuildLockset::VisitCXXConstructExpr(
const CXXConstructExpr *Exp) {
2452 const Expr* Source = Exp->
getArg(0);
2462 if (
auto *CE = dyn_cast<CastExpr>(E))
2465 if (
auto *CE = dyn_cast<CastExpr>(E))
2466 if (CE->
getCastKind() == CK_ConstructorConversion ||
2469 if (
auto *BTE = dyn_cast<CXXBindTemporaryExpr>(E))
2470 E = BTE->getSubExpr();
2474void BuildLockset::VisitDeclStmt(
const DeclStmt *S) {
2476 if (
auto *VD = dyn_cast_or_null<VarDecl>(D)) {
2477 const Expr *E = VD->getInit();
2483 if (
auto *EWC = dyn_cast<ExprWithCleanups>(E))
2487 if (
auto Object = Analyzer->ConstructedObjects.find(E);
2488 Object != Analyzer->ConstructedObjects.end()) {
2489 Object->second->setClangDecl(VD);
2490 Analyzer->ConstructedObjects.erase(Object);
2494 updateLocalVarMapCtx(S);
2497void BuildLockset::VisitMaterializeTemporaryExpr(
2498 const MaterializeTemporaryExpr *Exp) {
2500 if (
auto Object = Analyzer->ConstructedObjects.find(
2502 Object != Analyzer->ConstructedObjects.end()) {
2503 Object->second->setClangDecl(ExtD);
2504 Analyzer->ConstructedObjects.erase(Object);
2509void BuildLockset::VisitReturnStmt(
const ReturnStmt *S) {
2510 if (Analyzer->CurrentFunction ==
nullptr)
2518 const QualType ReturnType =
2521 Analyzer->checkAccess(
2522 FunctionExitFSet, RetVal,
2526 Analyzer->checkPtAccess(
2527 FunctionExitFSet, RetVal,
2537bool ThreadSafetyAnalyzer::join(
const FactEntry &A,
const FactEntry &B,
2538 SourceLocation JoinLoc,
2542 unsigned int ReentrancyDepthA = 0;
2543 unsigned int ReentrancyDepthB = 0;
2545 if (
const auto *LFE = dyn_cast<LockableFactEntry>(&A))
2546 ReentrancyDepthA = LFE->getReentrancyDepth();
2547 if (
const auto *LFE = dyn_cast<LockableFactEntry>(&B))
2548 ReentrancyDepthB = LFE->getReentrancyDepth();
2550 if (ReentrancyDepthA != ReentrancyDepthB) {
2556 return CanModify && ReentrancyDepthA < ReentrancyDepthB;
2557 }
else if (A.kind() != B.kind()) {
2560 if ((A.managed() || A.asserted()) && (B.managed() || B.asserted())) {
2562 bool ShouldTakeB = B.kind() ==
LK_Shared;
2563 if (CanModify || !ShouldTakeB)
2572 return CanModify && A.asserted() && !B.asserted();
2590void ThreadSafetyAnalyzer::intersectAndWarn(FactSet &EntrySet,
2591 const FactSet &ExitSet,
2592 SourceLocation JoinLoc,
2595 FactSet EntrySetOrig = EntrySet;
2598 for (
const auto &Fact : ExitSet) {
2599 const FactEntry &ExitFact = FactMan[Fact];
2601 FactSet::iterator EntryIt = EntrySet.findLockIter(FactMan, ExitFact);
2602 if (EntryIt != EntrySet.end()) {
2603 if (join(FactMan[*EntryIt], ExitFact, JoinLoc, EntryLEK))
2606 ExitFact.handleRemovalFromIntersection(ExitSet, FactMan, JoinLoc,
2612 for (
const auto &Fact : EntrySetOrig) {
2613 const FactEntry *EntryFact = &FactMan[Fact];
2614 const FactEntry *ExitFact = ExitSet.findLock(FactMan, *EntryFact);
2619 EntryFact->handleRemovalFromIntersection(EntrySetOrig, FactMan, JoinLoc,
2622 EntrySet.removeLock(FactMan, *EntryFact);
2635 if (std::optional<CFGStmt> S =
Last.getAs<
CFGStmt>()) {
2647void ThreadSafetyAnalyzer::runAnalysis(AnalysisDeclContext &AC) {
2650 threadSafety::CFGWalker walker;
2651 if (!walker.
init(AC))
2658 const NamedDecl *D = walker.
getDecl();
2659 CurrentFunction = dyn_cast<FunctionDecl>(D);
2661 if (D->
hasAttr<NoThreadSafetyAnalysisAttr>())
2676 CFGBlockInfo::getEmptyBlockInfo(LocalVarMap));
2682 PostOrderCFGView::CFGBlockSet VisitedBlocks(CFGraph);
2688 Initial.Reachable =
true;
2691 LocalVarMap.traverseCFG(CFGraph, SortedGraph, BlockInfo);
2696 CapExprSet ExclusiveLocksAcquired;
2697 CapExprSet SharedLocksAcquired;
2698 CapExprSet LocksReleased;
2703 if (!SortedGraph->
empty()) {
2705 FactSet &InitialLockset = Initial.EntrySet;
2707 CapExprSet ExclusiveLocksToAdd;
2708 CapExprSet SharedLocksToAdd;
2711 for (
const auto *Attr : D->
attrs()) {
2712 Loc = Attr->getLocation();
2713 if (
const auto *A = dyn_cast<RequiresCapabilityAttr>(Attr)) {
2714 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2716 }
else if (
const auto *A = dyn_cast<ReleaseCapabilityAttr>(Attr)) {
2719 if (A->args_size() == 0)
2721 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2723 getMutexIDs(LocksReleased, A,
nullptr, D);
2724 }
else if (
const auto *A = dyn_cast<AcquireCapabilityAttr>(Attr)) {
2725 if (A->args_size() == 0)
2727 getMutexIDs(A->isShared() ? SharedLocksAcquired
2728 : ExclusiveLocksAcquired,
2735 ArrayRef<ParmVarDecl *> Params;
2736 if (CurrentFunction)
2738 else if (
auto CurrentMethod = dyn_cast<ObjCMethodDecl>(D))
2739 Params = CurrentMethod->getCanonicalDecl()->parameters();
2741 llvm_unreachable(
"Unknown function kind");
2742 for (
const ParmVarDecl *Param : Params) {
2743 CapExprSet UnderlyingLocks;
2744 for (
const auto *Attr : Param->attrs()) {
2745 Loc = Attr->getLocation();
2746 if (
const auto *A = dyn_cast<ReleaseCapabilityAttr>(Attr)) {
2747 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2749 getMutexIDs(LocksReleased, A,
nullptr, Param);
2750 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2751 }
else if (
const auto *A = dyn_cast<RequiresCapabilityAttr>(Attr)) {
2752 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2754 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2755 }
else if (
const auto *A = dyn_cast<AcquireCapabilityAttr>(Attr)) {
2756 getMutexIDs(A->isShared() ? SharedLocksAcquired
2757 : ExclusiveLocksAcquired,
2759 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2760 }
else if (
const auto *A = dyn_cast<LocksExcludedAttr>(Attr)) {
2761 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2764 if (UnderlyingLocks.empty())
2769 auto *ScopedEntry = FactMan.createFact<ScopedLockableFactEntry>(
2770 Cp, Param->getLocation(), FactEntry::Declared,
2771 UnderlyingLocks.size());
2772 for (
const CapabilityExpr &M : UnderlyingLocks)
2773 ScopedEntry->addLock(M);
2774 addLock(InitialLockset, ScopedEntry,
true);
2778 for (
const auto &Mu : ExclusiveLocksToAdd) {
2779 const auto *Entry = FactMan.createFact<LockableFactEntry>(
2781 addLock(InitialLockset, Entry,
true);
2783 for (
const auto &Mu : SharedLocksToAdd) {
2784 const auto *Entry = FactMan.createFact<LockableFactEntry>(
2785 Mu,
LK_Shared, Loc, FactEntry::Declared);
2786 addLock(InitialLockset, Entry,
true);
2792 FactSet ExpectedFunctionExitSet = Initial.EntrySet;
2798 for (
const auto &Lock : ExclusiveLocksAcquired)
2799 ExpectedFunctionExitSet.addLock(
2800 FactMan, FactMan.createFact<LockableFactEntry>(Lock,
LK_Exclusive,
2802 for (
const auto &Lock : SharedLocksAcquired)
2803 ExpectedFunctionExitSet.addLock(
2804 FactMan, FactMan.createFact<LockableFactEntry>(Lock,
LK_Shared,
2806 for (
const auto &Lock : LocksReleased)
2807 ExpectedFunctionExitSet.removeLock(FactMan, Lock);
2809 for (
const auto *CurrBlock : *SortedGraph) {
2810 unsigned CurrBlockID = CurrBlock->
getBlockID();
2811 CFGBlockInfo *CurrBlockInfo = &BlockInfo[CurrBlockID];
2814 VisitedBlocks.insert(CurrBlock);
2829 bool LocksetInitialized =
false;
2831 PE = CurrBlock->
pred_end(); PI != PE; ++PI) {
2833 if (*PI ==
nullptr || !VisitedBlocks.alreadySet(*PI))
2836 unsigned PrevBlockID = (*PI)->getBlockID();
2837 CFGBlockInfo *PrevBlockInfo = &BlockInfo[PrevBlockID];
2844 CurrBlockInfo->Reachable =
true;
2846 FactSet PrevLockset;
2847 getEdgeLockset(PrevLockset, PrevBlockInfo->ExitSet, *PI, CurrBlock);
2849 if (!LocksetInitialized) {
2850 CurrBlockInfo->EntrySet = PrevLockset;
2851 LocksetInitialized =
true;
2857 CurrBlockInfo->EntrySet, PrevLockset, CurrBlockInfo->EntryLoc,
2858 isa_and_nonnull<ContinueStmt>((*PI)->getTerminatorStmt())
2865 if (!CurrBlockInfo->Reachable)
2868 BuildLockset LocksetBuilder(
this, *CurrBlockInfo, ExpectedFunctionExitSet);
2871 for (
const auto &BI : *CurrBlock) {
2872 switch (BI.getKind()) {
2874 CFGStmt CS = BI.castAs<CFGStmt>();
2875 LocksetBuilder.Visit(CS.
getStmt());
2880 CFGAutomaticObjDtor AD = BI.castAs<CFGAutomaticObjDtor>();
2886 if (isa_and_nonnull<ParmVarDecl>(AD.
getVarDecl()))
2888 if (!DD || !DD->hasAttrs())
2891 LocksetBuilder.handleCall(
2899 const CFGCleanupFunction &
CF = BI.castAs<CFGCleanupFunction>();
2900 LocksetBuilder.handleCall(
2901 nullptr,
CF.getFunctionDecl(),
2903 CF.getVarDecl()->getLocation());
2908 auto TD = BI.castAs<CFGTemporaryDtor>();
2912 if (
auto Object = ConstructedObjects.find(
2913 TD.getBindTemporaryExpr()->getSubExpr());
2914 Object != ConstructedObjects.end()) {
2918 LocksetBuilder.handleCall(
nullptr, DD,
Object->second,
2919 TD.getBindTemporaryExpr()->getEndLoc());
2920 ConstructedObjects.erase(Object);
2928 CurrBlockInfo->ExitSet = LocksetBuilder.FSet;
2935 SE = CurrBlock->succ_end(); SI != SE; ++SI) {
2937 if (*SI ==
nullptr || !VisitedBlocks.alreadySet(*SI))
2940 CFGBlock *FirstLoopBlock = *SI;
2941 CFGBlockInfo *PreLoop = &BlockInfo[FirstLoopBlock->
getBlockID()];
2942 CFGBlockInfo *LoopEnd = &BlockInfo[CurrBlockID];
2943 intersectAndWarn(PreLoop->EntrySet, LoopEnd->ExitSet, PreLoop->EntryLoc,
2949 if (!Final.Reachable)
2953 intersectAndWarn(ExpectedFunctionExitSet, Final.ExitSet, Final.ExitLoc,
2969 ThreadSafetyAnalyzer Analyzer(Handler, *BSet);
2970 Analyzer.runAnalysis(AC);
2984 llvm_unreachable(
"Unknown AccessKind");
This file defines AnalysisDeclContext, a class that manages the analysis context data for context-sensitive and path-sensitive analyses.
Defines enum values for all the target-independent builtin functions.
static void dump(llvm::raw_ostream &OS, StringRef FunctionName, ArrayRef< CounterExpression > Expressions, ArrayRef< CounterMappingRegion > Regions)
static Decl::Kind getKind(const Decl *D)
Defines the C++ Decl subclasses, other than those for templates (found in DeclTemplate....
Defines the clang::Expr interface and subclasses for C++ expressions.
Forward-declares and imports various common LLVM datatypes that clang wants to use unqualified.
A collection of selectors, each with an associated kind; an ordered collection of selectors. A selector has a kind.
Defines an enumeration for C++ overloaded operators.
static std::string toString(const clang::SanitizerSet &Sanitizers)
Produce a string containing comma-separated names of sanitizers in Sanitizers set.
Defines the clang::SourceLocation class and associated facilities.
Defines various enumerations that describe declaration and type specifiers.
static void warnInvalidLock(ThreadSafetyHandler &Handler, const Expr *MutexExp, const NamedDecl *D, const Expr *DeclExp, StringRef Kind)
Issue a warning about an invalid lock expression.
static bool getStaticBooleanValue(Expr *E, bool &TCond)
static bool neverReturns(const CFGBlock *B)
static void findBlockLocations(CFG *CFGraph, const PostOrderCFGView *SortedGraph, std::vector< CFGBlockInfo > &BlockInfo)
Find the appropriate source locations to use when producing diagnostics for each block in the CFG.
static const ValueDecl * getValueDecl(const Expr *Exp)
Gets the value decl pointer from DeclRefExprs or MemberExprs.
static const Expr * UnpackConstruction(const Expr *E)
C Language Family Type Representation.
AnalysisDeclContext contains the context data for the function, method or block under analysis.
ASTContext & getASTContext() const
static bool isAssignmentOp(Opcode Opc)
const VarDecl * getVarDecl() const
const Stmt * getTriggerStmt() const
Represents a single basic block in a source-level CFG.
bool hasNoReturnElement() const
ElementList::const_reverse_iterator const_reverse_iterator
succ_iterator succ_begin()
Stmt * getTerminatorStmt()
AdjacentBlocks::const_iterator const_pred_iterator
const Stmt * getTerminatorCondition(bool StripParens=true) const
pred_iterator pred_begin()
unsigned getBlockID() const
AdjacentBlocks::const_iterator const_succ_iterator
Represents a top-level expression in a basic block.
const CXXDestructorDecl * getDestructorDecl(ASTContext &astContext) const
const Stmt * getStmt() const
Represents a source-level, intra-procedural CFG that represents the control-flow of a Stmt.
unsigned getNumBlockIDs() const
Returns the total number of BlockIDs allocated (which start at 0).
Expr * getArg(unsigned Arg)
Return the specified argument.
CXXConstructorDecl * getConstructor() const
Get the constructor that this expression will (ultimately) call.
bool isCopyConstructor(unsigned &TypeQuals) const
Whether this constructor is a copy constructor (C++ [class.copy]p2, which can be used to copy the cla...
Expr * getArg(unsigned Arg)
getArg - Return the specified argument.
ConstExprIterator const_arg_iterator
FunctionDecl * getDirectCallee()
If the callee is a FunctionDecl, return it. Otherwise return null.
unsigned getNumArgs() const
getNumArgs - Return the number of actual arguments to this call.
CastKind getCastKind() const
const DeclGroupRef getDeclGroup() const
SourceLocation getBeginLoc() const LLVM_READONLY
llvm::iterator_range< specific_attr_iterator< T > > specific_attrs() const
SourceLocation getLocation() const
bool isDefinedOutsideFunctionOrMethod() const
isDefinedOutsideFunctionOrMethod - This predicate returns true if this scoped decl is defined outside...
DeclContext * getDeclContext()
This represents one expression.
Expr * IgnoreParenCasts() LLVM_READONLY
Skip past any parentheses and casts which might surround this expression until reaching a fixed point...
Expr * IgnoreParenImpCasts() LLVM_READONLY
Skip past any parentheses and implicit casts which might surround this expression until reaching a fi...
Expr * IgnoreImplicit() LLVM_READONLY
Skip past any implicit AST nodes which might surround this expression until reaching a fixed point.
Expr * IgnoreParens() LLVM_READONLY
Skip past any parentheses which might surround this expression until reaching a fixed point.
Expr * IgnoreCasts() LLVM_READONLY
Skip past any casts which might surround this expression until reaching a fixed point.
SourceLocation getExprLoc() const LLVM_READONLY
getExprLoc - Return the preferred location for the arrow when diagnosing a problem with a generic exp...
const ParmVarDecl * getParamDecl(unsigned i) const
QualType getReturnType() const
ArrayRef< ParmVarDecl * > parameters() const
FunctionDecl * getCanonicalDecl() override
Retrieves the "canonical" declaration of the given declaration.
unsigned getNumParams() const
Return the number of parameters this function must have based on its FunctionType.
Expr * getSubExpr() const
Retrieve the temporary-generating subexpression whose value will be materialized into a glvalue.
ValueDecl * getExtendingDecl()
Get the declaration which triggered the lifetime-extension of this temporary, if any.
This represents a decl that may have a name.
IdentifierInfo * getIdentifier() const
Get the identifier that names this declaration, if there is one.
StringRef getName() const
Get the name of identifier for this declaration as a StringRef.
std::string getNameAsString() const
Get a human-readable name for the declaration, even if it is one of the special kinds of names (C++ c...
virtual void printName(raw_ostream &OS, const PrintingPolicy &Policy) const
Pretty-print the unqualified name of this declaration.
bool isTrivialType(const ASTContext &Context) const
Return true if this is a trivial type per (C++0x [basic.types]p9)
QualType getCanonicalType() const
bool isConstQualified() const
Determine whether this type is const-qualified.
Encodes a location in the source.
bool isValid() const
Return true if this is a valid SourceLocation object.
Stmt - This represents one statement.
SourceLocation getEndLoc() const LLVM_READONLY
void dump() const
Dumps the specified AST fragment and all subtrees to llvm::errs().
bool isPointerType() const
bool isReferenceType() const
QualType getPointeeType() const
If this is a pointer, ObjC object pointer, or block pointer, this returns the respective pointee.
bool isLValueReferenceType() const
const T * getAs() const
Member-template getAs<specific type>.
Expr * getSubExpr() const
Represent the declaration of a variable (in which case it is an lvalue) a function (in which case it ...
void checkBeforeAfter(const ValueDecl *Vd, const FactSet &FSet, ThreadSafetyAnalyzer &Analyzer, SourceLocation Loc, StringRef CapKind)
Return true if any mutexes in FSet are in the acquired_before set of Vd.
BeforeInfo * insertAttrExprs(const ValueDecl *Vd, ThreadSafetyAnalyzer &Analyzer)
Process acquired_before and acquired_after attributes on Vd.
BeforeInfo * getBeforeInfoForDecl(const ValueDecl *Vd, ThreadSafetyAnalyzer &Analyzer)
const PostOrderCFGView * getSortedGraph() const
const NamedDecl * getDecl() const
bool init(AnalysisDeclContext &AC)
const CFG * getGraph() const
bool shouldIgnore() const
bool equals(const CapabilityExpr &other) const
const til::SExpr * sexpr() const
std::string toString() const
const ValueDecl * valueDecl() const
StringRef getKind() const
CapabilityExpr translateAttrExpr(const Expr *AttrExp, const NamedDecl *D, const Expr *DeclExp, til::SExpr *Self=nullptr)
Translate a clang expression in an attribute to a til::SExpr.
void setLookupLocalVarExpr(std::function< const Expr *(const NamedDecl *)> F)
til::SExpr * translate(const Stmt *S, CallingContext *Ctx)
til::LiteralPtr * createThisPlaceholder()
til::SExpr * translateVariable(const VarDecl *VD, CallingContext *Ctx)
Handler class for thread safety warnings.
virtual ~ThreadSafetyHandler()
virtual void handleExpectMoreUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc, Name ScopeName, StringRef Kind, Name Expected)
Warn when we get fewer underlying mutexes than expected.
virtual void handleInvalidLockExp(SourceLocation Loc)
Warn about lock expressions which fail to resolve to lockable objects.
virtual void handleUnmatchedUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc, Name ScopeName, StringRef Kind, Name Expected, Name Actual)
Warn when an actual underlying mutex of a scoped lockable does not match the expected.
virtual void handleExpectFewerUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc, Name ScopeName, StringRef Kind, Name Actual)
Warn when we get more underlying mutexes than expected.
virtual void enterFunction(const FunctionDecl *FD)
Called by the analysis when starting analysis of a function.
virtual void handleIncorrectUnlockKind(StringRef Kind, Name LockName, LockKind Expected, LockKind Received, SourceLocation LocLocked, SourceLocation LocUnlock)
Warn about an unlock function call that attempts to unlock a lock with the incorrect lock kind.
virtual void handleMutexHeldEndOfScope(StringRef Kind, Name LockName, SourceLocation LocLocked, SourceLocation LocEndOfScope, LockErrorKind LEK, bool ReentrancyMismatch=false)
Warn about situations where a mutex is sometimes held and sometimes not.
virtual void leaveFunction(const FunctionDecl *FD)
Called by the analysis when finishing analysis of a function.
virtual void handleExclusiveAndShared(StringRef Kind, Name LockName, SourceLocation Loc1, SourceLocation Loc2)
Warn when a mutex is held exclusively and shared at the same point.
virtual void handleMutexNotHeld(StringRef Kind, const NamedDecl *D, ProtectedOperationKind POK, Name LockName, LockKind LK, SourceLocation Loc, Name *PossibleMatch=nullptr)
Warn when a protected operation occurs while the specific mutex protecting the operation is not locke...
virtual void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName, SourceLocation Loc)
Warn when a function is called while an excluded mutex is locked.
virtual void handleGuardedByAnyReadNotHeld(const NamedDecl *D, ProtectedOperationKind POK, ArrayRef< StringRef > LockNames, SourceLocation Loc)
Warn when a read of a multi-capability guarded_by variable occurs while none of the listed capabiliti...
virtual void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK, AccessKind AK, SourceLocation Loc)
Warn when a protected operation occurs while no locks are held.
virtual void handleUnmatchedUnlock(StringRef Kind, Name LockName, SourceLocation Loc, SourceLocation LocPreviousUnlock)
Warn about unlock function calls that do not have a prior matching lock expression.
virtual void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg, SourceLocation Loc)
Warn when acquiring a lock that the negative capability is not held.
virtual void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked, SourceLocation LocDoubleLock)
Warn about lock function calls for locks which are already held.
internal::Matcher< T > traverse(TraversalKind TK, const internal::Matcher< T > &InnerMatcher)
Causes all nested matchers to be matched with the specified traversal kind.
@ CF
Indicates that the tracked object is a CF object.
bool Alloc(InterpState &S, CodePtr OpPC, const Descriptor *Desc)
bool Dec(InterpState &S, CodePtr OpPC, bool CanOverflow)
1) Pops a pointer from the stack 2) Loads the value from the pointer 3) Writes the value decreased by one back to the pointer 4) Pushes the original (pre-decrement) value on the stack.
bool Neg(InterpState &S, CodePtr OpPC)
utils::ID< struct FactTag > FactID
std::unique_ptr< DiagnosticConsumer > create(StringRef OutputFile, DiagnosticOptions &DiagOpts, bool MergeChildRecords=false)
Returns a DiagnosticConsumer that serializes diagnostics to a bitcode file.
llvm::json::Object Object
bool matches(const til::SExpr *E1, const til::SExpr *E2)
LockKind getLockKindFromAccessKind(AccessKind AK)
Helper function that returns a LockKind required for the given level of access.
LockErrorKind
This enum distinguishes between different situations where we warn due to inconsistent locking.
@ LEK_NotLockedAtEndOfFunction
Expecting a capability to be held at the end of function.
@ LEK_LockedSomePredecessors
A capability is locked in some but not all predecessors of a CFGBlock.
@ LEK_LockedAtEndOfFunction
A capability is still locked at the end of a function.
@ LEK_LockedSomeLoopIterations
A capability is locked for some but not all loop iterations.
void threadSafetyCleanup(BeforeSet *Cache)
AccessKind
This enum distinguishes between different ways to access (read or write) a variable.
@ AK_Written
Writing a variable.
@ AK_Read
Reading a variable.
LockKind
This enum distinguishes between different kinds of lock actions.
@ LK_Shared
Shared/reader lock of a mutex.
@ LK_Exclusive
Exclusive/writer lock of a mutex.
@ LK_Generic
Can be either Shared or Exclusive.
void runThreadSafetyAnalysis(AnalysisDeclContext &AC, ThreadSafetyHandler &Handler, BeforeSet **Bset)
Check a function's CFG for thread-safety violations.
ProtectedOperationKind
This enum distinguishes between different kinds of operations that may need to be protected by locks.
@ POK_PtPassByRef
Passing a pt-guarded variable by reference.
@ POK_PassPointer
Passing pointer to a guarded variable.
@ POK_VarDereference
Dereferencing a variable (e.g. p in *p = 5;)
@ POK_PassByRef
Passing a guarded variable by reference.
@ POK_ReturnByRef
Returning a guarded variable by reference.
@ POK_PtPassPointer
Passing a pt-guarded pointer.
@ POK_PtReturnPointer
Returning a pt-guarded pointer.
@ POK_VarAccess
Reading or writing a variable (e.g. x in x = 5;)
@ POK_FunctionCall
Making a function call (e.g. foo())
@ POK_ReturnPointer
Returning pointer to a guarded variable.
@ POK_PtReturnByRef
Returning a pt-guarded variable by reference.
The JSON file list parser is used to communicate input to InstallAPI.
OverloadedOperatorKind
Enumeration specifying the different kinds of C++ overloaded operators.
bool isa(CodeGen::Address addr)
static bool classof(const OMPClause *T)
@ Self
'self' clause, allowed on Compute and Combined Constructs, plus 'update'.
nullptr
This class represents a compute construct, representing a 'Kind' of 'parallel', 'serial',...
@ Result
The result type of a method or function.
U cast(CodeGen::Address addr)
@ Other
Other implicit parameter.
int const char * function