39#include "llvm/ADT/DenseMap.h"
40#include "llvm/ADT/ImmutableMap.h"
41#include "llvm/ADT/STLExtras.h"
42#include "llvm/ADT/SmallVector.h"
43#include "llvm/ADT/StringRef.h"
44#include "llvm/Support/Allocator.h"
45#include "llvm/Support/ErrorHandling.h"
46#include "llvm/Support/TrailingObjects.h"
47#include "llvm/Support/raw_ostream.h"
66 const Expr *DeclExp, StringRef Kind) {
// CapExprSet: a small vector of CapabilityExpr with duplicate-avoiding insert.
// NOTE(review): extraction fragment — the lambda body and closing braces of
// push_back_nodup are missing from this chunk; do not treat as compilable.
80class CapExprSet :
    public SmallVector<CapabilityExpr, 4> {
  // Push CapE only if no existing element matches it (per the predicate below).
83  void push_back_nodup(
const CapabilityExpr &CapE) {
84    if (llvm::none_of(*
this, [=](
const CapabilityExpr &CapE2) {
// FactEntry: one "fact" (a held capability/lock) tracked by the analysis.
// NOTE(review): the enclosing class header and several members are missing
// from this extraction fragment; fields below are bit-packed with the kind.
100  enum FactEntryKind { Lockable, ScopedLockable };
  // Discriminator for the LLVM-style classof() dispatch below.
111  const FactEntryKind Kind : 8;
117  SourceKind Source : 8;
  // Where the capability was acquired (used for diagnostics).
120  SourceLocation AcquireLoc;
123  ~FactEntry() =
default;
126  FactEntry(FactEntryKind FK,
const CapabilityExpr &CE,
LockKind LK,
127            SourceLocation Loc, SourceKind Src)
128      : CapabilityExpr(CE), Kind(FK), LKind(LK), Source(Src), AcquireLoc(Loc) {}
131  SourceLocation loc()
const {
return AcquireLoc; }
132  FactEntryKind getFactEntryKind()
const {
return Kind; }
  // Source-kind predicates: how this fact entered the fact set.
134  bool asserted()
const {
return Source == Asserted; }
135  bool declared()
const {
return Source == Declared; }
136  bool managed()
const {
return Source == Managed; }
  // Virtual hooks: subclasses decide how a fact reacts when it disappears at
  // a CFG join, is re-acquired, or is released.
139  handleRemovalFromIntersection(
const FactSet &FSet, FactManager &FactMan,
141                                ThreadSafetyHandler &Handler)
const = 0;
142  virtual void handleLock(FactSet &FSet, FactManager &FactMan,
143                          const FactEntry &entry,
144                          ThreadSafetyHandler &Handler)
const = 0;
145  virtual void handleUnlock(FactSet &FSet, FactManager &FactMan,
146                            const CapabilityExpr &Cp, SourceLocation UnlockLoc,
148                            ThreadSafetyHandler &Handler)
const = 0;
// Index of a FactEntry inside FactManager (16-bit; see the assert in newFact).
156using FactID =
unsigned short;
// FactManager: owns all FactEntry objects (arena-allocated) and maps
// FactID -> FactEntry. NOTE(review): fragment — class header/braces missing.
162  llvm::BumpPtrAllocator &Alloc;
163  std::vector<const FactEntry *> Facts;
166  FactManager(llvm::BumpPtrAllocator &Alloc) : Alloc(Alloc) {}
  // Arena-allocate a fact of type T. Trivial destructibility is required
  // because the bump allocator never runs destructors.
168  template <
typename T,
typename... ArgTypes>
169  T *createFact(ArgTypes &&...Args) {
170    static_assert(std::is_trivially_destructible_v<T>);
171    return T::create(Alloc, std::forward<ArgTypes>(Args)...);
  // Register an entry and return its ID; aborts (assert) if the 16-bit
  // FactID space overflows.
174  FactID newFact(
const FactEntry *Entry) {
175    Facts.push_back(Entry);
176    assert(Facts.size() - 1 <= std::numeric_limits<FactID>::max() &&
177           "FactID space exhausted");
178    return static_cast<unsigned short>(Facts.size() - 1);
181  const FactEntry &operator[](FactID F)
const {
return *Facts[F]; }
// FactSet: the set of facts (capabilities held) at one program point, stored
// as a small vector of FactIDs into a FactManager.
// NOTE(review): fragment — class header, some statements, and closing braces
// are missing from this extraction.
193  using FactVec = SmallVector<FactID, 4>;
198  using iterator = FactVec::iterator;
199  using const_iterator = FactVec::const_iterator;
201  iterator begin() {
return FactIDs.begin(); }
202  const_iterator begin()
const {
return FactIDs.begin(); }
204  iterator end() {
return FactIDs.end(); }
205  const_iterator end()
const {
return FactIDs.end(); }
207  bool isEmpty()
const {
return FactIDs.size() == 0; }
  // "Empty" ignoring negative facts — scans entries via the manager.
210  bool isEmpty(FactManager &FactMan)
const {
211    for (
const auto FID : *
this) {
212      if (!FactMan[FID].negative())
218  void addLockByID(FactID ID) { FactIDs.push_back(ID); }
220  FactID addLock(FactManager &FM,
const FactEntry *Entry) {
221    FactID F = FM.newFact(Entry);
222    FactIDs.push_back(F);
  // Remove the first fact matching CapE; uses swap-with-last erase, so
  // element order is not preserved.
226  bool removeLock(FactManager& FM,
const CapabilityExpr &CapE) {
227    unsigned n = FactIDs.size();
231    for (
unsigned i = 0; i < n-1; ++i) {
232      if (FM[FactIDs[i]].
matches(CapE)) {
233        FactIDs[i] = FactIDs[n-1];
238    if (FM[FactIDs[n-1]].
matches(CapE)) {
  // Replace the fact at It with a new entry (creating a new FactID).
245  std::optional<FactID> replaceLock(FactManager &FM, iterator It,
246                                    const FactEntry *Entry) {
249    FactID F = FM.newFact(Entry);
254  std::optional<FactID> replaceLock(FactManager &FM,
const CapabilityExpr &CapE,
255                                    const FactEntry *Entry) {
256    return replaceLock(FM, findLockIter(FM, CapE), Entry);
259  iterator findLockIter(FactManager &FM,
const CapabilityExpr &CapE) {
260    return llvm::find_if(*
this,
261                         [&](FactID ID) {
return FM[
ID].matches(CapE); });
  // Linear searches below differ only in the matching predicate used:
  // exact match, universal match, or partial match.
264  const FactEntry *findLock(FactManager &FM,
const CapabilityExpr &CapE)
const {
266        llvm::find_if(*
this, [&](FactID ID) {
return FM[
ID].matches(CapE); });
267    return I != end() ? &FM[*I] :
nullptr;
270  const FactEntry *findLockUniv(FactManager &FM,
271                                const CapabilityExpr &CapE)
const {
272    auto I = llvm::find_if(
273        *
this, [&](FactID ID) ->
bool {
return FM[
ID].matchesUniv(CapE); });
274    return I != end() ? &FM[*I] :
nullptr;
277  const FactEntry *findPartialMatch(FactManager &FM,
278                                    const CapabilityExpr &CapE)
const {
279    auto I = llvm::find_if(*
this, [&](FactID ID) ->
bool {
280      return FM[
ID].partiallyMatches(CapE);
282    return I != end() ? &FM[*I] :
nullptr;
  // True if some held fact's underlying declaration is Vd.
285  bool containsMutexDecl(FactManager &FM,
const ValueDecl* Vd)
const {
286    auto I = llvm::find_if(
287        *
this, [&](FactID ID) ->
bool {
return FM[
ID].valueDecl() == Vd; });
292class ThreadSafetyAnalyzer;
// BeforeInfo fragments: per-declaration acquired-before ordering data used to
// detect lock-ordering cycles. NOTE(review): surrounding class/struct header
// and most members are missing from this extraction fragment.
307  BeforeInfo() =
default;
308  BeforeInfo(BeforeInfo &&) =
default;
312      llvm::DenseMap<const ValueDecl *, std::unique_ptr<BeforeInfo>>;
313  using CycleMap = llvm::DenseMap<const ValueDecl *, bool>;
  // Declarations only — definitions appear later in the file.
319                                  ThreadSafetyAnalyzer& Analyzer);
322                              ThreadSafetyAnalyzer &Analyzer);
326                        ThreadSafetyAnalyzer& Analyzer,
339class LocalVariableMap;
// Maps each local variable to a definition index at a program point.
341using LocalVarContext = llvm::ImmutableMap<const NamedDecl *, unsigned>;
344enum CFGBlockSide { CBS_Entry, CBS_Exit };
// CFGBlockInfo: per-CFG-block analysis state (entry/exit fact sets, variable
// contexts, and source locations). NOTE(review): fragment — the struct header
// and some members (EntrySet/ExitSet declarations) are missing here.
357  LocalVarContext EntryContext;
360  LocalVarContext ExitContext;
363  SourceLocation EntryLoc;
366  SourceLocation ExitLoc;
372  bool Reachable =
false;
  // Side-selected accessors used by the join logic.
374  const FactSet &getSet(CFGBlockSide Side)
const {
375    return Side == CBS_Entry ? EntrySet : ExitSet;
378  SourceLocation getLocation(CFGBlockSide Side)
const {
379    return Side == CBS_Entry ? EntryLoc : ExitLoc;
383  CFGBlockInfo(LocalVarContext EmptyCtx)
384      : EntryContext(EmptyCtx), ExitContext(EmptyCtx) {}
387  static CFGBlockInfo getEmptyBlockInfo(LocalVariableMap &M);
// LocalVariableMap: an SSA-like map from local variables to their defining
// expressions, tracked per program point via immutable contexts so that the
// thread-safety analysis can resolve variables holding lock expressions.
// NOTE(review): extraction fragment — many statements and closing braces are
// missing throughout this class body.
403class LocalVariableMap {
405  using Context = LocalVarContext;
  // One entry per definition; entry 0 is the canonical "undefined" slot.
411  struct VarDefinition {
413    friend class LocalVariableMap;
416    const NamedDecl *Dec;
    // Exp == nullptr means this definition is a reference (by index Ref)
    // to another definition rather than a direct expression.
419    const Expr *Exp =
nullptr;
427    bool isReference()
const {
return !Exp; }
431    VarDefinition(
const NamedDecl *D,
const Expr *E, Context
C)
432        : Dec(D), Exp(E), Ctx(
C) {}
435    VarDefinition(
const NamedDecl *D,
unsigned R, Context
C)
436        : Dec(D), Ref(R), Ctx(
C) {}
440  Context::Factory ContextFactory;
441  std::vector<VarDefinition> VarDefinitions;
  // Snapshot of the context at each visited statement, consumed in order
  // by getNextContext() during the second pass.
442  std::vector<std::pair<const Stmt *, Context>> SavedContexts;
447    VarDefinitions.push_back(VarDefinition(
nullptr, 0u, getEmptyContext()));
  // Look up the raw VarDefinition for D in Ctx (nullptr-checked elsewhere).
451  const VarDefinition* lookup(
const NamedDecl *D, Context Ctx) {
452    const unsigned *i = Ctx.lookup(D);
455    assert(*i < VarDefinitions.size());
456    return &VarDefinitions[*i];
  // Resolve D to its defining Expr, chasing reference definitions; also
  // rewinds Ctx to the context in which that definition was made.
462  const Expr* lookupExpr(
const NamedDecl *D, Context &Ctx) {
463    const unsigned *P = Ctx.lookup(D);
469      if (VarDefinitions[i].Exp) {
470        Ctx = VarDefinitions[i].Ctx;
471        return VarDefinitions[i].Exp;
473      i = VarDefinitions[i].Ref;
478  Context getEmptyContext() {
return ContextFactory.getEmptyMap(); }
  // Advance CtxIndex while the next saved context belongs to statement S.
483  Context getNextContext(
unsigned &CtxIndex,
const Stmt *S, Context
C) {
484    if (SavedContexts[CtxIndex+1].first == S) {
486    Context
Result = SavedContexts[CtxIndex].second;
  // Debug dump helpers (write to llvm::errs()).
492  void dumpVarDefinitionName(
unsigned i) {
494      llvm::errs() <<
"Undefined";
497    const NamedDecl *
Dec = VarDefinitions[i].Dec;
499      llvm::errs() <<
"<<NULL>>";
502    Dec->printName(llvm::errs());
503    llvm::errs() <<
"." << i <<
" " << ((
const void*) Dec);
508    for (
unsigned i = 1, e = VarDefinitions.size(); i < e; ++i) {
509      const Expr *Exp = VarDefinitions[i].Exp;
510      unsigned Ref = VarDefinitions[i].Ref;
512      dumpVarDefinitionName(i);
513      llvm::errs() <<
" = ";
514      if (Exp) Exp->
dump();
516        dumpVarDefinitionName(Ref);
517        llvm::errs() <<
"\n";
523  void dumpContext(Context
C) {
524    for (Context::iterator I =
C.begin(), E =
C.end(); I != E; ++I) {
525      const NamedDecl *D = I.getKey();
527      llvm::errs() <<
" -> ";
528      dumpVarDefinitionName(I.getData());
529      llvm::errs() <<
"\n";
534  void traverseCFG(CFG *CFGraph,
const PostOrderCFGView *SortedGraph,
535                   std::vector<CFGBlockInfo> &BlockInfo);
538  friend class VarMapBuilder;
541  unsigned getContextIndex() {
return SavedContexts.size()-1; }
544  void saveContext(
const Stmt *S, Context
C) {
545    SavedContexts.push_back(std::make_pair(S,
C));
  // Add a fresh definition D := Exp; precondition: D not already in Ctx.
550  Context addDefinition(
const NamedDecl *D,
const Expr *Exp, Context Ctx) {
551    assert(!Ctx.contains(D));
552    unsigned newID = VarDefinitions.size();
553    Context NewCtx = ContextFactory.add(Ctx, D, newID);
554    VarDefinitions.push_back(VarDefinition(D, Exp, Ctx));
  // Add a reference definition D := VarDefinitions[i] (used at joins).
559  Context addReference(
const NamedDecl *D,
unsigned i, Context Ctx) {
560    unsigned newID = VarDefinitions.size();
561    Context NewCtx = ContextFactory.add(Ctx, D, newID);
562    VarDefinitions.push_back(VarDefinition(D, i, Ctx));
  // Re-define an existing variable after an assignment.
568  Context updateDefinition(
const NamedDecl *D, Expr *Exp, Context Ctx) {
569    if (Ctx.contains(D)) {
570      unsigned newID = VarDefinitions.size();
571      Context NewCtx = ContextFactory.remove(Ctx, D);
572      NewCtx = ContextFactory.add(NewCtx, D, newID);
573      VarDefinitions.push_back(VarDefinition(D, Exp, Ctx));
  // Mark D as undefined (index 0) without removing it from the context.
581  Context clearDefinition(
const NamedDecl *D, Context Ctx) {
582    Context NewCtx = Ctx;
583    if (NewCtx.contains(D)) {
584      NewCtx = ContextFactory.remove(NewCtx, D);
585      NewCtx = ContextFactory.add(NewCtx, D, 0);
  // Remove D from the context entirely.
591  Context removeDefinition(
const NamedDecl *D, Context Ctx) {
592    Context NewCtx = Ctx;
593    if (NewCtx.contains(D)) {
594      NewCtx = ContextFactory.remove(NewCtx, D);
599  Context intersectContexts(Context C1, Context C2);
600  Context createReferenceContext(Context
C);
601  void intersectBackEdge(Context C1, Context C2);
// Factory: a CFGBlockInfo whose entry/exit contexts are M's empty context.
// NOTE(review): closing brace lost to extraction.
607CFGBlockInfo CFGBlockInfo::getEmptyBlockInfo(LocalVariableMap &M) {
608  return CFGBlockInfo(M.getEmptyContext());
// VarMapBuilder: statement visitor that populates a LocalVariableMap while
// walking one CFG block, threading the current Context through.
614class VarMapBuilder :
public ConstStmtVisitor<VarMapBuilder> {
616  LocalVariableMap* VMap;
617  LocalVariableMap::Context Ctx;
619  VarMapBuilder(LocalVariableMap *VM, LocalVariableMap::Context
C)
620      : VMap(VM), Ctx(
C) {}
  // Handlers for the two statement kinds that (re)define locals.
622  void VisitDeclStmt(
const DeclStmt *S);
623  void VisitBinaryOperator(
const BinaryOperator *BO);
// Record definitions for variables declared with an initializer.
// NOTE(review): fragment — the DGrp initialization and several lines are
// missing from this extraction.
629void VarMapBuilder::VisitDeclStmt(
const DeclStmt *S) {
630  bool modifiedCtx =
false;
632  for (
const auto *D : DGrp) {
633    if (
const auto *VD = dyn_cast_or_null<VarDecl>(D)) {
634      const Expr *E = VD->getInit();
      // Only trivially-typed locals are tracked (their value is just E).
637      QualType
T = VD->getType();
638      if (
T.isTrivialType(VD->getASTContext())) {
639        Ctx = VMap->addDefinition(VD, E, Ctx);
645    VMap->saveContext(S, Ctx);
// Update (or invalidate) a tracked variable's definition on assignment.
// NOTE(review): fragment — the assignment-opcode guard and LHSExp setup are
// missing from this extraction.
649void VarMapBuilder::VisitBinaryOperator(
const BinaryOperator *BO) {
656  if (
const auto *DRE = dyn_cast<DeclRefExpr>(LHSExp)) {
657    const ValueDecl *VDec = DRE->getDecl();
658    if (Ctx.lookup(VDec)) {
        // Plain assignment: re-define; compound assignment: clear.
660        Ctx = VMap->updateDefinition(VDec, BO->
getRHS(), Ctx);
663        Ctx = VMap->clearDefinition(VDec, Ctx);
664      VMap->saveContext(BO, Ctx);
// Intersect two contexts at a CFG join: keep a variable only when both sides
// agree on its definition index. NOTE(review): fragment — result construction
// and closing brace are missing.
672LocalVariableMap::Context
673LocalVariableMap::intersectContexts(Context C1, Context C2) {
675  for (
const auto &P : C1) {
676    const NamedDecl *
Dec = P.first;
677    const unsigned *i2 = C2.lookup(Dec);
680    else if (*i2 != P.second)
// Build a context of reference definitions mirroring C — used at loop heads
// so back edges can later be reconciled. NOTE(review): loop body lost to
// extraction.
689LocalVariableMap::Context LocalVariableMap::createReferenceContext(Context
C) {
690  Context
Result = getEmptyContext();
691  for (
const auto &P :
C)
// Reconcile a loop back edge: C1 holds reference definitions created at the
// loop head; invalidate those the loop body redefined. NOTE(review): the
// invalidation statement following the condition is missing here.
699void LocalVariableMap::intersectBackEdge(Context C1, Context C2) {
700  for (
const auto &P : C1) {
701    unsigned i1 = P.second;
702    VarDefinition *VDef = &VarDefinitions[i1];
703    assert(VDef->isReference());
705    const unsigned *i2 = C2.lookup(P.first);
706    if (!i2 || (*i2 != i1))
// Walk the CFG in topological order, computing entry/exit variable contexts
// per block: intersect predecessors' exit contexts, run VarMapBuilder over
// the block's statements, then reconcile loop back edges.
// NOTE(review): extraction fragment — several statements (first-pred flag,
// predecessor-iterator setup, exitID computation, closing braces) missing.
748void LocalVariableMap::traverseCFG(CFG *CFGraph,
749                                   const PostOrderCFGView *SortedGraph,
750                                   std::vector<CFGBlockInfo> &BlockInfo) {
751  PostOrderCFGView::CFGBlockSet VisitedBlocks(CFGraph);
753  for (
const auto *CurrBlock : *SortedGraph) {
754    unsigned CurrBlockID = CurrBlock->getBlockID();
755    CFGBlockInfo *CurrBlockInfo = &BlockInfo[CurrBlockID];
757    VisitedBlocks.insert(CurrBlock);
    // Join step: fold every already-visited predecessor's exit context
    // into this block's entry context.
760    bool HasBackEdges =
false;
763         PE  = CurrBlock->pred_end(); PI != PE; ++PI) {
      // Skip unreachable/not-yet-visited preds (back edges handled later).
765      if (*PI ==
nullptr || !VisitedBlocks.alreadySet(*PI)) {
770      unsigned PrevBlockID = (*PI)->getBlockID();
771      CFGBlockInfo *PrevBlockInfo = &BlockInfo[PrevBlockID];
774        CurrBlockInfo->EntryContext = PrevBlockInfo->ExitContext;
778        CurrBlockInfo->EntryContext =
779          intersectContexts(CurrBlockInfo->EntryContext,
780                            PrevBlockInfo->ExitContext);
    // Loop heads get reference definitions so back edges can be patched.
787      CurrBlockInfo->EntryContext =
788        createReferenceContext(CurrBlockInfo->EntryContext);
791    saveContext(
nullptr, CurrBlockInfo->EntryContext);
792    CurrBlockInfo->EntryIndex = getContextIndex();
    // Transfer step: visit each statement in the block.
795    VarMapBuilder VMapBuilder(
this, CurrBlockInfo->EntryContext);
796    for (
const auto &BI : *CurrBlock) {
797      switch (BI.getKind()) {
799          CFGStmt CS = BI.castAs<CFGStmt>();
800          VMapBuilder.Visit(CS.
getStmt());
807    CurrBlockInfo->ExitContext = VMapBuilder.Ctx;
    // Back-edge step: reconcile successors that were visited before us.
811         SE  = CurrBlock->succ_end(); SI != SE; ++SI) {
813      if (*SI ==
nullptr || !VisitedBlocks.alreadySet(*SI))
816      CFGBlock *FirstLoopBlock = *SI;
817      Context LoopBegin = BlockInfo[FirstLoopBlock->
getBlockID()].EntryContext;
818      Context LoopEnd   = CurrBlockInfo->ExitContext;
819      intersectBackEdge(LoopBegin, LoopEnd);
825  saveContext(
nullptr, BlockInfo[exitID].ExitContext);
// Presumably findBlockLocations (the signature's first lines are missing):
// assigns an entry and exit SourceLocation to every CFG block, falling back
// to the terminator, the first/last statement, or a neighbor block's
// location for empty blocks. NOTE(review): extraction fragment — function
// header and several braces missing; confirm name against full source.
832                               std::vector<CFGBlockInfo> &BlockInfo) {
833  for (
const auto *CurrBlock : *SortedGraph) {
834    CFGBlockInfo *CurrBlockInfo = &BlockInfo[CurrBlock->getBlockID()];
    // Prefer the terminator's location when the block has one.
838    if (
const Stmt *S = CurrBlock->getTerminatorStmt()) {
839      CurrBlockInfo->EntryLoc = CurrBlockInfo->ExitLoc = S->
getBeginLoc();
      // Otherwise scan backwards for the last statement's location.
842           BE = CurrBlock->rend(); BI != BE; ++BI) {
844        if (std::optional<CFGStmt> CS = BI->getAs<
CFGStmt>()) {
845          CurrBlockInfo->ExitLoc = CS->getStmt()->getBeginLoc();
851    if (CurrBlockInfo->ExitLoc.
isValid()) {
      // Forward scan for the first statement's location.
854      for (
const auto &BI : *CurrBlock) {
856        if (std::optional<CFGStmt> CS = BI.getAs<
CFGStmt>()) {
857          CurrBlockInfo->EntryLoc = CS->getStmt()->getBeginLoc();
861    }
else if (CurrBlock->pred_size() == 1 && *CurrBlock->pred_begin() &&
862               CurrBlock != &CFGraph->
getExit()) {
      // Empty block with a unique predecessor: inherit its exit location.
865      CurrBlockInfo->EntryLoc = CurrBlockInfo->ExitLoc =
866          BlockInfo[(*CurrBlock->pred_begin())->getBlockID()].ExitLoc;
867    }
else if (CurrBlock->succ_size() == 1 && *CurrBlock->succ_begin()) {
      // Empty block with a unique successor: inherit its entry location.
870      CurrBlockInfo->EntryLoc = CurrBlockInfo->ExitLoc =
871          BlockInfo[(*CurrBlock->succ_begin())->getBlockID()].EntryLoc;
// LockableFactEntry: a plain (non-scoped) held capability, with support for
// reentrant acquisition via a depth counter.
// NOTE(review): extraction fragment — several statements, diagnostics calls,
// and closing braces are missing throughout.
878class LockableFactEntry final :
public FactEntry {
  // Number of nested re-acquisitions beyond the first.
883  unsigned int ReentrancyDepth = 0;
885  LockableFactEntry(
const CapabilityExpr &CE,
LockKind LK, SourceLocation Loc,
887      : FactEntry(Lockable, CE, LK, Loc, Src) {}
  // Arena factories (allocation lives in FactManager's BumpPtrAllocator).
890  static LockableFactEntry *
create(llvm::BumpPtrAllocator &Alloc,
891                                   const LockableFactEntry &
Other) {
895  static LockableFactEntry *
create(llvm::BumpPtrAllocator &Alloc,
896                                   const CapabilityExpr &CE,
LockKind LK,
898                                   SourceKind Src = Acquired) {
899    return new (
Alloc) LockableFactEntry(CE, LK, Loc, Src);
902  unsigned int getReentrancyDepth()
const {
return ReentrancyDepth; }
  // Warn when a (non-asserted, non-negative, non-universal) lock is lost at
  // a CFG join.
905  handleRemovalFromIntersection(
const FactSet &FSet, FactManager &FactMan,
907                                ThreadSafetyHandler &Handler)
const override {
908    if (!asserted() && !negative() && !isUniversal()) {
  // Re-acquisition: bump the reentrancy depth if allowed, else diagnose.
914  void handleLock(FactSet &FSet, FactManager &FactMan,
const FactEntry &entry,
915                  ThreadSafetyHandler &Handler)
const override {
916    if (
const FactEntry *RFact = tryReenter(FactMan, entry.kind())) {
918      FSet.replaceLock(FactMan, entry, RFact);
  // Release: drop one reentrancy level, or remove the fact and record the
  // corresponding negative capability.
925  void handleUnlock(FactSet &FSet, FactManager &FactMan,
926                    const CapabilityExpr &Cp, SourceLocation UnlockLoc,
928                    ThreadSafetyHandler &Handler)
const override {
929    FSet.removeLock(FactMan, Cp);
931    if (
const FactEntry *RFact = leaveReentrant(FactMan)) {
933      FSet.addLock(FactMan, RFact);
935      FSet.addLock(FactMan, FactMan.createFact<LockableFactEntry>(
  // Returns a copy with depth+1 if this lock is reentrant with ReenterKind,
  // otherwise nullptr.
942  const FactEntry *tryReenter(FactManager &FactMan,
946    if (
kind() != ReenterKind)
948    auto *NewFact = FactMan.createFact<LockableFactEntry>(*this);
949    NewFact->ReentrancyDepth++;
  // Returns a copy with depth-1 if currently reentered, otherwise nullptr.
955  const FactEntry *leaveReentrant(FactManager &FactMan)
const {
956    if (!ReentrancyDepth)
959    auto *NewFact = FactMan.createFact<LockableFactEntry>(*this);
960    NewFact->ReentrancyDepth--;
964  static bool classof(
const FactEntry *A) {
965    return A->getFactEntryKind() == Lockable;
// How a scoped object relates to one of its underlying capabilities.
// NOTE(review): some enumerators are missing from this extraction fragment.
969enum UnderlyingCapabilityKind {
972  UCK_ReleasedExclusive,
975struct UnderlyingCapability {
977  UnderlyingCapabilityKind Kind;
// ScopedLockableFactEntry: a RAII-style scoped object (e.g. a lock guard)
// that manages a set of underlying capabilities, stored inline as trailing
// objects. NOTE(review): extraction fragment — many statements/braces lost.
980class ScopedLockableFactEntry final
982      private llvm::TrailingObjects<ScopedLockableFactEntry,
983                                    UnderlyingCapability> {
984  friend TrailingObjects;
  // Fixed capacity chosen at creation; ManagedSize grows up to it.
987  const unsigned ManagedCapacity;
988  unsigned ManagedSize = 0;
990  ScopedLockableFactEntry(
const CapabilityExpr &CE, SourceLocation Loc,
991                          SourceKind Src,
unsigned ManagedCapacity)
992      : FactEntry(ScopedLockable, CE,
LK_Exclusive, Loc, Src),
993        ManagedCapacity(ManagedCapacity) {}
  // Placement-construct one managed capability in the trailing storage.
995  void addManaged(
const CapabilityExpr &M, UnderlyingCapabilityKind UCK) {
996    assert(ManagedSize < ManagedCapacity);
997    new (getTrailingObjects() + ManagedSize) UnderlyingCapability{M, UCK};
1001  ArrayRef<UnderlyingCapability> getManaged()
const {
1002    return getTrailingObjects(ManagedSize);
  // Factory: allocates object + trailing array in one arena allocation.
1006  static ScopedLockableFactEntry *
create(llvm::BumpPtrAllocator &Alloc,
1007                                         const CapabilityExpr &CE,
1008                                         SourceLocation Loc, SourceKind Src,
1009                                         unsigned ManagedCapacity) {
1011        Alloc.Allocate(totalSizeToAlloc<UnderlyingCapability>(ManagedCapacity),
1012                       alignof(ScopedLockableFactEntry));
1013    return new (
Storage) ScopedLockableFactEntry(CE, Loc, Src, ManagedCapacity);
1016  CapExprSet getUnderlyingMutexes()
const {
1017    CapExprSet UnderlyingMutexesSet;
1018    for (
const UnderlyingCapability &UnderlyingMutex : getManaged())
1019      UnderlyingMutexesSet.push_back(UnderlyingMutex.Cap);
1020    return UnderlyingMutexesSet;
  // Record how the scope interacts with each capability.
1027  void addLock(
const CapabilityExpr &M) { addManaged(M, UCK_Acquired); }
1029  void addExclusiveUnlock(
const CapabilityExpr &M) {
1030    addManaged(M, UCK_ReleasedExclusive);
1033  void addSharedUnlock(
const CapabilityExpr &M) {
1034    addManaged(M, UCK_ReleasedShared);
  // At a CFG join, diagnose underlying capabilities whose held/released
  // state is inconsistent with what the scope expects.
1039  handleRemovalFromIntersection(
const FactSet &FSet, FactManager &FactMan,
1041                                ThreadSafetyHandler &Handler)
const override {
1045    for (
const auto &UnderlyingMutex : getManaged()) {
1046      const auto *Entry = FSet.findLock(FactMan, UnderlyingMutex.Cap);
1047      if ((UnderlyingMutex.Kind == UCK_Acquired && Entry) ||
1048          (UnderlyingMutex.Kind != UCK_Acquired && !Entry)) {
1052                                       UnderlyingMutex.Cap.toString(), loc(),
  // Re-locking the scope: acquire the acquired set, release the released set.
1058  void handleLock(FactSet &FSet, FactManager &FactMan,
const FactEntry &entry,
1059                  ThreadSafetyHandler &Handler)
const override {
1060    for (
const auto &UnderlyingMutex : getManaged()) {
1061      if (UnderlyingMutex.Kind == UCK_Acquired)
1062        lock(FSet, FactMan, UnderlyingMutex.Cap, entry.kind(), entry.loc(),
1065        unlock(FSet, FactMan, UnderlyingMutex.Cap, entry.loc(), &Handler);
  // Destroying/unlocking the scope: invert the effect of handleLock; when
  // FullyRemove, diagnostics are suppressed (TSHandler == nullptr).
1069  void handleUnlock(FactSet &FSet, FactManager &FactMan,
1070                    const CapabilityExpr &Cp, SourceLocation UnlockLoc,
1072                    ThreadSafetyHandler &Handler)
const override {
1073    assert(!Cp.
negative() &&
"Managing object cannot be negative.");
1074    for (
const auto &UnderlyingMutex : getManaged()) {
1077      ThreadSafetyHandler *TSHandler = FullyRemove ?
nullptr : &Handler;
1078      if (UnderlyingMutex.Kind == UCK_Acquired) {
1079        unlock(FSet, FactMan, UnderlyingMutex.Cap, UnlockLoc, TSHandler);
1081        LockKind kind = UnderlyingMutex.Kind == UCK_ReleasedShared
1084        lock(FSet, FactMan, UnderlyingMutex.Cap, kind, UnlockLoc, TSHandler);
1088      FSet.removeLock(FactMan, Cp);
1091  static bool classof(
const FactEntry *A) {
1092    return A->getFactEntryKind() == ScopedLockable;
  // Acquire helper: reenter if already held, else remove the negative fact
  // and add a fresh LockableFactEntry.
1096  void lock(FactSet &FSet, FactManager &FactMan,
const CapabilityExpr &Cp,
1098            ThreadSafetyHandler *Handler)
const {
1099    if (
const auto It = FSet.findLockIter(FactMan, Cp); It != FSet.end()) {
1101      if (
const FactEntry *RFact = Fact.tryReenter(FactMan, kind)) {
1103        FSet.replaceLock(FactMan, It, RFact);
1104      }
else if (Handler) {
1108      FSet.removeLock(FactMan, !Cp);
1109      FSet.addLock(FactMan, FactMan.createFact<LockableFactEntry>(Cp, kind, loc,
  // Release helper: leave one reentrancy level or fully release, recording
  // the negative capability; diagnose double-unlock via Handler.
1114  void unlock(FactSet &FSet, FactManager &FactMan,
const CapabilityExpr &Cp,
1115              SourceLocation loc, ThreadSafetyHandler *Handler)
const {
1116    if (
const auto It = FSet.findLockIter(FactMan, Cp); It != FSet.end()) {
1118      if (
const FactEntry *RFact = Fact.leaveReentrant(FactMan)) {
1120        FSet.replaceLock(FactMan, It, RFact);
1126                   FactMan.createFact<LockableFactEntry>(!Cp,
LK_Exclusive, loc));
1127    }
else if (Handler) {
1128      SourceLocation PrevLoc;
1129      if (
const FactEntry *Neg = FSet.findLock(FactMan, !Cp))
1130        PrevLoc =
Neg->loc();
// ThreadSafetyAnalyzer: top-level driver for the thread-safety (capability)
// analysis of a single function; owns the allocator, fact manager, per-block
// state, and diagnostic handler. NOTE(review): extraction fragment — some
// members/parameters are missing mid-declaration.
1137class ThreadSafetyAnalyzer {
1138  friend class BuildLockset;
1139  friend class threadSafety::BeforeSet;
  // Arena backing both TIL expressions and fact entries.
1141  llvm::BumpPtrAllocator Bpa;
1142  threadSafety::til::MemRegionRef Arena;
1143  threadSafety::SExprBuilder SxBuilder;
1145  ThreadSafetyHandler &Handler;
1146  const FunctionDecl *CurrentFunction;
1147  LocalVariableMap LocalVarMap;
1149  llvm::SmallDenseMap<const Expr *, til::LiteralPtr *> ConstructedObjects;
1150  FactManager FactMan;
1151  std::vector<CFGBlockInfo> BlockInfo;
  // Shared acquired-before graph (owned by the caller).
1153  BeforeSet *GlobalBeforeSet;
1156  ThreadSafetyAnalyzer(ThreadSafetyHandler &H, BeforeSet *Bset)
1157      : Arena(&Bpa), SxBuilder(Arena), Handler(H), FactMan(Bpa),
1158        GlobalBeforeSet(Bset) {}
1160  bool inCurrentScope(
const CapabilityExpr &CapE);
1162  void addLock(FactSet &FSet,
const FactEntry *Entry,
bool ReqAttr =
false);
1163  void removeLock(FactSet &FSet,
const CapabilityExpr &CapE,
1164                  SourceLocation UnlockLoc,
bool FullyRemove,
LockKind Kind);
  // Collect capability expressions named by an attribute; the second
  // overload handles try-acquire success branches.
1166  template <
typename AttrType>
1167  void getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
const Expr *Exp,
1168                   const NamedDecl *D, til::SExpr *
Self =
nullptr);
1170  template <
class AttrType>
1171  void getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
const Expr *Exp,
1173                   const CFGBlock *PredBlock,
const CFGBlock *CurrBlock,
1174                   Expr *BrE,
bool Neg);
1176  const CallExpr* getTrylockCallExpr(
const Stmt *
Cond, LocalVarContext
C,
1179  void getEdgeLockset(FactSet &
Result,
const FactSet &ExitSet,
1180                      const CFGBlock* PredBlock,
1181                      const CFGBlock *CurrBlock);
1183  bool join(
const FactEntry &A,
const FactEntry &B, SourceLocation JoinLoc,
1186  void intersectAndWarn(FactSet &EntrySet,
const FactSet &ExitSet,
  // Convenience overload: same LEK for both sides of the join.
1190  void intersectAndWarn(FactSet &EntrySet,
const FactSet &ExitSet,
1192    intersectAndWarn(EntrySet, ExitSet, JoinLoc, LEK, LEK);
1195  void runAnalysis(AnalysisDeclContext &AC);
1197  void warnIfMutexNotHeld(
const FactSet &FSet,
const NamedDecl *D,
1198                          const Expr *Exp,
AccessKind AK, Expr *MutexExp,
1200                          SourceLocation Loc);
1201  void warnIfMutexHeld(
const FactSet &FSet,
const NamedDecl *D,
const Expr *Exp,
1202                       Expr *MutexExp, til::LiteralPtr *
Self,
1203                       SourceLocation Loc);
1205  void checkAccess(
const FactSet &FSet,
const Expr *Exp,
AccessKind AK,
1207  void checkPtAccess(
const FactSet &FSet,
const Expr *Exp,
AccessKind AK,
// Presumably BeforeSet::insertAttrExprs (signature head missing): builds the
// acquired-before graph for Vd from its AcquiredBefore/AcquiredAfter
// attributes. NOTE(review): extraction fragment — confirm name and the
// missing attribute-cast lines against the full source.
1215                                        ThreadSafetyAnalyzer& Analyzer) {
1217  BeforeInfo *Info =
nullptr;
  // Get-or-create this declaration's BeforeInfo node.
1221    std::unique_ptr<BeforeInfo> &InfoPtr = BMap[Vd];
1223      InfoPtr.reset(
new BeforeInfo());
1224    Info = InfoPtr.get();
1227  for (
const auto *At : Vd->
attrs()) {
1228    switch (At->getKind()) {
1229      case attr::AcquiredBefore: {
        // Vd is acquired before each argument: edge Vd -> Arg.
1233        for (
const auto *Arg : A->args()) {
1235              Analyzer.SxBuilder.translateAttrExpr(Arg,
nullptr);
1237            Info->Vect.push_back(Cpvd);
1238            const auto It = BMap.find(Cpvd);
1239            if (It == BMap.end())
1245      case attr::AcquiredAfter: {
        // Vd is acquired after each argument: edge Arg -> Vd.
1249        for (
const auto *Arg : A->args()) {
1251              Analyzer.SxBuilder.translateAttrExpr(Arg,
nullptr);
1255            ArgInfo->Vect.push_back(Vd);
// Lazily build (via the attribute scan) or fetch the BeforeInfo for Vd.
// NOTE(review): fragment — the function name line and the build call are
// missing from this extraction.
1268BeforeSet::BeforeInfo *
1270                                                ThreadSafetyAnalyzer &Analyzer) {
1271  auto It = BMap.find(Vd);
1272  BeforeInfo *Info =
nullptr;
1273  if (It == BMap.end())
1276    Info = It->second.get();
1277  assert(Info &&
"BMap contained nullptr?");
// Presumably BeforeSet::checkBeforeAfter (signature head missing): DFS over
// the acquired-before graph from StartVd, warning when a lock that should be
// acquired later is already held, and detecting cycles via Visited marks.
// NOTE(review): extraction fragment — the recursive lambda/driver and
// several statements are missing; confirm against the full source.
1283                                 const FactSet& FSet,
1284                                 ThreadSafetyAnalyzer& Analyzer,
    // Visited == 1: on the current DFS path; == 2: fully processed.
1296      if (Info->Visited == 1)
1299      if (Info->Visited == 2)
1302      if (Info->Vect.empty())
1305      InfoVect.push_back(Info);
1307      for (
const auto *Vdb : Info->Vect) {
        // Wrong order: a successor in the before-graph is already held.
1309        if (FSet.containsMutexDecl(Analyzer.FactMan, Vdb)) {
1310          StringRef L1 = StartVd->
getName();
1311          StringRef L2 = Vdb->getName();
1312          Analyzer.Handler.handleLockAcquiredBefore(CapKind, L1, L2, Loc);
        // Report each before/after cycle only once per declaration.
1316        if (CycMap.try_emplace(Vd,
true).second) {
1318          Analyzer.Handler.handleBeforeAfterCycle(L1, Vd->
getLocation());
  // Reset Visited marks for the next query.
1328  for (
auto *Info : InfoVect)
// Fragment of a helper (presumably getValueDecl): strip an implicit cast,
// then extract the ValueDecl from a DeclRefExpr or MemberExpr.
// NOTE(review): function header and fall-through return are missing.
1334  if (
const auto *CE = dyn_cast<ImplicitCastExpr>(Exp))
1337  if (
const auto *DR = dyn_cast<DeclRefExpr>(Exp))
1338    return DR->getDecl();
1340  if (
const auto *ME = dyn_cast<MemberExpr>(Exp))
1341    return ME->getMemberDecl();
// Decide whether CapE refers to a capability visible in the scope of the
// function currently being analyzed (a local/global literal, or a member of
// the current method's class). NOTE(review): extraction fragment — the
// decision returns between these checks are missing.
1346bool ThreadSafetyAnalyzer::inCurrentScope(
const CapabilityExpr &CapE) {
1347  const threadSafety::til::SExpr *SExp = CapE.
sexpr();
1348  assert(SExp &&
"Null expressions should be ignored");
1350  if (
const auto *LP = dyn_cast<til::LiteralPtr>(SExp)) {
1351    const ValueDecl *VD = LP->clangDecl();
  // Member projection: only meaningful inside a method of the same class.
1363  if (
const auto *P = dyn_cast<til::Project>(SExp)) {
1364    if (!isa_and_nonnull<CXXMethodDecl>(CurrentFunction))
1366    const ValueDecl *VD = P->clangDecl();
// Add a fact to FSet: clear a matching negative capability, emit the
// appropriate diagnostics (unless ReqAttr), and handle double-acquire via
// the existing entry's handleLock. NOTE(review): extraction fragment —
// diagnostic calls and several braces are missing.
1375void ThreadSafetyAnalyzer::addLock(FactSet &FSet,
const FactEntry *Entry,
1377  if (Entry->shouldIgnore())
1380  if (!ReqAttr && !Entry->negative()) {
    // Acquiring Cp cancels any recorded !Cp fact.
1382    CapabilityExpr NegC = !*Entry;
1383    const FactEntry *Nen = FSet.findLock(FactMan, NegC);
1385      FSet.removeLock(FactMan, NegC);
1388      if (inCurrentScope(*Entry) && !Entry->asserted() && !Entry->reentrant())
  // Diagnose acquisitions the handler should see (not asserted/declared).
1395  if (!Entry->asserted() && !Entry->declared()) {
1397                                      Entry->loc(), Entry->getKind());
  // Already held: delegate (reentrancy or double-lock warning).
1400  if (
const FactEntry *Cp = FSet.findLock(FactMan, *Entry)) {
1401    if (!Entry->asserted())
1402      Cp->handleLock(FSet, FactMan, *Entry, Handler);
1404    FSet.addLock(FactMan, Entry);
// Release a capability: warn on unlocking a lock that isn't held (including
// the "previously released here" note via the negative fact) and on
// releasing with the wrong lock kind, then delegate to handleUnlock.
// NOTE(review): extraction fragment — the handler calls and early return
// are missing.
1410void ThreadSafetyAnalyzer::removeLock(FactSet &FSet,
const CapabilityExpr &Cp,
1411                                      SourceLocation UnlockLoc,
1412                                      bool FullyRemove,
LockKind ReceivedKind) {
1416  const FactEntry *LDat = FSet.findLock(FactMan, Cp);
1418    SourceLocation PrevLoc;
1419    if (
const FactEntry *Neg = FSet.findLock(FactMan, !Cp))
1420      PrevLoc =
Neg->loc();
  // Generic release matches any kind; otherwise kinds must agree.
1428  if (ReceivedKind !=
LK_Generic && LDat->kind() != ReceivedKind) {
1430                                      ReceivedKind, LDat->loc(), UnlockLoc);
1433  LDat->handleUnlock(FSet, FactMan, Cp, UnlockLoc, FullyRemove, Handler);
// Collect the capabilities named by an attribute's arguments into Mtxs
// (de-duplicated); an attribute with no arguments refers to the receiver
// itself. NOTE(review): extraction fragment — the translateAttrExpr calls
// and validity checks are missing.
1438template <
typename AttrType>
1439void ThreadSafetyAnalyzer::getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
1440                                       const Expr *Exp,
const NamedDecl *D,
1442  if (Attr->args_size() == 0) {
1451    Mtxs.push_back_nodup(Cp);
1455  for (
const auto *Arg : Attr->args()) {
1463      Mtxs.push_back_nodup(Cp);
// Branch-sensitive overload for try-acquire: determine which CFG successor
// corresponds to the attribute's success value (BrE), optionally inverted by
// Neg, and collect the capabilities only along that edge.
// NOTE(review): extraction fragment — successor-iterator setup and closing
// braces are missing.
1470template <
class AttrType>
1471void ThreadSafetyAnalyzer::getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
1472                                       const Expr *Exp,
const NamedDecl *D,
1473                                       const CFGBlock *PredBlock,
1474                                       const CFGBlock *CurrBlock,
1475                                       Expr *BrE,
bool Neg) {
  // Success value may be a bool or integer literal on the attribute.
1477  bool branch =
false;
1478  if (
const auto *BLE = dyn_cast_or_null<CXXBoolLiteralExpr>(BrE))
1479    branch = BLE->getValue();
1480  else if (
const auto *ILE = dyn_cast_or_null<IntegerLiteral>(BrE))
1481    branch = ILE->getValue().getBoolValue();
  // Successor 0 is the "true" branch; flip for negated conditions.
1483  int branchnum = branch ? 0 : 1;
1485    branchnum = !branchnum;
1490       SE = PredBlock->
succ_end(); SI != SE && i < 2; ++SI, ++i) {
1491    if (*SI == CurrBlock && i == branchnum)
1492      getMutexIDs(Mtxs, Attr, Exp, D);
1500 }
else if (
const auto *BLE = dyn_cast<CXXBoolLiteralExpr>(E)) {
1501 TCond = BLE->getValue();
1503 }
else if (
const auto *ILE = dyn_cast<IntegerLiteral>(E)) {
1504 TCond = ILE->getValue().getBoolValue();
1506 }
else if (
auto *CE = dyn_cast<ImplicitCastExpr>(E))
// Walk a branch condition to find the try-lock call it tests, tracking
// whether the result is negated (Negate). Handles __builtin_expect, parens,
// implicit casts, full-exprs, variables bound to the call (via
// LocalVarMap), logical not, ==/!= against constants, &&/||, and ?:.
// NOTE(review): extraction fragment — several guards, returns, and braces
// are missing throughout.
1514const CallExpr* ThreadSafetyAnalyzer::getTrylockCallExpr(
const Stmt *
Cond,
1520  if (
const auto *CallExp = dyn_cast<CallExpr>(
Cond)) {
    // __builtin_expect(x, c) tests x.
1521    if (CallExp->getBuiltinCallee() == Builtin::BI__builtin_expect)
1522      return getTrylockCallExpr(CallExp->getArg(0),
C, Negate);
1525  else if (
const auto *PE = dyn_cast<ParenExpr>(
Cond))
1526    return getTrylockCallExpr(PE->getSubExpr(),
C, Negate);
1527  else if (
const auto *CE = dyn_cast<ImplicitCastExpr>(
Cond))
1528    return getTrylockCallExpr(CE->getSubExpr(),
C, Negate);
1529  else if (
const auto *FE = dyn_cast<FullExpr>(
Cond))
1530    return getTrylockCallExpr(FE->getSubExpr(),
C, Negate);
1531  else if (
const auto *DRE = dyn_cast<DeclRefExpr>(
Cond)) {
    // A variable: chase its defining expression in the local-variable map.
1532    const Expr *E = LocalVarMap.lookupExpr(DRE->getDecl(),
C);
1533    return getTrylockCallExpr(E,
C, Negate);
1535  else if (
const auto *UOP = dyn_cast<UnaryOperator>(
Cond)) {
    // !x flips the branch sense.
1536    if (UOP->getOpcode() == UO_LNot) {
1538      return getTrylockCallExpr(UOP->getSubExpr(),
C, Negate);
1542  else if (
const auto *BOP = dyn_cast<BinaryOperator>(
Cond)) {
    // x == c / x != c: compare against a constant bool; != flips sense.
1543    if (BOP->getOpcode() == BO_EQ || BOP->getOpcode() == BO_NE) {
1544      if (BOP->getOpcode() == BO_NE)
1549        if (!TCond) Negate = !Negate;
1550        return getTrylockCallExpr(BOP->getLHS(),
C, Negate);
1554        if (!TCond) Negate = !Negate;
1555        return getTrylockCallExpr(BOP->getRHS(),
C, Negate);
    // For && / ||, only the RHS decides the edge being taken.
1559    if (BOP->getOpcode() == BO_LAnd) {
1561      return getTrylockCallExpr(BOP->getRHS(),
C, Negate);
1563    if (BOP->getOpcode() == BO_LOr)
1564      return getTrylockCallExpr(BOP->getRHS(),
C, Negate);
1566  }
else if (
const auto *COP = dyn_cast<ConditionalOperator>(
Cond)) {
    // cond ? true : false (or inverted) reduces to cond itself.
1570    if (TCond && !FCond)
1571      return getTrylockCallExpr(COP->getCond(),
C, Negate);
1572    if (!TCond && FCond) {
1574      return getTrylockCallExpr(COP->getCond(),
C, Negate);
// Compute the fact set on the edge PredBlock -> CurrBlock: start from the
// predecessor's exit set and, if the edge's condition tests a function with
// TRY_ACQUIRE attributes, add the locks acquired on the success branch.
// NOTE(review): extraction fragment — Result initialization, Cond
// extraction, and early returns are missing.
1584void ThreadSafetyAnalyzer::getEdgeLockset(FactSet&
Result,
1585                                          const FactSet &ExitSet,
1586                                          const CFGBlock *PredBlock,
1587                                          const CFGBlock *CurrBlock) {
1595  bool Negate =
false;
1596  const CFGBlockInfo *PredBlockInfo = &BlockInfo[PredBlock->
getBlockID()];
1597  const LocalVarContext &LVarCtx = PredBlockInfo->ExitContext;
1599  const auto *Exp = getTrylockCallExpr(
Cond, LVarCtx, Negate);
1603  auto *FunDecl = dyn_cast_or_null<NamedDecl>(Exp->getCalleeDecl());
1604  if (!FunDecl || !FunDecl->hasAttr<TryAcquireCapabilityAttr>())
1607  CapExprSet ExclusiveLocksToAdd;
1608  CapExprSet SharedLocksToAdd;
  // Route each try-acquire attribute's capabilities to the right set,
  // respecting the edge taken and the attribute's success value.
1611  for (
const auto *Attr : FunDecl->specific_attrs<TryAcquireCapabilityAttr>())
1612    getMutexIDs(Attr->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, Attr,
1613                Exp, FunDecl, PredBlock, CurrBlock, Attr->getSuccessValue(),
1618  for (
const auto &ExclusiveLockToAdd : ExclusiveLocksToAdd)
1619    addLock(
Result, FactMan.createFact<LockableFactEntry>(ExclusiveLockToAdd,
1621  for (
const auto &SharedLockToAdd : SharedLocksToAdd)
1622    addLock(
Result, FactMan.createFact<LockableFactEntry>(SharedLockToAdd,
// BuildLockset: statement visitor that updates the current fact set (FSet)
// while walking one CFG block, issuing access/locking diagnostics through
// the owning ThreadSafetyAnalyzer. NOTE(review): extraction fragment — the
// FSet/CtxIndex member declarations and some parameters are missing.
1633class BuildLockset :
public ConstStmtVisitor<BuildLockset> {
1634  friend class ThreadSafetyAnalyzer;
1636  ThreadSafetyAnalyzer *Analyzer;
  // Facts that must hold at function exit (for RETURN_CAPABILITY checks).
1639  const FactSet &FunctionExitFSet;
1640  LocalVariableMap::Context LVarCtx;
  // Thin forwarding wrappers binding the current FSet.
1645  void checkAccess(
const Expr *Exp,
AccessKind AK,
1647    Analyzer->checkAccess(FSet, Exp, AK, POK);
1649  void checkPtAccess(
const Expr *Exp,
AccessKind AK,
1651    Analyzer->checkPtAccess(FSet, Exp, AK, POK);
1654  void handleCall(
const Expr *Exp,
const NamedDecl *D,
1655                  til::LiteralPtr *
Self =
nullptr,
1656                  SourceLocation Loc = SourceLocation());
1657  void examineArguments(
const FunctionDecl *FD,
1660                        bool SkipFirstParam =
false);
1663  BuildLockset(ThreadSafetyAnalyzer *Anlzr, CFGBlockInfo &Info,
1664               const FactSet &FunctionExitFSet)
1665      : ConstStmtVisitor<BuildLockset>(), Analyzer(Anlzr), FSet(Info.EntrySet),
1666        FunctionExitFSet(FunctionExitFSet), LVarCtx(Info.EntryContext),
1667        CtxIndex(Info.EntryIndex) {}
  // Visitor entry points for the statement kinds the analysis cares about.
1669  void VisitUnaryOperator(
const UnaryOperator *UO);
1670  void VisitBinaryOperator(
const BinaryOperator *BO);
1671  void VisitCastExpr(
const CastExpr *CE);
1672  void VisitCallExpr(
const CallExpr *Exp);
1673  void VisitCXXConstructExpr(
const CXXConstructExpr *Exp);
1674  void VisitDeclStmt(
const DeclStmt *S);
1675  void VisitMaterializeTemporaryExpr(
const MaterializeTemporaryExpr *Exp);
1676  void VisitReturnStmt(
const ReturnStmt *S);
// Warn if the capability named by MutexExp is not held at this point:
// handles negative requirements, exact / universal / partial matches, and
// insufficient lock kind. NOTE(review): extraction fragment — the Cp
// translation, handler calls, and several branches are missing.
1683void ThreadSafetyAnalyzer::warnIfMutexNotHeld(
1684    const FactSet &FSet,
const NamedDecl *D,
const Expr *Exp,
AccessKind AK,
1686    SourceLocation Loc) {
  // Negative requirement: !Cp must be recorded as a fact.
1698    const FactEntry *LDat = FSet.findLock(FactMan, !Cp);
1701                                     (!Cp).toString(), Loc);
    // Skip out-of-scope negative capabilities to limit false positives.
1707      if (!inCurrentScope(Cp))
1711      LDat = FSet.findLock(FactMan, Cp);
  // Positive requirement: search by universal, then partial match.
1718  const FactEntry *LDat = FSet.findLockUniv(FactMan, Cp);
1719  bool NoError =
true;
1722    LDat = FSet.findPartialMatch(FactMan, Cp);
1725      std::string PartMatchStr = LDat->toString();
1726      StringRef PartMatchName(PartMatchStr);
  // Held, but with a weaker kind than the access requires.
1736  if (NoError && LDat && !LDat->isAtLeast(LK)) {
// Warn if the capability named by MutexExp IS held (for EXCLUDES /
// LOCKS_EXCLUDED requirements). NOTE(review): extraction fragment — the Cp
// translation and the handler call are missing.
1742void ThreadSafetyAnalyzer::warnIfMutexHeld(
const FactSet &FSet,
1743                                           const NamedDecl *D,
const Expr *Exp,
1745                                           til::LiteralPtr *
Self,
1746                                           SourceLocation Loc) {
1755  const FactEntry *LDat = FSet.findLock(FactMan, Cp);
1767void ThreadSafetyAnalyzer::checkAccess(
const FactSet &FSet,
const Expr *Exp,
1776 while (
const auto *DRE = dyn_cast<DeclRefExpr>(Exp)) {
1777 const auto *VD = dyn_cast<VarDecl>(DRE->getDecl()->getCanonicalDecl());
1779 if (
const auto *E = VD->getInit()) {
1790 if (
const auto *UO = dyn_cast<UnaryOperator>(Exp)) {
1792 if (UO->getOpcode() == UO_Deref)
1793 checkPtAccess(FSet, UO->getSubExpr(), AK, POK);
1797 if (
const auto *BO = dyn_cast<BinaryOperator>(Exp)) {
1800 return checkAccess(FSet, BO->
getLHS(), AK, POK);
1802 return checkPtAccess(FSet, BO->
getLHS(), AK, POK);
1808 if (
const auto *AE = dyn_cast<ArraySubscriptExpr>(Exp)) {
1809 checkPtAccess(FSet, AE->getLHS(), AK, POK);
1813 if (
const auto *ME = dyn_cast<MemberExpr>(Exp)) {
1815 checkPtAccess(FSet, ME->getBase(), AK, POK);
1817 checkAccess(FSet, ME->getBase(), AK, POK);
1824 if (D->
hasAttr<GuardedVarAttr>() && FSet.isEmpty(FactMan)) {
1829 warnIfMutexNotHeld(FSet, D, Exp, AK, I->getArg(), POK,
nullptr, Loc);
1834void ThreadSafetyAnalyzer::checkPtAccess(
const FactSet &FSet,
const Expr *Exp,
1840 if (
const auto *PE = dyn_cast<ParenExpr>(Exp)) {
1841 Exp = PE->getSubExpr();
1844 if (
const auto *CE = dyn_cast<CastExpr>(Exp)) {
1845 if (CE->getCastKind() == CK_ArrayToPointerDecay) {
1848 checkAccess(FSet, CE->getSubExpr(), AK, POK);
1851 Exp = CE->getSubExpr();
1857 if (
const auto *UO = dyn_cast<UnaryOperator>(Exp)) {
1858 if (UO->getOpcode() == UO_AddrOf) {
1861 checkAccess(FSet, UO->getSubExpr(), AK, POK);
1889 if (D->
hasAttr<PtGuardedVarAttr>() && FSet.isEmpty(FactMan))
1893 warnIfMutexNotHeld(FSet, D, Exp, AK, I->getArg(), PtPOK,
nullptr,
1912void BuildLockset::handleCall(
const Expr *Exp,
const NamedDecl *D,
1913 til::LiteralPtr *
Self, SourceLocation Loc) {
1914 CapExprSet ExclusiveLocksToAdd, SharedLocksToAdd;
1915 CapExprSet ExclusiveLocksToRemove, SharedLocksToRemove, GenericLocksToRemove;
1916 CapExprSet ScopedReqsAndExcludes;
1924 til::LiteralPtr *Placeholder =
1926 [[maybe_unused]]
auto inserted =
1927 Analyzer->ConstructedObjects.insert({Exp, Placeholder});
1928 assert(inserted.second &&
"Are we visiting the same expression again?");
1931 if (TagT->getOriginalDecl()
1932 ->getMostRecentDecl()
1933 ->hasAttr<ScopedLockableAttr>())
1934 Scp = CapabilityExpr(Placeholder, Exp->
getType(),
false);
1941 for(
const Attr *At : D->
attrs()) {
1942 switch (At->getKind()) {
1945 case attr::AcquireCapability: {
1947 Analyzer->getMutexIDs(A->isShared() ? SharedLocksToAdd
1948 : ExclusiveLocksToAdd,
1956 case attr::AssertCapability: {
1958 CapExprSet AssertLocks;
1959 Analyzer->getMutexIDs(AssertLocks, A, Exp, D,
Self);
1960 for (
const auto &AssertLock : AssertLocks)
1962 FSet, Analyzer->FactMan.createFact<LockableFactEntry>(
1964 Loc, FactEntry::Asserted));
1970 case attr::ReleaseCapability: {
1973 Analyzer->getMutexIDs(GenericLocksToRemove, A, Exp, D,
Self);
1974 else if (A->isShared())
1975 Analyzer->getMutexIDs(SharedLocksToRemove, A, Exp, D,
Self);
1977 Analyzer->getMutexIDs(ExclusiveLocksToRemove, A, Exp, D,
Self);
1981 case attr::RequiresCapability: {
1983 for (
auto *Arg : A->args()) {
1984 Analyzer->warnIfMutexNotHeld(FSet, D, Exp,
1989 Analyzer->getMutexIDs(ScopedReqsAndExcludes, A, Exp, D,
Self);
1994 case attr::LocksExcluded: {
1996 for (
auto *Arg : A->args()) {
1997 Analyzer->warnIfMutexHeld(FSet, D, Exp, Arg,
Self, Loc);
2000 Analyzer->getMutexIDs(ScopedReqsAndExcludes, A, Exp, D,
Self);
2011 std::optional<CallExpr::const_arg_range> Args;
2013 if (
const auto *CE = dyn_cast<CallExpr>(Exp))
2014 Args = CE->arguments();
2015 else if (
const auto *CE = dyn_cast<CXXConstructExpr>(Exp))
2016 Args = CE->arguments();
2018 llvm_unreachable(
"Unknown call kind");
2020 const auto *CalledFunction = dyn_cast<FunctionDecl>(D);
2021 if (CalledFunction && Args.has_value()) {
2022 for (
auto [Param, Arg] : zip(CalledFunction->parameters(), *Args)) {
2023 CapExprSet DeclaredLocks;
2024 for (
const Attr *At : Param->attrs()) {
2025 switch (At->getKind()) {
2026 case attr::AcquireCapability: {
2028 Analyzer->getMutexIDs(A->isShared() ? SharedLocksToAdd
2029 : ExclusiveLocksToAdd,
2031 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2035 case attr::ReleaseCapability: {
2038 Analyzer->getMutexIDs(GenericLocksToRemove, A, Exp, D,
Self);
2039 else if (A->isShared())
2040 Analyzer->getMutexIDs(SharedLocksToRemove, A, Exp, D,
Self);
2042 Analyzer->getMutexIDs(ExclusiveLocksToRemove, A, Exp, D,
Self);
2043 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2047 case attr::RequiresCapability: {
2049 for (
auto *Arg : A->args())
2050 Analyzer->warnIfMutexNotHeld(FSet, D, Exp,
2053 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2057 case attr::LocksExcluded: {
2059 for (
auto *Arg : A->args())
2060 Analyzer->warnIfMutexHeld(FSet, D, Exp, Arg,
Self, Loc);
2061 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2069 if (DeclaredLocks.empty())
2071 CapabilityExpr Cp(Analyzer->SxBuilder.
translate(Arg,
nullptr),
2072 StringRef(
"mutex"),
false,
false);
2073 if (
const auto *CBTE = dyn_cast<CXXBindTemporaryExpr>(Arg->IgnoreCasts());
2075 if (
auto Object = Analyzer->ConstructedObjects.find(CBTE->getSubExpr());
2076 Object != Analyzer->ConstructedObjects.end())
2077 Cp = CapabilityExpr(
Object->second, StringRef(
"mutex"),
false,
2080 const FactEntry *Fact = FSet.findLock(Analyzer->FactMan, Cp);
2088 for (
const auto &[a,
b] :
2089 zip_longest(DeclaredLocks, Scope->getUnderlyingMutexes())) {
2090 if (!a.has_value()) {
2093 b.value().getKind(),
b.value().toString());
2094 }
else if (!
b.has_value()) {
2097 a.value().getKind(), a.value().toString());
2098 }
else if (!a.value().equals(
b.value())) {
2101 a.value().getKind(), a.value().toString(),
b.value().toString());
2110 for (
const auto &M : ExclusiveLocksToRemove)
2111 Analyzer->removeLock(FSet, M, Loc, Dtor,
LK_Exclusive);
2112 for (
const auto &M : SharedLocksToRemove)
2113 Analyzer->removeLock(FSet, M, Loc, Dtor,
LK_Shared);
2114 for (
const auto &M : GenericLocksToRemove)
2115 Analyzer->removeLock(FSet, M, Loc, Dtor,
LK_Generic);
2118 FactEntry::SourceKind Source =
2119 !Scp.
shouldIgnore() ? FactEntry::Managed : FactEntry::Acquired;
2120 for (
const auto &M : ExclusiveLocksToAdd)
2121 Analyzer->addLock(FSet, Analyzer->FactMan.createFact<LockableFactEntry>(
2123 for (
const auto &M : SharedLocksToAdd)
2124 Analyzer->addLock(FSet, Analyzer->FactMan.createFact<LockableFactEntry>(
2129 auto *ScopedEntry = Analyzer->FactMan.createFact<ScopedLockableFactEntry>(
2130 Scp, Loc, FactEntry::Acquired,
2131 ExclusiveLocksToAdd.size() + SharedLocksToAdd.size() +
2132 ScopedReqsAndExcludes.size() + ExclusiveLocksToRemove.size() +
2133 SharedLocksToRemove.size());
2134 for (
const auto &M : ExclusiveLocksToAdd)
2135 ScopedEntry->addLock(M);
2136 for (
const auto &M : SharedLocksToAdd)
2137 ScopedEntry->addLock(M);
2138 for (
const auto &M : ScopedReqsAndExcludes)
2139 ScopedEntry->addLock(M);
2140 for (
const auto &M : ExclusiveLocksToRemove)
2141 ScopedEntry->addExclusiveUnlock(M);
2142 for (
const auto &M : SharedLocksToRemove)
2143 ScopedEntry->addSharedUnlock(M);
2144 Analyzer->addLock(FSet, ScopedEntry);
2151void BuildLockset::VisitUnaryOperator(
const UnaryOperator *UO) {
2167void BuildLockset::VisitBinaryOperator(
const BinaryOperator *BO) {
2172 LVarCtx = Analyzer->LocalVarMap.getNextContext(CtxIndex, BO, LVarCtx);
2180void BuildLockset::VisitCastExpr(
const CastExpr *CE) {
2186void BuildLockset::examineArguments(
const FunctionDecl *FD,
2189 bool SkipFirstParam) {
2199 if (FD->
hasAttr<NoThreadSafetyAnalysisAttr>())
2202 const ArrayRef<ParmVarDecl *> Params = FD->
parameters();
2203 auto Param = Params.begin();
2208 for (
auto Arg = ArgBegin; Param != Params.end() && Arg != ArgEnd;
2210 QualType Qt = (*Param)->getType();
2218void BuildLockset::VisitCallExpr(
const CallExpr *Exp) {
2219 if (
const auto *CE = dyn_cast<CXXMemberCallExpr>(Exp)) {
2220 const auto *ME = dyn_cast<MemberExpr>(CE->getCallee());
2222 const CXXMethodDecl *MD = CE->getMethodDecl();
2225 if (ME->isArrow()) {
2227 checkPtAccess(CE->getImplicitObjectArgument(),
AK_Read);
2230 checkAccess(CE->getImplicitObjectArgument(),
AK_Read);
2234 examineArguments(CE->getDirectCallee(), CE->arg_begin(), CE->arg_end());
2235 }
else if (
const auto *OE = dyn_cast<CXXOperatorCallExpr>(Exp)) {
2243 case OO_PercentEqual:
2247 case OO_LessLessEqual:
2248 case OO_GreaterGreaterEqual:
2249 checkAccess(OE->getArg(1),
AK_Read);
2259 if (!(OEop == OO_Star && OE->getNumArgs() > 1)) {
2261 checkPtAccess(OE->getArg(0),
AK_Read);
2266 const Expr *Obj = OE->getArg(0);
2271 const FunctionDecl *FD = OE->getDirectCallee();
2272 examineArguments(FD, std::next(OE->arg_begin()), OE->arg_end(),
2281 auto *D = dyn_cast_or_null<NamedDecl>(Exp->
getCalleeDecl());
2287void BuildLockset::VisitCXXConstructExpr(
const CXXConstructExpr *Exp) {
2290 const Expr* Source = Exp->
getArg(0);
2300 if (
auto *CE = dyn_cast<CastExpr>(E))
2303 if (
auto *CE = dyn_cast<CastExpr>(E))
2304 if (CE->
getCastKind() == CK_ConstructorConversion ||
2307 if (
auto *BTE = dyn_cast<CXXBindTemporaryExpr>(E))
2308 E = BTE->getSubExpr();
2312void BuildLockset::VisitDeclStmt(
const DeclStmt *S) {
2314 LVarCtx = Analyzer->LocalVarMap.getNextContext(CtxIndex, S, LVarCtx);
2317 if (
auto *VD = dyn_cast_or_null<VarDecl>(D)) {
2318 const Expr *E = VD->getInit();
2324 if (
auto *EWC = dyn_cast<ExprWithCleanups>(E))
2328 if (
auto Object = Analyzer->ConstructedObjects.find(E);
2329 Object != Analyzer->ConstructedObjects.end()) {
2330 Object->second->setClangDecl(VD);
2331 Analyzer->ConstructedObjects.erase(Object);
2337void BuildLockset::VisitMaterializeTemporaryExpr(
2338 const MaterializeTemporaryExpr *Exp) {
2340 if (
auto Object = Analyzer->ConstructedObjects.find(
2342 Object != Analyzer->ConstructedObjects.end()) {
2343 Object->second->setClangDecl(ExtD);
2344 Analyzer->ConstructedObjects.erase(Object);
2349void BuildLockset::VisitReturnStmt(
const ReturnStmt *S) {
2350 if (Analyzer->CurrentFunction ==
nullptr)
2358 const QualType ReturnType =
2361 Analyzer->checkAccess(
2362 FunctionExitFSet, RetVal,
2366 Analyzer->checkPtAccess(
2367 FunctionExitFSet, RetVal,
2377bool ThreadSafetyAnalyzer::join(
const FactEntry &A,
const FactEntry &B,
2378 SourceLocation JoinLoc,
2382 unsigned int ReentrancyDepthA = 0;
2383 unsigned int ReentrancyDepthB = 0;
2385 if (
const auto *LFE = dyn_cast<LockableFactEntry>(&A))
2386 ReentrancyDepthA = LFE->getReentrancyDepth();
2387 if (
const auto *LFE = dyn_cast<LockableFactEntry>(&B))
2388 ReentrancyDepthB = LFE->getReentrancyDepth();
2390 if (ReentrancyDepthA != ReentrancyDepthB) {
2396 return CanModify && ReentrancyDepthA < ReentrancyDepthB;
2397 }
else if (A.kind() != B.kind()) {
2400 if ((A.managed() || A.asserted()) && (B.managed() || B.asserted())) {
2402 bool ShouldTakeB = B.kind() ==
LK_Shared;
2403 if (CanModify || !ShouldTakeB)
2412 return CanModify && A.asserted() && !B.asserted();
2430void ThreadSafetyAnalyzer::intersectAndWarn(FactSet &EntrySet,
2431 const FactSet &ExitSet,
2432 SourceLocation JoinLoc,
2435 FactSet EntrySetOrig = EntrySet;
2438 for (
const auto &Fact : ExitSet) {
2439 const FactEntry &ExitFact = FactMan[Fact];
2441 FactSet::iterator EntryIt = EntrySet.findLockIter(FactMan, ExitFact);
2442 if (EntryIt != EntrySet.end()) {
2443 if (join(FactMan[*EntryIt], ExitFact, JoinLoc, EntryLEK))
2446 ExitFact.handleRemovalFromIntersection(ExitSet, FactMan, JoinLoc,
2452 for (
const auto &Fact : EntrySetOrig) {
2453 const FactEntry *EntryFact = &FactMan[Fact];
2454 const FactEntry *ExitFact = ExitSet.findLock(FactMan, *EntryFact);
2459 EntryFact->handleRemovalFromIntersection(EntrySetOrig, FactMan, JoinLoc,
2462 EntrySet.removeLock(FactMan, *EntryFact);
2475 if (std::optional<CFGStmt> S =
Last.getAs<
CFGStmt>()) {
2487void ThreadSafetyAnalyzer::runAnalysis(AnalysisDeclContext &AC) {
2490 threadSafety::CFGWalker walker;
2491 if (!walker.
init(AC))
2498 const NamedDecl *D = walker.
getDecl();
2499 CurrentFunction = dyn_cast<FunctionDecl>(D);
2501 if (D->
hasAttr<NoThreadSafetyAnalysisAttr>())
2516 CFGBlockInfo::getEmptyBlockInfo(LocalVarMap));
2522 PostOrderCFGView::CFGBlockSet VisitedBlocks(CFGraph);
2528 Initial.Reachable =
true;
2531 LocalVarMap.traverseCFG(CFGraph, SortedGraph, BlockInfo);
2536 CapExprSet ExclusiveLocksAcquired;
2537 CapExprSet SharedLocksAcquired;
2538 CapExprSet LocksReleased;
2543 if (!SortedGraph->
empty()) {
2545 FactSet &InitialLockset = Initial.EntrySet;
2547 CapExprSet ExclusiveLocksToAdd;
2548 CapExprSet SharedLocksToAdd;
2551 for (
const auto *Attr : D->
attrs()) {
2552 Loc = Attr->getLocation();
2553 if (
const auto *A = dyn_cast<RequiresCapabilityAttr>(Attr)) {
2554 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2556 }
else if (
const auto *A = dyn_cast<ReleaseCapabilityAttr>(Attr)) {
2559 if (A->args_size() == 0)
2561 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2563 getMutexIDs(LocksReleased, A,
nullptr, D);
2564 }
else if (
const auto *A = dyn_cast<AcquireCapabilityAttr>(Attr)) {
2565 if (A->args_size() == 0)
2567 getMutexIDs(A->isShared() ? SharedLocksAcquired
2568 : ExclusiveLocksAcquired,
2575 ArrayRef<ParmVarDecl *> Params;
2576 if (CurrentFunction)
2578 else if (
auto CurrentMethod = dyn_cast<ObjCMethodDecl>(D))
2579 Params = CurrentMethod->getCanonicalDecl()->parameters();
2581 llvm_unreachable(
"Unknown function kind");
2582 for (
const ParmVarDecl *Param : Params) {
2583 CapExprSet UnderlyingLocks;
2584 for (
const auto *Attr : Param->attrs()) {
2585 Loc = Attr->getLocation();
2586 if (
const auto *A = dyn_cast<ReleaseCapabilityAttr>(Attr)) {
2587 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2589 getMutexIDs(LocksReleased, A,
nullptr, Param);
2590 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2591 }
else if (
const auto *A = dyn_cast<RequiresCapabilityAttr>(Attr)) {
2592 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2594 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2595 }
else if (
const auto *A = dyn_cast<AcquireCapabilityAttr>(Attr)) {
2596 getMutexIDs(A->isShared() ? SharedLocksAcquired
2597 : ExclusiveLocksAcquired,
2599 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2600 }
else if (
const auto *A = dyn_cast<LocksExcludedAttr>(Attr)) {
2601 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2604 if (UnderlyingLocks.empty())
2608 auto *ScopedEntry = FactMan.createFact<ScopedLockableFactEntry>(
2609 Cp, Param->getLocation(), FactEntry::Declared,
2610 UnderlyingLocks.size());
2611 for (
const CapabilityExpr &M : UnderlyingLocks)
2612 ScopedEntry->addLock(M);
2613 addLock(InitialLockset, ScopedEntry,
true);
2617 for (
const auto &Mu : ExclusiveLocksToAdd) {
2618 const auto *Entry = FactMan.createFact<LockableFactEntry>(
2620 addLock(InitialLockset, Entry,
true);
2622 for (
const auto &Mu : SharedLocksToAdd) {
2623 const auto *Entry = FactMan.createFact<LockableFactEntry>(
2624 Mu,
LK_Shared, Loc, FactEntry::Declared);
2625 addLock(InitialLockset, Entry,
true);
2631 FactSet ExpectedFunctionExitSet = Initial.EntrySet;
2637 for (
const auto &Lock : ExclusiveLocksAcquired)
2638 ExpectedFunctionExitSet.addLock(
2639 FactMan, FactMan.createFact<LockableFactEntry>(Lock,
LK_Exclusive,
2641 for (
const auto &Lock : SharedLocksAcquired)
2642 ExpectedFunctionExitSet.addLock(
2643 FactMan, FactMan.createFact<LockableFactEntry>(Lock,
LK_Shared,
2645 for (
const auto &Lock : LocksReleased)
2646 ExpectedFunctionExitSet.removeLock(FactMan, Lock);
2648 for (
const auto *CurrBlock : *SortedGraph) {
2649 unsigned CurrBlockID = CurrBlock->
getBlockID();
2650 CFGBlockInfo *CurrBlockInfo = &BlockInfo[CurrBlockID];
2653 VisitedBlocks.insert(CurrBlock);
2668 bool LocksetInitialized =
false;
2670 PE = CurrBlock->
pred_end(); PI != PE; ++PI) {
2672 if (*PI ==
nullptr || !VisitedBlocks.alreadySet(*PI))
2675 unsigned PrevBlockID = (*PI)->getBlockID();
2676 CFGBlockInfo *PrevBlockInfo = &BlockInfo[PrevBlockID];
2683 CurrBlockInfo->Reachable =
true;
2685 FactSet PrevLockset;
2686 getEdgeLockset(PrevLockset, PrevBlockInfo->ExitSet, *PI, CurrBlock);
2688 if (!LocksetInitialized) {
2689 CurrBlockInfo->EntrySet = PrevLockset;
2690 LocksetInitialized =
true;
2696 CurrBlockInfo->EntrySet, PrevLockset, CurrBlockInfo->EntryLoc,
2697 isa_and_nonnull<ContinueStmt>((*PI)->getTerminatorStmt())
2704 if (!CurrBlockInfo->Reachable)
2707 BuildLockset LocksetBuilder(
this, *CurrBlockInfo, ExpectedFunctionExitSet);
2710 for (
const auto &BI : *CurrBlock) {
2711 switch (BI.getKind()) {
2713 CFGStmt CS = BI.castAs<CFGStmt>();
2714 LocksetBuilder.Visit(CS.
getStmt());
2719 CFGAutomaticObjDtor AD = BI.castAs<CFGAutomaticObjDtor>();
2721 if (!DD->hasAttrs())
2724 LocksetBuilder.handleCall(
nullptr, DD,
2731 const CFGCleanupFunction &
CF = BI.castAs<CFGCleanupFunction>();
2732 LocksetBuilder.handleCall(
nullptr,
CF.getFunctionDecl(),
2734 CF.getVarDecl()->getLocation());
2739 auto TD = BI.castAs<CFGTemporaryDtor>();
2743 if (
auto Object = ConstructedObjects.find(
2744 TD.getBindTemporaryExpr()->getSubExpr());
2745 Object != ConstructedObjects.end()) {
2749 LocksetBuilder.handleCall(
nullptr, DD,
Object->second,
2750 TD.getBindTemporaryExpr()->getEndLoc());
2751 ConstructedObjects.erase(Object);
2759 CurrBlockInfo->ExitSet = LocksetBuilder.FSet;
2766 SE = CurrBlock->succ_end(); SI != SE; ++SI) {
2768 if (*SI ==
nullptr || !VisitedBlocks.alreadySet(*SI))
2771 CFGBlock *FirstLoopBlock = *SI;
2772 CFGBlockInfo *PreLoop = &BlockInfo[FirstLoopBlock->
getBlockID()];
2773 CFGBlockInfo *LoopEnd = &BlockInfo[CurrBlockID];
2774 intersectAndWarn(PreLoop->EntrySet, LoopEnd->ExitSet, PreLoop->EntryLoc,
2780 if (!Final.Reachable)
2784 intersectAndWarn(ExpectedFunctionExitSet, Final.ExitSet, Final.ExitLoc,
2800 ThreadSafetyAnalyzer Analyzer(Handler, *BSet);
2801 Analyzer.runAnalysis(AC);
2815 llvm_unreachable(
"Unknown AccessKind");
This file defines AnalysisDeclContext, a class that manages the analysis context data for context sen...
Defines enum values for all the target-independent builtin functions.
static void dump(llvm::raw_ostream &OS, StringRef FunctionName, ArrayRef< CounterExpression > Expressions, ArrayRef< CounterMappingRegion > Regions)
static Decl::Kind getKind(const Decl *D)
Defines the C++ Decl subclasses, other than those for templates (found in DeclTemplate....
Defines the clang::Expr interface and subclasses for C++ expressions.
Forward-declares and imports various common LLVM datatypes that clang wants to use unqualified.
Defines an enumeration for C++ overloaded operators.
static std::string toString(const clang::SanitizerSet &Sanitizers)
Produce a string containing comma-separated names of sanitizers in Sanitizers set.
Defines the clang::SourceLocation class and associated facilities.
Defines various enumerations that describe declaration and type specifiers.
static void warnInvalidLock(ThreadSafetyHandler &Handler, const Expr *MutexExp, const NamedDecl *D, const Expr *DeclExp, StringRef Kind)
Issue a warning about an invalid lock expression.
static bool getStaticBooleanValue(Expr *E, bool &TCond)
static bool neverReturns(const CFGBlock *B)
static void findBlockLocations(CFG *CFGraph, const PostOrderCFGView *SortedGraph, std::vector< CFGBlockInfo > &BlockInfo)
Find the appropriate source locations to use when producing diagnostics for each block in the CFG.
static const ValueDecl * getValueDecl(const Expr *Exp)
Gets the value decl pointer from DeclRefExprs or MemberExprs.
static const Expr * UnpackConstruction(const Expr *E)
C Language Family Type Representation.
AnalysisDeclContext contains the context data for the function, method or block under analysis.
ASTContext & getASTContext() const
static bool isAssignmentOp(Opcode Opc)
const VarDecl * getVarDecl() const
const Stmt * getTriggerStmt() const
Represents a single basic block in a source-level CFG.
bool hasNoReturnElement() const
ElementList::const_reverse_iterator const_reverse_iterator
succ_iterator succ_begin()
Stmt * getTerminatorStmt()
AdjacentBlocks::const_iterator const_pred_iterator
pred_iterator pred_begin()
unsigned getBlockID() const
Stmt * getTerminatorCondition(bool StripParens=true)
AdjacentBlocks::const_iterator const_succ_iterator
Represents a top-level expression in a basic block.
const CXXDestructorDecl * getDestructorDecl(ASTContext &astContext) const
const Stmt * getStmt() const
Represents a source-level, intra-procedural CFG that represents the control-flow of a Stmt.
unsigned getNumBlockIDs() const
Returns the total number of BlockIDs allocated (which start at 0).
Expr * getArg(unsigned Arg)
Return the specified argument.
CXXConstructorDecl * getConstructor() const
Get the constructor that this expression will (ultimately) call.
bool isCopyConstructor(unsigned &TypeQuals) const
Whether this constructor is a copy constructor (C++ [class.copy]p2, which can be used to copy the cla...
ConstExprIterator const_arg_iterator
FunctionDecl * getDirectCallee()
If the callee is a FunctionDecl, return it. Otherwise return null.
CastKind getCastKind() const
const DeclGroupRef getDeclGroup() const
SourceLocation getBeginLoc() const LLVM_READONLY
llvm::iterator_range< specific_attr_iterator< T > > specific_attrs() const
SourceLocation getLocation() const
bool isDefinedOutsideFunctionOrMethod() const
isDefinedOutsideFunctionOrMethod - This predicate returns true if this scoped decl is defined outside...
DeclContext * getDeclContext()
This represents one expression.
Expr * IgnoreParenCasts() LLVM_READONLY
Skip past any parentheses and casts which might surround this expression until reaching a fixed point...
Expr * IgnoreImplicit() LLVM_READONLY
Skip past any implicit AST nodes which might surround this expression until reaching a fixed point.
Expr * IgnoreParens() LLVM_READONLY
Skip past any parentheses which might surround this expression until reaching a fixed point.
SourceLocation getExprLoc() const LLVM_READONLY
getExprLoc - Return the preferred location for the arrow when diagnosing a problem with a generic exp...
QualType getReturnType() const
ArrayRef< ParmVarDecl * > parameters() const
FunctionDecl * getCanonicalDecl() override
Retrieves the "canonical" declaration of the given declaration.
Expr * getSubExpr() const
Retrieve the temporary-generating subexpression whose value will be materialized into a glvalue.
ValueDecl * getExtendingDecl()
Get the declaration which triggered the lifetime-extension of this temporary, if any.
This represents a decl that may have a name.
StringRef getName() const
Get the name of identifier for this declaration as a StringRef.
std::string getNameAsString() const
Get a human-readable name for the declaration, even if it is one of the special kinds of names (C++ c...
virtual void printName(raw_ostream &OS, const PrintingPolicy &Policy) const
Pretty-print the unqualified name of this declaration.
QualType getCanonicalType() const
bool isConstQualified() const
Determine whether this type is const-qualified.
Encodes a location in the source.
bool isValid() const
Return true if this is a valid SourceLocation object.
Stmt - This represents one statement.
SourceLocation getEndLoc() const LLVM_READONLY
void dump() const
Dumps the specified AST fragment and all subtrees to llvm::errs().
bool isPointerType() const
bool isReferenceType() const
QualType getPointeeType() const
If this is a pointer, ObjC object pointer, or block pointer, this returns the respective pointee.
bool isLValueReferenceType() const
const T * getAs() const
Member-template getAs<specific type>'.
Expr * getSubExpr() const
Represent the declaration of a variable (in which case it is an lvalue) a function (in which case it ...
void checkBeforeAfter(const ValueDecl *Vd, const FactSet &FSet, ThreadSafetyAnalyzer &Analyzer, SourceLocation Loc, StringRef CapKind)
Return true if any mutexes in FSet are in the acquired_before set of Vd.
BeforeInfo * insertAttrExprs(const ValueDecl *Vd, ThreadSafetyAnalyzer &Analyzer)
Process acquired_before and acquired_after attributes on Vd.
BeforeInfo * getBeforeInfoForDecl(const ValueDecl *Vd, ThreadSafetyAnalyzer &Analyzer)
const PostOrderCFGView * getSortedGraph() const
const NamedDecl * getDecl() const
bool init(AnalysisDeclContext &AC)
const CFG * getGraph() const
bool shouldIgnore() const
bool equals(const CapabilityExpr &other) const
const til::SExpr * sexpr() const
std::string toString() const
const ValueDecl * valueDecl() const
StringRef getKind() const
CapabilityExpr translateAttrExpr(const Expr *AttrExp, const NamedDecl *D, const Expr *DeclExp, til::SExpr *Self=nullptr)
Translate a clang expression in an attribute to a til::SExpr.
til::SExpr * translate(const Stmt *S, CallingContext *Ctx)
til::LiteralPtr * createVariable(const VarDecl *VD)
Handler class for thread safety warnings.
virtual ~ThreadSafetyHandler()
virtual void handleExpectMoreUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc, Name ScopeName, StringRef Kind, Name Expected)
Warn when we get fewer underlying mutexes than expected.
virtual void handleInvalidLockExp(SourceLocation Loc)
Warn about lock expressions which fail to resolve to lockable objects.
virtual void handleUnmatchedUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc, Name ScopeName, StringRef Kind, Name Expected, Name Actual)
Warn when an actual underlying mutex of a scoped lockable does not match the expected.
virtual void handleExpectFewerUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc, Name ScopeName, StringRef Kind, Name Actual)
Warn when we get more underlying mutexes than expected.
virtual void enterFunction(const FunctionDecl *FD)
Called by the analysis when starting analysis of a function.
virtual void handleIncorrectUnlockKind(StringRef Kind, Name LockName, LockKind Expected, LockKind Received, SourceLocation LocLocked, SourceLocation LocUnlock)
Warn about an unlock function call that attempts to unlock a lock with the incorrect lock kind.
virtual void handleMutexHeldEndOfScope(StringRef Kind, Name LockName, SourceLocation LocLocked, SourceLocation LocEndOfScope, LockErrorKind LEK, bool ReentrancyMismatch=false)
Warn about situations where a mutex is sometimes held and sometimes not.
virtual void leaveFunction(const FunctionDecl *FD)
Called by the analysis when finishing analysis of a function.
virtual void handleExclusiveAndShared(StringRef Kind, Name LockName, SourceLocation Loc1, SourceLocation Loc2)
Warn when a mutex is held exclusively and shared at the same point.
virtual void handleMutexNotHeld(StringRef Kind, const NamedDecl *D, ProtectedOperationKind POK, Name LockName, LockKind LK, SourceLocation Loc, Name *PossibleMatch=nullptr)
Warn when a protected operation occurs while the specific mutex protecting the operation is not locke...
virtual void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName, SourceLocation Loc)
Warn when a function is called while an excluded mutex is locked.
virtual void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK, AccessKind AK, SourceLocation Loc)
Warn when a protected operation occurs while no locks are held.
virtual void handleUnmatchedUnlock(StringRef Kind, Name LockName, SourceLocation Loc, SourceLocation LocPreviousUnlock)
Warn about unlock function calls that do not have a prior matching lock expression.
virtual void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg, SourceLocation Loc)
Warn when acquiring a lock that the negative capability is not held.
virtual void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked, SourceLocation LocDoubleLock)
Warn about lock function calls for locks which are already held.
internal::Matcher< T > traverse(TraversalKind TK, const internal::Matcher< T > &InnerMatcher)
Causes all nested matchers to be matched with the specified traversal kind.
unsigned kind
All of the diagnostics that can be emitted by the frontend.
@ CF
Indicates that the tracked object is a CF object.
bool Alloc(InterpState &S, CodePtr OpPC, const Descriptor *Desc)
bool Dec(InterpState &S, CodePtr OpPC, bool CanOverflow)
1) Pops a pointer from the stack 2) Load the value from the pointer 3) Writes the value decreased by ...
bool Neg(InterpState &S, CodePtr OpPC)
std::unique_ptr< DiagnosticConsumer > create(StringRef OutputFile, DiagnosticOptions &DiagOpts, bool MergeChildRecords=false)
Returns a DiagnosticConsumer that serializes diagnostics to a bitcode file.
bool matches(const til::SExpr *E1, const til::SExpr *E2)
LockKind getLockKindFromAccessKind(AccessKind AK)
Helper function that returns a LockKind required for the given level of access.
LockErrorKind
This enum distinguishes between different situations where we warn due to inconsistent locking.
@ LEK_NotLockedAtEndOfFunction
Expecting a capability to be held at the end of function.
@ LEK_LockedSomePredecessors
A capability is locked in some but not all predecessors of a CFGBlock.
@ LEK_LockedAtEndOfFunction
A capability is still locked at the end of a function.
@ LEK_LockedSomeLoopIterations
A capability is locked for some but not all loop iterations.
void threadSafetyCleanup(BeforeSet *Cache)
AccessKind
This enum distinguishes between different ways to access (read or write) a variable.
@ AK_Written
Writing a variable.
@ AK_Read
Reading a variable.
LockKind
This enum distinguishes between different kinds of lock actions.
@ LK_Shared
Shared/reader lock of a mutex.
@ LK_Exclusive
Exclusive/writer lock of a mutex.
@ LK_Generic
Can be either Shared or Exclusive.
void runThreadSafetyAnalysis(AnalysisDeclContext &AC, ThreadSafetyHandler &Handler, BeforeSet **Bset)
Check a function's CFG for thread-safety violations.
ProtectedOperationKind
This enum distinguishes between different kinds of operations that may need to be protected by locks.
@ POK_PtPassByRef
Passing a pt-guarded variable by reference.
@ POK_PassPointer
Passing pointer to a guarded variable.
@ POK_VarDereference
Dereferencing a variable (e.g. p in *p = 5;)
@ POK_PassByRef
Passing a guarded variable by reference.
@ POK_ReturnByRef
Returning a guarded variable by reference.
@ POK_PtPassPointer
Passing a pt-guarded pointer.
@ POK_PtReturnPointer
Returning a pt-guarded pointer.
@ POK_VarAccess
Reading or writing a variable (e.g. x in x = 5;)
@ POK_FunctionCall
Making a function call (e.g. fool())
@ POK_ReturnPointer
Returning pointer to a guarded variable.
@ POK_PtReturnByRef
Returning a pt-guarded variable by reference.
The JSON file list parser is used to communicate input to InstallAPI.
OverloadedOperatorKind
Enumeration specifying the different kinds of C++ overloaded operators.
bool isa(CodeGen::Address addr)
@ Self
'self' clause, allowed on Compute and Combined Constructs, plus 'update'.
nullptr
This class represents a compute construct, representing a 'Kind' of ‘parallel’, 'serial',...
static bool classof(const Stmt *T)
@ Result
The result type of a method or function.
const FunctionProtoType * T
U cast(CodeGen::Address addr)
@ Other
Other implicit parameter.
int const char * function