39#include "llvm/ADT/DenseMap.h"
40#include "llvm/ADT/ImmutableMap.h"
41#include "llvm/ADT/STLExtras.h"
42#include "llvm/ADT/ScopeExit.h"
43#include "llvm/ADT/SmallVector.h"
44#include "llvm/ADT/StringRef.h"
45#include "llvm/Support/Allocator.h"
46#include "llvm/Support/Casting.h"
47#include "llvm/Support/ErrorHandling.h"
48#include "llvm/Support/TrailingObjects.h"
49#include "llvm/Support/raw_ostream.h"
68 const Expr *DeclExp, StringRef Kind) {
82class CapExprSet :
public SmallVector<CapabilityExpr, 4> {
85 void push_back_nodup(
const CapabilityExpr &CapE) {
86 if (llvm::none_of(*
this, [=](
const CapabilityExpr &CapE2) {
102 enum FactEntryKind { Lockable, ScopedLockable };
113 const FactEntryKind Kind : 8;
119 SourceKind Source : 8;
122 SourceLocation AcquireLoc;
125 ~FactEntry() =
default;
128 FactEntry(FactEntryKind FK,
const CapabilityExpr &CE,
LockKind LK,
129 SourceLocation Loc, SourceKind Src)
130 : CapabilityExpr(CE), Kind(FK), LKind(LK), Source(Src), AcquireLoc(Loc) {}
133 SourceLocation loc()
const {
return AcquireLoc; }
134 FactEntryKind getFactEntryKind()
const {
return Kind; }
136 bool asserted()
const {
return Source == Asserted; }
137 bool declared()
const {
return Source == Declared; }
138 bool managed()
const {
return Source == Managed; }
141 handleRemovalFromIntersection(
const FactSet &FSet, FactManager &FactMan,
143 ThreadSafetyHandler &Handler)
const = 0;
144 virtual void handleLock(FactSet &FSet, FactManager &FactMan,
145 const FactEntry &entry,
146 ThreadSafetyHandler &Handler)
const = 0;
147 virtual void handleUnlock(FactSet &FSet, FactManager &FactMan,
148 const CapabilityExpr &Cp, SourceLocation UnlockLoc,
150 ThreadSafetyHandler &Handler)
const = 0;
158using FactID =
unsigned short;
164 llvm::BumpPtrAllocator &Alloc;
165 std::vector<const FactEntry *> Facts;
168 FactManager(llvm::BumpPtrAllocator &Alloc) : Alloc(Alloc) {}
170 template <
typename T,
typename... ArgTypes>
171 T *createFact(ArgTypes &&...Args) {
172 static_assert(std::is_trivially_destructible_v<T>);
173 return T::create(Alloc, std::forward<ArgTypes>(Args)...);
176 FactID newFact(
const FactEntry *Entry) {
177 Facts.push_back(Entry);
178 assert(Facts.size() - 1 <= std::numeric_limits<FactID>::max() &&
179 "FactID space exhausted");
180 return static_cast<unsigned short>(Facts.size() - 1);
183 const FactEntry &operator[](FactID F)
const {
return *Facts[F]; }
195 using FactVec = SmallVector<FactID, 4>;
200 using iterator = FactVec::iterator;
201 using const_iterator = FactVec::const_iterator;
203 iterator begin() {
return FactIDs.begin(); }
204 const_iterator begin()
const {
return FactIDs.begin(); }
206 iterator end() {
return FactIDs.end(); }
207 const_iterator end()
const {
return FactIDs.end(); }
209 bool isEmpty()
const {
return FactIDs.size() == 0; }
212 bool isEmpty(FactManager &FactMan)
const {
213 for (
const auto FID : *
this) {
214 if (!FactMan[FID].negative())
220 void addLockByID(FactID ID) { FactIDs.push_back(ID); }
222 FactID addLock(FactManager &FM,
const FactEntry *Entry) {
223 FactID F = FM.newFact(Entry);
224 FactIDs.push_back(F);
228 bool removeLock(FactManager& FM,
const CapabilityExpr &CapE) {
229 unsigned n = FactIDs.size();
233 for (
unsigned i = 0; i < n-1; ++i) {
234 if (FM[FactIDs[i]].
matches(CapE)) {
235 FactIDs[i] = FactIDs[n-1];
240 if (FM[FactIDs[n-1]].
matches(CapE)) {
247 std::optional<FactID> replaceLock(FactManager &FM, iterator It,
248 const FactEntry *Entry) {
251 FactID F = FM.newFact(Entry);
256 std::optional<FactID> replaceLock(FactManager &FM,
const CapabilityExpr &CapE,
257 const FactEntry *Entry) {
258 return replaceLock(FM, findLockIter(FM, CapE), Entry);
261 iterator findLockIter(FactManager &FM,
const CapabilityExpr &CapE) {
262 return llvm::find_if(*
this,
263 [&](FactID ID) {
return FM[
ID].matches(CapE); });
266 const FactEntry *findLock(FactManager &FM,
const CapabilityExpr &CapE)
const {
268 llvm::find_if(*
this, [&](FactID ID) {
return FM[
ID].matches(CapE); });
269 return I != end() ? &FM[*I] :
nullptr;
272 const FactEntry *findLockUniv(FactManager &FM,
273 const CapabilityExpr &CapE)
const {
274 auto I = llvm::find_if(
275 *
this, [&](FactID ID) ->
bool {
return FM[
ID].matchesUniv(CapE); });
276 return I != end() ? &FM[*I] :
nullptr;
279 const FactEntry *findPartialMatch(FactManager &FM,
280 const CapabilityExpr &CapE)
const {
281 auto I = llvm::find_if(*
this, [&](FactID ID) ->
bool {
282 return FM[
ID].partiallyMatches(CapE);
284 return I != end() ? &FM[*I] :
nullptr;
287 bool containsMutexDecl(FactManager &FM,
const ValueDecl* Vd)
const {
288 auto I = llvm::find_if(
289 *
this, [&](FactID ID) ->
bool {
return FM[
ID].valueDecl() == Vd; });
294class ThreadSafetyAnalyzer;
309 BeforeInfo() =
default;
310 BeforeInfo(BeforeInfo &&) =
default;
314 llvm::DenseMap<const ValueDecl *, std::unique_ptr<BeforeInfo>>;
315 using CycleMap = llvm::DenseMap<const ValueDecl *, bool>;
321 ThreadSafetyAnalyzer& Analyzer);
324 ThreadSafetyAnalyzer &Analyzer);
328 ThreadSafetyAnalyzer& Analyzer,
341class LocalVariableMap;
343using LocalVarContext = llvm::ImmutableMap<const NamedDecl *, unsigned>;
346enum CFGBlockSide { CBS_Entry, CBS_Exit };
359 LocalVarContext EntryContext;
362 LocalVarContext ExitContext;
365 SourceLocation EntryLoc;
368 SourceLocation ExitLoc;
374 bool Reachable =
false;
376 const FactSet &getSet(CFGBlockSide Side)
const {
377 return Side == CBS_Entry ? EntrySet : ExitSet;
380 SourceLocation getLocation(CFGBlockSide Side)
const {
381 return Side == CBS_Entry ? EntryLoc : ExitLoc;
385 CFGBlockInfo(LocalVarContext EmptyCtx)
386 : EntryContext(EmptyCtx), ExitContext(EmptyCtx) {}
389 static CFGBlockInfo getEmptyBlockInfo(LocalVariableMap &M);
405class LocalVariableMap {
407 using Context = LocalVarContext;
413 struct VarDefinition {
415 friend class LocalVariableMap;
418 const NamedDecl *Dec;
421 const Expr *Exp =
nullptr;
424 unsigned DirectRef = 0;
427 unsigned CanonicalRef = 0;
432 bool isReference()
const {
return !Exp; }
434 void invalidateRef() { DirectRef = CanonicalRef = 0; }
438 VarDefinition(
const NamedDecl *D,
const Expr *E, Context
C)
439 : Dec(D), Exp(E), Ctx(
C) {}
442 VarDefinition(
const NamedDecl *D,
unsigned DirectRef,
unsigned CanonicalRef,
444 : Dec(D), DirectRef(DirectRef), CanonicalRef(CanonicalRef), Ctx(
C) {}
448 Context::Factory ContextFactory;
449 std::vector<VarDefinition> VarDefinitions;
450 std::vector<std::pair<const Stmt *, Context>> SavedContexts;
455 VarDefinitions.push_back(VarDefinition(
nullptr, 0, 0, getEmptyContext()));
459 const VarDefinition* lookup(
const NamedDecl *D, Context Ctx) {
460 const unsigned *i = Ctx.lookup(D);
463 assert(*i < VarDefinitions.size());
464 return &VarDefinitions[*i];
470 const Expr* lookupExpr(
const NamedDecl *D, Context &Ctx) {
471 const unsigned *P = Ctx.lookup(D);
477 if (VarDefinitions[i].Exp) {
478 Ctx = VarDefinitions[i].Ctx;
479 return VarDefinitions[i].Exp;
481 i = VarDefinitions[i].DirectRef;
486 Context getEmptyContext() {
return ContextFactory.getEmptyMap(); }
491 Context getNextContext(
unsigned &CtxIndex,
const Stmt *S, Context
C) {
492 if (SavedContexts[CtxIndex+1].first == S) {
494 Context
Result = SavedContexts[CtxIndex].second;
500 void dumpVarDefinitionName(
unsigned i) {
502 llvm::errs() <<
"Undefined";
505 const NamedDecl *
Dec = VarDefinitions[i].Dec;
507 llvm::errs() <<
"<<NULL>>";
510 Dec->printName(llvm::errs());
511 llvm::errs() <<
"." << i <<
" " << ((
const void*) Dec);
516 for (
unsigned i = 1, e = VarDefinitions.size(); i < e; ++i) {
517 const Expr *Exp = VarDefinitions[i].Exp;
518 unsigned Ref = VarDefinitions[i].DirectRef;
520 dumpVarDefinitionName(i);
521 llvm::errs() <<
" = ";
522 if (Exp) Exp->
dump();
524 dumpVarDefinitionName(Ref);
525 llvm::errs() <<
"\n";
531 void dumpContext(Context
C) {
532 for (Context::iterator I =
C.begin(), E =
C.end(); I != E; ++I) {
533 const NamedDecl *D = I.getKey();
535 llvm::errs() <<
" -> ";
536 dumpVarDefinitionName(I.getData());
537 llvm::errs() <<
"\n";
542 void traverseCFG(CFG *CFGraph,
const PostOrderCFGView *SortedGraph,
543 std::vector<CFGBlockInfo> &BlockInfo);
546 friend class VarMapBuilder;
549 unsigned getCanonicalDefinitionID(
unsigned ID)
const {
550 while (ID > 0 && VarDefinitions[ID].isReference())
551 ID = VarDefinitions[
ID].CanonicalRef;
556 unsigned getContextIndex() {
return SavedContexts.size()-1; }
559 void saveContext(
const Stmt *S, Context
C) {
560 SavedContexts.push_back(std::make_pair(S,
C));
565 Context addDefinition(
const NamedDecl *D,
const Expr *Exp, Context Ctx) {
566 assert(!Ctx.contains(D));
567 unsigned newID = VarDefinitions.size();
568 Context NewCtx = ContextFactory.add(Ctx, D, newID);
569 VarDefinitions.push_back(VarDefinition(D, Exp, Ctx));
574 Context addReference(
const NamedDecl *D,
unsigned Ref, Context Ctx) {
575 unsigned newID = VarDefinitions.size();
576 Context NewCtx = ContextFactory.add(Ctx, D, newID);
577 VarDefinitions.push_back(
578 VarDefinition(D, Ref, getCanonicalDefinitionID(Ref), Ctx));
584 Context updateDefinition(
const NamedDecl *D, Expr *Exp, Context Ctx) {
585 if (Ctx.contains(D)) {
586 unsigned newID = VarDefinitions.size();
587 Context NewCtx = ContextFactory.remove(Ctx, D);
588 NewCtx = ContextFactory.add(NewCtx, D, newID);
589 VarDefinitions.push_back(VarDefinition(D, Exp, Ctx));
597 Context clearDefinition(
const NamedDecl *D, Context Ctx) {
598 Context NewCtx = Ctx;
599 if (NewCtx.contains(D)) {
600 NewCtx = ContextFactory.remove(NewCtx, D);
601 NewCtx = ContextFactory.add(NewCtx, D, 0);
607 Context removeDefinition(
const NamedDecl *D, Context Ctx) {
608 Context NewCtx = Ctx;
609 if (NewCtx.contains(D)) {
610 NewCtx = ContextFactory.remove(NewCtx, D);
615 Context intersectContexts(Context C1, Context C2);
616 Context createReferenceContext(Context
C);
617 void intersectBackEdge(Context C1, Context C2);
623CFGBlockInfo CFGBlockInfo::getEmptyBlockInfo(LocalVariableMap &M) {
624 return CFGBlockInfo(M.getEmptyContext());
630class VarMapBuilder :
public ConstStmtVisitor<VarMapBuilder> {
632 LocalVariableMap* VMap;
633 LocalVariableMap::Context Ctx;
635 VarMapBuilder(LocalVariableMap *VM, LocalVariableMap::Context
C)
636 : VMap(VM), Ctx(
C) {}
638 void VisitDeclStmt(
const DeclStmt *S);
639 void VisitBinaryOperator(
const BinaryOperator *BO);
640 void VisitCallExpr(
const CallExpr *CE);
646void VarMapBuilder::VisitDeclStmt(
const DeclStmt *S) {
647 bool modifiedCtx =
false;
649 for (
const auto *D : DGrp) {
650 if (
const auto *VD = dyn_cast_or_null<VarDecl>(D)) {
651 const Expr *E = VD->getInit();
654 QualType T = VD->getType();
656 Ctx = VMap->addDefinition(VD, E, Ctx);
662 VMap->saveContext(S, Ctx);
666void VarMapBuilder::VisitBinaryOperator(
const BinaryOperator *BO) {
673 if (
const auto *DRE = dyn_cast<DeclRefExpr>(LHSExp)) {
674 const ValueDecl *VDec = DRE->getDecl();
675 if (Ctx.lookup(VDec)) {
677 Ctx = VMap->updateDefinition(VDec, BO->
getRHS(), Ctx);
680 Ctx = VMap->clearDefinition(VDec, Ctx);
681 VMap->saveContext(BO, Ctx);
687void VarMapBuilder::VisitCallExpr(
const CallExpr *CE) {
697 if (II->isStr(
"bind") || II->isStr(
"bind_front"))
703 for (
unsigned Idx = 0; Idx < CE->
getNumArgs(); ++Idx) {
709 QualType ParamType = PVD->
getType();
712 const ValueDecl *VDec =
nullptr;
715 if (
const auto *DRE = dyn_cast<DeclRefExpr>(Arg))
716 VDec = DRE->getDecl();
720 if (
const auto *UO = dyn_cast<UnaryOperator>(Arg)) {
721 if (UO->getOpcode() == UO_AddrOf) {
722 const Expr *SubE = UO->getSubExpr()->IgnoreParenCasts();
723 if (
const auto *DRE = dyn_cast<DeclRefExpr>(SubE))
724 VDec = DRE->getDecl();
730 Ctx = VMap->clearDefinition(VDec, Ctx);
734 VMap->saveContext(CE, Ctx);
740LocalVariableMap::Context
741LocalVariableMap::intersectContexts(Context C1, Context C2) {
743 for (
const auto &P : C1) {
744 const NamedDecl *
Dec = P.first;
745 const unsigned *I2 = C2.lookup(Dec);
749 }
else if (getCanonicalDefinitionID(P.second) !=
750 getCanonicalDefinitionID(*I2)) {
762LocalVariableMap::Context LocalVariableMap::createReferenceContext(Context
C) {
763 Context
Result = getEmptyContext();
764 for (
const auto &P :
C)
772void LocalVariableMap::intersectBackEdge(Context C1, Context C2) {
773 for (
const auto &P : C1) {
774 const unsigned I1 = P.second;
775 VarDefinition *VDef = &VarDefinitions[I1];
776 assert(VDef->isReference());
778 const unsigned *I2 = C2.lookup(P.first);
781 VDef->invalidateRef();
787 if (VDef->CanonicalRef != getCanonicalDefinitionID(*I2))
788 VDef->invalidateRef();
829void LocalVariableMap::traverseCFG(CFG *CFGraph,
830 const PostOrderCFGView *SortedGraph,
831 std::vector<CFGBlockInfo> &BlockInfo) {
832 PostOrderCFGView::CFGBlockSet VisitedBlocks(CFGraph);
834 for (
const auto *CurrBlock : *SortedGraph) {
835 unsigned CurrBlockID = CurrBlock->getBlockID();
836 CFGBlockInfo *CurrBlockInfo = &BlockInfo[CurrBlockID];
838 VisitedBlocks.insert(CurrBlock);
841 bool HasBackEdges =
false;
844 PE = CurrBlock->pred_end(); PI != PE; ++PI) {
846 if (*PI ==
nullptr || !VisitedBlocks.alreadySet(*PI)) {
851 unsigned PrevBlockID = (*PI)->getBlockID();
852 CFGBlockInfo *PrevBlockInfo = &BlockInfo[PrevBlockID];
855 CurrBlockInfo->EntryContext = PrevBlockInfo->ExitContext;
859 CurrBlockInfo->EntryContext =
860 intersectContexts(CurrBlockInfo->EntryContext,
861 PrevBlockInfo->ExitContext);
868 CurrBlockInfo->EntryContext =
869 createReferenceContext(CurrBlockInfo->EntryContext);
872 saveContext(
nullptr, CurrBlockInfo->EntryContext);
873 CurrBlockInfo->EntryIndex = getContextIndex();
876 VarMapBuilder VMapBuilder(
this, CurrBlockInfo->EntryContext);
877 for (
const auto &BI : *CurrBlock) {
878 switch (BI.getKind()) {
880 CFGStmt CS = BI.castAs<CFGStmt>();
881 VMapBuilder.Visit(CS.
getStmt());
888 CurrBlockInfo->ExitContext = VMapBuilder.Ctx;
892 SE = CurrBlock->succ_end(); SI != SE; ++SI) {
894 if (*SI ==
nullptr || !VisitedBlocks.alreadySet(*SI))
897 CFGBlock *FirstLoopBlock = *SI;
898 Context LoopBegin = BlockInfo[FirstLoopBlock->
getBlockID()].EntryContext;
899 Context LoopEnd = CurrBlockInfo->ExitContext;
900 intersectBackEdge(LoopBegin, LoopEnd);
906 saveContext(
nullptr, BlockInfo[exitID].ExitContext);
913 std::vector<CFGBlockInfo> &BlockInfo) {
914 for (
const auto *CurrBlock : *SortedGraph) {
915 CFGBlockInfo *CurrBlockInfo = &BlockInfo[CurrBlock->getBlockID()];
919 if (
const Stmt *S = CurrBlock->getTerminatorStmt()) {
920 CurrBlockInfo->EntryLoc = CurrBlockInfo->ExitLoc = S->
getBeginLoc();
923 BE = CurrBlock->rend(); BI != BE; ++BI) {
925 if (std::optional<CFGStmt> CS = BI->getAs<
CFGStmt>()) {
926 CurrBlockInfo->ExitLoc = CS->getStmt()->getBeginLoc();
932 if (CurrBlockInfo->ExitLoc.
isValid()) {
935 for (
const auto &BI : *CurrBlock) {
937 if (std::optional<CFGStmt> CS = BI.getAs<
CFGStmt>()) {
938 CurrBlockInfo->EntryLoc = CS->getStmt()->getBeginLoc();
942 }
else if (CurrBlock->pred_size() == 1 && *CurrBlock->pred_begin() &&
943 CurrBlock != &CFGraph->
getExit()) {
946 CurrBlockInfo->EntryLoc = CurrBlockInfo->ExitLoc =
947 BlockInfo[(*CurrBlock->pred_begin())->getBlockID()].ExitLoc;
948 }
else if (CurrBlock->succ_size() == 1 && *CurrBlock->succ_begin()) {
951 CurrBlockInfo->EntryLoc = CurrBlockInfo->ExitLoc =
952 BlockInfo[(*CurrBlock->succ_begin())->getBlockID()].EntryLoc;
959class LockableFactEntry final :
public FactEntry {
964 unsigned int ReentrancyDepth = 0;
966 LockableFactEntry(
const CapabilityExpr &CE,
LockKind LK, SourceLocation Loc,
968 : FactEntry(Lockable, CE, LK, Loc, Src) {}
971 static LockableFactEntry *
create(llvm::BumpPtrAllocator &Alloc,
972 const LockableFactEntry &
Other) {
976 static LockableFactEntry *
create(llvm::BumpPtrAllocator &Alloc,
977 const CapabilityExpr &CE,
LockKind LK,
979 SourceKind Src = Acquired) {
980 return new (
Alloc) LockableFactEntry(CE, LK, Loc, Src);
983 unsigned int getReentrancyDepth()
const {
return ReentrancyDepth; }
986 handleRemovalFromIntersection(
const FactSet &FSet, FactManager &FactMan,
988 ThreadSafetyHandler &Handler)
const override {
989 if (!asserted() && !negative() && !isUniversal()) {
995 void handleLock(FactSet &FSet, FactManager &FactMan,
const FactEntry &entry,
996 ThreadSafetyHandler &Handler)
const override {
997 if (
const FactEntry *RFact = tryReenter(FactMan, entry.kind())) {
999 FSet.replaceLock(FactMan, entry, RFact);
1006 void handleUnlock(FactSet &FSet, FactManager &FactMan,
1007 const CapabilityExpr &Cp, SourceLocation UnlockLoc,
1009 ThreadSafetyHandler &Handler)
const override {
1010 FSet.removeLock(FactMan, Cp);
1012 if (
const FactEntry *RFact = leaveReentrant(FactMan)) {
1014 FSet.addLock(FactMan, RFact);
1016 FSet.addLock(FactMan, FactMan.createFact<LockableFactEntry>(
1023 const FactEntry *tryReenter(FactManager &FactMan,
1027 if (
kind() != ReenterKind)
1029 auto *NewFact = FactMan.createFact<LockableFactEntry>(*this);
1030 NewFact->ReentrancyDepth++;
1036 const FactEntry *leaveReentrant(FactManager &FactMan)
const {
1037 if (!ReentrancyDepth)
1039 assert(reentrant());
1040 auto *NewFact = FactMan.createFact<LockableFactEntry>(*this);
1041 NewFact->ReentrancyDepth--;
1045 static bool classof(
const FactEntry *A) {
1046 return A->getFactEntryKind() == Lockable;
1050enum UnderlyingCapabilityKind {
1053 UCK_ReleasedExclusive,
1056struct UnderlyingCapability {
1058 UnderlyingCapabilityKind Kind;
1061class ScopedLockableFactEntry final
1063 private llvm::TrailingObjects<ScopedLockableFactEntry,
1064 UnderlyingCapability> {
1065 friend TrailingObjects;
1068 const unsigned ManagedCapacity;
1069 unsigned ManagedSize = 0;
1071 ScopedLockableFactEntry(
const CapabilityExpr &CE, SourceLocation Loc,
1072 SourceKind Src,
unsigned ManagedCapacity)
1073 : FactEntry(ScopedLockable, CE,
LK_Exclusive, Loc, Src),
1074 ManagedCapacity(ManagedCapacity) {}
1076 void addManaged(
const CapabilityExpr &M, UnderlyingCapabilityKind UCK) {
1077 assert(ManagedSize < ManagedCapacity);
1078 new (getTrailingObjects() + ManagedSize) UnderlyingCapability{M, UCK};
1082 ArrayRef<UnderlyingCapability> getManaged()
const {
1083 return getTrailingObjects(ManagedSize);
1087 static ScopedLockableFactEntry *
create(llvm::BumpPtrAllocator &Alloc,
1088 const CapabilityExpr &CE,
1089 SourceLocation Loc, SourceKind Src,
1090 unsigned ManagedCapacity) {
1092 Alloc.Allocate(totalSizeToAlloc<UnderlyingCapability>(ManagedCapacity),
1093 alignof(ScopedLockableFactEntry));
1094 return new (
Storage) ScopedLockableFactEntry(CE, Loc, Src, ManagedCapacity);
1097 CapExprSet getUnderlyingMutexes()
const {
1098 CapExprSet UnderlyingMutexesSet;
1099 for (
const UnderlyingCapability &UnderlyingMutex : getManaged())
1100 UnderlyingMutexesSet.push_back(UnderlyingMutex.Cap);
1101 return UnderlyingMutexesSet;
1108 void addLock(
const CapabilityExpr &M) { addManaged(M, UCK_Acquired); }
1110 void addExclusiveUnlock(
const CapabilityExpr &M) {
1111 addManaged(M, UCK_ReleasedExclusive);
1114 void addSharedUnlock(
const CapabilityExpr &M) {
1115 addManaged(M, UCK_ReleasedShared);
1120 handleRemovalFromIntersection(
const FactSet &FSet, FactManager &FactMan,
1122 ThreadSafetyHandler &Handler)
const override {
1126 for (
const auto &UnderlyingMutex : getManaged()) {
1127 const auto *Entry = FSet.findLock(FactMan, UnderlyingMutex.Cap);
1128 if ((UnderlyingMutex.Kind == UCK_Acquired && Entry) ||
1129 (UnderlyingMutex.Kind != UCK_Acquired && !Entry)) {
1133 UnderlyingMutex.Cap.toString(), loc(),
1139 void handleLock(FactSet &FSet, FactManager &FactMan,
const FactEntry &entry,
1140 ThreadSafetyHandler &Handler)
const override {
1141 for (
const auto &UnderlyingMutex : getManaged()) {
1142 if (UnderlyingMutex.Kind == UCK_Acquired)
1143 lock(FSet, FactMan, UnderlyingMutex.Cap, entry.kind(), entry.loc(),
1146 unlock(FSet, FactMan, UnderlyingMutex.Cap, entry.loc(), &Handler);
1150 void handleUnlock(FactSet &FSet, FactManager &FactMan,
1151 const CapabilityExpr &Cp, SourceLocation UnlockLoc,
1153 ThreadSafetyHandler &Handler)
const override {
1154 assert(!Cp.
negative() &&
"Managing object cannot be negative.");
1155 for (
const auto &UnderlyingMutex : getManaged()) {
1158 ThreadSafetyHandler *TSHandler = FullyRemove ?
nullptr : &Handler;
1159 if (UnderlyingMutex.Kind == UCK_Acquired) {
1160 unlock(FSet, FactMan, UnderlyingMutex.Cap, UnlockLoc, TSHandler);
1162 LockKind kind = UnderlyingMutex.Kind == UCK_ReleasedShared
1165 lock(FSet, FactMan, UnderlyingMutex.Cap,
kind, UnlockLoc, TSHandler);
1169 FSet.removeLock(FactMan, Cp);
1172 static bool classof(
const FactEntry *A) {
1173 return A->getFactEntryKind() == ScopedLockable;
1177 void lock(FactSet &FSet, FactManager &FactMan,
const CapabilityExpr &Cp,
1179 ThreadSafetyHandler *Handler)
const {
1180 if (
const auto It = FSet.findLockIter(FactMan, Cp); It != FSet.end()) {
1182 if (
const FactEntry *RFact = Fact.tryReenter(FactMan,
kind)) {
1184 FSet.replaceLock(FactMan, It, RFact);
1185 }
else if (Handler) {
1189 FSet.removeLock(FactMan, !Cp);
1190 FSet.addLock(FactMan, FactMan.createFact<LockableFactEntry>(Cp,
kind, loc,
1195 void unlock(FactSet &FSet, FactManager &FactMan,
const CapabilityExpr &Cp,
1196 SourceLocation loc, ThreadSafetyHandler *Handler)
const {
1197 if (
const auto It = FSet.findLockIter(FactMan, Cp); It != FSet.end()) {
1199 if (
const FactEntry *RFact = Fact.leaveReentrant(FactMan)) {
1201 FSet.replaceLock(FactMan, It, RFact);
1207 FactMan.createFact<LockableFactEntry>(!Cp,
LK_Exclusive, loc));
1208 }
else if (Handler) {
1209 SourceLocation PrevLoc;
1210 if (
const FactEntry *Neg = FSet.findLock(FactMan, !Cp))
1211 PrevLoc =
Neg->loc();
1218class ThreadSafetyAnalyzer {
1219 friend class BuildLockset;
1220 friend class threadSafety::BeforeSet;
1222 llvm::BumpPtrAllocator Bpa;
1223 threadSafety::til::MemRegionRef Arena;
1224 threadSafety::SExprBuilder SxBuilder;
1226 ThreadSafetyHandler &Handler;
1227 const FunctionDecl *CurrentFunction;
1228 LocalVariableMap LocalVarMap;
1230 llvm::SmallDenseMap<const Expr *, til::LiteralPtr *> ConstructedObjects;
1231 FactManager FactMan;
1232 std::vector<CFGBlockInfo> BlockInfo;
1234 BeforeSet *GlobalBeforeSet;
1237 ThreadSafetyAnalyzer(ThreadSafetyHandler &H, BeforeSet *Bset)
1238 : Arena(&Bpa), SxBuilder(Arena), Handler(H), FactMan(Bpa),
1239 GlobalBeforeSet(Bset) {}
1241 bool inCurrentScope(
const CapabilityExpr &CapE);
1243 void addLock(FactSet &FSet,
const FactEntry *Entry,
bool ReqAttr =
false);
1244 void removeLock(FactSet &FSet,
const CapabilityExpr &CapE,
1245 SourceLocation UnlockLoc,
bool FullyRemove,
LockKind Kind);
1247 template <
typename AttrType>
1248 void getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
const Expr *Exp,
1249 const NamedDecl *D, til::SExpr *
Self =
nullptr);
1251 template <
class AttrType>
1252 void getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
const Expr *Exp,
1254 const CFGBlock *PredBlock,
const CFGBlock *CurrBlock,
1255 Expr *BrE,
bool Neg);
1257 const CallExpr* getTrylockCallExpr(
const Stmt *
Cond, LocalVarContext
C,
1260 void getEdgeLockset(FactSet &
Result,
const FactSet &ExitSet,
1261 const CFGBlock* PredBlock,
1262 const CFGBlock *CurrBlock);
1264 bool join(
const FactEntry &A,
const FactEntry &B, SourceLocation JoinLoc,
1267 void intersectAndWarn(FactSet &EntrySet,
const FactSet &ExitSet,
1271 void intersectAndWarn(FactSet &EntrySet,
const FactSet &ExitSet,
1273 intersectAndWarn(EntrySet, ExitSet, JoinLoc, LEK, LEK);
1276 void runAnalysis(AnalysisDeclContext &AC);
1278 void warnIfMutexNotHeld(
const FactSet &FSet,
const NamedDecl *D,
1279 const Expr *Exp,
AccessKind AK, Expr *MutexExp,
1281 SourceLocation Loc);
1282 void warnIfMutexHeld(
const FactSet &FSet,
const NamedDecl *D,
const Expr *Exp,
1283 Expr *MutexExp, til::SExpr *
Self, SourceLocation Loc);
1285 void checkAccess(
const FactSet &FSet,
const Expr *Exp,
AccessKind AK,
1287 void checkPtAccess(
const FactSet &FSet,
const Expr *Exp,
AccessKind AK,
1295 ThreadSafetyAnalyzer& Analyzer) {
1297 BeforeInfo *Info =
nullptr;
1301 std::unique_ptr<BeforeInfo> &InfoPtr = BMap[Vd];
1303 InfoPtr.reset(
new BeforeInfo());
1304 Info = InfoPtr.get();
1307 for (
const auto *At : Vd->
attrs()) {
1308 switch (At->getKind()) {
1309 case attr::AcquiredBefore: {
1313 for (
const auto *Arg : A->args()) {
1315 Analyzer.SxBuilder.translateAttrExpr(Arg,
nullptr);
1317 Info->Vect.push_back(Cpvd);
1318 const auto It = BMap.find(Cpvd);
1319 if (It == BMap.end())
1325 case attr::AcquiredAfter: {
1329 for (
const auto *Arg : A->args()) {
1331 Analyzer.SxBuilder.translateAttrExpr(Arg,
nullptr);
1335 ArgInfo->Vect.push_back(Vd);
1348BeforeSet::BeforeInfo *
1350 ThreadSafetyAnalyzer &Analyzer) {
1351 auto It = BMap.find(Vd);
1352 BeforeInfo *Info =
nullptr;
1353 if (It == BMap.end())
1356 Info = It->second.get();
1357 assert(Info &&
"BMap contained nullptr?");
1363 const FactSet& FSet,
1364 ThreadSafetyAnalyzer& Analyzer,
1376 if (Info->Visited == 1)
1379 if (Info->Visited == 2)
1382 if (Info->Vect.empty())
1385 InfoVect.push_back(Info);
1387 for (
const auto *Vdb : Info->Vect) {
1389 if (FSet.containsMutexDecl(Analyzer.FactMan, Vdb)) {
1390 StringRef L1 = StartVd->
getName();
1391 StringRef L2 = Vdb->getName();
1392 Analyzer.Handler.handleLockAcquiredBefore(CapKind, L1, L2, Loc);
1396 if (CycMap.try_emplace(Vd,
true).second) {
1398 Analyzer.Handler.handleBeforeAfterCycle(L1, Vd->
getLocation());
1408 for (
auto *Info : InfoVect)
1414 if (
const auto *CE = dyn_cast<ImplicitCastExpr>(Exp))
1417 if (
const auto *DR = dyn_cast<DeclRefExpr>(Exp))
1418 return DR->getDecl();
1420 if (
const auto *ME = dyn_cast<MemberExpr>(Exp))
1421 return ME->getMemberDecl();
1426bool ThreadSafetyAnalyzer::inCurrentScope(
const CapabilityExpr &CapE) {
1427 const threadSafety::til::SExpr *SExp = CapE.
sexpr();
1428 assert(SExp &&
"Null expressions should be ignored");
1430 if (
const auto *LP = dyn_cast<til::LiteralPtr>(SExp)) {
1431 const ValueDecl *VD = LP->clangDecl();
1443 if (
const auto *P = dyn_cast<til::Project>(SExp)) {
1444 if (!isa_and_nonnull<CXXMethodDecl>(CurrentFunction))
1446 const ValueDecl *VD = P->clangDecl();
1455void ThreadSafetyAnalyzer::addLock(FactSet &FSet,
const FactEntry *Entry,
1457 if (Entry->shouldIgnore())
1460 if (!ReqAttr && !Entry->negative()) {
1462 CapabilityExpr NegC = !*Entry;
1463 const FactEntry *Nen = FSet.findLock(FactMan, NegC);
1465 FSet.removeLock(FactMan, NegC);
1468 if (inCurrentScope(*Entry) && !Entry->asserted() && !Entry->reentrant())
1475 if (!Entry->asserted() && !Entry->declared()) {
1477 Entry->loc(), Entry->getKind());
1480 if (
const FactEntry *Cp = FSet.findLock(FactMan, *Entry)) {
1481 if (!Entry->asserted())
1482 Cp->handleLock(FSet, FactMan, *Entry, Handler);
1484 FSet.addLock(FactMan, Entry);
1490void ThreadSafetyAnalyzer::removeLock(FactSet &FSet,
const CapabilityExpr &Cp,
1491 SourceLocation UnlockLoc,
1492 bool FullyRemove,
LockKind ReceivedKind) {
1496 const FactEntry *LDat = FSet.findLock(FactMan, Cp);
1498 SourceLocation PrevLoc;
1499 if (
const FactEntry *Neg = FSet.findLock(FactMan, !Cp))
1500 PrevLoc =
Neg->loc();
1508 if (ReceivedKind !=
LK_Generic && LDat->kind() != ReceivedKind) {
1510 ReceivedKind, LDat->loc(), UnlockLoc);
1513 LDat->handleUnlock(FSet, FactMan, Cp, UnlockLoc, FullyRemove, Handler);
1518template <
typename AttrType>
1519void ThreadSafetyAnalyzer::getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
1520 const Expr *Exp,
const NamedDecl *D,
1522 if (Attr->args_size() == 0) {
1531 Mtxs.push_back_nodup(Cp);
1535 for (
const auto *Arg : Attr->args()) {
1543 Mtxs.push_back_nodup(Cp);
1550template <
class AttrType>
1551void ThreadSafetyAnalyzer::getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
1552 const Expr *Exp,
const NamedDecl *D,
1553 const CFGBlock *PredBlock,
1554 const CFGBlock *CurrBlock,
1555 Expr *BrE,
bool Neg) {
1557 bool branch =
false;
1558 if (
const auto *BLE = dyn_cast_or_null<CXXBoolLiteralExpr>(BrE))
1559 branch = BLE->getValue();
1560 else if (
const auto *ILE = dyn_cast_or_null<IntegerLiteral>(BrE))
1561 branch = ILE->getValue().getBoolValue();
1563 int branchnum = branch ? 0 : 1;
1565 branchnum = !branchnum;
1570 SE = PredBlock->
succ_end(); SI != SE && i < 2; ++SI, ++i) {
1571 if (*SI == CurrBlock && i == branchnum)
1572 getMutexIDs(Mtxs, Attr, Exp, D);
1580 }
else if (
const auto *BLE = dyn_cast<CXXBoolLiteralExpr>(E)) {
1581 TCond = BLE->getValue();
1583 }
else if (
const auto *ILE = dyn_cast<IntegerLiteral>(E)) {
1584 TCond = ILE->getValue().getBoolValue();
1586 }
else if (
auto *CE = dyn_cast<ImplicitCastExpr>(E))
1594const CallExpr* ThreadSafetyAnalyzer::getTrylockCallExpr(
const Stmt *
Cond,
1600 if (
const auto *CallExp = dyn_cast<CallExpr>(
Cond)) {
1601 if (CallExp->getBuiltinCallee() == Builtin::BI__builtin_expect)
1602 return getTrylockCallExpr(CallExp->getArg(0),
C, Negate);
1605 else if (
const auto *PE = dyn_cast<ParenExpr>(
Cond))
1606 return getTrylockCallExpr(PE->getSubExpr(),
C, Negate);
1607 else if (
const auto *CE = dyn_cast<ImplicitCastExpr>(
Cond))
1608 return getTrylockCallExpr(CE->getSubExpr(),
C, Negate);
1609 else if (
const auto *FE = dyn_cast<FullExpr>(
Cond))
1610 return getTrylockCallExpr(FE->getSubExpr(),
C, Negate);
1611 else if (
const auto *DRE = dyn_cast<DeclRefExpr>(
Cond)) {
1612 const Expr *E = LocalVarMap.lookupExpr(DRE->getDecl(),
C);
1613 return getTrylockCallExpr(E,
C, Negate);
1615 else if (
const auto *UOP = dyn_cast<UnaryOperator>(
Cond)) {
1616 if (UOP->getOpcode() == UO_LNot) {
1618 return getTrylockCallExpr(UOP->getSubExpr(),
C, Negate);
1622 else if (
const auto *BOP = dyn_cast<BinaryOperator>(
Cond)) {
1623 if (BOP->getOpcode() == BO_EQ || BOP->getOpcode() == BO_NE) {
1624 if (BOP->getOpcode() == BO_NE)
1629 if (!TCond) Negate = !Negate;
1630 return getTrylockCallExpr(BOP->getLHS(),
C, Negate);
1634 if (!TCond) Negate = !Negate;
1635 return getTrylockCallExpr(BOP->getRHS(),
C, Negate);
1639 if (BOP->getOpcode() == BO_LAnd) {
1641 return getTrylockCallExpr(BOP->getRHS(),
C, Negate);
1643 if (BOP->getOpcode() == BO_LOr)
1644 return getTrylockCallExpr(BOP->getRHS(),
C, Negate);
1646 }
else if (
const auto *COP = dyn_cast<ConditionalOperator>(
Cond)) {
1650 if (TCond && !FCond)
1651 return getTrylockCallExpr(COP->getCond(),
C, Negate);
1652 if (!TCond && FCond) {
1654 return getTrylockCallExpr(COP->getCond(),
C, Negate);
1664void ThreadSafetyAnalyzer::getEdgeLockset(FactSet&
Result,
1665 const FactSet &ExitSet,
1666 const CFGBlock *PredBlock,
1667 const CFGBlock *CurrBlock) {
1675 bool Negate =
false;
1676 const CFGBlockInfo *PredBlockInfo = &BlockInfo[PredBlock->
getBlockID()];
1677 const LocalVarContext &LVarCtx = PredBlockInfo->ExitContext;
1682 [
this, Ctx = LVarCtx](
const NamedDecl *D)
mutable ->
const Expr * {
1683 return LocalVarMap.lookupExpr(D, Ctx);
1686 llvm::scope_exit Cleanup(
1689 const auto *Exp = getTrylockCallExpr(
Cond, LVarCtx, Negate);
1693 auto *FunDecl = dyn_cast_or_null<NamedDecl>(Exp->getCalleeDecl());
1694 if (!FunDecl || !FunDecl->hasAttr<TryAcquireCapabilityAttr>())
1697 CapExprSet ExclusiveLocksToAdd;
1698 CapExprSet SharedLocksToAdd;
1701 for (
const auto *Attr : FunDecl->specific_attrs<TryAcquireCapabilityAttr>())
1702 getMutexIDs(Attr->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, Attr,
1703 Exp, FunDecl, PredBlock, CurrBlock, Attr->getSuccessValue(),
1707 SourceLocation Loc = Exp->getExprLoc();
1708 for (
const auto &ExclusiveLockToAdd : ExclusiveLocksToAdd)
1709 addLock(
Result, FactMan.createFact<LockableFactEntry>(ExclusiveLockToAdd,
1711 for (
const auto &SharedLockToAdd : SharedLocksToAdd)
1712 addLock(
Result, FactMan.createFact<LockableFactEntry>(SharedLockToAdd,
1723class BuildLockset :
public ConstStmtVisitor<BuildLockset> {
1724 friend class ThreadSafetyAnalyzer;
1726 ThreadSafetyAnalyzer *Analyzer;
1729 const FactSet &FunctionExitFSet;
1730 LocalVariableMap::Context LVarCtx;
1735 void updateLocalVarMapCtx(
const Stmt *S) {
1737 LVarCtx = Analyzer->LocalVarMap.getNextContext(CtxIndex, S, LVarCtx);
1742 [
this, Ctx = LVarCtx](
const NamedDecl *D)
mutable ->
const Expr * {
1743 return Analyzer->LocalVarMap.lookupExpr(D, Ctx);
1749 void checkAccess(
const Expr *Exp,
AccessKind AK,
1751 Analyzer->checkAccess(FSet, Exp, AK, POK);
1753 void checkPtAccess(
const Expr *Exp,
AccessKind AK,
1755 Analyzer->checkPtAccess(FSet, Exp, AK, POK);
1758 void handleCall(
const Expr *Exp,
const NamedDecl *D,
1759 til::SExpr *
Self =
nullptr,
1760 SourceLocation Loc = SourceLocation());
1761 void examineArguments(
const FunctionDecl *FD,
1764 bool SkipFirstParam =
false);
1767 BuildLockset(ThreadSafetyAnalyzer *Anlzr, CFGBlockInfo &Info,
1768 const FactSet &FunctionExitFSet)
1769 : ConstStmtVisitor<BuildLockset>(), Analyzer(Anlzr), FSet(Info.EntrySet),
1770 FunctionExitFSet(FunctionExitFSet), LVarCtx(Info.EntryContext),
1771 CtxIndex(Info.EntryIndex) {
1772 updateLocalVarMapCtx(
nullptr);
1776 BuildLockset(
const BuildLockset &) =
delete;
1777 BuildLockset &operator=(
const BuildLockset &) =
delete;
1779 void VisitUnaryOperator(
const UnaryOperator *UO);
1780 void VisitBinaryOperator(
const BinaryOperator *BO);
1781 void VisitCastExpr(
const CastExpr *CE);
1782 void VisitCallExpr(
const CallExpr *Exp);
1783 void VisitCXXConstructExpr(
const CXXConstructExpr *Exp);
1784 void VisitDeclStmt(
const DeclStmt *S);
1785 void VisitMaterializeTemporaryExpr(
const MaterializeTemporaryExpr *Exp);
1786 void VisitReturnStmt(
const ReturnStmt *S);
1793void ThreadSafetyAnalyzer::warnIfMutexNotHeld(
1794 const FactSet &FSet,
const NamedDecl *D,
const Expr *Exp,
AccessKind AK,
1796 SourceLocation Loc) {
1808 const FactEntry *LDat = FSet.findLock(FactMan, !Cp);
1811 (!Cp).toString(), Loc);
1817 if (!inCurrentScope(Cp))
1821 LDat = FSet.findLock(FactMan, Cp);
1828 const FactEntry *LDat = FSet.findLockUniv(FactMan, Cp);
1829 bool NoError =
true;
1832 LDat = FSet.findPartialMatch(FactMan, Cp);
1835 std::string PartMatchStr = LDat->toString();
1836 StringRef PartMatchName(PartMatchStr);
1846 if (NoError && LDat && !LDat->isAtLeast(LK)) {
1852void ThreadSafetyAnalyzer::warnIfMutexHeld(
const FactSet &FSet,
1853 const NamedDecl *D,
const Expr *Exp,
1854 Expr *MutexExp, til::SExpr *
Self,
1855 SourceLocation Loc) {
1864 const FactEntry *LDat = FSet.findLock(FactMan, Cp);
1876void ThreadSafetyAnalyzer::checkAccess(
const FactSet &FSet,
const Expr *Exp,
1885 while (
const auto *DRE = dyn_cast<DeclRefExpr>(Exp)) {
1886 const auto *VD = dyn_cast<VarDecl>(DRE->getDecl()->getCanonicalDecl());
1888 if (
const auto *E = VD->getInit()) {
1899 if (
const auto *UO = dyn_cast<UnaryOperator>(Exp)) {
1901 if (UO->getOpcode() == UO_Deref)
1902 checkPtAccess(FSet, UO->getSubExpr(), AK, POK);
1906 if (
const auto *BO = dyn_cast<BinaryOperator>(Exp)) {
1909 return checkAccess(FSet, BO->
getLHS(), AK, POK);
1911 return checkPtAccess(FSet, BO->
getLHS(), AK, POK);
1917 if (
const auto *AE = dyn_cast<ArraySubscriptExpr>(Exp)) {
1918 checkPtAccess(FSet, AE->getLHS(), AK, POK);
1922 if (
const auto *ME = dyn_cast<MemberExpr>(Exp)) {
1924 checkPtAccess(FSet, ME->getBase(), AK, POK);
1926 checkAccess(FSet, ME->getBase(), AK, POK);
1933 if (D->
hasAttr<GuardedVarAttr>() && FSet.isEmpty(FactMan)) {
1938 warnIfMutexNotHeld(FSet, D, Exp, AK, I->getArg(), POK,
nullptr, Loc);
1943void ThreadSafetyAnalyzer::checkPtAccess(
const FactSet &FSet,
const Expr *Exp,
1949 if (
const auto *PE = dyn_cast<ParenExpr>(Exp)) {
1950 Exp = PE->getSubExpr();
1953 if (
const auto *CE = dyn_cast<CastExpr>(Exp)) {
1954 if (CE->getCastKind() == CK_ArrayToPointerDecay) {
1957 checkAccess(FSet, CE->getSubExpr(), AK, POK);
1960 Exp = CE->getSubExpr();
1966 if (
const auto *UO = dyn_cast<UnaryOperator>(Exp)) {
1967 if (UO->getOpcode() == UO_AddrOf) {
1970 checkAccess(FSet, UO->getSubExpr(), AK, POK);
1998 if (D->
hasAttr<PtGuardedVarAttr>() && FSet.isEmpty(FactMan))
2002 warnIfMutexNotHeld(FSet, D, Exp, AK, I->getArg(), PtPOK,
nullptr,
2021void BuildLockset::handleCall(
const Expr *Exp,
const NamedDecl *D,
2022 til::SExpr *
Self, SourceLocation Loc) {
2023 CapExprSet ExclusiveLocksToAdd, SharedLocksToAdd;
2024 CapExprSet ExclusiveLocksToRemove, SharedLocksToRemove, GenericLocksToRemove;
2025 CapExprSet ScopedReqsAndExcludes;
2033 til::LiteralPtr *Placeholder =
2035 [[maybe_unused]]
auto inserted =
2036 Analyzer->ConstructedObjects.insert({Exp, Placeholder});
2037 assert(inserted.second &&
"Are we visiting the same expression again?");
2040 if (TagT->getDecl()->getMostRecentDecl()->hasAttr<ScopedLockableAttr>())
2041 Scp = CapabilityExpr(Placeholder, Exp->
getType(),
false);
2048 for(
const Attr *At : D->
attrs()) {
2049 switch (At->getKind()) {
2052 case attr::AcquireCapability: {
2054 Analyzer->getMutexIDs(A->isShared() ? SharedLocksToAdd
2055 : ExclusiveLocksToAdd,
2063 case attr::AssertCapability: {
2065 CapExprSet AssertLocks;
2066 Analyzer->getMutexIDs(AssertLocks, A, Exp, D,
Self);
2067 for (
const auto &AssertLock : AssertLocks)
2069 FSet, Analyzer->FactMan.createFact<LockableFactEntry>(
2071 Loc, FactEntry::Asserted));
2077 case attr::ReleaseCapability: {
2080 Analyzer->getMutexIDs(GenericLocksToRemove, A, Exp, D,
Self);
2081 else if (A->isShared())
2082 Analyzer->getMutexIDs(SharedLocksToRemove, A, Exp, D,
Self);
2084 Analyzer->getMutexIDs(ExclusiveLocksToRemove, A, Exp, D,
Self);
2088 case attr::RequiresCapability: {
2090 for (
auto *Arg : A->args()) {
2091 Analyzer->warnIfMutexNotHeld(FSet, D, Exp,
2096 Analyzer->getMutexIDs(ScopedReqsAndExcludes, A, Exp, D,
Self);
2101 case attr::LocksExcluded: {
2103 for (
auto *Arg : A->args()) {
2104 Analyzer->warnIfMutexHeld(FSet, D, Exp, Arg,
Self, Loc);
2107 Analyzer->getMutexIDs(ScopedReqsAndExcludes, A, Exp, D,
Self);
2118 std::optional<CallExpr::const_arg_range> Args;
2120 if (
const auto *CE = dyn_cast<CallExpr>(Exp))
2121 Args = CE->arguments();
2122 else if (
const auto *CE = dyn_cast<CXXConstructExpr>(Exp))
2123 Args = CE->arguments();
2125 llvm_unreachable(
"Unknown call kind");
2127 const auto *CalledFunction = dyn_cast<FunctionDecl>(D);
2128 if (CalledFunction && Args.has_value()) {
2129 for (
auto [Param, Arg] : zip(CalledFunction->parameters(), *Args)) {
2130 CapExprSet DeclaredLocks;
2131 for (
const Attr *At : Param->attrs()) {
2132 switch (At->getKind()) {
2133 case attr::AcquireCapability: {
2135 Analyzer->getMutexIDs(A->isShared() ? SharedLocksToAdd
2136 : ExclusiveLocksToAdd,
2138 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2142 case attr::ReleaseCapability: {
2145 Analyzer->getMutexIDs(GenericLocksToRemove, A, Exp, D,
Self);
2146 else if (A->isShared())
2147 Analyzer->getMutexIDs(SharedLocksToRemove, A, Exp, D,
Self);
2149 Analyzer->getMutexIDs(ExclusiveLocksToRemove, A, Exp, D,
Self);
2150 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2154 case attr::RequiresCapability: {
2156 for (
auto *Arg : A->args())
2157 Analyzer->warnIfMutexNotHeld(FSet, D, Exp,
2160 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2164 case attr::LocksExcluded: {
2166 for (
auto *Arg : A->args())
2167 Analyzer->warnIfMutexHeld(FSet, D, Exp, Arg,
Self, Loc);
2168 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2176 if (DeclaredLocks.empty())
2178 CapabilityExpr Cp(Analyzer->SxBuilder.
translate(Arg,
nullptr),
2179 StringRef(
"mutex"),
false,
false);
2180 if (
const auto *CBTE = dyn_cast<CXXBindTemporaryExpr>(Arg->
IgnoreCasts());
2182 if (
auto Object = Analyzer->ConstructedObjects.find(CBTE->getSubExpr());
2183 Object != Analyzer->ConstructedObjects.end())
2184 Cp = CapabilityExpr(
Object->second, StringRef(
"mutex"),
false,
2187 const FactEntry *Fact = FSet.findLock(Analyzer->FactMan, Cp);
2195 for (
const auto &[a,
b] :
2196 zip_longest(DeclaredLocks, Scope->getUnderlyingMutexes())) {
2197 if (!a.has_value()) {
2200 b.value().getKind(),
b.value().toString());
2201 }
else if (!
b.has_value()) {
2204 a.value().getKind(), a.value().toString());
2205 }
else if (!a.value().equals(
b.value())) {
2208 a.value().getKind(), a.value().toString(),
b.value().toString());
2217 for (
const auto &M : ExclusiveLocksToRemove)
2218 Analyzer->removeLock(FSet, M, Loc, Dtor,
LK_Exclusive);
2219 for (
const auto &M : SharedLocksToRemove)
2220 Analyzer->removeLock(FSet, M, Loc, Dtor,
LK_Shared);
2221 for (
const auto &M : GenericLocksToRemove)
2222 Analyzer->removeLock(FSet, M, Loc, Dtor,
LK_Generic);
2225 FactEntry::SourceKind Source =
2226 !Scp.
shouldIgnore() ? FactEntry::Managed : FactEntry::Acquired;
2227 for (
const auto &M : ExclusiveLocksToAdd)
2228 Analyzer->addLock(FSet, Analyzer->FactMan.createFact<LockableFactEntry>(
2230 for (
const auto &M : SharedLocksToAdd)
2231 Analyzer->addLock(FSet, Analyzer->FactMan.createFact<LockableFactEntry>(
2236 auto *ScopedEntry = Analyzer->FactMan.createFact<ScopedLockableFactEntry>(
2237 Scp, Loc, FactEntry::Acquired,
2238 ExclusiveLocksToAdd.size() + SharedLocksToAdd.size() +
2239 ScopedReqsAndExcludes.size() + ExclusiveLocksToRemove.size() +
2240 SharedLocksToRemove.size());
2241 for (
const auto &M : ExclusiveLocksToAdd)
2242 ScopedEntry->addLock(M);
2243 for (
const auto &M : SharedLocksToAdd)
2244 ScopedEntry->addLock(M);
2245 for (
const auto &M : ScopedReqsAndExcludes)
2246 ScopedEntry->addLock(M);
2247 for (
const auto &M : ExclusiveLocksToRemove)
2248 ScopedEntry->addExclusiveUnlock(M);
2249 for (
const auto &M : SharedLocksToRemove)
2250 ScopedEntry->addSharedUnlock(M);
2251 Analyzer->addLock(FSet, ScopedEntry);
2258void BuildLockset::VisitUnaryOperator(
const UnaryOperator *UO) {
2274void BuildLockset::VisitBinaryOperator(
const BinaryOperator *BO) {
2278 updateLocalVarMapCtx(BO);
2284void BuildLockset::VisitCastExpr(
const CastExpr *CE) {
2290void BuildLockset::examineArguments(
const FunctionDecl *FD,
2293 bool SkipFirstParam) {
2303 if (FD->
hasAttr<NoThreadSafetyAnalysisAttr>())
2306 const ArrayRef<ParmVarDecl *> Params = FD->
parameters();
2307 auto Param = Params.begin();
2312 for (
auto Arg = ArgBegin; Param != Params.end() && Arg != ArgEnd;
2314 QualType Qt = (*Param)->getType();
2322void BuildLockset::VisitCallExpr(
const CallExpr *Exp) {
2323 if (
const auto *CE = dyn_cast<CXXMemberCallExpr>(Exp)) {
2324 const auto *ME = dyn_cast<MemberExpr>(CE->getCallee());
2326 const CXXMethodDecl *MD = CE->getMethodDecl();
2329 if (ME->isArrow()) {
2331 checkPtAccess(CE->getImplicitObjectArgument(),
AK_Read);
2334 checkAccess(CE->getImplicitObjectArgument(),
AK_Read);
2338 examineArguments(CE->getDirectCallee(), CE->arg_begin(), CE->arg_end());
2339 }
else if (
const auto *OE = dyn_cast<CXXOperatorCallExpr>(Exp)) {
2347 case OO_PercentEqual:
2351 case OO_LessLessEqual:
2352 case OO_GreaterGreaterEqual:
2353 checkAccess(OE->getArg(1),
AK_Read);
2363 if (!(OEop == OO_Star && OE->getNumArgs() > 1)) {
2365 checkPtAccess(OE->getArg(0),
AK_Read);
2370 const Expr *Obj = OE->getArg(0);
2375 const FunctionDecl *FD = OE->getDirectCallee();
2376 examineArguments(FD, std::next(OE->arg_begin()), OE->arg_end(),
2385 auto *D = dyn_cast_or_null<NamedDecl>(Exp->
getCalleeDecl());
2388 updateLocalVarMapCtx(Exp);
2391void BuildLockset::VisitCXXConstructExpr(
const CXXConstructExpr *Exp) {
2394 const Expr* Source = Exp->
getArg(0);
2404 if (
auto *CE = dyn_cast<CastExpr>(E))
2407 if (
auto *CE = dyn_cast<CastExpr>(E))
2408 if (CE->
getCastKind() == CK_ConstructorConversion ||
2411 if (
auto *BTE = dyn_cast<CXXBindTemporaryExpr>(E))
2412 E = BTE->getSubExpr();
2416void BuildLockset::VisitDeclStmt(
const DeclStmt *S) {
2418 if (
auto *VD = dyn_cast_or_null<VarDecl>(D)) {
2419 const Expr *E = VD->getInit();
2425 if (
auto *EWC = dyn_cast<ExprWithCleanups>(E))
2429 if (
auto Object = Analyzer->ConstructedObjects.find(E);
2430 Object != Analyzer->ConstructedObjects.end()) {
2431 Object->second->setClangDecl(VD);
2432 Analyzer->ConstructedObjects.erase(Object);
2436 updateLocalVarMapCtx(S);
2439void BuildLockset::VisitMaterializeTemporaryExpr(
2440 const MaterializeTemporaryExpr *Exp) {
2442 if (
auto Object = Analyzer->ConstructedObjects.find(
2444 Object != Analyzer->ConstructedObjects.end()) {
2445 Object->second->setClangDecl(ExtD);
2446 Analyzer->ConstructedObjects.erase(Object);
2451void BuildLockset::VisitReturnStmt(
const ReturnStmt *S) {
2452 if (Analyzer->CurrentFunction ==
nullptr)
2460 const QualType ReturnType =
2463 Analyzer->checkAccess(
2464 FunctionExitFSet, RetVal,
2468 Analyzer->checkPtAccess(
2469 FunctionExitFSet, RetVal,
2479bool ThreadSafetyAnalyzer::join(
const FactEntry &A,
const FactEntry &B,
2480 SourceLocation JoinLoc,
2484 unsigned int ReentrancyDepthA = 0;
2485 unsigned int ReentrancyDepthB = 0;
2487 if (
const auto *LFE = dyn_cast<LockableFactEntry>(&A))
2488 ReentrancyDepthA = LFE->getReentrancyDepth();
2489 if (
const auto *LFE = dyn_cast<LockableFactEntry>(&B))
2490 ReentrancyDepthB = LFE->getReentrancyDepth();
2492 if (ReentrancyDepthA != ReentrancyDepthB) {
2498 return CanModify && ReentrancyDepthA < ReentrancyDepthB;
2499 }
else if (A.kind() != B.kind()) {
2502 if ((A.managed() || A.asserted()) && (B.managed() || B.asserted())) {
2504 bool ShouldTakeB = B.kind() ==
LK_Shared;
2505 if (CanModify || !ShouldTakeB)
2514 return CanModify && A.asserted() && !B.asserted();
2532void ThreadSafetyAnalyzer::intersectAndWarn(FactSet &EntrySet,
2533 const FactSet &ExitSet,
2534 SourceLocation JoinLoc,
2537 FactSet EntrySetOrig = EntrySet;
2540 for (
const auto &Fact : ExitSet) {
2541 const FactEntry &ExitFact = FactMan[Fact];
2543 FactSet::iterator EntryIt = EntrySet.findLockIter(FactMan, ExitFact);
2544 if (EntryIt != EntrySet.end()) {
2545 if (join(FactMan[*EntryIt], ExitFact, JoinLoc, EntryLEK))
2548 ExitFact.handleRemovalFromIntersection(ExitSet, FactMan, JoinLoc,
2554 for (
const auto &Fact : EntrySetOrig) {
2555 const FactEntry *EntryFact = &FactMan[Fact];
2556 const FactEntry *ExitFact = ExitSet.findLock(FactMan, *EntryFact);
2561 EntryFact->handleRemovalFromIntersection(EntrySetOrig, FactMan, JoinLoc,
2564 EntrySet.removeLock(FactMan, *EntryFact);
2577 if (std::optional<CFGStmt> S =
Last.getAs<
CFGStmt>()) {
2589void ThreadSafetyAnalyzer::runAnalysis(AnalysisDeclContext &AC) {
2592 threadSafety::CFGWalker walker;
2593 if (!walker.
init(AC))
2600 const NamedDecl *D = walker.
getDecl();
2601 CurrentFunction = dyn_cast<FunctionDecl>(D);
2603 if (D->
hasAttr<NoThreadSafetyAnalysisAttr>())
2618 CFGBlockInfo::getEmptyBlockInfo(LocalVarMap));
2624 PostOrderCFGView::CFGBlockSet VisitedBlocks(CFGraph);
2630 Initial.Reachable =
true;
2633 LocalVarMap.traverseCFG(CFGraph, SortedGraph, BlockInfo);
2638 CapExprSet ExclusiveLocksAcquired;
2639 CapExprSet SharedLocksAcquired;
2640 CapExprSet LocksReleased;
2645 if (!SortedGraph->
empty()) {
2647 FactSet &InitialLockset = Initial.EntrySet;
2649 CapExprSet ExclusiveLocksToAdd;
2650 CapExprSet SharedLocksToAdd;
2653 for (
const auto *Attr : D->
attrs()) {
2654 Loc = Attr->getLocation();
2655 if (
const auto *A = dyn_cast<RequiresCapabilityAttr>(Attr)) {
2656 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2658 }
else if (
const auto *A = dyn_cast<ReleaseCapabilityAttr>(Attr)) {
2661 if (A->args_size() == 0)
2663 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2665 getMutexIDs(LocksReleased, A,
nullptr, D);
2666 }
else if (
const auto *A = dyn_cast<AcquireCapabilityAttr>(Attr)) {
2667 if (A->args_size() == 0)
2669 getMutexIDs(A->isShared() ? SharedLocksAcquired
2670 : ExclusiveLocksAcquired,
2677 ArrayRef<ParmVarDecl *> Params;
2678 if (CurrentFunction)
2680 else if (
auto CurrentMethod = dyn_cast<ObjCMethodDecl>(D))
2681 Params = CurrentMethod->getCanonicalDecl()->parameters();
2683 llvm_unreachable(
"Unknown function kind");
2684 for (
const ParmVarDecl *Param : Params) {
2685 CapExprSet UnderlyingLocks;
2686 for (
const auto *Attr : Param->attrs()) {
2687 Loc = Attr->getLocation();
2688 if (
const auto *A = dyn_cast<ReleaseCapabilityAttr>(Attr)) {
2689 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2691 getMutexIDs(LocksReleased, A,
nullptr, Param);
2692 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2693 }
else if (
const auto *A = dyn_cast<RequiresCapabilityAttr>(Attr)) {
2694 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2696 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2697 }
else if (
const auto *A = dyn_cast<AcquireCapabilityAttr>(Attr)) {
2698 getMutexIDs(A->isShared() ? SharedLocksAcquired
2699 : ExclusiveLocksAcquired,
2701 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2702 }
else if (
const auto *A = dyn_cast<LocksExcludedAttr>(Attr)) {
2703 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2706 if (UnderlyingLocks.empty())
2711 auto *ScopedEntry = FactMan.createFact<ScopedLockableFactEntry>(
2712 Cp, Param->getLocation(), FactEntry::Declared,
2713 UnderlyingLocks.size());
2714 for (
const CapabilityExpr &M : UnderlyingLocks)
2715 ScopedEntry->addLock(M);
2716 addLock(InitialLockset, ScopedEntry,
true);
2720 for (
const auto &Mu : ExclusiveLocksToAdd) {
2721 const auto *Entry = FactMan.createFact<LockableFactEntry>(
2723 addLock(InitialLockset, Entry,
true);
2725 for (
const auto &Mu : SharedLocksToAdd) {
2726 const auto *Entry = FactMan.createFact<LockableFactEntry>(
2727 Mu,
LK_Shared, Loc, FactEntry::Declared);
2728 addLock(InitialLockset, Entry,
true);
2734 FactSet ExpectedFunctionExitSet = Initial.EntrySet;
2740 for (
const auto &Lock : ExclusiveLocksAcquired)
2741 ExpectedFunctionExitSet.addLock(
2742 FactMan, FactMan.createFact<LockableFactEntry>(Lock,
LK_Exclusive,
2744 for (
const auto &Lock : SharedLocksAcquired)
2745 ExpectedFunctionExitSet.addLock(
2746 FactMan, FactMan.createFact<LockableFactEntry>(Lock,
LK_Shared,
2748 for (
const auto &Lock : LocksReleased)
2749 ExpectedFunctionExitSet.removeLock(FactMan, Lock);
2751 for (
const auto *CurrBlock : *SortedGraph) {
2752 unsigned CurrBlockID = CurrBlock->
getBlockID();
2753 CFGBlockInfo *CurrBlockInfo = &BlockInfo[CurrBlockID];
2756 VisitedBlocks.insert(CurrBlock);
2771 bool LocksetInitialized =
false;
2773 PE = CurrBlock->
pred_end(); PI != PE; ++PI) {
2775 if (*PI ==
nullptr || !VisitedBlocks.alreadySet(*PI))
2778 unsigned PrevBlockID = (*PI)->getBlockID();
2779 CFGBlockInfo *PrevBlockInfo = &BlockInfo[PrevBlockID];
2786 CurrBlockInfo->Reachable =
true;
2788 FactSet PrevLockset;
2789 getEdgeLockset(PrevLockset, PrevBlockInfo->ExitSet, *PI, CurrBlock);
2791 if (!LocksetInitialized) {
2792 CurrBlockInfo->EntrySet = PrevLockset;
2793 LocksetInitialized =
true;
2799 CurrBlockInfo->EntrySet, PrevLockset, CurrBlockInfo->EntryLoc,
2800 isa_and_nonnull<ContinueStmt>((*PI)->getTerminatorStmt())
2807 if (!CurrBlockInfo->Reachable)
2810 BuildLockset LocksetBuilder(
this, *CurrBlockInfo, ExpectedFunctionExitSet);
2813 for (
const auto &BI : *CurrBlock) {
2814 switch (BI.getKind()) {
2816 CFGStmt CS = BI.castAs<CFGStmt>();
2817 LocksetBuilder.Visit(CS.
getStmt());
2822 CFGAutomaticObjDtor AD = BI.castAs<CFGAutomaticObjDtor>();
2828 if (isa_and_nonnull<ParmVarDecl>(AD.
getVarDecl()))
2830 if (!DD || !DD->hasAttrs())
2833 LocksetBuilder.handleCall(
2841 const CFGCleanupFunction &
CF = BI.castAs<CFGCleanupFunction>();
2842 LocksetBuilder.handleCall(
2843 nullptr,
CF.getFunctionDecl(),
2845 CF.getVarDecl()->getLocation());
2850 auto TD = BI.castAs<CFGTemporaryDtor>();
2854 if (
auto Object = ConstructedObjects.find(
2855 TD.getBindTemporaryExpr()->getSubExpr());
2856 Object != ConstructedObjects.end()) {
2860 LocksetBuilder.handleCall(
nullptr, DD,
Object->second,
2861 TD.getBindTemporaryExpr()->getEndLoc());
2862 ConstructedObjects.erase(Object);
2870 CurrBlockInfo->ExitSet = LocksetBuilder.FSet;
2877 SE = CurrBlock->succ_end(); SI != SE; ++SI) {
2879 if (*SI ==
nullptr || !VisitedBlocks.alreadySet(*SI))
2882 CFGBlock *FirstLoopBlock = *SI;
2883 CFGBlockInfo *PreLoop = &BlockInfo[FirstLoopBlock->
getBlockID()];
2884 CFGBlockInfo *LoopEnd = &BlockInfo[CurrBlockID];
2885 intersectAndWarn(PreLoop->EntrySet, LoopEnd->ExitSet, PreLoop->EntryLoc,
2891 if (!Final.Reachable)
2895 intersectAndWarn(ExpectedFunctionExitSet, Final.ExitSet, Final.ExitLoc,
2911 ThreadSafetyAnalyzer Analyzer(Handler, *BSet);
2912 Analyzer.runAnalysis(AC);
2926 llvm_unreachable(
"Unknown AccessKind");
This file defines AnalysisDeclContext, a class that manages the analysis context data for context sen...
Defines enum values for all the target-independent builtin functions.
static void dump(llvm::raw_ostream &OS, StringRef FunctionName, ArrayRef< CounterExpression > Expressions, ArrayRef< CounterMappingRegion > Regions)
static Decl::Kind getKind(const Decl *D)
Defines the C++ Decl subclasses, other than those for templates (found in DeclTemplate....
Defines the clang::Expr interface and subclasses for C++ expressions.
Forward-declares and imports various common LLVM datatypes that clang wants to use unqualified.
*collection of selector each with an associated kind and an ordered *collection of selectors A selector has a kind
Defines an enumeration for C++ overloaded operators.
static std::string toString(const clang::SanitizerSet &Sanitizers)
Produce a string containing comma-separated names of sanitizers in Sanitizers set.
Defines the clang::SourceLocation class and associated facilities.
Defines various enumerations that describe declaration and type specifiers.
static void warnInvalidLock(ThreadSafetyHandler &Handler, const Expr *MutexExp, const NamedDecl *D, const Expr *DeclExp, StringRef Kind)
Issue a warning about an invalid lock expression.
static bool getStaticBooleanValue(Expr *E, bool &TCond)
static bool neverReturns(const CFGBlock *B)
static void findBlockLocations(CFG *CFGraph, const PostOrderCFGView *SortedGraph, std::vector< CFGBlockInfo > &BlockInfo)
Find the appropriate source locations to use when producing diagnostics for each block in the CFG.
static const ValueDecl * getValueDecl(const Expr *Exp)
Gets the value decl pointer from DeclRefExprs or MemberExprs.
static const Expr * UnpackConstruction(const Expr *E)
C Language Family Type Representation.
AnalysisDeclContext contains the context data for the function, method or block under analysis.
ASTContext & getASTContext() const
static bool isAssignmentOp(Opcode Opc)
const VarDecl * getVarDecl() const
const Stmt * getTriggerStmt() const
Represents a single basic block in a source-level CFG.
bool hasNoReturnElement() const
ElementList::const_reverse_iterator const_reverse_iterator
succ_iterator succ_begin()
Stmt * getTerminatorStmt()
AdjacentBlocks::const_iterator const_pred_iterator
const Stmt * getTerminatorCondition(bool StripParens=true) const
pred_iterator pred_begin()
unsigned getBlockID() const
AdjacentBlocks::const_iterator const_succ_iterator
Represents a top-level expression in a basic block.
const CXXDestructorDecl * getDestructorDecl(ASTContext &astContext) const
const Stmt * getStmt() const
Represents a source-level, intra-procedural CFG that represents the control-flow of a Stmt.
unsigned getNumBlockIDs() const
Returns the total number of BlockIDs allocated (which start at 0).
Expr * getArg(unsigned Arg)
Return the specified argument.
CXXConstructorDecl * getConstructor() const
Get the constructor that this expression will (ultimately) call.
bool isCopyConstructor(unsigned &TypeQuals) const
Whether this constructor is a copy constructor (C++ [class.copy]p2, which can be used to copy the cla...
Expr * getArg(unsigned Arg)
getArg - Return the specified argument.
ConstExprIterator const_arg_iterator
FunctionDecl * getDirectCallee()
If the callee is a FunctionDecl, return it. Otherwise return null.
unsigned getNumArgs() const
getNumArgs - Return the number of actual arguments to this call.
CastKind getCastKind() const
const DeclGroupRef getDeclGroup() const
SourceLocation getBeginLoc() const LLVM_READONLY
llvm::iterator_range< specific_attr_iterator< T > > specific_attrs() const
SourceLocation getLocation() const
bool isDefinedOutsideFunctionOrMethod() const
isDefinedOutsideFunctionOrMethod - This predicate returns true if this scoped decl is defined outside...
DeclContext * getDeclContext()
This represents one expression.
Expr * IgnoreParenCasts() LLVM_READONLY
Skip past any parentheses and casts which might surround this expression until reaching a fixed point...
Expr * IgnoreParenImpCasts() LLVM_READONLY
Skip past any parentheses and implicit casts which might surround this expression until reaching a fi...
Expr * IgnoreImplicit() LLVM_READONLY
Skip past any implicit AST nodes which might surround this expression until reaching a fixed point.
Expr * IgnoreParens() LLVM_READONLY
Skip past any parentheses which might surround this expression until reaching a fixed point.
Expr * IgnoreCasts() LLVM_READONLY
Skip past any casts which might surround this expression until reaching a fixed point.
SourceLocation getExprLoc() const LLVM_READONLY
getExprLoc - Return the preferred location for the arrow when diagnosing a problem with a generic exp...
const ParmVarDecl * getParamDecl(unsigned i) const
QualType getReturnType() const
ArrayRef< ParmVarDecl * > parameters() const
FunctionDecl * getCanonicalDecl() override
Retrieves the "canonical" declaration of the given declaration.
unsigned getNumParams() const
Return the number of parameters this function must have based on its FunctionType.
Expr * getSubExpr() const
Retrieve the temporary-generating subexpression whose value will be materialized into a glvalue.
ValueDecl * getExtendingDecl()
Get the declaration which triggered the lifetime-extension of this temporary, if any.
This represents a decl that may have a name.
IdentifierInfo * getIdentifier() const
Get the identifier that names this declaration, if there is one.
StringRef getName() const
Get the name of identifier for this declaration as a StringRef.
std::string getNameAsString() const
Get a human-readable name for the declaration, even if it is one of the special kinds of names (C++ c...
virtual void printName(raw_ostream &OS, const PrintingPolicy &Policy) const
Pretty-print the unqualified name of this declaration.
bool isTrivialType(const ASTContext &Context) const
Return true if this is a trivial type per (C++0x [basic.types]p9)
QualType getCanonicalType() const
bool isConstQualified() const
Determine whether this type is const-qualified.
Encodes a location in the source.
bool isValid() const
Return true if this is a valid SourceLocation object.
Stmt - This represents one statement.
SourceLocation getEndLoc() const LLVM_READONLY
void dump() const
Dumps the specified AST fragment and all subtrees to llvm::errs().
bool isPointerType() const
bool isReferenceType() const
QualType getPointeeType() const
If this is a pointer, ObjC object pointer, or block pointer, this returns the respective pointee.
bool isLValueReferenceType() const
const T * getAs() const
Member-template getAs<specific type>'.
Expr * getSubExpr() const
Represent the declaration of a variable (in which case it is an lvalue) a function (in which case it ...
void checkBeforeAfter(const ValueDecl *Vd, const FactSet &FSet, ThreadSafetyAnalyzer &Analyzer, SourceLocation Loc, StringRef CapKind)
Return true if any mutexes in FSet are in the acquired_before set of Vd.
BeforeInfo * insertAttrExprs(const ValueDecl *Vd, ThreadSafetyAnalyzer &Analyzer)
Process acquired_before and acquired_after attributes on Vd.
BeforeInfo * getBeforeInfoForDecl(const ValueDecl *Vd, ThreadSafetyAnalyzer &Analyzer)
const PostOrderCFGView * getSortedGraph() const
const NamedDecl * getDecl() const
bool init(AnalysisDeclContext &AC)
const CFG * getGraph() const
bool shouldIgnore() const
bool equals(const CapabilityExpr &other) const
const til::SExpr * sexpr() const
std::string toString() const
const ValueDecl * valueDecl() const
StringRef getKind() const
CapabilityExpr translateAttrExpr(const Expr *AttrExp, const NamedDecl *D, const Expr *DeclExp, til::SExpr *Self=nullptr)
Translate a clang expression in an attribute to a til::SExpr.
void setLookupLocalVarExpr(std::function< const Expr *(const NamedDecl *)> F)
til::SExpr * translate(const Stmt *S, CallingContext *Ctx)
til::LiteralPtr * createThisPlaceholder()
til::SExpr * translateVariable(const VarDecl *VD, CallingContext *Ctx)
Handler class for thread safety warnings.
virtual ~ThreadSafetyHandler()
virtual void handleExpectMoreUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc, Name ScopeName, StringRef Kind, Name Expected)
Warn when we get fewer underlying mutexes than expected.
virtual void handleInvalidLockExp(SourceLocation Loc)
Warn about lock expressions which fail to resolve to lockable objects.
virtual void handleUnmatchedUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc, Name ScopeName, StringRef Kind, Name Expected, Name Actual)
Warn when an actual underlying mutex of a scoped lockable does not match the expected.
virtual void handleExpectFewerUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc, Name ScopeName, StringRef Kind, Name Actual)
Warn when we get more underlying mutexes than expected.
virtual void enterFunction(const FunctionDecl *FD)
Called by the analysis when starting analysis of a function.
virtual void handleIncorrectUnlockKind(StringRef Kind, Name LockName, LockKind Expected, LockKind Received, SourceLocation LocLocked, SourceLocation LocUnlock)
Warn about an unlock function call that attempts to unlock a lock with the incorrect lock kind.
virtual void handleMutexHeldEndOfScope(StringRef Kind, Name LockName, SourceLocation LocLocked, SourceLocation LocEndOfScope, LockErrorKind LEK, bool ReentrancyMismatch=false)
Warn about situations where a mutex is sometimes held and sometimes not.
virtual void leaveFunction(const FunctionDecl *FD)
Called by the analysis when finishing analysis of a function.
virtual void handleExclusiveAndShared(StringRef Kind, Name LockName, SourceLocation Loc1, SourceLocation Loc2)
Warn when a mutex is held exclusively and shared at the same point.
virtual void handleMutexNotHeld(StringRef Kind, const NamedDecl *D, ProtectedOperationKind POK, Name LockName, LockKind LK, SourceLocation Loc, Name *PossibleMatch=nullptr)
Warn when a protected operation occurs while the specific mutex protecting the operation is not locke...
virtual void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName, SourceLocation Loc)
Warn when a function is called while an excluded mutex is locked.
virtual void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK, AccessKind AK, SourceLocation Loc)
Warn when a protected operation occurs while no locks are held.
virtual void handleUnmatchedUnlock(StringRef Kind, Name LockName, SourceLocation Loc, SourceLocation LocPreviousUnlock)
Warn about unlock function calls that do not have a prior matching lock expression.
virtual void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg, SourceLocation Loc)
Warn when acquiring a lock that the negative capability is not held.
virtual void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked, SourceLocation LocDoubleLock)
Warn about lock function calls for locks which are already held.
internal::Matcher< T > traverse(TraversalKind TK, const internal::Matcher< T > &InnerMatcher)
Causes all nested matchers to be matched with the specified traversal kind.
@ CF
Indicates that the tracked object is a CF object.
bool Alloc(InterpState &S, CodePtr OpPC, const Descriptor *Desc)
bool Dec(InterpState &S, CodePtr OpPC, bool CanOverflow)
1) Pops a pointer from the stack 2) Load the value from the pointer 3) Writes the value decreased by ...
bool Neg(InterpState &S, CodePtr OpPC)
utils::ID< struct FactTag > FactID
std::unique_ptr< DiagnosticConsumer > create(StringRef OutputFile, DiagnosticOptions &DiagOpts, bool MergeChildRecords=false)
Returns a DiagnosticConsumer that serializes diagnostics to a bitcode file.
llvm::json::Object Object
bool matches(const til::SExpr *E1, const til::SExpr *E2)
LockKind getLockKindFromAccessKind(AccessKind AK)
Helper function that returns a LockKind required for the given level of access.
LockErrorKind
This enum distinguishes between different situations where we warn due to inconsistent locking.
@ LEK_NotLockedAtEndOfFunction
Expecting a capability to be held at the end of function.
@ LEK_LockedSomePredecessors
A capability is locked in some but not all predecessors of a CFGBlock.
@ LEK_LockedAtEndOfFunction
A capability is still locked at the end of a function.
@ LEK_LockedSomeLoopIterations
A capability is locked for some but not all loop iterations.
void threadSafetyCleanup(BeforeSet *Cache)
AccessKind
This enum distinguishes between different ways to access (read or write) a variable.
@ AK_Written
Writing a variable.
@ AK_Read
Reading a variable.
LockKind
This enum distinguishes between different kinds of lock actions.
@ LK_Shared
Shared/reader lock of a mutex.
@ LK_Exclusive
Exclusive/writer lock of a mutex.
@ LK_Generic
Can be either Shared or Exclusive.
void runThreadSafetyAnalysis(AnalysisDeclContext &AC, ThreadSafetyHandler &Handler, BeforeSet **Bset)
Check a function's CFG for thread-safety violations.
ProtectedOperationKind
This enum distinguishes between different kinds of operations that may need to be protected by locks.
@ POK_PtPassByRef
Passing a pt-guarded variable by reference.
@ POK_PassPointer
Passing pointer to a guarded variable.
@ POK_VarDereference
Dereferencing a variable (e.g. p in *p = 5;)
@ POK_PassByRef
Passing a guarded variable by reference.
@ POK_ReturnByRef
Returning a guarded variable by reference.
@ POK_PtPassPointer
Passing a pt-guarded pointer.
@ POK_PtReturnPointer
Returning a pt-guarded pointer.
@ POK_VarAccess
Reading or writing a variable (e.g. x in x = 5;)
@ POK_FunctionCall
Making a function call (e.g. fool())
@ POK_ReturnPointer
Returning pointer to a guarded variable.
@ POK_PtReturnByRef
Returning a pt-guarded variable by reference.
The JSON file list parser is used to communicate input to InstallAPI.
OverloadedOperatorKind
Enumeration specifying the different kinds of C++ overloaded operators.
bool isa(CodeGen::Address addr)
static bool classof(const OMPClause *T)
@ Self
'self' clause, allowed on Compute and Combined Constructs, plus 'update'.
nullptr
This class represents a compute construct, representing a 'Kind' of 'parallel', 'serial',...
@ Result
The result type of a method or function.
U cast(CodeGen::Address addr)
@ Other
Other implicit parameter.
int const char * function