#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/FormatVariadic.h"
#include "llvm/Support/raw_ostream.h"
assert(SM.getSLocEntry(TargetFile).isFile());
while (First.isMacroID() && Last.isMacroID()) {
  auto DecFirst = SM.getDecomposedLoc(First);
  auto DecLast = SM.getDecomposedLoc(Last);
  auto &ExpFirst = SM.getSLocEntry(DecFirst.first).getExpansion();
  auto &ExpLast = SM.getSLocEntry(DecLast.first).getExpansion();
  if (!ExpFirst.isMacroArgExpansion() || !ExpLast.isMacroArgExpansion())
  if (ExpFirst.getExpansionLocStart() != ExpLast.getExpansionLocStart())
      SM.getSLocEntry(SM.getFileID(ExpFirst.getExpansionLocStart()))
  if (ExpMacro.getExpansionLocStart().isMacroID())
  First = ExpFirst.getSpellingLoc().getLocWithOffset(DecFirst.second);
  Last = ExpLast.getSpellingLoc().getLocWithOffset(DecLast.second);
if (Bound.isInvalid() || !Bound.isMacroID())
auto DecBound = SM.getDecomposedLoc(Bound);
auto &ExpBound = SM.getSLocEntry(DecBound.first).getExpansion();
if (ExpBound.isMacroArgExpansion() &&
    ExpBound.getExpansionLocStart() == ExpFirst.getExpansionLocStart()) {
  Bound = ExpBound.getSpellingLoc().getLocWithOffset(DecBound.second);
while (Bound.isMacroID()) {
  SourceRange Exp = SM.getImmediateExpansionRange(Bound).getAsRange();
  if (Exp.getBegin() == ExpMacro.getExpansionLocStart()) {
auto DecFirst = SM.getDecomposedExpansionLoc(Candidate.getBegin());
auto DecLast = SM.getDecomposedLoc(Candidate.getEnd());
if (Candidate.isInvalid() || DecFirst.first != TargetFile ||
    DecLast.first != TargetFile)
auto Dec = SM.getDecomposedLoc(SM.getExpansionRange(Prev).getBegin());
if (Dec.first != DecFirst.first || Dec.second >= DecFirst.second)
if (Next.isValid()) {
  auto Dec = SM.getDecomposedLoc(SM.getExpansionRange(Next).getEnd());
  if (Dec.first != DecLast.first || Dec.second <= DecLast.second)
    : Location(Location), Length(Length), Kind(Kind) {
const char *Start = SM.getCharacterData(location(), &Invalid);
return llvm::StringRef(Start, length());
assert(location().isFileID() && "must be a spelled token");
unsigned StartOffset;
std::tie(File, StartOffset) = SM.getDecomposedLoc(location());
assert(F.file() == L.file() && "tokens from different files");
assert((F == L || F.endOffset() <= L.beginOffset()) && "wrong order of tokens");
return FileRange(F.file(), F.beginOffset(), L.endOffset());
return OS << T.str();
assert(File.isValid());
assert(BeginOffset <= EndOffset);
std::tie(File, Begin) = SM.getDecomposedLoc(BeginLoc);
End = Begin + Length;
assert(SM.getFileID(BeginLoc) == SM.getFileID(EndLoc));
assert(SM.getFileOffset(BeginLoc) <= SM.getFileOffset(EndLoc));
std::tie(File, Begin) = SM.getDecomposedLoc(BeginLoc);
End = SM.getFileOffset(EndLoc);
return OS << llvm::formatv("FileRange(file = {0}, offsets = {1}-{2})",
assert(End <= Text.size());
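// ---- Illustrative sketch (not part of Tokens.cpp) ----
// How the FileRange constructors checked by the asserts above are typically
// used to slice source text. SM and FID are assumed to be supplied by the
// caller; the offsets are arbitrary example values that must lie within the file.
#include "clang/Basic/SourceManager.h"
#include "clang/Tooling/Syntax/Tokens.h"
static llvm::StringRef firstTenChars(const clang::SourceManager &SM,
                                     clang::FileID FID) {
  // Half-open character range [0, 10) inside FID; EXPECTS Begin <= End.
  clang::syntax::FileRange R(FID, /*BeginOffset=*/0, /*EndOffset=*/10);
  return R.text(SM); // substring covered by the range
}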
if (!ExpandedTokIndex.empty())
ExpandedTokIndex.reserve(ExpandedTokens.size());
for (size_t I = 0, E = ExpandedTokens.size(); I != E; ++I) {
  ExpandedTokIndex[Loc] = I;
if (!ExpandedTokIndex.empty()) {
  const auto B = ExpandedTokIndex.find(R.getBegin());
  const auto E = ExpandedTokIndex.find(R.getEnd());
  if (B != ExpandedTokIndex.end() && E != ExpandedTokIndex.end()) {
    const Token *L = ExpandedTokens.data() + B->getSecond();
    const Token *R = ExpandedTokens.data() + E->getSecond() + 1;
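// ---- Illustrative sketch (not part of Tokens.cpp) ----
// The location-to-index cache built above makes expandedTokens(SourceRange) a
// fast lookup. Buffer is an assumed, already-populated TokenBuffer; R is a
// token range, e.g. an AST node's getSourceRange().
#include "clang/Tooling/Syntax/Tokens.h"
static llvm::ArrayRef<clang::syntax::Token>
tokensOf(clang::syntax::TokenBuffer &Buffer, clang::SourceRange R) {
  Buffer.indexExpandedTokens();    // optional; builds the location -> index cache
  return Buffer.expandedTokens(R); // expanded tokens covered by R
}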
std::pair<const syntax::Token *, const TokenBuffer::Mapping *>
TokenBuffer::spelledForExpandedToken(const syntax::Token *Expanded) const {
  assert(ExpandedTokens.data() <= Expanded &&
         Expanded < ExpandedTokens.data() + ExpandedTokens.size());
  auto FileIt = Files.find(
  assert(FileIt != Files.end() && "no file for an expanded token");
  const MarkedFile &File = FileIt->second;
  unsigned ExpandedIndex = Expanded - ExpandedTokens.data();
  auto It = llvm::partition_point(File.Mappings, [&](const Mapping &M) {
    return M.BeginExpanded <= ExpandedIndex;
  if (It == File.Mappings.begin()) {
    return {&File.SpelledTokens[ExpandedIndex - File.BeginExpanded],
  if (ExpandedIndex < It->EndExpanded)
    return {&File.SpelledTokens[It->BeginSpelled], &*It};
  &File.SpelledTokens[It->EndSpelled + (ExpandedIndex - It->EndExpanded)],
const TokenBuffer::Mapping *
TokenBuffer::mappingStartingBeforeSpelled(const MarkedFile &F,
  assert(F.SpelledTokens.data() <= Spelled);
  unsigned SpelledI = Spelled - F.SpelledTokens.data();
  assert(SpelledI < F.SpelledTokens.size());
  auto It = llvm::partition_point(F.Mappings, [SpelledI](const Mapping &M) {
    return M.BeginSpelled <= SpelledI;
  if (It == F.Mappings.begin())
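// ---- Illustrative sketch (not part of Tokens.cpp) ----
// mappingStartingBeforeSpelled relies on llvm::partition_point: given a
// predicate that holds for a prefix of a sorted range, it returns the first
// element for which the predicate is false. A minimal standalone example:
#include "llvm/ADT/STLExtras.h"
#include <vector>
static int lastValueAtMost(const std::vector<int> &Sorted, int X) {
  // First element > X; everything before it is <= X.
  auto It = llvm::partition_point(Sorted, [X](int V) { return V <= X; });
  return It == Sorted.begin() ? -1 : *(It - 1); // -1 marks "no such element"
}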
const auto &File = fileForSpelled(Spelled);
auto *FrontMapping = mappingStartingBeforeSpelled(File, &Spelled.front());
unsigned SpelledFrontI = &Spelled.front() - File.SpelledTokens.data();
assert(SpelledFrontI < File.SpelledTokens.size());
unsigned ExpandedBegin;
  ExpandedBegin = File.BeginExpanded + SpelledFrontI;
} else if (SpelledFrontI < FrontMapping->EndSpelled) {
  if (SpelledFrontI != FrontMapping->BeginSpelled) {
  ExpandedBegin = FrontMapping->BeginExpanded;
  FrontMapping->EndExpanded + (SpelledFrontI - FrontMapping->EndSpelled);
auto *BackMapping = mappingStartingBeforeSpelled(File, &Spelled.back());
unsigned SpelledBackI = &Spelled.back() - File.SpelledTokens.data();
unsigned ExpandedEnd;
  ExpandedEnd = File.BeginExpanded + SpelledBackI + 1;
} else if (SpelledBackI < BackMapping->EndSpelled) {
  if (SpelledBackI + 1 != BackMapping->EndSpelled) {
  ExpandedEnd = BackMapping->EndExpanded;
  BackMapping->EndExpanded + (SpelledBackI - BackMapping->EndSpelled) + 1;
assert(ExpandedBegin < ExpandedTokens.size());
assert(ExpandedEnd < ExpandedTokens.size());
if (ExpandedBegin == ExpandedEnd)
  ExpandedTokens.data() + ExpandedEnd)};
auto It = Files.find(FID);
assert(It != Files.end());
return It->second.SpelledTokens;
const auto *Tok = llvm::partition_point(
    [&](const syntax::Token &Tok) { return Tok.location() < Loc; });
std::string TokenBuffer::Mapping::str() const {
  llvm::formatv("spelled tokens: [{0},{1}), expanded tokens: [{2},{3})",
                BeginSpelled, EndSpelled, BeginExpanded, EndExpanded));
std::optional<llvm::ArrayRef<syntax::Token>>
if (Expanded.empty())
auto [FirstSpelled, FirstMapping] = spelledForExpandedToken(First);
auto [LastSpelled, LastMapping] = spelledForExpandedToken(Last);
if (FID != SourceMgr->getFileID(LastSpelled->location()))
const MarkedFile &File = Files.find(FID)->second;
if (FirstMapping && FirstMapping == LastMapping &&
    : (First - 1)->location();
    : (Last + 1)->location();
  First->location(), Last->location(), Prev, Next, FID, *SourceMgr);
if (Range.isInvalid())
return getTokensCovering(File.SpelledTokens, Range, *SourceMgr);
unsigned FirstExpanded = Expanded.begin() - ExpandedTokens.data();
unsigned LastExpanded = Expanded.end() - ExpandedTokens.data();
if (FirstMapping && FirstExpanded != FirstMapping->BeginExpanded)
if (LastMapping && LastMapping->EndExpanded != LastExpanded)
FirstMapping ? File.SpelledTokens.data() + FirstMapping->BeginSpelled
LastMapping ? File.SpelledTokens.data() + LastMapping->EndSpelled
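// ---- Illustrative sketch (not part of Tokens.cpp) ----
// Typical use of spelledForExpanded: map the expanded tokens of an AST node
// back to the tokens written in the file, e.g. to compute an editable range
// for a refactoring. Buffer and R (the node's source range) are assumed inputs.
#include "clang/Tooling/Syntax/Tokens.h"
#include <optional>
static std::optional<clang::syntax::FileRange>
editableRange(const clang::syntax::TokenBuffer &Buffer, clang::SourceRange R) {
  llvm::ArrayRef<clang::syntax::Token> Expanded = Buffer.expandedTokens(R);
  auto Spelled = Buffer.spelledForExpanded(Expanded);
  if (!Spelled || Spelled->empty())
    return std::nullopt; // range crosses a macro boundary that cannot be mapped
  return clang::syntax::Token::range(Buffer.sourceManager(), Spelled->front(),
                                     Spelled->back());
}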
const Mapping &M) const {
F.SpelledTokens.data() + M.EndSpelled);
E.Expanded = llvm::ArrayRef(ExpandedTokens.data() + M.BeginExpanded,
                            ExpandedTokens.data() + M.EndExpanded);
const TokenBuffer::MarkedFile &
assert(!Spelled.empty());
assert(Spelled.front().location().isFileID() && "not a spelled token");
auto FileIt = Files.find(SourceMgr->getFileID(Spelled.front().location()));
assert(FileIt != Files.end() && "file not tracked by token buffer");
const auto &File = FileIt->second;
assert(File.SpelledTokens.data() <= Spelled.data() &&
       (File.SpelledTokens.data() + File.SpelledTokens.size()) &&
       "Tokens not in spelled range");
auto T1 = Spelled.back().location();
auto T2 = File.SpelledTokens.back().location();
assert(T1 == T2 || sourceManager().isBeforeInTranslationUnit(T1, T2));
std::optional<TokenBuffer::Expansion>
const auto &File = fileForSpelled(*Spelled);
unsigned SpelledIndex = Spelled - File.SpelledTokens.data();
auto M = llvm::partition_point(File.Mappings, [&](const Mapping &M) {
  return M.BeginSpelled < SpelledIndex;
if (M == File.Mappings.end() || M->BeginSpelled != SpelledIndex)
return makeExpansion(File, *M);
const auto &File = fileForSpelled(Spelled);
unsigned SpelledBeginIndex = Spelled.begin() - File.SpelledTokens.data();
unsigned SpelledEndIndex = Spelled.end() - File.SpelledTokens.data();
auto M = llvm::partition_point(File.Mappings, [&](const Mapping &M) {
  return M.EndSpelled <= SpelledBeginIndex;
std::vector<TokenBuffer::Expansion> Expansions;
for (; M != File.Mappings.end() && M->BeginSpelled < SpelledEndIndex; ++M)
  Expansions.push_back(makeExpansion(File, *M));
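// ---- Illustrative sketch (not part of Tokens.cpp) ----
// expansionStartingAt/expansionsOverlapping let a client walk macro expansions
// by their spelled location. Buffer and a spelled token pointer Tok are assumed
// inputs; the helper reports how many expanded tokens the expansion produced.
#include "clang/Tooling/Syntax/Tokens.h"
static size_t expansionSize(const clang::syntax::TokenBuffer &Buffer,
                            const clang::syntax::Token *Tok) {
  if (auto E = Buffer.expansionStartingAt(Tok)) // Tok must be a spelled token
    return E->Expanded.size(); // tokens the macro/directive produced (may be 0)
  return 0;                    // Tok does not start a mapping
}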
auto *Right = llvm::partition_point(
bool AcceptRight = Right != Tokens.end() && Right->location() <= Loc;
Right != Tokens.begin() && (Right - 1)->endLocation() >= Loc;
Right + (AcceptRight ? 1 : 0));
Loc, Tokens.spelledTokens(Tokens.sourceManager().getFileID(Loc)));
if (Tok.kind() == tok::identifier)
Loc, Tokens.spelledTokens(Tokens.sourceManager().getFileID(Loc)));
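// ---- Illustrative sketch (not part of Tokens.cpp) ----
// The touching helpers above are what "go to definition"-style features use to
// find the identifier under a cursor. Buffer and Loc are assumed inputs.
#include "clang/Tooling/Syntax/Tokens.h"
static llvm::StringRef
identifierUnderCursor(const clang::syntax::TokenBuffer &Buffer,
                      clang::SourceLocation Loc) {
  const clang::syntax::Token *Tok =
      clang::syntax::spelledIdentifierTouching(Loc, Buffer);
  return Tok ? Tok->text(Buffer.sourceManager()) : llvm::StringRef();
}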
std::vector<const syntax::Token *>
auto FileIt = Files.find(FID);
assert(FileIt != Files.end() && "file not tracked by token buffer");
auto &File = FileIt->second;
std::vector<const syntax::Token *> Expansions;
auto &Spelled = File.SpelledTokens;
for (auto Mapping : File.Mappings) {
  if (Token->kind() == tok::TokenKind::identifier)
    Expansions.push_back(Token);
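// ---- Illustrative sketch (not part of Tokens.cpp) ----
// macroExpansions(FID) yields the spelled identifier token of each macro
// invocation in a file; a client might collect the invoked names like this.
// Buffer and FID are assumed inputs.
#include "clang/Tooling/Syntax/Tokens.h"
#include <string>
#include <vector>
static std::vector<std::string>
macroNames(const clang::syntax::TokenBuffer &Buffer, clang::FileID FID) {
  std::vector<std::string> Names;
  for (const clang::syntax::Token *Tok : Buffer.macroExpansions(FID))
    Names.push_back(Tok->text(Buffer.sourceManager()).str());
  return Names;
}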
std::vector<syntax::Token> Tokens;
if (T.getKind() == tok::raw_identifier && !T.needsCleaning() &&
T.setIdentifierInfo(&II);
auto SrcBuffer = SM.getBufferData(FR.file());
Lexer L(SM.getLocForStartOfFile(FR.file()), LO, SrcBuffer.data(),
        SrcBuffer.data() + SrcBuffer.size());
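// ---- Illustrative sketch (not part of Tokens.cpp) ----
// syntax::tokenize runs the raw lexer shown above over a whole file and
// returns its spelled tokens without preprocessing. SM, FID, and LangOpts are
// assumed inputs.
#include "clang/Tooling/Syntax/Tokens.h"
static size_t countRawTokens(clang::FileID FID, const clang::SourceManager &SM,
                             const clang::LangOptions &LangOpts) {
  std::vector<clang::syntax::Token> Toks =
      clang::syntax::tokenize(FID, SM, LangOpts);
  return Toks.size(); // comments and whitespace are not included
}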
const auto &SM = Collector->PP.getSourceManager();
if (!Range.getEnd().isFileID())
if (LastExpansionEnd.isValid() &&
    !SM.isBeforeInTranslationUnit(LastExpansionEnd, Range.getEnd()))
if (!Range.getBegin().isFileID()) {
  Range.setBegin(SM.getExpansionLoc(Range.getBegin()));
  assert(Collector->Expansions.count(Range.getBegin()) &&
         "Overlapping macros should have same expansion location");
Collector->Expansions[Range.getBegin()] = Range.getEnd();
LastExpansionEnd = Range.getEnd();
DEBUG_WITH_TYPE("collect-tokens", llvm::dbgs()
auto CB = std::make_unique<CollectPPExpansions>(*this);
this->Collector = CB.get();
Builder(std::vector<syntax::Token> Expanded, PPExpansions CollectedExpansions,
    : Result(SM), CollectedExpansions(std::move(CollectedExpansions)), SM(SM),
  Result.ExpandedTokens = std::move(Expanded);
assert(!Result.ExpandedTokens.empty());
assert(Result.ExpandedTokens.back().kind() == tok::eof);
buildSpelledTokens();
while (NextExpanded < Result.ExpandedTokens.size() - 1) {
  unsigned OldPosition = NextExpanded;
  if (NextExpanded == OldPosition)
    diagnoseAdvanceFailure();
for (const auto &File : Result.Files)
for (auto &pair : Result.Files) {
  auto &mappings = pair.second.Mappings;
  assert(llvm::is_sorted(mappings, [](const TokenBuffer::Mapping &M1,
                                      const TokenBuffer::Mapping &M2) {
    return M1.BeginSpelled < M2.BeginSpelled &&
           M1.EndSpelled < M2.EndSpelled &&
           M1.BeginExpanded < M2.BeginExpanded &&
           M1.EndExpanded < M2.EndExpanded;
return std::move(Result);
void discard(std::optional<FileID> Drain = std::nullopt) {
  Drain ? SM.getLocForEndOfFile(*Drain)
        : SM.getExpansionLoc(
              Result.ExpandedTokens[NextExpanded].location());
  const auto &SpelledTokens = Result.Files[File].SpelledTokens;
  auto &NextSpelled = this->NextSpelled[File];
  TokenBuffer::Mapping Mapping;
  Mapping.BeginSpelled = NextSpelled;
  Mapping.BeginExpanded = Mapping.EndExpanded =
      Drain ? Result.Files[*Drain].EndExpanded : NextExpanded;
  auto FlushMapping = [&, this] {
    Mapping.EndSpelled = NextSpelled;
    if (Mapping.BeginSpelled != Mapping.EndSpelled)
      Result.Files[File].Mappings.push_back(Mapping);
    Mapping.BeginSpelled = NextSpelled;
  while (NextSpelled < SpelledTokens.size() &&
         SpelledTokens[NextSpelled].location() < Target) {
    CollectedExpansions.lookup(SpelledTokens[NextSpelled].location());
    while (NextSpelled < SpelledTokens.size() &&
           SpelledTokens[NextSpelled].location() <= KnownEnd)
const syntax::Token &Tok = Result.ExpandedTokens[NextExpanded];
const auto &SpelledTokens = Result.Files[File].SpelledTokens;
auto &NextSpelled = this->NextSpelled[File];
while (NextSpelled < SpelledTokens.size() &&
       NextExpanded < Result.ExpandedTokens.size() &&
       SpelledTokens[NextSpelled].location() ==
           Result.ExpandedTokens[NextExpanded].location()) {
auto End = CollectedExpansions.lookup(Expansion);
assert(End.isValid() && "Macro expansion wasn't captured?");
TokenBuffer::Mapping Mapping;
Mapping.BeginExpanded = NextExpanded;
Mapping.BeginSpelled = NextSpelled;
while (NextSpelled < SpelledTokens.size() &&
       SpelledTokens[NextSpelled].location() <= End)
while (NextExpanded < Result.ExpandedTokens.size() &&
       Result.ExpandedTokens[NextExpanded].location()) == Expansion)
Mapping.EndExpanded = NextExpanded;
Mapping.EndSpelled = NextSpelled;
Result.Files[File].Mappings.push_back(Mapping);
void diagnoseAdvanceFailure() {
  for (unsigned I = (NextExpanded < 10) ? 0 : NextExpanded - 10;
       I < NextExpanded + 5 && I < Result.ExpandedTokens.size(); ++I) {
    (I == NextExpanded) ? "!! " : (I < NextExpanded) ? "ok " : "   ";
    llvm::errs() << L << Result.ExpandedTokens[I].dumpForTests(SM) << "\n";
  llvm_unreachable("Couldn't map expanded token to spelled tokens!");
void buildSpelledTokens() {
  for (unsigned I = 0; I < Result.ExpandedTokens.size(); ++I) {
    const auto &Tok = Result.ExpandedTokens[I];
    auto FID = SM.getFileID(SM.getExpansionLoc(Tok.location()));
    auto It = Result.Files.try_emplace(FID);
    TokenBuffer::MarkedFile &File = It.first->second;
    File.EndExpanded = Tok.kind() == tok::eof ? I : I + 1;
      File.BeginExpanded = I;
unsigned NextExpanded = 0;
llvm::DenseMap<FileID, unsigned> NextSpelled;
PPExpansions CollectedExpansions;
PP.setTokenWatcher(nullptr);
Collector->disable();
return Builder(std::move(Expanded), std::move(Expansions),
               PP.getSourceManager(), PP.getLangOpts())
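// ---- Illustrative sketch (not part of Tokens.cpp) ----
// End-to-end use of TokenCollector: install it before the preprocessor starts,
// parse, then consume() to obtain the TokenBuffer assembled by Builder above.
// "PP" is an assumed, fully configured Preprocessor; parsing itself is elided.
#include "clang/Lex/Preprocessor.h"
#include "clang/Tooling/Syntax/Tokens.h"
static clang::syntax::TokenBuffer collectTokens(clang::Preprocessor &PP) {
  clang::syntax::TokenCollector Collector(PP); // hooks token watcher + PPCallbacks
  // ... run the parser / preprocessor over the main file here ...
  return std::move(Collector).consume();       // finalize mappings, detach hooks
}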
return std::string(llvm::formatv("Token({0}, length = {1})",
return std::string(llvm::formatv("Token(`{0}`, {1}, length = {2})", text(SM),
auto PrintToken = [this](const syntax::Token &T) -> std::string {
  if (T.kind() == tok::eof)
  return std::string(T.text(*SourceMgr));
auto DumpTokens = [this, &PrintToken](llvm::raw_ostream &OS,
  if (Tokens.empty()) {
  OS << Tokens[0].text(*SourceMgr);
  for (unsigned I = 1; I < Tokens.size(); ++I) {
    if (Tokens[I].kind() == tok::eof)
    OS << " " << PrintToken(Tokens[I]);
llvm::raw_string_ostream OS(Dump);
OS << "expanded tokens:\n"
std::vector<FileID> Keys;
for (const auto &F : Files)
  Keys.push_back(F.first);
const MarkedFile &File = Files.find(ID)->second;
OS << llvm::formatv("file '{0}'\n", Entry->getName())
   << "  spelled tokens:\n"
DumpTokens(OS, File.SpelledTokens);
if (File.Mappings.empty()) {
  OS << "  no mappings.\n";
OS << "  mappings:\n";
for (auto &M : File.Mappings) {
  OS << llvm::formatv(
      "  ['{0}'_{1}, '{2}'_{3}) => ['{4}'_{5}, '{6}'_{7})\n",
      PrintToken(File.SpelledTokens[M.BeginSpelled]), M.BeginSpelled,
      M.EndSpelled == File.SpelledTokens.size()
          : PrintToken(File.SpelledTokens[M.EndSpelled]),
      M.EndSpelled, PrintToken(ExpandedTokens[M.BeginExpanded]),
      M.BeginExpanded, PrintToken(ExpandedTokens[M.EndExpanded]),
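// ---- Illustrative sketch (not part of Tokens.cpp) ----
// dumpForTests() is meant for debugging and unit tests; a typical use is to
// print the buffer when a mapping-related test fails. Buffer is assumed.
#include "clang/Tooling/Syntax/Tokens.h"
#include "llvm/Support/raw_ostream.h"
static void debugDump(const clang::syntax::TokenBuffer &Buffer) {
  llvm::errs() << Buffer.dumpForTests(); // spelled tokens and mappings per file
}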