#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/FormatVariadic.h"
#include "llvm/Support/raw_ostream.h"
// Predicates used (via llvm::partition_point) in getTokensCovering() to find
// the consecutive subrange of tokens covering a SourceRange R.
return SM.isBeforeInTranslationUnit(T.location(), R.getBegin());

return !SM.isBeforeInTranslationUnit(R.getEnd(), T.location());
// spelledForExpandedSlow(): unwrap macro-argument expansions shared by First
// and Last, so that a range inside a macro argument can be mapped back to the
// argument's spelling in TargetFile.
assert(SM.getSLocEntry(TargetFile).isFile());

while (First.isMacroID() && Last.isMacroID()) {
  auto DecFirst = SM.getDecomposedLoc(First);
  auto DecLast = SM.getDecomposedLoc(Last);
  auto &ExpFirst = SM.getSLocEntry(DecFirst.first).getExpansion();
  auto &ExpLast = SM.getSLocEntry(DecLast.first).getExpansion();
  // Stop unless both locations are macro-argument expansions of the same
  // macro invocation.
  if (!ExpFirst.isMacroArgExpansion() || !ExpLast.isMacroArgExpansion())
    break;
  if (ExpFirst.getExpansionLocStart() != ExpLast.getExpansionLocStart())
    break;
  // Stop once the expansion is spelled directly in the target file.
  auto ExpFileID = SM.getFileID(ExpFirst.getExpansionLocStart());
  if (ExpFileID == TargetFile)
    break;
  // Move both endpoints to their spelling inside the macro argument.
  First = ExpFirst.getSpellingLoc().getLocWithOffset(DecFirst.second);
  Last = ExpLast.getSpellingLoc().getLocWithOffset(DecLast.second);
}
// The candidate spelled range is the expansion range of [First, Last]. It must
// lie in TargetFile and must not overlap the neighbouring tokens Prev/Next.
SourceRange Candidate =
    SM.getExpansionRange(SourceRange(First, Last)).getAsRange();
auto DecFirst = SM.getDecomposedExpansionLoc(Candidate.getBegin());
auto DecLast = SM.getDecomposedExpansionLoc(Candidate.getEnd());
if (Candidate.isInvalid() || DecFirst.first != TargetFile ||
    DecLast.first != TargetFile)
  return SourceRange();
if (Prev.isValid()) {
  auto Dec = SM.getDecomposedLoc(SM.getExpansionRange(Prev).getBegin());
  if (Dec.first != DecFirst.first || Dec.second >= DecFirst.second)
    return SourceRange();
}
if (Next.isValid()) {
  auto Dec = SM.getDecomposedLoc(SM.getExpansionRange(Next).getEnd());
  if (Dec.first != DecLast.first || Dec.second <= DecLast.second)
    return SourceRange();
}
return Candidate;
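// Usage sketch (not part of Tokens.cpp): the slow path above is what lets
// TokenBuffer::spelledForExpanded() map a sub-range of a macro argument back
// to the argument's spelling. Given this hypothetical input:
#define ID(X) X
int a = ID(1 + 2); // Expanded `1 + 2` (or a sub-range such as `1 +`) maps back
                   // to its spelling inside ID(...). A range that covers the
                   // expansion only partially *and* extends past it (e.g.
                   // `+ 2 ;`) cannot be mapped and yields std::nullopt.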
// syntax::Token constructors.
syntax::Token::Token(SourceLocation Location, unsigned Length,
                     tok::TokenKind Kind)
    : Location(Location), Length(Length), Kind(Kind) {
  assert(Location.isValid());
}

// Token(const clang::Token &): annotation tokens are not supported.
assert(!T.isAnnotation());

// Token::text(SM): the characters the token was spelled with.
bool Invalid = false;
const char *Start = SM.getCharacterData(location(), &Invalid);
return llvm::StringRef(Start, length());

// Token::range(SM): decompose the location into FileID + offset.
assert(location().isFileID() && "must be a spelled token");
FileID File;
unsigned StartOffset;
std::tie(File, StartOffset) = SM.getDecomposedLoc(location());

// Token::range(SM, First, Last): the file range covering both tokens.
assert(F.file() == L.file() && "tokens from different files");
assert((F == L || F.endOffset() <= L.beginOffset()) &&
       "wrong order of tokens");
return FileRange(F.file(), F.beginOffset(), L.endOffset());

// operator<<(raw_ostream &, const Token &).
return OS << T.str();
// FileRange(File, BeginOffset, EndOffset).
assert(File.isValid());
assert(BeginOffset <= EndOffset);

// FileRange(SM, BeginLoc, Length).
std::tie(File, Begin) = SM.getDecomposedLoc(BeginLoc);
End = Begin + Length;

// FileRange(SM, BeginLoc, EndLoc).
assert(SM.getFileID(BeginLoc) == SM.getFileID(EndLoc));
assert(SM.getFileOffset(BeginLoc) <= SM.getFileOffset(EndLoc));
std::tie(File, Begin) = SM.getDecomposedLoc(BeginLoc);
End = SM.getFileOffset(EndLoc);

// operator<<(raw_ostream &, const FileRange &).
return OS << llvm::formatv("FileRange(file = {0}, offsets = {1}-{2})",
                           R.file().getHashValue(), R.beginOffset(),
                           R.endOffset());

// FileRange::text(SM): bounds check against the file buffer.
assert(End <= Text.size());
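// Usage sketch (not part of Tokens.cpp, assumes a spelled token and its
// SourceManager are at hand): Token::text() returns the characters the token
// was written with; Token::range() and FileRange::text() expose the same
// substring via explicit file offsets.
#include "clang/Basic/SourceManager.h"
#include "clang/Tooling/Syntax/Tokens.h"
#include "llvm/Support/raw_ostream.h"

static void printTokenInfo(const clang::syntax::Token &Tok,
                           const clang::SourceManager &SM) {
  llvm::StringRef Spelling = Tok.text(SM);    // e.g. "foo" or "+"
  clang::syntax::FileRange R = Tok.range(SM); // file + [begin, end) offsets
  llvm::errs() << Spelling << " at offsets [" << R.beginOffset() << ", "
               << R.endOffset() << ") spells '" << R.text(SM) << "'\n";
}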
// TokenBuffer::indexExpandedTokens(): build a SourceLocation -> index map over
// the expanded stream; no-op if it was built already.
if (!ExpandedTokIndex.empty())
  return;
ExpandedTokIndex.reserve(ExpandedTokens.size());
for (size_t I = 0, E = ExpandedTokens.size(); I != E; ++I) {
  SourceLocation Loc = ExpandedTokens[I].location();
  if (Loc.isValid())
    ExpandedTokIndex[Loc] = I;
}

// TokenBuffer::expandedTokens(SourceRange R): fast path using the index, which
// works because AST ranges are token ranges of the expanded stream.
if (!ExpandedTokIndex.empty()) {
  const auto B = ExpandedTokIndex.find(R.getBegin());
  const auto E = ExpandedTokIndex.find(R.getEnd());
  if (B != ExpandedTokIndex.end() && E != ExpandedTokIndex.end()) {
    const Token *L = ExpandedTokens.data() + B->getSecond();
    // Add 1 to the end index to get a half-open range.
    const Token *R = ExpandedTokens.data() + E->getSecond() + 1;
    if (L > R)
      return {};
    return {L, R};
  }
}
// Slow path: binary search with isBeforeInTranslationUnit.
return getTokensCovering(expandedTokens(), R, *SourceMgr);
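// Usage sketch (not part of Tokens.cpp): callers that repeatedly map AST
// ranges can build the index once, then query expanded tokens by SourceRange.
#include "clang/Tooling/Syntax/Tokens.h"
#include "llvm/Support/raw_ostream.h"

void dumpRangeTokens(clang::syntax::TokenBuffer &Buffer, clang::SourceRange R) {
  Buffer.indexExpandedTokens(); // one-time cost, speeds up the lookups below
  for (const clang::syntax::Token &T : Buffer.expandedTokens(R))
    llvm::errs() << T.text(Buffer.sourceManager()) << " ";
  llvm::errs() << "\n";
}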
std::pair<const syntax::Token *, const TokenBuffer::Mapping *>
TokenBuffer::spelledForExpandedToken(const syntax::Token *Expanded) const {
  assert(ExpandedTokens.data() <= Expanded &&
         Expanded < ExpandedTokens.data() + ExpandedTokens.size());

  auto FileIt = Files.find(
      SourceMgr->getFileID(SourceMgr->getExpansionLoc(Expanded->location())));
  assert(FileIt != Files.end() && "no file for an expanded token");

  const MarkedFile &File = FileIt->second;

  unsigned ExpandedIndex = Expanded - ExpandedTokens.data();
  // Find the first mapping that produced tokens after `Expanded`.
  auto It = llvm::partition_point(File.Mappings, [&](const Mapping &M) {
    return M.BeginExpanded <= ExpandedIndex;
  });

  if (It == File.Mappings.begin()) {
    // `Expanded` precedes all mappings, so it is a spelled token itself.
    return {&File.SpelledTokens[ExpandedIndex - File.BeginExpanded],
            /*Mapping=*/nullptr};
  }
  --It; // `It` now points to the last mapping that started before our token.

  // The token is part of the mapping.
  if (ExpandedIndex < It->EndExpanded)
    return {&File.SpelledTokens[It->BeginSpelled], &*It};

  // Not part of the mapping: adjust the index by the preceding mapping.
  return {
      &File.SpelledTokens[It->EndSpelled + (ExpandedIndex - It->EndExpanded)],
      /*Mapping=*/nullptr};
}
const TokenBuffer::Mapping *
TokenBuffer::mappingStartingBeforeSpelled(const MarkedFile &F,
                                          const syntax::Token *Spelled) {
  assert(F.SpelledTokens.data() <= Spelled);
  unsigned SpelledI = Spelled - F.SpelledTokens.data();
  assert(SpelledI < F.SpelledTokens.size());

  auto It = llvm::partition_point(F.Mappings, [SpelledI](const Mapping &M) {
    return M.BeginSpelled <= SpelledI;
  });
  if (It == F.Mappings.begin())
    return nullptr;
  --It;
  return &*It;
}
// TokenBuffer::expandedForSpelled().
const auto &File = fileForSpelled(Spelled);

auto *FrontMapping = mappingStartingBeforeSpelled(File, &Spelled.front());
unsigned SpelledFrontI = &Spelled.front() - File.SpelledTokens.data();
assert(SpelledFrontI < File.SpelledTokens.size());
unsigned ExpandedBegin;
if (!FrontMapping) {
  // Spelled tokens are mapped one-to-one to expanded tokens.
  ExpandedBegin = File.BeginExpanded + SpelledFrontI;
} else if (SpelledFrontI < FrontMapping->EndSpelled) {
  // Spelled tokens start inside a mapping; they must cover it from the start.
  if (SpelledFrontI != FrontMapping->BeginSpelled) {
    return {};
  }
  ExpandedBegin = FrontMapping->BeginExpanded;
} else {
  // Spelled tokens start after the mapping ends.
  ExpandedBegin =
      FrontMapping->EndExpanded + (SpelledFrontI - FrontMapping->EndSpelled);
}

auto *BackMapping = mappingStartingBeforeSpelled(File, &Spelled.back());
unsigned SpelledBackI = &Spelled.back() - File.SpelledTokens.data();
unsigned ExpandedEnd;
if (!BackMapping) {
  ExpandedEnd = File.BeginExpanded + SpelledBackI + 1;
} else if (SpelledBackI < BackMapping->EndSpelled) {
  // Spelled tokens end inside a mapping; they must cover it to the end.
  if (SpelledBackI + 1 != BackMapping->EndSpelled) {
    return {};
  }
  ExpandedEnd = BackMapping->EndExpanded;
} else {
  // Spelled tokens end after the mapping ends.
  ExpandedEnd =
      BackMapping->EndExpanded + (SpelledBackI - BackMapping->EndSpelled) + 1;
}

assert(ExpandedBegin < ExpandedTokens.size());
assert(ExpandedEnd < ExpandedTokens.size());
// Avoid returning empty ranges.
if (ExpandedBegin == ExpandedEnd)
  return {};
return {llvm::ArrayRef(ExpandedTokens.data() + ExpandedBegin,
                       ExpandedTokens.data() + ExpandedEnd)};
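// Usage sketch (not part of Tokens.cpp): map a run of spelled tokens (e.g. a
// macro invocation as written in the file) to the expanded tokens the parser
// saw. An empty result means the spelled range does not line up with
// expansion boundaries.
#include "clang/Tooling/Syntax/Tokens.h"
#include "llvm/Support/raw_ostream.h"

void countExpandedRuns(const clang::syntax::TokenBuffer &Buffer,
                       llvm::ArrayRef<clang::syntax::Token> Spelled) {
  for (llvm::ArrayRef<clang::syntax::Token> Run :
       Buffer.expandedForSpelled(Spelled))
    llvm::errs() << "maps to " << Run.size() << " expanded tokens\n";
}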
// TokenBuffer::spelledTokens(FileID).
auto It = Files.find(FID);
assert(It != Files.end());
return It->second.SpelledTokens;

// TokenBuffer::spelledTokenContaining(SourceLocation).
const auto *Tok = llvm::partition_point(
    spelledTokens(SourceMgr->getFileID(Loc)),
    [&](const syntax::Token &Tok) { return Tok.endLocation() <= Loc; });
if (!Tok || Loc < Tok->location())
  return nullptr;
return Tok;

std::string TokenBuffer::Mapping::str() const {
  return std::string(
      llvm::formatv("spelled tokens: [{0},{1}), expanded tokens: [{2},{3})",
                    BeginSpelled, EndSpelled, BeginExpanded, EndExpanded));
}
std::optional<llvm::ArrayRef<syntax::Token>>
TokenBuffer::spelledForExpanded(llvm::ArrayRef<syntax::Token> Expanded) const {
  // Strip a trailing `eof` token: it has no spelling.
  if (!Expanded.empty() && Expanded.back().kind() == tok::eof) {
    Expanded = Expanded.drop_back();
  }
  if (Expanded.empty())
    return std::nullopt;

  const syntax::Token *First = &Expanded.front();
  const syntax::Token *Last = &Expanded.back();
  auto [FirstSpelled, FirstMapping] = spelledForExpandedToken(First);
  auto [LastSpelled, LastMapping] = spelledForExpandedToken(Last);

  // Both ends must map into the same file.
  FileID FID = SourceMgr->getFileID(FirstSpelled->location());
  if (FID != SourceMgr->getFileID(LastSpelled->location()))
    return std::nullopt;

  const MarkedFile &File = Files.find(FID)->second;

  // Tokens that come from the same macro-argument expansion are handled by the
  // slow, SourceManager-based path, using the excluded neighbours Prev/Next as
  // bounds.
  if (FirstMapping && FirstMapping == LastMapping &&
      SourceMgr->isMacroArgExpansion(First->location()) &&
      SourceMgr->isMacroArgExpansion(Last->location())) {
    SourceLocation Prev = First == &ExpandedTokens.front()
                              ? SourceLocation()
                              : (First - 1)->location();
    SourceLocation Next = Last == &ExpandedTokens.back()
                              ? SourceLocation()
                              : (Last + 1)->location();
    SourceRange Range = spelledForExpandedSlow(
        First->location(), Last->location(), Prev, Next, FID, *SourceMgr);
    if (Range.isInvalid())
      return std::nullopt;
    return getTokensCovering(File.SpelledTokens, Range, *SourceMgr);
  }

  // Fast path based on the stored mappings; only works if the range covers
  // full expansions at both ends.
  unsigned FirstExpanded = Expanded.begin() - ExpandedTokens.data();
  unsigned LastExpanded = Expanded.end() - ExpandedTokens.data();
  if (FirstMapping && FirstExpanded != FirstMapping->BeginExpanded)
    return std::nullopt;
  if (LastMapping && LastMapping->EndExpanded != LastExpanded)
    return std::nullopt;
  return llvm::ArrayRef(
      FirstMapping ? File.SpelledTokens.data() + FirstMapping->BeginSpelled
                   : FirstSpelled,
      LastMapping ? File.SpelledTokens.data() + LastMapping->EndSpelled
                  : LastSpelled + 1);
}

// TokenBuffer::makeExpansion(): slice the spelled and expanded streams
// according to a Mapping.
TokenBuffer::Expansion TokenBuffer::makeExpansion(const MarkedFile &F,
                                                  const Mapping &M) const {
  Expansion E;
  E.Spelled = llvm::ArrayRef(F.SpelledTokens.data() + M.BeginSpelled,
                             F.SpelledTokens.data() + M.EndSpelled);
  E.Expanded = llvm::ArrayRef(ExpandedTokens.data() + M.BeginExpanded,
                              ExpandedTokens.data() + M.EndExpanded);
  return E;
}
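// Usage sketch (not part of Tokens.cpp): a typical client takes the expanded
// tokens of an AST node's range and asks for the spelled tokens one could edit
// in the source file; std::nullopt means the node cannot be attributed to a
// contiguous spelled region (e.g. it covers part of a macro body).
#include <optional>
#include "clang/Tooling/Syntax/Tokens.h"

std::optional<clang::syntax::FileRange>
editableRange(const clang::syntax::TokenBuffer &Buffer, clang::SourceRange R) {
  llvm::ArrayRef<clang::syntax::Token> Expanded = Buffer.expandedTokens(R);
  auto Spelled = Buffer.spelledForExpanded(Expanded);
  if (!Spelled || Spelled->empty())
    return std::nullopt;
  return clang::syntax::Token::range(Buffer.sourceManager(), Spelled->front(),
                                     Spelled->back());
}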
const TokenBuffer::MarkedFile &
TokenBuffer::fileForSpelled(llvm::ArrayRef<syntax::Token> Spelled) const {
  assert(!Spelled.empty());
  assert(Spelled.front().location().isFileID() && "not a spelled token");
  auto FileIt = Files.find(SourceMgr->getFileID(Spelled.front().location()));
  assert(FileIt != Files.end() && "file not tracked by token buffer");
  const auto &File = FileIt->second;
  assert(File.SpelledTokens.data() <= Spelled.data() &&
         Spelled.end() <=
             (File.SpelledTokens.data() + File.SpelledTokens.size()) &&
         "Tokens not in spelled range");
  auto T1 = Spelled.back().location();
  auto T2 = File.SpelledTokens.back().location();
  assert(T1 == T2 || sourceManager().isBeforeInTranslationUnit(T1, T2));
  return File;
}
std::optional<TokenBuffer::Expansion>
TokenBuffer::expansionStartingAt(const syntax::Token *Spelled) const {
  assert(Spelled);
  const auto &File = fileForSpelled(*Spelled);

  unsigned SpelledIndex = Spelled - File.SpelledTokens.data();
  auto M = llvm::partition_point(File.Mappings, [&](const Mapping &M) {
    return M.BeginSpelled < SpelledIndex;
  });
  if (M == File.Mappings.end() || M->BeginSpelled != SpelledIndex)
    return std::nullopt;
  return makeExpansion(File, *M);
}
// TokenBuffer::expansionsOverlapping(): find the first mapping overlapping
// Spelled, then collect mappings until they stop overlapping.
const auto &File = fileForSpelled(Spelled);

unsigned SpelledBeginIndex = Spelled.begin() - File.SpelledTokens.data();
unsigned SpelledEndIndex = Spelled.end() - File.SpelledTokens.data();
auto M = llvm::partition_point(File.Mappings, [&](const Mapping &M) {
  return M.EndSpelled <= SpelledBeginIndex;
});

std::vector<TokenBuffer::Expansion> Expansions;
for (; M != File.Mappings.end() && M->BeginSpelled < SpelledEndIndex; ++M)
  Expansions.push_back(makeExpansion(File, *M));
return Expansions;
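// Usage sketch (not part of Tokens.cpp): Expansion pairs the spelled tokens of
// a macro invocation or directive with the expanded tokens it produced; an
// empty Expanded range means it expanded to nothing.
#include "clang/Tooling/Syntax/Tokens.h"
#include "llvm/Support/raw_ostream.h"

void reportExpansions(const clang::syntax::TokenBuffer &Buffer,
                      llvm::ArrayRef<clang::syntax::Token> Spelled) {
  for (const clang::syntax::TokenBuffer::Expansion &E :
       Buffer.expansionsOverlapping(Spelled))
    llvm::errs() << E.Spelled.size() << " spelled -> " << E.Expanded.size()
                 << " expanded tokens\n";
}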
// syntax::spelledTokensTouching(Loc, Tokens): the (at most two) spelled tokens
// that overlap or touch the location.
auto *Right = llvm::partition_point(
    Tokens, [&](const syntax::Token &Tok) { return Tok.location() < Loc; });
bool AcceptRight = Right != Tokens.end() && Right->location() <= Loc;
bool AcceptLeft =
    Right != Tokens.begin() && (Right - 1)->endLocation() >= Loc;
return llvm::ArrayRef(Right - (AcceptLeft ? 1 : 0),
                      Right + (AcceptRight ? 1 : 0));

// The TokenBuffer overload forwards to the ArrayRef overload.
return syntax::spelledTokensTouching(
    Loc, Tokens.spelledTokens(Tokens.sourceManager().getFileID(Loc)));

// syntax::spelledIdentifierTouching(): pick the touching identifier, if any.
if (Tok.kind() == tok::identifier)
  return &Tok;

return spelledIdentifierTouching(
    Loc, Tokens.spelledTokens(Tokens.sourceManager().getFileID(Loc)));
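// Usage sketch (not part of Tokens.cpp): these helpers are convenient for
// cursor-based features, e.g. finding the identifier under a cursor location.
#include "clang/Tooling/Syntax/Tokens.h"

llvm::StringRef identifierUnderCursor(const clang::syntax::TokenBuffer &Buffer,
                                      clang::SourceLocation Loc) {
  const clang::syntax::Token *Tok =
      clang::syntax::spelledIdentifierTouching(Loc, Buffer);
  return Tok ? Tok->text(Buffer.sourceManager()) : llvm::StringRef();
}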
std::vector<const syntax::Token *>
TokenBuffer::macroExpansions(FileID FID) const {
  auto FileIt = Files.find(FID);
  assert(FileIt != Files.end() && "file not tracked by token buffer");
  auto &File = FileIt->second;
  std::vector<const syntax::Token *> Expansions;
  auto &Spelled = File.SpelledTokens;
  for (auto Mapping : File.Mappings) {
    const syntax::Token *Token = &Spelled[Mapping.BeginSpelled];
    if (Token->kind() == tok::TokenKind::identifier)
      Expansions.push_back(Token);
  }
  return Expansions;
}
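// Usage sketch (not part of Tokens.cpp): macroExpansions() returns the spelled
// identifier token of every macro invocation recorded for a file.
#include "clang/Tooling/Syntax/Tokens.h"
#include "llvm/Support/raw_ostream.h"

void listMacroUses(const clang::syntax::TokenBuffer &Buffer,
                   clang::FileID FID) {
  for (const clang::syntax::Token *MacroName : Buffer.macroExpansions(FID))
    llvm::errs() << MacroName->text(Buffer.sourceManager()) << "\n";
}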
// syntax::tokenize(FR, SM, LO): lex the file range in raw mode and record the
// resulting spelled tokens.
std::vector<syntax::Token> Tokens;
IdentifierTable Identifiers(LO);

// Convert raw_identifiers to their proper token kinds (keywords etc.).
if (T.getKind() == tok::raw_identifier && !T.needsCleaning() &&
    !T.hasUCN()) {
  clang::IdentifierInfo &II = Identifiers.get(T.getRawIdentifier());
  T.setIdentifierInfo(&II);
  T.setKind(II.getTokenID());
}

auto SrcBuffer = SM.getBufferData(FR.file());
Lexer L(SM.getLocForStartOfFile(FR.file()), LO, SrcBuffer.data(),
        SrcBuffer.data() + FR.beginOffset(),
        // The Lexer requires a null-terminated buffer, so BufEnd points at the
        // end of the whole buffer rather than at FR.endOffset().
        SrcBuffer.data() + SrcBuffer.size());
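// Usage sketch (not part of Tokens.cpp): raw tokenization of a whole file,
// independent of preprocessing. Keywords get their proper kinds, but no macro
// expansion or directive handling happens.
#include "clang/Basic/LangOptions.h"
#include "clang/Basic/SourceManager.h"
#include "clang/Tooling/Syntax/Tokens.h"
#include "llvm/Support/raw_ostream.h"

void dumpRawTokens(clang::FileID FID, const clang::SourceManager &SM,
                   const clang::LangOptions &LangOpts) {
  for (const clang::syntax::Token &T :
       clang::syntax::tokenize(FID, SM, LangOpts))
    llvm::errs() << T.dumpForTests(SM) << "\n";
}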
// TokenCollector::CollectPPExpansions::MacroExpands().
const auto &SM = Collector->PP.getSourceManager();

// Only record top-level expansions: skip this one if it is enclosed by the
// expansion recorded last.
if (LastExpansionEnd.isValid() &&
    !SM.isBeforeInTranslationUnit(LastExpansionEnd, Range.getEnd()))
  return;

assert(Collector->Expansions.count(Range.getBegin()) &&
       "Overlapping macros should have same expansion location");
// TokenCollector::TokenCollector(PP): the token watcher records every expanded
// token, skipping annotation tokens.
if (T.isAnnotation())
  return;
DEBUG_WITH_TYPE("collect-tokens", llvm::dbgs()
                                      << "Token: "
                                      << syntax::Token(T).dumpForTests(
                                             PP.getSourceManager())
                                      << "\n");

// A PPCallbacks hook additionally records the ranges of macro expansions.
auto CB = std::make_unique<CollectPPExpansions>(*this);
this->Collector = CB.get();
// TokenCollector::Builder: assembles the TokenBuffer from the expanded stream
// and the recorded macro expansions.
Builder(std::vector<syntax::Token> Expanded, PPExpansions CollectedExpansions,
        const SourceManager &SM, const LangOptions &LangOpts)
    : Result(SM), CollectedExpansions(std::move(CollectedExpansions)), SM(SM),
      LangOpts(LangOpts) {
  Result.ExpandedTokens = std::move(Expanded);
}

// build():
assert(!Result.ExpandedTokens.empty());
assert(Result.ExpandedTokens.back().kind() == tok::eof);

// Tokenize every file that contributed tokens to the expanded stream.
buildSpelledTokens();
// Alternate between discarding spelled tokens that expanded to nothing and
// consuming runs of expanded tokens, until only the trailing eof remains.
while (NextExpanded < Result.ExpandedTokens.size() - 1 /*eof*/) {
  discard();
  unsigned OldPosition = NextExpanded;
  advance();
  if (NextExpanded == OldPosition)
    diagnoseAdvanceFailure();
}
// Remaining spelled tokens in each file expanded to nothing.
for (const auto &File : Result.Files)
  discard(File.first);

// Sanity check: mappings must be sorted in both token streams.
for (auto &pair : Result.Files) {
  auto &mappings = pair.second.Mappings;
  assert(llvm::is_sorted(mappings, [](const TokenBuffer::Mapping &M1,
                                      const TokenBuffer::Mapping &M2) {
    return M1.BeginSpelled < M2.BeginSpelled &&
           M1.EndSpelled < M2.EndSpelled &&
           M1.BeginExpanded < M2.BeginExpanded &&
           M1.EndExpanded < M2.EndExpanded;
  }));
}

return std::move(Result);
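// Worked example (a sketch, not part of Tokens.cpp): for a main file containing
//
//   #define FOO 1+2
//   int a = FOO;
//
// the spelled stream is `# define FOO 1 + 2 int a = FOO ;` and the expanded
// stream is `int a = 1 + 2 ;`. build() emits roughly two mappings: an empty one
// covering the directive (`#` .. `2` -> no expanded tokens) and one for the
// invocation (`FOO` -> `1 + 2`); the remaining tokens match one-to-one and
// need no mapping.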
// discard(): consume spelled tokens that didn't expand to anything, creating
// empty mappings up to the currently processed expanded token (or up to the
// end of the drained file).
void discard(std::optional<FileID> Drain = std::nullopt) {
  SourceLocation Target =
      Drain ? SM.getLocForEndOfFile(*Drain)
            : SM.getExpansionLoc(
                  Result.ExpandedTokens[NextExpanded].location());
  FileID File = SM.getFileID(Target);
  const auto &SpelledTokens = Result.Files[File].SpelledTokens;
  auto &NextSpelled = this->NextSpelled[File];

  TokenBuffer::Mapping Mapping;
  Mapping.BeginSpelled = NextSpelled;
  // The empty mapping sits at the current expanded-token cursor (or at the end
  // of the drained file's expanded range).
  Mapping.BeginExpanded = Mapping.EndExpanded =
      Drain ? Result.Files[*Drain].EndExpanded : NextExpanded;
  // FlushMapping() emits the current mapping (if non-trivial) and starts a new
  // one at the current spelled cursor.
  auto FlushMapping = [&, this] {
    Mapping.EndSpelled = NextSpelled;
    if (Mapping.BeginSpelled != Mapping.EndSpelled)
      Result.Files[File].Mappings.push_back(Mapping);
    Mapping.BeginSpelled = NextSpelled;
  };

  while (NextSpelled < SpelledTokens.size() &&
         SpelledTokens[NextSpelled].location() < Target) {
    // If this spelled token starts a recorded macro expansion, split the empty
    // mapping around the expansion's spelled range.
    SourceLocation KnownEnd =
        CollectedExpansions.lookup(SpelledTokens[NextSpelled].location());
    if (KnownEnd.isValid()) {
      FlushMapping();
      while (NextSpelled < SpelledTokens.size() &&
             SpelledTokens[NextSpelled].location() <= KnownEnd)
        ++NextSpelled;
      FlushMapping();
    } else {
      ++NextSpelled;
    }
  }
  FlushMapping();
}
// advance(): consume the run of expanded tokens starting at NextExpanded and
// record a mapping when the run came from a macro expansion.
const syntax::Token &Tok = Result.ExpandedTokens[NextExpanded];
SourceLocation Expansion = SM.getExpansionLoc(Tok.location());
FileID File = SM.getFileID(Expansion);
const auto &SpelledTokens = Result.Files[File].SpelledTokens;
auto &NextSpelled = this->NextSpelled[File];

if (Tok.location().isFileID()) {
  // File tokens need no mapping: advance both cursors while they match.
  while (NextSpelled < SpelledTokens.size() &&
         NextExpanded < Result.ExpandedTokens.size() &&
         SpelledTokens[NextSpelled].location() ==
             Result.ExpandedTokens[NextExpanded].location()) {
    ++NextSpelled;
    ++NextExpanded;
  }
} else {
  // A macro expansion: its spelled bounds were recorded by CollectPPExpansions.
  auto End = CollectedExpansions.lookup(Expansion);
  assert(End.isValid() && "Macro expansion wasn't captured?");

  TokenBuffer::Mapping Mapping;
  Mapping.BeginExpanded = NextExpanded;
  Mapping.BeginSpelled = NextSpelled;
  // Consume spelled tokens within the recorded bounds...
  while (NextSpelled < SpelledTokens.size() &&
         SpelledTokens[NextSpelled].location() <= End)
    ++NextSpelled;
  // ...and expanded tokens produced by this expansion.
  while (NextExpanded < Result.ExpandedTokens.size() &&
         SM.getExpansionLoc(
             Result.ExpandedTokens[NextExpanded].location()) == Expansion)
    ++NextExpanded;
  Mapping.EndExpanded = NextExpanded;
  Mapping.EndSpelled = NextSpelled;
  Result.Files[File].Mappings.push_back(Mapping);
}
void diagnoseAdvanceFailure() {
  // Dump the expanded tokens around the failure point for debugging.
  for (unsigned I = (NextExpanded < 10) ? 0 : NextExpanded - 10;
       I < NextExpanded + 5 && I < Result.ExpandedTokens.size(); ++I) {
    const char *L =
        (I == NextExpanded) ? "!! " : (I < NextExpanded) ? "ok " : "   ";
    llvm::errs() << L << Result.ExpandedTokens[I].dumpForTests(SM) << "\n";
  }
  llvm_unreachable("Couldn't map expanded token to spelled tokens!");
}
// buildSpelledTokens(): tokenize every file that contributed expanded tokens
// and compute each file's expanded-token range.
void buildSpelledTokens() {
  for (unsigned I = 0; I < Result.ExpandedTokens.size(); ++I) {
    const auto &Tok = Result.ExpandedTokens[I];
    auto FID = SM.getFileID(SM.getExpansionLoc(Tok.location()));

    auto It = Result.Files.try_emplace(FID);
    TokenBuffer::MarkedFile &File = It.first->second;

    // The eof token is not considered part of the main file's range.
    File.EndExpanded = Tok.kind() == tok::eof ? I : I + 1;

    if (!It.second)
      continue; // Seen this file before.
    // First time we see this file: record where its expanded tokens begin.
    File.BeginExpanded = I;
    File.SpelledTokens = tokenize(FID, SM, LangOpts);
  }
}

unsigned NextExpanded = 0;                    // cursor in ExpandedTokens
llvm::DenseMap<FileID, unsigned> NextSpelled; // per-file cursor in SpelledTokens
PPExpansions CollectedExpansions;
// TokenCollector::consume(): detach from the preprocessor and build the
// TokenBuffer.
PP.setTokenWatcher(nullptr);
Collector->disable();
return Builder(std::move(Expanded), std::move(Expansions),
               PP.getSourceManager(), PP.getLangOpts())
    .build();
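// Usage sketch (not part of Tokens.cpp): the intended workflow is to attach a
// TokenCollector to the Preprocessor before parsing and consume() it
// afterwards; `Parse` stands for whatever drives the frontend action.
#include "clang/Lex/Preprocessor.h"
#include "clang/Tooling/Syntax/Tokens.h"
#include "llvm/ADT/STLExtras.h"

clang::syntax::TokenBuffer collectTokens(clang::Preprocessor &PP,
                                         llvm::function_ref<void()> Parse) {
  clang::syntax::TokenCollector Collector(PP); // installs PP hooks
  Parse();                                     // run the parser / frontend
  return std::move(Collector).consume();       // detach and build the buffer
}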
// syntax::Token::str().
return std::string(llvm::formatv("Token({0}, length = {1})",
                                 tok::getTokenName(kind()), length()));

// syntax::Token::dumpForTests(SM).
return std::string(llvm::formatv("Token(`{0}`, {1}, length = {2})", text(SM),
                                 tok::getTokenName(kind()), length()));

// TokenBuffer::dumpForTests(): print tokens by their text, eof specially.
if (T.kind() == tok::eof)
  return "<eof>";
return std::string(T.text(*SourceMgr));
auto DumpTokens = [this, &PrintToken](llvm::raw_ostream &OS,
                                      llvm::ArrayRef<syntax::Token> Tokens) {
  if (Tokens.empty()) {
    OS << "<empty>";
    return;
  }
  OS << Tokens[0].text(*SourceMgr);
  for (unsigned I = 1; I < Tokens.size(); ++I) {
    if (Tokens[I].kind() == tok::eof)
      continue;
    OS << " " << PrintToken(Tokens[I]);
  }
};

std::string Dump;
llvm::raw_string_ostream OS(Dump);

OS << "expanded tokens:\n"
   << "  ";
// Note: the trailing '<eof>' is not shown.
DumpTokens(OS, llvm::ArrayRef(ExpandedTokens).drop_back());
OS << "\n";

// Dump the spelled tokens and mappings of each file, in FileID order.
std::vector<FileID> Keys;
for (const auto &F : Files)
  Keys.push_back(F.first);
llvm::sort(Keys);

for (FileID ID : Keys) {
  const MarkedFile &File = Files.find(ID)->second;
  auto Entry = SourceMgr->getFileEntryRefForID(ID);
  if (!Entry)
    continue; // Skip builtin files.
  std::string Path = llvm::sys::path::convert_to_slash(Entry->getName());
  OS << llvm::formatv("file '{0}'\n", Path) << "  spelled tokens:\n"
     << "    ";
  DumpTokens(OS, File.SpelledTokens);
  OS << "\n";

  if (File.Mappings.empty()) {
    OS << "  no mappings.\n";
    continue;
  }
  OS << "  mappings:\n";
  for (auto &M : File.Mappings) {
    OS << llvm::formatv(
        "    ['{0}'_{1}, '{2}'_{3}) => ['{4}'_{5}, '{6}'_{7})\n",
        PrintToken(File.SpelledTokens[M.BeginSpelled]), M.BeginSpelled,
        M.EndSpelled == File.SpelledTokens.size()
            ? "<eof>"
            : PrintToken(File.SpelledTokens[M.EndSpelled]),
        M.EndSpelled, PrintToken(ExpandedTokens[M.BeginExpanded]),
        M.BeginExpanded, PrintToken(ExpandedTokens[M.EndExpanded]),
        M.EndExpanded);
  }
}
return Dump;