clang: lib/Tooling/Syntax/Tokens.cpp Source File (original) (raw)
1
2
3
4
5
6
7
9
20#include "llvm/ADT/ArrayRef.h"
21#include "llvm/ADT/STLExtras.h"
22#include "llvm/Support/Debug.h"
23#include "llvm/Support/ErrorHandling.h"
24#include "llvm/Support/FormatVariadic.h"
25#include "llvm/Support/raw_ostream.h"
26#include
27#include
28#include
29#include
30#include
31#include
32#include
33
34using namespace clang;
36
37namespace {
38
43 return {};
45 llvm::partition_point(Toks, [&](const syntax::Token &T) {
46 return SM.isBeforeInTranslationUnit(T.location(), R.getBegin());
47 });
49 llvm::partition_point(Toks, [&](const syntax::Token &T) {
50 return .isBeforeInTranslationUnit(R.getEnd(), T.location());
51 });
53 return {};
54 return {Begin, End};
55}
56
57
58
59
60
61
62
63
64
65
70
71
72
73
74
75
76
77 assert(SM.getSLocEntry(TargetFile).isFile());
78
79
80
81
82
83
84
85
86
87
88 while (First.isMacroID() && Last.isMacroID()) {
89 auto DecFirst = SM.getDecomposedLoc(First);
90 auto DecLast = SM.getDecomposedLoc(Last);
91 auto &ExpFirst = SM.getSLocEntry(DecFirst.first).getExpansion();
92 auto &ExpLast = SM.getSLocEntry(DecLast.first).getExpansion();
93
94 if (!ExpFirst.isMacroArgExpansion() || !ExpLast.isMacroArgExpansion())
95 break;
96
97
98 if (ExpFirst.getExpansionLocStart() != ExpLast.getExpansionLocStart())
99 break;
100
101
102
103
104
105
106 auto ExpFileID = SM.getFileID(ExpFirst.getExpansionLocStart());
107 if (ExpFileID == TargetFile)
108 break;
109
110
111 First = ExpFirst.getSpellingLoc().getLocWithOffset(DecFirst.second);
112 Last = ExpLast.getSpellingLoc().getLocWithOffset(DecLast.second);
113 }
114
115
116
119 auto DecFirst = SM.getDecomposedExpansionLoc(Candidate.getBegin());
120 auto DecLast = SM.getDecomposedExpansionLoc(Candidate.getEnd());
121
122 if (Candidate.isInvalid() || DecFirst.first != TargetFile ||
123 DecLast.first != TargetFile)
125
127 auto Dec = SM.getDecomposedLoc(SM.getExpansionRange(Prev).getBegin());
128 if (Dec.first != DecFirst.first || Dec.second >= DecFirst.second)
130 }
131 if (Next.isValid()) {
132 auto Dec = SM.getDecomposedLoc(SM.getExpansionRange(Next).getEnd());
133 if (Dec.first != DecLast.first || Dec.second <= DecLast.second)
135 }
136
137
138 return Candidate;
139}
140
141}
142
145 : Location(Location), Length(Length), Kind(Kind) {
146 assert(Location.isValid());
147}
148
151 assert(.isAnnotation());
152}
153
156 const char *Start = SM.getCharacterData(location(), &Invalid);
158 return llvm::StringRef(Start, length());
159}
160
162 assert(location().isFileID() && "must be a spelled token");
164 unsigned StartOffset;
165 std::tie(File, StartOffset) = SM.getDecomposedLoc(location());
167}
168
174 assert(F.file() == L.file() && "tokens from different files");
175 assert((F == L || F.endOffset() <= L.beginOffset()) &&
176 "wrong order of tokens");
177 return FileRange(F.file(), F.beginOffset(), L.endOffset());
178}
179
181 return OS << T.str();
182}
183
185 : File(File), Begin(BeginOffset), End(EndOffset) {
186 assert(File.isValid());
187 assert(BeginOffset <= EndOffset);
188}
189
191 unsigned Length) {
192 assert(BeginLoc.isValid());
194
195 std::tie(File, Begin) = SM.getDecomposedLoc(BeginLoc);
196 End = Begin + Length;
197}
200 assert(BeginLoc.isValid());
202 assert(EndLoc.isValid());
204 assert(SM.getFileID(BeginLoc) == SM.getFileID(EndLoc));
205 assert(SM.getFileOffset(BeginLoc) <= SM.getFileOffset(EndLoc));
206
207 std::tie(File, Begin) = SM.getDecomposedLoc(BeginLoc);
208 End = SM.getFileOffset(EndLoc);
209}
210
213 return OS << llvm::formatv("FileRange(file = {0}, offsets = {1}-{2})",
216}
217
220 StringRef Text = SM.getBufferData(File, &Invalid);
222 return "";
224 assert(End <= Text.size());
226}
227
229
230 if (!ExpandedTokIndex.empty())
231 return;
232 ExpandedTokIndex.reserve(ExpandedTokens.size());
233
234 for (size_t I = 0, E = ExpandedTokens.size(); I != E; ++I) {
237 ExpandedTokIndex[Loc] = I;
238 }
239}
240
243 return {};
244 if (!ExpandedTokIndex.empty()) {
245
246
247
248 const auto B = ExpandedTokIndex.find(R.getBegin());
249 const auto E = ExpandedTokIndex.find(R.getEnd());
250 if (B != ExpandedTokIndex.end() && E != ExpandedTokIndex.end()) {
251 const Token *L = ExpandedTokens.data() + B->getSecond();
252
253 const Token *R = ExpandedTokens.data() + E->getSecond() + 1;
254 if (L > R)
255 return {};
256 return {L, R};
257 }
258 }
259
260
261 return getTokensCovering(expandedTokens(), R, *SourceMgr);
262}
263
267 false);
268}
269
270std::pair<const syntax::Token *, const TokenBuffer::Mapping *>
271TokenBuffer::spelledForExpandedToken(const syntax::Token *Expanded) const {
272 assert(Expanded);
273 assert(ExpandedTokens.data() <= Expanded &&
274 Expanded < ExpandedTokens.data() + ExpandedTokens.size());
275
276 auto FileIt = Files.find(
278 assert(FileIt != Files.end() && "no file for an expanded token");
279
280 const MarkedFile &File = FileIt->second;
281
282 unsigned ExpandedIndex = Expanded - ExpandedTokens.data();
283
284 auto It = llvm::partition_point(File.Mappings, [&](const Mapping &M) {
285 return M.BeginExpanded <= ExpandedIndex;
286 });
287
288 if (It == File.Mappings.begin()) {
289
290 return {&File.SpelledTokens[ExpandedIndex - File.BeginExpanded],
291 nullptr};
292 }
293 --It;
294
295
296 if (ExpandedIndex < It->EndExpanded)
297 return {&File.SpelledTokens[It->BeginSpelled], &*It};
298
299
300
301 return {
302 &File.SpelledTokens[It->EndSpelled + (ExpandedIndex - It->EndExpanded)],
303 nullptr};
304}
305
306const TokenBuffer::Mapping *
307TokenBuffer::mappingStartingBeforeSpelled(const MarkedFile &F,
309 assert(F.SpelledTokens.data() <= Spelled);
310 unsigned SpelledI = Spelled - F.SpelledTokens.data();
311 assert(SpelledI < F.SpelledTokens.size());
312
313 auto It = llvm::partition_point(F.Mappings, [SpelledI](const Mapping &M) {
314 return M.BeginSpelled <= SpelledI;
315 });
316 if (It == F.Mappings.begin())
317 return nullptr;
318 --It;
319 return &*It;
320}
321
324 if (Spelled.empty())
325 return {};
326 const auto &File = fileForSpelled(Spelled);
327
328 auto *FrontMapping = mappingStartingBeforeSpelled(File, &Spelled.front());
329 unsigned SpelledFrontI = &Spelled.front() - File.SpelledTokens.data();
330 assert(SpelledFrontI < File.SpelledTokens.size());
331 unsigned ExpandedBegin;
332 if (!FrontMapping) {
333
334
335 ExpandedBegin = File.BeginExpanded + SpelledFrontI;
336 } else if (SpelledFrontI < FrontMapping->EndSpelled) {
337
338 if (SpelledFrontI != FrontMapping->BeginSpelled) {
339
340 return {};
341 }
342
343 ExpandedBegin = FrontMapping->BeginExpanded;
344 } else {
345
346
347 ExpandedBegin =
348 FrontMapping->EndExpanded + (SpelledFrontI - FrontMapping->EndSpelled);
349 }
350
351 auto *BackMapping = mappingStartingBeforeSpelled(File, &Spelled.back());
352 unsigned SpelledBackI = &Spelled.back() - File.SpelledTokens.data();
353 unsigned ExpandedEnd;
354 if (!BackMapping) {
355
356
357 ExpandedEnd = File.BeginExpanded + SpelledBackI + 1;
358 } else if (SpelledBackI < BackMapping->EndSpelled) {
359
360 if (SpelledBackI + 1 != BackMapping->EndSpelled) {
361
362 return {};
363 }
364 ExpandedEnd = BackMapping->EndExpanded;
365 } else {
366
367 ExpandedEnd =
368 BackMapping->EndExpanded + (SpelledBackI - BackMapping->EndSpelled) + 1;
369 }
370
371 assert(ExpandedBegin < ExpandedTokens.size());
372 assert(ExpandedEnd < ExpandedTokens.size());
373
374 if (ExpandedBegin == ExpandedEnd)
375 return {};
376 return {llvm::ArrayRef(ExpandedTokens.data() + ExpandedBegin,
377 ExpandedTokens.data() + ExpandedEnd)};
378}
379
381 auto It = Files.find(FID);
382 assert(It != Files.end());
383 return It->second.SpelledTokens;
384}
385
389 const auto *Tok = llvm::partition_point(
391 [&](const syntax::Token &Tok) { return Tok.endLocation() <= Loc; });
392 if (!Tok || Loc < Tok->location())
393 return nullptr;
394 return Tok;
395}
396
397std::string TokenBuffer::Mapping::str() const {
398 return std::string(
399 llvm::formatv("spelled tokens: [{0},{1}), expanded tokens: [{2},{3})",
400 BeginSpelled, EndSpelled, BeginExpanded, EndExpanded));
401}
402
403std::optional<llvm::ArrayRefsyntax::Token>
405
406
407
408 if (!Expanded.empty() && Expanded.back().kind() == tok::eof) {
409 Expanded = Expanded.drop_back();
410 }
411
412
413 if (Expanded.empty())
414 return std::nullopt;
417 auto [FirstSpelled, FirstMapping] = spelledForExpandedToken(First);
418 auto [LastSpelled, LastMapping] = spelledForExpandedToken(Last);
419
420 FileID FID = SourceMgr->getFileID(FirstSpelled->location());
421
422 if (FID != SourceMgr->getFileID(LastSpelled->location()))
423 return std::nullopt;
424
425 const MarkedFile &File = Files.find(FID)->second;
426
427
428
429 if (FirstMapping && FirstMapping == LastMapping &&
432
435 : (First - 1)->location();
438 : (Last + 1)->location();
440 First->location(), Last->location(), Prev, Next, FID, *SourceMgr);
442 return std::nullopt;
443 return getTokensCovering(File.SpelledTokens, Range, *SourceMgr);
444 }
445
446
447
448 unsigned FirstExpanded = Expanded.begin() - ExpandedTokens.data();
449 unsigned LastExpanded = Expanded.end() - ExpandedTokens.data();
450 if (FirstMapping && FirstExpanded != FirstMapping->BeginExpanded)
451 return std::nullopt;
452 if (LastMapping && LastMapping->EndExpanded != LastExpanded)
453 return std::nullopt;
455 FirstMapping ? File.SpelledTokens.data() + FirstMapping->BeginSpelled
456 : FirstSpelled,
457 LastMapping ? File.SpelledTokens.data() + LastMapping->EndSpelled
458 : LastSpelled + 1);
459}
460
462 const Mapping &M) const {
463 Expansion E;
464 E.Spelled = llvm::ArrayRef(F.SpelledTokens.data() + M.BeginSpelled,
465 F.SpelledTokens.data() + M.EndSpelled);
466 E.Expanded = llvm::ArrayRef(ExpandedTokens.data() + M.BeginExpanded,
467 ExpandedTokens.data() + M.EndExpanded);
468 return E;
469}
470
471const TokenBuffer::MarkedFile &
473 assert(!Spelled.empty());
474 assert(Spelled.front().location().isFileID() && "not a spelled token");
475 auto FileIt = Files.find(SourceMgr->getFileID(Spelled.front().location()));
476 assert(FileIt != Files.end() && "file not tracked by token buffer");
477 const auto &File = FileIt->second;
478 assert(File.SpelledTokens.data() <= Spelled.data() &&
479 Spelled.end() <=
480 (File.SpelledTokens.data() + File.SpelledTokens.size()) &&
481 "Tokens not in spelled range");
482#ifndef NDEBUG
483 auto T1 = Spelled.back().location();
484 auto T2 = File.SpelledTokens.back().location();
485 assert(T1 == T2 || sourceManager().isBeforeInTranslationUnit(T1, T2));
486#endif
488}
489
490std::optionalTokenBuffer::Expansion
492 assert(Spelled);
493 const auto &File = fileForSpelled(*Spelled);
494
495 unsigned SpelledIndex = Spelled - File.SpelledTokens.data();
496 auto M = llvm::partition_point(File.Mappings, [&](const Mapping &M) {
497 return M.BeginSpelled < SpelledIndex;
498 });
499 if (M == File.Mappings.end() || M->BeginSpelled != SpelledIndex)
500 return std::nullopt;
501 return makeExpansion(File, *M);
502}
503
506 if (Spelled.empty())
507 return {};
508 const auto &File = fileForSpelled(Spelled);
509
510
511 unsigned SpelledBeginIndex = Spelled.begin() - File.SpelledTokens.data();
512 unsigned SpelledEndIndex = Spelled.end() - File.SpelledTokens.data();
513 auto M = llvm::partition_point(File.Mappings, [&](const Mapping &M) {
514 return M.EndSpelled <= SpelledBeginIndex;
515 });
516 std::vectorTokenBuffer::Expansion Expansions;
517 for (; M != File.Mappings.end() && M->BeginSpelled < SpelledEndIndex; ++M)
518 Expansions.push_back(makeExpansion(File, *M));
519 return Expansions;
520}
521
526
527 auto *Right = llvm::partition_point(
529 bool AcceptRight = Right != Tokens.end() && Right->location() <= Loc;
530 bool AcceptLeft =
531 Right != Tokens.begin() && (Right - 1)->endLocation() >= Loc;
533 Right + (AcceptRight ? 1 : 0));
534}
535
540 Loc, Tokens.spelledTokens(Tokens.sourceManager().getFileID(Loc)));
541}
542
547 if (Tok.kind() == tok::identifier)
548 return &Tok;
549 }
550 return nullptr;
551}
552
557 Loc, Tokens.spelledTokens(Tokens.sourceManager().getFileID(Loc)));
558}
559
560std::vector<const syntax::Token *>
562 auto FileIt = Files.find(FID);
563 assert(FileIt != Files.end() && "file not tracked by token buffer");
564 auto &File = FileIt->second;
565 std::vector<const syntax::Token *> Expansions;
566 auto &Spelled = File.SpelledTokens;
567 for (auto Mapping : File.Mappings) {
569 if (Token->kind() == tok::TokenKind::identifier)
570 Expansions.push_back(Token);
571 }
572 return Expansions;
573}
574
578 std::vectorsyntax::Token Tokens;
581
582 if (T.getKind() == tok::raw_identifier && .needsCleaning() &&
583 .hasUCN()) {
585 T.setIdentifierInfo(&II);
587 }
589 };
590
591 auto SrcBuffer = SM.getBufferData(FR.file());
592 Lexer L(SM.getLocForStartOfFile(FR.file()), LO, SrcBuffer.data(),
594
595
596 SrcBuffer.data() + SrcBuffer.size());
597
600 AddToken(T);
601
602
603 if (SM.getFileOffset(T.getLocation()) < FR.endOffset())
604 AddToken(T);
605 return Tokens;
606}
607
611}
612
613
614
616public:
618
619
620
621
622 void disable() { Collector = nullptr; }
623
626 if (!Collector)
627 return;
628 const auto &SM = Collector->PP.getSourceManager();
629
630
631
632
633
634
635
636
637
638
639
640
641
642
644 return;
645
646
647 if (LastExpansionEnd.isValid() &&
648 .isBeforeInTranslationUnit(LastExpansionEnd, Range.getEnd()))
649 return;
650
651
652
653
656 assert(Collector->Expansions.count(Range.getBegin()) &&
657 "Overlapping macros should have same expansion location");
658 }
659
662 }
663
664private:
666
668};
669
670
671
672
673
674
675
676
677
678
679
680
681
682
684
686 if (T.isAnnotation())
687 return;
688 DEBUG_WITH_TYPE("collect-tokens", llvm::dbgs()
689 << "Token: "
692 << "\n"
693
694 );
696 });
697
698
699 auto CB = std::make_unique(*this);
700 this->Collector = CB.get();
702}
703
704
705
707public:
708 Builder(std::vectorsyntax::Token Expanded, PPExpansions CollectedExpansions,
710 : Result(SM), CollectedExpansions(std::move(CollectedExpansions)), SM(SM),
711 LangOpts(LangOpts) {
712 Result.ExpandedTokens = std::move(Expanded);
713 }
714
716 assert(!Result.ExpandedTokens.empty());
717 assert(Result.ExpandedTokens.back().kind() == tok::eof);
718
719
720 buildSpelledTokens();
721
722
723
724
725
726 while (NextExpanded < Result.ExpandedTokens.size() - 1 ) {
727
728
729 discard();
730
731
732 unsigned OldPosition = NextExpanded;
734 if (NextExpanded == OldPosition)
735 diagnoseAdvanceFailure();
736 }
737
738
739 for (const auto &File : Result.Files)
740 discard(File.first);
741
742#ifndef NDEBUG
743 for (auto &pair : Result.Files) {
744 auto &mappings = pair.second.Mappings;
745 assert(llvm::is_sorted(mappings, [](const TokenBuffer::Mapping &M1,
746 const TokenBuffer::Mapping &M2) {
747 return M1.BeginSpelled < M2.BeginSpelled &&
748 M1.EndSpelled < M2.EndSpelled &&
749 M1.BeginExpanded < M2.BeginExpanded &&
750 M1.EndExpanded < M2.EndExpanded;
751 }));
752 }
753#endif
754
755 return std::move(Result);
756 }
757
758private:
759
760
761
762
763 void discard(std::optional Drain = std::nullopt) {
765 Drain ? SM.getLocForEndOfFile(*Drain)
766 : SM.getExpansionLoc(
767 Result.ExpandedTokens[NextExpanded].location());
769 const auto &SpelledTokens = Result.Files[File].SpelledTokens;
770 auto &NextSpelled = this->NextSpelled[File];
771
772 TokenBuffer::Mapping Mapping;
773 Mapping.BeginSpelled = NextSpelled;
774
775
776 Mapping.BeginExpanded = Mapping.EndExpanded =
777 Drain ? Result.Files[*Drain].EndExpanded : NextExpanded;
778
779
780 auto FlushMapping = [&, this] {
781 Mapping.EndSpelled = NextSpelled;
782 if (Mapping.BeginSpelled != Mapping.EndSpelled)
783 Result.Files[File].Mappings.push_back(Mapping);
784 Mapping.BeginSpelled = NextSpelled;
785 };
786
787 while (NextSpelled < SpelledTokens.size() &&
788 SpelledTokens[NextSpelled].location() < Target) {
789
790
791
793 CollectedExpansions.lookup(SpelledTokens[NextSpelled].location());
794 if (KnownEnd.isValid()) {
795 FlushMapping();
796 while (NextSpelled < SpelledTokens.size() &&
797 SpelledTokens[NextSpelled].location() <= KnownEnd)
798 ++NextSpelled;
799 FlushMapping();
800
801 } else {
802 ++NextSpelled;
803 }
804 }
805 FlushMapping();
806 }
807
808
809
810
812 const syntax::Token &Tok = Result.ExpandedTokens[NextExpanded];
815 const auto &SpelledTokens = Result.Files[File].SpelledTokens;
816 auto &NextSpelled = this->NextSpelled[File];
817
819
820 while (NextSpelled < SpelledTokens.size() &&
821 NextExpanded < Result.ExpandedTokens.size() &&
822 SpelledTokens[NextSpelled].location() ==
823 Result.ExpandedTokens[NextExpanded].location()) {
824 ++NextSpelled;
825 ++NextExpanded;
826 }
827
828 } else {
829
830 auto End = CollectedExpansions.lookup(Expansion);
831 assert(End.isValid() && "Macro expansion wasn't captured?");
832
833
834 TokenBuffer::Mapping Mapping;
835 Mapping.BeginExpanded = NextExpanded;
836 Mapping.BeginSpelled = NextSpelled;
837
838 while (NextSpelled < SpelledTokens.size() &&
839 SpelledTokens[NextSpelled].location() <= End)
840 ++NextSpelled;
841
842 while (NextExpanded < Result.ExpandedTokens.size() &&
843 SM.getExpansionLoc(
844 Result.ExpandedTokens[NextExpanded].location()) == Expansion)
845 ++NextExpanded;
846
847 Mapping.EndExpanded = NextExpanded;
848 Mapping.EndSpelled = NextSpelled;
849 Result.Files[File].Mappings.push_back(Mapping);
850 }
851 }
852
853
854 void diagnoseAdvanceFailure() {
855#ifndef NDEBUG
856
857 for (unsigned I = (NextExpanded < 10) ? 0 : NextExpanded - 10;
858 I < NextExpanded + 5 && I < Result.ExpandedTokens.size(); ++I) {
859 const char *L =
860 (I == NextExpanded) ? "!! " : (I < NextExpanded) ? "ok " : " ";
861 llvm::errs() << L << Result.ExpandedTokens[I].dumpForTests(SM) << "\n";
862 }
863#endif
864 llvm_unreachable("Couldn't map expanded token to spelled tokens!");
865 }
866
867
868
869 void buildSpelledTokens() {
870 for (unsigned I = 0; I < Result.ExpandedTokens.size(); ++I) {
871 const auto &Tok = Result.ExpandedTokens[I];
872 auto FID = SM.getFileID(SM.getExpansionLoc(Tok.location()));
873 auto It = Result.Files.try_emplace(FID);
874 TokenBuffer::MarkedFile &File = It.first->second;
875
876
877 File.EndExpanded = Tok.kind() == tok::eof ? I : I + 1;
878
879 if (!It.second)
880 continue;
881
882 File.BeginExpanded = I;
884 }
885 }
886
888 unsigned NextExpanded = 0;
889 llvm::DenseMap<FileID, unsigned> NextSpelled;
890 PPExpansions CollectedExpansions;
893};
894
896 PP.setTokenWatcher(nullptr);
897 Collector->disable();
898 return Builder(std::move(Expanded), std::move(Expansions),
899 PP.getSourceManager(), PP.getLangOpts())
900 .build();
901}
902
904 return std::string(llvm::formatv("Token({0}, length = {1})",
906}
907
909 return std::string(llvm::formatv("Token(`{0}`, {1}, length = {2})", text(SM),
911}
912
914 auto PrintToken = [this](const syntax::Token &T) -> std::string {
915 if (T.kind() == tok::eof)
916 return "";
917 return std::string(T.text(*SourceMgr));
918 };
919
920 auto DumpTokens = [this, &PrintToken](llvm::raw_ostream &OS,
922 if (Tokens.empty()) {
923 OS << "";
924 return;
925 }
926 OS << Tokens[0].text(*SourceMgr);
927 for (unsigned I = 1; I < Tokens.size(); ++I) {
928 if (Tokens[I].kind() == tok::eof)
929 continue;
930 OS << " " << PrintToken(Tokens[I]);
931 }
932 };
933
934 std::string Dump;
935 llvm::raw_string_ostream OS(Dump);
936
937 OS << "expanded tokens:\n"
938 << " ";
939
940 DumpTokens(OS, llvm::ArrayRef(ExpandedTokens).drop_back());
941 OS << "\n";
942
943 std::vector Keys;
944 for (const auto &F : Files)
945 Keys.push_back(F.first);
946 llvm::sort(Keys);
947
948 for (FileID ID : Keys) {
949 const MarkedFile &File = Files.find(ID)->second;
951 if (!Entry)
952 continue;
953 std::string Path = llvm::sys::path::convert_to_slash(Entry->getName());
954 OS << llvm::formatv("file '{0}'\n", Path) << " spelled tokens:\n"
955 << " ";
956 DumpTokens(OS, File.SpelledTokens);
957 OS << "\n";
958
959 if (File.Mappings.empty()) {
960 OS << " no mappings.\n";
961 continue;
962 }
963 OS << " mappings:\n";
964 for (auto &M : File.Mappings) {
965 OS << llvm::formatv(
966 " ['{0}'_{1}, '{2}'_{3}) => ['{4}'_{5}, '{6}'_{7})\n",
967 PrintToken(File.SpelledTokens[M.BeginSpelled]), M.BeginSpelled,
968 M.EndSpelled == File.SpelledTokens.size()
969 ? ""
970 : PrintToken(File.SpelledTokens[M.EndSpelled]),
971 M.EndSpelled, PrintToken(ExpandedTokens[M.BeginExpanded]),
972 M.BeginExpanded, PrintToken(ExpandedTokens[M.EndExpanded]),
973 M.EndExpanded);
974 }
975 }
976 return Dump;
977}
Defines the Diagnostic-related interfaces.
static Decl::Kind getKind(const Decl *D)
Defines the clang::IdentifierInfo, clang::IdentifierTable, and clang::Selector interfaces.
Forward-declares and imports various common LLVM datatypes that clang wants to use unqualified.
Defines the clang::LangOptions interface.
llvm::MachO::Target Target
Defines the PPCallbacks interface.
static ParseState advance(ParseState S, size_t N)
Defines the clang::Preprocessor interface.
Defines the clang::SourceLocation class and associated facilities.
Defines the SourceManager interface.
Defines the clang::TokenKind enum and support functions.
Builds mappings and spelled tokens in the TokenBuffer based on the expanded token stream.
Builder(std::vector< syntax::Token > Expanded, PPExpansions CollectedExpansions, const SourceManager &SM, const LangOptions &LangOpts)
Records information required to construct mappings for the token buffer that we are collecting.
CollectPPExpansions(TokenCollector &C)
void disable()
Disabled instance will stop reporting anything to TokenCollector.
void MacroExpands(const clang::Token &MacroNameTok, const MacroDefinition &MD, SourceRange Range, const MacroArgs *Args) override
Called by Preprocessor::HandleMacroExpandedIdentifier when a macro invocation is found.
Represents a character-granular source range.
An opaque identifier used by SourceManager which refers to a source file (MemoryBuffer) along with it...
unsigned getHashValue() const
One of these records is kept for each identifier that is lexed.
tok::TokenKind getTokenID() const
If this is a source-language token (e.g.
Implements an efficient mapping from strings to IdentifierInfo nodes.
IdentifierInfo & get(StringRef Name)
Return the identifier token info for the specified named identifier.
Keeps track of the various options that can be enabled, which controls the dialect of C or C++ that is accepted.
Lexer - This provides a simple interface that turns a text buffer into a stream of tokens.
bool LexFromRawLexer(Token &Result)
LexFromRawLexer - Lex a token from a designated raw lexer (one with no associated preprocessor object...
unsigned getCurrentBufferOffset()
Returns the current lexing offset.
MacroArgs - An instance of this class captures information about the formal arguments specified to a ...
A description of the current definition of a macro.
This interface provides a way to observe the actions of the preprocessor as it does its thing.
Engages in a tight little dance with the lexer to efficiently preprocess tokens.
void addPPCallbacks(std::unique_ptr< PPCallbacks > C)
SourceManager & getSourceManager() const
void setTokenWatcher(llvm::unique_function< void(const clang::Token &)> F)
Register a function that would be called on each token in the final expanded token stream.
Encodes a location in the source.
bool isValid() const
Return true if this is a valid SourceLocation object.
This class handles loading and caching of source files into memory.
FileID getFileID(SourceLocation SpellingLoc) const
Return the FileID for a SourceLocation.
OptionalFileEntryRef getFileEntryRefForID(FileID FID) const
Returns the FileEntryRef for the provided FileID.
bool isMacroArgExpansion(SourceLocation Loc, SourceLocation *StartLoc=nullptr) const
Tests whether the given source location represents a macro argument's expansion into the function-lik...
SourceLocation getExpansionLoc(SourceLocation Loc) const
Given a SourceLocation object Loc, return the expansion location referenced by the ID.
A trivial tuple used to represent a source range.
void setBegin(SourceLocation b)
SourceLocation getEnd() const
SourceLocation getBegin() const
Token - This structure provides full information about a lexed token.
A list of tokens obtained by preprocessing a text buffer and operations to map between the expanded a...
const syntax::Token * spelledTokenContaining(SourceLocation Loc) const
Returns the spelled Token containing the Loc, if there are no such tokens returns nullptr.
const SourceManager & sourceManager() const
void indexExpandedTokens()
Builds a cache to make future calls to expandedToken(SourceRange) faster.
llvm::SmallVector< llvm::ArrayRef< syntax::Token >, 1 > expandedForSpelled(llvm::ArrayRef< syntax::Token > Spelled) const
Find the subranges of expanded tokens, corresponding to Spelled.
llvm::ArrayRef< syntax::Token > expandedTokens() const
All tokens produced by the preprocessor after all macro replacements, directives, etc.
std::string dumpForTests() const
std::optional< llvm::ArrayRef< syntax::Token > > spelledForExpanded(llvm::ArrayRef< syntax::Token > Expanded) const
Returns the subrange of spelled tokens corresponding to AST node spanning Expanded.
std::vector< Expansion > expansionsOverlapping(llvm::ArrayRef< syntax::Token > Spelled) const
Returns all expansions (partially) expanded from the specified tokens.
std::optional< Expansion > expansionStartingAt(const syntax::Token *Spelled) const
If Spelled starts a mapping (e.g.
llvm::ArrayRef< syntax::Token > spelledTokens(FileID FID) const
Lexed tokens of a file before preprocessing.
std::vector< const syntax::Token * > macroExpansions(FileID FID) const
Get all tokens that expand a macro in FID.
Collects tokens for the main file while running the frontend action.
TokenBuffer consume() &&
Finalizes token collection.
TokenCollector(Preprocessor &P)
Adds the hooks to collect the tokens.
A token coming directly from a file or from a macro invocation.
std::string str() const
For debugging purposes.
llvm::StringRef text(const SourceManager &SM) const
Get the substring covered by the token.
tok::TokenKind kind() const
FileRange range(const SourceManager &SM) const
Gets a range of this token.
Token(SourceLocation Location, unsigned Length, tok::TokenKind Kind)
std::string dumpForTests(const SourceManager &SM) const
SourceLocation location() const
Location of the first character of a token.
bool Dec(InterpState &S, CodePtr OpPC)
- Pops a pointer from the stack 2) Load the value from the pointer 3) Writes the value decreased by ...
const syntax::Token * spelledIdentifierTouching(SourceLocation Loc, llvm::ArrayRef< syntax::Token > Tokens)
The identifier token that overlaps or touches a spelling location Loc.
std::vector< syntax::Token > tokenize(FileID FID, const SourceManager &SM, const LangOptions &LO)
Lex the text buffer, corresponding to FID, in raw mode and record the resulting spelled tokens.
raw_ostream & operator<<(raw_ostream &OS, NodeKind K)
For debugging purposes.
llvm::ArrayRef< syntax::Token > spelledTokensTouching(SourceLocation Loc, const syntax::TokenBuffer &Tokens)
The spelled tokens that overlap or touch a spelling location Loc.
const char * getTokenName(TokenKind Kind) LLVM_READNONE
Determines the name of a token as used within the front end.
TokenKind
Provides a simple uniform namespace for tokens from all C languages.
The JSON file list parser is used to communicate input to InstallAPI.
const FunctionProtoType * T
float __ovld __cnfn length(float)
Return the length of vector p, i.e., sqrt(p.x2 + p.y 2 + ...)
A half-open character range inside a particular file, the start offset is included and the end offset...
CharSourceRange toCharRange(const SourceManager &SM) const
Convert to the clang range.
FileRange(FileID File, unsigned BeginOffset, unsigned EndOffset)
EXPECTS: File.isValid() && Begin <= End.
unsigned beginOffset() const
Start is a start offset (inclusive) in the corresponding file.
llvm::StringRef text(const SourceManager &SM) const
Gets the substring that this FileRange refers to.
unsigned endOffset() const
End offset (exclusive) in the corresponding file.
An expansion produced by the preprocessor, includes macro expansions and preprocessor directives.