clang: lib/Tooling/Syntax/Tokens.cpp Source File

1

2

3

4

5

6

7

9

20#include "llvm/ADT/ArrayRef.h"

21#include "llvm/ADT/STLExtras.h"

22#include "llvm/Support/Debug.h"

23#include "llvm/Support/ErrorHandling.h"

24#include "llvm/Support/FormatVariadic.h"

25#include "llvm/Support/raw_ostream.h"

26#include

27#include

28#include

29#include

30#include

31

32using namespace clang;

34

35namespace {

36

41 return {};

43 llvm::partition_point(Toks, [&](const syntax::Token &T) {

44 return SM.isBeforeInTranslationUnit(T.location(), R.getBegin());

45 });

47 llvm::partition_point(Toks, [&](const syntax::Token &T) {

48 return SM.isBeforeInTranslationUnit(R.getEnd(), T.location());

49 });

50 if (Begin > End)

51 return {};

52 return {Begin, End};

53}

54

55

56

57

58

59

60

61

62

63

68

69

70

71

72

73

74

75 assert(SM.getSLocEntry(TargetFile).isFile());

76

77

78

79

80

81

82

83

84

85

86 while (First.isMacroID() && Last.isMacroID()) {

87 auto DecFirst = SM.getDecomposedLoc(First);

88 auto DecLast = SM.getDecomposedLoc(Last);

89 auto &ExpFirst = SM.getSLocEntry(DecFirst.first).getExpansion();

90 auto &ExpLast = SM.getSLocEntry(DecLast.first).getExpansion();

91

92 if (!ExpFirst.isMacroArgExpansion() || !ExpLast.isMacroArgExpansion())

93 break;

94

95

96 if (ExpFirst.getExpansionLocStart() != ExpLast.getExpansionLocStart())

97 break;

98

99

100

101

102

103

104 auto ExpFileID = SM.getFileID(ExpFirst.getExpansionLocStart());

105 if (ExpFileID == TargetFile)

106 break;

107

108

109 First = ExpFirst.getSpellingLoc().getLocWithOffset(DecFirst.second);

110 Last = ExpLast.getSpellingLoc().getLocWithOffset(DecLast.second);

111 }

112

113

114

117 auto DecFirst = SM.getDecomposedExpansionLoc(Candidate.getBegin());

118 auto DecLast = SM.getDecomposedExpansionLoc(Candidate.getEnd());

119

120 if (Candidate.isInvalid() || DecFirst.first != TargetFile ||

121 DecLast.first != TargetFile)

123

125 auto Dec = SM.getDecomposedLoc(SM.getExpansionRange(Prev).getBegin());

126 if (Dec.first != DecFirst.first || Dec.second >= DecFirst.second)

128 }

129 if (Next.isValid()) {

130 auto Dec = SM.getDecomposedLoc(SM.getExpansionRange(Next).getEnd());

131 if (Dec.first != DecLast.first || Dec.second <= DecLast.second)

133 }

134

135

136 return Candidate;

137}

138

139}

140

143 : Location(Location), Length(Length), Kind(Kind) {

144 assert(Location.isValid());

145}

146

149 assert(!T.isAnnotation());

150}

151

156 return llvm::StringRef(Start, length());

157}

158

160 assert(location().isFileID() && "must be a spelled token");

162 unsigned StartOffset;

163 std::tie(File, StartOffset) = SM.getDecomposedLoc(location());

165}

166

170 auto F = First.range(SM);

171 auto L = Last.range(SM);

172 assert(F.file() == L.file() && "tokens from different files");

173 assert((F == L || F.endOffset() <= L.beginOffset()) &&

174 "wrong order of tokens");

175 return FileRange(F.file(), F.beginOffset(), L.endOffset());

176}

177

179 return OS << T.str();

180}

181

183 : File(File), Begin(BeginOffset), End(EndOffset) {

184 assert(File.isValid());

185 assert(BeginOffset <= EndOffset);

186}

187

189 unsigned Length) {

190 assert(BeginLoc.isValid());

192

193 std::tie(File, Begin) = SM.getDecomposedLoc(BeginLoc);

194 End = Begin + Length;

195}

198 assert(BeginLoc.isValid());

200 assert(EndLoc.isValid());

202 assert(SM.getFileID(BeginLoc) == SM.getFileID(EndLoc));

203 assert(SM.getFileOffset(BeginLoc) <= SM.getFileOffset(EndLoc));

204

205 std::tie(File, Begin) = SM.getDecomposedLoc(BeginLoc);

206 End = SM.getFileOffset(EndLoc);

207}

208

211 return OS << llvm::formatv("FileRange(file = {0}, offsets = {1}-{2})",

214}

215

218 StringRef Text = SM.getBufferData(File, &Invalid);

220 return "";

221 assert(Begin <= Text.size());

222 assert(End <= Text.size());

224}

225

227

228 if (!ExpandedTokIndex.empty())

229 return;

230 ExpandedTokIndex.reserve(ExpandedTokens.size());

231

232 for (size_t I = 0, E = ExpandedTokens.size(); I != E; ++I) {

235 ExpandedTokIndex[Loc] = I;

236 }

237}

238

241 return {};

242 if (!ExpandedTokIndex.empty()) {

243

244

245

246 const auto B = ExpandedTokIndex.find(R.getBegin());

247 const auto E = ExpandedTokIndex.find(R.getEnd());

248 if (B != ExpandedTokIndex.end() && E != ExpandedTokIndex.end()) {

249 const Token *L = ExpandedTokens.data() + B->getSecond();

250

251 const Token *R = ExpandedTokens.data() + E->getSecond() + 1;

252 if (L > R)

253 return {};

254 return {L, R};

255 }

256 }

257

258

259 return getTokensCovering(expandedTokens(), R, *SourceMgr);

260}

261

264 SourceRange(SM.getComposedLoc(File, Begin), SM.getComposedLoc(File, End)),

265 false);

266}

267

268std::pair<const syntax::Token *, const TokenBuffer::Mapping *>

269TokenBuffer::spelledForExpandedToken(const syntax::Token *Expanded) const {

270 assert(Expanded);

271 assert(ExpandedTokens.data() <= Expanded &&

272 Expanded < ExpandedTokens.data() + ExpandedTokens.size());

273

274 auto FileIt = Files.find(

275 SourceMgr->getFileID(SourceMgr->getExpansionLoc(Expanded->location())));

276 assert(FileIt != Files.end() && "no file for an expanded token");

277

278 const MarkedFile &File = FileIt->second;

279

280 unsigned ExpandedIndex = Expanded - ExpandedTokens.data();

281

282 auto It = llvm::partition_point(File.Mappings, [&](const Mapping &M) {

283 return M.BeginExpanded <= ExpandedIndex;

284 });

285

286 if (It == File.Mappings.begin()) {

287

288 return {&File.SpelledTokens[ExpandedIndex - File.BeginExpanded],

289 nullptr};

290 }

291 --It;

292

293

294 if (ExpandedIndex < It->EndExpanded)

295 return {&File.SpelledTokens[It->BeginSpelled], &*It};

296

297

298

299 return {

300 &File.SpelledTokens[It->EndSpelled + (ExpandedIndex - It->EndExpanded)],

301 nullptr};

302}

303

304const TokenBuffer::Mapping *

305TokenBuffer::mappingStartingBeforeSpelled(const MarkedFile &F,

306 const syntax::Token *Spelled) {

307 assert(F.SpelledTokens.data() <= Spelled);

308 unsigned SpelledI = Spelled - F.SpelledTokens.data();

309 assert(SpelledI < F.SpelledTokens.size());

310

311 auto It = llvm::partition_point(F.Mappings, [SpelledI](const Mapping &M) {

312 return M.BeginSpelled <= SpelledI;

313 });

314 if (It == F.Mappings.begin())

315 return nullptr;

316 --It;

317 return &*It;

318}

319

320llvm::SmallVector<llvm::ArrayRef<syntax::Token>, 1>

322 if (Spelled.empty())

323 return {};

324 const auto &File = fileForSpelled(Spelled);

325

326 auto *FrontMapping = mappingStartingBeforeSpelled(File, &Spelled.front());

327 unsigned SpelledFrontI = &Spelled.front() - File.SpelledTokens.data();

328 assert(SpelledFrontI < File.SpelledTokens.size());

329 unsigned ExpandedBegin;

330 if (!FrontMapping) {

331

332

333 ExpandedBegin = File.BeginExpanded + SpelledFrontI;

334 } else if (SpelledFrontI < FrontMapping->EndSpelled) {

335

336 if (SpelledFrontI != FrontMapping->BeginSpelled) {

337

338 return {};

339 }

340

341 ExpandedBegin = FrontMapping->BeginExpanded;

342 } else {

343

344

345 ExpandedBegin =

346 FrontMapping->EndExpanded + (SpelledFrontI - FrontMapping->EndSpelled);

347 }

348

349 auto *BackMapping = mappingStartingBeforeSpelled(File, &Spelled.back());

350 unsigned SpelledBackI = &Spelled.back() - File.SpelledTokens.data();

351 unsigned ExpandedEnd;

352 if (!BackMapping) {

353

354

355 ExpandedEnd = File.BeginExpanded + SpelledBackI + 1;

356 } else if (SpelledBackI < BackMapping->EndSpelled) {

357

358 if (SpelledBackI + 1 != BackMapping->EndSpelled) {

359

360 return {};

361 }

362 ExpandedEnd = BackMapping->EndExpanded;

363 } else {

364

365 ExpandedEnd =

366 BackMapping->EndExpanded + (SpelledBackI - BackMapping->EndSpelled) + 1;

367 }

368

369 assert(ExpandedBegin < ExpandedTokens.size());

370 assert(ExpandedEnd < ExpandedTokens.size());

371

372 if (ExpandedBegin == ExpandedEnd)

373 return {};

374 return {llvm::ArrayRef(ExpandedTokens.data() + ExpandedBegin,

375 ExpandedTokens.data() + ExpandedEnd)};

376}

377

379 auto It = Files.find(FID);

380 assert(It != Files.end());

381 return It->second.SpelledTokens;

382}

383

387 const auto *Tok = llvm::partition_point(

389 [&](const syntax::Token &Tok) { return Tok.endLocation() <= Loc; });

390 if (!Tok || Loc < Tok->location())

391 return nullptr;

392 return Tok;

393}

394

395std::string TokenBuffer::Mapping::str() const {

396 return std::string(

397 llvm::formatv("spelled tokens: [{0},{1}), expanded tokens: [{2},{3})",

398 BeginSpelled, EndSpelled, BeginExpanded, EndExpanded));

399}

400

401std::optional<llvm::ArrayRef<syntax::Token>>

403

404

405

406 if (!Expanded.empty() && Expanded.back().kind() == tok::eof) {

407 Expanded = Expanded.drop_back();

408 }

409

410

411 if (Expanded.empty())

412 return std::nullopt;

415 auto [FirstSpelled, FirstMapping] = spelledForExpandedToken(First);

416 auto [LastSpelled, LastMapping] = spelledForExpandedToken(Last);

417

418 FileID FID = SourceMgr->getFileID(FirstSpelled->location());

419

420 if (FID != SourceMgr->getFileID(LastSpelled->location()))

421 return std::nullopt;

422

423 const MarkedFile &File = Files.find(FID)->second;

424

425

426

427 if (FirstMapping && FirstMapping == LastMapping &&

428 SourceMgr->isMacroArgExpansion(First->location()) &&

429 SourceMgr->isMacroArgExpansion(Last->location())) {

430

433 : (First - 1)->location();

436 : (Last + 1)->location();

437 SourceRange Range = spelledForExpandedSlow(

438 First->location(), Last->location(), Prev, Next, FID, *SourceMgr);

439 if (Range.isInvalid())

440 return std::nullopt;

441 return getTokensCovering(File.SpelledTokens, Range, *SourceMgr);

442 }

443

444

445

446 unsigned FirstExpanded = Expanded.begin() - ExpandedTokens.data();

447 unsigned LastExpanded = Expanded.end() - ExpandedTokens.data();

448 if (FirstMapping && FirstExpanded != FirstMapping->BeginExpanded)

449 return std::nullopt;

450 if (LastMapping && LastMapping->EndExpanded != LastExpanded)

451 return std::nullopt;

453 FirstMapping ? File.SpelledTokens.data() + FirstMapping->BeginSpelled

454 : FirstSpelled,

455 LastMapping ? File.SpelledTokens.data() + LastMapping->EndSpelled

456 : LastSpelled + 1);

457}

458

460 const Mapping &M) const {

461 Expansion E;

463 F.SpelledTokens.data() + M.EndSpelled);

464 E.Expanded = llvm::ArrayRef(ExpandedTokens.data() + M.BeginExpanded,

465 ExpandedTokens.data() + M.EndExpanded);

466 return E;

467}

468

469const TokenBuffer::MarkedFile &

471 assert(!Spelled.empty());

472 assert(Spelled.front().location().isFileID() && "not a spelled token");

473 auto FileIt = Files.find(SourceMgr->getFileID(Spelled.front().location()));

474 assert(FileIt != Files.end() && "file not tracked by token buffer");

475 const auto &File = FileIt->second;

476 assert(File.SpelledTokens.data() <= Spelled.data() &&

477 Spelled.end() <=

478 (File.SpelledTokens.data() + File.SpelledTokens.size()) &&

479 "Tokens not in spelled range");

480#ifndef NDEBUG

481 auto T1 = Spelled.back().location();

482 auto T2 = File.SpelledTokens.back().location();

483 assert(T1 == T2 || sourceManager().isBeforeInTranslationUnit(T1, T2));

484#endif

486}

487

488std::optional<TokenBuffer::Expansion>

490 assert(Spelled);

491 const auto &File = fileForSpelled(*Spelled);

492

493 unsigned SpelledIndex = Spelled - File.SpelledTokens.data();

494 auto M = llvm::partition_point(File.Mappings, [&](const Mapping &M) {

495 return M.BeginSpelled < SpelledIndex;

496 });

497 if (M == File.Mappings.end() || M->BeginSpelled != SpelledIndex)

498 return std::nullopt;

499 return makeExpansion(File, *M);

500}

501

504 if (Spelled.empty())

505 return {};

506 const auto &File = fileForSpelled(Spelled);

507

508

509 unsigned SpelledBeginIndex = Spelled.begin() - File.SpelledTokens.data();

510 unsigned SpelledEndIndex = Spelled.end() - File.SpelledTokens.data();

511 auto M = llvm::partition_point(File.Mappings, [&](const Mapping &M) {

512 return M.EndSpelled <= SpelledBeginIndex;

513 });

514 std::vector<TokenBuffer::Expansion> Expansions;

515 for (; M != File.Mappings.end() && M->BeginSpelled < SpelledEndIndex; ++M)

516 Expansions.push_back(makeExpansion(File, *M));

517 return Expansions;

518}

519

524

525 auto *Right = llvm::partition_point(

527 bool AcceptRight = Right != Tokens.end() && Right->location() <= Loc;

528 bool AcceptLeft =

529 Right != Tokens.begin() && (Right - 1)->endLocation() >= Loc;

531 Right + (AcceptRight ? 1 : 0));

532}

533

540

545 if (Tok.kind() == tok::identifier)

546 return &Tok;

547 }

548 return nullptr;

549}

550

557

558std::vector<const syntax::Token *>

560 auto FileIt = Files.find(FID);

561 assert(FileIt != Files.end() && "file not tracked by token buffer");

562 auto &File = FileIt->second;

563 std::vector<const syntax::Token *> Expansions;

564 auto &Spelled = File.SpelledTokens;

565 for (auto Mapping : File.Mappings) {

567 if (Token->kind() == tok::TokenKind::identifier)

568 Expansions.push_back(Token);

569 }

570 return Expansions;

571}

572

576 std::vector<syntax::Token> Tokens;

579

580 if (T.getKind() == tok::raw_identifier && !T.needsCleaning() &&

581 !T.hasUCN()) {

583 T.setIdentifierInfo(&II);

585 }

587 };

588

589 auto SrcBuffer = SM.getBufferData(FR.file());

590 Lexer L(SM.getLocForStartOfFile(FR.file()), LO, SrcBuffer.data(),

592

593

594 SrcBuffer.data() + SrcBuffer.size());

595

598 AddToken(T);

599

600

601 if (SM.getFileOffset(T.getLocation()) < FR.endOffset())

602 AddToken(T);

603 return Tokens;

604}

605

610

611

612

614public:

616

617

618

619

620 void disable() { Collector = nullptr; }

621

624 if (!Collector)

625 return;

626 const auto &SM = Collector->PP.getSourceManager();

627

628

629

630

631

632

633

634

635

636

637

638

639

640

641 if (!Range.getEnd().isFileID())

642 return;

643

644

645 if (LastExpansionEnd.isValid() &&

646 SM.isBeforeInTranslationUnit(LastExpansionEnd, Range.getEnd()))

647 return;

648

649

650

651

652 if (!Range.getBegin().isFileID()) {

653 Range.setBegin(SM.getExpansionLoc(Range.getBegin()));

654 assert(Collector->Expansions.count(Range.getBegin()) &&

655 "Overlapping macros should have same expansion location");

656 }

657

658 Collector->Expansions[Range.getBegin()] = Range.getEnd();

659 LastExpansionEnd = Range.getEnd();

660 }

661

662private:

663 TokenCollector *Collector;

664

666};

667

668

669

670

671

672

673

674

675

676

677

678

679

680

682

683 PP.setTokenWatcher([this](const clang::Token &T) {

684 if (T.isAnnotation())

685 return;

686 DEBUG_WITH_TYPE("collect-tokens", llvm::dbgs()

687 << "Token: "

690 << "\n"

691

692 );

694 });

695

696

697 auto CB = std::make_unique<CollectPPExpansions>(*this);

698 this->Collector = CB.get();

699 PP.addPPCallbacks(std::move(CB));

700}

701

702

703

705public:

706 Builder(std::vector<syntax::Token> Expanded, PPExpansions CollectedExpansions,

708 : Result(SM), CollectedExpansions(std::move(CollectedExpansions)), SM(SM),

709 LangOpts(LangOpts) {

710 Result.ExpandedTokens = std::move(Expanded);

711 }

712

714 assert(!Result.ExpandedTokens.empty());

715 assert(Result.ExpandedTokens.back().kind() == tok::eof);

716

717

718 buildSpelledTokens();

719

720

721

722

723

724 while (NextExpanded < Result.ExpandedTokens.size() - 1) {

725

726

727 discard();

728

729

730 unsigned OldPosition = NextExpanded;

732 if (NextExpanded == OldPosition)

733 diagnoseAdvanceFailure();

734 }

735

736

737 for (const auto &File : Result.Files)

738 discard(File.first);

739

740#ifndef NDEBUG

741 for (auto &pair : Result.Files) {

742 auto &mappings = pair.second.Mappings;

743 assert(llvm::is_sorted(mappings, [](const TokenBuffer::Mapping &M1,

744 const TokenBuffer::Mapping &M2) {

745 return M1.BeginSpelled < M2.BeginSpelled &&

746 M1.EndSpelled < M2.EndSpelled &&

747 M1.BeginExpanded < M2.BeginExpanded &&

748 M1.EndExpanded < M2.EndExpanded;

749 }));

750 }

751#endif

752

753 return std::move(Result);

754 }

755

756private:

757

758

759

760

761 void discard(std::optional<FileID> Drain = std::nullopt) {

763 Drain ? SM.getLocForEndOfFile(*Drain)

764 : SM.getExpansionLoc(

765 Result.ExpandedTokens[NextExpanded].location());

767 const auto &SpelledTokens = Result.Files[File].SpelledTokens;

768 auto &NextSpelled = this->NextSpelled[File];

769

770 TokenBuffer::Mapping Mapping;

771 Mapping.BeginSpelled = NextSpelled;

772

773

774 Mapping.BeginExpanded = Mapping.EndExpanded =

775 Drain ? Result.Files[*Drain].EndExpanded : NextExpanded;

776

777

778 auto FlushMapping = [&, this] {

779 Mapping.EndSpelled = NextSpelled;

780 if (Mapping.BeginSpelled != Mapping.EndSpelled)

781 Result.Files[File].Mappings.push_back(Mapping);

782 Mapping.BeginSpelled = NextSpelled;

783 };

784

785 while (NextSpelled < SpelledTokens.size() &&

786 SpelledTokens[NextSpelled].location() < Target) {

787

788

789

791 CollectedExpansions.lookup(SpelledTokens[NextSpelled].location());

792 if (KnownEnd.isValid()) {

793 FlushMapping();

794 while (NextSpelled < SpelledTokens.size() &&

795 SpelledTokens[NextSpelled].location() <= KnownEnd)

796 ++NextSpelled;

797 FlushMapping();

798

799 } else {

800 ++NextSpelled;

801 }

802 }

803 FlushMapping();

804 }

805

806

807

808

810 const syntax::Token &Tok = Result.ExpandedTokens[NextExpanded];

811 SourceLocation Expansion = SM.getExpansionLoc(Tok.location());

812 FileID File = SM.getFileID(Expansion);

813 const auto &SpelledTokens = Result.Files[File].SpelledTokens;

814 auto &NextSpelled = this->NextSpelled[File];

815

816 if (Tok.location().isFileID()) {

817

818 while (NextSpelled < SpelledTokens.size() &&

819 NextExpanded < Result.ExpandedTokens.size() &&

820 SpelledTokens[NextSpelled].location() ==

821 Result.ExpandedTokens[NextExpanded].location()) {

822 ++NextSpelled;

823 ++NextExpanded;

824 }

825

826 } else {

827

828 auto End = CollectedExpansions.lookup(Expansion);

829 assert(End.isValid() && "Macro expansion wasn't captured?");

830

831

832 TokenBuffer::Mapping Mapping;

833 Mapping.BeginExpanded = NextExpanded;

834 Mapping.BeginSpelled = NextSpelled;

835

836 while (NextSpelled < SpelledTokens.size() &&

837 SpelledTokens[NextSpelled].location() <= End)

838 ++NextSpelled;

839

840 while (NextExpanded < Result.ExpandedTokens.size() &&

841 SM.getExpansionLoc(

842 Result.ExpandedTokens[NextExpanded].location()) == Expansion)

843 ++NextExpanded;

844

845 Mapping.EndExpanded = NextExpanded;

846 Mapping.EndSpelled = NextSpelled;

847 Result.Files[File].Mappings.push_back(Mapping);

848 }

849 }

850

851

852 void diagnoseAdvanceFailure() {

853#ifndef NDEBUG

854

855 for (unsigned I = (NextExpanded < 10) ? 0 : NextExpanded - 10;

856 I < NextExpanded + 5 && I < Result.ExpandedTokens.size(); ++I) {

857 const char *L =

858 (I == NextExpanded) ? "!! " : (I < NextExpanded) ? "ok " : " ";

859 llvm::errs() << L << Result.ExpandedTokens[I].dumpForTests(SM) << "\n";

860 }

861#endif

862 llvm_unreachable("Couldn't map expanded token to spelled tokens!");

863 }

864

865

866

867 void buildSpelledTokens() {

868 for (unsigned I = 0; I < Result.ExpandedTokens.size(); ++I) {

869 const auto &Tok = Result.ExpandedTokens[I];

870 auto FID = SM.getFileID(SM.getExpansionLoc(Tok.location()));

871 auto It = Result.Files.try_emplace(FID);

872 TokenBuffer::MarkedFile &File = It.first->second;

873

874

875 File.EndExpanded = Tok.kind() == tok::eof ? I : I + 1;

876

877 if (!It.second)

878 continue;

879

880 File.BeginExpanded = I;

882 }

883 }

884

886 unsigned NextExpanded = 0;

887 llvm::DenseMap<FileID, unsigned> NextSpelled;

888 PPExpansions CollectedExpansions;

889 const SourceManager &SM;

890 const LangOptions &LangOpts;

891};

892

894 PP.setTokenWatcher(nullptr);

895 Collector->disable();

896 return Builder(std::move(Expanded), std::move(Expansions),

897 PP.getSourceManager(), PP.getLangOpts())

898 .build();

899}

900

902 return std::string(llvm::formatv("Token({0}, length = {1})",

904}

905

907 return std::string(llvm::formatv("Token(`{0}`, {1}, length = {2})", text(SM),

909}

910

912 auto PrintToken = [this](const syntax::Token &T) -> std::string {

913 if (T.kind() == tok::eof)

914 return "<eof>";

915 return std::string(T.text(*SourceMgr));

916 };

917

918 auto DumpTokens = [this, &PrintToken](llvm::raw_ostream &OS,

920 if (Tokens.empty()) {

921 OS << "<empty>";

922 return;

923 }

924 OS << Tokens[0].text(*SourceMgr);

925 for (unsigned I = 1; I < Tokens.size(); ++I) {

926 if (Tokens[I].kind() == tok::eof)

927 continue;

928 OS << " " << PrintToken(Tokens[I]);

929 }

930 };

931

932 std::string Dump;

933 llvm::raw_string_ostream OS(Dump);

934

935 OS << "expanded tokens:\n"

936 << " ";

937

938 DumpTokens(OS, llvm::ArrayRef(ExpandedTokens).drop_back());

939 OS << "\n";

940

941 std::vector<FileID> Keys;

942 for (const auto &F : Files)

943 Keys.push_back(F.first);

944 llvm::sort(Keys);

945

946 for (FileID ID : Keys) {

947 const MarkedFile &File = Files.find(ID)->second;

948 auto Entry = SourceMgr->getFileEntryRefForID(ID);

949 if (!Entry)

950 continue;

951 std::string Path = llvm::sys::path::convert_to_slash(Entry->getName());

952 OS << llvm::formatv("file '{0}'\n", Path) << " spelled tokens:\n"

953 << " ";

954 DumpTokens(OS, File.SpelledTokens);

955 OS << "\n";

956

957 if (File.Mappings.empty()) {

958 OS << " no mappings.\n";

959 continue;

960 }

961 OS << " mappings:\n";

962 for (auto &M : File.Mappings) {

963 OS << llvm::formatv(

964 " ['{0}'_{1}, '{2}'_{3}) => ['{4}'_{5}, '{6}'_{7})\n",

965 PrintToken(File.SpelledTokens[M.BeginSpelled]), M.BeginSpelled,

966 M.EndSpelled == File.SpelledTokens.size()

967 ? "<eof>"

968 : PrintToken(File.SpelledTokens[M.EndSpelled]),

969 M.EndSpelled, PrintToken(ExpandedTokens[M.BeginExpanded]),

970 M.BeginExpanded, PrintToken(ExpandedTokens[M.EndExpanded]),

971 M.EndExpanded);

972 }

973 }

974 return Dump;

975}
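
The lexing and mapping machinery above is normally driven through TokenCollector and queried through TokenBuffer. The following is a minimal usage sketch, not part of Tokens.cpp: runParse is a hypothetical stand-in for whatever actually drives lexing/parsing (for example a frontend action), and PP is assumed to come from an already-configured CompilerInstance. Only APIs declared in clang/Tooling/Syntax/Tokens.h are used.

#include "clang/Basic/SourceManager.h"
#include "clang/Lex/Preprocessor.h"
#include "clang/Tooling/Syntax/Tokens.h"
#include "llvm/Support/raw_ostream.h"

void dumpUserSpelling(clang::Preprocessor &PP, void (*runParse)()) {
  // Install the collector before parsing; it registers the token watcher and
  // the PPCallbacks (CollectPPExpansions) defined in this file.
  clang::syntax::TokenCollector Collector(PP);
  runParse(); // hypothetical: lex/parse the whole translation unit here

  // Finalize: Builder pairs up the spelled and expanded token streams.
  clang::syntax::TokenBuffer Tokens = std::move(Collector).consume();
  const clang::SourceManager &SM = Tokens.sourceManager();

  // Tokens after preprocessing (macros expanded, directives dropped).
  llvm::ArrayRef<clang::syntax::Token> Expanded = Tokens.expandedTokens();
  if (Expanded.empty())
    return;

  // Map the first expanded token back to the characters the user wrote.
  if (auto Spelled = Tokens.spelledForExpanded(Expanded.take_front(1)))
    llvm::errs() << "spelled as: " << Spelled->front().text(SM) << "\n";

  // Raw-lexed tokens of the main file, before any preprocessing.
  for (const clang::syntax::Token &T : Tokens.spelledTokens(SM.getMainFileID()))
    llvm::errs() << T.text(SM) << " ";
  llvm::errs() << "\n";
}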
