LLVM: lib/ProfileData/InstrProfWriter.cpp Source File

//===- InstrProfWriter.cpp - Instrumented profiling writer ---------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains support for writing profiling data for clang's
// instrumentation based PGO and coverage.
//
//===----------------------------------------------------------------------===//

#include "llvm/ProfileData/InstrProfWriter.h"
#include <cstdint>
#include <cstdlib>
#include <ctime>
#include <limits>
#include <memory>
#include <string>
#include <utility>
#include <vector>

using namespace llvm;

namespace llvm {

class InstrProfRecordWriterTrait {
public:
  using key_type = StringRef;
  using key_type_ref = StringRef;

  using data_type = const InstrProfWriter::ProfilingData *const;
  using data_type_ref = const InstrProfWriter::ProfilingData *const;

  using hash_value_type = uint64_t;
  using offset_type = uint64_t;

  llvm::endianness ValueProfDataEndianness = llvm::endianness::little;
  InstrProfSummaryBuilder *SummaryBuilder;
  InstrProfSummaryBuilder *CSSummaryBuilder;

  InstrProfRecordWriterTrait() = default;

  static hash_value_type ComputeHash(key_type_ref K) {
    return IndexedInstrProf::ComputeHash(K);
  }

  static std::pair<offset_type, offset_type>
  EmitKeyDataLength(raw_ostream &Out, key_type_ref K, data_type_ref V) {
    using namespace support;

    endian::Writer LE(Out, llvm::endianness::little);

    offset_type N = K.size();
    LE.write<offset_type>(N);

    offset_type M = 0;
    for (const auto &ProfileData : *V) {
      const InstrProfRecord &ProfRecord = ProfileData.second;
      // Function hash, number of counters and the counters themselves.
      M += sizeof(uint64_t);
      M += sizeof(uint64_t);
      M += ProfRecord.Counts.size() * sizeof(uint64_t);
      // Number of bitmap bytes and the bitmap bytes themselves.
      M += sizeof(uint64_t);
      M += ProfRecord.BitmapBytes.size() * sizeof(uint64_t);

      // Value data.
      M += ValueProfData::getSize(ProfileData.second);
    }
    LE.write<offset_type>(M);

    return std::make_pair(N, M);
  }

  void EmitKey(raw_ostream &Out, key_type_ref K, offset_type N) {
    Out.write(K.data(), N);
  }

  void EmitData(raw_ostream &Out, key_type_ref, data_type_ref V, offset_type) {
    using namespace support;

    endian::Writer LE(Out, ValueProfDataEndianness);
    for (const auto &ProfileData : *V) {
      const InstrProfRecord &ProfRecord = ProfileData.second;
      if (NamedInstrProfRecord::hasCSFlagInHash(ProfileData.first))
        CSSummaryBuilder->addRecord(ProfRecord);
      else
        SummaryBuilder->addRecord(ProfRecord);

      LE.write<uint64_t>(ProfileData.first); // Function hash.
      LE.write<uint64_t>(ProfRecord.Counts.size());
      for (uint64_t I : ProfRecord.Counts)
        LE.write<uint64_t>(I);

      LE.write<uint64_t>(ProfRecord.BitmapBytes.size());
      for (uint64_t I : ProfRecord.BitmapBytes)
        LE.write<uint64_t>(I);

      // Write value data after the counters and bitmap bytes.
      std::unique_ptr<ValueProfData> VDataPtr =
          ValueProfData::serializeFrom(ProfileData.second);
      uint32_t S = VDataPtr->getSize();
      VDataPtr->swapBytesFromHost(ValueProfDataEndianness);
      Out.write((const char *)VDataPtr.get(), S);
    }
  }
};

} // end namespace llvm
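
The trait above fixes the on-disk layout of each hash-table entry: EmitKeyDataLength must account for exactly the bytes EmitData later writes. Below is a standalone sketch of that size bookkeeping using toy stand-in types, not the real LLVM ones; it is illustrative only.

// Illustrative sketch (not part of InstrProfWriter.cpp); ToyRecord and its
// fields are hypothetical stand-ins for the real profile record types.
#include <cstdint>
#include <vector>

struct ToyRecord {
  std::vector<uint64_t> Counts;     // block counters
  std::vector<uint8_t> BitmapBytes; // MC/DC bitmap bytes
  uint64_t ValueProfDataSize;       // what ValueProfData::getSize() would report
};

// Mirrors the kind of arithmetic EmitKeyDataLength performs: one u64 for the
// hash, one for the counter count, the counters, one for the bitmap byte
// count, one u64 per bitmap byte, then the serialized value-profile blob.
uint64_t recordOnDiskSize(const ToyRecord &R) {
  uint64_t M = 0;
  M += sizeof(uint64_t);                        // function hash
  M += sizeof(uint64_t);                        // number of counters
  M += R.Counts.size() * sizeof(uint64_t);      // counter values
  M += sizeof(uint64_t);                        // number of bitmap bytes
  M += R.BitmapBytes.size() * sizeof(uint64_t); // bitmap bytes, widened to u64
  M += R.ValueProfDataSize;                     // value profile payload
  return M;
}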

InstrProfWriter::InstrProfWriter(
    bool Sparse, uint64_t TemporalProfTraceReservoirSize,
    uint64_t MaxTemporalProfTraceLength, bool WritePrevVersion,
    memprof::IndexedVersion MemProfVersionRequested, bool MemProfFullSchema,
    bool MemprofGenerateRandomHotness,
    unsigned MemprofGenerateRandomHotnessSeed)
    : Sparse(Sparse), MaxTemporalProfTraceLength(MaxTemporalProfTraceLength),
      TemporalProfTraceReservoirSize(TemporalProfTraceReservoirSize),
      InfoObj(new InstrProfRecordWriterTrait()),
      WritePrevVersion(WritePrevVersion),
      MemProfVersionRequested(MemProfVersionRequested),
      MemProfFullSchema(MemProfFullSchema),
      MemprofGenerateRandomHotness(MemprofGenerateRandomHotness) {
  // When random hotness is requested, seed the generator with the provided
  // seed or with the current time, and report the seed so the run can be
  // reproduced.
  if (MemprofGenerateRandomHotness) {
    unsigned seed = MemprofGenerateRandomHotnessSeed
                        ? MemprofGenerateRandomHotnessSeed
                        : std::time(nullptr);
    errs() << "random hotness seed = " << seed << "\n";
    std::srand(seed);
  }
}

InstrProfWriter::~InstrProfWriter() { delete InfoObj; }

void InstrProfWriter::setValueProfDataEndianness(llvm::endianness Endianness) {
  InfoObj->ValueProfDataEndianness = Endianness;
}

void InstrProfWriter::setOutputSparse(bool Sparse) { this->Sparse = Sparse; }

void InstrProfWriter::addRecord(NamedInstrProfRecord &&I, uint64_t Weight,
                                function_ref<void(Error)> Warn) {
  auto Name = I.Name;
  auto Hash = I.Hash;
  addRecord(Name, Hash, std::move(I), Weight, Warn);
}

void InstrProfWriter::overlapRecord(NamedInstrProfRecord &&Other,
                                    OverlapStats &Overlap,
                                    OverlapStats &FuncLevelOverlap,
                                    const OverlapFuncFilters &FuncFilter) {
  auto Name = Other.Name;
  auto Hash = Other.Hash;
  Other.accumulateCounts(FuncLevelOverlap.Test);
  auto It = FunctionData.find(Name);
  if (It == FunctionData.end()) {
    Overlap.addOneUnique(FuncLevelOverlap.Test);
    return;
  }
  if (FuncLevelOverlap.Test.CountSum < 1.0f) {
    Overlap.Overlap.NumEntries += 1;
    return;
  }
  auto &ProfileDataMap = It->second;
  auto [Where, NewFunc] = ProfileDataMap.try_emplace(Hash);
  if (NewFunc) {
    Overlap.addOneMismatch(FuncLevelOverlap.Test);
    return;
  }
  InstrProfRecord &Dest = Where->second;

  uint64_t ValueCutoff = FuncFilter.ValueCutoff;
  if (!FuncFilter.NameFilter.empty() && Name.contains(FuncFilter.NameFilter))
    ValueCutoff = 0;

  Dest.overlap(Other, Overlap, FuncLevelOverlap, ValueCutoff);
}

void InstrProfWriter::addRecord(StringRef Name, uint64_t Hash,
                                InstrProfRecord &&I, uint64_t Weight,
                                function_ref<void(Error)> Warn) {
  auto &ProfileDataMap = FunctionData[Name];

  auto [Where, NewFunc] = ProfileDataMap.try_emplace(Hash);
  InstrProfRecord &Dest = Where->second;

  auto MapWarn = [&](instrprof_error E) {
    Warn(make_error<InstrProfError>(E));
  };

  if (NewFunc) {
    // We've never seen a function with this name and hash, add it.
    Dest = std::move(I);
    if (Weight > 1)
      Dest.scale(Weight, 1, MapWarn);
  } else {
    // We're updating a function we've seen before.
    Dest.merge(I, Weight, MapWarn);
  }

  Dest.sortValueData();
}
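
A minimal usage sketch of the merge behavior above, assuming only the public InstrProfWriter and InstrProf headers; the function, name, hash and counter values are illustrative and not taken from this file.

// Illustrative sketch: merging two weighted runs of the same function.
#include "llvm/ProfileData/InstrProf.h"
#include "llvm/ProfileData/InstrProfWriter.h"
#include "llvm/Support/Error.h"

using namespace llvm;

void mergeTwoRuns(InstrProfWriter &Writer) {
  auto Warn = [](Error E) { consumeError(std::move(E)); };

  // First run: a new (name, hash) entry; its counts are scaled by the weight.
  NamedInstrProfRecord R1("foo", /*Hash=*/0x1234, /*Counts=*/{10, 20});
  Writer.addRecord(std::move(R1), /*Weight=*/2, Warn);

  // Second run: same name and hash, so the counts are merged into the
  // existing entry instead of creating a new one.
  NamedInstrProfRecord R2("foo", /*Hash=*/0x1234, /*Counts=*/{1, 2});
  Writer.addRecord(std::move(R2), /*Weight=*/1, Warn);
}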

void InstrProfWriter::addMemProfRecord(
    const Function::GUID Id, const memprof::IndexedMemProfRecord &Record) {
  auto NewRecord = Record;
  // If requested, replace the hotness of each allocation site with a random
  // value: a "cold" site is simulated with zero lifetime access density and a
  // maximal lifetime, a "not cold" site with the opposite.
  if (MemprofGenerateRandomHotness) {
    for (auto &Alloc : NewRecord.AllocSites) {
      // Default to a not cold assignment.
      uint64_t NewTLAD = std::numeric_limits<uint64_t>::max();
      uint64_t NewTL = 0;
      bool IsCold = std::rand() % 2;
      if (IsCold) {
        // Assign a cold hotness.
        NewTLAD = 0;
        NewTL = std::numeric_limits<uint64_t>::max();
      }
      Alloc.Info.setTotalLifetimeAccessDensity(NewTLAD);
      Alloc.Info.setTotalLifetime(NewTL);
    }
  }
  MemProfSumBuilder.addRecord(NewRecord);
  auto [Iter, Inserted] = MemProfData.Records.insert({Id, NewRecord});
  // If we inserted a new record then we are done.
  if (Inserted) {
    return;
  }
  memprof::IndexedMemProfRecord &Existing = Iter->second;
  Existing.merge(NewRecord);
}

bool InstrProfWriter::addMemProfFrame(const memprof::FrameId Id,
                                      const memprof::Frame &Frame,
                                      function_ref<void(Error)> Warn) {
  auto [Iter, Inserted] = MemProfData.Frames.insert({Id, Frame});
  // If a mapping already exists for the current frame id and it does not
  // match the new mapping provided, warn and bail out; the same frame id
  // should never map to different contents.
  if (!Inserted && Iter->second != Frame) {
    Warn(make_error<InstrProfError>(instrprof_error::malformed,
                                    "frame to id mapping mismatch"));
    return false;
  }
  return true;
}

bool InstrProfWriter::addMemProfCallStack(
    const memprof::CallStackId CSId,
    const llvm::SmallVector<memprof::FrameId> &CallStack,
    function_ref<void(Error)> Warn) {
  auto [Iter, Inserted] = MemProfData.CallStacks.insert({CSId, CallStack});
  // As with frames, a call stack id that is already present must map to an
  // identical call stack, otherwise the profile is malformed.
  if (!Inserted && Iter->second != CallStack) {
    Warn(make_error<InstrProfError>(instrprof_error::malformed,
                                    "call stack to id mapping mismatch"));
    return false;
  }
  return true;
}
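
Both helpers above follow the same insert-then-verify pattern on an id-to-payload map. A generic standalone sketch of that idiom, using standard containers rather than the LLVM types:

// Illustrative sketch only: insert the mapping if the key is new, otherwise
// require that the stored payload matches the incoming one.
#include <cstdint>
#include <map>

template <typename MapT, typename KeyT, typename ValueT>
bool insertOrVerify(MapT &M, const KeyT &Key, const ValueT &Value) {
  auto [It, Inserted] = M.insert({Key, Value});
  // A pre-existing key is only acceptable when it maps to identical contents;
  // anything else indicates an inconsistent profile.
  return Inserted || It->second == Value;
}

// Usage: std::map<uint64_t, int> Frames; insertOrVerify(Frames, uint64_t(1), 42);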

bool InstrProfWriter::addMemProfData(memprof::IndexedMemProfData Incoming,
                                     function_ref<void(Error)> Warn) {
  // An empty input is trivially merged.
  if (Incoming.Frames.empty() && Incoming.CallStacks.empty() &&
      Incoming.Records.empty())
    return true;

  // If the destination tables are empty, take the incoming ones wholesale;
  // otherwise merge entry by entry, verifying id consistency.
  if (MemProfData.Frames.empty())
    MemProfData.Frames = std::move(Incoming.Frames);
  else
    for (const auto &[Id, F] : Incoming.Frames)
      if (!addMemProfFrame(Id, F, Warn))
        return false;

  if (MemProfData.CallStacks.empty())
    MemProfData.CallStacks = std::move(Incoming.CallStacks);
  else
    for (const auto &[CSId, CS] : Incoming.CallStacks)
      if (!addMemProfCallStack(CSId, CS, Warn))
        return false;

  // Take over the incoming records wholesale when possible; otherwise merge
  // them one at a time, which also applies random hotness if requested.
  if (MemProfData.Records.empty() && !MemprofGenerateRandomHotness) {
    // The summary builder must still see every record.
    for (const auto &[GUID, Record] : Incoming.Records)
      MemProfSumBuilder.addRecord(Record);
    MemProfData.Records = std::move(Incoming.Records);
  } else {
    for (const auto &[GUID, Record] : Incoming.Records)
      addMemProfRecord(GUID, Record);
  }

  return true;
}

void InstrProfWriter::addBinaryIds(ArrayRef<llvm::object::BuildID> BIs) {
  llvm::append_range(BinaryIds, BIs);
}

void InstrProfWriter::addDataAccessProfData(
    std::unique_ptr<memprof::DataAccessProfData> DataAccessProfDataIn) {
  DataAccessProfileData = std::move(DataAccessProfDataIn);
}

void InstrProfWriter::addTemporalProfileTraces(
    SmallVectorImpl<TemporalProfTraceTy> &SrcTraces, uint64_t SrcStreamSize) {
  if (TemporalProfTraces.size() > TemporalProfTraceReservoirSize)
    TemporalProfTraces.truncate(TemporalProfTraceReservoirSize);
  for (auto &Trace : SrcTraces)
    if (Trace.FunctionNameRefs.size() > MaxTemporalProfTraceLength)
      Trace.FunctionNameRefs.resize(MaxTemporalProfTraceLength);
  llvm::erase_if(SrcTraces, [](auto &T) { return T.FunctionNameRefs.empty(); });

  // Nothing left to sample once empty traces have been dropped.
  if (SrcTraces.empty())
    return;

  auto SrcTraceIt = SrcTraces.begin();
  while (TemporalProfTraces.size() < TemporalProfTraceReservoirSize &&
         SrcTraceIt < SrcTraces.end())
    TemporalProfTraces.push_back(*SrcTraceIt++);

  // The reservoir is full: replace existing entries with decreasing
  // probability as the stream position grows (reservoir sampling).
  for (uint64_t I = TemporalProfTraces.size();
       I < SrcStreamSize && SrcTraceIt < SrcTraces.end(); I++) {
    std::uniform_int_distribution<uint64_t> Distribution(0, I);
    uint64_t RandomIndex = Distribution(RNG);
    if (RandomIndex < TemporalProfTraces.size())
      TemporalProfTraces[RandomIndex] = *SrcTraceIt++;
  }
  TemporalProfTraceStreamSize += SrcStreamSize;
}
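
The loop above is reservoir sampling: once the reservoir is full, the trace at position I of the stream replaces a random slot with probability ReservoirSize / (I + 1), so traces seen early and late end up roughly equally likely to be kept. A self-contained sketch of the same selection rule, with illustrative names rather than the LLVM types:

// Illustrative sketch of reservoir sampling over an incoming batch.
#include <cstdint>
#include <random>
#include <vector>

template <typename T>
void reservoirSample(std::vector<T> &Reservoir, uint64_t ReservoirSize,
                     const std::vector<T> &Batch, uint64_t &StreamSize,
                     std::mt19937_64 &RNG) {
  for (const T &Item : Batch) {
    if (Reservoir.size() < ReservoirSize) {
      // Fill phase: keep everything until the reservoir reaches capacity.
      Reservoir.push_back(Item);
    } else {
      // Replacement phase: pick an index in [0, StreamSize]; the item is kept
      // only if the index lands inside the reservoir.
      std::uniform_int_distribution<uint64_t> Dist(0, StreamSize);
      uint64_t Idx = Dist(RNG);
      if (Idx < ReservoirSize)
        Reservoir[Idx] = Item;
    }
    ++StreamSize;
  }
}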

void InstrProfWriter::mergeRecordsFromWriter(InstrProfWriter &&IPW,
                                             function_ref<void(Error)> Warn) {
  for (auto &I : IPW.FunctionData)
    for (auto &Func : I.getValue())
      addRecord(I.getKey(), Func.first, std::move(Func.second), 1, Warn);

  BinaryIds.reserve(BinaryIds.size() + IPW.BinaryIds.size());
  for (auto &I : IPW.BinaryIds)
    addBinaryIds(I);

  addTemporalProfileTraces(IPW.TemporalProfTraces,
                           IPW.TemporalProfTraceStreamSize);

  MemProfData.Frames.reserve(IPW.MemProfData.Frames.size());
  for (auto &[FrameId, Frame] : IPW.MemProfData.Frames) {
    // If we weren't able to add the frame mappings then it doesn't make sense
    // to try to merge the remaining memprof data from this writer.
    if (!addMemProfFrame(FrameId, Frame, Warn))
      return;
  }

  MemProfData.CallStacks.reserve(IPW.MemProfData.CallStacks.size());
  for (auto &[CSId, CallStack] : IPW.MemProfData.CallStacks) {
    if (!addMemProfCallStack(CSId, CallStack, Warn))
      return;
  }

  MemProfData.Records.reserve(IPW.MemProfData.Records.size());
  for (auto &[GUID, Record] : IPW.MemProfData.Records) {
    addMemProfRecord(GUID, Record);
  }
}

bool InstrProfWriter::shouldEncodeData(const ProfilingData &PD) {
  if (!Sparse)
    return true;
  for (const auto &Func : PD) {
    const InstrProfRecord &IPR = Func.second;
    if (llvm::any_of(IPR.Counts, [](uint64_t Count) { return Count > 0; }))
      return true;
    if (llvm::any_of(IPR.BitmapBytes, [](uint8_t Byte) { return Byte > 0; }))
      return true;
  }
  return false;
}

static uint64_t writeHeader(const IndexedInstrProf::Header &Header,
                            const bool WritePrevVersion, ProfOStream &OS) {
  // Only write out the first four fields.
  for (int I = 0; I < 4; I++)
    OS.write(reinterpret_cast<const uint64_t *>(&Header)[I]);

  // Remember the offset of the remaining fields so they can be back patched
  // once their values are known.
  auto BackPatchStartOffset = OS.tell();

  // Reserve the space for back patching later.
  OS.write(0); // HashOffset
  OS.write(0); // MemProfOffset
  OS.write(0); // BinaryIdOffset
  OS.write(0); // TemporalProfTracesOffset
  if (!WritePrevVersion)
    OS.write(0); // VTableNamesOffset

  return BackPatchStartOffset;
}

Error InstrProfWriter::writeBinaryIds(ProfOStream &OS) {
  // The binary id section consists of:
  //   1. uint64_t: size of the whole section in bytes
  //   2. for each binary id: its length, its bytes, and padding up to an
  //      8-byte boundary.
  uint64_t BinaryIdsSectionSize = 0;

  // Remove duplicate binary ids.
  llvm::sort(BinaryIds);
  BinaryIds.erase(llvm::unique(BinaryIds), BinaryIds.end());

  for (const auto &BI : BinaryIds) {
    // Increment by the size of the length field.
    BinaryIdsSectionSize += sizeof(uint64_t);
    // Increment by the length of the binary id data, aligned to 8 bytes.
    BinaryIdsSectionSize += alignToPowerOf2(BI.size(), sizeof(uint64_t));
  }
  // Write the binary ids section size.
  OS.write(BinaryIdsSectionSize);

  for (const auto &BI : BinaryIds) {
    uint64_t BILen = BI.size();
    // Write the binary id length.
    OS.write(BILen);
    // Write the binary id data.
    for (unsigned K = 0; K < BILen; K++)
      OS.writeByte(BI[K]);
    // Write padding if necessary.
    uint64_t PaddingSize = alignToPowerOf2(BILen, sizeof(uint64_t)) - BILen;
    for (unsigned K = 0; K < PaddingSize; K++)
      OS.writeByte(0);
  }

  return Error::success();
}
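
Each id is padded so that the next length field stays 8-byte aligned; the arithmetic is just rounding the length up to the next multiple of eight. A minimal, standalone illustration of the equivalent computation:

// Illustrative only: equivalent of alignToPowerOf2(N, 8) for this use case.
#include <cstdint>

constexpr uint64_t alignUpTo8(uint64_t N) { return (N + 7) & ~uint64_t(7); }

static_assert(alignUpTo8(20) == 24, "a 20-byte build id gets 4 padding bytes");
static_assert(alignUpTo8(16) == 16, "an already aligned id needs no padding");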

Error InstrProfWriter::writeVTableNames(ProfOStream &OS) {
  std::vector<std::string> VTableNameStrs;
  for (StringRef VTableName : VTableNames.keys())
    VTableNameStrs.push_back(VTableName.str());

  std::string CompressedVTableNames;
  if (!VTableNameStrs.empty())
    if (Error E = collectGlobalObjectNameStrings(
            VTableNameStrs, compression::zlib::isAvailable(),
            CompressedVTableNames))
      return E;

  const uint64_t CompressedStringLen = CompressedVTableNames.length();

  // Record the length of the compressed string.
  OS.write(CompressedStringLen);

  // Write the chars in the compressed string.
  for (auto &c : CompressedVTableNames)
    OS.writeByte(static_cast<uint8_t>(c));

  // Pad up to a multiple of 8; the reader consumes bytes according to
  // CompressedStringLen and skips the padding.
  const uint64_t PaddedLength = alignTo(CompressedStringLen, 8);
  for (uint64_t K = CompressedStringLen; K < PaddedLength; K++)
    OS.writeByte(0);

  return Error::success();
}

Error InstrProfWriter::writeImpl(ProfOStream &OS) {
  using namespace IndexedInstrProf;
  using namespace support;

  OnDiskChainedHashTableGenerator<InstrProfRecordWriterTrait> Generator;

  InstrProfSummaryBuilder ISB(ProfileSummaryBuilder::DefaultCutoffs);
  InfoObj->SummaryBuilder = &ISB;
  InstrProfSummaryBuilder CSISB(ProfileSummaryBuilder::DefaultCutoffs);
  InfoObj->CSSummaryBuilder = &CSISB;

  // Populate the hash table generator with the profile data, sorted by key.
  SmallVector<std::pair<StringRef, const ProfilingData *>> OrderedData;
  for (const auto &I : FunctionData)
    if (shouldEncodeData(I.getValue()))
      OrderedData.emplace_back((I.getKey()), &I.getValue());
  llvm::sort(OrderedData, less_first());
  for (const auto &I : OrderedData)
    Generator.insert(I.first, I.second);

  // Write the header.
  IndexedInstrProf::Header Header;
  Header.Version = WritePrevVersion
                       ? IndexedInstrProf::ProfVersion::Version11
                       : IndexedInstrProf::ProfVersion::CurrentVersion;
  if (static_cast<bool>(ProfileKind & InstrProfKind::IRInstrumentation))
    Header.Version |= VARIANT_MASK_IR_PROF;
  if (static_cast<bool>(ProfileKind & InstrProfKind::ContextSensitive))
    Header.Version |= VARIANT_MASK_CSIR_PROF;
  if (static_cast<bool>(ProfileKind &
                        InstrProfKind::FunctionEntryInstrumentation))
    Header.Version |= VARIANT_MASK_INSTR_ENTRY;
  if (static_cast<bool>(ProfileKind &
                        InstrProfKind::LoopEntriesInstrumentation))
    Header.Version |= VARIANT_MASK_INSTR_LOOP_ENTRIES;
  if (static_cast<bool>(ProfileKind & InstrProfKind::SingleByteCoverage))
    Header.Version |= VARIANT_MASK_BYTE_COVERAGE;
  if (static_cast<bool>(ProfileKind & InstrProfKind::FunctionEntryOnly))
    Header.Version |= VARIANT_MASK_FUNCTION_ENTRY_ONLY;
  if (static_cast<bool>(ProfileKind & InstrProfKind::MemProf))
    Header.Version |= VARIANT_MASK_MEMPROF;
  if (static_cast<bool>(ProfileKind & InstrProfKind::TemporalProfile))
    Header.Version |= VARIANT_MASK_TEMPORAL_PROF;

  const uint64_t BackPatchStartOffset =
      writeHeader(Header, WritePrevVersion, OS);

  // Reserve space for the profile summary data; it is back patched once the
  // summary has been computed.
  uint32_t NumEntries = ProfileSummaryBuilder::DefaultCutoffs.size();
  uint32_t SummarySize = Summary::getSize(Summary::NumKinds, NumEntries);
  // Remember the summary offset.
  uint64_t SummaryOffset = OS.tell();
  for (unsigned I = 0; I < SummarySize / sizeof(uint64_t); I++)
    OS.write(0);
  uint64_t CSSummaryOffset = 0;
  uint64_t CSSummarySize = 0;
  if (static_cast<bool>(ProfileKind & InstrProfKind::ContextSensitive)) {
    CSSummaryOffset = OS.tell();
    CSSummarySize = SummarySize / sizeof(uint64_t);
    for (unsigned I = 0; I < CSSummarySize; I++)
      OS.write(0);
  }

  // Write the hash table.
  uint64_t HashTableStart = Generator.Emit(OS.OS, *InfoObj);

  // Write the MemProf profile data if we have it.
  uint64_t MemProfSectionStart = 0;
  if (static_cast<bool>(ProfileKind & InstrProfKind::MemProf)) {
    MemProfSectionStart = OS.tell();
    if (auto E = writeMemProf(
            OS, MemProfData, MemProfVersionRequested, MemProfFullSchema,
            std::move(DataAccessProfileData), MemProfSumBuilder.getSummary()))
      return E;
  }

  uint64_t BinaryIdSectionStart = OS.tell();
  if (auto E = writeBinaryIds(OS))
    return E;

  uint64_t VTableNamesSectionStart = OS.tell();

  if (!WritePrevVersion)
    if (Error E = writeVTableNames(OS))
      return E;

  uint64_t TemporalProfTracesSectionStart = 0;
  if (static_cast<bool>(ProfileKind & InstrProfKind::TemporalProfile)) {
    TemporalProfTracesSectionStart = OS.tell();
    OS.write(TemporalProfTraces.size());
    OS.write(TemporalProfTraceStreamSize);
    for (auto &Trace : TemporalProfTraces) {
      OS.write(Trace.Weight);
      OS.write(Trace.FunctionNameRefs.size());
      for (auto &NameRef : Trace.FunctionNameRefs)
        OS.write(NameRef);
    }
  }

  // Allocate space for the summary data to be serialized out.
  std::unique_ptr<IndexedInstrProf::Summary> TheSummary =
      IndexedInstrProf::allocSummary(SummarySize);
  // Compute the summary and copy the data into the structure that is
  // serialized out (to disk or buffer).
  std::unique_ptr<ProfileSummary> PS = ISB.getSummary();
  setSummary(TheSummary.get(), *PS);
  InfoObj->SummaryBuilder = nullptr;

  // For the context sensitive summary.
  std::unique_ptr<IndexedInstrProf::Summary> TheCSSummary = nullptr;
  if (static_cast<bool>(ProfileKind & InstrProfKind::ContextSensitive)) {
    TheCSSummary = IndexedInstrProf::allocSummary(SummarySize);
    std::unique_ptr<ProfileSummary> CSPS = CSISB.getSummary();
    setSummary(TheCSSummary.get(), *CSPS);
  }
  InfoObj->CSSummaryBuilder = nullptr;

  SmallVector<uint64_t, 8> HeaderOffsets = {HashTableStart, MemProfSectionStart,
                                            BinaryIdSectionStart,
                                            TemporalProfTracesSectionStart};
  if (!WritePrevVersion)
    HeaderOffsets.push_back(VTableNamesSectionStart);

  PatchItem PatchItems[] = {
      // Patch the header fields with the section offsets.
      {BackPatchStartOffset, HeaderOffsets},
      // Patch the summary data.
      {SummaryOffset,
       ArrayRef<uint64_t>(reinterpret_cast<uint64_t *>(TheSummary.get()),
                          SummarySize / sizeof(uint64_t))},
      {CSSummaryOffset,
       ArrayRef<uint64_t>(reinterpret_cast<uint64_t *>(TheCSSummary.get()),
                          CSSummarySize)}};

  OS.patch(PatchItems);

  for (const auto &I : FunctionData)
    for (const auto &F : I.getValue())
      if (Error E = validateRecord(F.second))
        return E;

  return Error::success();
}

Error InstrProfWriter::write(raw_fd_ostream &OS) {
  // Write the hash table.
  ProfOStream POS(OS);
  return writeImpl(POS);
}

Error InstrProfWriter::write(raw_string_ostream &OS) {
  ProfOStream POS(OS);
  return writeImpl(POS);
}

std::unique_ptr<MemoryBuffer> InstrProfWriter::writeBuffer() {
  std::string Data;
  raw_string_ostream OS(Data);
  // Write the hash table.
  if (Error E = write(OS))
    return nullptr;
  // Return this in an aligned memory buffer.
  return MemoryBuffer::getMemBufferCopy(Data);
}
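
A usage sketch (assumed driver code, not part of this file) that serializes the accumulated profile with the public write() entry point; the helper name and path handling are illustrative.

// Illustrative sketch only; assumes the public InstrProfWriter headers.
#include "llvm/ProfileData/InstrProfWriter.h"
#include "llvm/Support/Error.h"
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

static Error saveIndexedProfile(InstrProfWriter &Writer, StringRef Path) {
  std::error_code EC;
  raw_fd_ostream Out(Path, EC, sys::fs::OF_None);
  if (EC)
    return errorCodeToError(EC);
  return Writer.write(Out); // emits the indexed (binary) profile format
}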

static const char *ValueProfKindStr[] = {
#define VALUE_PROF_KIND(Enumerator, Value, Descr) #Enumerator,
#include "llvm/ProfileData/InstrProfData.inc"
};

Error InstrProfWriter::validateRecord(const InstrProfRecord &Func) {
  for (uint32_t VK = 0; VK <= IPVK_Last; VK++) {
    if (VK == IPVK_IndirectCallTarget || VK == IPVK_VTableTarget)
      continue;
    uint32_t NS = Func.getNumValueSites(VK);
    for (uint32_t S = 0; S < NS; S++) {
      DenseSet<uint64_t> SeenValues;
      for (const auto &V : Func.getValueArrayForSite(VK, S))
        if (!SeenValues.insert(V.Value).second)
          return make_error<InstrProfError>(instrprof_error::invalid_prof);
    }
  }

  return Error::success();
}

void InstrProfWriter::writeRecordInText(StringRef Name, uint64_t Hash,
                                        const InstrProfRecord &Func,
                                        InstrProfSymtab &Symtab,
                                        raw_fd_ostream &OS) {
  OS << Name << "\n";
  OS << "# Func Hash:\n" << Hash << "\n";
  OS << "# Num Counters:\n" << Func.Counts.size() << "\n";
  OS << "# Counter Values:\n";
  for (uint64_t Count : Func.Counts)
    OS << Count << "\n";

  if (Func.BitmapBytes.size() > 0) {
    OS << "# Num Bitmap Bytes:\n$" << Func.BitmapBytes.size() << "\n";
    OS << "# Bitmap Byte Values:\n";
    for (uint8_t Byte : Func.BitmapBytes) {
      OS << "0x";
      OS.write_hex(Byte);
      OS << "\n";
    }
    OS << "\n";
  }

  uint32_t NumValueKinds = Func.getNumValueKinds();
  if (!NumValueKinds) {
    OS << "\n";
    return;
  }

  OS << "# Num Value Kinds:\n" << Func.getNumValueKinds() << "\n";
  for (uint32_t VK = 0; VK < IPVK_Last + 1; VK++) {
    uint32_t NS = Func.getNumValueSites(VK);
    if (!NS)
      continue;
    OS << "# ValueKind = " << ValueProfKindStr[VK] << ":\n" << VK << "\n";
    OS << "# NumValueSites:\n" << NS << "\n";
    for (uint32_t S = 0; S < NS; S++) {
      auto VD = Func.getValueArrayForSite(VK, S);
      OS << VD.size() << "\n";
      for (const auto &V : VD) {
        if (VK == IPVK_IndirectCallTarget || VK == IPVK_VTableTarget)
          OS << Symtab.getFuncOrVarNameIfDefined(V.Value) << ":" << V.Count
             << "\n";
        else
          OS << V.Value << ":" << V.Count << "\n";
      }
    }
  }

  OS << "\n";
}
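
For a function named foo with hash 4660 and two counters, and no bitmap bytes or value profile data, the text emitted by the code above looks like the following (the concrete values are illustrative); the trailing blank line separates consecutive records:

foo
# Func Hash:
4660
# Num Counters:
2
# Counter Values:
100
50
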

Error InstrProfWriter::writeText(raw_fd_ostream &OS) {
  // Check CS first since it implies an IR level profile.
  if (static_cast<bool>(ProfileKind & InstrProfKind::ContextSensitive))
    OS << "# CSIR level Instrumentation Flag\n:csir\n";
  else if (static_cast<bool>(ProfileKind & InstrProfKind::IRInstrumentation))
    OS << "# IR level Instrumentation Flag\n:ir\n";

  if (static_cast<bool>(ProfileKind &
                        InstrProfKind::FunctionEntryInstrumentation))
    OS << "# Always instrument the function entry block\n:entry_first\n";
  if (static_cast<bool>(ProfileKind &
                        InstrProfKind::LoopEntriesInstrumentation))
    OS << "# Always instrument the loop entry "
          "blocks\n:instrument_loop_entries\n";
  if (static_cast<bool>(ProfileKind & InstrProfKind::SingleByteCoverage))
    OS << "# Instrument block coverage\n:single_byte_coverage\n";
  InstrProfSymtab Symtab;

  using FuncPair = detail::DenseMapPair<uint64_t, InstrProfRecord>;
  using RecordType = std::pair<StringRef, FuncPair>;
  SmallVector<RecordType, 4> OrderedFuncData;

  for (const auto &I : FunctionData) {
    if (shouldEncodeData(I.getValue())) {
      if (Error E = Symtab.addFuncName(I.getKey()))
        return E;
      for (const auto &Func : I.getValue())
        OrderedFuncData.push_back(std::make_pair(I.getKey(), Func));
    }
  }

  for (const auto &VTableName : VTableNames)
    if (Error E = Symtab.addVTableName(VTableName.getKey()))
      return E;

  if (static_cast<bool>(ProfileKind & InstrProfKind::TemporalProfile))
    writeTextTemporalProfTraceData(OS, Symtab);

  llvm::sort(OrderedFuncData, [](const RecordType &A, const RecordType &B) {
    return std::tie(A.first, A.second.first) <
           std::tie(B.first, B.second.first);
  });

  for (const auto &record : OrderedFuncData) {
    const StringRef &Name = record.first;
    const FuncPair &Func = record.second;
    writeRecordInText(Name, Func.first, Func.second, Symtab, OS);
  }

  for (const auto &record : OrderedFuncData) {
    const FuncPair &Func = record.second;
    if (Error E = validateRecord(Func.second))
      return E;
  }

  return Error::success();
}

void InstrProfWriter::writeTextTemporalProfTraceData(raw_fd_ostream &OS,
                                                     InstrProfSymtab &Symtab) {
  OS << ":temporal_prof_traces\n";
  OS << "# Num Temporal Profile Traces:\n" << TemporalProfTraces.size() << "\n";
  OS << "# Temporal Profile Trace Stream Size:\n"
     << TemporalProfTraceStreamSize << "\n";
  for (auto &Trace : TemporalProfTraces) {
    OS << "# Weight:\n" << Trace.Weight << "\n";
    for (auto &NameRef : Trace.FunctionNameRefs)
      OS << Symtab.getFuncOrVarName(NameRef) << ",";
    OS << "\n";
  }
  OS << "\n";
}
