LLVM: lib/ProfileData/MemProfReader.cpp Source File
//===- MemProfReader.cpp - Instrumented memory profiling reader ----------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-Exception
//
//===----------------------------------------------------------------------===//
//
// This file contains support for reading MemProf profiling data.
//
//===----------------------------------------------------------------------===//

#include <cstdint>
#include <memory>
#include <type_traits>

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/DebugInfo/DWARF/DWARFContext.h"
#include "llvm/DebugInfo/Symbolize/SymbolizableModule.h"
#include "llvm/DebugInfo/Symbolize/SymbolizableObjectFile.h"
#include "llvm/Object/Binary.h"
#include "llvm/Object/BuildID.h"
#include "llvm/Object/ELFObjectFile.h"
#include "llvm/Object/ObjectFile.h"
#include "llvm/ProfileData/DataAccessProf.h"
#include "llvm/ProfileData/InstrProf.h"
#include "llvm/ProfileData/MemProf.h"
#include "llvm/ProfileData/MemProfReader.h"
#include "llvm/ProfileData/MemProfSummaryBuilder.h"
#include "llvm/ProfileData/MemProfYAML.h"
#include "llvm/ProfileData/SampleProf.h"
#include "llvm/Support/Endian.h"
#include "llvm/Support/Error.h"
#include "llvm/Support/Format.h"
#include "llvm/Support/MemoryBuffer.h"
#include "llvm/Support/Path.h"
#include "llvm/Support/raw_ostream.h"

#define DEBUG_TYPE "memprof"

namespace llvm {
namespace memprof {
namespace {
template <typename T> inline T alignedRead(const char *Ptr) {
  static_assert(std::is_integral_v<T>, "Not an integral type");
  assert(reinterpret_cast<size_t>(Ptr) % sizeof(T) == 0 && "Unaligned Read");
  return *reinterpret_cast<const T *>(Ptr);
}

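// Sanity-check a raw profile buffer before any parsing. A raw profile file
// may hold several concatenated dumps, each starting with its own Header, so
// every header must carry a supported version and the sum of the per-dump
// TotalSize fields must equal the buffer size.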
Error checkBuffer(const MemoryBuffer &Buffer) {
  if (!RawMemProfReader::hasFormat(Buffer))
    return make_error<InstrProfError>(instrprof_error::bad_magic);

  if (Buffer.getBufferSize() == 0)
    return make_error<InstrProfError>(instrprof_error::empty_raw_profile);

  if (Buffer.getBufferSize() < sizeof(Header)) {
    return make_error<InstrProfError>(instrprof_error::truncated);
  }

  // The buffer can hold more than one raw profile dump since profiles may be
  // serialized repeatedly to the same file; walk all headers and accumulate
  // their sizes.
  uint64_t TotalSize = 0;
  const char *Next = Buffer.getBufferStart();
  while (Next < Buffer.getBufferEnd()) {
    const auto *H = reinterpret_cast<const Header *>(Next);

    // Check that the version in the header is one we support.
    bool IsSupported = false;
    for (auto SupportedVersion : MEMPROF_RAW_SUPPORTED_VERSIONS) {
      if (H->Version == SupportedVersion)
        IsSupported = true;
    }
    if (!IsSupported) {
      return make_error<InstrProfError>(instrprof_error::unsupported_version);
    }

    TotalSize += H->TotalSize;
    Next += H->TotalSize;
  }

  if (Buffer.getBufferSize() != TotalSize) {
    return make_error<InstrProfError>(instrprof_error::malformed);
  }
  return Error::success();
}

llvm::SmallVector<SegmentEntry> readSegmentEntries(const char *Ptr) {
  using namespace support;

  const uint64_t NumItemsToRead =
      endian::readNext<uint64_t, llvm::endianness::little, unaligned>(Ptr);
  llvm::SmallVector<SegmentEntry> Items;
  for (uint64_t I = 0; I < NumItemsToRead; I++) {
    Items.push_back(*reinterpret_cast<const SegmentEntry *>(
        Ptr + I * sizeof(SegmentEntry)));
  }
  return Items;
}

llvm::SmallVector<std::pair<uint64_t, MemInfoBlock>>
readMemInfoBlocksV3(const char *Ptr) {
  using namespace support;

  const uint64_t NumItemsToRead =
      endian::readNext<uint64_t, llvm::endianness::little, unaligned>(Ptr);

  llvm::SmallVector<std::pair<uint64_t, MemInfoBlock>> Items;
  for (uint64_t I = 0; I < NumItemsToRead; I++) {
    const uint64_t Id =
        endian::readNext<uint64_t, llvm::endianness::little, unaligned>(Ptr);

    // The V3 layout does not contain the access histogram fields, so copying
    // a full MemInfoBlock here also reads bytes that belong to the following
    // section. That is harmless because the histogram fields are reset below
    // and only MEMPROF_V3_MIB_SIZE bytes are consumed from the buffer.
    MemInfoBlock MIB = *reinterpret_cast<const MemInfoBlock *>(Ptr);
    // Clear the garbage values read into the histogram fields.
    MIB.AccessHistogramSize = 0;
    MIB.AccessHistogram = 0;

    Items.push_back({Id, MIB});
    // Only advance by the size of a V3 MIB.
    Ptr += MEMPROF_V3_MIB_SIZE;
  }
  return Items;
}
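
// Shared MIB deserialization for the newer formats. Each record is an id
// followed by the MemInfoBlock fields in declaration order; if the block has
// an access histogram, the counts are heap-allocated here and later freed by
// the reader (see the destructor and the merge paths).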
llvm::SmallVector<std::pair<uint64_t, MemInfoBlock>>
readMemInfoBlocksCommon(const char *Ptr, bool IsHistogramEncoded = false) {
  using namespace support;

  const uint64_t NumItemsToRead =
      endian::readNext<uint64_t, llvm::endianness::little, unaligned>(Ptr);

  llvm::SmallVector<std::pair<uint64_t, MemInfoBlock>> Items;
  for (uint64_t I = 0; I < NumItemsToRead; I++) {
    const uint64_t Id =
        endian::readNext<uint64_t, llvm::endianness::little, unaligned>(Ptr);

    MemInfoBlock MIB;
#define READ_MIB_FIELD(FIELD)                                                  \
  MIB.FIELD = endian::readNext<decltype(MIB.FIELD), llvm::endianness::little,  \
                               unaligned>(Ptr)

    READ_MIB_FIELD(AllocCount);
    READ_MIB_FIELD(TotalAccessCount);
    READ_MIB_FIELD(MinAccessCount);
    READ_MIB_FIELD(MaxAccessCount);
    READ_MIB_FIELD(TotalSize);
    READ_MIB_FIELD(MinSize);
    READ_MIB_FIELD(MaxSize);
    READ_MIB_FIELD(AllocTimestamp);
    READ_MIB_FIELD(DeallocTimestamp);
    READ_MIB_FIELD(TotalLifetime);
    READ_MIB_FIELD(MinLifetime);
    READ_MIB_FIELD(MaxLifetime);
    READ_MIB_FIELD(AllocCpuId);
    READ_MIB_FIELD(DeallocCpuId);
    READ_MIB_FIELD(NumMigratedCpu);
    READ_MIB_FIELD(NumLifetimeOverlaps);
    READ_MIB_FIELD(NumSameAllocCpu);
    READ_MIB_FIELD(NumSameDeallocCpu);
    READ_MIB_FIELD(DataTypeId);
    READ_MIB_FIELD(TotalAccessDensity);
    READ_MIB_FIELD(MinAccessDensity);
    READ_MIB_FIELD(MaxAccessDensity);
    READ_MIB_FIELD(TotalLifetimeAccessDensity);
    READ_MIB_FIELD(MinLifetimeAccessDensity);
    READ_MIB_FIELD(MaxLifetimeAccessDensity);
    READ_MIB_FIELD(AccessHistogramSize);
    READ_MIB_FIELD(AccessHistogram);
#undef READ_MIB_FIELD

    if (MIB.AccessHistogramSize > 0) {
      MIB.AccessHistogram =
          (uintptr_t)malloc(MIB.AccessHistogramSize * sizeof(uint64_t));
      for (uint64_t J = 0; J < MIB.AccessHistogramSize; J++) {
        if (!IsHistogramEncoded) {
          ((uint64_t *)MIB.AccessHistogram)[J] =
              endian::readNext<uint64_t, llvm::endianness::little, unaligned>(
                  Ptr);
        } else {
          const uint16_t Val =
              endian::readNext<uint16_t, llvm::endianness::little, unaligned>(
                  Ptr);
          ((uint64_t *)MIB.AccessHistogram)[J] = decodeHistogramCount(Val);
        }
      }
    }
    Items.push_back({Id, MIB});
  }
  return Items;
}
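
// V4 stores histogram entries as raw 64-bit counts, while V5 stores them in a
// compact encoded form that is expanded via decodeHistogramCount.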
llvm::SmallVector<std::pair<uint64_t, MemInfoBlock>>
readMemInfoBlocksV4(const char *Ptr) {
  return readMemInfoBlocksCommon(Ptr);
}

llvm::SmallVector<std::pair<uint64_t, MemInfoBlock>>
readMemInfoBlocksV5(const char *Ptr) {
  return readMemInfoBlocksCommon(Ptr, true);
}

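// The callstack section is a count followed by (StackId, NumPCs, PC...)
// tuples; each entry becomes a StackId -> PCs mapping in the CallStackMap.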
CallStackMap readStackInfo(const char *Ptr) {
  using namespace support;

  const uint64_t NumItemsToRead =
      endian::readNext<uint64_t, llvm::endianness::little, unaligned>(Ptr);
  CallStackMap Items;

  for (uint64_t I = 0; I < NumItemsToRead; I++) {
    const uint64_t StackId =
        endian::readNext<uint64_t, llvm::endianness::little, unaligned>(Ptr);
    const uint64_t NumPCs =
        endian::readNext<uint64_t, llvm::endianness::little, unaligned>(Ptr);

    SmallVector<uint64_t> CallStack;
    CallStack.reserve(NumPCs);
    for (uint64_t J = 0; J < NumPCs; J++) {
      CallStack.push_back(
          endian::readNext<uint64_t, llvm::endianness::little, unaligned>(Ptr));
    }

    Items[StackId] = CallStack;
  }
  return Items;
}

// Merges the contents of stack information in \p From to \p To. Returns true
// if any stack ids observed previously map to a different set of program
// counters.
bool mergeStackMap(const CallStackMap &From, CallStackMap &To) {
  for (const auto &[Id, Stack] : From) {
    auto [It, Inserted] = To.try_emplace(Id, Stack);
    // Check that the PCs are the same (in order).
    if (!Inserted && Stack != It->second)
      return true;
  }
  return false;
}

Error report(Error E, const StringRef Context) {
  return joinErrors(createStringError(inconvertibleErrorCode(), Context),
                    std::move(E));
}

bool isRuntimePath(const StringRef Path) {
  const StringRef Filename = llvm::sys::path::filename(Path);
  // This list should be updated in case new files with additional interceptors
  // are added to the memprof runtime.
  return Filename == "memprof_malloc_linux.cpp" ||
         Filename == "memprof_interceptors.cpp" ||
         Filename == "memprof_new_delete.cpp";
}

std::string getBuildIdString(const SegmentEntry &Entry) {
  // If the build id is unset print a helpful string instead.
  if (Entry.BuildIdSize == 0)
    return "<None>";

  std::string Str;
  raw_string_ostream OS(Str);
  for (size_t I = 0; I < Entry.BuildIdSize; I++) {
    OS << format_hex_no_prefix(Entry.BuildId[I], 2);
  }
  return OS.str();
}
} // namespace
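
// Typical usage (illustrative sketch only; the profile and binary paths below
// are hypothetical):
//   auto ReaderOr = RawMemProfReader::create("memprof.profraw", "a.out");
//   if (!ReaderOr)
//     /* handle the Error */;
//   for (const auto &[GUID, Record] : **ReaderOr)
//     /* consume each MemProfRecord */;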
Expected<std::unique_ptr<RawMemProfReader>>
RawMemProfReader::create(const Twine &Path, const StringRef ProfiledBinary,
                         bool KeepName) {
  auto BufferOr = MemoryBuffer::getFileOrSTDIN(Path);
  if (std::error_code EC = BufferOr.getError())
    return report(errorCodeToError(EC), Path.getSingleStringRef());

  std::unique_ptr<MemoryBuffer> Buffer(BufferOr.get().release());
  return create(std::move(Buffer), ProfiledBinary, KeepName);
}

Expected<std::unique_ptr<RawMemProfReader>>
RawMemProfReader::create(std::unique_ptr<MemoryBuffer> Buffer,
                         const StringRef ProfiledBinary, bool KeepName) {
  if (Error E = checkBuffer(*Buffer))
    return report(std::move(E), Buffer->getBufferIdentifier());

  if (ProfiledBinary.empty()) {
    // Peek the build ids to print a helpful error message.
    const std::vector<std::string> BuildIds = peekBuildIds(Buffer.get());
    std::string ErrorMessage(
        R"(Path to profiled binary is empty, expected binary with one of the following build ids:
)");
    for (const auto &Id : BuildIds) {
      ErrorMessage += "\n BuildId: ";
      ErrorMessage += Id;
    }
    return report(
        make_error<StringError>(ErrorMessage, inconvertibleErrorCode()),
        "");
  }

  auto BinaryOr = llvm::object::createBinary(ProfiledBinary);
  if (!BinaryOr) {
    return report(BinaryOr.takeError(), ProfiledBinary);
  }

  // Use new here since the constructor is private.
  std::unique_ptr<RawMemProfReader> Reader(
      new RawMemProfReader(std::move(BinaryOr.get()), KeepName));
  if (Error E = Reader->initialize(std::move(Buffer))) {
    return std::move(E);
  }
  return std::move(Reader);
}

// Free any histogram buffers that are still owned by the MemInfoBlocks.
RawMemProfReader::~RawMemProfReader() {
  for (auto &[_, MIB] : CallstackProfileData) {
    if (MemprofRawVersion >= 4ULL && MIB.AccessHistogramSize > 0) {
      free((void *)MIB.AccessHistogram);
    }
  }
}

bool RawMemProfReader::hasFormat(const Twine &Path) {
  auto BufferOr = MemoryBuffer::getFileOrSTDIN(Path);
  if (!BufferOr)
    return false;

  std::unique_ptr<MemoryBuffer> Buffer(BufferOr.get().release());
  return hasFormat(*Buffer);
}

bool RawMemProfReader::hasFormat(const MemoryBuffer &Buffer) {
  if (Buffer.getBufferSize() < sizeof(uint64_t))
    return false;

  // Match the magic number at the start of the buffer.
  const uint64_t Magic = alignedRead<uint64_t>(Buffer.getBufferStart());
  return Magic == MEMPROF_RAW_MAGIC_64;
}

void RawMemProfReader::printYAML(raw_ostream &OS) {
  MemProfSummaryBuilder MemProfSumBuilder;
  uint64_t NumAllocFunctions = 0, NumMibInfo = 0;
  for (const auto &KV : MemProfData.Records) {
    MemProfSumBuilder.addRecord(KV.second);
    const size_t NumAllocSites = KV.second.AllocSites.size();
    if (NumAllocSites > 0) {
      NumAllocFunctions++;
      NumMibInfo += NumAllocSites;
    }
  }

  // Print the summary first.
  auto MemProfSum = MemProfSumBuilder.getSummary();
  MemProfSum->printSummaryYaml(OS);

  OS << "MemprofProfile:\n";
  OS << "  Summary:\n";
  OS << "    Version: " << MemprofRawVersion << "\n";
  OS << "    NumSegments: " << SegmentInfo.size() << "\n";
  OS << "    NumMibInfo: " << NumMibInfo << "\n";
  OS << "    NumAllocFunctions: " << NumAllocFunctions << "\n";
  OS << "    NumStackOffsets: " << StackMap.size() << "\n";
  // Print out the segment information.
  OS << "  Segments:\n";
  for (const auto &Entry : SegmentInfo) {
    OS << "  -\n";
    OS << "    BuildId: " << getBuildIdString(Entry) << "\n";
    OS << "    Start: 0x" << llvm::utohexstr(Entry.Start) << "\n";
    OS << "    End: 0x" << llvm::utohexstr(Entry.End) << "\n";
    OS << "    Offset: 0x" << llvm::utohexstr(Entry.Offset) << "\n";
  }
  // Print out the merged contents of the profiles.
  OS << "  Records:\n";
  for (const auto &[GUID, Record] : *this) {
    OS << "  -\n";
    OS << "    FunctionGUID: " << GUID << "\n";
    Record.print(OS);
  }
}

Error RawMemProfReader::initialize(std::unique_ptr<MemoryBuffer> DataBuffer) {
  const StringRef FileName = Binary.getBinary()->getFileName();

  auto *ElfObject = dyn_cast<object::ELFObjectFileBase>(Binary.getBinary());
  if (!ElfObject) {
    return report(make_error<StringError>(Twine("Not an ELF file: "),
                                          inconvertibleErrorCode()),
                  FileName);
  }

  // Check whether the profiled binary was built with position independent code
  // (PIC). Perform sanity checks for assumptions we rely on to simplify
  // symbolization.
  auto *Elf64LEObject = llvm::cast<llvm::object::ELF64LEObjectFile>(ElfObject);
  const llvm::object::ELF64LEFile &ElfFile = Elf64LEObject->getELFFile();
  auto PHdrsOr = ElfFile.program_headers();
  if (!PHdrsOr)
    return report(
        make_error<StringError>(Twine("Could not read program headers: "),
                                inconvertibleErrorCode()),
        FileName);

  int NumExecutableSegments = 0;
  for (const auto &Phdr : *PHdrsOr) {
    if (Phdr.p_type == ELF::PT_LOAD) {
      if (Phdr.p_flags & ELF::PF_X) {
        // We assume a single executable (text) load segment to simplify the
        // post processing of the segment mappings recorded by the runtime.
        if (++NumExecutableSegments > 1) {
          return report(
              make_error<StringError>(
                  "Expect only one executable load segment in the binary",
                  inconvertibleErrorCode()),
              FileName);
        }
        // The preferred load address of the executable segment is used later
        // to adjust profiled addresses before symbolization. We rely on the
        // segment being page aligned and starting at a zero file offset,
        // which holds for the binaries we currently support.
        PreferredTextSegmentAddress = Phdr.p_vaddr;
        assert(Phdr.p_vaddr == (Phdr.p_vaddr & ~(0x1000 - 1U)) &&
               "Expect p_vaddr to always be page aligned");
        assert(Phdr.p_offset == 0 && "Expect p_offset = 0 for symbolization.");
      }
    }
  }

  auto Triple = ElfObject->makeTriple();
  if (!Triple.isX86())
    return report(make_error<StringError>(Twine("Unsupported target: ") +
                                              Triple.getArchName(),
                                          inconvertibleErrorCode()),
                  FileName);

  // Process the raw profile.
  if (Error E = readRawProfile(std::move(DataBuffer)))
    return E;

  if (Error E = setupForSymbolization())
    return E;

  auto *Object = cast<object::ObjectFile>(Binary.getBinary());
  std::unique_ptr<DIContext> Context = DWARFContext::create(
      *Object, DWARFContext::ProcessDebugRelocations::Process);

  auto SOFOr = symbolize::SymbolizableObjectFile::create(
      Object, std::move(Context), false);
  if (!SOFOr)
    return report(SOFOr.takeError(), FileName);
  auto Symbolizer = std::move(SOFOr.get());

  // The symbolizer is only needed while the stack frames are symbolized; hand
  // over ownership so its memory is released before the raw profile is mapped
  // to records below. This reduces peak memory usage.
  if (Error E = symbolizeAndFilterStackFrames(std::move(Symbolizer)))
    return E;

  return mapRawProfileToRecords();
}

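// Match the build id of the profiled binary against the segment entries
// recorded in the raw profile to locate the profiled text segment bounds used
// for address adjustment during symbolization.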
Error RawMemProfReader::setupForSymbolization() {
  auto *Object = cast<object::ObjectFile>(Binary.getBinary());
  object::BuildIDRef BinaryId = object::getBuildID(Object);
  if (BinaryId.empty())
    return make_error<StringError>(Twine("No build id found in binary ") +
                                       Binary.getBinary()->getFileName(),
                                   inconvertibleErrorCode());

  int NumMatched = 0;
  for (const auto &Entry : SegmentInfo) {
    llvm::ArrayRef<uint8_t> SegmentId(Entry.BuildId, Entry.BuildIdSize);
    if (BinaryId == SegmentId) {
      // We assume only one text segment in the profiled binary for our
      // symbolization requirements.
      if (++NumMatched > 1) {
        return make_error<StringError>(
            "We expect only one executable segment in the profiled binary",
            inconvertibleErrorCode());
      }
      ProfiledTextSegmentStart = Entry.Start;
      ProfiledTextSegmentEnd = Entry.End;
    }
  }
  if (NumMatched == 0)
    return make_error<StringError>(
        Twine("No matching executable segments found in binary ") +
            Binary.getBinary()->getFileName(),
        inconvertibleErrorCode());
  assert((PreferredTextSegmentAddress == 0 ||
          (PreferredTextSegmentAddress == ProfiledTextSegmentStart)) &&
         "Expect text segment address to be 0 or equal to profiled text "
         "segment start.");
  return Error::success();
}

Error RawMemProfReader::mapRawProfileToRecords() {
  // Hold a mapping from function to the callsites recorded inside it in the
  // raw profile. The symbolized frame vectors themselves are used as the
  // callsite keys so that repeated callstacks are deduplicated.
  using LocationPtr = const llvm::SmallVector<FrameId> *;
  llvm::MapVector<GlobalValue::GUID, llvm::SetVector<LocationPtr>>
      PerFunctionCallSites;

  // Convert the raw profile callstack data into memprof records. While doing
  // so, also track the callsites observed for each function.
  for (const auto &[StackId, MIB] : CallstackProfileData) {
    auto It = StackMap.find(StackId);
    if (It == StackMap.end())
      return make_error<InstrProfError>(
          instrprof_error::hash_mismatch,
          "memprof callstack record does not contain id: " + Twine(StackId));

    // Construct the symbolized callstack.
    llvm::SmallVector<FrameId> Callstack;
    Callstack.reserve(It->getSecond().size());

    llvm::ArrayRef<uint64_t> Addresses = It->getSecond();
    for (size_t I = 0; I < Addresses.size(); I++) {
      const uint64_t Address = Addresses[I];
      assert(SymbolizedFrame.count(Address) > 0 &&
             "Address not found in SymbolizedFrame map");
      const SmallVector<FrameId> &Frames = SymbolizedFrame[Address];

      assert(!idToFrame(Frames.back()).IsInlineFrame &&
             "The last frame should not be inlined");

      // Record the callsites for each function. Skip the first frame of the
      // first address since it is the allocation site itself that is recorded
      // as an alloc site.
      for (size_t J = 0; J < Frames.size(); J++) {
        if (I == 0 && J == 0)
          continue;
        // We attach the entire bottom-up frame here for the callsite even
        // though we only need the frames up to and including the frame for
        // Frames[J].Function. This will enable better deduplication for
        // compression in the future.
        const GlobalValue::GUID Guid = idToFrame(Frames[J]).Function;
        PerFunctionCallSites[Guid].insert(&Frames);
      }

      // Add all the frames to the current allocation callstack.
      Callstack.append(Frames.begin(), Frames.end());
    }

    CallStackId CSId = MemProfData.addCallStack(Callstack);

    // We attach the memprof record to each function bottom-up including the
    // first non-inline frame.
    for (size_t I = 0; ; I++) {
      const Frame &F = idToFrame(Callstack[I]);
      IndexedMemProfRecord &Record = MemProfData.Records[F.Function];
      Record.AllocSites.emplace_back(CSId, MIB);

      if (!F.IsInlineFrame)
        break;
    }
  }

  // Fill in the related callsites per function.
  for (const auto &[Id, Locs] : PerFunctionCallSites) {
    // Some functions may have only callsite data and no allocation data. Here
    // we insert a new entry for callsite data if we need to.
    IndexedMemProfRecord &Record = MemProfData.Records[Id];
    for (LocationPtr Loc : Locs)
      Record.CallSites.emplace_back(MemProfData.addCallStack(*Loc));
  }

  return Error::success();
}

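// Symbolize every unique virtual address that appears in the stack map,
// caching results per address, and drop frames that cannot be symbolized or
// that belong to the memprof runtime itself.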
Error RawMemProfReader::symbolizeAndFilterStackFrames(
    std::unique_ptr<llvm::symbolize::SymbolizableModule> Symbolizer) {
  // The specifier to use when symbolization is requested.
  const DILineInfoSpecifier Specifier(
      DILineInfoSpecifier::FileLineInfoKind::RawValue,
      DILineInfoSpecifier::FunctionNameKind::LinkageName);

  // For entries where all PCs in the callstack are filtered out, we erase the
  // entry from the stack map.
  llvm::SmallVector<uint64_t> EntriesToErase;
  // We keep track of all prior discarded entries so that we can avoid invoking
  // the symbolizer for the same addresses.
  llvm::DenseSet<uint64_t> AllVAddrsToDiscard;
  for (auto &Entry : StackMap) {
    for (const uint64_t VAddr : Entry.getSecond()) {
      // Check if we have already symbolized and cached the result or if we
      // don't want to attempt symbolization since we know this address is bad.
      // In this case the address is also removed from the current callstack.
      if (SymbolizedFrame.count(VAddr) > 0 ||
          AllVAddrsToDiscard.contains(VAddr))
        continue;

      Expected<DIInliningInfo> DIOr = Symbolizer->symbolizeInlinedCode(
          getModuleOffset(VAddr), Specifier, false);
      if (!DIOr)
        return DIOr.takeError();
      DIInliningInfo DI = DIOr.get();

      // Drop frames which we can't symbolize or if they belong to the runtime.
      if (DI.getFrame(0).FunctionName == DILineInfo::BadString ||
          isRuntimePath(DI.getFrame(0).FileName)) {
        AllVAddrsToDiscard.insert(VAddr);
        continue;
      }

      for (size_t I = 0, NumFrames = DI.getNumberOfFrames(); I < NumFrames;
           I++) {
        const auto &DIFrame = DI.getFrame(I);
        const uint64_t Guid = getGUID(DIFrame.FunctionName);
        const Frame F(Guid, DIFrame.Line - DIFrame.StartLine, DIFrame.Column,
                      // Only the last entry is not an inlined location.
                      I != NumFrames - 1);
        // Here we retain a mapping from the GUID to canonical symbol name
        // instead of adding it to the frame object directly to reduce memory
        // overhead. This is because there can be many unique frames,
        // particularly for callsite frames.
        if (KeepSymbolName) {
          StringRef CanonicalName =
              sampleprof::FunctionSamples::getCanonicalFnName(
                  DIFrame.FunctionName);
          GuidToSymbolName.insert({Guid, CanonicalName.str()});
        }

        SymbolizedFrame[VAddr].push_back(MemProfData.addFrame(F));
      }
    }

    auto &CallStack = Entry.getSecond();
    llvm::erase_if(CallStack, [&AllVAddrsToDiscard](const uint64_t A) {
      return AllVAddrsToDiscard.contains(A);
    });
    if (CallStack.empty())
      EntriesToErase.push_back(Entry.getFirst());
  }

  // Drop the entries where the callstack is empty.
  for (const uint64_t Id : EntriesToErase) {
    StackMap.erase(Id);
    if (auto It = CallstackProfileData.find(Id);
        It != CallstackProfileData.end()) {
      if (It->second.AccessHistogramSize > 0)
        free((void *)It->second.AccessHistogram);
      CallstackProfileData.erase(It);
    }
  }

  if (StackMap.empty())
    return make_error<InstrProfError>(
        instrprof_error::malformed,
        "no entries in callstack map after symbolization");

  return Error::success();
}

std::vector<std::string>
RawMemProfReader::peekBuildIds(MemoryBuffer *DataBuffer) {
  const char *Next = DataBuffer->getBufferStart();
  // Use a SetVector since a profile file may contain multiple raw profile
  // dumps, each with segment information. We want them unique and in the order
  // they were stored in the profile; the profiled binary should be the first
  // entry. The runtime uses dl_iterate_phdr and the first object visited by
  // the callback is the main program.
  // https://man7.org/linux/man-pages/man3/dl_iterate_phdr.3.html
  llvm::SetVector<std::string, std::vector<std::string>,
                  llvm::SmallSet<std::string, 10>>
      BuildIds;
  while (Next < DataBuffer->getBufferEnd()) {
    const auto *Header = reinterpret_cast<const memprof::Header *>(Next);

    const llvm::SmallVector<SegmentEntry> Entries =
        readSegmentEntries(Next + Header->SegmentOffset);

    for (const auto &Entry : Entries)
      BuildIds.insert(getBuildIdString(Entry));

    Next += Header->TotalSize;
  }
  return BuildIds.takeVector();
}

// Dispatch to the deserialization routine that matches the raw profile
// version recorded in the header. Whether the version is supported was
// already checked when the reader was created.
llvm::SmallVector<std::pair<uint64_t, MemInfoBlock>>
RawMemProfReader::readMemInfoBlocks(const char *Ptr) {
  if (MemprofRawVersion == 3ULL)
    return readMemInfoBlocksV3(Ptr);
  if (MemprofRawVersion == 4ULL)
    return readMemInfoBlocksV4(Ptr);
  if (MemprofRawVersion == 5ULL)
    return readMemInfoBlocksV5(Ptr);
  llvm_unreachable(
      "Panic: Unsupported version number when reading MemInfoBlocks");
}

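// A raw profile file may contain several dumps back to back; read each one,
// verify that the segment layout matches, and merge MIBs and callstacks by
// their stack ids.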
Error RawMemProfReader::readRawProfile(
    std::unique_ptr<MemoryBuffer> DataBuffer) {
  const char *Next = DataBuffer->getBufferStart();

  while (Next < DataBuffer->getBufferEnd()) {
    const auto *Header = reinterpret_cast<const memprof::Header *>(Next);

    // Set the reader version to the raw version of the profile. Whether the
    // version is supported has already been checked before creating the
    // reader.
    MemprofRawVersion = Header->Version;

    // Read in the segment information and check whether it is the same across
    // all profile dumps in this file.
    const llvm::SmallVector<SegmentEntry> Entries =
        readSegmentEntries(Next + Header->SegmentOffset);
    if (!SegmentInfo.empty() && SegmentInfo != Entries) {
      // We do not expect segment information to change when deserializing from
      // the same binary profile file. This can happen if dynamic libraries are
      // loaded/unloaded between profile dumping.
      return make_error<InstrProfError>(
          instrprof_error::malformed,
          "memprof raw profile has different segment information");
    }
    SegmentInfo.assign(Entries.begin(), Entries.end());

    // Read in the MemInfoBlocks. Merge them based on stack id - we assume that
    // raw profiles in the same binary file are from the same process so the
    // stackdepot ids are the same.
    for (const auto &[Id, MIB] : readMemInfoBlocks(Next + Header->MIBOffset)) {
      if (CallstackProfileData.count(Id)) {
        // Only one histogram buffer can be retained after the merge; free the
        // shorter of the two so it does not leak.
        if (MemprofRawVersion >= 4ULL &&
            (CallstackProfileData[Id].AccessHistogramSize > 0 ||
             MIB.AccessHistogramSize > 0)) {
          uintptr_t ShorterHistogram;
          if (CallstackProfileData[Id].AccessHistogramSize >
              MIB.AccessHistogramSize)
            ShorterHistogram = MIB.AccessHistogram;
          else
            ShorterHistogram = CallstackProfileData[Id].AccessHistogram;
          CallstackProfileData[Id].Merge(MIB);
          free((void *)ShorterHistogram);
        } else {
          CallstackProfileData[Id].Merge(MIB);
        }
      } else {
        CallstackProfileData[Id] = MIB;
      }
    }

    // Read in the callstack for each id. For multiple raw profiles in the same
    // file, we expect that the callstack is the same for a unique id.
    const CallStackMap CSM = readStackInfo(Next + Header->StackOffset);
    if (StackMap.empty()) {
      StackMap = CSM;
    } else {
      if (mergeStackMap(CSM, StackMap))
        return make_error<InstrProfError>(
            instrprof_error::malformed,
            "memprof raw profile got different call stack for same id");
    }

    Next += Header->TotalSize;
  }

  return Error::success();
}

object::SectionedAddress
RawMemProfReader::getModuleOffset(const uint64_t VirtualAddress) {
  if (VirtualAddress > ProfiledTextSegmentStart &&
      VirtualAddress <= ProfiledTextSegmentEnd) {
    // The profiled binary may have been loaded at a different address than the
    // preferred base recorded in its program headers (e.g. a position
    // independent executable). Adjust addresses from the profiled text segment
    // into the binary's preferred address space before symbolization.
    const uint64_t AdjustedAddress =
        VirtualAddress + PreferredTextSegmentAddress - ProfiledTextSegmentStart;
    return object::SectionedAddress{AdjustedAddress};
  }
  // Addresses which do not originate from the profiled text segment are
  // returned unchanged.
  return object::SectionedAddress{VirtualAddress};
}

Error RawMemProfReader::readNextRecord(
    GuidMemProfRecordPair &GuidRecord,
    std::function<const Frame(const FrameId)> Callback) {
  // Create a new callback for the memprof record iterator so that we can
  // provide the symbol name if the reader was initialized with KeepSymbolName
  // set to true. This is useful for debugging and testing.
  auto IdToFrameCallback = [this](const FrameId Id) {
    Frame F = this->idToFrame(Id);
    if (!this->KeepSymbolName)
      return F;
    auto Iter = this->GuidToSymbolName.find(F.Function);
    assert(Iter != this->GuidToSymbolName.end());
    F.SymbolName = std::make_unique<std::string>(Iter->getSecond());
    return F;
  };
  return MemProfReader::readNextRecord(GuidRecord, IdToFrameCallback);
}

Expected<std::unique_ptr<YAMLMemProfReader>>
YAMLMemProfReader::create(const Twine &Path) {
  auto BufferOr = MemoryBuffer::getFileOrSTDIN(Path);
  if (std::error_code EC = BufferOr.getError())
    return report(errorCodeToError(EC), Path.getSingleStringRef());

  std::unique_ptr<MemoryBuffer> Buffer(BufferOr.get().release());
  return create(std::move(Buffer));
}

Expected<std::unique_ptr<YAMLMemProfReader>>
YAMLMemProfReader::create(std::unique_ptr<MemoryBuffer> Buffer) {
  auto Reader = std::make_unique<YAMLMemProfReader>();
  Reader->parse(Buffer->getBuffer());
  return std::move(Reader);
}

bool YAMLMemProfReader::hasFormat(const Twine &Path) {
  auto BufferOr = MemoryBuffer::getFileOrSTDIN(Path);
  if (!BufferOr)
    return false;

  std::unique_ptr<MemoryBuffer> Buffer(BufferOr.get().release());
  return hasFormat(*Buffer);
}

bool YAMLMemProfReader::hasFormat(const MemoryBuffer &Buffer) {
  return Buffer.getBuffer().starts_with("---");
}
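
// Parse a YAML memprof document: heap profile records are converted into
// their indexed in-memory form, and an optional data access profile section
// is forwarded to DataAccessProfData.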
void YAMLMemProfReader::parse(StringRef YAMLData) {
  memprof::AllMemProfData Doc;
  yaml::Input Yin(YAMLData);

  Yin >> Doc;
  if (Yin.error())
    return;

  // Add a call stack to MemProfData.CallStacks and return its CallStackId.
  auto AddCallStack = [&](ArrayRef<Frame> CallStack) -> CallStackId {
    SmallVector<FrameId> IndexedCallStack;
    IndexedCallStack.reserve(CallStack.size());
    for (const Frame &F : CallStack)
      IndexedCallStack.push_back(MemProfData.addFrame(F));
    return MemProfData.addCallStack(std::move(IndexedCallStack));
  };

  for (const auto &[GUID, Record] : Doc.HeapProfileRecords) {
    IndexedMemProfRecord IndexedRecord;

    // Convert AllocationInfo to IndexedAllocationInfo.
    for (const auto &AI : Record.AllocSites) {
      CallStackId CSId = AddCallStack(AI.CallStack);
      IndexedRecord.AllocSites.emplace_back(CSId, AI.Info);
    }

    // Populate CallSites along with their CalleeGuids.
    for (const auto &CallSite : Record.CallSites) {
      CallStackId CSId = AddCallStack(CallSite.Frames);
      IndexedRecord.CallSites.emplace_back(CSId, CallSite.CalleeGuids);
    }

    MemProfData.Records.try_emplace(GUID, std::move(IndexedRecord));
  }

  if (Doc.YamlifiedDataAccessProfiles.Records.empty() &&
      Doc.YamlifiedDataAccessProfiles.KnownColdStrHashes.empty() &&
      Doc.YamlifiedDataAccessProfiles.KnownColdSymbols.empty())
    return;

  auto ToSymHandleRef =
      [](const memprof::SymbolHandle &Handle) -> memprof::SymbolHandleRef {
    if (std::holds_alternative<std::string>(Handle))
      return StringRef(std::get<std::string>(Handle));
    return std::get<uint64_t>(Handle);
  };

  auto DataAccessProfileData = std::make_unique<memprof::DataAccessProfData>();
  for (const auto &Record : Doc.YamlifiedDataAccessProfiles.Records)
    if (Error E = DataAccessProfileData->setDataAccessProfile(
            ToSymHandleRef(Record.SymHandle), Record.AccessCount,
            Record.Locations))
      reportFatalInternalError(std::move(E));

  for (const uint64_t Hash : Doc.YamlifiedDataAccessProfiles.KnownColdStrHashes)
    if (Error E = DataAccessProfileData->addKnownSymbolWithoutSamples(Hash))
      reportFatalInternalError(std::move(E));
  for (const std::string &Sym :
       Doc.YamlifiedDataAccessProfiles.KnownColdSymbols)
    if (Error E = DataAccessProfileData->addKnownSymbolWithoutSamples(Sym))
      reportFatalInternalError(std::move(E));
  setDataAccessProfileData(std::move(DataAccessProfileData));
}
} // namespace memprof
} // namespace llvm