LLVM: lib/ProfileData/MemProfReader.cpp Source File (original) (raw)
1
2
3
4
5
6
7
8
9
10
11
12
13#include
14#include
15#include <type_traits>
16
42
43#define DEBUG_TYPE "memprof"
44
45namespace llvm {
46namespace memprof {
47namespace {
/// Read a value of integral type \p T from \p Ptr, which must be suitably
/// aligned for \p T. Used for fast, alignment-checked decoding of raw
/// memprof buffers.
///
/// NOTE(review): the extracted source shows `template inline T ...` — the
/// template parameter list was lost to HTML extraction. `<typename T>` (and
/// `<T>` in the static_assert) is restored here, as required by the body's
/// uses of T; this matches upstream LLVM.
template <typename T> inline T alignedRead(const char *Ptr) {
  static_assert(std::is_integral_v<T>, "Not an integral type");
  // Dereferencing a misaligned pointer is UB on strict-alignment targets,
  // so assert alignment before the reinterpret_cast read below.
  assert(reinterpret_cast<size_t>(Ptr) % sizeof(T) == 0 && "Unaligned Read");
  return *reinterpret_cast<const T *>(Ptr);
}
53
54Error checkBuffer(const MemoryBuffer &Buffer) {
57
58 if (Buffer.getBufferSize() == 0)
60
61 if (Buffer.getBufferSize() < sizeof(Header)) {
63 }
64
65
66
68 const char *Next = Buffer.getBufferStart();
69 while (Next < Buffer.getBufferEnd()) {
70 const auto *H = reinterpret_cast<const Header *>(Next);
71
72
73 bool IsSupported = false;
74 for (auto SupportedVersion : MEMPROF_RAW_SUPPORTED_VERSIONS) {
75 if (H->Version == SupportedVersion)
76 IsSupported = true;
77 }
78 if (!IsSupported) {
80 }
81
82 TotalSize += H->TotalSize;
83 Next += H->TotalSize;
84 }
85
86 if (Buffer.getBufferSize() != TotalSize) {
88 }
90}
91
93 using namespace support;
94
95 const uint64_t NumItemsToRead =
96 endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
98 for (uint64_t I = 0; I < NumItemsToRead; I++) {
99 Items.push_back(*reinterpret_cast<const SegmentEntry *>(
100 Ptr + I * sizeof(SegmentEntry)));
101 }
102 return Items;
103}
104
106readMemInfoBlocksV3(const char *Ptr) {
107 using namespace support;
108
109 const uint64_t NumItemsToRead =
110 endian::readNext<uint64_t, llvm::endianness::little, unaligned>(Ptr);
111
113 for (uint64_t I = 0; I < NumItemsToRead; I++) {
115 endian::readNext<uint64_t, llvm::endianness::little, unaligned>(Ptr);
116
117
118
119
120
121
122
123 MemInfoBlock MIB = *reinterpret_cast<const MemInfoBlock *>(Ptr);
124
125 MIB.AccessHistogramSize = 0;
126 MIB.AccessHistogram = 0;
127
129
130 Ptr += MEMPROF_V3_MIB_SIZE;
131 }
132 return Items;
133}
134
136readMemInfoBlocksV4(const char *Ptr) {
137 using namespace support;
138
139 const uint64_t NumItemsToRead =
140 endian::readNext<uint64_t, llvm::endianness::little, unaligned>(Ptr);
141
143 for (uint64_t I = 0; I < NumItemsToRead; I++) {
145 endian::readNext<uint64_t, llvm::endianness::little, unaligned>(Ptr);
146
147
148 MemInfoBlock MIB = *reinterpret_cast<const MemInfoBlock *>(Ptr);
149
150
151 Ptr += sizeof(MemInfoBlock);
152
153 if (MIB.AccessHistogramSize > 0) {
154 MIB.AccessHistogram =
155 (uintptr_t)malloc(MIB.AccessHistogramSize * sizeof(uint64_t));
156 }
157
158 for (uint64_t J = 0; J < MIB.AccessHistogramSize; J++) {
159 ((uint64_t *)MIB.AccessHistogram)[J] =
160 endian::readNext<uint64_t, llvm::endianness::little, unaligned>(Ptr);
161 }
163 }
164 return Items;
165}
166
168 using namespace support;
169
170 const uint64_t NumItemsToRead =
171 endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
173
174 for (uint64_t I = 0; I < NumItemsToRead; I++) {
176 endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
178 endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
179
180 SmallVector<uint64_t> CallStack;
181 CallStack.reserve(NumPCs);
182 for (uint64_t J = 0; J < NumPCs; J++) {
183 CallStack.push_back(
184 endian::readNext<uint64_t, llvm::endianness::little>(Ptr));
185 }
186
187 Items[StackId] = CallStack;
188 }
189 return Items;
190}
191
192
193
194
196 for (const auto &[Id, Stack] : From) {
197 auto [It, Inserted] = To.try_emplace(Id, Stack);
198
199 if (!Inserted && Stack != It->second)
200 return true;
201 }
202 return false;
203}
204
205Error report(Error E, const StringRef Context) {
207 std::move(E));
208}
209
210bool isRuntimePath(const StringRef Path) {
212
213
214 return Filename == "memprof_malloc_linux.cpp" ||
215 Filename == "memprof_interceptors.cpp" ||
216 Filename == "memprof_new_delete.cpp";
217}
218
219std::string getBuildIdString(const SegmentEntry &Entry) {
220
221 if (Entry.BuildIdSize == 0)
222 return "";
223
224 std::string Str;
225 raw_string_ostream OS(Str);
226 for (size_t I = 0; I < Entry.BuildIdSize; I++) {
228 }
229 return OS.str();
230}
231}
232
233Expected<std::unique_ptr>
235 bool KeepName) {
237 if (std::error_code EC = BufferOr.getError())
238 return report(errorCodeToError(EC), Path.getSingleStringRef());
239
240 std::unique_ptr Buffer(BufferOr.get().release());
241 return create(std::move(Buffer), ProfiledBinary, KeepName);
242}
243
246 const StringRef ProfiledBinary, bool KeepName) {
247 if (Error E = checkBuffer(*Buffer))
248 return report(std::move(E), Buffer->getBufferIdentifier());
249
250 if (ProfiledBinary.empty()) {
251
252 const std::vectorstd::string BuildIds = peekBuildIds(Buffer.get());
253 std::string ErrorMessage(
254 R"(Path to profiled binary is empty, expected binary with one of the following build ids:
255)");
256 for (const auto &Id : BuildIds) {
257 ErrorMessage += "\n BuildId: ";
258 ErrorMessage += Id;
259 }
260 return report(
262 "");
263 }
264
266 if (!BinaryOr) {
267 return report(BinaryOr.takeError(), ProfiledBinary);
268 }
269
270
271 std::unique_ptr Reader(
273 if (Error E = Reader->initialize(std::move(Buffer))) {
274 return std::move(E);
275 }
276 return std::move(Reader);
277}
278
279
282 for (auto &[_, MIB] : CallstackProfileData) {
283 if (MemprofRawVersion >= 4ULL && MIB.AccessHistogramSize > 0) {
284 free((void *)MIB.AccessHistogram);
285 }
286 }
287}
291 if (!BufferOr)
292 return false;
293
294 std::unique_ptr Buffer(BufferOr.get().release());
296}
300 return false;
301
302
304 return Magic == MEMPROF_RAW_MAGIC_64;
305}
308 uint64_t NumAllocFunctions = 0, NumMibInfo = 0;
310 const size_t NumAllocSites = KV.second.AllocSites.size();
311 if (NumAllocSites > 0) {
312 NumAllocFunctions++;
313 NumMibInfo += NumAllocSites;
314 }
315 }
316
317 OS << "MemprofProfile:\n";
318 OS << " Summary:\n";
319 OS << " Version: " << MemprofRawVersion << "\n";
320 OS << " NumSegments: " << SegmentInfo.size() << "\n";
321 OS << " NumMibInfo: " << NumMibInfo << "\n";
322 OS << " NumAllocFunctions: " << NumAllocFunctions << "\n";
323 OS << " NumStackOffsets: " << StackMap.size() << "\n";
324
325 OS << " Segments:\n";
326 for (const auto &Entry : SegmentInfo) {
327 OS << " -\n";
328 OS << " BuildId: " << getBuildIdString(Entry) << "\n";
329 OS << " Start: 0x" << llvm::utohexstr(Entry.Start) << "\n";
330 OS << " End: 0x" << llvm::utohexstr(Entry.End) << "\n";
331 OS << " Offset: 0x" << llvm::utohexstr(Entry.Offset) << "\n";
332 }
333
334 OS << " Records:\n";
335 for (const auto &[GUID, Record] : *this) {
336 OS << " -\n";
337 OS << " FunctionGUID: " << GUID << "\n";
339 }
340}
341
342Error RawMemProfReader::initialize(std::unique_ptr DataBuffer) {
343 const StringRef FileName = Binary.getBinary()->getFileName();
344
345 auto *ElfObject = dyn_castobject::ELFObjectFileBase(Binary.getBinary());
346 if (!ElfObject) {
347 return report(make_error(Twine("Not an ELF file: "),
349 FileName);
350 }
351
352
353
354
355 auto *Elf64LEObject = llvm::castllvm::object::ELF64LEObjectFile(ElfObject);
357 auto PHdrsOr = ElfFile.program_headers();
358 if (!PHdrsOr)
359 return report(
360 make_error(Twine("Could not read program headers: "),
362 FileName);
363
364 int NumExecutableSegments = 0;
365 for (const auto &Phdr : *PHdrsOr) {
368
369
370 if (++NumExecutableSegments > 1) {
371 return report(
372 make_error(
373 "Expect only one executable load segment in the binary",
375 FileName);
376 }
377
378
379
380
381
382 PreferredTextSegmentAddress = Phdr.p_vaddr;
383 assert(Phdr.p_vaddr == (Phdr.p_vaddr & ~(0x1000 - 1U)) &&
384 "Expect p_vaddr to always be page aligned");
385 assert(Phdr.p_offset == 0 && "Expect p_offset = 0 for symbolization.");
386 }
387 }
388 }
389
390 auto Triple = ElfObject->makeTriple();
391 if (!Triple.isX86())
392 return report(make_error(Twine("Unsupported target: ") +
393 Triple.getArchName(),
395 FileName);
396
397
398 if (Error E = readRawProfile(std::move(DataBuffer)))
399 return E;
400
401 if (Error E = setupForSymbolization())
402 return E;
403
404 auto *Object = castobject::ObjectFile(Binary.getBinary());
407
409 Object, std::move(Context), false);
410 if (!SOFOr)
411 return report(SOFOr.takeError(), FileName);
412 auto Symbolizer = std::move(SOFOr.get());
413
414
415
416
417
418 if (Error E = symbolizeAndFilterStackFrames(std::move(Symbolizer)))
419 return E;
420
421 return mapRawProfileToRecords();
422}
423
424Error RawMemProfReader::setupForSymbolization() {
425 auto *Object = castobject::ObjectFile(Binary.getBinary());
427 if (BinaryId.empty())
428 return make_error(Twine("No build id found in binary ") +
429 Binary.getBinary()->getFileName(),
431
432 int NumMatched = 0;
433 for (const auto &Entry : SegmentInfo) {
435 if (BinaryId == SegmentId) {
436
437
438 if (++NumMatched > 1) {
439 return make_error(
440 "We expect only one executable segment in the profiled binary",
442 }
443 ProfiledTextSegmentStart = Entry.Start;
444 ProfiledTextSegmentEnd = Entry.End;
445 }
446 }
447 assert(NumMatched != 0 && "No matching executable segments in segment info.");
448 assert((PreferredTextSegmentAddress == 0 ||
449 (PreferredTextSegmentAddress == ProfiledTextSegmentStart)) &&
450 "Expect text segment address to be 0 or equal to profiled text "
451 "segment start.");
453}
454
455Error RawMemProfReader::mapRawProfileToRecords() {
456
457
458
461 PerFunctionCallSites;
462
463
464
465 for (const auto &[StackId, MIB] : CallstackProfileData) {
466 auto It = StackMap.find(StackId);
467 if (It == StackMap.end())
468 return make_error(
470 "memprof callstack record does not contain id: " + Twine(StackId));
471
472
474 Callstack.reserve(It->getSecond().size());
475
477 for (size_t I = 0; I < Addresses.size(); I++) {
480 "Address not found in SymbolizedFrame map");
481 const SmallVector &Frames = SymbolizedFrame[Address];
482
484 "The last frame should not be inlined");
485
486
487
488
489 for (size_t J = 0; J < Frames.size(); J++) {
490 if (I == 0 && J == 0)
491 continue;
492
493
494
495
497 PerFunctionCallSites[Guid].insert(&Frames);
498 }
499
500
501 Callstack.append(Frames.begin(), Frames.end());
502 }
503
505
506
507
508 for (size_t I = 0; ; I++) {
509 const Frame &F = idToFrame(Callstack[I]);
511 Record.AllocSites.emplace_back(CSId, MIB);
512
513 if (.IsInlineFrame)
514 break;
515 }
516 }
517
518
519 for (const auto &[Id, Locs] : PerFunctionCallSites) {
520
521
523 for (LocationPtr Loc : Locs)
525 }
526
528}
529
530Error RawMemProfReader::symbolizeAndFilterStackFrames(
531 std::unique_ptrllvm::symbolize::SymbolizableModule Symbolizer) {
532
533 const DILineInfoSpecifier Specifier(
534 DILineInfoSpecifier::FileLineInfoKind::RawValue,
535 DILineInfoSpecifier::FunctionNameKind::LinkageName);
536
537
538
540
541
543 for (auto &Entry : StackMap) {
545
546
547
548 if (SymbolizedFrame.count(VAddr) > 0 ||
549 AllVAddrsToDiscard.contains(VAddr))
550 continue;
551
552 Expected DIOr = Symbolizer->symbolizeInlinedCode(
553 getModuleOffset(VAddr), Specifier, false);
554 if (!DIOr)
555 return DIOr.takeError();
556 DIInliningInfo DI = DIOr.get();
557
558
560 isRuntimePath(DI.getFrame(0).FileName)) {
561 AllVAddrsToDiscard.insert(VAddr);
562 continue;
563 }
564
565 for (size_t I = 0, NumFrames = DI.getNumberOfFrames(); I < NumFrames;
566 I++) {
567 const auto &DIFrame = DI.getFrame(I);
570 const Frame F(Guid, DIFrame.Line - DIFrame.StartLine, DIFrame.Column,
571
572 I != NumFrames - 1);
573
574
575
576
577 if (KeepSymbolName) {
578 StringRef CanonicalName =
580 DIFrame.FunctionName);
581 GuidToSymbolName.insert({Guid, CanonicalName.str()});
582 }
583
585 }
586 }
587
588 auto &CallStack = Entry.getSecond();
590 return AllVAddrsToDiscard.contains(A);
591 });
592 if (CallStack.empty())
594 }
595
596
597 for (const uint64_t Id : EntriesToErase) {
598 StackMap.erase(Id);
599 if (CallstackProfileData[Id].AccessHistogramSize > 0)
600 free((void *)CallstackProfileData[Id].AccessHistogram);
601 CallstackProfileData.erase(Id);
602 }
603
604 if (StackMap.empty())
605 return make_error(
607 "no entries in callstack map after symbolization");
608
610}
611
612std::vectorstd::string
615
616
617
618
619
620
623 BuildIds;
624 while (Next < DataBuffer->getBufferEnd()) {
625 const auto *Header = reinterpret_cast<const memprof::Header *>(Next);
626
628 readSegmentEntries(Next + Header->SegmentOffset);
629
630 for (const auto &Entry : Entries)
631 BuildIds.insert(getBuildIdString(Entry));
632
633 Next += Header->TotalSize;
634 }
636}
637
638
639
640
642RawMemProfReader::readMemInfoBlocks(const char *Ptr) {
643 if (MemprofRawVersion == 3ULL)
644 return readMemInfoBlocksV3(Ptr);
645 if (MemprofRawVersion == 4ULL)
646 return readMemInfoBlocksV4(Ptr);
648 "Panic: Unsupported version number when reading MemInfoBlocks");
649}
650
651Error RawMemProfReader::readRawProfile(
652 std::unique_ptr DataBuffer) {
653 const char *Next = DataBuffer->getBufferStart();
654
655 while (Next < DataBuffer->getBufferEnd()) {
656 const auto *Header = reinterpret_cast<const memprof::Header *>(Next);
657
658
659
660 MemprofRawVersion = Header->Version;
661
662
663
665 readSegmentEntries(Next + Header->SegmentOffset);
666 if (!SegmentInfo.empty() && SegmentInfo != Entries) {
667
668
669
670 return make_error(
672 "memprof raw profile has different segment information");
673 }
674 SegmentInfo.assign(Entries.begin(), Entries.end());
675
676
677
678
679 for (const auto &[Id, MIB] : readMemInfoBlocks(Next + Header->MIBOffset)) {
680 if (CallstackProfileData.count(Id)) {
681
682 if (MemprofRawVersion >= 4ULL &&
683 (CallstackProfileData[Id].AccessHistogramSize > 0 ||
684 MIB.AccessHistogramSize > 0)) {
685 uintptr_t ShorterHistogram;
686 if (CallstackProfileData[Id].AccessHistogramSize >
687 MIB.AccessHistogramSize)
688 ShorterHistogram = MIB.AccessHistogram;
689 else
690 ShorterHistogram = CallstackProfileData[Id].AccessHistogram;
691 CallstackProfileData[Id].Merge(MIB);
692 free((void *)ShorterHistogram);
693 } else {
694 CallstackProfileData[Id].Merge(MIB);
695 }
696 } else {
697 CallstackProfileData[Id] = MIB;
698 }
699 }
700
701
702
703 const CallStackMap CSM = readStackInfo(Next + Header->StackOffset);
704 if (StackMap.empty()) {
705 StackMap = CSM;
706 } else {
707 if (mergeStackMap(CSM, StackMap))
708 return make_error(
710 "memprof raw profile got different call stack for same id");
711 }
712
713 Next += Header->TotalSize;
714 }
715
717}
718
719object::SectionedAddress
720RawMemProfReader::getModuleOffset(const uint64_t VirtualAddress) {
721 if (VirtualAddress > ProfiledTextSegmentStart &&
722 VirtualAddress <= ProfiledTextSegmentEnd) {
723
724
725
726
727 const uint64_t AdjustedAddress =
728 VirtualAddress + PreferredTextSegmentAddress - ProfiledTextSegmentStart;
729 return object::SectionedAddress{AdjustedAddress};
730 }
731
732
733
734 return object::SectionedAddress{VirtualAddress};
735}
739 std::function<const Frame(const FrameId)> Callback) {
740
741
742
743 auto IdToFrameCallback = [this](const FrameId Id) {
745 if (!this->KeepSymbolName)
746 return F;
747 auto Iter = this->GuidToSymbolName.find(F.Function);
749 F.SymbolName = std::make_uniquestd::string(Iter->getSecond());
750 return F;
751 };
753}
754
758 if (std::error_code EC = BufferOr.getError())
759 return report(errorCodeToError(EC), Path.getSingleStringRef());
760
761 std::unique_ptr Buffer(BufferOr.get().release());
762 return create(std::move(Buffer));
763}
764
767 auto Reader = std::make_unique();
768 Reader->parse(Buffer->getBuffer());
769 return std::move(Reader);
770}
774 if (!BufferOr)
775 return false;
776
777 std::unique_ptr Buffer(BufferOr.get().release());
779}
783}
787 yaml::Input Yin(YAMLData);
788
789 Yin >> Doc;
790 if (Yin.error())
791 return;
792
793
800 };
801
804
805
808 IndexedRecord.AllocSites.emplace_back(CSId, AI.Info);
809 }
810
811
812 for (const auto &CallSite : Record.CallSites) {
813 CallStackId CSId = AddCallStack(CallSite);
814 IndexedRecord.CallSiteIds.push_back(CSId);
815 }
816
818 }
819}
820}
821}
BlockVerifier::State From
This file declares a library for handling Build IDs and using them to find debug info.
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
This file defines the DenseMap class.
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
This file implements a set that has insertion order iteration characteristics.
This file defines the SmallSet class.
This file defines the SmallVector class.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
size_t size() const
size - Get the array size.
static std::unique_ptr< DWARFContext > create(const object::ObjectFile &Obj, ProcessDebugRelocations RelocAction=ProcessDebugRelocations::Process, const LoadedObjectInfo *L=nullptr, std::string DWPName="", std::function< void(Error)> RecoverableErrorHandler=WithColor::defaultErrorHandler, std::function< void(Error)> WarningHandler=WithColor::defaultWarningHandler, bool ThreadSafe=false)
iterator find(const_arg_type_t< KeyT > Val)
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Implements a dense probed hash-table based set.
Lightweight error class with error context and mandatory checking.
static ErrorSuccess success()
Create a success value.
Tagged union holding either a T or a Error.
uint64_t GUID
Declare a type to represent a global unique identifier for a global value.
This class implements a map that also provides access to all stored values in a deterministic order.
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
This interface provides simple read-only access to a block of memory, and provides simple methods for...
size_t getBufferSize() const
StringRef getBuffer() const
static ErrorOr< std::unique_ptr< MemoryBuffer > > getFileOrSTDIN(const Twine &Filename, bool IsText=false, bool RequiresNullTerminator=true, std::optional< Align > Alignment=std::nullopt)
Open the specified file as a MemoryBuffer, or open stdin if the Filename is "-".
const char * getBufferStart() const
A vector that has set insertion semantics.
Vector takeVector()
Clear the SetVector and return the underlying vector.
bool insert(const value_type &X)
Insert a new element into the SetVector.
SmallSet - This maintains a set of unique values, optimizing for the case when the set is small (less...
void reserve(size_type N)
void append(ItTy in_start, ItTy in_end)
Add the specified range to the end of the SmallVector.
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
StringRef - Represent a constant reference to a string, i.e.
bool starts_with(StringRef Prefix) const
Check if this string starts with the given Prefix.
constexpr bool empty() const
empty - Check if the string is empty.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
std::pair< iterator, bool > insert(const ValueT &V)
bool contains(const_arg_type_t< ValueT > V) const
Check if the set contains the given element.
Helper class to iterate through stack ids in both metadata (memprof MIB and callsite) and the corresp...
const Frame & idToFrame(const FrameId Id) const
IndexedMemProfData MemProfData
virtual Error readNextRecord(GuidMemProfRecordPair &GuidRecord, std::function< const Frame(const FrameId)> Callback=nullptr)
llvm::MapVector< GlobalValue::GUID, IndexedMemProfRecord >::iterator Iter
std::pair< GlobalValue::GUID, MemProfRecord > GuidMemProfRecordPair
void printYAML(raw_ostream &OS)
static Expected< std::unique_ptr< RawMemProfReader > > create(const Twine &Path, StringRef ProfiledBinary, bool KeepName=false)
static std::vector< std::string > peekBuildIds(MemoryBuffer *DataBuffer)
Error readNextRecord(GuidMemProfRecordPair &GuidRecord, std::function< const Frame(const FrameId)> Callback) override
static bool hasFormat(const MemoryBuffer &DataBuffer)
virtual ~RawMemProfReader() override
static bool hasFormat(const MemoryBuffer &DataBuffer)
static Expected< std::unique_ptr< YAMLMemProfReader > > create(const Twine &Path)
void parse(StringRef YAMLData)
This class implements an extremely fast bulk output stream that can only output to a stream.
static StringRef getCanonicalFnName(const Function &F)
Return the canonical name for a function, taking into account suffix elision policy attributes.
static Expected< std::unique_ptr< SymbolizableObjectFile > > create(const object::ObjectFile *Obj, std::unique_ptr< DIContext > DICtx, bool UntagAddresses)
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
llvm::DenseMap< uint64_t, llvm::SmallVector< uint64_t > > CallStackMap
BuildIDRef getBuildID(const ObjectFile *Obj)
Returns the build ID, if any, contained in the given object file.
ArrayRef< uint8_t > BuildIDRef
A reference to a BuildID in binary form.
Expected< std::unique_ptr< Binary > > createBinary(MemoryBufferRef Source, LLVMContext *Context=nullptr, bool InitContent=true)
Create a Binary from Source, autodetecting the file type.
StringRef filename(StringRef path LLVM_LIFETIME_BOUND, Style style=Style::native)
Get filename.
This is an optimization pass for GlobalISel generic memory operations.
std::error_code inconvertibleErrorCode()
The value returned by this function can be returned from convertToErrorCode for Error values where no...
Error createStringError(std::error_code EC, char const *Fmt, const Ts &... Vals)
Create formatted StringError object.
Error joinErrors(Error E1, Error E2)
Concatenate errors.
FormattedNumber format_hex_no_prefix(uint64_t N, unsigned Width, bool Upper=false)
format_hex_no_prefix - Output N as a fixed width hexadecimal.
void erase_if(Container &C, UnaryPredicate P)
Provide a container algorithm similar to C++ Library Fundamentals v2's erase_if which is equivalent t...
Error errorCodeToError(std::error_code EC)
Helper for converting an std::error_code to a Error.
static constexpr const char *const BadString
std::vector< GUIDMemProfRecordPair > HeapProfileRecords
std::vector< Frame > CallStack
PortableMemInfoBlock Info
GlobalValue::GUID Function
llvm::MapVector< GlobalValue::GUID, IndexedMemProfRecord > Records
CallStackId addCallStack(ArrayRef< FrameId > CS)
FrameId addFrame(const Frame &F)
llvm::SmallVector< CallStackId > CallSiteIds
llvm::SmallVector< IndexedAllocationInfo > AllocSites
static GlobalValue::GUID getGUID(const StringRef FunctionName)