//===- InstrProfReader.cpp - Instrumented profiling reader ----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains support for reading profiling data for clang's
// instrumentation based PGO and coverage.
//
//===----------------------------------------------------------------------===//

#include "llvm/ProfileData/InstrProfReader.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/IR/ProfileSummary.h"
#include "llvm/ProfileData/InstrProf.h"
#include "llvm/ProfileData/MemProf.h"
#include "llvm/ProfileData/ProfileCommon.h"
#include "llvm/ProfileData/SymbolRemappingReader.h"
#include "llvm/Support/Endian.h"
#include "llvm/Support/Error.h"
#include "llvm/Support/ErrorOr.h"
#include "llvm/Support/FormatVariadic.h"
#include "llvm/Support/MemoryBuffer.h"
#include "llvm/Support/SwapByteOrder.h"
#include "llvm/Support/VirtualFileSystem.h"
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <limits>
#include <memory>
#include <optional>
#include <system_error>
#include <utility>
#include <vector>

using namespace llvm;

// Extracts the variant information from the top 32 bits in the version and
// returns an enum specifying the variants present.
static InstrProfKind getProfileKindFromVersion(uint64_t Version) {
  InstrProfKind ProfileKind = InstrProfKind::Unknown;
  if (Version & VARIANT_MASK_IR_PROF) {
    ProfileKind |= InstrProfKind::IRInstrumentation;
  }
  if (Version & VARIANT_MASK_CSIR_PROF) {
    ProfileKind |= InstrProfKind::ContextSensitive;
  }
  if (Version & VARIANT_MASK_INSTR_ENTRY) {
    ProfileKind |= InstrProfKind::FunctionEntryInstrumentation;
  }
  if (Version & VARIANT_MASK_BYTE_COVERAGE) {
    ProfileKind |= InstrProfKind::SingleByteCoverage;
  }
  if (Version & VARIANT_MASK_FUNCTION_ENTRY_ONLY) {
    ProfileKind |= InstrProfKind::FunctionEntryOnly;
  }
  if (Version & VARIANT_MASK_MEMPROF) {
    ProfileKind |= InstrProfKind::MemProf;
  }
  if (Version & VARIANT_MASK_TEMPORAL_PROF) {
    ProfileKind |= InstrProfKind::TemporalProfile;
  }
  return ProfileKind;
}

static Expected<std::unique_ptr<MemoryBuffer>>
setupMemoryBuffer(const Twine &Filename, vfs::FileSystem &FS) {
  auto BufferOrErr = Filename.str() == "-" ? MemoryBuffer::getSTDIN()
                                           : FS.getBufferForFile(Filename);
  if (std::error_code EC = BufferOrErr.getError())
    return errorCodeToError(EC);
  return std::move(BufferOrErr.get());
}

static Error initializeReader(InstrProfReader &Reader) {
  return Reader.readHeader();
}

/// Read a list of binary ids from a profile; each entry consists of
///   a. uint64_t binary id length
///   b. uint8_t  binary id data
///   c. uint8_t  padding (if necessary)
/// This function is shared between raw and indexed profiles.
/// Raw profiles are in host-endian format, and indexed profiles are in
/// little-endian format. So, this function takes an argument indicating the
/// associated endian format to read the binary ids correctly.
static Error
readBinaryIdsInternal(const MemoryBuffer &DataBuffer,
                      ArrayRef<uint8_t> BinaryIdsBuffer,
                      std::vector<llvm::object::BuildID> &BinaryIds,
                      const llvm::endianness Endian) {
  using namespace support;

  const uint64_t BinaryIdsSize = BinaryIdsBuffer.size();
  const uint8_t *BinaryIdsStart = BinaryIdsBuffer.data();

  if (BinaryIdsSize == 0)
    return Error::success();

  const uint8_t *BI = BinaryIdsStart;
  const uint8_t *BIEnd = BinaryIdsStart + BinaryIdsSize;
  const uint8_t *End =
      reinterpret_cast<const uint8_t *>(DataBuffer.getBufferEnd());

  while (BI < BIEnd) {
    size_t Remaining = BIEnd - BI;
    // There should be enough left to read the binary id length.
    if (Remaining < sizeof(uint64_t))
      return make_error<InstrProfError>(
          instrprof_error::malformed,
          "not enough data to read binary id length");

    uint64_t BILen = endian::readNext<uint64_t>(BI, Endian);
    if (BILen == 0)
      return make_error<InstrProfError>(instrprof_error::malformed,
                                        "binary id length is 0");

    Remaining = BIEnd - BI;
    // There should be enough left to read the binary id data.
    if (Remaining < alignToPowerOf2(BILen, sizeof(uint64_t)))
      return make_error<InstrProfError>(
          instrprof_error::malformed, "not enough data to read binary id data");

    // Add binary id to the binary ids list.
    BinaryIds.push_back(object::BuildID(BI, BI + BILen));

    // Increment by the binary id data length, which is aligned to the size of
    // uint64_t.
    BI += alignToPowerOf2(BILen, sizeof(uint64_t));
    if (BI > End)
      return make_error<InstrProfError>(
          instrprof_error::malformed,
          "binary id section is greater than buffer size");
  }

  return Error::success();
}

static void printBinaryIdsInternal(raw_ostream &OS,
                                   ArrayRef<llvm::object::BuildID> BinaryIds) {
  OS << "Binary IDs: \n";
  for (const auto &BI : BinaryIds) {
    for (auto I : BI)
      OS << format("%02x", I);
    OS << "\n";
  }
}

Expected<std::unique_ptr<InstrProfReader>>
InstrProfReader::create(const Twine &Path, vfs::FileSystem &FS,
                        const InstrProfCorrelator *Correlator,
                        std::function<void(Error)> Warn) {
  // Set up the buffer to read.
  auto BufferOrError = setupMemoryBuffer(Path, FS);
  if (Error E = BufferOrError.takeError())
    return std::move(E);
  return InstrProfReader::create(std::move(BufferOrError.get()), Correlator,
                                 Warn);
}

Expected<std::unique_ptr<InstrProfReader>>
InstrProfReader::create(std::unique_ptr<MemoryBuffer> Buffer,
                        const InstrProfCorrelator *Correlator,
                        std::function<void(Error)> Warn) {
  if (Buffer->getBufferSize() == 0)
    return make_error<InstrProfError>(instrprof_error::empty_raw_profile);

  std::unique_ptr<InstrProfReader> Result;
  // Create the reader.
  if (IndexedInstrProfReader::hasFormat(*Buffer))
    Result.reset(new IndexedInstrProfReader(std::move(Buffer)));
  else if (RawInstrProfReader64::hasFormat(*Buffer))
    Result.reset(new RawInstrProfReader64(std::move(Buffer), Correlator, Warn));
  else if (RawInstrProfReader32::hasFormat(*Buffer))
    Result.reset(new RawInstrProfReader32(std::move(Buffer), Correlator, Warn));
  else if (TextInstrProfReader::hasFormat(*Buffer))
    Result.reset(new TextInstrProfReader(std::move(Buffer)));
  else
    return make_error<InstrProfError>(instrprof_error::unrecognized_format);

  // Initialize the reader and return the result.
  if (Error E = initializeReader(*Result))
    return std::move(E);

  return std::move(Result);
}

Expected<std::unique_ptr<IndexedInstrProfReader>>
IndexedInstrProfReader::create(const Twine &Path, vfs::FileSystem &FS,
                               const Twine &RemappingPath) {
  // Set up the buffer to read.
  auto BufferOrError = setupMemoryBuffer(Path, FS);
  if (Error E = BufferOrError.takeError())
    return std::move(E);

  // Set up the remapping buffer if requested.
  std::unique_ptr<MemoryBuffer> RemappingBuffer;
  std::string RemappingPathStr = RemappingPath.str();
  if (!RemappingPathStr.empty()) {
    auto RemappingBufferOrError = setupMemoryBuffer(RemappingPathStr, FS);
    if (Error E = RemappingBufferOrError.takeError())
      return std::move(E);
    RemappingBuffer = std::move(RemappingBufferOrError.get());
  }

  return IndexedInstrProfReader::create(std::move(BufferOrError.get()),
                                        std::move(RemappingBuffer));
}

Expected<std::unique_ptr<IndexedInstrProfReader>>
IndexedInstrProfReader::create(std::unique_ptr<MemoryBuffer> Buffer,
                               std::unique_ptr<MemoryBuffer> RemappingBuffer) {
  // Create the reader.
  if (!IndexedInstrProfReader::hasFormat(*Buffer))
    return make_error<InstrProfError>(instrprof_error::bad_magic);
  auto Result = std::make_unique<IndexedInstrProfReader>(
      std::move(Buffer), std::move(RemappingBuffer));

  // Initialize the reader and return the result.
  if (Error E = initializeReader(*Result))
    return std::move(E);

  return std::move(Result);
}

bool TextInstrProfReader::hasFormat(const MemoryBuffer &Buffer) {
  // Verify that this really looks like plain ASCII text by checking a
  // 'reasonable' number of characters (up to profile magic size).
  size_t count = std::min(Buffer.getBufferSize(), sizeof(uint64_t));
  StringRef buffer = Buffer.getBufferStart();
  return count == 0 ||
         std::all_of(buffer.begin(), buffer.begin() + count,
                     [](char c) { return isPrint(c) || isSpace(c); });
}

// Read the profile variant flag from the header: ":FE" means this is a FE
// generated profile. ":IR" means this is an IR level profile. Any other
// string with a leading ':' is reported as a format error.
Error TextInstrProfReader::readHeader() {
  Symtab.reset(new InstrProfSymtab());

  while (Line->starts_with(":")) {
    StringRef Str = Line->substr(1);
    if (Str.equals_insensitive("ir"))
      ProfileKind |= InstrProfKind::IRInstrumentation;
    else if (Str.equals_insensitive("fe"))
      ProfileKind |= InstrProfKind::FrontendInstrumentation;
    else if (Str.equals_insensitive("csir")) {
      ProfileKind |= InstrProfKind::IRInstrumentation;
      ProfileKind |= InstrProfKind::ContextSensitive;
    } else if (Str.equals_insensitive("entry_first"))
      ProfileKind |= InstrProfKind::FunctionEntryInstrumentation;
    else if (Str.equals_insensitive("not_entry_first"))
      ProfileKind &= ~InstrProfKind::FunctionEntryInstrumentation;
    else if (Str.equals_insensitive("single_byte_coverage"))
      ProfileKind |= InstrProfKind::SingleByteCoverage;
    else if (Str.equals_insensitive("temporal_prof_traces")) {
      ProfileKind |= InstrProfKind::TemporalProfile;
      if (auto Err = readTemporalProfTraceData())
        return error(std::move(Err));
    } else
      return error(instrprof_error::bad_header);
    ++Line;
  }
  return success();
}

/// Temporal profile trace data is stored in the header immediately after
/// ":temporal_prof_traces". The first integer is the number of traces, the
/// second integer is the stream size, then the following lines are the actual
/// traces, each of which consists of a weight and a comma separated list of
/// function names.
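/// An illustrative (not normative) sketch of this section of the text format,
/// matching the parsing below -- lines beginning with '#' are comments skipped
/// by the line iterator:
///   :temporal_prof_traces
///   # Num Traces
///   1
///   # Trace Stream Size
///   2
///   # Weight, then a comma separated list of function names
///   1
///   main, foo, bar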
Error TextInstrProfReader::readTemporalProfTraceData() {
  if ((++Line).is_at_end())
    return error(instrprof_error::eof);

  uint32_t NumTraces;
  if (Line->getAsInteger(0, NumTraces))
    return error(instrprof_error::malformed);

  if ((++Line).is_at_end())
    return error(instrprof_error::eof);

  if (Line->getAsInteger(0, TemporalProfTraceStreamSize))
    return error(instrprof_error::malformed);

  for (uint32_t i = 0; i < NumTraces; i++) {
    if ((++Line).is_at_end())
      return error(instrprof_error::eof);

    TemporalProfTraceTy Trace;
    if (Line->getAsInteger(0, Trace.Weight))
      return error(instrprof_error::malformed);

    if ((++Line).is_at_end())
      return error(instrprof_error::eof);

    SmallVector<StringRef> FuncNames;
    Line->split(FuncNames, ",", /*MaxSplit=*/-1, /*KeepEmpty=*/false);
    for (auto &FuncName : FuncNames)
      Trace.FunctionNameRefs.push_back(
          IndexedInstrProf::ComputeHash(FuncName.trim()));
    TemporalProfTraces.push_back(std::move(Trace));
  }
  return success();
}

Error
TextInstrProfReader::readValueProfileData(InstrProfRecord &Record) {
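  // In the text format, value profile data (when present) follows the counter
  // values and is laid out as: the number of value kinds, then for each kind
  // its numeric ID and its number of value sites, and for each site the number
  // of value records followed by one value:count pair per line (the value is a
  // function or vtable name for indirect call and vtable targets). This
  // mirrors the macros and loops below.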

#define CHECK_LINE_END(Line)                                                   \
  if (Line.is_at_end())                                                        \
    return error(instrprof_error::truncated);
#define READ_NUM(Str, Dst)                                                     \
  if ((Str).getAsInteger(10, (Dst)))                                           \
    return error(instrprof_error::malformed);
#define VP_READ_ADVANCE(Val)                                                   \
  CHECK_LINE_END(Line);                                                        \
  uint32_t Val;                                                                \
  READ_NUM((*Line), (Val));                                                    \
  Line++;

  if (Line.is_at_end())
    return success();

  uint32_t NumValueKinds;
  if (Line->getAsInteger(10, NumValueKinds)) {
    // No value profile data
    return success();
  }
  if (NumValueKinds == 0 || NumValueKinds > IPVK_Last + 1)
    return error(instrprof_error::malformed,
                 "number of value kinds is invalid");
  Line++;

  for (uint32_t VK = 0; VK < NumValueKinds; VK++) {
    VP_READ_ADVANCE(ValueKind);
    if (ValueKind > IPVK_Last)
      return error(instrprof_error::malformed, "value kind is invalid");
    VP_READ_ADVANCE(NumValueSites);
    if (!NumValueSites)
      continue;

    Record.reserveSites(VK, NumValueSites);
    for (uint32_t S = 0; S < NumValueSites; S++) {
      VP_READ_ADVANCE(NumValueData);

      std::vector<InstrProfValueData> CurrentValues;
      for (uint32_t V = 0; V < NumValueData; V++) {
        CHECK_LINE_END(Line);
        std::pair<StringRef, StringRef> VD = Line->rsplit(':');
        uint64_t TakenCount, Value;
        if (ValueKind == IPVK_IndirectCallTarget) {
          if (InstrProfSymtab::isExternalSymbol(VD.first)) {
            Value = 0;
          } else {
            if (Error E = Symtab->addFuncName(VD.first))
              return E;
            Value = IndexedInstrProf::ComputeHash(VD.first);
          }
        } else if (ValueKind == IPVK_VTableTarget) {
          if (InstrProfSymtab::isExternalSymbol(VD.first))
            Value = 0;
          else {
            if (Error E = Symtab->addVTableName(VD.first))
              return E;
            Value = IndexedInstrProf::ComputeHash(VD.first);
          }
        } else {
          READ_NUM(VD.first, Value);
        }
        READ_NUM(VD.second, TakenCount);
        CurrentValues.push_back({Value, TakenCount});
        Line++;
      }
      assert(CurrentValues.size() == NumValueData);
      Record.addValueData(ValueKind, S, CurrentValues, nullptr);
    }
  }
  return success();

#undef CHECK_LINE_END
#undef READ_NUM
#undef VP_READ_ADVANCE
}

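// For reference, a single record in the text format consists of the function
// name, the function hash, the number of counters, and then one counter value
// per line; an optional "$<n>" line introduces <n> bitmap byte lines. A
// minimal illustrative example (the values are arbitrary):
//   main
//   784007059655560962
//   2
//   100
//   90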
Error TextInstrProfReader::readNextRecord(NamedInstrProfRecord &Record) {
  // Skip empty lines and comments.
  while (!Line.is_at_end() && (Line->empty() || Line->starts_with("#")))
    ++Line;
  // If we hit EOF while looking for a name, we're done.
  if (Line.is_at_end()) {
    return error(instrprof_error::eof);
  }

  // Read the function name.
  Record.Name = *Line++;
  if (Error E = Symtab->addFuncName(Record.Name))
    return error(std::move(E));

  // Read the function hash.
  if (Line.is_at_end())
    return error(instrprof_error::truncated);
  if ((Line++)->getAsInteger(0, Record.Hash))
    return error(instrprof_error::malformed,
                 "function hash is not a valid integer");

  // Read the number of counters.
  uint64_t NumCounters;
  if (Line.is_at_end())
    return error(instrprof_error::truncated);
  if ((Line++)->getAsInteger(10, NumCounters))
    return error(instrprof_error::malformed,
                 "number of counters is not a valid integer");
  if (NumCounters == 0)
    return error(instrprof_error::malformed, "number of counters is zero");

  // Read each counter and fill our internal storage with the values.
  Record.Clear();
  Record.Counts.reserve(NumCounters);
  for (uint64_t I = 0; I < NumCounters; ++I) {
    if (Line.is_at_end())
      return error(instrprof_error::truncated);
    uint64_t Count;
    if ((Line++)->getAsInteger(10, Count))
      return error(instrprof_error::malformed, "count is invalid");
    Record.Counts.push_back(Count);
  }

  // Bitmap byte information is indicated with a special character.
  if (Line->starts_with("$")) {
    Record.BitmapBytes.clear();
    // Read the number of bitmap bytes.
    uint64_t NumBitmapBytes;
    if ((Line++)->drop_front(1).trim().getAsInteger(0, NumBitmapBytes))
      return error(instrprof_error::malformed,
                   "number of bitmap bytes is not a valid integer");
    if (NumBitmapBytes != 0) {
      // Read each bitmap byte and fill our internal storage with the values.
      Record.BitmapBytes.reserve(NumBitmapBytes);
      for (uint8_t I = 0; I < NumBitmapBytes; ++I) {
        if (Line.is_at_end())
          return error(instrprof_error::truncated);
        uint8_t BitmapByte;
        if ((Line++)->getAsInteger(0, BitmapByte))
          return error(instrprof_error::malformed,
                       "bitmap byte is not a valid integer");
        Record.BitmapBytes.push_back(BitmapByte);
      }
    }
  }

  // Check if value profile data exists and read it if so.
  if (Error E = readValueProfileData(Record))
    return error(std::move(E));

  return success();
}

template <class IntPtrT>
InstrProfKind RawInstrProfReader<IntPtrT>::getProfileKind() const {
  return getProfileKindFromVersion(Version);
}

template <class IntPtrT>
SmallVector<TemporalProfTraceTy> &
RawInstrProfReader<IntPtrT>::getTemporalProfTraces(
    std::optional<uint64_t> Weight) {
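  // Raw profiles carry no explicit trace section; instead, readRawCounts may
  // record a per-function timestamp taken from the first counter. Synthesize a
  // single trace by ordering the functions by those timestamps.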
  if (TemporalProfTimestamps.empty()) {
    assert(TemporalProfTraces.empty());
    return TemporalProfTraces;
  }
  // Sort functions by their timestamps to build the trace.
  std::sort(TemporalProfTimestamps.begin(), TemporalProfTimestamps.end());
  TemporalProfTraceTy Trace;
  if (Weight)
    Trace.Weight = *Weight;
  for (auto &[TimestampValue, NameRef] : TemporalProfTimestamps)
    Trace.FunctionNameRefs.push_back(NameRef);
  TemporalProfTraces = {std::move(Trace)};
  return TemporalProfTraces;
}

template <class IntPtrT>
bool RawInstrProfReader<IntPtrT>::hasFormat(const MemoryBuffer &DataBuffer) {
  if (DataBuffer.getBufferSize() < sizeof(uint64_t))
    return false;
  uint64_t Magic =
      *reinterpret_cast<const uint64_t *>(DataBuffer.getBufferStart());
  return RawInstrProf::getMagic<IntPtrT>() == Magic ||
         llvm::byteswap(RawInstrProf::getMagic<IntPtrT>()) == Magic;
}

template <class IntPtrT>
Error RawInstrProfReader<IntPtrT>::readHeader() {
  if (!hasFormat(*DataBuffer))
    return error(instrprof_error::bad_magic);
  if (DataBuffer->getBufferSize() < sizeof(RawInstrProf::Header))
    return error(instrprof_error::bad_header);
  auto *Header = reinterpret_cast<const RawInstrProf::Header *>(
      DataBuffer->getBufferStart());
  ShouldSwapBytes = Header->Magic != RawInstrProf::getMagic<IntPtrT>();
  return readHeader(*Header);
}

template <class IntPtrT>
Error RawInstrProfReader<IntPtrT>::readNextHeader(const char *CurrentPos) {
  const char *End = DataBuffer->getBufferEnd();
  // Skip zero padding between profiles.
  while (CurrentPos != End && *CurrentPos == 0)
    ++CurrentPos;
  // If there's nothing left, we're done.
  if (CurrentPos == End)
    return make_error<InstrProfError>(instrprof_error::eof);
  // If there isn't enough space for another header, this is probably just
  // garbage at the end of the file.
  if (CurrentPos + sizeof(RawInstrProf::Header) > End)
    return make_error<InstrProfError>(instrprof_error::malformed,
                                      "not enough space for another header");
  // The writer ensures each profile is padded to start at an aligned address.
  if (reinterpret_cast<size_t>(CurrentPos) % alignof(uint64_t))
    return make_error<InstrProfError>(instrprof_error::malformed,
                                      "insufficient padding");
  // The magic should have the same byte order as in the previous header.
  uint64_t Magic = *reinterpret_cast<const uint64_t *>(CurrentPos);
  if (Magic != swap(RawInstrProf::getMagic<IntPtrT>()))
    return make_error<InstrProfError>(instrprof_error::bad_magic);

  // There's another profile to read, so we need to process the header.
  auto *Header = reinterpret_cast<const RawInstrProf::Header *>(CurrentPos);
  return readHeader(*Header);
}

template <class IntPtrT>
Error RawInstrProfReader<IntPtrT>::createSymtab(InstrProfSymtab &Symtab) {
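  // Build the symbol table from the function and vtable name sections, then
  // record the mapping from instrumented addresses to name hashes so that
  // value profile targets (e.g. indirect call targets) can be resolved later.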
  if (Error E = Symtab.create(StringRef(NamesStart, NamesEnd - NamesStart),
                              StringRef(VNamesStart, VNamesEnd - VNamesStart)))
    return error(std::move(E));
  for (const RawInstrProf::ProfileData<IntPtrT> *I = Data; I != DataEnd; ++I) {
    const IntPtrT FPtr = swap(I->FunctionPointer);
    if (!FPtr)
      continue;
    Symtab.mapAddress(FPtr, swap(I->NameRef));
  }

  if (VTableBegin != nullptr && VTableEnd != nullptr) {
    for (const RawInstrProf::VTableProfileData<IntPtrT> *I = VTableBegin;
         I != VTableEnd; ++I) {
      const IntPtrT VPtr = swap(I->VTablePointer);
      if (!VPtr)
        continue;
      // Map both the begin and end addresses to the name hash, since the
      // instrumented address could be anywhere in the middle. VPtr is of type
      // uint32_t or uint64_t, so 'VPtr + I->VTableSize' marks the end of the
      // vtable's address range.
      Symtab.mapVTableAddress(VPtr, VPtr + swap(I->VTableSize),
                              swap(I->VTableNameHash));
    }
  }
  return success();
}

template <class IntPtrT>
Error RawInstrProfReader<IntPtrT>::readHeader(
    const RawInstrProf::Header &Header) {
  Version = swap(Header.Version);
  if (GET_VERSION(Version) != RawInstrProf::Version)
    return error(instrprof_error::raw_profile_version_mismatch,
                 ("Profile uses raw profile format version = " +
                  Twine(GET_VERSION(Version)) +
                  "; expected version = " + Twine(RawInstrProf::Version) +
                  "\nPLEASE update this tool to the version in the raw "
                  "profile, or regenerate the raw profile with the expected "
                  "version.")
                     .str());

  uint64_t BinaryIdSize = swap(Header.BinaryIdsSize);
  // Binary ids start just after the header, if they exist.
  const uint8_t *BinaryIdStart =
      reinterpret_cast<const uint8_t *>(&Header) + sizeof(RawInstrProf::Header);
  const uint8_t *BinaryIdEnd = BinaryIdStart + BinaryIdSize;
  const uint8_t *BufferEnd = (const uint8_t *)DataBuffer->getBufferEnd();
  if (BinaryIdSize % sizeof(uint64_t) || BinaryIdEnd > BufferEnd)
    return error(instrprof_error::bad_header);
  ArrayRef<uint8_t> BinaryIdsBuffer(BinaryIdStart, BinaryIdSize);
  if (!BinaryIdsBuffer.empty()) {
    if (Error Err = readBinaryIdsInternal(*DataBuffer, BinaryIdsBuffer,
                                          BinaryIds, getDataEndianness()))
      return Err;
  }

  CountersDelta = swap(Header.CountersDelta);
  BitmapDelta = swap(Header.BitmapDelta);
  NamesDelta = swap(Header.NamesDelta);
  auto NumData = swap(Header.NumData);
  auto PaddingBytesBeforeCounters = swap(Header.PaddingBytesBeforeCounters);
  auto CountersSize = swap(Header.NumCounters) * getCounterTypeSize();
  auto PaddingBytesAfterCounters = swap(Header.PaddingBytesAfterCounters);
  auto NumBitmapBytes = swap(Header.NumBitmapBytes);
  auto PaddingBytesAfterBitmapBytes = swap(Header.PaddingBytesAfterBitmapBytes);
  auto NamesSize = swap(Header.NamesSize);
  auto VTableNameSize = swap(Header.VNamesSize);
  auto NumVTables = swap(Header.NumVTables);
  ValueKindLast = swap(Header.ValueKindLast);

  auto DataSize = NumData * sizeof(RawInstrProf::ProfileData<IntPtrT>);
  auto PaddingBytesAfterNames = getNumPaddingBytes(NamesSize);
  auto PaddingBytesAfterVTableNames = getNumPaddingBytes(VTableNameSize);

  auto VTableSectionSize =
      NumVTables * sizeof(RawInstrProf::VTableProfileData<IntPtrT>);
  auto PaddingBytesAfterVTableProfData = getNumPaddingBytes(VTableSectionSize);

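  // The offsets computed below reflect the raw profile's on-disk section
  // order: header, binary ids, per-function data, counters, bitmap bytes,
  // function names, vtable profile data, vtable names, and finally value
  // profile data, with the padding amounts recorded in (or derived from) the
  // header between sections.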
  // Profile data starts after the profile header and binary ids, if they
  // exist.
  ptrdiff_t DataOffset = sizeof(RawInstrProf::Header) + BinaryIdSize;
  ptrdiff_t CountersOffset = DataOffset + DataSize + PaddingBytesBeforeCounters;
  ptrdiff_t BitmapOffset =
      CountersOffset + CountersSize + PaddingBytesAfterCounters;
  ptrdiff_t NamesOffset =
      BitmapOffset + NumBitmapBytes + PaddingBytesAfterBitmapBytes;
  ptrdiff_t VTableProfDataOffset =
      NamesOffset + NamesSize + PaddingBytesAfterNames;
  ptrdiff_t VTableNameOffset = VTableProfDataOffset + VTableSectionSize +
                               PaddingBytesAfterVTableProfData;
  ptrdiff_t ValueDataOffset =
      VTableNameOffset + VTableNameSize + PaddingBytesAfterVTableNames;

  auto *Start = reinterpret_cast<const char *>(&Header);
  if (Start + ValueDataOffset > DataBuffer->getBufferEnd())
    return error(instrprof_error::bad_header);

  if (Correlator) {
    // These sizes in the raw file are zero because we constructed them in the
    // Correlator.
    if (!(DataSize == 0 && NamesSize == 0 && CountersDelta == 0 &&
          NamesDelta == 0))
      return error(instrprof_error::unexpected_correlation_info);
    Data = Correlator->getDataPointer();
    DataEnd = Data + Correlator->getDataSize();
    NamesStart = Correlator->getNamesPointer();
    NamesEnd = NamesStart + Correlator->getNamesSize();
  } else {
    Data = reinterpret_cast<const RawInstrProf::ProfileData<IntPtrT> *>(
        Start + DataOffset);
    DataEnd = Data + NumData;
    VTableBegin =
        reinterpret_cast<const RawInstrProf::VTableProfileData<IntPtrT> *>(
            Start + VTableProfDataOffset);
    VTableEnd = VTableBegin + NumVTables;
    NamesStart = Start + NamesOffset;
    NamesEnd = NamesStart + NamesSize;
    VNamesStart = Start + VTableNameOffset;
    VNamesEnd = VNamesStart + VTableNameSize;
  }

  CountersStart = Start + CountersOffset;
  CountersEnd = CountersStart + CountersSize;
  BitmapStart = Start + BitmapOffset;
  BitmapEnd = BitmapStart + NumBitmapBytes;
  ValueDataStart = reinterpret_cast<const uint8_t *>(Start + ValueDataOffset);

  std::unique_ptr<InstrProfSymtab> NewSymtab =
      std::make_unique<InstrProfSymtab>();
  if (Error E = createSymtab(*NewSymtab))
    return E;

  Symtab = std::move(NewSymtab);
  return success();
}

template <class IntPtrT>
Error RawInstrProfReader<IntPtrT>::readName(NamedInstrProfRecord &Record) {
  Record.Name = getName(Data->NameRef);
  return success();
}

template <class IntPtrT>
Error RawInstrProfReader<IntPtrT>::readFuncHash(NamedInstrProfRecord &Record) {
  Record.Hash = swap(Data->FuncHash);
  return success();
}

template <class IntPtrT>
Error RawInstrProfReader<IntPtrT>::readRawCounts(
    InstrProfRecord &Record) {
  uint32_t NumCounters = swap(Data->NumCounters);
  if (NumCounters == 0)
    return error(instrprof_error::malformed, "number of counters is zero");

  ptrdiff_t CounterBaseOffset = swap(Data->CounterPtr) - CountersDelta;
  if (CounterBaseOffset < 0)
    return error(
        instrprof_error::malformed,
        ("counter offset " + Twine(CounterBaseOffset) + " is negative").str());

  if (CounterBaseOffset >= CountersEnd - CountersStart)
    return error(instrprof_error::malformed,
                 ("counter offset " + Twine(CounterBaseOffset) +
                  " is greater than the maximum counter offset " +
                  Twine(CountersEnd - CountersStart - 1))
                     .str());

  uint64_t MaxNumCounters =
      (CountersEnd - (CountersStart + CounterBaseOffset)) /
      getCounterTypeSize();
  if (NumCounters > MaxNumCounters)
    return error(instrprof_error::malformed,
                 ("number of counters " + Twine(NumCounters) +
                  " is greater than the maximum number of counters " +
                  Twine(MaxNumCounters))
                     .str());

  Record.Counts.clear();
  Record.Counts.reserve(NumCounters);
  for (uint32_t I = 0; I < NumCounters; I++) {
    const char *Ptr =
        CountersStart + CounterBaseOffset + I * getCounterTypeSize();
    if (I == 0 && hasTemporalProfile()) {
      uint64_t TimestampValue = swap(*reinterpret_cast<const uint64_t *>(Ptr));
      if (TimestampValue != 0 &&
          TimestampValue != std::numeric_limits<uint64_t>::max()) {
        TemporalProfTimestamps.emplace_back(TimestampValue,
                                            swap(Data->NameRef));
        TemporalProfTraceStreamSize = 1;
      }
      if (hasSingleByteCoverage()) {
        // In coverage mode, getCounterTypeSize() returns 1 byte, but the
        // timestamp field has size uint64_t. Increment I so that the next
        // iteration of this loop points to the byte after the timestamp
        // field, i.e., I += 8.
        I += 7;
      }
      continue;
    }
    if (hasSingleByteCoverage()) {
      // A value of zero signifies the block is covered.
      Record.Counts.push_back(*Ptr == 0 ? 1 : 0);
    } else {
      uint64_t CounterValue = swap(*reinterpret_cast<const uint64_t *>(Ptr));
      if (CounterValue > MaxCounterValue && Warn)
        Warn(make_error<InstrProfError>(
            instrprof_error::counter_value_too_large, Twine(CounterValue)));

      Record.Counts.push_back(CounterValue);
    }
  }

  return success();
}

template <class IntPtrT>
Error RawInstrProfReader<IntPtrT>::readRawBitmapBytes(InstrProfRecord &Record) {
  uint32_t NumBitmapBytes = swap(Data->NumBitmapBytes);

  Record.BitmapBytes.clear();
  Record.BitmapBytes.reserve(NumBitmapBytes);

  // It's possible MCDC is either not enabled or only used for some functions
  // and not others. So if we record 0 bytes, just move on.
  if (NumBitmapBytes == 0)
    return success();

  // BitmapDelta decreases as we advance to the next data record.
  ptrdiff_t BitmapOffset = swap(Data->BitmapPtr) - BitmapDelta;
  if (BitmapOffset < 0)
    return error(
        instrprof_error::malformed,
        ("bitmap offset " + Twine(BitmapOffset) + " is negative").str());

  if (BitmapOffset >= BitmapEnd - BitmapStart)
    return error(instrprof_error::malformed,
                 ("bitmap offset " + Twine(BitmapOffset) +
                  " is greater than the maximum bitmap offset " +
                  Twine(BitmapEnd - BitmapStart - 1))
                     .str());

  uint64_t MaxNumBitmapBytes =
      (BitmapEnd - (BitmapStart + BitmapOffset)) / sizeof(uint8_t);
  if (NumBitmapBytes > MaxNumBitmapBytes)
    return error(instrprof_error::malformed,
                 ("number of bitmap bytes " + Twine(NumBitmapBytes) +
                  " is greater than the maximum number of bitmap bytes " +
                  Twine(MaxNumBitmapBytes))
                     .str());

  for (uint32_t I = 0; I < NumBitmapBytes; I++) {
    const char *Ptr = BitmapStart + BitmapOffset + I;
    Record.BitmapBytes.push_back(swap(*Ptr));
  }

  return success();
}

template <class IntPtrT>
Error RawInstrProfReader<IntPtrT>::readValueProfilingData(
    InstrProfRecord &Record) {
  Record.clearValueData();
  CurValueDataSize = 0;
  // Need to match the logic in the value profile dumper code in compiler-rt:
  uint32_t NumValueKinds = 0;
  for (uint32_t I = 0; I < IPVK_Last + 1; I++)
    NumValueKinds += (Data->NumValueSites[I] != 0);

  if (!NumValueKinds)
    return success();

  Expected<std::unique_ptr<ValueProfData>> VDataPtrOrErr =
      ValueProfData::getValueProfData(
          ValueDataStart, (const unsigned char *)DataBuffer->getBufferEnd(),
          getDataEndianness());

  if (Error E = VDataPtrOrErr.takeError())
    return E;

  // Note that besides deserialization, this also performs the conversion for
  // indirect call targets. The function pointers from the raw profile are
  // remapped into function name hashes.
  VDataPtrOrErr.get()->deserializeTo(Record, Symtab.get());
  CurValueDataSize = VDataPtrOrErr.get()->getSize();
  return success();
}

template <class IntPtrT>
Error RawInstrProfReader<IntPtrT>::readNextRecord(NamedInstrProfRecord &Record) {
  // Keep reading profiles that consist of only headers and no profile data or
  // counters.
  while (atEnd())
    // At this point, the ValueDataStart field points to the next header.
    if (Error E = readNextHeader(getNextHeaderPos()))
      return error(std::move(E));

  // Read the name and set it in Record.
  if (Error E = readName(Record))
    return error(std::move(E));

  // Read FuncHash and set it in Record.
  if (Error E = readFuncHash(Record))
    return error(std::move(E));

  // Read raw counts and set Record.
  if (Error E = readRawCounts(Record))
    return error(std::move(E));

  // Read raw bitmap bytes and set Record.
  if (Error E = readRawBitmapBytes(Record))
    return error(std::move(E));

  // Read value data and set Record.
  if (Error E = readValueProfilingData(Record))
    return error(std::move(E));

  // Iterate.
  advanceData();
  return success();
}

template <class IntPtrT>
Error RawInstrProfReader<IntPtrT>::readBinaryIds(
    std::vector<llvm::object::BuildID> &BinaryIds) {
  BinaryIds.insert(BinaryIds.begin(), this->BinaryIds.begin(),
                   this->BinaryIds.end());
  return Error::success();
}

template <class IntPtrT>
Error RawInstrProfReader<IntPtrT>::printBinaryIds(raw_ostream &OS) {
  if (!BinaryIds.empty())
    printBinaryIdsInternal(OS, BinaryIds);
  return Error::success();
}

namespace llvm {

template class RawInstrProfReader<uint32_t>;
template class RawInstrProfReader<uint64_t>;

} // end namespace llvm

InstrProfLookupTrait::hash_value_type
InstrProfLookupTrait::ComputeHash(StringRef K) {
  return IndexedInstrProf::ComputeHash(HashType, K);
}

using data_type = InstrProfLookupTrait::data_type;
using offset_type = InstrProfLookupTrait::offset_type;

bool InstrProfLookupTrait::readValueProfilingData(
    const unsigned char *&D, const unsigned char *const End) {
  Expected<std::unique_ptr<ValueProfData>> VDataPtrOrErr =
      ValueProfData::getValueProfData(D, End, ValueProfDataEndianness);

  if (VDataPtrOrErr.takeError())
    return false;

  VDataPtrOrErr.get()->deserializeTo(DataBuffer.back(), nullptr);
  D += VDataPtrOrErr.get()->TotalSize;

  return true;
}

data_type InstrProfLookupTrait::ReadData(StringRef K, const unsigned char *D,
                                         offset_type N) {
  using namespace support;

  // Check if the data is corrupt. If so, don't try to read it.
  if (N % sizeof(uint64_t))
    return data_type();

  DataBuffer.clear();
  std::vector<uint64_t> CounterBuffer;
  std::vector<uint8_t> BitmapByteBuffer;

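  // Each record in the key's data is a sequence of little-endian uint64_t
  // words: the function hash, the number of counters (format versions newer
  // than 1), the counter values, the bitmap byte count and the bitmap bytes,
  // each stored in its own word (versions newer than 10), and finally the
  // value profile data (versions newer than 2).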
  const unsigned char *End = D + N;
  while (D < End) {
    // Read hash.
    if (D + sizeof(uint64_t) >= End)
      return data_type();
    uint64_t Hash = endian::readNext<uint64_t, llvm::endianness::little>(D);

    // Initialize number of counters for GET_VERSION(FormatVersion) == 1.
    uint64_t CountsSize = N / sizeof(uint64_t) - 1;
    // If the format version is different, read the number of counters.
    if (GET_VERSION(FormatVersion) != IndexedInstrProf::ProfVersion::Version1) {
      if (D + sizeof(uint64_t) > End)
        return data_type();
      CountsSize = endian::readNext<uint64_t, llvm::endianness::little>(D);
    }
    // Read counter values.
    if (D + CountsSize * sizeof(uint64_t) > End)
      return data_type();

    CounterBuffer.clear();
    CounterBuffer.reserve(CountsSize);
    for (uint64_t J = 0; J < CountsSize; ++J)
      CounterBuffer.push_back(
          endian::readNext<uint64_t, llvm::endianness::little>(D));

    // Read bitmap bytes for GET_VERSION(FormatVersion) > 10.
    if (GET_VERSION(FormatVersion) > IndexedInstrProf::ProfVersion::Version10) {
      uint64_t BitmapBytes = 0;
      if (D + sizeof(uint64_t) > End)
        return data_type();
      BitmapBytes = endian::readNext<uint64_t, llvm::endianness::little>(D);
      // Read bitmap byte values.
      if (D + BitmapBytes * sizeof(uint8_t) > End)
        return data_type();
      BitmapByteBuffer.clear();
      BitmapByteBuffer.reserve(BitmapBytes);
      for (uint64_t J = 0; J < BitmapBytes; ++J)
        BitmapByteBuffer.push_back(static_cast<uint8_t>(
            endian::readNext<uint64_t, llvm::endianness::little>(D)));
    }

    DataBuffer.emplace_back(K, Hash, std::move(CounterBuffer),
                            std::move(BitmapByteBuffer));

    // Read value profiling data.
    if (GET_VERSION(FormatVersion) > IndexedInstrProf::ProfVersion::Version2 &&
        !readValueProfilingData(D, End)) {
      DataBuffer.clear();
      return data_type();
    }
  }
  return DataBuffer;
}

template <typename HashTableImpl>
Error InstrProfReaderIndex<HashTableImpl>::getRecords(
    StringRef FuncName, ArrayRef<NamedInstrProfRecord> &Data) {
  auto Iter = HashTable->find(FuncName);
  if (Iter == HashTable->end())
    return make_error<InstrProfError>(instrprof_error::unknown_function);

  Data = (*Iter);
  if (Data.empty())
    return make_error<InstrProfError>(instrprof_error::malformed,
                                      "profile data is empty");

  return Error::success();
}

template <typename HashTableImpl>
Error InstrProfReaderIndex<HashTableImpl>::getRecords(
    ArrayRef<NamedInstrProfRecord> &Data) {
  if (atEnd())
    return make_error<InstrProfError>(instrprof_error::eof);

  Data = *RecordIterator;

  if (Data.empty())
    return make_error<InstrProfError>(instrprof_error::malformed,
                                      "profile data is empty");

  return Error::success();
}

template <typename HashTableImpl>
InstrProfReaderIndex<HashTableImpl>::InstrProfReaderIndex(
    const unsigned char *Buckets, const unsigned char *const Payload,
    const unsigned char *const Base, IndexedInstrProf::HashT HashType,
    uint64_t Version) {
  FormatVersion = Version;
  HashTable.reset(HashTableImpl::Create(
      Buckets, Payload, Base,
      typename HashTableImpl::InfoType(HashType, Version)));
  RecordIterator = HashTable->data_begin();
}

template <typename HashTableImpl>
InstrProfKind InstrProfReaderIndex<HashTableImpl>::getProfileKind() const {
  return getProfileKindFromVersion(FormatVersion);
}

namespace {
/// A remapper that does not apply any remappings.
class InstrProfReaderNullRemapper : public InstrProfReaderRemapper {
  InstrProfReaderIndexBase &Underlying;

public:
  InstrProfReaderNullRemapper(InstrProfReaderIndexBase &Underlying)
      : Underlying(Underlying) {}

  Error getRecords(StringRef FuncName,
                   ArrayRef<NamedInstrProfRecord> &Data) override {
    return Underlying.getRecords(FuncName, Data);
  }
};
} // namespace

/// A remapper that applies remappings based on a symbol remapping file.
template <typename HashTableImpl>
class llvm::InstrProfReaderItaniumRemapper
    : public InstrProfReaderRemapper {
public:
  InstrProfReaderItaniumRemapper(
      std::unique_ptr<MemoryBuffer> RemapBuffer,
      InstrProfReaderIndex<HashTableImpl> &Underlying)
      : RemapBuffer(std::move(RemapBuffer)), Underlying(Underlying) {
  }

  /// Extract the original function name from a PGO function name.
  static StringRef extractName(StringRef Name) {
    // We can have multiple pieces separated by kGlobalIdentifierDelimiter (
    // semicolon now and colon in older profiles); there can be pieces both
    // before and after the mangled name. Find the first part that starts with
    // '_Z'; we'll assume that's the mangled name we want.
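    // For example, a local-linkage entry such as "main.cpp;_Z3foov"
    // (illustrative) yields "_Z3foov".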
    std::pair<StringRef, StringRef> Parts = {StringRef(), Name};
    while (true) {
      Parts = Parts.second.split(GlobalIdentifierDelimiter);
      if (Parts.first.starts_with("_Z"))
        return Parts.first;
      if (Parts.second.empty())
        return Name;
    }
  }

  /// Given a mangled name extracted from a PGO function name, and a new
  /// form for that mangled name, reconstitute the name.
  static void reconstituteName(StringRef OrigName, StringRef ExtractedName,
                               StringRef Replacement,
                               SmallVectorImpl<char> &Out) {
    Out.reserve(OrigName.size() + Replacement.size() - ExtractedName.size());
    Out.insert(Out.end(), OrigName.begin(), ExtractedName.begin());
    Out.insert(Out.end(), Replacement.begin(), Replacement.end());
    Out.insert(Out.end(), ExtractedName.end(), OrigName.end());
  }

  Error populateRemappings() override {
    if (Error E = Remappings.read(*RemapBuffer))
      return E;
    for (StringRef Name : Underlying.HashTable->keys()) {
      StringRef RealName = extractName(Name);
      if (auto Key = Remappings.insert(RealName)) {
        // FIXME: We could theoretically map the same equivalence class to
        // multiple names in the profile data. If that happens, we should
        // return NamedInstrProfRecords from all of them.
        MappedNames.insert({Key, RealName});
      }
    }
    return Error::success();
  }

  Error getRecords(StringRef FuncName,
                   ArrayRef<NamedInstrProfRecord> &Data) override {
    StringRef RealName = extractName(FuncName);
    if (auto Key = Remappings.lookup(RealName)) {
      StringRef Remapped = MappedNames.lookup(Key);
      if (!Remapped.empty()) {
        if (RealName.begin() == FuncName.begin() &&
            RealName.end() == FuncName.end())
          FuncName = Remapped;
        else {
          // Try rebuilding the name from the given remapping.
          SmallString<256> Reconstituted;
          reconstituteName(FuncName, RealName, Remapped, Reconstituted);
          Error E = Underlying.getRecords(Reconstituted, Data);
          if (!E)
            return E;

          // If we failed because the name doesn't exist, fall back to asking
          // about the original name.
          if (Error Unhandled = handleErrors(
                  std::move(E), [](std::unique_ptr<InstrProfError> Err) {
                    return Err->get() == instrprof_error::unknown_function
                               ? Error::success()
                               : Error(std::move(Err));
                  }))
            return Unhandled;
        }
      }
    }
    return Underlying.getRecords(FuncName, Data);
  }

private:
  /// The memory buffer containing the remapping configuration. Remappings
  /// holds pointers into this buffer.
  std::unique_ptr<MemoryBuffer> RemapBuffer;

  /// The mangling remapper.
  SymbolRemappingReader Remappings;

  /// Mapping from mangled name keys to the name used for the key in the
  /// profile data.
  /// FIXME: Can we store a location within the on-disk hash table instead of
  /// redoing lookup?
  DenseMap<SymbolRemappingReader::Key, StringRef> MappedNames;

  /// The real profile data reader.
  InstrProfReaderIndex<HashTableImpl> &Underlying;
};

bool IndexedInstrProfReader::hasFormat(const MemoryBuffer &DataBuffer) {
  using namespace support;

  if (DataBuffer.getBufferSize() < 8)
    return false;
  uint64_t Magic = endian::read<uint64_t, llvm::endianness::little, aligned>(
      DataBuffer.getBufferStart());
  // Verify that it's magical.
  return Magic == IndexedInstrProf::Magic;
}

const unsigned char *
IndexedInstrProfReader::readSummary(IndexedInstrProf::ProfVersion Version,
                                    const unsigned char *Cur, bool UseCS) {
  using namespace IndexedInstrProf;
  using namespace support;

  if (Version >= IndexedInstrProf::Version4) {
    const IndexedInstrProf::Summary *SummaryInLE =
        reinterpret_cast<const IndexedInstrProf::Summary *>(Cur);
    uint64_t NFields = endian::byte_swap<uint64_t, llvm::endianness::little>(
        SummaryInLE->NumSummaryFields);
    uint64_t NEntries = endian::byte_swap<uint64_t, llvm::endianness::little>(
        SummaryInLE->NumCutoffEntries);
    uint32_t SummarySize =
        IndexedInstrProf::Summary::getSize(NFields, NEntries);
    std::unique_ptr<IndexedInstrProf::Summary> SummaryData =
        IndexedInstrProf::allocSummary(SummarySize);

    const uint64_t *Src = reinterpret_cast<const uint64_t *>(SummaryInLE);
    uint64_t *Dst = reinterpret_cast<uint64_t *>(SummaryData.get());
    for (unsigned I = 0; I < SummarySize / sizeof(uint64_t); I++)
      Dst[I] = endian::byte_swap<uint64_t, llvm::endianness::little>(Src[I]);

    SummaryEntryVector DetailedSummary;
    for (unsigned I = 0; I < SummaryData->NumCutoffEntries; I++) {
      const IndexedInstrProf::Summary::Entry &Ent = SummaryData->getEntry(I);
      DetailedSummary.emplace_back((uint32_t)Ent.Cutoff, Ent.MinBlockCount,
                                   Ent.NumBlocks);
    }
    std::unique_ptr<llvm::ProfileSummary> &Summary =
        UseCS ? this->CS_Summary : this->Summary;

    // Initialize InstrProfSummary using the SummaryData from disk.
    Summary = std::make_unique<ProfileSummary>(
        UseCS ? ProfileSummary::PSK_CSInstr : ProfileSummary::PSK_Instr,
        DetailedSummary, SummaryData->get(Summary::TotalBlockCount),
        SummaryData->get(Summary::MaxBlockCount),
        SummaryData->get(Summary::MaxInternalBlockCount),
        SummaryData->get(Summary::MaxFunctionCount),
        SummaryData->get(Summary::TotalNumBlocks),
        SummaryData->get(Summary::TotalNumFunctions));
    return Cur + SummarySize;
  } else {
    // The older versions do not support a profile summary. This just computes
    // an empty summary, which will not result in accurate hot/cold detection.
    // We would need to call addRecord for all NamedInstrProfRecords to get the
    // correct summary. However, this version is old (prior to early 2016) and
    // has not been supporting an accurate summary for several years.
    InstrProfSummaryBuilder Builder(ProfileSummaryBuilder::DefaultCutoffs);
    Summary = Builder.getSummary();
    return Cur;
  }
}

Error IndexedMemProfReader::deserializeV012(const unsigned char *Start,
                                            const unsigned char *Ptr,
                                            uint64_t FirstWord) {
  // The value returned from RecordTableGenerator.Emit.
  const uint64_t RecordTableOffset =
      Version == memprof::Version0
          ? FirstWord
          : support::endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
  // The offset in the stream right before invoking
  // FrameTableGenerator.Emit.
  const uint64_t FramePayloadOffset =
      support::endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
  // The value returned from FrameTableGenerator.Emit.
  const uint64_t FrameTableOffset =
      support::endian::readNext<uint64_t, llvm::endianness::little>(Ptr);

  // The offset in the stream right before invoking
  // CallStackTableGenerator.Emit.
  uint64_t CallStackPayloadOffset = 0;
  // The value returned from CallStackTableGenerator.Emit.
  uint64_t CallStackTableOffset = 0;
  if (Version >= memprof::Version2) {
    CallStackPayloadOffset =
        support::endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
    CallStackTableOffset =
        support::endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
  }

  // Read the schema.
  auto SchemaOr = memprof::readMemProfSchema(Ptr);
  if (!SchemaOr)
    return SchemaOr.takeError();
  Schema = SchemaOr.get();

  // Now initialize the table reader with a pointer into the data buffer.
  MemProfRecordTable.reset(MemProfRecordHashTable::Create(
      /*Buckets=*/Start + RecordTableOffset,
      /*Payload=*/Ptr,
      /*Base=*/Start, memprof::RecordLookupTrait(Version, Schema)));

  // Initialize the frame table reader with the payload and bucket offsets.
  MemProfFrameTable.reset(MemProfFrameHashTable::Create(
      /*Buckets=*/Start + FrameTableOffset,
      /*Payload=*/Start + FramePayloadOffset,
      /*Base=*/Start));

  if (Version >= memprof::Version2)
    MemProfCallStackTable.reset(MemProfCallStackHashTable::Create(
        /*Buckets=*/Start + CallStackTableOffset,
        /*Payload=*/Start + CallStackPayloadOffset,
        /*Base=*/Start));

  return Error::success();
}

Error IndexedMemProfReader::deserializeV3(const unsigned char *Start,
                                          const unsigned char *Ptr) {
  // The offset in the stream right before invoking
  // CallStackTableGenerator.Emit.
  const uint64_t CallStackPayloadOffset =
      support::endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
  // The offset in the stream right before invoking RecordTableGenerator.Emit.
  const uint64_t RecordPayloadOffset =
      support::endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
  // The value returned from RecordTableGenerator.Emit.
  const uint64_t RecordTableOffset =
      support::endian::readNext<uint64_t, llvm::endianness::little>(Ptr);

  // Read the schema.
  auto SchemaOr = memprof::readMemProfSchema(Ptr);
  if (!SchemaOr)
    return SchemaOr.takeError();
  Schema = SchemaOr.get();

  FrameBase = Ptr;
  CallStackBase = Start + CallStackPayloadOffset;

  // Now initialize the table reader with a pointer into the data buffer.
  MemProfRecordTable.reset(MemProfRecordHashTable::Create(
      /*Buckets=*/Start + RecordTableOffset,
      /*Payload=*/Start + RecordPayloadOffset,
      /*Base=*/Start, memprof::RecordLookupTrait(memprof::Version3, Schema)));

  return Error::success();
}

Error IndexedMemProfReader::deserialize(const unsigned char *Start,
                                        uint64_t MemProfOffset) {
  const unsigned char *Ptr = Start + MemProfOffset;

  // Read the first 64-bit word, which may be RecordTableOffset in
  // memprof::MemProfVersion0 or the MemProf version number in
  // memprof::MemProfVersion1 and above.
  const uint64_t FirstWord =
      support::endian::readNext<uint64_t, llvm::endianness::little>(Ptr);

  if (FirstWord == memprof::Version1 || FirstWord == memprof::Version2 ||
      FirstWord == memprof::Version3) {
    // Everything is good. We can proceed to deserialize the rest.
    Version = static_cast<memprof::IndexedVersion>(FirstWord);
  } else if (FirstWord >= 24) {
    // This is a heuristic/hack to detect memprof::MemProfVersion0,
    // which does not have a version field in the header.
    // In memprof::MemProfVersion0, FirstWord will be RecordTableOffset,
    // which should be at least 24 because of the MemProf header size.
    Version = memprof::Version0;
  } else {
    return make_error<InstrProfError>(
        instrprof_error::unsupported_version,
        formatv("MemProf version {} not supported; "
                "requires version between {} and {}, inclusive",
                FirstWord, memprof::MinimumSupportedVersion,
                memprof::MaximumSupportedVersion));
  }

  switch (Version) {
  case memprof::Version0:
  case memprof::Version1:
  case memprof::Version2:
    if (Error E = deserializeV012(Start, Ptr, FirstWord))
      return E;
    break;
  case memprof::Version3:
    if (Error E = deserializeV3(Start, Ptr))
      return E;
    break;
  }

#ifdef EXPENSIVE_CHECKS
  // Go through all the records and verify that CSId has been correctly
  // populated. Do this only under EXPENSIVE_CHECKS. Otherwise, we
  // would defeat the purpose of OnDiskIterableChainedHashTable.
  // Note that we can compare CSId against actual call stacks only for
  // Version0 and Version1 because IndexedAllocationInfo::CallStack and
  // IndexedMemProfRecord::CallSites are not populated in Version2.
  if (Version <= memprof::Version1)
    for (const auto &Record : MemProfRecordTable->data())
      verifyIndexedMemProfRecord(Record);
#endif

  return Error::success();
}

Error IndexedInstrProfReader::readHeader() {
  using namespace support;

  const unsigned char *Start =
      (const unsigned char *)DataBuffer->getBufferStart();
  const unsigned char *Cur = Start;
  if ((const unsigned char *)DataBuffer->getBufferEnd() - Cur < 24)
    return error(instrprof_error::truncated);

  auto HeaderOr = IndexedInstrProf::Header::readFromBuffer(Start);
  if (!HeaderOr)
    return HeaderOr.takeError();

  const IndexedInstrProf::Header *Header = &HeaderOr.get();
  Cur += Header->size();

  Cur = readSummary((IndexedInstrProf::ProfVersion)Header->Version, Cur,
                    /* UseCS */ false);
  if (Header->Version & VARIANT_MASK_CSIR_PROF)
    Cur = readSummary((IndexedInstrProf::ProfVersion)Header->Version, Cur,
                      /* UseCS */ true);
  // Read the hash type and start offset.
  IndexedInstrProf::HashT HashType =
      static_cast<IndexedInstrProf::HashT>(Header->HashType);
  if (HashType > IndexedInstrProf::HashT::Last)
    return error(instrprof_error::unsupported_hash_type);

  // The hash table with profile counts comes next.
  auto IndexPtr = std::make_unique<InstrProfReaderIndex<OnDiskHashTableImplV3>>(
      Start + Header->HashOffset, Cur, Start, HashType, Header->Version);

  // The MemProfOffset field in the header is only valid when the format
  // version is higher than 8 (when it was introduced).
  if (Header->getIndexedProfileVersion() >= 8 &&
      Header->Version & VARIANT_MASK_MEMPROF) {
    if (Error E = MemProfReader.deserialize(Start, Header->MemProfOffset))
      return E;
  }

  // The BinaryIdOffset field in the header is only valid when the format
  // version is higher than 9 (when it was introduced).
  if (Header->getIndexedProfileVersion() >= 9) {
    const unsigned char *Ptr = Start + Header->BinaryIdOffset;
    // Read the binary ids size.
    uint64_t BinaryIdsSize =
        support::endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
    if (BinaryIdsSize % sizeof(uint64_t))
      return error(instrprof_error::bad_header);
    // Set the binary ids start.
    BinaryIdsBuffer = ArrayRef<uint8_t>(Ptr, BinaryIdsSize);
    if (Ptr > (const unsigned char *)DataBuffer->getBufferEnd())
      return make_error<InstrProfError>(instrprof_error::malformed,
                                        "corrupted binary ids");
  }

  if (Header->getIndexedProfileVersion() >= 12) {
    const unsigned char *Ptr = Start + Header->VTableNamesOffset;

    uint64_t CompressedVTableNamesLen =
        support::endian::readNext<uint64_t, llvm::endianness::little>(Ptr);

    // The writer first writes the length of the compressed string, and then
    // the actual content.
    const char *VTableNamePtr = (const char *)Ptr;
    if (VTableNamePtr > (const char *)DataBuffer->getBufferEnd())
      return make_error<InstrProfError>(instrprof_error::truncated);

    VTableName = StringRef(VTableNamePtr, CompressedVTableNamesLen);
  }

  if (Header->getIndexedProfileVersion() >= 10 &&
      Header->Version & VARIANT_MASK_TEMPORAL_PROF) {
    const unsigned char *Ptr = Start + Header->TemporalProfTracesOffset;
    const auto *PtrEnd = (const unsigned char *)DataBuffer->getBufferEnd();
    // Expect at least two 64 bit fields: NumTraces and TraceStreamSize.
    if (Ptr + 2 * sizeof(uint64_t) > PtrEnd)
      return error(instrprof_error::truncated);
    const uint64_t NumTraces =
        support::endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
    TemporalProfTraceStreamSize =
        support::endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
    for (unsigned i = 0; i < NumTraces; i++) {
      // Expect at least two 64 bit fields: Weight and NumFunctions.
      if (Ptr + 2 * sizeof(uint64_t) > PtrEnd)
        return error(instrprof_error::truncated);
      TemporalProfTraceTy Trace;
      Trace.Weight =
          support::endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
      const uint64_t NumFunctions =
          support::endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
      // Expect at least NumFunctions 64 bit fields.
      if (Ptr + NumFunctions * sizeof(uint64_t) > PtrEnd)
        return error(instrprof_error::truncated);
      for (unsigned j = 0; j < NumFunctions; j++) {
        const uint64_t NameRef =
            support::endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
        Trace.FunctionNameRefs.push_back(NameRef);
      }
      TemporalProfTraces.push_back(std::move(Trace));
    }
  }

  // Load the remapping table now if requested.
  if (RemappingBuffer) {
    Remapper =
        std::make_unique<InstrProfReaderItaniumRemapper<OnDiskHashTableImplV3>>(
            std::move(RemappingBuffer), *IndexPtr);
    if (Error E = Remapper->populateRemappings())
      return E;
  } else {
    Remapper = std::make_unique<InstrProfReaderNullRemapper>(*IndexPtr);
  }
  Index = std::move(IndexPtr);

  return success();
}
1460
1461InstrProfSymtab &IndexedInstrProfReader::getSymtab() {
1462 if (Symtab)
1463 return *Symtab;
1464
1465 auto NewSymtab = std::make_unique<InstrProfSymtab>();
1466
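  // getSymtab() cannot fail, so any errors hit while populating the symbol
  // table are recorded on the reader via error() and then consumed here.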
  if (Error E = NewSymtab->initVTableNamesFromCompressedStrings(VTableName)) {
    auto [ErrCode, Msg] = InstrProfError::take(std::move(E));
    consumeError(error(ErrCode, Msg));
  }

  // finalizeSymtab is called inside populateSymtab.
  if (Error E = Index->populateSymtab(*NewSymtab)) {
    auto [ErrCode, Msg] = InstrProfError::take(std::move(E));
    consumeError(error(ErrCode, Msg));
  }

  Symtab = std::move(NewSymtab);
  return *Symtab;
}

Expected<InstrProfRecord> IndexedInstrProfReader::getInstrProfRecord(
    StringRef FuncName, uint64_t FuncHash, StringRef DeprecatedFuncName,
    uint64_t *MismatchedFuncSum) {
  ArrayRef<NamedInstrProfRecord> Data;
  uint64_t FuncSum = 0;
  auto Err = Remapper->getRecords(FuncName, Data);
  if (Err) {
    // If we don't find FuncName, try DeprecatedFuncName to handle profiles
    // built by older compilers.
    auto Err2 =
        handleErrors(std::move(Err), [&](const InstrProfError &IE) -> Error {
          if (IE.get() != instrprof_error::unknown_function)
            return make_error<InstrProfError>(IE);
          if (auto Err = Remapper->getRecords(DeprecatedFuncName, Data))
            return Err;
          return Error::success();
        });
    if (Err2)
      return std::move(Err2);
  }
  // Found it. Look for counters with the right hash.

  // A flag to indicate whether the records come from the same type of profile
  // (i.e., context-sensitive vs. non-context-sensitive).
  bool CSBitMatch = false;
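  // Sum all counters of a candidate record, skipping unset (-1) counters and
  // saturating at the maximum value on overflow.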
  auto getFuncSum = [](ArrayRef<uint64_t> Counts) {
    uint64_t ValueSum = 0;
    for (uint64_t CountValue : Counts) {
      if (CountValue == (uint64_t)-1)
        continue;
      // Handle overflow -- if that happens, return max.
      if (std::numeric_limits<uint64_t>::max() - CountValue <= ValueSum)
        return std::numeric_limits<uint64_t>::max();
      ValueSum += CountValue;
    }
    return ValueSum;
  };

  for (const NamedInstrProfRecord &I : Data) {
    // Check for a match and fill the vector if there is one.
    if (I.Hash == FuncHash)
      return std::move(I);
    if (NamedInstrProfRecord::hasCSFlagInHash(I.Hash) ==
        NamedInstrProfRecord::hasCSFlagInHash(FuncHash)) {
      CSBitMatch = true;
      if (MismatchedFuncSum == nullptr)
        continue;
      FuncSum = std::max(FuncSum, getFuncSum(I.Counts));
    }
  }
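  // No record matched the hash exactly. If some record had the same
  // context-sensitivity bit, report a hash mismatch (optionally returning the
  // largest counter sum among those candidates); otherwise the function is
  // unknown to this profile.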
  if (CSBitMatch) {
    if (MismatchedFuncSum != nullptr)
      *MismatchedFuncSum = FuncSum;
    return error(instrprof_error::hash_mismatch);
  }
  return error(instrprof_error::unknown_function);
}

static Expected<memprof::MemProfRecord>
getMemProfRecordV0(const memprof::IndexedMemProfRecord &IndexedRecord,
                   MemProfFrameHashTable &MemProfFrameTable) {
  memprof::FrameIdConverter<MemProfFrameHashTable> FrameIdConv(
      MemProfFrameTable);

  memprof::MemProfRecord Record =
      memprof::MemProfRecord(IndexedRecord, FrameIdConv);

  // Check that all frame ids were successfully converted to frames.
  if (FrameIdConv.LastUnmappedId) {
    return make_error<InstrProfError>(instrprof_error::hash_mismatch,
                                      "memprof frame not found for frame id " +
                                          Twine(*FrameIdConv.LastUnmappedId));
  }

  return Record;
}

static Expected<memprof::MemProfRecord>
getMemProfRecordV2(const memprof::IndexedMemProfRecord &IndexedRecord,
                   MemProfFrameHashTable &MemProfFrameTable,
                   MemProfCallStackHashTable &MemProfCallStackTable) {
  memprof::FrameIdConverter<MemProfFrameHashTable> FrameIdConv(
      MemProfFrameTable);

  memprof::CallStackIdConverter<MemProfCallStackHashTable> CSIdConv(
      MemProfCallStackTable, FrameIdConv);

  memprof::MemProfRecord Record = IndexedRecord.toMemProfRecord(CSIdConv);

  // Check that all call stack ids were successfully converted to call stacks.
  if (CSIdConv.LastUnmappedId) {
    return make_error<InstrProfError>(
        instrprof_error::hash_mismatch,
        "memprof call stack not found for call stack id " +
            Twine(*CSIdConv.LastUnmappedId));
  }

  // Check that all frame ids were successfully converted to frames.
  if (FrameIdConv.LastUnmappedId) {
    return make_error<InstrProfError>(instrprof_error::hash_mismatch,
                                      "memprof frame not found for frame id " +
                                          Twine(*FrameIdConv.LastUnmappedId));
  }

  return Record;
}

static Expected<memprof::MemProfRecord>
getMemProfRecordV3(const memprof::IndexedMemProfRecord &IndexedRecord,
                   const unsigned char *FrameBase,
                   const unsigned char *CallStackBase) {
  memprof::LinearFrameIdConverter FrameIdConv(FrameBase);
  memprof::LinearCallStackIdConverter CSIdConv(CallStackBase, FrameIdConv);
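  // The linear converters above resolve frame and call stack ids by offset
  // into FrameBase and CallStackBase; V3 stores these as flat arrays rather
  // than as on-disk hash tables.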
  memprof::MemProfRecord Record = IndexedRecord.toMemProfRecord(CSIdConv);
  return Record;
}

Expected<memprof::MemProfRecord>
IndexedMemProfReader::getMemProfRecord(const uint64_t FuncNameHash) const {
  // TODO: Add memprof specific errors.
  if (MemProfRecordTable == nullptr)
    return make_error<InstrProfError>(instrprof_error::invalid_prof,
                                      "no memprof data available in profile");
  auto Iter = MemProfRecordTable->find(FuncNameHash);
  if (Iter == MemProfRecordTable->end())
    return make_error<InstrProfError>(
        instrprof_error::unknown_function,
        "memprof record not found for function hash " + Twine(FuncNameHash));

  const memprof::IndexedMemProfRecord &IndexedRecord = *Iter;
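  // Dispatch on the MemProf format version: V0/V1 resolve frames through a
  // frame hash table, V2 adds a call stack hash table, and V3 uses linear
  // frame and call stack arrays.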
  switch (Version) {
  case memprof::Version0:
  case memprof::Version1:
    assert(MemProfFrameTable && "MemProfFrameTable must be available");
    assert(!MemProfCallStackTable &&
           "MemProfCallStackTable must not be available");
    return getMemProfRecordV0(IndexedRecord, *MemProfFrameTable);
  case memprof::Version2:
    assert(MemProfFrameTable && "MemProfFrameTable must be available");
    assert(MemProfCallStackTable && "MemProfCallStackTable must be available");
    return getMemProfRecordV2(IndexedRecord, *MemProfFrameTable,
                              *MemProfCallStackTable);
  case memprof::Version3:
    assert(!MemProfFrameTable && "MemProfFrameTable must not be available");
    assert(!MemProfCallStackTable &&
           "MemProfCallStackTable must not be available");
    assert(FrameBase && "FrameBase must be available");
    assert(CallStackBase && "CallStackBase must be available");
    return getMemProfRecordV3(IndexedRecord, FrameBase, CallStackBase);
  }

  return make_error<InstrProfError>(
      instrprof_error::unsupported_version,
      formatv("MemProf version {} not supported; "
              "requires version between {} and {}, inclusive",
              Version, memprof::MinimumSupportedVersion,
              memprof::MaximumSupportedVersion));
}

Error IndexedInstrProfReader::getFunctionCounts(StringRef FuncName,
                                                uint64_t FuncHash,
                                                std::vector<uint64_t> &Counts) {
  Expected<InstrProfRecord> Record = getInstrProfRecord(FuncName, FuncHash);
  if (Error E = Record.takeError())
    return error(std::move(E));

  Counts = Record.get().Counts;
  return success();
}

Error IndexedInstrProfReader::getFunctionBitmap(StringRef FuncName,
                                                uint64_t FuncHash,
                                                BitVector &Bitmap) {
  Expected<InstrProfRecord> Record = getInstrProfRecord(FuncName, FuncHash);
  if (Error E = Record.takeError())
    return error(std::move(E));

  const auto &BitmapBytes = Record.get().BitmapBytes;
  size_t I = 0, E = BitmapBytes.size();
  Bitmap.resize(E * CHAR_BIT);
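  // Reassemble the little-endian bitmap bytes into words of the BitVector's
  // internal word type, zero-padding the final partial word.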
  BitVector::apply(
      [&](auto X) {
        using XTy = decltype(X);
        alignas(XTy) uint8_t W[sizeof(X)];
        size_t N = std::min(E - I, sizeof(W));
        std::memset(W, 0, sizeof(W));
        std::memcpy(W, &BitmapBytes[I], N);
        I += N;
        return support::endian::read<XTy, llvm::endianness::little,
                                     support::aligned>(W);
      },
      Bitmap, Bitmap);
  assert(I == E);

  return success();
}

Error IndexedInstrProfReader::readNextRecord(NamedInstrProfRecord &Record) {
  ArrayRef<NamedInstrProfRecord> Data;

  Error E = Index->getRecords(Data);
  if (E)
    return error(std::move(E));

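  // Return records for the current key one at a time; once they are
  // exhausted, advance the on-disk hash table iterator to the next key.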
  Record = Data[RecordIndex++];
  if (RecordIndex >= Data.size()) {
    Index->advanceToNextKey();
    RecordIndex = 0;
  }
  return success();
}

Error IndexedInstrProfReader::readBinaryIds(
    std::vector<llvm::object::BuildID> &BinaryIds) {
  return readBinaryIdsInternal(*DataBuffer, BinaryIdsBuffer, BinaryIds,
                               llvm::endianness::little);
}

Error IndexedInstrProfReader::printBinaryIds(raw_ostream &OS) {
  std::vector<llvm::object::BuildID> BinaryIds;
  if (Error E = readBinaryIds(BinaryIds))
    return E;
  printBinaryIdsInternal(OS, BinaryIds);
  return Error::success();
}

void InstrProfReader::accumulateCounts(CountSumOrPercent &Sum, bool IsCS) {
  uint64_t NumFuncs = 0;
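  // Walk every record in the profile. For IR-level profiles, only accumulate
  // records whose context-sensitivity flag matches IsCS.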
  for (const auto &Func : *this) {
    if (isIRLevelProfile()) {
      bool FuncIsCS = NamedInstrProfRecord::hasCSFlagInHash(Func.Hash);
      if (FuncIsCS != IsCS)
        continue;
    }
    Func.accumulateCounts(Sum);
    ++NumFuncs;
  }
  Sum.NumEntries = NumFuncs;
}
