LLVM: lib/Transforms/Instrumentation/AddressSanitizer.cpp Source File (original) (raw)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
84#include
85#include
86#include
87#include
88#include
89#include
90#include
91#include
92#include
93
94using namespace llvm;
95
96#define DEBUG_TYPE "asan"
97
102 std::numeric_limits<uint64_t>::max();
124
125
127
132
136
143 "__asan_unregister_image_globals";
156 "__asan_stack_malloc_always_";
164
165
166
168
170 "__asan_option_detect_stack_use_after_return";
171
173 "__asan_shadow_memory_dynamic_address";
174
177
182
183
185
187
188
195
196
197
199 "asan-kernel", cl::desc("Enable KernelAddressSanitizer instrumentation"),
201
203 "asan-recover",
204 cl::desc("Enable recovery mode (continue-after-error)."),
206
208 "asan-guard-against-version-mismatch",
209 cl::desc("Guard against compiler/runtime version mismatch."), cl::Hidden,
211
212
214 cl::desc("instrument read instructions"),
216
218 "asan-instrument-writes", cl::desc("instrument write instructions"),
220
225
227 "asan-instrument-atomics",
228 cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden,
230
235
237 "asan-always-slow-path",
238 cl::desc("use instrumentation with slow path for all accesses"), cl::Hidden,
240
242 "asan-force-dynamic-shadow",
243 cl::desc("Load shadow address into a local variable for each function"),
245
248 cl::desc("Access dynamic shadow through an ifunc global on "
249 "platforms that support this"),
251
254 cl::desc("Address space for pointers to the shadow map"),
256
258 "asan-with-ifunc-suppress-remat",
259 cl::desc("Suppress rematerialization of dynamic shadow address by passing "
260 "it through inline asm in prologue."),
262
263
264
265
266
268 "asan-max-ins-per-bb", cl::init(10000),
269 cl::desc("maximal number of instructions to instrument in any given BB"),
271
272
276 "asan-max-inline-poisoning-size",
278 "Inline shadow poisoning for blocks up to the given size in bytes."),
280
282 "asan-use-after-return",
283 cl::desc("Sets the mode of detection for stack-use-after-return."),
286 "Never detect stack use after return."),
289 "Detect stack use after return if "
290 "binary flag 'ASAN_OPTIONS=detect_stack_use_after_return' is set."),
292 "Always detect stack use after return.")),
294
296 cl::desc("Create redzones for byval "
297 "arguments (extra copy "
300
302 cl::desc("Check stack-use-after-scope"),
304
305
309
311 cl::desc("Handle C++ initializer order"),
313
315 "asan-detect-invalid-pointer-pair",
316 cl::desc("Instrument <, <=, >, >=, - with pointer operands"), cl::Hidden,
318
320 "asan-detect-invalid-pointer-cmp",
321 cl::desc("Instrument <, <=, >, >= with pointer operands"), cl::Hidden,
323
325 "asan-detect-invalid-pointer-sub",
326 cl::desc("Instrument - operations with pointer operands"), cl::Hidden,
328
330 "asan-realign-stack",
331 cl::desc("Realign stack to the value of this flag (power of two)"),
333
335 "asan-instrumentation-with-call-threshold",
336 cl::desc("If the function being instrumented contains more than "
337 "this number of memory accesses, use callbacks instead of "
338 "inline checks (-1 means never use callbacks)."),
340
342 "asan-memory-access-callback-prefix",
345
347 "asan-kernel-mem-intrinsic-prefix",
348 cl::desc("Use prefix for memory intrinsics in KASAN mode"), cl::Hidden,
350
353 cl::desc("instrument dynamic allocas"),
355
357 "asan-skip-promotable-allocas",
360
362 "asan-constructor-kind",
363 cl::desc("Sets the ASan constructor kind"),
366 "Use global constructors")),
368
369
370
371
373 cl::desc("scale of asan shadow mapping"),
375
378 cl::desc("offset of asan shadow mapping [EXPERIMENTAL]"),
380
381
382
383
386
388 cl::desc("Optimize callbacks"),
390
392 "asan-opt-same-temp", cl::desc("Instrument the same temp just once"),
394
396 cl::desc("Don't instrument scalar globals"),
398
400 "asan-opt-stack", cl::desc("Don't instrument scalar stack variables"),
402
404 "asan-stack-dynamic-alloca",
405 cl::desc("Use dynamic alloca to represent stack variables"), cl::Hidden,
407
409 "asan-force-experiment",
412
415 cl::desc("Use private aliases for global variables"),
417
420 cl::desc("Use odr indicators to improve ODR reporting"),
422
425 cl::desc("Use linker features to support dead "
426 "code stripping of globals"),
428
429
430
433 cl::desc("Place ASan constructors in comdat sections"),
435
437 "asan-destructor-kind",
438 cl::desc("Sets the ASan destructor kind. The default is to use the value "
439 "provided to the pass constructor"),
442 "Use global destructors")),
444
447 "asan-instrument-address-spaces",
448 cl::desc("Only instrument variables in the specified address spaces."),
452 }));
453
454
455
458
461
464
467
470
471STATISTIC(NumInstrumentedReads, "Number of instrumented reads");
472STATISTIC(NumInstrumentedWrites, "Number of instrumented writes");
474 "Number of optimized accesses to global vars");
476 "Number of optimized accesses to stack vars");
477
478namespace {
479
480
481
482
483
484
485struct ShadowMapping {
486 int Scale;
488 bool OrShadowOffset;
489 bool InGlobal;
490};
491
492}
493
495 bool IsKasan) {
496 bool IsAndroid = TargetTriple.isAndroid();
497 bool IsIOS = TargetTriple.isiOS() || TargetTriple.isWatchOS() ||
499 bool IsMacOS = TargetTriple.isMacOSX();
500 bool IsFreeBSD = TargetTriple.isOSFreeBSD();
501 bool IsNetBSD = TargetTriple.isOSNetBSD();
502 bool IsPS = TargetTriple.isPS();
503 bool IsLinux = TargetTriple.isOSLinux();
508 bool IsMIPSN32ABI = TargetTriple.isABIN32();
509 bool IsMIPS32 = TargetTriple.isMIPS32();
510 bool IsMIPS64 = TargetTriple.isMIPS64();
511 bool IsArmOrThumb = TargetTriple.isARM() || TargetTriple.isThumb();
514 bool IsLoongArch64 = TargetTriple.isLoongArch64();
516 bool IsWindows = TargetTriple.isOSWindows();
517 bool IsFuchsia = TargetTriple.isOSFuchsia();
518 bool IsAMDGPU = TargetTriple.isAMDGPU();
519 bool IsHaiku = TargetTriple.isOSHaiku();
520 bool IsWasm = TargetTriple.isWasm();
521 bool IsBPF = TargetTriple.isBPF();
522
523 ShadowMapping Mapping;
524
528 }
529
530 if (LongSize == 32) {
531 if (IsAndroid)
533 else if (IsMIPSN32ABI)
535 else if (IsMIPS32)
537 else if (IsFreeBSD)
539 else if (IsNetBSD)
541 else if (IsIOS)
543 else if (IsWindows)
545 else if (IsWasm)
547 else
549 } else {
550
551
552 if (IsFuchsia)
553 Mapping.Offset = 0;
554 else if (IsPPC64)
556 else if (IsSystemZ)
558 else if (IsFreeBSD && IsAArch64)
560 else if (IsFreeBSD && !IsMIPS64) {
561 if (IsKasan)
563 else
565 } else if (IsNetBSD) {
566 if (IsKasan)
568 else
570 } else if (IsPS)
572 else if (IsLinux && IsX86_64) {
573 if (IsKasan)
575 else
578 } else if (IsWindows && IsX86_64) {
580 } else if (IsMIPS64)
582 else if (IsIOS)
584 else if (IsMacOS && IsAArch64)
586 else if (IsAArch64)
588 else if (IsLoongArch64)
590 else if (IsRISCV64)
592 else if (IsAMDGPU)
595 else if (IsHaiku && IsX86_64)
598 else if (IsBPF)
600 else
602 }
603
606 }
607
610 }
611
612
613
614
615
616
617 Mapping.OrShadowOffset = !IsAArch64 && !IsPPC64 && !IsSystemZ && !IsPS &&
618 !IsRISCV64 && !IsLoongArch64 &&
619 !(Mapping.Offset & (Mapping.Offset - 1)) &&
621 Mapping.InGlobal = ClWithIfunc && IsAndroid && IsArmOrThumb;
622
623 return Mapping;
624}
625
627 bool IsKasan, uint64_t *ShadowBase,
628 int *MappingScale, bool *OrShadowOffset) {
629 auto Mapping = getShadowMapping(TargetTriple, LongSize, IsKasan);
630 *ShadowBase = Mapping.Offset;
631 *MappingScale = Mapping.Scale;
632 *OrShadowOffset = Mapping.OrShadowOffset;
633}
634
636
637
638
639
640
641
642
643
644
645
646
647
648
649
651 if (.doesNotAccessMemory()) {
652 bool WritesMemory = .onlyReadsMemory();
653 bool ReadsMemory = .onlyWritesMemory();
654 if ((WritesMemory && !ReadsMemory) || F.onlyAccessesArgMemory()) {
655 F.removeFnAttr(Attribute::Memory);
657 }
658 }
659 if (ReadsArgMem) {
661 if (A.hasAttribute(Attribute::WriteOnly)) {
662 A.removeAttr(Attribute::WriteOnly);
664 }
665 }
666 }
668
669
670 F.addFnAttr(Attribute::NoBuiltin);
671 }
672}
673
679
687
689
690
691 return std::max(32U, 1U << MappingScale);
692}
693
700
704
705namespace {
706
707
708
709class RuntimeCallInserter {
711 bool TrackInsertedCalls = false;
713
714public:
715 RuntimeCallInserter(Function &Fn) : OwnerFn(&Fn) {
717 auto Personality = classifyEHPersonality(Fn.getPersonalityFn());
718 if (isScopedEHPersonality(Personality))
719 TrackInsertedCalls = true;
720 }
721 }
722
723 ~RuntimeCallInserter() {
724 if (InsertedCalls.empty())
725 return;
726 assert(TrackInsertedCalls && "Calls were wrongly tracked");
727
728 DenseMap<BasicBlock *, ColorVector> BlockColors = colorEHFunclets(*OwnerFn);
729 for (CallInst *CI : InsertedCalls) {
731 assert(BB && "Instruction doesn't belong to a BasicBlock");
733 "Instruction doesn't belong to the expected Function!");
734
736
737
738
739 if (Colors.empty())
740 continue;
741 if (Colors.size() != 1) {
743 "Instruction's BasicBlock is not monochromatic");
744 continue;
745 }
746
749
750 if (EHPadIt != Color->end() && EHPadIt->isEHPad()) {
751
754 OB, CI->getIterator());
755 NewCall->copyMetadata(*CI);
756 CI->replaceAllUsesWith(NewCall);
757 CI->eraseFromParent();
758 }
759 }
760 }
761
762 CallInst *createRuntimeCall(IRBuilder<> &IRB, FunctionCallee Callee,
764 const Twine &Name = "") {
766
767 CallInst *Inst = IRB.CreateCall(Callee, Args, Name, nullptr);
768 if (TrackInsertedCalls)
769 InsertedCalls.push_back(Inst);
770 return Inst;
771 }
772};
773
774
775struct AddressSanitizer {
776 AddressSanitizer(Module &M, const StackSafetyGlobalInfo *SSGI,
777 int InstrumentationWithCallsThreshold,
778 uint32_t MaxInlinePoisoningSize, bool CompileKernel = false,
779 bool Recover = false, bool UseAfterScope = false,
781 AsanDetectStackUseAfterReturnMode::Runtime)
784 : CompileKernel),
788 : UseAfterReturn),
789 SSGI(SSGI),
790 InstrumentationWithCallsThreshold(
793 : InstrumentationWithCallsThreshold),
796 : MaxInlinePoisoningSize) {
799 LongSize = M.getDataLayout().getPointerSizeInBits();
800 IntptrTy = Type::getIntNTy(*C, LongSize);
801 PtrTy = PointerType::getUnqual(*C);
802 Int32Ty = Type::getInt32Ty(*C);
803 TargetTriple = M.getTargetTriple();
804
805 Mapping = getShadowMapping(TargetTriple, LongSize, this->CompileKernel);
806
807 assert(this->UseAfterReturn != AsanDetectStackUseAfterReturnMode::Invalid);
808 }
809
812 }
813
814
815 bool isInterestingAlloca(const AllocaInst &AI);
816
817 bool ignoreAccess(Instruction *Inst, Value *Ptr);
819 Instruction *I, SmallVectorImpl &Interesting,
820 const TargetTransformInfo *TTI);
821
822 void instrumentMop(ObjectSizeOffsetVisitor &ObjSizeVis,
823 InterestingMemoryOperand &O, bool UseCalls,
824 const DataLayout &DL, RuntimeCallInserter &RTCI);
825 void instrumentPointerComparisonOrSubtraction(Instruction *I,
826 RuntimeCallInserter &RTCI);
827 void instrumentAddress(Instruction *OrigIns, Instruction *InsertBefore,
828 Value *Addr, MaybeAlign Alignment,
829 uint32_t TypeStoreSize, bool IsWrite,
830 Value *SizeArgument, bool UseCalls, uint32_t Exp,
831 RuntimeCallInserter &RTCI);
832 Instruction *instrumentAMDGPUAddress(Instruction *OrigIns,
833 Instruction *InsertBefore, Value *Addr,
834 uint32_t TypeStoreSize, bool IsWrite,
835 Value *SizeArgument);
837 bool Recover);
838 void instrumentUnusualSizeOrAlignment(Instruction *I,
839 Instruction *InsertBefore, Value *Addr,
840 TypeSize TypeStoreSize, bool IsWrite,
841 Value *SizeArgument, bool UseCalls,
842 uint32_t Exp,
843 RuntimeCallInserter &RTCI);
844 void instrumentMaskedLoadOrStore(AddressSanitizer *Pass, const DataLayout &DL,
846 Value *Stride, Instruction *I, Value *Addr,
847 MaybeAlign Alignment, unsigned Granularity,
848 Type *OpType, bool IsWrite,
849 Value *SizeArgument, bool UseCalls,
850 uint32_t Exp, RuntimeCallInserter &RTCI);
852 Value *ShadowValue, uint32_t TypeStoreSize);
853 Instruction *generateCrashCode(Instruction *InsertBefore, Value *Addr,
854 bool IsWrite, size_t AccessSizeIndex,
855 Value *SizeArgument, uint32_t Exp,
856 RuntimeCallInserter &RTCI);
857 void instrumentMemIntrinsic(MemIntrinsic *MI, RuntimeCallInserter &RTCI);
859 bool suppressInstrumentationSiteForDebug(int &Instrumented);
860 bool instrumentFunction(Function &F, const TargetLibraryInfo *TLI,
861 const TargetTransformInfo *TTI);
862 bool maybeInsertAsanInitAtFunctionEntry(Function &F);
863 bool maybeInsertDynamicShadowAtFunctionEntry(Function &F);
864 void markEscapedLocalAllocas(Function &F);
865 void markCatchParametersAsUninteresting(Function &F);
866
867private:
868 friend struct FunctionStackPoisoner;
869
870 void initializeCallbacks(const TargetLibraryInfo *TLI);
871
872 bool LooksLikeCodeInBug11395(Instruction *I);
873 bool GlobalIsLinkerInitialized(GlobalVariable *G);
874 bool isSafeAccess(ObjectSizeOffsetVisitor &ObjSizeVis, Value *Addr,
875 TypeSize TypeStoreSize) const;
876
877
878 struct FunctionStateRAII {
879 AddressSanitizer *Pass;
880
881 FunctionStateRAII(AddressSanitizer *Pass) : Pass(Pass) {
882 assert(Pass->ProcessedAllocas.empty() &&
883 "last pass forgot to clear cache");
885 }
886
887 ~FunctionStateRAII() {
888 Pass->LocalDynamicShadow = nullptr;
889 Pass->ProcessedAllocas.clear();
890 }
891 };
892
894 LLVMContext *C;
895 const DataLayout *DL;
896 Triple TargetTriple;
897 int LongSize;
898 bool CompileKernel;
899 bool Recover;
900 bool UseAfterScope;
902 Type *IntptrTy;
905 ShadowMapping Mapping;
906 FunctionCallee AsanHandleNoReturnFunc;
907 FunctionCallee AsanPtrCmpFunction, AsanPtrSubFunction;
909
910
913
914
915 FunctionCallee AsanErrorCallbackSized[2][2];
916 FunctionCallee AsanMemoryAccessCallbackSized[2][2];
917
918 FunctionCallee AsanMemmove, AsanMemcpy, AsanMemset;
919 Value *LocalDynamicShadow = nullptr;
920 const StackSafetyGlobalInfo *SSGI;
921 DenseMap<const AllocaInst *, bool> ProcessedAllocas;
922
923 FunctionCallee AMDGPUAddressShared;
924 FunctionCallee AMDGPUAddressPrivate;
925 int InstrumentationWithCallsThreshold;
926 uint32_t MaxInlinePoisoningSize;
927};
928
929class ModuleAddressSanitizer {
930public:
931 ModuleAddressSanitizer(Module &M, bool InsertVersionCheck,
932 bool CompileKernel = false, bool Recover = false,
933 bool UseGlobalsGC = true, bool UseOdrIndicator = true,
934 AsanDtorKind DestructorKind = AsanDtorKind::Global,
935 AsanCtorKind ConstructorKind = AsanCtorKind::Global)
938 : CompileKernel),
941 : InsertVersionCheck),
943 UseGlobalsGC(UseGlobalsGC && ClUseGlobalsGC && !this->CompileKernel),
944
947 : UseOdrIndicator),
950 : UseOdrIndicator),
951
952
953
954
955
956
957
958 UseCtorComdat(UseGlobalsGC && ClWithComdat && !this->CompileKernel),
959 DestructorKind(DestructorKind),
962 : ConstructorKind) {
964 int LongSize = M.getDataLayout().getPointerSizeInBits();
965 IntptrTy = Type::getIntNTy(*C, LongSize);
966 PtrTy = PointerType::getUnqual(*C);
967 TargetTriple = M.getTargetTriple();
968 Mapping = getShadowMapping(TargetTriple, LongSize, this->CompileKernel);
969
972 assert(this->DestructorKind != AsanDtorKind::Invalid);
973 }
974
975 bool instrumentModule();
976
977private:
978 void initializeCallbacks();
979
980 void instrumentGlobals(IRBuilder<> &IRB, bool *CtorComdat);
981 void InstrumentGlobalsCOFF(IRBuilder<> &IRB,
984 void instrumentGlobalsELF(IRBuilder<> &IRB,
987 const std::string &UniqueModuleId);
988 void InstrumentGlobalsMachO(IRBuilder<> &IRB,
991 void
992 InstrumentGlobalsWithMetadataArray(IRBuilder<> &IRB,
995
996 GlobalVariable *CreateMetadataGlobal(Constant *Initializer,
997 StringRef OriginalName);
998 void SetComdatForGlobalMetadata(GlobalVariable *G, GlobalVariable *Metadata,
999 StringRef InternalSuffix);
1001
1002 const GlobalVariable *getExcludedAliasedGlobal(const GlobalAlias &GA) const;
1003 bool shouldInstrumentGlobal(GlobalVariable *G) const;
1004 bool ShouldUseMachOGlobalsSection() const;
1005 StringRef getGlobalMetadataSection() const;
1006 void poisonOneInitializer(Function &GlobalInit);
1007 void createInitializerPoisonCalls();
1008 uint64_t getMinRedzoneSizeForGlobal() const {
1010 }
1012 int GetAsanVersion() const;
1013 GlobalVariable *getOrCreateModuleName();
1014
1016 bool CompileKernel;
1017 bool InsertVersionCheck;
1018 bool Recover;
1019 bool UseGlobalsGC;
1020 bool UsePrivateAlias;
1021 bool UseOdrIndicator;
1022 bool UseCtorComdat;
1025 Type *IntptrTy;
1027 LLVMContext *C;
1028 Triple TargetTriple;
1029 ShadowMapping Mapping;
1030 FunctionCallee AsanPoisonGlobals;
1031 FunctionCallee AsanUnpoisonGlobals;
1032 FunctionCallee AsanRegisterGlobals;
1033 FunctionCallee AsanUnregisterGlobals;
1034 FunctionCallee AsanRegisterImageGlobals;
1035 FunctionCallee AsanUnregisterImageGlobals;
1036 FunctionCallee AsanRegisterElfGlobals;
1037 FunctionCallee AsanUnregisterElfGlobals;
1038
1039 Function *AsanCtorFunction = nullptr;
1040 Function *AsanDtorFunction = nullptr;
1041 GlobalVariable *ModuleName = nullptr;
1042};
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053struct FunctionStackPoisoner : public InstVisitor {
1055 AddressSanitizer &ASan;
1056 RuntimeCallInserter &RTCI;
1057 DIBuilder DIB;
1058 LLVMContext *C;
1059 Type *IntptrTy;
1060 Type *IntptrPtrTy;
1061 ShadowMapping Mapping;
1062
1065 SmallVector<Instruction *, 8> RetVec;
1066
1069 FunctionCallee AsanSetShadowFunc[0x100] = {};
1070 FunctionCallee AsanPoisonStackMemoryFunc, AsanUnpoisonStackMemoryFunc;
1071 FunctionCallee AsanAllocaPoisonFunc, AsanAllocasUnpoisonFunc;
1072
1073
1074 struct AllocaPoisonCall {
1075 IntrinsicInst *InsBefore;
1076 AllocaInst *AI;
1077 uint64_t Size;
1078 bool DoPoison;
1079 };
1082
1085 AllocaInst *DynamicAllocaLayout = nullptr;
1086 IntrinsicInst *LocalEscapeCall = nullptr;
1087
1088 bool HasInlineAsm = false;
1089 bool HasReturnsTwiceCall = false;
1090 bool PoisonStack;
1091
1092 FunctionStackPoisoner(Function &F, AddressSanitizer &ASan,
1093 RuntimeCallInserter &RTCI)
1094 : F(F), ASan(ASan), RTCI(RTCI),
1096 IntptrTy(ASan.IntptrTy),
1098 Mapping(ASan.Mapping),
1099 PoisonStack(ClStack && .getParent()->getTargetTriple().isAMDGPU()) {}
1100
1102 if (!PoisonStack)
1103 return false;
1104
1106 copyArgsPassedByValToAllocas();
1107
1108
1109 for (BasicBlock *BB : depth_first(&F.getEntryBlock())) visit(*BB);
1110
1111 if (AllocaVec.empty() && DynamicAllocaVec.empty()) return false;
1112
1113 initializeCallbacks(*F.getParent());
1114
1115 processDynamicAllocas();
1116 processStaticAllocas();
1117
1120 }
1121 return true;
1122 }
1123
1124
1125
1126
1127 void copyArgsPassedByValToAllocas();
1128
1129
1130
1131
1132 void processStaticAllocas();
1133 void processDynamicAllocas();
1134
1135 void createDynamicAllocasInitStorage();
1136
1137
1138
1139
1140 void visitReturnInst(ReturnInst &RI) {
1141 if (CallInst *CI = RI.getParent()->getTerminatingMustTailCall())
1143 else
1145 }
1146
1147
1148 void visitResumeInst(ResumeInst &RI) { RetVec.push_back(&RI); }
1149
1150
1151 void visitCleanupReturnInst(CleanupReturnInst &CRI) { RetVec.push_back(&CRI); }
1152
1153 void unpoisonDynamicAllocasBeforeInst(Instruction *InstBefore,
1154 Value *SavedStack) {
1157
1158
1159
1160
1163 Intrinsic::get_dynamic_area_offset, {IntptrTy}, {});
1164
1166 DynamicAreaOffset);
1167 }
1168
1169 RTCI.createRuntimeCall(
1170 IRB, AsanAllocasUnpoisonFunc,
1171 {IRB.CreateLoad(IntptrTy, DynamicAllocaLayout), DynamicAreaPtr});
1172 }
1173
1174
1175 void unpoisonDynamicAllocas() {
1176 for (Instruction *Ret : RetVec)
1177 unpoisonDynamicAllocasBeforeInst(Ret, DynamicAllocaLayout);
1178
1179 for (Instruction *StackRestoreInst : StackRestoreVec)
1180 unpoisonDynamicAllocasBeforeInst(StackRestoreInst,
1181 StackRestoreInst->getOperand(0));
1182 }
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
1194 void handleDynamicAllocaCall(AllocaInst *AI);
1195
1196
1197 void visitAllocaInst(AllocaInst &AI) {
1198
1202 (STy && STy->containsHomogeneousScalableVectorTypes())) {
1204
1205
1206 if (AllocaVec.empty())
1207 return;
1208
1209 StaticAllocasToMoveUp.push_back(&AI);
1210 }
1211 return;
1212 }
1213
1215 DynamicAllocaVec.push_back(&AI);
1216 else
1218 }
1219
1220
1221
1222 void visitIntrinsicInst(IntrinsicInst &II) {
1224 if (ID == Intrinsic::stackrestore) StackRestoreVec.push_back(&II);
1225 if (ID == Intrinsic::localescape) LocalEscapeCall = &II;
1226 if (!ASan.UseAfterScope)
1227 return;
1228 if (.isLifetimeStartOrEnd())
1229 return;
1230
1232
1233 if (!AI || !ASan.isInterestingAlloca(*AI))
1234 return;
1235
1237
1238
1239 if ( || Size->isScalable() ||
1241 return;
1242
1243 bool DoPoison = (ID == Intrinsic::lifetime_end);
1244 AllocaPoisonCall APC = {&II, AI, *Size, DoPoison};
1246 StaticAllocaPoisonCallVec.push_back(APC);
1248 DynamicAllocaPoisonCallVec.push_back(APC);
1249 }
1250
1251 void visitCallBase(CallBase &CB) {
1253 HasInlineAsm |= CI->isInlineAsm() && &CB != ASan.LocalDynamicShadow;
1254 HasReturnsTwiceCall |= CI->canReturnTwice();
1255 }
1256 }
1257
1258
1259 void initializeCallbacks(Module &M);
1260
1261
1262
1263
1264 void copyToShadow(ArrayRef<uint8_t> ShadowMask, ArrayRef<uint8_t> ShadowBytes,
1266 void copyToShadow(ArrayRef<uint8_t> ShadowMask, ArrayRef<uint8_t> ShadowBytes,
1267 size_t Begin, size_t End, IRBuilder<> &IRB,
1268 Value *ShadowBase);
1269 void copyToShadowInline(ArrayRef<uint8_t> ShadowMask,
1270 ArrayRef<uint8_t> ShadowBytes, size_t Begin,
1272
1273 void poisonAlloca(Value *V, uint64_t Size, IRBuilder<> &IRB, bool DoPoison);
1274
1275 Value *createAllocaForLayout(IRBuilder<> &IRB, const ASanStackFrameLayout &L,
1278 Instruction *ThenTerm, Value *ValueIfFalse);
1279};
1280
1281}
1282
1286 OS, MapClassName2PassName);
1287 OS << '<';
1288 if (Options.CompileKernel)
1289 OS << "kernel;";
1290 if (Options.UseAfterScope)
1291 OS << "use-after-scope";
1292 OS << '>';
1293}
1294
1297 bool UseOdrIndicator, AsanDtorKind DestructorKind,
1299 : Options(Options), UseGlobalGC(UseGlobalGC),
1300 UseOdrIndicator(UseOdrIndicator), DestructorKind(DestructorKind),
1301 ConstructorKind(ConstructorKind) {}
1302
1305
1306
1309
1310 ModuleAddressSanitizer ModuleSanitizer(
1311 M, Options.InsertVersionCheck, Options.CompileKernel, Options.Recover,
1312 UseGlobalGC, UseOdrIndicator, DestructorKind, ConstructorKind);
1318 if (F.empty())
1319 continue;
1321 continue;
1323 continue;
1324 if (F.getName().starts_with("__asan_"))
1325 continue;
1326 if (F.isPresplitCoroutine())
1327 continue;
1328 AddressSanitizer FunctionSanitizer(
1329 M, SSGI, Options.InstrumentationWithCallsThreshold,
1330 Options.MaxInlinePoisoningSize, Options.CompileKernel, Options.Recover,
1331 Options.UseAfterScope, Options.UseAfterReturn);
1334 Modified |= FunctionSanitizer.instrumentFunction(F, &TLI, &TTI);
1335 }
1336 Modified |= ModuleSanitizer.instrumentModule();
1339
1341
1342
1343
1345 return PA;
1346}
1347
1353
1354
1356
1357 if (G->getName().starts_with("llvm.") ||
1358
1359 G->getName().starts_with("__llvm_gcov_ctr") ||
1360
1361 G->getName().starts_with("__llvm_rtti_proxy"))
1362 return true;
1363
1364
1368 return true;
1369
1370 return false;
1371}
1372
1376
1377 if (AddrSpace == 3 || AddrSpace == 5)
1378 return true;
1379 return false;
1380}
1381
1385
1388
1389 if (TargetTriple.isAMDGPU())
1391
1392 return AddrSpace == 0;
1393}
1394
1396
1397 Shadow = IRB.CreateLShr(Shadow, Mapping.Scale);
1398 if (Mapping.Offset == 0) return Shadow;
1399
1400 Value *ShadowBase;
1401 if (LocalDynamicShadow)
1402 ShadowBase = LocalDynamicShadow;
1403 else
1404 ShadowBase = ConstantInt::get(IntptrTy, Mapping.Offset);
1405 if (Mapping.OrShadowOffset)
1406 return IRB.CreateOr(Shadow, ShadowBase);
1407 else
1408 return IRB.CreateAdd(Shadow, ShadowBase);
1409}
1410
1411
1412void AddressSanitizer::instrumentMemIntrinsic(MemIntrinsic *MI,
1413 RuntimeCallInserter &RTCI) {
1416 RTCI.createRuntimeCall(
1420 IRB.CreateIntCast(MI->getOperand(2), IntptrTy, false)});
1422 RTCI.createRuntimeCall(
1423 IRB, AsanMemset,
1426 IRB.CreateIntCast(MI->getOperand(2), IntptrTy, false)});
1427 }
1428 MI->eraseFromParent();
1429}
1430
1431
1432bool AddressSanitizer::isInterestingAlloca(const AllocaInst &AI) {
1433 auto [It, Inserted] = ProcessedAllocas.try_emplace(&AI);
1434
1435 if (!Inserted)
1436 return It->getSecond();
1437
1438 bool IsInteresting =
1440
1442
1443
1445
1446
1448
1450
1451 !(SSGI && SSGI->isSafe(AI)));
1452
1453 It->second = IsInteresting;
1454 return IsInteresting;
1455}
1456
1457bool AddressSanitizer::ignoreAccess(Instruction *Inst, Value *Ptr) {
1458
1459
1461 return true;
1462
1463
1464
1465
1466
1468 return true;
1469
1470
1471
1472
1475 return true;
1476
1479 return true;
1480
1481 return false;
1482}
1483
1484void AddressSanitizer::getInterestingMemoryOperands(
1487
1488 if (LocalDynamicShadow == I)
1489 return;
1490
1493 return;
1494 Interesting.emplace_back(I, LI->getPointerOperandIndex(), false,
1495 LI->getType(), LI->getAlign());
1498 return;
1499 Interesting.emplace_back(I, SI->getPointerOperandIndex(), true,
1500 SI->getValueOperand()->getType(), SI->getAlign());
1503 return;
1504 Interesting.emplace_back(I, RMW->getPointerOperandIndex(), true,
1505 RMW->getValOperand()->getType(), std::nullopt);
1508 return;
1509 Interesting.emplace_back(I, XCHG->getPointerOperandIndex(), true,
1510 XCHG->getCompareOperand()->getType(),
1511 std::nullopt);
1513 switch (CI->getIntrinsicID()) {
1514 case Intrinsic::masked_load:
1515 case Intrinsic::masked_store:
1516 case Intrinsic::masked_gather:
1517 case Intrinsic::masked_scatter: {
1518 bool IsWrite = CI->getType()->isVoidTy();
1519
1520 unsigned OpOffset = IsWrite ? 1 : 0;
1522 return;
1523
1524 auto BasePtr = CI->getOperand(OpOffset);
1525 if (ignoreAccess(I, BasePtr))
1526 return;
1527 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1528 MaybeAlign Alignment = CI->getParamAlign(0);
1529 Value *Mask = CI->getOperand(1 + OpOffset);
1530 Interesting.emplace_back(I, OpOffset, IsWrite, Ty, Alignment, Mask);
1531 break;
1532 }
1533 case Intrinsic::masked_expandload:
1534 case Intrinsic::masked_compressstore: {
1535 bool IsWrite = CI->getIntrinsicID() == Intrinsic::masked_compressstore;
1536 unsigned OpOffset = IsWrite ? 1 : 0;
1538 return;
1539 auto BasePtr = CI->getOperand(OpOffset);
1540 if (ignoreAccess(I, BasePtr))
1541 return;
1543 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1544
1546 Value *Mask = CI->getOperand(1 + OpOffset);
1547
1549 Value *ExtMask = IB.CreateZExt(Mask, ExtTy);
1550 Value *EVL = IB.CreateAddReduce(ExtMask);
1551 Value *TrueMask = ConstantInt::get(Mask->getType(), 1);
1552 Interesting.emplace_back(I, OpOffset, IsWrite, Ty, Alignment, TrueMask,
1553 EVL);
1554 break;
1555 }
1556 case Intrinsic::vp_load:
1557 case Intrinsic::vp_store:
1558 case Intrinsic::experimental_vp_strided_load:
1559 case Intrinsic::experimental_vp_strided_store: {
1561 unsigned IID = CI->getIntrinsicID();
1562 bool IsWrite = CI->getType()->isVoidTy();
1564 return;
1565 unsigned PtrOpNo = *VPI->getMemoryPointerParamPos(IID);
1566 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1567 MaybeAlign Alignment = VPI->getOperand(PtrOpNo)->getPointerAlignment(*DL);
1568 Value *Stride = nullptr;
1569 if (IID == Intrinsic::experimental_vp_strided_store ||
1570 IID == Intrinsic::experimental_vp_strided_load) {
1571 Stride = VPI->getOperand(PtrOpNo + 1);
1572
1573
1574
1578 Alignment = Align(1);
1579 }
1580 Interesting.emplace_back(I, PtrOpNo, IsWrite, Ty, Alignment,
1581 VPI->getMaskParam(), VPI->getVectorLengthParam(),
1582 Stride);
1583 break;
1584 }
1585 case Intrinsic::vp_gather:
1586 case Intrinsic::vp_scatter: {
1588 unsigned IID = CI->getIntrinsicID();
1589 bool IsWrite = IID == Intrinsic::vp_scatter;
1591 return;
1592 unsigned PtrOpNo = *VPI->getMemoryPointerParamPos(IID);
1593 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1594 MaybeAlign Alignment = VPI->getPointerAlignment();
1595 Interesting.emplace_back(I, PtrOpNo, IsWrite, Ty, Alignment,
1596 VPI->getMaskParam(),
1597 VPI->getVectorLengthParam());
1598 break;
1599 }
1600 default:
1603 if (TTI->getTgtMemIntrinsic(II, IntrInfo))
1605 return;
1606 }
1607 for (unsigned ArgNo = 0; ArgNo < CI->arg_size(); ArgNo++) {
1609 ignoreAccess(I, CI->getArgOperand(ArgNo)))
1610 continue;
1611 Type *Ty = CI->getParamByValType(ArgNo);
1613 }
1614 }
1615 }
1616}
1617
1621
1622
1623
1624
1627 if (!Cmp->isRelational())
1628 return false;
1629 } else {
1630 return false;
1631 }
1634}
1635
1636
1637
1638
1641 if (BO->getOpcode() != Instruction::Sub)
1642 return false;
1643 } else {
1644 return false;
1645 }
1648}
1649
1650bool AddressSanitizer::GlobalIsLinkerInitialized(GlobalVariable *G) {
1651
1652
1653
1654 if (->hasInitializer())
1655 return false;
1656
1657 if (G->hasSanitizerMetadata() && G->getSanitizerMetadata().IsDynInit)
1658 return false;
1659
1660 return true;
1661}
1662
1663void AddressSanitizer::instrumentPointerComparisonOrSubtraction(
1664 Instruction *I, RuntimeCallInserter &RTCI) {
1667 Value *Param[2] = {I->getOperand(0), I->getOperand(1)};
1668 for (Value *&i : Param) {
1669 if (i->getType()->isPointerTy())
1671 }
1672 RTCI.createRuntimeCall(IRB, F, Param);
1673}
1674
1677 MaybeAlign Alignment, unsigned Granularity,
1678 TypeSize TypeStoreSize, bool IsWrite,
1679 Value *SizeArgument, bool UseCalls,
1680 uint32_t Exp, RuntimeCallInserter &RTCI) {
1681
1682
1684 const auto FixedSize = TypeStoreSize.getFixedValue();
1685 switch (FixedSize) {
1686 case 8:
1687 case 16:
1688 case 32:
1689 case 64:
1690 case 128:
1691 if (!Alignment || *Alignment >= Granularity ||
1692 *Alignment >= FixedSize / 8)
1693 return Pass->instrumentAddress(I, InsertBefore, Addr, Alignment,
1694 FixedSize, IsWrite, nullptr, UseCalls,
1695 Exp, RTCI);
1696 }
1697 }
1698 Pass->instrumentUnusualSizeOrAlignment(I, InsertBefore, Addr, TypeStoreSize,
1699 IsWrite, nullptr, UseCalls, Exp, RTCI);
1700}
1701
1702void AddressSanitizer::instrumentMaskedLoadOrStore(
1705 MaybeAlign Alignment, unsigned Granularity, Type *OpType, bool IsWrite,
1706 Value *SizeArgument, bool UseCalls, uint32_t Exp,
1707 RuntimeCallInserter &RTCI) {
1709 TypeSize ElemTypeSize = DL.getTypeStoreSizeInBits(VTy->getScalarType());
1710 auto Zero = ConstantInt::get(IntptrTy, 0);
1711
1714 if (EVL) {
1715
1716
1718 Value *IsEVLZero = IB.CreateICmpNE(EVL, ConstantInt::get(EVLType, 0));
1720 IB.SetInsertPoint(LoopInsertBefore);
1721
1722 EVL = IB.CreateZExtOrTrunc(EVL, IntptrTy);
1723
1724
1725 Value *EC = IB.CreateElementCount(IntptrTy, VTy->getElementCount());
1726 EVL = IB.CreateBinaryIntrinsic(Intrinsic::umin, EVL, EC);
1727 } else {
1728 EVL = IB.CreateElementCount(IntptrTy, VTy->getElementCount());
1729 }
1730
1731
1732 if (Stride)
1733 Stride = IB.CreateZExtOrTrunc(Stride, IntptrTy);
1734
1737 Value *MaskElem = IRB.CreateExtractElement(Mask, Index);
1738 if (auto *MaskElemC = dyn_cast(MaskElem)) {
1739 if (MaskElemC->isZero())
1740
1741 return;
1742
1743 } else {
1744
1745 Instruction *ThenTerm = SplitBlockAndInsertIfThen(
1746 MaskElem, &*IRB.GetInsertPoint(), false);
1747 IRB.SetInsertPoint(ThenTerm);
1748 }
1749
1750 Value *InstrumentedAddress;
1752 assert(
1753 cast(Addr->getType())->getElementType()->isPointerTy() &&
1754 "Expected vector of pointer.");
1755 InstrumentedAddress = IRB.CreateExtractElement(Addr, Index);
1756 } else if (Stride) {
1758 InstrumentedAddress = IRB.CreatePtrAdd(Addr, Index);
1759 } else {
1761 }
1763 Alignment, Granularity, ElemTypeSize, IsWrite,
1764 SizeArgument, UseCalls, Exp, RTCI);
1765 });
1766}
1767
1771 RuntimeCallInserter &RTCI) {
1772 Value *Addr = O.getPtr();
1773
1774
1775
1776
1777
1778
1779
1780
1781
1782
1783
1784
1786
1788
1789
1791 if (G && ( || GlobalIsLinkerInitialized(G)) &&
1792 isSafeAccess(ObjSizeVis, Addr, O.TypeStoreSize)) {
1793 NumOptimizedAccessesToGlobalVar++;
1794 return;
1795 }
1796 }
1797
1799
1801 isSafeAccess(ObjSizeVis, Addr, O.TypeStoreSize)) {
1802 NumOptimizedAccessesToStackVar++;
1803 return;
1804 }
1805 }
1806
1807 if (O.IsWrite)
1808 NumInstrumentedWrites++;
1809 else
1810 NumInstrumentedReads++;
1811
1812 if (O.MaybeByteOffset) {
1815
1817 if (TargetTriple.isRISCV()) {
1819
1820
1822 static_cast<unsigned>(LongSize)) {
1826 }
1827 }
1828 Addr = IB.CreateGEP(Ty, Addr, {OffsetOp});
1829 }
1830
1831 unsigned Granularity = 1 << Mapping.Scale;
1832 if (O.MaybeMask) {
1833 instrumentMaskedLoadOrStore(this, DL, IntptrTy, O.MaybeMask, O.MaybeEVL,
1834 O.MaybeStride, O.getInsn(), Addr, O.Alignment,
1835 Granularity, O.OpType, O.IsWrite, nullptr,
1836 UseCalls, Exp, RTCI);
1837 } else {
1839 Granularity, O.TypeStoreSize, O.IsWrite, nullptr,
1840 UseCalls, Exp, RTCI);
1841 }
1842}
1843
1845 Value *Addr, bool IsWrite,
1846 size_t AccessSizeIndex,
1847 Value *SizeArgument,
1848 uint32_t Exp,
1849 RuntimeCallInserter &RTCI) {
1851 Value *ExpVal = Exp == 0 ? nullptr : ConstantInt::get(IRB.getInt32Ty(), Exp);
1853 if (SizeArgument) {
1854 if (Exp == 0)
1855 Call = RTCI.createRuntimeCall(IRB, AsanErrorCallbackSized[IsWrite][0],
1856 {Addr, SizeArgument});
1857 else
1858 Call = RTCI.createRuntimeCall(IRB, AsanErrorCallbackSized[IsWrite][1],
1859 {Addr, SizeArgument, ExpVal});
1860 } else {
1861 if (Exp == 0)
1862 Call = RTCI.createRuntimeCall(
1863 IRB, AsanErrorCallback[IsWrite][0][AccessSizeIndex], Addr);
1864 else
1865 Call = RTCI.createRuntimeCall(
1866 IRB, AsanErrorCallback[IsWrite][1][AccessSizeIndex], {Addr, ExpVal});
1867 }
1868
1870 return Call;
1871}
1872
1874 Value *ShadowValue,
1875 uint32_t TypeStoreSize) {
1876 size_t Granularity = static_cast<size_t>(1) << Mapping.Scale;
1877
1878 Value *LastAccessedByte =
1879 IRB.CreateAnd(AddrLong, ConstantInt::get(IntptrTy, Granularity - 1));
1880
1881 if (TypeStoreSize / 8 > 1)
1882 LastAccessedByte = IRB.CreateAdd(
1883 LastAccessedByte, ConstantInt::get(IntptrTy, TypeStoreSize / 8 - 1));
1884
1885 LastAccessedByte =
1886 IRB.CreateIntCast(LastAccessedByte, ShadowValue->getType(), false);
1887
1888 return IRB.CreateICmpSGE(LastAccessedByte, ShadowValue);
1889}
1890
1891Instruction *AddressSanitizer::instrumentAMDGPUAddress(
1893 uint32_t TypeStoreSize, bool IsWrite, Value *SizeArgument) {
1894
1896 return nullptr;
1898
1900 return InsertBefore;
1901
1903 Value *IsShared = IRB.CreateCall(AMDGPUAddressShared, {Addr});
1905 Value *IsSharedOrPrivate = IRB.CreateOr(IsShared, IsPrivate);
1907 Value *AddrSpaceZeroLanding =
1910 return InsertBefore;
1911}
1912
1917 if (!Recover) {
1921 }
1922
1923 auto *Trm =
1926 Trm->getParent()->setName("asan.report");
1927
1928 if (Recover)
1929 return Trm;
1930
1935}
1936
1937void AddressSanitizer::instrumentAddress(Instruction *OrigIns,
1940 uint32_t TypeStoreSize, bool IsWrite,
1941 Value *SizeArgument, bool UseCalls,
1942 uint32_t Exp,
1943 RuntimeCallInserter &RTCI) {
1944 if (TargetTriple.isAMDGPU()) {
1945 InsertBefore = instrumentAMDGPUAddress(OrigIns, InsertBefore, Addr,
1946 TypeStoreSize, IsWrite, SizeArgument);
1947 if (!InsertBefore)
1948 return;
1949 }
1950
1953
1955 const ASanAccessInfo AccessInfo(IsWrite, CompileKernel, AccessSizeIndex);
1956 IRB.CreateIntrinsic(Intrinsic::asan_check_memaccess, {},
1958 ConstantInt::get(Int32Ty, AccessInfo.Packed)});
1959 return;
1960 }
1961
1963 if (UseCalls) {
1964 if (Exp == 0)
1965 RTCI.createRuntimeCall(
1966 IRB, AsanMemoryAccessCallback[IsWrite][0][AccessSizeIndex], AddrLong);
1967 else
1968 RTCI.createRuntimeCall(
1969 IRB, AsanMemoryAccessCallback[IsWrite][1][AccessSizeIndex],
1970 {AddrLong, ConstantInt::get(IRB.getInt32Ty(), Exp)});
1971 return;
1972 }
1973
1974 Type *ShadowTy =
1975 IntegerType::get(*C, std::max(8U, TypeStoreSize >> Mapping.Scale));
1977 Value *ShadowPtr = memToShadow(AddrLong, IRB);
1978 const uint64_t ShadowAlign =
1979 std::max<uint64_t>(Alignment.valueOrOne().value() >> Mapping.Scale, 1);
1981 ShadowTy, IRB.CreateIntToPtr(ShadowPtr, ShadowPtrTy), Align(ShadowAlign));
1982
1984 size_t Granularity = 1ULL << Mapping.Scale;
1986
1987 bool GenSlowPath = (ClAlwaysSlowPath || (TypeStoreSize < 8 * Granularity));
1988
1989 if (TargetTriple.isAMDGCN()) {
1990 if (GenSlowPath) {
1991 auto *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue, TypeStoreSize);
1993 }
1994 CrashTerm = genAMDGPUReportBlock(IRB, Cmp, Recover);
1995 } else if (GenSlowPath) {
1996
1997
2003 Value *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue, TypeStoreSize);
2004 if (Recover) {
2006 } else {
2012 }
2013 } else {
2015 }
2016
2018 CrashTerm, AddrLong, IsWrite, AccessSizeIndex, SizeArgument, Exp, RTCI);
2021}
2022
2023
2024
2025
2026
2027void AddressSanitizer::instrumentUnusualSizeOrAlignment(
2029 TypeSize TypeStoreSize, bool IsWrite, Value *SizeArgument, bool UseCalls,
2030 uint32_t Exp, RuntimeCallInserter &RTCI) {
2034
2036 if (UseCalls) {
2037 if (Exp == 0)
2038 RTCI.createRuntimeCall(IRB, AsanMemoryAccessCallbackSized[IsWrite][0],
2039 {AddrLong, Size});
2040 else
2041 RTCI.createRuntimeCall(
2042 IRB, AsanMemoryAccessCallbackSized[IsWrite][1],
2043 {AddrLong, Size, ConstantInt::get(IRB.getInt32Ty(), Exp)});
2044 } else {
2045 Value *SizeMinusOne = IRB.CreateSub(Size, ConstantInt::get(IntptrTy, 1));
2047 IRB.CreateAdd(AddrLong, SizeMinusOne),
2050 RTCI);
2052 Exp, RTCI);
2053 }
2054}
2055
2056void ModuleAddressSanitizer::poisonOneInitializer(Function &GlobalInit) {
2057
2060
2061
2062 Value *ModuleNameAddr =
2064 IRB.CreateCall(AsanPoisonGlobals, ModuleNameAddr);
2065
2066
2067 for (auto &BB : GlobalInit)
2070}
2071
2072void ModuleAddressSanitizer::createInitializerPoisonCalls() {
2073 GlobalVariable *GV = M.getGlobalVariable("llvm.global_ctors");
2074 if (!GV)
2075 return;
2076
2078 if (!CA)
2079 return;
2080
2084
2085
2089
2091 continue;
2092 poisonOneInitializer(*F);
2093 }
2094 }
2095}
2096
2098ModuleAddressSanitizer::getExcludedAliasedGlobal(const GlobalAlias &GA) const {
2099
2100
2101
2102
2103 assert(CompileKernel && "Only expecting to be called when compiling kernel");
2104
2106
2107
2108
2111
2112 return nullptr;
2113}
2114
2115bool ModuleAddressSanitizer::shouldInstrumentGlobal(GlobalVariable *G) const {
2116 Type *Ty = G->getValueType();
2118
2119 if (G->hasSanitizerMetadata() && G->getSanitizerMetadata().NoAddress)
2120 return false;
2121 if (!Ty->isSized()) return false;
2122 if (->hasInitializer()) return false;
2124 return false;
2126
2127
2128
2129 if (G->isThreadLocal()) return false;
2130
2131 if (G->getAlign() && *G->getAlign() > getMinRedzoneSizeForGlobal()) return false;
2132
2133
2134
2135
2136
2137 if (!TargetTriple.isOSBinFormatCOFF()) {
2138 if (->hasExactDefinition() || G->hasComdat())
2139 return false;
2140 } else {
2141
2142 if (G->isInterposable())
2143 return false;
2144
2145
2146 if (G->hasAvailableExternallyLinkage())
2147 return false;
2148 }
2149
2150
2151
2152 if (Comdat *C = G->getComdat()) {
2153 switch (C->getSelectionKind()) {
2157 break;
2160 return false;
2161 }
2162 }
2163
2164 if (G->hasSection()) {
2165
2166
2167
2168 if (CompileKernel)
2169 return false;
2170
2172
2173
2174 if (Section == "llvm.metadata") return false;
2175
2176 if (Section.contains("__llvm") || Section.contains("__LLVM"))
2177 return false;
2178
2179
2180
2181 if (Section.starts_with(".preinit_array") ||
2182 Section.starts_with(".init_array") ||
2183 Section.starts_with(".fini_array")) {
2184 return false;
2185 }
2186
2187
2188
2189 if (TargetTriple.isOSBinFormatELF()) {
2191 [](char c) { return llvm::isAlnum(c) || c == '_'; }))
2192 return false;
2193 }
2194
2195
2196
2197
2198
2199
2200
2201
2202
2203 if (TargetTriple.isOSBinFormatCOFF() && Section.contains('$')) {
2204 LLVM_DEBUG(dbgs() << "Ignoring global in sorted section (contains '$'): "
2205 << *G << "\n");
2206 return false;
2207 }
2208
2209 if (TargetTriple.isOSBinFormatMachO()) {
2210 StringRef ParsedSegment, ParsedSection;
2211 unsigned TAA = 0, StubSize = 0;
2212 bool TAAParsed;
2214 Section, ParsedSegment, ParsedSection, TAA, TAAParsed, StubSize));
2215
2216
2217
2218
2219 if (ParsedSegment == "__OBJC" ||
2220 (ParsedSegment == "__DATA" && ParsedSection.starts_with("__objc_"))) {
2221 LLVM_DEBUG(dbgs() << "Ignoring ObjC runtime global: " << *G << "\n");
2222 return false;
2223 }
2224
2225
2226
2227
2228
2229
2230
2231
2232 if (ParsedSegment == "__DATA" && ParsedSection == "__cfstring") {
2233 LLVM_DEBUG(dbgs() << "Ignoring CFString: " << *G << "\n");
2234 return false;
2235 }
2236
2237
2239 LLVM_DEBUG(dbgs() << "Ignoring a cstring literal: " << *G << "\n");
2240 return false;
2241 }
2242 }
2243 }
2244
2245 if (CompileKernel) {
2246
2247
2248 if (G->getName().starts_with("__"))
2249 return false;
2250 }
2251
2252 return true;
2253}
2254
2255
2256
2257
2258bool ModuleAddressSanitizer::ShouldUseMachOGlobalsSection() const {
2259 if (!TargetTriple.isOSBinFormatMachO())
2260 return false;
2261
2262 if (TargetTriple.isMacOSX() && !TargetTriple.isMacOSXVersionLT(10, 11))
2263 return true;
2264 if (TargetTriple.isiOS() && !TargetTriple.isOSVersionLT(9))
2265 return true;
2266 if (TargetTriple.isWatchOS() && !TargetTriple.isOSVersionLT(2))
2267 return true;
2268 if (TargetTriple.isDriverKit())
2269 return true;
2270 if (TargetTriple.isXROS())
2271 return true;
2272
2273 return false;
2274}
2275
2276StringRef ModuleAddressSanitizer::getGlobalMetadataSection() const {
2277 switch (TargetTriple.getObjectFormat()) {
2279 case Triple::ELF: return "asan_globals";
2280 case Triple::MachO: return "__DATA,__asan_globals,regular";
2287 "ModuleAddressSanitizer not implemented for object file format");
2289 break;
2290 }
2292}
2293
2294void ModuleAddressSanitizer::initializeCallbacks() {
2296
2297
2298 AsanPoisonGlobals =
2300 AsanUnpoisonGlobals =
2302
2303
2304 AsanRegisterGlobals = M.getOrInsertFunction(
2306 AsanUnregisterGlobals = M.getOrInsertFunction(
2308
2309
2310
2311 AsanRegisterImageGlobals = M.getOrInsertFunction(
2313 AsanUnregisterImageGlobals = M.getOrInsertFunction(
2315
2316 AsanRegisterElfGlobals =
2318 IntptrTy, IntptrTy, IntptrTy);
2319 AsanUnregisterElfGlobals =
2321 IntptrTy, IntptrTy, IntptrTy);
2322}
2323
2324
2325
2326void ModuleAddressSanitizer::SetComdatForGlobalMetadata(
2328 Module &M = *G->getParent();
2330 if () {
2331 if (->hasName()) {
2332
2333
2334 assert(G->hasLocalLinkage());
2335 G->setName(genName("anon_global"));
2336 }
2337
2338 if (!InternalSuffix.empty() && G->hasLocalLinkage()) {
2339 std::string Name = std::string(G->getName());
2340 Name += InternalSuffix;
2341 C = M.getOrInsertComdat(Name);
2342 } else {
2343 C = M.getOrInsertComdat(G->getName());
2344 }
2345
2346
2347
2348
2349 if (TargetTriple.isOSBinFormatCOFF()) {
2351 if (G->hasPrivateLinkage())
2353 }
2355 }
2356
2358 Metadata->setComdat(G->getComdat());
2359}
2360
2361
2362
2364ModuleAddressSanitizer::CreateMetadataGlobal(Constant *Initializer,
2366 auto Linkage = TargetTriple.isOSBinFormatMachO()
2370 M, Initializer->getType(), false, Linkage, Initializer,
2372 Metadata->setSection(getGlobalMetadataSection());
2373
2374
2377}
2378
2379Instruction *ModuleAddressSanitizer::CreateAsanModuleDtor() {
2383 AsanDtorFunction->addFnAttr(Attribute::NoUnwind);
2384
2387
2389}
2390
2391void ModuleAddressSanitizer::InstrumentGlobalsCOFF(
2394 assert(ExtendedGlobals.size() == MetadataInitializers.size());
2395 auto &DL = M.getDataLayout();
2396
2398 for (size_t i = 0; i < ExtendedGlobals.size(); i++) {
2399 Constant *Initializer = MetadataInitializers[i];
2403 Metadata->setMetadata(LLVMContext::MD_associated, MD);
2404 MetadataGlobals[i] = Metadata;
2405
2406
2407
2408
2409 unsigned SizeOfGlobalStruct = DL.getTypeAllocSize(Initializer->getType());
2411 "global metadata will not be padded appropriately");
2413
2414 SetComdatForGlobalMetadata(G, Metadata, "");
2415 }
2416
2417
2418
2419 if (!MetadataGlobals.empty())
2421}
2422
2423void ModuleAddressSanitizer::instrumentGlobalsELF(
2426 const std::string &UniqueModuleId) {
2427 assert(ExtendedGlobals.size() == MetadataInitializers.size());
2428
2429
2430
2431
2432
2433 bool UseComdatForGlobalsGC = UseOdrIndicator && !UniqueModuleId.empty();
2434
2436 for (size_t i = 0; i < ExtendedGlobals.size(); i++) {
2439 CreateMetadataGlobal(MetadataInitializers[i], G->getName());
2441 Metadata->setMetadata(LLVMContext::MD_associated, MD);
2442 MetadataGlobals[i] = Metadata;
2443
2444 if (UseComdatForGlobalsGC)
2445 SetComdatForGlobalMetadata(G, Metadata, UniqueModuleId);
2446 }
2447
2448
2449
2450 if (!MetadataGlobals.empty())
2452
2453
2454
2455
2456
2457
2458
2463
2464
2467 "__start_" + getGlobalMetadataSection());
2471 "__stop_" + getGlobalMetadataSection());
2473
2474
2476 IRB.CreateCall(AsanRegisterElfGlobals,
2480
2481
2482
2483 if (DestructorKind != AsanDtorKind::None && !MetadataGlobals.empty()) {
2484 IRBuilder<> IrbDtor(CreateAsanModuleDtor());
2485 IrbDtor.CreateCall(AsanUnregisterElfGlobals,
2489 }
2490}
2491
2492void ModuleAddressSanitizer::InstrumentGlobalsMachO(
2495 assert(ExtendedGlobals.size() == MetadataInitializers.size());
2496
2497
2498
2499
2502
2503 for (size_t i = 0; i < ExtendedGlobals.size(); i++) {
2504 Constant *Initializer = MetadataInitializers[i];
2507
2508
2509
2510 auto LivenessBinder =
2515 Twine("__asan_binder_") + G->getName());
2516 Liveness->setSection("__DATA,__asan_liveness,regular,live_support");
2517 LivenessGlobals[i] = Liveness;
2518 }
2519
2520
2521
2522
2523
2524 if (!LivenessGlobals.empty())
2526
2527
2528
2529
2530
2531
2532
2537
2539 IRB.CreateCall(AsanRegisterImageGlobals,
2541
2542
2543
2545 IRBuilder<> IrbDtor(CreateAsanModuleDtor());
2546 IrbDtor.CreateCall(AsanUnregisterImageGlobals,
2548 }
2549}
2550
2551void ModuleAddressSanitizer::InstrumentGlobalsWithMetadataArray(
2554 assert(ExtendedGlobals.size() == MetadataInitializers.size());
2555 unsigned N = ExtendedGlobals.size();
2557
2558
2559
2560 ArrayType *ArrayOfGlobalStructTy =
2564 ConstantArray::get(ArrayOfGlobalStructTy, MetadataInitializers), "");
2565 if (Mapping.Scale > 3)
2566 AllGlobals->setAlignment(Align(1ULL << Mapping.Scale));
2567
2571 ConstantInt::get(IntptrTy, N)});
2572
2573
2574
2576 IRBuilder<> IrbDtor(CreateAsanModuleDtor());
2577 IrbDtor.CreateCall(AsanUnregisterGlobals,
2579 ConstantInt::get(IntptrTy, N)});
2580 }
2581}
2582
2583
2584
2585
2586
2587
2588void ModuleAddressSanitizer::instrumentGlobals(IRBuilder<> &IRB,
2589 bool *CtorComdat) {
2590
2591
2593 if (CompileKernel) {
2594 for (auto &GA : M.aliases()) {
2595 if (const GlobalVariable *GV = getExcludedAliasedGlobal(GA))
2596 AliasedGlobalExclusions.insert(GV);
2597 }
2598 }
2599
2601 for (auto &G : M.globals()) {
2602 if (!AliasedGlobalExclusions.count(&G) && shouldInstrumentGlobal(&G))
2604 }
2605
2606 size_t n = GlobalsToChange.size();
2607 auto &DL = M.getDataLayout();
2608
2609
2610
2611
2612
2613
2614
2615
2616
2617
2618
2620 StructType::get(IntptrTy, IntptrTy, IntptrTy, IntptrTy, IntptrTy,
2621 IntptrTy, IntptrTy, IntptrTy);
2624
2625 for (size_t i = 0; i < n; i++) {
2627
2629 if (G->hasSanitizerMetadata())
2630 MD = G->getSanitizerMetadata();
2631
2632
2633
2634
2635 std::string NameForGlobal = G->getName().str();
2638 true, genName("global"));
2639
2640 Type *Ty = G->getValueType();
2641 const uint64_t SizeInBytes = DL.getTypeAllocSize(Ty);
2644
2648
2649
2654 M, NewTy, G->isConstant(), Linkage, NewInitializer, "", G,
2655 G->getThreadLocalMode(), G->getAddressSpace());
2657 NewGlobal->setComdat(G->getComdat());
2659
2660
2661
2663
2664
2665 if (TargetTriple.isOSBinFormatMachO() && ->hasSection() &&
2666 G->isConstant()) {
2668 if (Seq && Seq->isCString())
2669 NewGlobal->setSection("__TEXT,__asan_cstring,regular");
2670 }
2671
2672
2673
2675
2676 Value *Indices2[2];
2677 Indices2[0] = IRB.getInt32(0);
2678 Indices2[1] = IRB.getInt32(0);
2679
2683 G->eraseFromParent();
2684 NewGlobals[i] = NewGlobal;
2685
2687 GlobalValue *InstrumentedGlobal = NewGlobal;
2688
2689 bool CanUsePrivateAliases =
2690 TargetTriple.isOSBinFormatELF() || TargetTriple.isOSBinFormatMachO() ||
2691 TargetTriple.isOSBinFormatWasm();
2692 if (CanUsePrivateAliases && UsePrivateAlias) {
2693
2694
2695 InstrumentedGlobal =
2697 }
2698
2699
2701 ODRIndicator = ConstantInt::get(IntptrTy, -1);
2702 } else if (UseOdrIndicator) {
2703
2704
2705 auto *ODRIndicatorSym =
2710
2711
2712 ODRIndicatorSym->setVisibility(NewGlobal->getVisibility());
2713 ODRIndicatorSym->setDLLStorageClass(NewGlobal->getDLLStorageClass());
2714 ODRIndicatorSym->setAlignment(Align(1));
2716 }
2717
2719 GlobalStructTy,
2721 ConstantInt::get(IntptrTy, SizeInBytes),
2722 ConstantInt::get(IntptrTy, SizeInBytes + RightRedzoneSize),
2725 ConstantInt::get(IntptrTy, MD.IsDynInit),
2727
2728 LLVM_DEBUG(dbgs() << "NEW GLOBAL: " << *NewGlobal << "\n");
2729
2730 Initializers[i] = Initializer;
2731 }
2732
2733
2734
2736 for (size_t i = 0; i < n; i++) {
2738 if (G->getName().empty()) continue;
2739 GlobalsToAddToUsedList.push_back(G);
2740 }
2742
2743 if (UseGlobalsGC && TargetTriple.isOSBinFormatELF()) {
2744
2745
2746
2747
2748 *CtorComdat = true;
2749 instrumentGlobalsELF(IRB, NewGlobals, Initializers, getUniqueModuleId(&M));
2750 } else if (n == 0) {
2751
2752
2753 *CtorComdat = TargetTriple.isOSBinFormatELF();
2754 } else {
2755 *CtorComdat = false;
2756 if (UseGlobalsGC && TargetTriple.isOSBinFormatCOFF()) {
2757 InstrumentGlobalsCOFF(IRB, NewGlobals, Initializers);
2758 } else if (UseGlobalsGC && ShouldUseMachOGlobalsSection()) {
2759 InstrumentGlobalsMachO(IRB, NewGlobals, Initializers);
2760 } else {
2761 InstrumentGlobalsWithMetadataArray(IRB, NewGlobals, Initializers);
2762 }
2763 }
2764
2765
2767 createInitializerPoisonCalls();
2768
2770}
2771
2772uint64_t
2773ModuleAddressSanitizer::getRedzoneSizeForGlobal(uint64_t SizeInBytes) const {
2774 constexpr uint64_t kMaxRZ = 1 << 18;
2775 const uint64_t MinRZ = getMinRedzoneSizeForGlobal();
2776
2777 uint64_t RZ = 0;
2778 if (SizeInBytes <= MinRZ / 2) {
2779
2780
2781
2782 RZ = MinRZ - SizeInBytes;
2783 } else {
2784
2785 RZ = std::clamp((SizeInBytes / MinRZ / 4) * MinRZ, MinRZ, kMaxRZ);
2786
2787
2788 if (SizeInBytes % MinRZ)
2789 RZ += MinRZ - (SizeInBytes % MinRZ);
2790 }
2791
2792 assert((RZ + SizeInBytes) % MinRZ == 0);
2793
2794 return RZ;
2795}
2796
2797int ModuleAddressSanitizer::GetAsanVersion() const {
2798 int LongSize = M.getDataLayout().getPointerSizeInBits();
2799 bool isAndroid = M.getTargetTriple().isAndroid();
2801
2802
2803 Version += (LongSize == 32 && isAndroid);
2805}
2806
2807GlobalVariable *ModuleAddressSanitizer::getOrCreateModuleName() {
2808 if (!ModuleName) {
2809
2810
2811 ModuleName =
2813 false, genName("module"));
2814 }
2815 return ModuleName;
2816}
2817
2818bool ModuleAddressSanitizer::instrumentModule() {
2819 initializeCallbacks();
2820
2823
2824
2825
2827 if (CompileKernel) {
2828
2829
2831 } else {
2832 std::string AsanVersion = std::to_string(GetAsanVersion());
2833 std::string VersionCheckName =
2835 std::tie(AsanCtorFunction, std::ignore) =
2838 {}, VersionCheckName);
2839 }
2840 }
2841
2842 bool CtorComdat = true;
2845 if (AsanCtorFunction) {
2846 IRBuilder<> IRB(AsanCtorFunction->getEntryBlock().getTerminator());
2847 instrumentGlobals(IRB, &CtorComdat);
2848 } else {
2850 instrumentGlobals(IRB, &CtorComdat);
2851 }
2852 }
2853
2855
2856
2857
2858
2859 if (UseCtorComdat && TargetTriple.isOSBinFormatELF() && CtorComdat) {
2860 if (AsanCtorFunction) {
2863 }
2864 if (AsanDtorFunction) {
2867 }
2868 } else {
2869 if (AsanCtorFunction)
2871 if (AsanDtorFunction)
2873 }
2874
2875 return true;
2876}
2877
2878void AddressSanitizer::initializeCallbacks(const TargetLibraryInfo *TLI) {
2880
2881
2882 for (int Exp = 0; Exp < 2; Exp++) {
2883 for (size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
2884 const std::string TypeStr = AccessIsWrite ? "store" : "load";
2885 const std::string ExpStr = Exp ? "exp_" : "";
2886 const std::string EndingStr = Recover ? "_noabort" : "";
2887
2890 AttributeList AL2;
2891 AttributeList AL1;
2892 if (Exp) {
2896 if (auto AK = TLI->getExtAttrForI32Param(false)) {
2897 AL2 = AL2.addParamAttribute(*C, 2, AK);
2898 AL1 = AL1.addParamAttribute(*C, 1, AK);
2899 }
2900 }
2901 AsanErrorCallbackSized[AccessIsWrite][Exp] = M.getOrInsertFunction(
2904
2905 AsanMemoryAccessCallbackSized[AccessIsWrite][Exp] = M.getOrInsertFunction(
2908
2910 AccessSizeIndex++) {
2911 const std::string Suffix = TypeStr + itostr(1ULL << AccessSizeIndex);
2912 AsanErrorCallback[AccessIsWrite][Exp][AccessSizeIndex] =
2913 M.getOrInsertFunction(
2916
2917 AsanMemoryAccessCallback[AccessIsWrite][Exp][AccessSizeIndex] =
2918 M.getOrInsertFunction(
2921 }
2922 }
2923 }
2924
2925 const std::string MemIntrinCallbackPrefix =
2927 ? std::string("")
2929 AsanMemmove = M.getOrInsertFunction(MemIntrinCallbackPrefix + "memmove",
2930 PtrTy, PtrTy, PtrTy, IntptrTy);
2931 AsanMemcpy = M.getOrInsertFunction(MemIntrinCallbackPrefix + "memcpy", PtrTy,
2932 PtrTy, PtrTy, IntptrTy);
2933 AsanMemset = M.getOrInsertFunction(MemIntrinCallbackPrefix + "memset",
2935 PtrTy, PtrTy, IRB.getInt32Ty(), IntptrTy);
2936
2937 AsanHandleNoReturnFunc =
2939
2940 AsanPtrCmpFunction =
2942 AsanPtrSubFunction =
2944 if (Mapping.InGlobal)
2945 AsanShadowGlobal = M.getOrInsertGlobal("__asan_shadow",
2947
2948 AMDGPUAddressShared =
2950 AMDGPUAddressPrivate =
2952}
2953
2954bool AddressSanitizer::maybeInsertAsanInitAtFunctionEntry(Function &F) {
2955
2956
2957
2958
2959
2960
2961
2962 if (F.getName().contains(" load]")) {
2966 IRB.CreateCall(AsanInitFunction, {});
2967 return true;
2968 }
2969 return false;
2970}
2971
2972bool AddressSanitizer::maybeInsertDynamicShadowAtFunctionEntry(Function &F) {
2973
2975 return false;
2976
2978 if (Mapping.InGlobal) {
2980
2981
2983 FunctionType::get(IntptrTy, {AsanShadowGlobal->getType()}, false),
2985 false);
2986 LocalDynamicShadow =
2987 IRB.CreateCall(Asm, {AsanShadowGlobal}, ".asan.shadow");
2988 } else {
2989 LocalDynamicShadow =
2990 IRB.CreatePointerCast(AsanShadowGlobal, IntptrTy, ".asan.shadow");
2991 }
2992 } else {
2993 Value *GlobalDynamicAddress = F.getParent()->getOrInsertGlobal(
2995 LocalDynamicShadow = IRB.CreateLoad(IntptrTy, GlobalDynamicAddress);
2996 }
2997 return true;
2998}
2999
3000void AddressSanitizer::markEscapedLocalAllocas(Function &F) {
3001
3002
3003
3004
3005 assert(ProcessedAllocas.empty() && "must process localescape before allocas");
3006
3007
3008
3009 if (.getParent()->getFunction("llvm.localescape")) return;
3010
3011
3012
3015 if (II && II->getIntrinsicID() == Intrinsic::localescape) {
3016
3017 for (Value *Arg : II->args()) {
3020 "non-static alloca arg to localescape");
3021 ProcessedAllocas[AI] = false;
3022 }
3023 break;
3024 }
3025 }
3026}
3027
3028
3029
3030void AddressSanitizer::markCatchParametersAsUninteresting(Function &F) {
3034
3035
3036 for (Value *Operand : CatchPad->arg_operands())
3038 ProcessedAllocas[AI] = false;
3039 }
3040 }
3041 }
3042}
3043
3044bool AddressSanitizer::suppressInstrumentationSiteForDebug(int &Instrumented) {
3045 bool ShouldInstrument =
3048 Instrumented++;
3049 return !ShouldInstrument;
3050}
3051
3052bool AddressSanitizer::instrumentFunction(Function &F,
3055 bool FunctionModified = false;
3056
3057
3058 if (F.hasFnAttribute(Attribute::Naked))
3059 return FunctionModified;
3060
3061
3062
3063
3064 if (maybeInsertAsanInitAtFunctionEntry(F))
3065 FunctionModified = true;
3066
3067
3068 if (.hasFnAttribute(Attribute::SanitizeAddress)) return FunctionModified;
3069
3070 if (F.hasFnAttribute(Attribute::DisableSanitizerInstrumentation))
3071 return FunctionModified;
3072
3073 LLVM_DEBUG(dbgs() << "ASAN instrumenting:\n" << F << "\n");
3074
3075 initializeCallbacks(TLI);
3076
3077 FunctionStateRAII CleanupObj(this);
3078
3079 RuntimeCallInserter RTCI(F);
3080
3081 FunctionModified |= maybeInsertDynamicShadowAtFunctionEntry(F);
3082
3083
3084
3085 markEscapedLocalAllocas(F);
3086
3087 if (TargetTriple.isOSWindows())
3088 markCatchParametersAsUninteresting(F);
3089
3090
3091
3098
3099
3100 for (auto &BB : F) {
3102 TempsToInstrument.clear();
3103 int NumInsnsPerBB = 0;
3104 for (auto &Inst : BB) {
3105 if (LooksLikeCodeInBug11395(&Inst)) return false;
3106
3107 if (Inst.hasMetadata(LLVMContext::MD_nosanitize))
3108 continue;
3111
3112 if (!InterestingOperands.empty()) {
3113 for (auto &Operand : InterestingOperands) {
3115 Value *Ptr = Operand.getPtr();
3116
3117
3118
3119 if (Operand.MaybeMask) {
3120 if (TempsToInstrument.count(Ptr))
3121 continue;
3122 } else {
3123 if (!TempsToInstrument.insert(Ptr).second)
3124 continue;
3125 }
3126 }
3127 OperandsToInstrument.push_back(Operand);
3128 NumInsnsPerBB++;
3129 }
3134 PointerComparisonsOrSubtracts.push_back(&Inst);
3136
3138 NumInsnsPerBB++;
3139 } else {
3141
3142 TempsToInstrument.clear();
3145 }
3148 }
3150 }
3151 }
3152
3153 bool UseCalls = (InstrumentationWithCallsThreshold >= 0 &&
3154 OperandsToInstrument.size() + IntrinToInstrument.size() >
3155 (unsigned)InstrumentationWithCallsThreshold);
3158
3159
3160 int NumInstrumented = 0;
3161 for (auto &Operand : OperandsToInstrument) {
3162 if (!suppressInstrumentationSiteForDebug(NumInstrumented))
3163 instrumentMop(ObjSizeVis, Operand, UseCalls,
3164 F.getDataLayout(), RTCI);
3165 FunctionModified = true;
3166 }
3167 for (auto *Inst : IntrinToInstrument) {
3168 if (!suppressInstrumentationSiteForDebug(NumInstrumented))
3169 instrumentMemIntrinsic(Inst, RTCI);
3170 FunctionModified = true;
3171 }
3172
3173 FunctionStackPoisoner FSP(F, *this, RTCI);
3174 bool ChangedStack = FSP.runOnFunction();
3175
3176
3177
3178 for (auto *CI : NoReturnCalls) {
3180 RTCI.createRuntimeCall(IRB, AsanHandleNoReturnFunc, {});
3181 }
3182
3183 for (auto *Inst : PointerComparisonsOrSubtracts) {
3184 instrumentPointerComparisonOrSubtraction(Inst, RTCI);
3185 FunctionModified = true;
3186 }
3187
3188 if (ChangedStack || !NoReturnCalls.empty())
3189 FunctionModified = true;
3190
3191 LLVM_DEBUG(dbgs() << "ASAN done instrumenting: " << FunctionModified << " "
3192 << F << "\n");
3193
3194 return FunctionModified;
3195}
3196
3197
3198
3199
3200bool AddressSanitizer::LooksLikeCodeInBug11395(Instruction *I) {
3201 if (LongSize != 32) return false;
3203 if (!CI || !CI->isInlineAsm()) return false;
3205 return false;
3206
3207 return true;
3208}
3209
3210void FunctionStackPoisoner::initializeCallbacks(Module &M) {
3214 const char *MallocNameTemplate =
3219 std::string Suffix = itostr(Index);
3220 AsanStackMallocFunc[Index] = M.getOrInsertFunction(
3221 MallocNameTemplate + Suffix, IntptrTy, IntptrTy);
3222 AsanStackFreeFunc[Index] =
3224 IRB.getVoidTy(), IntptrTy, IntptrTy);
3225 }
3226 }
3227 if (ASan.UseAfterScope) {
3228 AsanPoisonStackMemoryFunc = M.getOrInsertFunction(
3230 AsanUnpoisonStackMemoryFunc = M.getOrInsertFunction(
3232 }
3233
3234 for (size_t Val : {0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0xf1, 0xf2,
3235 0xf3, 0xf5, 0xf8}) {
3236 std::ostringstream Name;
3238 Name << std::setw(2) << std::setfill('0') << std::hex << Val;
3239 AsanSetShadowFunc[Val] =
3240 M.getOrInsertFunction(Name.str(), IRB.getVoidTy(), IntptrTy, IntptrTy);
3241 }
3242
3243 AsanAllocaPoisonFunc = M.getOrInsertFunction(
3245 AsanAllocasUnpoisonFunc = M.getOrInsertFunction(
3247}
3248
3249void FunctionStackPoisoner::copyToShadowInline(ArrayRef<uint8_t> ShadowMask,
3251 size_t Begin, size_t End,
3253 Value *ShadowBase) {
3254 if (Begin >= End)
3255 return;
3256
3257 const size_t LargestStoreSizeInBytes =
3258 std::min<size_t>(sizeof(uint64_t), ASan.LongSize / 8);
3259
3260 const bool IsLittleEndian = F.getDataLayout().isLittleEndian();
3261
3262
3263
3264
3265
3266 for (size_t i = Begin; i < End;) {
3267 if (!ShadowMask[i]) {
3268 assert(!ShadowBytes[i]);
3269 ++i;
3270 continue;
3271 }
3272
3273 size_t StoreSizeInBytes = LargestStoreSizeInBytes;
3274
3275 while (StoreSizeInBytes > End - i)
3276 StoreSizeInBytes /= 2;
3277
3278
3279 for (size_t j = StoreSizeInBytes - 1; j && !ShadowMask[i + j]; --j) {
3280 while (j <= StoreSizeInBytes / 2)
3281 StoreSizeInBytes /= 2;
3282 }
3283
3284 uint64_t Val = 0;
3285 for (size_t j = 0; j < StoreSizeInBytes; j++) {
3286 if (IsLittleEndian)
3287 Val |= (uint64_t)ShadowBytes[i + j] << (8 * j);
3288 else
3289 Val = (Val << 8) | ShadowBytes[i + j];
3290 }
3291
3292 Value *Ptr = IRB.CreateAdd(ShadowBase, ConstantInt::get(IntptrTy, i));
3297
3298 i += StoreSizeInBytes;
3299 }
3300}
3301
3302void FunctionStackPoisoner::copyToShadow(ArrayRef<uint8_t> ShadowMask,
3305 copyToShadow(ShadowMask, ShadowBytes, 0, ShadowMask.size(), IRB, ShadowBase);
3306}
3307
3308void FunctionStackPoisoner::copyToShadow(ArrayRef<uint8_t> ShadowMask,
3310 size_t Begin, size_t End,
3313 size_t Done = Begin;
3314 for (size_t i = Begin, j = Begin + 1; i < End; i = j++) {
3315 if (!ShadowMask[i]) {
3316 assert(!ShadowBytes[i]);
3317 continue;
3318 }
3319 uint8_t Val = ShadowBytes[i];
3320 if (!AsanSetShadowFunc[Val])
3321 continue;
3322
3323
3324 for (; j < End && ShadowMask[j] && Val == ShadowBytes[j]; ++j) {
3325 }
3326
3327 if (j - i >= ASan.MaxInlinePoisoningSize) {
3328 copyToShadowInline(ShadowMask, ShadowBytes, Done, i, IRB, ShadowBase);
3329 RTCI.createRuntimeCall(
3330 IRB, AsanSetShadowFunc[Val],
3331 {IRB.CreateAdd(ShadowBase, ConstantInt::get(IntptrTy, i)),
3332 ConstantInt::get(IntptrTy, j - i)});
3334 }
3335 }
3336
3337 copyToShadowInline(ShadowMask, ShadowBytes, Done, End, IRB, ShadowBase);
3338}
3339
3340
3341
3345 for (int i = 0;; i++, MaxSize *= 2)
3346 if (LocalStackSize <= MaxSize) return i;
3348}
3349
3350void FunctionStackPoisoner::copyArgsPassedByValToAllocas() {
3351 Instruction *CopyInsertPoint = &F.front().front();
3352 if (CopyInsertPoint == ASan.LocalDynamicShadow) {
3353
3354 CopyInsertPoint = CopyInsertPoint->getNextNode();
3355 assert(CopyInsertPoint);
3356 }
3359 for (Argument &Arg : F.args()) {
3360 if (Arg.hasByValAttr()) {
3361 Type *Ty = Arg.getParamByValType();
3362 const Align Alignment =
3363 DL.getValueOrABITypeAlignment(Arg.getParamAlign(), Ty);
3364
3366 Ty, nullptr,
3367 (Arg.hasName() ? Arg.getName() : "Arg" + Twine(Arg.getArgNo())) +
3368 ".byval");
3370 Arg.replaceAllUsesWith(AI);
3371
3372 uint64_t AllocSize = DL.getTypeAllocSize(Ty);
3373 IRB.CreateMemCpy(AI, Alignment, &Arg, Alignment, AllocSize);
3374 }
3375 }
3376}
3377
3379 Value *ValueIfTrue,
3381 Value *ValueIfFalse) {
3384 PHI->addIncoming(ValueIfFalse, CondBlock);
3386 PHI->addIncoming(ValueIfTrue, ThenBlock);
3387 return PHI;
3388}
3389
3390Value *FunctionStackPoisoner::createAllocaForLayout(
3395 ConstantInt::get(IRB.getInt64Ty(), L.FrameSize),
3396 "MyAlloca");
3397 } else {
3399 nullptr, "MyAlloca");
3401 }
3403 uint64_t FrameAlignment = std::max(L.FrameAlignment, uint64_t(ClRealignStack));
3405 return Alloca;
3406}
3407
3408void FunctionStackPoisoner::createDynamicAllocasInitStorage() {
3411 DynamicAllocaLayout = IRB.CreateAlloca(IntptrTy, nullptr);
3414}
3415
3416void FunctionStackPoisoner::processDynamicAllocas() {
3418 assert(DynamicAllocaPoisonCallVec.empty());
3419 return;
3420 }
3421
3422
3423 for (const auto &APC : DynamicAllocaPoisonCallVec) {
3424 assert(APC.InsBefore);
3426 assert(ASan.isInterestingAlloca(*APC.AI));
3427 assert(!APC.AI->isStaticAlloca());
3428
3430 poisonAlloca(APC.AI, APC.Size, IRB, APC.DoPoison);
3431
3432
3433
3434 }
3435
3436
3437 createDynamicAllocasInitStorage();
3438 for (auto &AI : DynamicAllocaVec)
3439 handleDynamicAllocaCall(AI);
3440 unpoisonDynamicAllocas();
3441}
3442
3443
3444
3445
3446
3447
3449 AddressSanitizer &ASan, Instruction &InsBefore,
3452 for (Instruction *It = Start; It; It = It->getNextNode()) {
3453
3454
3455
3456
3457
3458
3459
3460
3461
3463 continue;
3465
3466
3467
3469 if (!Alloca || ASan.isInterestingAlloca(*Alloca))
3470 continue;
3471
3472 Value *Val = Store->getValueOperand();
3474 bool IsArgInitViaCast =
3477
3478
3479 Val == It->getPrevNode();
3480 bool IsArgInit = IsDirectArgInit || IsArgInitViaCast;
3481 if (!IsArgInit)
3482 continue;
3483
3484 if (IsArgInitViaCast)
3487 continue;
3488 }
3489
3490
3491
3492 return;
3493 }
3494}
3495
3497
3498
3499 if (AI->hasMetadata(LLVMContext::MD_annotation)) {
3500 MDTuple *AllocaAnnotations =
3502 for (auto &Annotation : AllocaAnnotations->operands()) {
3504 continue;
3505 auto AnnotationTuple = cast(Annotation);
3506 for (unsigned Index = 0; Index < AnnotationTuple->getNumOperands();
3507 Index++) {
3508
3509 auto MetadataString =
3511 if (MetadataString->getString() == "alloca_name_altered")
3512 return cast(AnnotationTuple->getOperand(Index + 1))
3513 ->getString();
3514 }
3515 }
3516 }
3518}
3519
3520void FunctionStackPoisoner::processStaticAllocas() {
3521 if (AllocaVec.empty()) {
3522 assert(StaticAllocaPoisonCallVec.empty());
3523 return;
3524 }
3525
3526 int StackMallocIdx = -1;
3527 DebugLoc EntryDebugLocation;
3528 if (auto SP = F.getSubprogram())
3529 EntryDebugLocation =
3531
3534
3535
3536
3537
3538 auto InsBeforeB = InsBefore->getParent();
3539 assert(InsBeforeB == &F.getEntryBlock());
3540 for (auto *AI : StaticAllocasToMoveUp)
3541 if (AI->getParent() == InsBeforeB)
3543
3544
3545
3546
3547
3550 for (Instruction *ArgInitInst : ArgInitInsts)
3551 ArgInitInst->moveBefore(InsBefore->getIterator());
3552
3553
3554 if (LocalEscapeCall)
3556
3562 ASan.getAllocaSizeInBytes(*AI),
3563 0,
3565 AI,
3566 0,
3567 0};
3569 }
3570
3571
3572
3573 uint64_t Granularity = 1ULL << Mapping.Scale;
3574 uint64_t MinHeaderSize = std::max((uint64_t)ASan.LongSize / 2, Granularity);
3577
3578
3580 for (auto &Desc : SVD)
3581 AllocaToSVDMap[Desc.AI] = &Desc;
3582
3583
3584 for (const auto &APC : StaticAllocaPoisonCallVec) {
3585 assert(APC.InsBefore);
3587 assert(ASan.isInterestingAlloca(*APC.AI));
3588 assert(APC.AI->isStaticAlloca());
3589
3591 Desc.LifetimeSize = Desc.Size;
3592 if (const DILocation *FnLoc = EntryDebugLocation.get()) {
3593 if (const DILocation *LifetimeLoc = APC.InsBefore->getDebugLoc().get()) {
3594 if (LifetimeLoc->getFile() == FnLoc->getFile())
3595 if (unsigned Line = LifetimeLoc->getLine())
3596 Desc.Line = std::min(Desc.Line ? Desc.Line : Line, Line);
3597 }
3598 }
3599 }
3600
3602 LLVM_DEBUG(dbgs() << DescriptionString << " --- " << L.FrameSize << "\n");
3603 uint64_t LocalStackSize = L.FrameSize;
3604 bool DoStackMalloc =
3608
3609
3610
3611
3612
3613
3614 DoDynamicAlloca &= !HasInlineAsm && !HasReturnsTwiceCall;
3615 DoStackMalloc &= !HasInlineAsm && !HasReturnsTwiceCall;
3616
3617 Type *PtrTy = F.getDataLayout().getAllocaPtrType(F.getContext());
3618 Value *StaticAlloca =
3619 DoDynamicAlloca ? nullptr : createAllocaForLayout(IRB, L, false);
3620
3621 Value *FakeStackPtr;
3622 Value *FakeStackInt;
3623 Value *LocalStackBase;
3624 Value *LocalStackBaseAlloca;
3626
3627 if (DoStackMalloc) {
3628 LocalStackBaseAlloca =
3629 IRB.CreateAlloca(IntptrTy, nullptr, "asan_local_stack_base");
3631
3632
3633
3634
3635
3636 Constant *OptionDetectUseAfterReturn = F.getParent()->getOrInsertGlobal(
3646 Value *FakeStackValue =
3647 RTCI.createRuntimeCall(IRBIf, AsanStackMallocFunc[StackMallocIdx],
3648 ConstantInt::get(IntptrTy, LocalStackSize));
3650 FakeStackInt = createPHI(IRB, UseAfterReturnIsEnabled, FakeStackValue,
3651 Term, ConstantInt::get(IntptrTy, 0));
3652 } else {
3653
3654
3655
3656
3658 FakeStackInt =
3659 RTCI.createRuntimeCall(IRB, AsanStackMallocFunc[StackMallocIdx],
3660 ConstantInt::get(IntptrTy, LocalStackSize));
3661 }
3662 FakeStackPtr = IRB.CreateIntToPtr(FakeStackInt, PtrTy);
3663 Value *NoFakeStack =
3668 Value *AllocaValue =
3669 DoDynamicAlloca ? createAllocaForLayout(IRBIf, L, true) : StaticAlloca;
3670
3672 LocalStackBase =
3673 createPHI(IRB, NoFakeStack, AllocaValue, Term, FakeStackPtr);
3674 IRB.CreateStore(LocalStackBase, LocalStackBaseAlloca);
3676 } else {
3677
3678
3681 LocalStackBase =
3682 DoDynamicAlloca ? createAllocaForLayout(IRB, L, true) : StaticAlloca;
3683 LocalStackBaseAlloca = LocalStackBase;
3684 }
3685
3686
3688 for (const auto &Desc : SVD) {
3692 LocalStackBase, ConstantInt::get(IntptrTy, Desc.Offset));
3694 NewAllocaPtrs.push_back(NewAllocaPtr);
3695 }
3696
3697
3698
3700 LocalStackBase);
3701
3703 LocalStackBase, ConstantInt::get(IntptrTy, ASan.LongSize / 8));
3706 true, genName("stack"));
3708 IRB.CreateStore(Description, BasePlus1);
3709
3711 LocalStackBase, ConstantInt::get(IntptrTy, 2 * ASan.LongSize / 8));
3713
3715
3716
3717 Value *ShadowBase =
3718 ASan.memToShadow(IRB.CreatePtrToInt(LocalStackBase, IntptrTy), IRB);
3719
3720
3721 copyToShadow(ShadowAfterScope, ShadowAfterScope, IRB, ShadowBase);
3722
3723 if (!StaticAllocaPoisonCallVec.empty()) {
3725
3726
3727 for (const auto &APC : StaticAllocaPoisonCallVec) {
3729 assert(Desc.Offset % L.Granularity == 0);
3730 size_t Begin = Desc.Offset / L.Granularity;
3731 size_t End = Begin + (APC.Size + L.Granularity - 1) / L.Granularity;
3732
3734 copyToShadow(ShadowAfterScope,
3735 APC.DoPoison ? ShadowAfterScope : ShadowInScope, Begin, End,
3736 IRB, ShadowBase);
3737 }
3738 }
3739
3740
3741 for (Value *NewAllocaPtr : NewAllocaPtrs) {
3744 if (I->isLifetimeStartOrEnd())
3745 I->eraseFromParent();
3746 }
3747 }
3748
3751
3752
3755
3757 LocalStackBase);
3758 if (DoStackMalloc) {
3759 assert(StackMallocIdx >= 0);
3760
3761
3762
3763
3764
3765
3766
3767
3768
3769
3774
3776 if (ASan.MaxInlinePoisoningSize != 0 && StackMallocIdx <= 4) {
3778 ShadowAfterReturn.resize(ClassSize / L.Granularity,
3780 copyToShadow(ShadowAfterReturn, ShadowAfterReturn, IRBPoison,
3781 ShadowBase);
3782 Value *SavedFlagPtrPtr = IRBPoison.CreatePtrAdd(
3783 FakeStackPtr,
3784 ConstantInt::get(IntptrTy, ClassSize - ASan.LongSize / 8));
3785 Value *SavedFlagPtr = IRBPoison.CreateLoad(IntptrTy, SavedFlagPtrPtr);
3786 IRBPoison.CreateStore(
3788 IRBPoison.CreateIntToPtr(SavedFlagPtr, IRBPoison.getPtrTy()));
3789 } else {
3790
3791 RTCI.createRuntimeCall(
3792 IRBPoison, AsanStackFreeFunc[StackMallocIdx],
3793 {FakeStackInt, ConstantInt::get(IntptrTy, LocalStackSize)});
3794 }
3795
3797 copyToShadow(ShadowAfterScope, ShadowClean, IRBElse, ShadowBase);
3798 } else {
3799 copyToShadow(ShadowAfterScope, ShadowClean, IRBRet, ShadowBase);
3800 }
3801 }
3802
3803
3804 for (auto *AI : AllocaVec)
3806}
3807
3808void FunctionStackPoisoner::poisonAlloca(Value *V, uint64_t Size,
3810
3812 Value *SizeArg = ConstantInt::get(IntptrTy, Size);
3813 RTCI.createRuntimeCall(
3814 IRB, DoPoison ? AsanPoisonStackMemoryFunc : AsanUnpoisonStackMemoryFunc,
3815 {AddrArg, SizeArg});
3816}
3817
3818
3819
3820
3821
3822
3823
3824
3825
3826void FunctionStackPoisoner::handleDynamicAllocaCall(AllocaInst *AI) {
3828
3830 const uint64_t AllocaRedzoneMask = kAllocaRzSize - 1;
3831
3834 Value *AllocaRzMask = ConstantInt::get(IntptrTy, AllocaRedzoneMask);
3835
3836
3837
3838
3839
3840 const unsigned ElementSize =
3842 Value *OldSize =
3844 ConstantInt::get(IntptrTy, ElementSize));
3845
3846
3847 Value *PartialSize = IRB.CreateAnd(OldSize, AllocaRzMask);
3848
3849
3850 Value *Misalign = IRB.CreateSub(AllocaRzSize, PartialSize);
3851
3852
3855
3856
3857
3858
3861 PartialPadding);
3862
3863 Value *NewSize = IRB.CreateAdd(OldSize, AdditionalChunkSize);
3864
3865
3868
3869
3870 Value *NewAddress =
3872 ConstantInt::get(IntptrTy, Alignment.value()));
3873
3874
3875 RTCI.createRuntimeCall(IRB, AsanAllocaPoisonFunc, {NewAddress, OldSize});
3876
3877
3878
3880
3882
3883
3886 if (I->isLifetimeStartOrEnd())
3887 I->eraseFromParent();
3888 }
3889
3890
3892
3893
3895}
3896
3897
3898
3899
3903
3904
3905 return false;
3906
3909 return false;
3910
3913
3914
3915
3916
3917
3919 Size - uint64_t(Offset) >= TypeStoreSize / 8;
3920}
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
static cl::opt< bool > ClUseStackSafety("stack-tagging-use-stack-safety", cl::Hidden, cl::init(true), cl::desc("Use Stack Safety analysis results"))
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
static void findStoresToUninstrumentedArgAllocas(AddressSanitizer &ASan, Instruction &InsBefore, SmallVectorImpl< Instruction * > &InitInsts)
Collect instructions in the entry block after InsBefore which initialize permanent storage for a func...
Definition AddressSanitizer.cpp:3448
static void doInstrumentAddress(AddressSanitizer *Pass, Instruction *I, Instruction *InsertBefore, Value *Addr, MaybeAlign Alignment, unsigned Granularity, TypeSize TypeStoreSize, bool IsWrite, Value *SizeArgument, bool UseCalls, uint32_t Exp, RuntimeCallInserter &RTCI)
Definition AddressSanitizer.cpp:1675
static const uint64_t kDefaultShadowScale
Definition AddressSanitizer.cpp:98
const char kAMDGPUUnreachableName[]
Definition AddressSanitizer.cpp:181
constexpr size_t kAccessSizeIndexMask
Definition AddressSanitizer.cpp:192
static cl::opt< int > ClDebugMin("asan-debug-min", cl::desc("Debug min inst"), cl::Hidden, cl::init(-1))
static cl::opt< bool > ClUsePrivateAlias("asan-use-private-alias", cl::desc("Use private aliases for global variables"), cl::Hidden, cl::init(true))
static const uint64_t kPS_ShadowOffset64
Definition AddressSanitizer.cpp:121
static const uint64_t kFreeBSD_ShadowOffset32
Definition AddressSanitizer.cpp:114
constexpr size_t kIsWriteShift
Definition AddressSanitizer.cpp:193
static const uint64_t kSmallX86_64ShadowOffsetAlignMask
Definition AddressSanitizer.cpp:104
static bool isInterestingPointerSubtraction(Instruction *I)
Definition AddressSanitizer.cpp:1639
const char kAMDGPUAddressSharedName[]
Definition AddressSanitizer.cpp:178
const char kAsanStackFreeNameTemplate[]
Definition AddressSanitizer.cpp:157
constexpr size_t kCompileKernelMask
Definition AddressSanitizer.cpp:190
static cl::opt< bool > ClForceDynamicShadow("asan-force-dynamic-shadow", cl::desc("Load shadow address into a local variable for each function"), cl::Hidden, cl::init(false))
const char kAsanOptionDetectUseAfterReturn[]
Definition AddressSanitizer.cpp:169
static cl::opt< std::string > ClMemoryAccessCallbackPrefix("asan-memory-access-callback-prefix", cl::desc("Prefix for memory access callbacks"), cl::Hidden, cl::init("__asan_"))
static const uint64_t kRISCV64_ShadowOffset64
Definition AddressSanitizer.cpp:113
static cl::opt< bool > ClInsertVersionCheck("asan-guard-against-version-mismatch", cl::desc("Guard against compiler/runtime version mismatch."), cl::Hidden, cl::init(true))
const char kAsanSetShadowPrefix[]
Definition AddressSanitizer.cpp:161
static cl::opt< AsanDtorKind > ClOverrideDestructorKind("asan-destructor-kind", cl::desc("Sets the ASan destructor kind. The default is to use the value " "provided to the pass constructor"), cl::values(clEnumValN(AsanDtorKind::None, "none", "No destructors"), clEnumValN(AsanDtorKind::Global, "global", "Use global destructors")), cl::init(AsanDtorKind::Invalid), cl::Hidden)
static Twine genName(StringRef suffix)
Definition AddressSanitizer.cpp:701
static cl::opt< bool > ClInstrumentWrites("asan-instrument-writes", cl::desc("instrument write instructions"), cl::Hidden, cl::init(true))
const char kAsanPtrCmp[]
Definition AddressSanitizer.cpp:150
static uint64_t GetCtorAndDtorPriority(Triple &TargetTriple)
Definition AddressSanitizer.cpp:694
const char kAsanStackMallocNameTemplate[]
Definition AddressSanitizer.cpp:154
static cl::opt< bool > ClInstrumentByval("asan-instrument-byval", cl::desc("instrument byval call arguments"), cl::Hidden, cl::init(true))
const char kAsanInitName[]
Definition AddressSanitizer.cpp:148
static cl::opt< bool > ClGlobals("asan-globals", cl::desc("Handle global objects"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClRedzoneByvalArgs("asan-redzone-byval-args", cl::desc("Create redzones for byval " "arguments (extra copy " "required)"), cl::Hidden, cl::init(true))
static const uint64_t kWindowsShadowOffset64
Definition AddressSanitizer.cpp:126
const char kAsanGenPrefix[]
Definition AddressSanitizer.cpp:158
constexpr size_t kIsWriteMask
Definition AddressSanitizer.cpp:194
static uint64_t getRedzoneSizeForScale(int MappingScale)
Definition AddressSanitizer.cpp:688
static const uint64_t kDefaultShadowOffset64
Definition AddressSanitizer.cpp:100
static cl::opt< bool > ClOptimizeCallbacks("asan-optimize-callbacks", cl::desc("Optimize callbacks"), cl::Hidden, cl::init(false))
const char kAsanUnregisterGlobalsName[]
Definition AddressSanitizer.cpp:140
static const uint64_t kAsanCtorAndDtorPriority
Definition AddressSanitizer.cpp:135
const char kAsanUnpoisonGlobalsName[]
Definition AddressSanitizer.cpp:147
static cl::opt< bool > ClWithIfuncSuppressRemat("asan-with-ifunc-suppress-remat", cl::desc("Suppress rematerialization of dynamic shadow address by passing " "it through inline asm in prologue."), cl::Hidden, cl::init(true))
static cl::opt< int > ClDebugStack("asan-debug-stack", cl::desc("debug stack"), cl::Hidden, cl::init(0))
const char kAsanUnregisterElfGlobalsName[]
Definition AddressSanitizer.cpp:145
static bool isUnsupportedAMDGPUAddrspace(Value *Addr)
Definition AddressSanitizer.cpp:1373
const char kAsanRegisterImageGlobalsName[]
Definition AddressSanitizer.cpp:141
static const uint64_t kWebAssemblyShadowOffset
Definition AddressSanitizer.cpp:123
static cl::opt< bool > ClOpt("asan-opt", cl::desc("Optimize instrumentation"), cl::Hidden, cl::init(true))
static const uint64_t kAllocaRzSize
Definition AddressSanitizer.cpp:186
const char kODRGenPrefix[]
Definition AddressSanitizer.cpp:159
static const uint64_t kSystemZ_ShadowOffset64
Definition AddressSanitizer.cpp:107
static const uint64_t kDefaultShadowOffset32
Definition AddressSanitizer.cpp:99
const char kAsanShadowMemoryDynamicAddress[]
Definition AddressSanitizer.cpp:172
static cl::opt< bool > ClUseOdrIndicator("asan-use-odr-indicator", cl::desc("Use odr indicators to improve ODR reporting"), cl::Hidden, cl::init(true))
static bool GlobalWasGeneratedByCompiler(GlobalVariable *G)
Check if G has been created by a trusted compiler pass.
Definition AddressSanitizer.cpp:1355
const char kAsanStackMallocAlwaysNameTemplate[]
Definition AddressSanitizer.cpp:155
static cl::opt< int > ClShadowAddrSpace("asan-shadow-addr-space", cl::desc("Address space for pointers to the shadow map"), cl::Hidden, cl::init(0))
static cl::opt< bool > ClInvalidPointerCmp("asan-detect-invalid-pointer-cmp", cl::desc("Instrument <, <=, >, >= with pointer operands"), cl::Hidden, cl::init(false))
static const uint64_t kAsanEmscriptenCtorAndDtorPriority
Definition AddressSanitizer.cpp:137
static cl::opt< int > ClInstrumentationWithCallsThreshold("asan-instrumentation-with-call-threshold", cl::desc("If the function being instrumented contains more than " "this number of memory accesses, use callbacks instead of " "inline checks (-1 means never use callbacks)."), cl::Hidden, cl::init(7000))
static cl::opt< int > ClDebugMax("asan-debug-max", cl::desc("Debug max inst"), cl::Hidden, cl::init(-1))
static cl::opt< bool > ClInvalidPointerSub("asan-detect-invalid-pointer-sub", cl::desc("Instrument - operations with pointer operands"), cl::Hidden, cl::init(false))
static cl::list< unsigned > ClAddrSpaces("asan-instrument-address-spaces", cl::desc("Only instrument variables in the specified address spaces."), cl::Hidden, cl::CommaSeparated, cl::ZeroOrMore, cl::callback([](const unsigned &AddrSpace) { SrcAddrSpaces.insert(AddrSpace);}))
static const uint64_t kFreeBSD_ShadowOffset64
Definition AddressSanitizer.cpp:115
static cl::opt< uint32_t > ClForceExperiment("asan-force-experiment", cl::desc("Force optimization experiment (for testing)"), cl::Hidden, cl::init(0))
const char kSanCovGenPrefix[]
Definition AddressSanitizer.cpp:160
static const uint64_t kFreeBSDKasan_ShadowOffset64
Definition AddressSanitizer.cpp:117
const char kAsanModuleDtorName[]
Definition AddressSanitizer.cpp:134
static const uint64_t kDynamicShadowSentinel
Definition AddressSanitizer.cpp:101
static bool isInterestingPointerComparison(Instruction *I)
Definition AddressSanitizer.cpp:1625
static cl::opt< bool > ClStack("asan-stack", cl::desc("Handle stack memory"), cl::Hidden, cl::init(true))
static const uint64_t kMIPS64_ShadowOffset64
Definition AddressSanitizer.cpp:110
static const uint64_t kLinuxKasan_ShadowOffset64
Definition AddressSanitizer.cpp:105
static int StackMallocSizeClass(uint64_t LocalStackSize)
Definition AddressSanitizer.cpp:3342
static cl::opt< uint32_t > ClMaxInlinePoisoningSize("asan-max-inline-poisoning-size", cl::desc("Inline shadow poisoning for blocks up to the given size in bytes."), cl::Hidden, cl::init(64))
static cl::opt< bool > ClInstrumentAtomics("asan-instrument-atomics", cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClUseAfterScope("asan-use-after-scope", cl::desc("Check stack-use-after-scope"), cl::Hidden, cl::init(false))
constexpr size_t kAccessSizeIndexShift
Definition AddressSanitizer.cpp:191
static cl::opt< int > ClMappingScale("asan-mapping-scale", cl::desc("scale of asan shadow mapping"), cl::Hidden, cl::init(0))
const char kAsanPoisonStackMemoryName[]
Definition AddressSanitizer.cpp:162
static cl::opt< bool > ClEnableKasan("asan-kernel", cl::desc("Enable KernelAddressSanitizer instrumentation"), cl::Hidden, cl::init(false))
static cl::opt< std::string > ClDebugFunc("asan-debug-func", cl::Hidden, cl::desc("Debug func"))
static bool isSupportedAddrspace(const Triple &TargetTriple, Value *Addr)
Definition AddressSanitizer.cpp:1382
static cl::opt< bool > ClUseGlobalsGC("asan-globals-live-support", cl::desc("Use linker features to support dead " "code stripping of globals"), cl::Hidden, cl::init(true))
static const size_t kNumberOfAccessSizes
Definition AddressSanitizer.cpp:184
const char kAsanUnpoisonStackMemoryName[]
Definition AddressSanitizer.cpp:163
static const uint64_t kLoongArch64_ShadowOffset64
Definition AddressSanitizer.cpp:112
const char kAsanRegisterGlobalsName[]
Definition AddressSanitizer.cpp:139
static cl::opt< bool > ClInstrumentDynamicAllocas("asan-instrument-dynamic-allocas", cl::desc("instrument dynamic allocas"), cl::Hidden, cl::init(true))
const char kAsanModuleCtorName[]
Definition AddressSanitizer.cpp:133
const char kAsanGlobalsRegisteredFlagName[]
Definition AddressSanitizer.cpp:167
static const size_t kMaxStackMallocSize
Definition AddressSanitizer.cpp:129
static cl::opt< bool > ClRecover("asan-recover", cl::desc("Enable recovery mode (continue-after-error)."), cl::Hidden, cl::init(false))
static cl::opt< bool > ClOptSameTemp("asan-opt-same-temp", cl::desc("Instrument the same temp just once"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClDynamicAllocaStack("asan-stack-dynamic-alloca", cl::desc("Use dynamic alloca to represent stack variables"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClOptStack("asan-opt-stack", cl::desc("Don't instrument scalar stack variables"), cl::Hidden, cl::init(false))
static const uint64_t kMIPS_ShadowOffsetN32
Definition AddressSanitizer.cpp:108
const char kAsanUnregisterImageGlobalsName[]
Definition AddressSanitizer.cpp:142
static cl::opt< AsanDetectStackUseAfterReturnMode > ClUseAfterReturn("asan-use-after-return", cl::desc("Sets the mode of detection for stack-use-after-return."), cl::values(clEnumValN(AsanDetectStackUseAfterReturnMode::Never, "never", "Never detect stack use after return."), clEnumValN(AsanDetectStackUseAfterReturnMode::Runtime, "runtime", "Detect stack use after return if " "binary flag 'ASAN_OPTIONS=detect_stack_use_after_return' is set."), clEnumValN(AsanDetectStackUseAfterReturnMode::Always, "always", "Always detect stack use after return.")), cl::Hidden, cl::init(AsanDetectStackUseAfterReturnMode::Runtime))
static cl::opt< bool > ClOptGlobals("asan-opt-globals", cl::desc("Don't instrument scalar globals"), cl::Hidden, cl::init(true))
static const uintptr_t kCurrentStackFrameMagic
Definition AddressSanitizer.cpp:130
static ShadowMapping getShadowMapping(const Triple &TargetTriple, int LongSize, bool IsKasan)
Definition AddressSanitizer.cpp:494
static const uint64_t kPPC64_ShadowOffset64
Definition AddressSanitizer.cpp:106
static cl::opt< AsanCtorKind > ClConstructorKind("asan-constructor-kind", cl::desc("Sets the ASan constructor kind"), cl::values(clEnumValN(AsanCtorKind::None, "none", "No constructors"), clEnumValN(AsanCtorKind::Global, "global", "Use global constructors")), cl::init(AsanCtorKind::Global), cl::Hidden)
static const int kMaxAsanStackMallocSizeClass
Definition AddressSanitizer.cpp:153
static const uint64_t kMIPS32_ShadowOffset32
Definition AddressSanitizer.cpp:109
static cl::opt< bool > ClAlwaysSlowPath("asan-always-slow-path", cl::desc("use instrumentation with slow path for all accesses"), cl::Hidden, cl::init(false))
static const uint64_t kNetBSD_ShadowOffset32
Definition AddressSanitizer.cpp:118
static const uint64_t kFreeBSDAArch64_ShadowOffset64
Definition AddressSanitizer.cpp:116
static const uint64_t kSmallX86_64ShadowOffsetBase
Definition AddressSanitizer.cpp:103
static cl::opt< bool > ClInitializers("asan-initialization-order", cl::desc("Handle C++ initializer order"), cl::Hidden, cl::init(true))
static const uint64_t kNetBSD_ShadowOffset64
Definition AddressSanitizer.cpp:119
const char kAsanPtrSub[]
Definition AddressSanitizer.cpp:151
static cl::opt< unsigned > ClRealignStack("asan-realign-stack", cl::desc("Realign stack to the value of this flag (power of two)"), cl::Hidden, cl::init(32))
static const uint64_t kWindowsShadowOffset32
Definition AddressSanitizer.cpp:122
static cl::opt< bool > ClInstrumentReads("asan-instrument-reads", cl::desc("instrument read instructions"), cl::Hidden, cl::init(true))
static size_t TypeStoreSizeToSizeIndex(uint32_t TypeSize)
Definition AddressSanitizer.cpp:1348
const char kAsanAllocaPoison[]
Definition AddressSanitizer.cpp:175
constexpr size_t kCompileKernelShift
Definition AddressSanitizer.cpp:189
static SmallSet< unsigned, 8 > SrcAddrSpaces
Definition AddressSanitizer.cpp:445
static cl::opt< bool > ClWithIfunc("asan-with-ifunc", cl::desc("Access dynamic shadow through an ifunc global on " "platforms that support this"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClKasanMemIntrinCallbackPrefix("asan-kernel-mem-intrinsic-prefix", cl::desc("Use prefix for memory intrinsics in KASAN mode"), cl::Hidden, cl::init(false))
const char kAsanVersionCheckNamePrefix[]
Definition AddressSanitizer.cpp:149
const char kAMDGPUAddressPrivateName[]
Definition AddressSanitizer.cpp:179
static const uint64_t kNetBSDKasan_ShadowOffset64
Definition AddressSanitizer.cpp:120
const char kAMDGPUBallotName[]
Definition AddressSanitizer.cpp:180
const char kAsanRegisterElfGlobalsName[]
Definition AddressSanitizer.cpp:144
static cl::opt< uint64_t > ClMappingOffset("asan-mapping-offset", cl::desc("offset of asan shadow mapping [EXPERIMENTAL]"), cl::Hidden, cl::init(0))
const char kAsanReportErrorTemplate[]
Definition AddressSanitizer.cpp:138
static cl::opt< bool > ClWithComdat("asan-with-comdat", cl::desc("Place ASan constructors in comdat sections"), cl::Hidden, cl::init(true))
static StringRef getAllocaName(AllocaInst *AI)
Definition AddressSanitizer.cpp:3496
static cl::opt< bool > ClSkipPromotableAllocas("asan-skip-promotable-allocas", cl::desc("Do not instrument promotable allocas"), cl::Hidden, cl::init(true))
static cl::opt< int > ClMaxInsnsToInstrumentPerBB("asan-max-ins-per-bb", cl::init(10000), cl::desc("maximal number of instructions to instrument in any given BB"), cl::Hidden)
static const uintptr_t kRetiredStackFrameMagic
Definition AddressSanitizer.cpp:131
static cl::opt< bool > ClUseStackSafety("asan-use-stack-safety", cl::Hidden, cl::init(true), cl::Hidden, cl::desc("Use Stack Safety analysis results"), cl::Optional)
const char kAsanPoisonGlobalsName[]
Definition AddressSanitizer.cpp:146
const char kAsanHandleNoReturnName[]
Definition AddressSanitizer.cpp:152
static const size_t kMinStackMallocSize
Definition AddressSanitizer.cpp:128
static cl::opt< int > ClDebug("asan-debug", cl::desc("debug"), cl::Hidden, cl::init(0))
const char kAsanAllocasUnpoison[]
Definition AddressSanitizer.cpp:176
static const uint64_t kAArch64_ShadowOffset64
Definition AddressSanitizer.cpp:111
static cl::opt< bool > ClInvalidPointerPairs("asan-detect-invalid-pointer-pair", cl::desc("Instrument <, <=, >, >=, - with pointer operands"), cl::Hidden, cl::init(false))
Function Alias Analysis false
This file contains the simple types necessary to represent the attributes associated with functions a...
static bool isPointerOperand(Value *I, User *U)
static const Function * getParent(const Value *V)
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static GCRegistry::Add< StatepointGC > D("statepoint-example", "an example strategy for statepoint")
#define clEnumValN(ENUMVAL, FLAGNAME, DESC)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file defines the DenseMap class.
This file builds on the ADT/GraphTraits.h file to build generic depth first graph iterator.
static bool runOnFunction(Function &F, bool PostInlining)
This is the interface for a simple mod/ref and alias analysis over globals.
Module.h This file contains the declarations for the Module class.
This defines the Use class.
std::pair< Instruction::BinaryOps, Value * > OffsetOp
Find all possible pairs (BinOp, RHS) that BinOp V, RHS can be simplified.
static bool isZero(Value *V, const DataLayout &DL, DominatorTree *DT, AssumptionCache *AC)
print mir2vec MIR2Vec Vocabulary Printer Pass
Machine Check Debug Module
uint64_t IntrinsicInst * II
FunctionAnalysisManager FAM
ModuleAnalysisManager MAM
const SmallVectorImpl< MachineOperand > & Cond
void visit(MachineFunction &MF, MachineBasicBlock &Start, std::function< void(MachineBasicBlock *)> op)
This file defines the SmallPtrSet class.
This file defines the SmallSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
This pass exposes codegen information to IR-level passes.
uint64_t getZExtValue() const
Get zero extended value.
int64_t getSExtValue() const
Get sign extended value.
LLVM_ABI AddressSanitizerPass(const AddressSanitizerOptions &Options, bool UseGlobalGC=true, bool UseOdrIndicator=true, AsanDtorKind DestructorKind=AsanDtorKind::Global, AsanCtorKind ConstructorKind=AsanCtorKind::Global)
Definition AddressSanitizer.cpp:1295
LLVM_ABI PreservedAnalyses run(Module &M, ModuleAnalysisManager &AM)
Definition AddressSanitizer.cpp:1303
LLVM_ABI void printPipeline(raw_ostream &OS, function_ref< StringRef(StringRef)> MapClassName2PassName)
Definition AddressSanitizer.cpp:1283
an instruction to allocate memory on the stack
bool isSwiftError() const
Return true if this alloca is used as a swifterror argument to a call.
LLVM_ABI bool isStaticAlloca() const
Return true if this alloca is in the entry block of the function and is a constant size.
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
PointerType * getType() const
Overload to return most specific pointer type.
Type * getAllocatedType() const
Return the type that is being allocated by the instruction.
bool isUsedWithInAlloca() const
Return true if this alloca is used as an inalloca argument to a call.
LLVM_ABI std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
void setAlignment(Align Align)
const Value * getArraySize() const
Get the number of elements allocated.
This class represents an incoming formal argument to a Function.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
size_t size() const
size - Get the array size.
Class to represent array types.
static LLVM_ABI ArrayType * get(Type *ElementType, uint64_t NumElements)
This static method is the primary way to construct an ArrayType.
An instruction that atomically checks whether a specified value is in a memory location,...
an instruction that atomically reads a memory location, combines it with another value,...
LLVM Basic Block Representation.
iterator begin()
Instruction iterator methods.
LLVM_ABI const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
const Function * getParent() const
Return the enclosing method, or null if none.
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
InstListType::iterator iterator
Instruction iterators...
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
LLVM_ABI const Module * getModule() const
Return the module owning the function this basic block belongs to, or nullptr if the function does no...
Conditional or Unconditional Branch instruction.
static BranchInst * Create(BasicBlock *IfTrue, InsertPosition InsertBefore=nullptr)
bool isInlineAsm() const
Check if this call is an inline asm statement.
static LLVM_ABI CallBase * addOperandBundle(CallBase *CB, uint32_t ID, OperandBundleDef OB, InsertPosition InsertPt=nullptr)
Create a clone of CB with operand bundle OB added.
bool doesNotReturn() const
Determine if the call cannot return.
unsigned arg_size() const
This class represents a function call, abstracting a target machine's calling convention.
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
@ Largest
The linker will choose the largest COMDAT.
@ SameSize
The data referenced by the COMDAT must be the same size.
@ Any
The linker may choose any COMDAT.
@ NoDeduplicate
No deduplication is performed.
@ ExactMatch
The data referenced by the COMDAT must be the same.
ConstantArray - Constant Array Declarations.
static LLVM_ABI Constant * get(ArrayType *T, ArrayRef< Constant * > V)
static LLVM_ABI Constant * getPointerCast(Constant *C, Type *Ty)
Create a BitCast, AddrSpaceCast, or a PtrToInt cast constant expression.
static LLVM_ABI Constant * getPtrToInt(Constant *C, Type *Ty, bool OnlyIfReduced=false)
static Constant * getGetElementPtr(Type *Ty, Constant *C, ArrayRef< Constant * > IdxList, GEPNoWrapFlags NW=GEPNoWrapFlags::none(), std::optional< ConstantRange > InRange=std::nullopt, Type *OnlyIfReducedTy=nullptr)
Getelementptr form.
static LLVM_ABI bool isValueValidForType(Type *Ty, uint64_t V)
This static method returns true if the type Ty is big enough to represent the value V.
static LLVM_ABI Constant * get(StructType *T, ArrayRef< Constant * > V)
This is an important base class in LLVM.
static LLVM_ABI Constant * getNullValue(Type *Ty)
Constructor to create a '0' constant of arbitrary type.
LLVM_ABI Constant * getAggregateElement(unsigned Elt) const
For aggregates (struct/array/vector) return the constant that corresponds to the specified element if...
A parsed version of the target data layout string in and methods for querying it.
LLVM_ABI DILocation * get() const
Get the underlying DILocation.
A handy container for a FunctionType+Callee-pointer pair, which can be passed around as a single enti...
static LLVM_ABI FunctionType * get(Type *Result, ArrayRef< Type * > Params, bool isVarArg)
This static method is the primary way of constructing a FunctionType.
const BasicBlock & front() const
static Function * createWithDefaultAttr(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace, const Twine &N="", Module *M=nullptr)
Creates a function with some attributes recorded in llvm.module.flags and the LLVMContext applied.
bool hasPersonalityFn() const
Check whether this function has a personality function.
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function.
const Constant * getAliasee() const
static LLVM_ABI GlobalAlias * create(Type *Ty, unsigned AddressSpace, LinkageTypes Linkage, const Twine &Name, Constant *Aliasee, Module *Parent)
If a parent module is specified, the alias is automatically inserted into the end of the specified mo...
LLVM_ABI void copyMetadata(const GlobalObject *Src, unsigned Offset)
Copy metadata from Src, adjusting offsets by Offset.
LLVM_ABI void setComdat(Comdat *C)
LLVM_ABI void setSection(StringRef S)
Change the section for this global.
VisibilityTypes getVisibility() const
void setUnnamedAddr(UnnamedAddr Val)
bool hasLocalLinkage() const
static StringRef dropLLVMManglingEscape(StringRef Name)
If the given string begins with the GlobalValue name mangling escape character '\1', drop it.
ThreadLocalMode getThreadLocalMode() const
@ HiddenVisibility
The GV is hidden.
void setVisibility(VisibilityTypes V)
LinkageTypes
An enumeration for the kinds of linkage for global values.
@ PrivateLinkage
Like Internal, but omit from symbol table.
@ CommonLinkage
Tentative definitions.
@ InternalLinkage
Rename collisions when linking (static functions).
@ AvailableExternallyLinkage
Available for inspection, not emission.
@ ExternalWeakLinkage
ExternalWeak linkage description.
DLLStorageClassTypes getDLLStorageClass() const
const Constant * getInitializer() const
getInitializer - Return the initializer for this global variable.
LLVM_ABI void copyAttributesFrom(const GlobalVariable *Src)
copyAttributesFrom - copy all additional attributes (those not needed to create a GlobalVariable) from the GlobalVariable Src to this one.
void setAlignment(Align Align)
Sets the alignment attribute of the GlobalVariable.
Analysis pass providing a never-invalidated alias analysis result.
This instruction compares its operands according to the predicate given to the constructor.
Common base class shared among various IRBuilders.
AllocaInst * CreateAlloca(Type *Ty, unsigned AddrSpace, Value *ArraySize=nullptr, const Twine &Name="")
IntegerType * getInt1Ty()
Fetch the type representing a single bit.
LoadInst * CreateAlignedLoad(Type *Ty, Value *Ptr, MaybeAlign Align, const char *Name)
CallInst * CreateMemCpy(Value *Dst, MaybeAlign DstAlign, Value *Src, MaybeAlign SrcAlign, uint64_t Size, bool isVolatile=false, const AAMDNodes &AAInfo=AAMDNodes())
Create and insert a memcpy between the specified pointers.
Value * CreatePointerCast(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateICmpSGE(Value *LHS, Value *RHS, const Twine &Name="")
LLVM_ABI Value * CreateSelect(Value *C, Value *True, Value *False, const Twine &Name="", Instruction *MDFrom=nullptr)
BasicBlock::iterator GetInsertPoint() const
Value * CreateIntToPtr(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateLShr(Value *LHS, Value *RHS, const Twine &Name="", bool isExact=false)
IntegerType * getInt32Ty()
Fetch the type representing a 32-bit integer.
Value * CreatePtrAdd(Value *Ptr, Value *Offset, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
BasicBlock * GetInsertBlock() const
IntegerType * getInt64Ty()
Fetch the type representing a 64-bit integer.
Value * CreateICmpNE(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateGEP(Type *Ty, Value *Ptr, ArrayRef< Value * > IdxList, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
LLVM_ABI CallInst * CreateIntrinsic(Intrinsic::ID ID, ArrayRef< Type * > Types, ArrayRef< Value * > Args, FMFSource FMFSource={}, const Twine &Name="")
Create a call to intrinsic ID with Args, mangled using Types.
ConstantInt * getInt32(uint32_t C)
Get a constant 32-bit value.
PHINode * CreatePHI(Type *Ty, unsigned NumReservedValues, const Twine &Name="")
Value * CreateNot(Value *V, const Twine &Name="")
Value * CreateICmpEQ(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateSub(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
ConstantInt * getIntN(unsigned N, uint64_t C)
Get a constant N-bit value, zero extended or truncated from a 64-bit value.
LoadInst * CreateLoad(Type *Ty, Value *Ptr, const char *Name)
Provided to resolve 'CreateLoad(Ty, Ptr, "...")' correctly, instead of converting the string to 'bool' for the isVolatile parameter.
Value * CreateAnd(Value *LHS, Value *RHS, const Twine &Name="")
StoreInst * CreateStore(Value *Val, Value *Ptr, bool isVolatile=false)
Value * CreateAdd(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
Value * CreatePtrToInt(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateIsNotNull(Value *Arg, const Twine &Name="")
Return a boolean value testing if Arg != 0.
CallInst * CreateCall(FunctionType *FTy, Value *Callee, ArrayRef< Value * > Args={}, const Twine &Name="", MDNode *FPMathTag=nullptr)
LLVM_ABI Value * CreateTypeSize(Type *Ty, TypeSize Size)
Create an expression which evaluates to the number of units in Size at runtime.
Value * CreateIntCast(Value *V, Type *DestTy, bool isSigned, const Twine &Name="")
void SetInsertPoint(BasicBlock *TheBB)
This specifies that created instructions should be appended to the end of the specified block.
Type * getVoidTy()
Fetch the type representing void.
StoreInst * CreateAlignedStore(Value *Val, Value *Ptr, MaybeAlign Align, bool isVolatile=false)
Value * CreateOr(Value *LHS, Value *RHS, const Twine &Name="", bool IsDisjoint=false)
IntegerType * getInt8Ty()
Fetch the type representing an 8-bit integer.
Value * CreateAddrSpaceCast(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateMul(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
static LLVM_ABI InlineAsm * get(FunctionType *Ty, StringRef AsmString, StringRef Constraints, bool hasSideEffects, bool isAlignStack=false, AsmDialect asmDialect=AD_ATT, bool canThrow=false)
InlineAsm::get - Return the specified uniqued inline asm string.
Base class for instruction visitors.
const DebugLoc & getDebugLoc() const
Return the debug location for this node as a DebugLoc.
bool hasMetadata() const
Return true if this instruction has any metadata attached to it.
LLVM_ABI void moveBefore(InstListType::iterator InsertPos)
Unlink this instruction from its current basic block and insert it into the basic block that MovePos lives in, right before MovePos.
LLVM_ABI InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
MDNode * getMetadata(unsigned KindID) const
Get the metadata of given kind attached to this Instruction.
LLVM_ABI BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
LLVM_ABI const DataLayout & getDataLayout() const
Get the data layout of the module this instruction belongs to.
static LLVM_ABI IntegerType * get(LLVMContext &C, unsigned NumBits)
This static method is the primary way of constructing an IntegerType.
A wrapper class for inspecting calls to intrinsic functions.
LLVM_ABI void emitError(const Instruction *I, const Twine &ErrorStr)
emitError - Emit an error message to the currently installed error handler with optional location information.
An instruction for reading from memory.
static Error ParseSectionSpecifier(StringRef Spec, StringRef &Segment, StringRef &Section, unsigned &TAA, bool &TAAParsed, unsigned &StubSize)
Parse the section specifier indicated by "Spec".
LLVM_ABI MDNode * createUnlikelyBranchWeights()
Return metadata containing two branch weights, with significant bias towards false destination.
ArrayRef< MDOperand > operands() const
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
This is the common base class for memset/memcpy/memmove.
A Module instance is used to store all the information related to an LLVM module.
Evaluate the size and offset of an object pointed to by a Value* statically.
LLVM_ABI SizeOffsetAPInt compute(Value *V)
Pass interface - Implemented by all 'passes'.
static PointerType * getUnqual(Type *ElementType)
This constructs a pointer to an object of the specified type in the default address space (address space zero).
static LLVM_ABI PointerType * get(Type *ElementType, unsigned AddressSpace)
This constructs a pointer to an object of the specified type in a numbered address space.
A set of analyses that are preserved following a run of a transformation pass.
static PreservedAnalyses none()
Convenience factory function for the empty preserved set.
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
PreservedAnalyses & abandon()
Mark an analysis as abandoned.
Return a value (possibly void), from a function.
static ReturnInst * Create(LLVMContext &C, Value *retVal=nullptr, InsertPosition InsertBefore=nullptr)
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
SmallSet - This maintains a set of unique values, optimizing for the case when the set is small (less than N).
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void reserve(size_type N)
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
This pass performs the global (interprocedural) stack safety analysis (new pass manager).
bool stackAccessIsSafe(const Instruction &I) const
bool isSafe(const AllocaInst &AI) const
An instruction for storing to memory.
StringRef - Represent a constant reference to a string, i.e. a character array and a length, which need not be null terminated.
bool starts_with(StringRef Prefix) const
Check if this string starts with the given Prefix.
constexpr bool empty() const
empty - Check if the string is empty.
Class to represent struct types.
static LLVM_ABI StructType * get(LLVMContext &Context, ArrayRef< Type * > Elements, bool isPacked=false)
This static method is the primary way to create a literal StructType.
Analysis pass providing the TargetTransformInfo.
Analysis pass providing the TargetLibraryInfo.
Provides information about what library functions are available for the current target.
AttributeList getAttrList(LLVMContext *C, ArrayRef< unsigned > ArgNos, bool Signed, bool Ret=false, AttributeList AL=AttributeList()) const
This pass provides access to the codegen interfaces that are needed for IR-level transformations.
Triple - Helper class for working with autoconf configuration names.
bool isThumb() const
Tests whether the target is Thumb (little and big endian).
bool isDriverKit() const
Is this an Apple DriverKit triple.
bool isBPF() const
Tests whether the target is eBPF.
bool isAndroid() const
Tests whether the target is Android.
bool isMIPS64() const
Tests whether the target is MIPS 64-bit (little and big endian).
ArchType getArch() const
Get the parsed architecture type of this triple.
bool isLoongArch64() const
Tests whether the target is 64-bit LoongArch.
bool isMIPS32() const
Tests whether the target is MIPS 32-bit (little and big endian).
bool isOSWindows() const
Tests whether the OS is Windows.
bool isARM() const
Tests whether the target is ARM (little and big endian).
bool isOSLinux() const
Tests whether the OS is Linux.
bool isMacOSX() const
Is this a Mac OS X triple.
bool isOSEmscripten() const
Tests whether the OS is Emscripten.
bool isWatchOS() const
Is this an Apple watchOS triple.
bool isiOS() const
Is this an iOS triple.
bool isPS() const
Tests whether the target is the PS4 or PS5 platform.
bool isWasm() const
Tests whether the target is wasm (32- and 64-bit).
bool isOSHaiku() const
Tests whether the OS is Haiku.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
LLVM_ABI unsigned getIntegerBitWidth() const
static LLVM_ABI IntegerType * getInt32Ty(LLVMContext &C)
LLVM_ABI unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
static LLVM_ABI Type * getVoidTy(LLVMContext &C)
static LLVM_ABI IntegerType * getInt8Ty(LLVMContext &C)
Type * getScalarType() const
If this is a vector type, return the element type, otherwise return 'this'.
bool isSized(SmallPtrSetImpl< Type * > *Visited=nullptr) const
Return true if it makes sense to take the size of this type.
This function has undefined behavior.
A Use represents the edge between a Value definition and its users.
Value * getOperand(unsigned i) const
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
LLVM_ABI void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
iterator_range< user_iterator > users()
LLVM_ABI bool isSwiftError() const
Return true if this value is a swifterror value.
LLVM_ABI StringRef getName() const
Return a constant reference to the value's name.
LLVM_ABI void takeName(Value *V)
Transfer the name from V to this value.
Base class of all SIMD vector types.
static LLVM_ABI VectorType * get(Type *ElementType, ElementCount EC)
This static method is the primary way to construct an VectorType.
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
An efficient, type-erasing, non-owning reference to a callable.
const ParentTy * getParent() const
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
This file contains the declaration of the Comdat class, which represents a single COMDAT in LLVM.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
void getInterestingMemoryOperands(Module &M, Instruction *I, SmallVectorImpl< InterestingMemoryOperand > &Interesting)
Get all the memory operands from the instruction that needs to be instrumented.
void instrumentAddress(Module &M, IRBuilder<> &IRB, Instruction *OrigIns, Instruction *InsertBefore, Value *Addr, Align Alignment, TypeSize TypeStoreSize, bool IsWrite, Value *SizeArgument, bool UseCalls, bool Recover, int AsanScale, int AsanOffset)
Instrument the memory operand Addr.
uint64_t getRedzoneSizeForGlobal(int AsanScale, uint64_t SizeInBytes)
Given SizeInBytes of the Value to be instrumented, Returns the redzone size corresponding to it.
constexpr std::underlying_type_t< E > Mask()
Get a bitmask with 1s in all places up to the high-order bit of E's largest value.
unsigned ID
LLVM IR allows to use arbitrary numbers as calling convention identifiers.
@ C
The default llvm calling convention, compatible with C.
@ BasicBlock
Various leaf nodes.
@ S_CSTRING_LITERALS
S_CSTRING_LITERALS - Section with literal C strings.
@ OB
OB - OneByte - Set if this instruction has a one byte opcode.
ValuesClass values(OptsTy... Options)
Helper to build a ValuesClass by forwarding a variable number of arguments as an initializer list to ...
initializer< Ty > init(const Ty &Val)
cb< typename detail::callback_traits< F >::result_type, typename detail::callback_traits< F >::arg_type > callback(F CB)
uint64_t getAllocaSizeInBytes(const AllocaInst &AI)
Context & getContext() const
friend class Instruction
Iterator for Instructions in a BasicBlock.
This is an optimization pass for GlobalISel generic memory operations.
LLVM_ABI void ReplaceInstWithInst(BasicBlock *BB, BasicBlock::iterator &BI, Instruction *I)
Replace the instruction specified by BI with the instruction specified by I.
FunctionAddr VTableAddr Value
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
LLVM_ABI SmallVector< uint8_t, 64 > GetShadowBytesAfterScope(const SmallVectorImpl< ASanStackVariableDescription > &Vars, const ASanStackFrameLayout &Layout)
LLVM_ABI GlobalVariable * createPrivateGlobalForString(Module &M, StringRef Str, bool AllowMerging, Twine NamePrefix="")
LLVM_ABI AllocaInst * findAllocaForValue(Value *V, bool OffsetZero=false)
Returns unique alloca where the value comes from, or nullptr.
decltype(auto) dyn_cast(const From &Val)
dyn_cast - Return the argument parameter cast to the specified type.
FunctionAddr VTableAddr uintptr_t uintptr_t Int32Ty
LLVM_ABI Function * createSanitizerCtor(Module &M, StringRef CtorName)
Creates sanitizer constructor function.
AsanDetectStackUseAfterReturnMode
Mode of ASan detect stack use after return.
@ Always
Always detect stack use after return.
@ Never
Never detect stack use after return.
@ Runtime
Detect stack use after return if not disabled runtime with (ASAN_OPTIONS=detect_stack_use_after_return=0).
LLVM_ABI DenseMap< BasicBlock *, ColorVector > colorEHFunclets(Function &F)
If an EH funclet personality is in use (see isFuncletEHPersonality), this will recompute which blocks are in which funclet.
iterator_range< early_inc_iterator_impl< detail::IterOfRange< RangeT > > > make_early_inc_range(RangeT &&Range)
Make a range that does early increment to allow mutation of the underlying range without disrupting iteration.
InnerAnalysisManagerProxy< FunctionAnalysisManager, Module > FunctionAnalysisManagerModuleProxy
Provide the FunctionAnalysisManager to Module proxy.
LLVM_ABI bool isAllocaPromotable(const AllocaInst *AI)
Return true if this alloca is legal for promotion.
LLVM_ABI SmallString< 64 > ComputeASanStackFrameDescription(const SmallVectorImpl< ASanStackVariableDescription > &Vars)
LLVM_ABI SmallVector< uint8_t, 64 > GetShadowBytes(const SmallVectorImpl< ASanStackVariableDescription > &Vars, const ASanStackFrameLayout &Layout)
int countr_zero(T Val)
Count number of 0's from the least significant bit to the most stopping at the first 1.
auto dyn_cast_or_null(const Y &Val)
LLVM_ABI FunctionCallee declareSanitizerInitFunction(Module &M, StringRef InitName, ArrayRef< Type * > InitArgTypes, bool Weak=false)
FunctionAddr VTableAddr uintptr_t uintptr_t Version
LLVM_ABI std::string getUniqueModuleId(Module *M)
Produce a unique identifier for this module by taking the MD5 sum of the names of the module's strong external symbols that are not comdat members.
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
LLVM_ABI std::pair< Function *, FunctionCallee > createSanitizerCtorAndInitFunctions(Module &M, StringRef CtorName, StringRef InitName, ArrayRef< Type * > InitArgTypes, ArrayRef< Value * > InitArgs, StringRef VersionCheckName=StringRef(), bool Weak=false)
Creates sanitizer constructor function, and calls sanitizer's init function from it.
decltype(auto) get(const PointerIntPair< PointerTy, IntBits, IntType, PtrTraits, Info > &Pair)
LLVM_ABI void SplitBlockAndInsertIfThenElse(Value *Cond, BasicBlock::iterator SplitBefore, Instruction **ThenTerm, Instruction **ElseTerm, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr)
SplitBlockAndInsertIfThenElse is similar to SplitBlockAndInsertIfThen, but also creates the ElseBlock.
LLVM_ABI raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
LLVM_ABI void report_fatal_error(Error Err, bool gen_crash_diag=true)
bool isAlnum(char C)
Checks whether character C is either a decimal digit or an uppercase or lowercase letter as classified by "C" locale.
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...
bool isa(const From &Val)
isa - Return true if the parameter to the template is an instance of one of the template type arguments.
AsanDtorKind
Types of ASan module destructors supported.
@ Invalid
Not a valid destructor Kind.
@ Global
Append to llvm.global_dtors.
@ None
Do not emit any destructors for ASan.
LLVM_ABI ASanStackFrameLayout ComputeASanStackFrameLayout(SmallVectorImpl< ASanStackVariableDescription > &Vars, uint64_t Granularity, uint64_t MinHeaderSize)
void cantFail(Error Err, const char *Msg=nullptr)
Report a fatal error if Err is a failure value.
IRBuilder(LLVMContext &, FolderTy, InserterTy, MDNode *, ArrayRef< OperandBundleDef >) -> IRBuilder< FolderTy, InserterTy >
OperandBundleDefT< Value * > OperandBundleDef
LLVM_ABI void appendToCompilerUsed(Module &M, ArrayRef< GlobalValue * > Values)
Adds global values to the llvm.compiler.used list.
static const int kAsanStackUseAfterReturnMagic
LLVM_ABI void setGlobalVariableLargeSection(const Triple &TargetTriple, GlobalVariable &GV)
void removeASanIncompatibleFnAttributes(Function &F, bool ReadsArgMem)
Remove memory attributes that are incompatible with the instrumentation added by AddressSanitizer and...
Definition AddressSanitizer.cpp:635
@ Dynamic
Denotes mode unknown at compile time.
ArrayRef(const T &OneElt) -> ArrayRef< T >
LLVM_ABI void appendToGlobalCtors(Module &M, Function *F, int Priority, Constant *Data=nullptr)
Append F to the list of global ctors of module M with the given Priority.
TinyPtrVector< BasicBlock * > ColorVector
decltype(auto) cast(const From &Val)
cast - Return the argument parameter cast to the specified type.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
iterator_range< df_iterator< T > > depth_first(const T &G)
LLVM_ABI Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
LLVM_ABI const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=MaxLookupSearchDepth)
This method strips off any GEP address adjustments, pointer casts or llvm.threadlocal....
AsanCtorKind
Types of ASan module constructors supported.
LLVM_ABI void maybeMarkSanitizerLibraryCallNoBuiltin(CallInst *CI, const TargetLibraryInfo *TLI)
Given a CallInst, check if it calls a string function known to CodeGen, and mark it with NoBuiltin if so.
LLVM_ABI void appendToUsed(Module &M, ArrayRef< GlobalValue * > Values)
Adds global values to the llvm.used list.
LLVM_ABI void appendToGlobalDtors(Module &M, Function *F, int Priority, Constant *Data=nullptr)
Same as appendToGlobalCtors(), but for global dtors.
LLVM_ABI bool checkIfAlreadyInstrumented(Module &M, StringRef Flag)
Check if module has flag attached, if not add the flag.
void getAddressSanitizerParams(const Triple &TargetTriple, int LongSize, bool IsKasan, uint64_t *ShadowBase, int *MappingScale, bool *OrShadowOffset)
Definition AddressSanitizer.cpp:626
DEMANGLE_ABI std::string demangle(std::string_view MangledName)
Attempt to demangle a string using different demangling schemes.
std::string itostr(int64_t X)
LLVM_ABI void SplitBlockAndInsertForEachLane(ElementCount EC, Type *IndexTy, BasicBlock::iterator InsertBefore, std::function< void(IRBuilderBase &, Value *)> Func)
Utility function for performing a given action on each lane of a vector with EC elements.
AnalysisManager< Module > ModuleAnalysisManager
Convenience typedef for the Module analysis manager.
LLVM_ABI bool replaceDbgDeclare(Value *Address, Value *NewAddress, DIBuilder &Builder, uint8_t DIExprFlags, int Offset)
Replaces dbg.declare record when the address it describes is replaced with a new value.
LLVM_ABI ASanAccessInfo(int32_t Packed)
Definition AddressSanitizer.cpp:674
const uint8_t AccessSizeIndex
This struct is a compact representation of a valid (non-zero power of two) alignment.
constexpr uint64_t value() const
This is a hole in the type system and should not be abused.
This struct is a compact representation of a valid (power of two) or undefined (0) alignment.
Align valueOrOne() const
For convenience, returns a valid alignment or 1 if undefined.
Information about a load/store intrinsic defined by the target.
SmallVector< InterestingMemoryOperand, 1 > InterestingOperands
A CRTP mix-in to automatically provide informational APIs needed for passes.
SizeOffsetAPInt - Used by ObjectSizeOffsetVisitor, which works with APInts.