LLVM: lib/Target/SPIRV/SPIRVEmitIntrinsics.cpp Source File (original) (raw)
1
2
3
4
5
6
7
8
9
10
11
12
13
24#include "llvm/IR/IntrinsicsSPIRV.h"
28
29#include
30#include
31#include <unordered_set>
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51using namespace llvm;
52
54#define GET_BuiltinGroup_DECL
55#include "SPIRVGenTables.inc"
56}
57
58namespace {
59
60class SPIRVEmitIntrinsics
62 public InstVisitor<SPIRVEmitIntrinsics, Instruction *> {
63 SPIRVTargetMachine *TM = nullptr;
64 SPIRVGlobalRegistry *GR = nullptr;
66 bool TrackConstants = true;
67 bool HaveFunPtrs = false;
68 DenseMap<Instruction *, Constant *> AggrConsts;
69 DenseMap<Instruction *, Type *> AggrConstTypes;
70 DenseSet<Instruction *> AggrStores;
71 std::unordered_set<Value *> Named;
72
73
74 DenseMap<Function *, SmallVector<std::pair<unsigned, Type *>>> FDeclPtrTys;
75
76
77 bool CanTodoType = true;
78 unsigned TodoTypeSz = 0;
79 DenseMap<Value *, bool> TodoType;
80 void insertTodoType(Value *Op) {
81
83 auto It = TodoType.try_emplace(Op, true);
84 if (It.second)
85 ++TodoTypeSz;
86 }
87 }
88 void eraseTodoType(Value *Op) {
89 auto It = TodoType.find(Op);
90 if (It != TodoType.end() && It->second) {
91 It->second = false;
92 --TodoTypeSz;
93 }
94 }
95 bool isTodoType(Value *Op) {
97 return false;
98 auto It = TodoType.find(Op);
99 return It != TodoType.end() && It->second;
100 }
101
102
103 std::unordered_set<Instruction *> TypeValidated;
104
105
106 enum WellKnownTypes { Event };
107
108
109 Type *deduceElementType(Value *I, bool UnknownElemTypeI8);
110 Type *deduceElementTypeHelper(Value *I, bool UnknownElemTypeI8);
111 Type *deduceElementTypeHelper(Value *I, std::unordered_set<Value *> &Visited,
112 bool UnknownElemTypeI8,
113 bool IgnoreKnownType = false);
114 Type *deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
115 bool UnknownElemTypeI8);
116 Type *deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
117 std::unordered_set<Value *> &Visited,
118 bool UnknownElemTypeI8);
119 Type *deduceElementTypeByUsersDeep(Value *Op,
120 std::unordered_set<Value *> &Visited,
121 bool UnknownElemTypeI8);
122 void maybeAssignPtrType(Type *&Ty, Value *I, Type *RefTy,
123 bool UnknownElemTypeI8);
124
125
126 Type *deduceNestedTypeHelper(User *U, bool UnknownElemTypeI8);
128 std::unordered_set<Value *> &Visited,
129 bool UnknownElemTypeI8);
130
131
132 void deduceOperandElementType(Instruction *I,
133 SmallPtrSet<Instruction *, 4> *IncompleteRets,
134 const SmallPtrSet<Value *, 4> *AskOps = nullptr,
135 bool IsPostprocessing = false);
136
137 void preprocessCompositeConstants(IRBuilder<> &B);
139
140 Type *reconstructType(Value *Op, bool UnknownElemTypeI8,
141 bool IsPostprocessing);
142
146 bool UnknownElemTypeI8);
148 void insertAssignPtrTypeTargetExt(TargetExtType *AssignedType, Value *V,
151 Type *ExpectedElementType,
152 unsigned OperandToReplace,
155 bool shouldTryToAddMemAliasingDecoration(Instruction *Inst);
157 void insertConstantsForFPFastMathDefault(Module &M);
158 void processGlobalValue(GlobalVariable &GV, IRBuilder<> &B);
163 std::unordered_set<Function *> &FVisited);
164
165 bool deduceOperandElementTypeCalledFunction(
166 CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
167 Type *&KnownElemTy, bool &Incomplete);
168 void deduceOperandElementTypeFunctionPointer(
169 CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
170 Type *&KnownElemTy, bool IsPostprocessing);
171 bool deduceOperandElementTypeFunctionRet(
172 Instruction *I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
173 const SmallPtrSet<Value *, 4> *AskOps, bool IsPostprocessing,
175
178 DenseMap<Function *, CallInst *> Ptrcasts);
179 void propagateElemType(Value *Op, Type *ElemTy,
180 DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
181 void
182 propagateElemTypeRec(Value *Op, Type *PtrElemTy, Type *CastElemTy,
183 DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
184 void propagateElemTypeRec(Value *Op, Type *PtrElemTy, Type *CastElemTy,
185 DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
186 std::unordered_set<Value *> &Visited,
187 DenseMap<Function *, CallInst *> Ptrcasts);
188
191 Instruction *Dest, bool DeleteOld = true);
192
193 void applyDemangledPtrArgTypes(IRBuilder<> &B);
194
195 GetElementPtrInst *simplifyZeroLengthArrayGepInst(GetElementPtrInst *GEP);
196
198 bool postprocessTypes(Module &M);
199 bool processFunctionPointers(Module &M);
200 void parseFunDeclarations(Module &M);
201
202 void useRoundingMode(ConstrainedFPIntrinsic *FPI, IRBuilder<> &B);
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218 bool walkLogicalAccessChain(
219 GetElementPtrInst &GEP,
220 const std::function<void(Type *PointedType, uint64_t Index)>
221 &OnLiteralIndexing,
223 &OnDynamicIndexing);
224
225
226
227
228
229 Type *getGEPType(GetElementPtrInst *GEP);
230
231
232
233
234
235
236 Type *getGEPTypeLogical(GetElementPtrInst *GEP);
237
238 Instruction *buildLogicalAccessChainFromGEP(GetElementPtrInst &GEP);
239
240public:
241 static char ID;
242 SPIRVEmitIntrinsics(SPIRVTargetMachine *TM = nullptr)
243 : ModulePass(ID), TM(TM) {}
246 Instruction *visitGetElementPtrInst(GetElementPtrInst &I);
247 Instruction *visitBitCastInst(BitCastInst &I);
248 Instruction *visitInsertElementInst(InsertElementInst &I);
249 Instruction *visitExtractElementInst(ExtractElementInst &I);
250 Instruction *visitInsertValueInst(InsertValueInst &I);
251 Instruction *visitExtractValueInst(ExtractValueInst &I);
255 Instruction *visitAtomicCmpXchgInst(AtomicCmpXchgInst &I);
256 Instruction *visitUnreachableInst(UnreachableInst &I);
258
259 StringRef getPassName() const override { return "SPIRV emit intrinsics"; }
260
261 bool runOnModule(Module &M) override;
262
263 void getAnalysisUsage(AnalysisUsage &AU) const override {
264 ModulePass::getAnalysisUsage(AU);
265 }
266};
267
268bool isConvergenceIntrinsic(const Instruction *I) {
270 if ()
271 return false;
272
273 return II->getIntrinsicID() == Intrinsic::experimental_convergence_entry ||
274 II->getIntrinsicID() == Intrinsic::experimental_convergence_loop ||
275 II->getIntrinsicID() == Intrinsic::experimental_convergence_anchor;
276}
277
278bool expectIgnoredInIRTranslation(const Instruction *I) {
280 if ()
281 return false;
282 switch (II->getIntrinsicID()) {
283 case Intrinsic::invariant_start:
284 case Intrinsic::spv_resource_handlefrombinding:
285 case Intrinsic::spv_resource_getpointer:
286 return true;
287 default:
288 return false;
289 }
290}
291
292
295 if (II->getIntrinsicID() == Intrinsic::spv_ptrcast) {
296 Value *V = II->getArgOperand(0);
297 return getPointerRoot(V);
298 }
299 }
300 return I;
301}
302
303}
304
305char SPIRVEmitIntrinsics::ID = 0;
306
307INITIALIZE_PASS(SPIRVEmitIntrinsics, "emit-intrinsics", "SPIRV emit intrinsics",
308 false, false)
309
313}
314
319
325
328 B.SetInsertPoint(I->getParent()->getFirstNonPHIOrDbgOrAlloca());
329 else
331}
332
334 B.SetCurrentDebugLocation(I->getDebugLoc());
335 if (I->getType()->isVoidTy())
336 B.SetInsertPoint(I->getNextNode());
337 else
338 B.SetInsertPoint(*I->getInsertionPointAfterDef());
339}
340
343 switch (Intr->getIntrinsicID()) {
344 case Intrinsic::invariant_start:
345 case Intrinsic::invariant_end:
346 return false;
347 }
348 }
349 return true;
350}
351
353 if (I->getType()->isTokenTy())
354 report_fatal_error("A token is encountered but SPIR-V without extensions "
355 "does not support token type",
356 false);
357}
358
360 if (->hasName() || I->getType()->isAggregateType() ||
361 expectIgnoredInIRTranslation(I))
362 return;
363
365
366
367
369 if (Name.starts_with("spv.mutated_callsite"))
370 return;
371 if (Name.starts_with("spv.named_mutated_callsite"))
372 I->setName(Name.substr(Name.rfind('.') + 1));
373 }
377 std::vector<Value *> Args = {
380 B.CreateIntrinsic(Intrinsic::spv_assign_name, {I->getType()}, Args);
381}
382
383void SPIRVEmitIntrinsics::replaceAllUsesWith(Value *Src, Value *Dest,
384 bool DeleteOld) {
386
387 if (isTodoType(Src)) {
388 if (DeleteOld)
389 eraseTodoType(Src);
390 insertTodoType(Dest);
391 }
392}
393
394void SPIRVEmitIntrinsics::replaceAllUsesWithAndErase(IRBuilder<> &B,
395 Instruction *Src,
396 Instruction *Dest,
397 bool DeleteOld) {
399 std::string Name = Src->hasName() ? Src->getName().str() : "";
400 Src->eraseFromParent();
401 if (.empty()) {
403 if (Named.insert(Dest).second)
405 }
406}
407
413
414
420 return Ty;
422 return OriginalTy;
423 return Ty;
424}
425
426
427
428Type *SPIRVEmitIntrinsics::reconstructType(Value *Op, bool UnknownElemTypeI8,
429 bool IsPostprocessing) {
434 return Ty;
435
438
440 if (CI) {
443 }
444 if (UnknownElemTypeI8) {
445 if (!IsPostprocessing)
446 insertTodoType(Op);
449 }
450 return nullptr;
451}
452
453CallInst *SPIRVEmitIntrinsics::buildSpvPtrcast(Function *F, Value *Op,
454 Type *ElemTy) {
457
458
461 B.SetInsertPointPastAllocas(OpA->getParent());
462 B.SetCurrentDebugLocation(DebugLoc());
463 } else {
464 B.SetInsertPoint(F->getEntryBlock().getFirstNonPHIOrDbgOrAlloca());
465 }
466 Type *OpTy = Op->getType();
470 CallInst *PtrCasted =
471 B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
473 return PtrCasted;
474}
475
476void SPIRVEmitIntrinsics::replaceUsesOfWithSpvPtrcast(
478 DenseMap<Function *, CallInst *> Ptrcasts) {
479 Function *F = I->getParent()->getParent();
480 CallInst *PtrCastedI = nullptr;
481 auto It = Ptrcasts.find(F);
482 if (It == Ptrcasts.end()) {
483 PtrCastedI = buildSpvPtrcast(F, Op, ElemTy);
484 Ptrcasts[F] = PtrCastedI;
485 } else {
486 PtrCastedI = It->second;
487 }
488 I->replaceUsesOfWith(Op, PtrCastedI);
489}
490
491void SPIRVEmitIntrinsics::propagateElemType(
493 DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
494 DenseMap<Function *, CallInst *> Ptrcasts;
496 for (auto *U : Users) {
498 continue;
499 if (!VisitedSubst.insert(std::make_pair(U, Op)).second)
500 continue;
502
503
505 TypeValidated.find(UI) != TypeValidated.end())
506 replaceUsesOfWithSpvPtrcast(Op, ElemTy, UI, Ptrcasts);
507 }
508}
509
510void SPIRVEmitIntrinsics::propagateElemTypeRec(
512 DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
513 std::unordered_set<Value *> Visited;
514 DenseMap<Function *, CallInst *> Ptrcasts;
515 propagateElemTypeRec(Op, PtrElemTy, CastElemTy, VisitedSubst, Visited,
516 std::move(Ptrcasts));
517}
518
519void SPIRVEmitIntrinsics::propagateElemTypeRec(
521 DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
522 std::unordered_set<Value *> &Visited,
523 DenseMap<Function *, CallInst *> Ptrcasts) {
524 if (!Visited.insert(Op).second)
525 return;
527 for (auto *U : Users) {
529 continue;
530 if (!VisitedSubst.insert(std::make_pair(U, Op)).second)
531 continue;
533
534
536 TypeValidated.find(UI) != TypeValidated.end())
537 replaceUsesOfWithSpvPtrcast(Op, CastElemTy, UI, Ptrcasts);
538 }
539}
540
541
542
543
545SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
546 bool UnknownElemTypeI8) {
547 std::unordered_set<Value *> Visited;
548 return deduceElementTypeByValueDeep(ValueTy, Operand, Visited,
549 UnknownElemTypeI8);
550}
551
552Type *SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
553 Type *ValueTy, Value *Operand, std::unordered_set<Value *> &Visited,
554 bool UnknownElemTypeI8) {
555 Type *Ty = ValueTy;
556 if (Operand) {
558 if (Type *NestedTy =
559 deduceElementTypeHelper(Operand, Visited, UnknownElemTypeI8))
561 } else {
562 Ty = deduceNestedTypeHelper(dyn_cast(Operand), Ty, Visited,
563 UnknownElemTypeI8);
564 }
565 }
566 return Ty;
567}
568
569
570Type *SPIRVEmitIntrinsics::deduceElementTypeByUsersDeep(
571 Value *Op, std::unordered_set<Value *> &Visited, bool UnknownElemTypeI8) {
574 return nullptr;
575
577 return ElemTy;
578
579
581 return KnownTy;
582
583 for (User *OpU : Op->users()) {
585 if (Type *Ty = deduceElementTypeHelper(Inst, Visited, UnknownElemTypeI8))
586 return Ty;
587 }
588 }
589 return nullptr;
590}
591
592
593
594
595
598 if ((DemangledName.starts_with("__spirv_ocl_printf(") ||
602 return nullptr;
603}
604
605
606
607Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(Value *I,
608 bool UnknownElemTypeI8) {
609 std::unordered_set<Value *> Visited;
610 return deduceElementTypeHelper(I, Visited, UnknownElemTypeI8);
611}
612
613void SPIRVEmitIntrinsics::maybeAssignPtrType(Type *&Ty, Value *Op, Type *RefTy,
614 bool UnknownElemTypeI8) {
616 if (!UnknownElemTypeI8)
617 return;
618 insertTodoType(Op);
619 }
620 Ty = RefTy;
621}
622
623bool SPIRVEmitIntrinsics::walkLogicalAccessChain(
624 GetElementPtrInst &GEP,
625 const std::function<void(Type *, uint64_t)> &OnLiteralIndexing,
626 const std::function<void(Type *, Value *)> &OnDynamicIndexing) {
627
628
629 assert(GEP.getSourceElementType() ==
630 IntegerType::getInt8Ty(CurrF->getContext()));
631 assert(GEP.getNumIndices() == 1);
632
634 Value *Src = getPointerRoot(GEP.getPointerOperand());
635 Type *CurType = deduceElementType(Src, true);
636
637 Value *Operand = *GEP.idx_begin();
639 if (!CI) {
641
642
643 if (AT)
644 OnDynamicIndexing(AT->getElementType(), Operand);
645 return AT == nullptr;
646 }
647
650
651 do {
653 uint32_t EltTypeSize = DL.getTypeSizeInBits(AT->getElementType()) / 8;
657 CurType = AT->getElementType();
658 OnLiteralIndexing(CurType, Index);
660 uint32_t StructSize = DL.getTypeSizeInBits(ST) / 8;
662 (void)StructSize;
663 const auto &STL = DL.getStructLayout(ST);
664 unsigned Element = STL->getElementContainingOffset(Offset);
665 Offset -= STL->getElementOffset(Element);
666 CurType = ST->getElementType(Element);
667 OnLiteralIndexing(CurType, Element);
668 } else {
669
670
671 return true;
672 }
673 } while (Offset > 0);
674
675 return false;
676}
677
679SPIRVEmitIntrinsics::buildLogicalAccessChainFromGEP(GetElementPtrInst &GEP) {
683
684 std::vector<Value *> Indices;
685 Indices.push_back(ConstantInt::get(
686 IntegerType::getInt32Ty(CurrF->getContext()), 0, false));
687 walkLogicalAccessChain(
689 [&Indices, &B](Type *EltType, uint64_t Index) {
690 Indices.push_back(
691 ConstantInt::get(B.getInt64Ty(), Index, false));
692 },
694 uint32_t EltTypeSize = DL.getTypeSizeInBits(EltType) / 8;
696 Offset, ConstantInt::get(Offset->getType(), EltTypeSize,
697 false));
698 Indices.push_back(Index);
699 });
700
703 Args.push_back(B.getInt1(GEP.isInBounds()));
704 Args.push_back(GEP.getOperand(0));
706 auto *NewI = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
707 replaceAllUsesWithAndErase(B, &GEP, NewI);
708 return NewI;
709}
710
711Type *SPIRVEmitIntrinsics::getGEPTypeLogical(GetElementPtrInst *GEP) {
712
713 Type *CurType = GEP->getResultElementType();
714
715 bool Interrupted = walkLogicalAccessChain(
716 *GEP, [&CurType](Type *EltType, uint64_t Index) { CurType = EltType; },
717 [&CurType](Type *EltType, Value *Index) { CurType = EltType; });
718
719 return Interrupted ? GEP->getResultElementType() : CurType;
720}
721
722Type *SPIRVEmitIntrinsics::getGEPType(GetElementPtrInst *Ref) {
723 if (Ref->getSourceElementType() ==
724 IntegerType::getInt8Ty(CurrF->getContext()) &&
726 return getGEPTypeLogical(Ref);
727 }
728
729 Type *Ty = nullptr;
730
731
733 Ty = Ref->getSourceElementType();
736 } else {
737 Ty = Ref->getResultElementType();
738 }
739 return Ty;
740}
741
742Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
743 Value *I, std::unordered_set<Value *> &Visited, bool UnknownElemTypeI8,
744 bool IgnoreKnownType) {
745
746 if ()
747 return nullptr;
748
749
750 if (!IgnoreKnownType)
752 return KnownTy;
753
754
755 if (!Visited.insert(I).second)
756 return nullptr;
757
758
759 Type *Ty = nullptr;
760
762 maybeAssignPtrType(Ty, I, Ref->getAllocatedType(), UnknownElemTypeI8);
764 Ty = getGEPType(Ref);
766 Value *Op = Ref->getPointerOperand();
768 if (!KnownTy)
769 KnownTy = Op->getType();
771 maybeAssignPtrType(Ty, I, ElemTy, UnknownElemTypeI8);
774 Ty = SPIRV::getOriginalFunctionType(*Fn);
776 } else {
777 Ty = deduceElementTypeByValueDeep(
778 Ref->getValueType(),
779 Ref->getNumOperands() > 0 ? Ref->getOperand(0) : nullptr, Visited,
780 UnknownElemTypeI8);
781 }
783 Type *RefTy = deduceElementTypeHelper(Ref->getPointerOperand(), Visited,
784 UnknownElemTypeI8);
785 maybeAssignPtrType(Ty, I, RefTy, UnknownElemTypeI8);
787 maybeAssignPtrType(Ty, I, Ref->getDestTy(), UnknownElemTypeI8);
789 if (Type *Src = Ref->getSrcTy(), *Dest = Ref->getDestTy();
791 Ty = deduceElementTypeHelper(Ref->getOperand(0), Visited,
792 UnknownElemTypeI8);
796 Ty = deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8);
800 Ty = deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8);
802 Type *BestTy = nullptr;
803 unsigned MaxN = 1;
804 DenseMap<Type *, unsigned> PhiTys;
805 for (int i = Ref->getNumIncomingValues() - 1; i >= 0; --i) {
806 Ty = deduceElementTypeByUsersDeep(Ref->getIncomingValue(i), Visited,
807 UnknownElemTypeI8);
808 if (!Ty)
809 continue;
811 if (!It.second) {
812 ++It.first->second;
813 if (It.first->second > MaxN) {
814 MaxN = It.first->second;
815 BestTy = Ty;
816 }
817 }
818 }
819 if (BestTy)
820 Ty = BestTy;
822 for (Value *Op : {Ref->getTrueValue(), Ref->getFalseValue()}) {
823 Ty = deduceElementTypeByUsersDeep(Op, Visited, UnknownElemTypeI8);
824 if (Ty)
825 break;
826 }
828 static StringMap ResTypeByArg = {
829 {"to_global", 0},
830 {"to_local", 0},
831 {"to_private", 0},
832 {"__spirv_GenericCastToPtr_ToGlobal", 0},
833 {"__spirv_GenericCastToPtr_ToLocal", 0},
834 {"__spirv_GenericCastToPtr_ToPrivate", 0},
835 {"__spirv_GenericCastToPtrExplicit_ToGlobal", 0},
836 {"__spirv_GenericCastToPtrExplicit_ToLocal", 0},
837 {"__spirv_GenericCastToPtrExplicit_ToPrivate", 0}};
838
839
841 if (II && II->getIntrinsicID() == Intrinsic::spv_resource_getpointer) {
843 if (HandleType->getTargetExtName() == "spirv.Image" ||
844 HandleType->getTargetExtName() == "spirv.SignedImage") {
845 for (User *U : II->users()) {
847 if (Ty)
848 break;
849 }
850 } else if (HandleType->getTargetExtName() == "spirv.VulkanBuffer") {
851
852 Ty = HandleType->getTypeParameter(0);
855 else {
859 }
861 } else {
862 llvm_unreachable("Unknown handle type for spv_resource_getpointer.");
863 }
864 } else if (II && II->getIntrinsicID() ==
865 Intrinsic::spv_generic_cast_to_ptr_explicit) {
866 Ty = deduceElementTypeHelper(CI->getArgOperand(0), Visited,
867 UnknownElemTypeI8);
868 } else if (Function *CalledF = CI->getCalledFunction()) {
869 std::string DemangledName =
871 if (DemangledName.length() > 0)
872 DemangledName = SPIRV::lookupBuiltinNameHelper(DemangledName);
873 auto AsArgIt = ResTypeByArg.find(DemangledName);
874 if (AsArgIt != ResTypeByArg.end())
875 Ty = deduceElementTypeHelper(CI->getArgOperand(AsArgIt->second),
876 Visited, UnknownElemTypeI8);
878 Ty = KnownRetTy;
879 }
880 }
881
882
883 if (Ty && !IgnoreKnownType) {
884
886 }
887
888 return Ty;
889}
890
891
892
893
894Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(User *U,
895 bool UnknownElemTypeI8) {
896 std::unordered_set<Value *> Visited;
897 return deduceNestedTypeHelper(U, U->getType(), Visited, UnknownElemTypeI8);
898}
899
900Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(
901 User *U, Type *OrigTy, std::unordered_set<Value *> &Visited,
902 bool UnknownElemTypeI8) {
903 if (!U)
904 return OrigTy;
905
906
908 return KnownTy;
909
910
911 if (!Visited.insert(U).second)
912 return OrigTy;
913
916 bool Change = false;
917 for (unsigned i = 0; i < U->getNumOperands(); ++i) {
918 Value *Op = U->getOperand(i);
919 assert(Op && "Operands should not be null.");
920 Type *OpTy = Op->getType();
921 Type *Ty = OpTy;
923 if (Type *NestedTy =
924 deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8))
926 } else {
927 Ty = deduceNestedTypeHelper(dyn_cast(Op), OpTy, Visited,
928 UnknownElemTypeI8);
929 }
931 Change |= Ty != OpTy;
932 }
933 if (Change) {
936 return NewTy;
937 }
939 if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
940 Type *OpTy = ArrTy->getElementType();
941 Type *Ty = OpTy;
943 if (Type *NestedTy =
944 deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8))
946 } else {
947 Ty = deduceNestedTypeHelper(dyn_cast(Op), OpTy, Visited,
948 UnknownElemTypeI8);
949 }
950 if (Ty != OpTy) {
951 Type *NewTy = ArrayType::get(Ty, ArrTy->getNumElements());
953 return NewTy;
954 }
955 }
957 if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
958 Type *OpTy = VecTy->getElementType();
959 Type *Ty = OpTy;
961 if (Type *NestedTy =
962 deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8))
964 } else {
965 Ty = deduceNestedTypeHelper(dyn_cast(Op), OpTy, Visited,
966 UnknownElemTypeI8);
967 }
968 if (Ty != OpTy) {
969 Type *NewTy = VectorType::get(Ty, VecTy->getElementCount());
971 return NewTy;
972 }
973 }
974 }
975
976 return OrigTy;
977}
978
979Type *SPIRVEmitIntrinsics::deduceElementType(Value *I, bool UnknownElemTypeI8) {
980 if (Type *Ty = deduceElementTypeHelper(I, UnknownElemTypeI8))
981 return Ty;
982 if (!UnknownElemTypeI8)
983 return nullptr;
984 insertTodoType(I);
985 return IntegerType::getInt8Ty(I->getContext());
986}
987
989 Value *PointerOperand) {
992 return nullptr;
994 if (!PtrTy)
995 return I->getType();
998 return nullptr;
999}
1000
1001
1002
1003bool SPIRVEmitIntrinsics::deduceOperandElementTypeCalledFunction(
1004 CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
1005 Type *&KnownElemTy, bool &Incomplete) {
1007 if (!CalledF)
1008 return false;
1009 std::string DemangledName =
1011 if (DemangledName.length() > 0 &&
1012 !StringRef(DemangledName).starts_with("llvm.")) {
1013 const SPIRVSubtarget &ST = TM->getSubtarget(*CalledF);
1014 auto [Grp, Opcode, ExtNo] = SPIRV::mapBuiltinToOpcode(
1015 DemangledName, ST.getPreferredInstructionSet());
1016 if (Opcode == SPIRV::OpGroupAsyncCopy) {
1017 for (unsigned i = 0, PtrCnt = 0; i < CI->arg_size() && PtrCnt < 2; ++i) {
1020 continue;
1021 ++PtrCnt;
1023 KnownElemTy = ElemTy;
1024 Ops.push_back(std::make_pair(Op, i));
1025 }
1026 } else if (Grp == SPIRV::Atomic || Grp == SPIRV::AtomicFloating) {
1028 return true;
1031 return true;
1032 switch (Opcode) {
1033 case SPIRV::OpAtomicFAddEXT:
1034 case SPIRV::OpAtomicFMinEXT:
1035 case SPIRV::OpAtomicFMaxEXT:
1036 case SPIRV::OpAtomicLoad:
1037 case SPIRV::OpAtomicCompareExchangeWeak:
1038 case SPIRV::OpAtomicCompareExchange:
1039 case SPIRV::OpAtomicExchange:
1040 case SPIRV::OpAtomicIAdd:
1041 case SPIRV::OpAtomicISub:
1042 case SPIRV::OpAtomicOr:
1043 case SPIRV::OpAtomicXor:
1044 case SPIRV::OpAtomicAnd:
1045 case SPIRV::OpAtomicUMin:
1046 case SPIRV::OpAtomicUMax:
1047 case SPIRV::OpAtomicSMin:
1048 case SPIRV::OpAtomicSMax: {
1051 if (!KnownElemTy)
1052 return true;
1053 Incomplete = isTodoType(Op);
1054 Ops.push_back(std::make_pair(Op, 0));
1055 } break;
1056 case SPIRV::OpAtomicStore: {
1058 return true;
1063 if (!KnownElemTy)
1064 return true;
1065 Incomplete = isTodoType(Op);
1066 Ops.push_back(std::make_pair(Op, 0));
1067 } break;
1068 }
1069 }
1070 }
1071 return true;
1072}
1073
1074
1075void SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionPointer(
1076 CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
1077 Type *&KnownElemTy, bool IsPostprocessing) {
1080 return;
1081 Ops.push_back(std::make_pair(Op, std::numeric_limits::max()));
1082 FunctionType *FTy = SPIRV::getOriginalFunctionType(*CI);
1083 bool IsNewFTy = false, IsIncomplete = false;
1086 Type *ArgTy = Arg->getType();
1089 IsNewFTy = true;
1091 if (isTodoType(Arg))
1092 IsIncomplete = true;
1093 } else {
1094 IsIncomplete = true;
1095 }
1096 } else {
1097 ArgTy = FTy->getFunctionParamType(ParmIdx);
1098 }
1100 }
1101 Type *RetTy = FTy->getReturnType();
1104 IsNewFTy = true;
1105 RetTy =
1107 if (isTodoType(CI))
1108 IsIncomplete = true;
1109 } else {
1110 IsIncomplete = true;
1111 }
1112 }
1113 if (!IsPostprocessing && IsIncomplete)
1114 insertTodoType(Op);
1115 KnownElemTy =
1116 IsNewFTy ? FunctionType::get(RetTy, ArgTys, FTy->isVarArg()) : FTy;
1117}
1118
1119bool SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionRet(
1120 Instruction *I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
1121 const SmallPtrSet<Value *, 4> *AskOps, bool IsPostprocessing,
1122 Type *&KnownElemTy, Value *Op, Function *F) {
1124 if (KnownElemTy)
1125 return false;
1132
1133 DenseSet<std::pair<Value *, Value *>> VisitedSubst{std::make_pair(I, Op)};
1134 for (User *U : F->users()) {
1137 continue;
1142 propagateElemType(CI, PrevElemTy, VisitedSubst);
1143 }
1144 }
1145 }
1146
1147
1148
1149
1150
1151 if (IncompleteRets)
1152 for (Instruction *IncompleteRetI : *IncompleteRets)
1153 deduceOperandElementType(IncompleteRetI, nullptr, AskOps,
1154 IsPostprocessing);
1155 } else if (IncompleteRets) {
1156 IncompleteRets->insert(I);
1157 }
1158 TypeValidated.insert(I);
1159 return true;
1160}
1161
1162
1163
1164
1165
1166void SPIRVEmitIntrinsics::deduceOperandElementType(
1167 Instruction *I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
1168 const SmallPtrSet<Value *, 4> *AskOps, bool IsPostprocessing) {
1170 Type *KnownElemTy = nullptr;
1171 bool Incomplete = false;
1172
1176 return;
1177 Incomplete = isTodoType(I);
1178 for (unsigned i = 0; i < Ref->getNumIncomingValues(); i++) {
1179 Value *Op = Ref->getIncomingValue(i);
1181 Ops.push_back(std::make_pair(Op, i));
1182 }
1185 if (!KnownElemTy)
1186 return;
1187 Incomplete = isTodoType(I);
1188 Ops.push_back(std::make_pair(Ref->getPointerOperand(), 0));
1191 return;
1193 if (!KnownElemTy)
1194 return;
1195 Incomplete = isTodoType(I);
1196 Ops.push_back(std::make_pair(Ref->getOperand(0), 0));
1199 return;
1200 KnownElemTy = Ref->getSourceElementType();
1201 Ops.push_back(std::make_pair(Ref->getPointerOperand(),
1204 KnownElemTy = I->getType();
1206 return;
1209 return;
1210 Ops.push_back(std::make_pair(Ref->getPointerOperand(),
1213 if (!(KnownElemTy =
1214 reconstructType(Ref->getValueOperand(), false, IsPostprocessing)))
1215 return;
1218 return;
1219 Ops.push_back(std::make_pair(Ref->getPointerOperand(),
1225 if (!KnownElemTy)
1226 return;
1227 Incomplete = isTodoType(Ref->getPointerOperand());
1228 Ops.push_back(std::make_pair(Ref->getPointerOperand(),
1234 if (!KnownElemTy)
1235 return;
1236 Incomplete = isTodoType(Ref->getPointerOperand());
1237 Ops.push_back(std::make_pair(Ref->getPointerOperand(),
1242 return;
1243 Incomplete = isTodoType(I);
1244 for (unsigned i = 0; i < Ref->getNumOperands(); i++) {
1247 Ops.push_back(std::make_pair(Op, i));
1248 }
1251 return;
1253 if ()
1254 return;
1255 if (deduceOperandElementTypeFunctionRet(I, IncompleteRets, AskOps,
1256 IsPostprocessing, KnownElemTy, Op,
1257 CurrF))
1258 return;
1259 Incomplete = isTodoType(CurrF);
1260 Ops.push_back(std::make_pair(Op, 0));
1263 return;
1264 Value *Op0 = Ref->getOperand(0);
1265 Value *Op1 = Ref->getOperand(1);
1266 bool Incomplete0 = isTodoType(Op0);
1267 bool Incomplete1 = isTodoType(Op1);
1269 Type *ElemTy0 = (Incomplete0 && !Incomplete1 && ElemTy1)
1270 ? nullptr
1271 : GR->findDeducedElementType(Op0);
1272 if (ElemTy0) {
1273 KnownElemTy = ElemTy0;
1274 Incomplete = Incomplete0;
1275 Ops.push_back(std::make_pair(Op1, 1));
1276 } else if (ElemTy1) {
1277 KnownElemTy = ElemTy1;
1278 Incomplete = Incomplete1;
1279 Ops.push_back(std::make_pair(Op0, 0));
1280 }
1283 deduceOperandElementTypeCalledFunction(CI, Ops, KnownElemTy, Incomplete);
1284 else if (HaveFunPtrs)
1285 deduceOperandElementTypeFunctionPointer(CI, Ops, KnownElemTy,
1286 IsPostprocessing);
1287 }
1288
1289
1290 if (!KnownElemTy || Ops.size() == 0)
1291 return;
1292
1293 LLVMContext &Ctx = CurrF->getContext();
1295 for (auto &OpIt : Ops) {
1297 if (AskOps && !AskOps->contains(Op))
1298 continue;
1299 Type *AskTy = nullptr;
1300 CallInst *AskCI = nullptr;
1301 if (IsPostprocessing && AskOps) {
1304 assert(AskTy && AskCI);
1305 }
1307 if (Ty == KnownElemTy)
1308 continue;
1310 Type *OpTy = Op->getType();
1311 if (Op->hasUseList() &&
1315
1316 if (!Incomplete)
1317 eraseTodoType(Op);
1318 else if (!IsPostprocessing)
1319 insertTodoType(Op);
1320
1322 if (AssignCI == nullptr) {
1325 CallInst *CI =
1326 buildIntrWithMD(Intrinsic::spv_assign_ptr_type, {OpTy}, OpTyVal, Op,
1329 } else {
1331 DenseSet<std::pair<Value *, Value *>> VisitedSubst{
1333 propagateElemTypeRec(Op, KnownElemTy, PrevElemTy, VisitedSubst);
1334 }
1335 } else {
1336 eraseTodoType(Op);
1337 CallInst *PtrCastI =
1338 buildSpvPtrcast(I->getParent()->getParent(), Op, KnownElemTy);
1339 if (OpIt.second == std::numeric_limits::max())
1341 else
1342 I->setOperand(OpIt.second, PtrCastI);
1343 }
1344 }
1345 TypeValidated.insert(I);
1346}
1347
1348void SPIRVEmitIntrinsics::replaceMemInstrUses(Instruction *Old,
1349 Instruction *New,
1353 if (isAssignTypeInstr(U)) {
1354 B.SetInsertPoint(U);
1355 SmallVector<Value *, 2> Args = {New, U->getOperand(1)};
1356 CallInst *AssignCI =
1357 B.CreateIntrinsic(Intrinsic::spv_assign_type, {New->getType()}, Args);
1359 U->eraseFromParent();
1362 U->replaceUsesOfWith(Old, New);
1363 } else {
1365 }
1366 }
1367 New->copyMetadata(*Old);
1369}
1370
1371void SPIRVEmitIntrinsics::preprocessUndefs(IRBuilder<> &B) {
1372 std::queue<Instruction *> Worklist;
1374 Worklist.push(&I);
1375
1376 while (!Worklist.empty()) {
1378 bool BPrepared = false;
1379 Worklist.pop();
1380
1381 for (auto &Op : I->operands()) {
1383 if (!AggrUndef || ->getType()->isAggregateType())
1384 continue;
1385
1386 if (!BPrepared) {
1388 BPrepared = true;
1389 }
1390 auto *IntrUndef = B.CreateIntrinsic(Intrinsic::spv_undef, {});
1391 Worklist.push(IntrUndef);
1392 I->replaceUsesOfWith(Op, IntrUndef);
1393 AggrConsts[IntrUndef] = AggrUndef;
1394 AggrConstTypes[IntrUndef] = AggrUndef->getType();
1395 }
1396 }
1397}
1398
1399void SPIRVEmitIntrinsics::preprocessCompositeConstants(IRBuilder<> &B) {
1400 std::queue<Instruction *> Worklist;
1402 Worklist.push(&I);
1403
1404 while (!Worklist.empty()) {
1405 auto *I = Worklist.front();
1406 bool IsPhi = isa(I), BPrepared = false;
1408 bool KeepInst = false;
1409 for (const auto &Op : I->operands()) {
1410 Constant *AggrConst = nullptr;
1411 Type *ResTy = nullptr;
1413 AggrConst = COp;
1414 ResTy = COp->getType();
1416 AggrConst = COp;
1419 AggrConst = COp;
1422 AggrConst = COp;
1425 AggrConst = COp;
1426 ResTy = Op->getType()->isVectorTy() ? COp->getType() : B.getInt32Ty();
1427 }
1428 if (AggrConst) {
1431 for (unsigned i = 0; i < COp->getNumElements(); ++i)
1432 Args.push_back(COp->getElementAsConstant(i));
1433 else
1435 if (!BPrepared) {
1436 IsPhi ? B.SetInsertPointPastAllocas(I->getParent()->getParent())
1438 BPrepared = true;
1439 }
1440 auto *CI =
1441 B.CreateIntrinsic(Intrinsic::spv_const_composite, {ResTy}, {Args});
1442 Worklist.push(CI);
1444 KeepInst = true;
1445 AggrConsts[CI] = AggrConst;
1446 AggrConstTypes[CI] = deduceNestedTypeHelper(AggrConst, false);
1447 }
1448 }
1449 if (!KeepInst)
1450 Worklist.pop();
1451 }
1452}
1453
1458 B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {I->getType()},
1460}
1461
1463 unsigned RoundingModeDeco,
1468 Ctx,
1470 ConstantInt::get(Int32Ty, SPIRV::Decoration::FPRoundingMode)),
1473}
1474
1479 MDNode *SaturatedConversionNode =
1481 Int32Ty, SPIRV::Decoration::SaturatedConversion))});
1483}
1484
1488 if (Fu->isIntrinsic()) {
1489 unsigned const int IntrinsicId = Fu->getIntrinsicID();
1490 switch (IntrinsicId) {
1491 case Intrinsic::fptosi_sat:
1492 case Intrinsic::fptoui_sat:
1494 break;
1495 default:
1496 break;
1497 }
1498 }
1499 }
1500 }
1501}
1502
1503Instruction *SPIRVEmitIntrinsics::visitCallInst(CallInst &Call) {
1505 return &Call;
1506
1508 LLVMContext &Ctx = CurrF->getContext();
1509
1511 MDString *ConstraintString = MDString::get(Ctx, IA->getConstraintString());
1517
1520 B.CreateIntrinsic(Intrinsic::spv_inline_asm, {Args});
1521 return &Call;
1522}
1523
1524
1525void SPIRVEmitIntrinsics::useRoundingMode(ConstrainedFPIntrinsic *FPI,
1528 if (.has_value())
1529 return;
1530 unsigned RoundingModeDeco = std::numeric_limits::max();
1531 switch (RM.value()) {
1532 default:
1533
1534 break;
1535 case RoundingMode::NearestTiesToEven:
1536 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTE;
1537 break;
1538 case RoundingMode::TowardNegative:
1539 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTN;
1540 break;
1541 case RoundingMode::TowardPositive:
1542 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTP;
1543 break;
1544 case RoundingMode::TowardZero:
1545 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTZ;
1546 break;
1547 case RoundingMode::Dynamic:
1548 case RoundingMode::NearestTiesToAway:
1549
1550 break;
1551 }
1552 if (RoundingModeDeco == std::numeric_limits::max())
1553 return;
1554
1556}
1557
1558Instruction *SPIRVEmitIntrinsics::visitSwitchInst(SwitchInst &I) {
1565 Args.push_back(I.getCondition());
1566 BBCases.push_back(I.getDefaultDest());
1568 for (auto &Case : I.cases()) {
1569 Args.push_back(Case.getCaseValue());
1570 BBCases.push_back(Case.getCaseSuccessor());
1572 }
1573 CallInst *NewI = B.CreateIntrinsic(Intrinsic::spv_switch,
1574 {I.getOperand(0)->getType()}, {Args});
1575
1576
1578 I.eraseFromParent();
1579
1580
1581 B.SetInsertPoint(ParentBB);
1582 IndirectBrInst *BrI = B.CreateIndirectBr(
1584 BBCases.size());
1585 for (BasicBlock *BBCase : BBCases)
1587 return BrI;
1588}
1589
1591 if (GEP->getNumIndices() == 0)
1592 return false;
1594 return CI->getZExtValue() == 0;
1595 }
1596 return false;
1597}
1598
1599Instruction *SPIRVEmitIntrinsics::visitGetElementPtrInst(GetElementPtrInst &I) {
1602
1604
1605
1606
1607
1608
1609 if (I.getSourceElementType() ==
1610 IntegerType::getInt8Ty(CurrF->getContext())) {
1611 return buildLogicalAccessChainFromGEP(I);
1612 }
1613
1614
1615
1616 Value *PtrOp = I.getPointerOperand();
1617 Type *SrcElemTy = I.getSourceElementType();
1618 Type *DeducedPointeeTy = deduceElementType(PtrOp, true);
1619
1621 if (ArrTy->getElementType() == SrcElemTy) {
1623 Type *FirstIdxType = I.getOperand(1)->getType();
1624 NewIndices.push_back(ConstantInt::get(FirstIdxType, 0));
1625 for (Value *Idx : I.indices())
1627
1630 Args.push_back(B.getInt1(I.isInBounds()));
1631 Args.push_back(I.getPointerOperand());
1632 Args.append(NewIndices.begin(), NewIndices.end());
1633
1634 auto *NewI = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
1635 replaceAllUsesWithAndErase(B, &I, NewI);
1636 return NewI;
1637 }
1638 }
1639 }
1640
1643 Args.push_back(B.getInt1(I.isInBounds()));
1645 auto *NewI = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
1646 replaceAllUsesWithAndErase(B, &I, NewI);
1647 return NewI;
1648}
1649
1650Instruction *SPIRVEmitIntrinsics::visitBitCastInst(BitCastInst &I) {
1654
1655
1656
1657
1658
1661 I.eraseFromParent();
1662 return nullptr;
1663 }
1664
1667 auto *NewI = B.CreateIntrinsic(Intrinsic::spv_bitcast, {Types}, {Args});
1668 replaceAllUsesWithAndErase(B, &I, NewI);
1669 return NewI;
1670}
1671
1672void SPIRVEmitIntrinsics::insertAssignPtrTypeTargetExt(
1674 Type *VTy = V->getType();
1675
1676
1679 if (ElemTy != AssignedType)
1681
1683 if (!AssignCI) {
1685 return;
1686 }
1687
1688 Type *CurrentType =
1692 if (CurrentType == AssignedType)
1693 return;
1694
1695
1699 " for value " + V->getName(),
1700 false);
1701
1702
1703
1705}
1706
1707void SPIRVEmitIntrinsics::replacePointerOperandWithPtrCast(
1708 Instruction *I, Value *Pointer, Type *ExpectedElementType,
1709 unsigned OperandToReplace, IRBuilder<> &B) {
1710 TypeValidated.insert(I);
1711
1712
1713 Type *PointerElemTy = deduceElementTypeHelper(Pointer, false);
1714 if (PointerElemTy == ExpectedElementType ||
1716 return;
1717
1720 MetadataAsValue *VMD = buildMD(ExpectedElementVal);
1722 bool FirstPtrCastOrAssignPtrType = true;
1723
1724
1725
1726
1727 if (Pointer->hasUseList()) {
1728 for (auto User : Pointer->users()) {
1730 if ( ||
1731 (II->getIntrinsicID() != Intrinsic::spv_assign_ptr_type &&
1732 II->getIntrinsicID() != Intrinsic::spv_ptrcast) ||
1733 II->getOperand(0) != Pointer)
1734 continue;
1735
1736
1737
1738 FirstPtrCastOrAssignPtrType = false;
1739 if (II->getOperand(1) != VMD ||
1742 continue;
1743
1744
1745
1746 if (II->getIntrinsicID() != Intrinsic::spv_ptrcast)
1747 return;
1748
1749
1750
1751 if (II->getParent() != I->getParent())
1752 continue;
1753
1754 I->setOperand(OperandToReplace, II);
1755 return;
1756 }
1757 }
1758
1760 if (FirstPtrCastOrAssignPtrType) {
1761
1762
1764 return;
1765 } else if (isTodoType(Pointer)) {
1766 eraseTodoType(Pointer);
1768
1769
1773 DenseSet<std::pair<Value *, Value *>> VisitedSubst{
1774 std::make_pair(I, Pointer)};
1775 GR->updateAssignType(AssignCI, Pointer, ExpectedElementVal);
1776 propagateElemType(Pointer, PrevElemTy, VisitedSubst);
1777 } else {
1779 }
1780 return;
1781 }
1782 }
1783 }
1784
1785
1788 auto *PtrCastI = B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
1789 I->setOperand(OperandToReplace, PtrCastI);
1790
1792}
1793
1794void SPIRVEmitIntrinsics::insertPtrCastOrAssignTypeInstr(Instruction *I,
1796
1799 replacePointerOperandWithPtrCast(
1800 I, SI->getValueOperand(), IntegerType::getInt8Ty(CurrF->getContext()),
1801 0, B);
1802 }
1803 if (SI) {
1804 Value *Op = SI->getValueOperand();
1806 Type *OpTy = Op->getType();
1809 if (OpTy == Op->getType())
1810 OpTy = deduceElementTypeByValueDeep(OpTy, Op, false);
1811 replacePointerOperandWithPtrCast(I, Pointer, OpTy, 1, B);
1812 return;
1813 }
1816 Type *OpTy = LI->getType();
1820 } else {
1821 Type *NewOpTy = OpTy;
1822 OpTy = deduceElementTypeByValueDeep(OpTy, LI, false);
1823 if (OpTy == NewOpTy)
1824 insertTodoType(Pointer);
1825 }
1826 }
1827 replacePointerOperandWithPtrCast(I, Pointer, OpTy, 0, B);
1828 return;
1829 }
1832 Type *OpTy = nullptr;
1833
1834
1835
1836
1837
1839 return;
1840 }
1841
1842
1843 if (!OpTy)
1844 OpTy = GEPI->getSourceElementType();
1845
1846 replacePointerOperandWithPtrCast(I, Pointer, OpTy, 0, B);
1848 insertTodoType(Pointer);
1849 return;
1850 }
1851
1852
1853
1857 return;
1858
1859
1860 std::string DemangledName =
1864 bool HaveTypes = false;
1869 CalledArgTys.push_back(nullptr);
1871 CalledArgTys.push_back(ArgTypeElem);
1872 HaveTypes = true;
1873 } else {
1877 if (!ElemTy) {
1879 if (ElemTy) {
1881 } else {
1882 for (User *U : CalledArg->users()) {
1884 if ((ElemTy = deduceElementTypeHelper(Inst, false)) != nullptr)
1885 break;
1886 }
1887 }
1888 }
1889 }
1890 HaveTypes |= ElemTy != nullptr;
1892 }
1893 }
1894
1895 if (DemangledName.empty() && !HaveTypes)
1896 return;
1897
1901 continue;
1902
1903
1905
1906
1907
1910 continue;
1911 }
1912
1913 Type *ExpectedType =
1914 OpIdx < CalledArgTys.size() ? CalledArgTys[OpIdx] : nullptr;
1915 if (!ExpectedType && !DemangledName.empty())
1916 ExpectedType = SPIRV::parseBuiltinCallArgumentBaseType(
1917 DemangledName, OpIdx, I->getContext());
1918 if (!ExpectedType || ExpectedType->isVoidTy())
1919 continue;
1920
1924 ArgOperand, B);
1925 else
1926 replacePointerOperandWithPtrCast(CI, ArgOperand, ExpectedType, OpIdx, B);
1927 }
1928}
1929
1930Instruction *SPIRVEmitIntrinsics::visitInsertElementInst(InsertElementInst &I) {
1931
1932
1934 return &I;
1935
1937 I.getOperand(1)->getType(),
1938 I.getOperand(2)->getType()};
1942 auto *NewI = B.CreateIntrinsic(Intrinsic::spv_insertelt, {Types}, {Args});
1943 replaceAllUsesWithAndErase(B, &I, NewI);
1944 return NewI;
1945}
1946
1948SPIRVEmitIntrinsics::visitExtractElementInst(ExtractElementInst &I) {
1949
1950
1951 if (isVector1(I.getVectorOperandType()))
1952 return &I;
1953
1957 I.getIndexOperand()->getType()};
1958 SmallVector<Value *, 2> Args = {I.getVectorOperand(), I.getIndexOperand()};
1959 auto *NewI = B.CreateIntrinsic(Intrinsic::spv_extractelt, {Types}, {Args});
1960 replaceAllUsesWithAndErase(B, &I, NewI);
1961 return NewI;
1962}
1963
1964Instruction *SPIRVEmitIntrinsics::visitInsertValueInst(InsertValueInst &I) {
1969 Value *AggregateOp = I.getAggregateOperand();
1972 else
1973 Args.push_back(AggregateOp);
1974 Args.push_back(I.getInsertedValueOperand());
1975 for (auto &Op : I.indices())
1976 Args.push_back(B.getInt32(Op));
1978 B.CreateIntrinsic(Intrinsic::spv_insertv, {Types}, {Args});
1979 replaceMemInstrUses(&I, NewI, B);
1980 return NewI;
1981}
1982
1983Instruction *SPIRVEmitIntrinsics::visitExtractValueInst(ExtractValueInst &I) {
1984 if (I.getAggregateOperand()->getType()->isAggregateType())
1985 return &I;
1989 for (auto &Op : I.indices())
1990 Args.push_back(B.getInt32(Op));
1991 auto *NewI =
1992 B.CreateIntrinsic(Intrinsic::spv_extractv, {I.getType()}, {Args});
1993 replaceAllUsesWithAndErase(B, &I, NewI);
1994 return NewI;
1995}
1996
1997Instruction *SPIRVEmitIntrinsics::visitLoadInst(LoadInst &I) {
1998 if (.getType()->isAggregateType())
1999 return &I;
2002 TrackConstants = false;
2005 TLI->getLoadMemOperandFlags(I, CurrF->getDataLayout());
2006 auto *NewI =
2007 B.CreateIntrinsic(Intrinsic::spv_load, {I.getOperand(0)->getType()},
2008 {I.getPointerOperand(), B.getInt16(Flags),
2009 B.getInt8(I.getAlign().value())});
2010 replaceMemInstrUses(&I, NewI, B);
2011 return NewI;
2012}
2013
2014Instruction *SPIRVEmitIntrinsics::visitStoreInst(StoreInst &I) {
2016 return &I;
2019 TrackConstants = false;
2022 TLI->getStoreMemOperandFlags(I, CurrF->getDataLayout());
2023 auto *PtrOp = I.getPointerOperand();
2024 auto *NewI = B.CreateIntrinsic(
2025 Intrinsic::spv_store, {I.getValueOperand()->getType(), PtrOp->getType()},
2026 {I.getValueOperand(), PtrOp, B.getInt16(Flags),
2027 B.getInt8(I.getAlign().value())});
2029 I.eraseFromParent();
2030 return NewI;
2031}
2032
2033Instruction *SPIRVEmitIntrinsics::visitAllocaInst(AllocaInst &I) {
2034 Value *ArraySize = nullptr;
2035 if (I.isArrayAllocation()) {
2036 const SPIRVSubtarget *STI = TM->getSubtargetImpl(*I.getFunction());
2038 SPIRV::Extension::SPV_INTEL_variable_length_array))
2040 "array allocation: this instruction requires the following "
2041 "SPIR-V extension: SPV_INTEL_variable_length_array",
2042 false);
2043 ArraySize = I.getArraySize();
2044 }
2047 TrackConstants = false;
2048 Type *PtrTy = I.getType();
2049 auto *NewI =
2050 ArraySize
2051 ? B.CreateIntrinsic(Intrinsic::spv_alloca_array,
2052 {PtrTy, ArraySize->getType()},
2053 {ArraySize, B.getInt8(I.getAlign().value())})
2054 : B.CreateIntrinsic(Intrinsic::spv_alloca, {PtrTy},
2055 {B.getInt8(I.getAlign().value())});
2056 replaceAllUsesWithAndErase(B, &I, NewI);
2057 return NewI;
2058}
2059
2060Instruction *SPIRVEmitIntrinsics::visitAtomicCmpXchgInst(AtomicCmpXchgInst &I) {
2061 assert(I.getType()->isAggregateType() && "Aggregate result is expected");
2065 Args.push_back(B.getInt32(
2066 static_cast<uint32_t>(getMemScope(I.getContext(), I.getSyncScopeID()))));
2067 Args.push_back(B.getInt32(
2068 static_cast<uint32_t>(getMemSemantics(I.getSuccessOrdering()))));
2069 Args.push_back(B.getInt32(
2070 static_cast<uint32_t>(getMemSemantics(I.getFailureOrdering()))));
2071 auto *NewI = B.CreateIntrinsic(Intrinsic::spv_cmpxchg,
2072 {I.getPointerOperand()->getType()}, {Args});
2073 replaceMemInstrUses(&I, NewI, B);
2074 return NewI;
2075}
2076
2077Instruction *SPIRVEmitIntrinsics::visitUnreachableInst(UnreachableInst &I) {
2080 B.CreateIntrinsic(Intrinsic::spv_unreachable, {});
2081 return &I;
2082}
2083
2084void SPIRVEmitIntrinsics::processGlobalValue(GlobalVariable &GV,
2086
2087 static const StringSet<> ArtificialGlobals{"llvm.global.annotations",
2088 "llvm.compiler.used"};
2089
2091 return;
2092
2095
2096
2097
2098 deduceElementTypeHelper(&GV, false);
2102 auto *InitInst = B.CreateIntrinsic(Intrinsic::spv_init_global,
2104 InitInst->setArgOperand(1, Init);
2105 }
2107 B.CreateIntrinsic(Intrinsic::spv_unref_global, GV.getType(), &GV);
2108}
2109
2110
2111
2112
2113bool SPIRVEmitIntrinsics::insertAssignPtrTypeIntrs(Instruction *I,
2115 bool UnknownElemTypeI8) {
2118 return false;
2119
2121 if (Type *ElemTy = deduceElementType(I, UnknownElemTypeI8)) {
2123 return false;
2124 }
2125 return true;
2126}
2127
2128void SPIRVEmitIntrinsics::insertAssignTypeIntrs(Instruction *I,
2130
2131 static StringMap ResTypeWellKnown = {
2132 {"async_work_group_copy", WellKnownTypes::Event},
2133 {"async_work_group_strided_copy", WellKnownTypes::Event},
2134 {"__spirv_GroupAsyncCopy", WellKnownTypes::Event}};
2135
2137
2138 bool IsKnown = false;
2143 std::string DemangledName =
2146 if (DemangledName.length() > 0)
2147 DemangledName =
2148 SPIRV::lookupBuiltinNameHelper(DemangledName, &DecorationId);
2149 auto ResIt = ResTypeWellKnown.find(DemangledName);
2150 if (ResIt != ResTypeWellKnown.end()) {
2151 IsKnown = true;
2153 switch (ResIt->second) {
2154 case WellKnownTypes::Event:
2157 break;
2158 }
2159 }
2160
2161 switch (DecorationId) {
2162 default:
2163 break;
2164 case FPDecorationId::SAT:
2166 break;
2167 case FPDecorationId::RTE:
2169 CI, SPIRV::FPRoundingMode::FPRoundingMode::RTE, B);
2170 break;
2171 case FPDecorationId::RTZ:
2173 CI, SPIRV::FPRoundingMode::FPRoundingMode::RTZ, B);
2174 break;
2175 case FPDecorationId::RTP:
2177 CI, SPIRV::FPRoundingMode::FPRoundingMode::RTP, B);
2178 break;
2179 case FPDecorationId::RTN:
2181 CI, SPIRV::FPRoundingMode::FPRoundingMode::RTN, B);
2182 break;
2183 }
2184 }
2185 }
2186
2190 Type *TypeToAssign = Ty;
2192 if (II->getIntrinsicID() == Intrinsic::spv_const_composite ||
2193 II->getIntrinsicID() == Intrinsic::spv_undef) {
2194 auto It = AggrConstTypes.find(II);
2195 if (It == AggrConstTypes.end())
2197 TypeToAssign = It->second;
2198 }
2199 }
2202 }
2203 for (const auto &Op : I->operands()) {
2205
2210 Type *OpTy = Op->getType();
2212 CallInst *AssignCI =
2217 Type *OpTy = Op->getType();
2219 if (OpTyElem) {
2223 GR->buildAssignPtr(B, ElemTy ? ElemTy : deduceElementType(Op, true),
2224 Op);
2225 } else {
2228
2229
2231 }
2232 CallInst *AssignCI =
2236 }
2237 }
2238 }
2239 }
2240}
2241
2242bool SPIRVEmitIntrinsics::shouldTryToAddMemAliasingDecoration(
2243 Instruction *Inst) {
2245 if (!STI->canUseExtension(SPIRV::Extension::SPV_INTEL_memory_access_aliasing))
2246 return false;
2247
2248
2249
2251 if (!CI)
2252 return false;
2256 case Intrinsic::spv_load:
2257 case Intrinsic::spv_store:
2258 return true;
2259 default:
2260 return false;
2261 }
2262 }
2264 const std::string Prefix = "__spirv_Atomic";
2265 const bool IsAtomic = Name.find(Prefix) == 0;
2266
2268 return true;
2269 }
2270 return false;
2271}
2272
2273void SPIRVEmitIntrinsics::insertSpirvDecorations(Instruction *I,
2275 if (MDNode *MD = I->getMetadata("spirv.Decorations")) {
2277 B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {I->getType()},
2279 }
2280
2281 {
2282 auto processMemAliasingDecoration = [&](unsigned Kind) {
2283 if (MDNode *AliasListMD = I->getMetadata(Kind)) {
2284 if (shouldTryToAddMemAliasingDecoration(I)) {
2285 uint32_t Dec = Kind == LLVMContext::MD_alias_scope
2286 ? SPIRV::Decoration::AliasScopeINTEL
2287 : SPIRV::Decoration::NoAliasINTEL;
2289 I, ConstantInt::get(B.getInt32Ty(), Dec),
2292 B.CreateIntrinsic(Intrinsic::spv_assign_aliasing_decoration,
2294 }
2295 }
2296 };
2297 processMemAliasingDecoration(LLVMContext::MD_alias_scope);
2298 processMemAliasingDecoration(LLVMContext::MD_noalias);
2299 }
2300
2301 if (MDNode *MD = I->getMetadata(LLVMContext::MD_fpmath)) {
2302 const SPIRVSubtarget *STI = TM->getSubtargetImpl(*I->getFunction());
2303 bool AllowFPMaxError =
2304 STI->canUseExtension(SPIRV::Extension::SPV_INTEL_fp_max_error);
2305 if (!AllowFPMaxError)
2306 return;
2307
2309 B.CreateIntrinsic(Intrinsic::spv_assign_fpmaxerror_decoration,
2310 {I->getType()},
2312 }
2313}
2314
2318 &FPFastMathDefaultInfoMap,
2320 auto it = FPFastMathDefaultInfoMap.find(F);
2321 if (it != FPFastMathDefaultInfoMap.end())
2322 return it->second;
2323
2324
2325
2326
2329 SPIRV::FPFastMathMode::None);
2331 SPIRV::FPFastMathMode::None);
2333 SPIRV::FPFastMathMode::None);
2334 return FPFastMathDefaultInfoMap[F] = std::move(FPFastMathDefaultInfoVec);
2335}
2336
2339 const Type *Ty) {
2340 size_t BitWidth = Ty->getScalarSizeInBits();
2341 int Index =
2344 assert(Index >= 0 && Index < 3 &&
2345 "Expected FPFastMathDefaultInfo for half, float, or double");
2346 assert(FPFastMathDefaultInfoVec.size() == 3 &&
2347 "Expected FPFastMathDefaultInfoVec to have exactly 3 elements");
2348 return FPFastMathDefaultInfoVec[Index];
2349}
2350
2351void SPIRVEmitIntrinsics::insertConstantsForFPFastMathDefault(Module &M) {
2353 if (->canUseExtension(SPIRV::Extension::SPV_KHR_float_controls2))
2354 return;
2355
2356
2357
2358
2359
2360
2361
2362 auto Node = M.getNamedMetadata("spirv.ExecutionMode");
2363 if (!Node) {
2364 if (.getNamedMetadata("opencl.enable.FP_CONTRACT")) {
2365
2366
2367
2368
2369
2370
2372 ConstantInt::get(Type::getInt32Ty(M.getContext()), 0);
2373
2374
2375 [[maybe_unused]] GlobalVariable *GV =
2376 new GlobalVariable(M,
2377 Type::getInt32Ty(M.getContext()),
2378 true,
2380 InitValue
2381 );
2382 }
2383 return;
2384 }
2385
2386
2387
2388
2389
2390
2391 DenseMap<Function *, SPIRV::FPFastMathDefaultInfoVector>
2392 FPFastMathDefaultInfoMap;
2393
2394 for (unsigned i = 0; i < Node->getNumOperands(); i++) {
2399 const auto EM =
2402 ->getZExtValue();
2403 if (EM == SPIRV::ExecutionMode::FPFastMathDefault) {
2405 "Expected 4 operands for FPFastMathDefault");
2407 unsigned Flags =
2410 ->getZExtValue();
2411 SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
2413 SPIRV::FPFastMathDefaultInfo &Info =
2416 Info.FPFastMathDefault = true;
2417 } else if (EM == SPIRV::ExecutionMode::ContractionOff) {
2419 "Expected no operands for ContractionOff");
2420
2421
2422
2423 SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
2425 for (SPIRV::FPFastMathDefaultInfo &Info : FPFastMathDefaultInfoVec) {
2426 Info.ContractionOff = true;
2427 }
2428 } else if (EM == SPIRV::ExecutionMode::SignedZeroInfNanPreserve) {
2430 "Expected 1 operand for SignedZeroInfNanPreserve");
2431 unsigned TargetWidth =
2434 ->getZExtValue();
2435
2436 SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
2440 assert(Index >= 0 && Index < 3 &&
2441 "Expected FPFastMathDefaultInfo for half, float, or double");
2442 assert(FPFastMathDefaultInfoVec.size() == 3 &&
2443 "Expected FPFastMathDefaultInfoVec to have exactly 3 elements");
2444 FPFastMathDefaultInfoVec[Index].SignedZeroInfNanPreserve = true;
2445 }
2446 }
2447
2448 std::unordered_map<unsigned, GlobalVariable *> GlobalVars;
2449 for (auto &[Func, FPFastMathDefaultInfoVec] : FPFastMathDefaultInfoMap) {
2450 if (FPFastMathDefaultInfoVec.empty())
2451 continue;
2452
2453 for (const SPIRV::FPFastMathDefaultInfo &Info : FPFastMathDefaultInfoVec) {
2454 assert(Info.Ty && "Expected target type for FPFastMathDefaultInfo");
2455
2456 unsigned Flags = Info.FastMathFlags;
2457 if (Flags == SPIRV::FPFastMathMode::None && .ContractionOff &&
2458 .SignedZeroInfNanPreserve &&
.FPFastMathDefault)
2459 continue;
2460
2461
2462 if (Info.ContractionOff && (Flags & SPIRV::FPFastMathMode::AllowContract))
2464 "and AllowContract");
2465
2466 if (Info.SignedZeroInfNanPreserve &&
2467 !(Flags &
2468 (SPIRV::FPFastMathMode::NotNaN | SPIRV::FPFastMathMode::NotInf |
2469 SPIRV::FPFastMathMode::NSZ))) {
2470 if (Info.FPFastMathDefault)
2472 "SignedZeroInfNanPreserve but at least one of "
2473 "NotNaN/NotInf/NSZ is enabled.");
2474 }
2475
2476 if ((Flags & SPIRV::FPFastMathMode::AllowTransform) &&
2477 !((Flags & SPIRV::FPFastMathMode::AllowReassoc) &&
2478 (Flags & SPIRV::FPFastMathMode::AllowContract))) {
2480 "AllowTransform requires AllowReassoc and "
2481 "AllowContract to be set.");
2482 }
2483
2484 auto it = GlobalVars.find(Flags);
2485 GlobalVariable *GV = nullptr;
2486 if (it != GlobalVars.end()) {
2487
2488 GV = it->second;
2489 } else {
2490
2492 ConstantInt::get(Type::getInt32Ty(M.getContext()), Flags);
2493
2494
2495 GV = new GlobalVariable(M,
2496 Type::getInt32Ty(M.getContext()),
2497 true,
2499 InitValue
2500 );
2501 GlobalVars[Flags] = GV;
2502 }
2503 }
2504 }
2505}
2506
2507void SPIRVEmitIntrinsics::processInstrAfterVisit(Instruction *I,
2510 bool IsConstComposite =
2511 II && II->getIntrinsicID() == Intrinsic::spv_const_composite;
2512 if (IsConstComposite && TrackConstants) {
2514 auto t = AggrConsts.find(I);
2515 assert(t != AggrConsts.end());
2516 auto *NewOp =
2518 {II->getType(), II->getType()}, t->second, I, {}, B);
2520 NewOp->setArgOperand(0, I);
2521 }
2522 bool IsPhi = isa(I), BPrepared = false;
2523 for (const auto &Op : I->operands()) {
2526 continue;
2527 unsigned OpNo = Op.getOperandNo();
2528 if (II && ((II->getIntrinsicID() == Intrinsic::spv_gep && OpNo == 0) ||
2529 (II->paramHasAttr(OpNo, Attribute::ImmArg))))
2530 continue;
2531
2532 if (!BPrepared) {
2533 IsPhi ? B.SetInsertPointPastAllocas(I->getParent()->getParent())
2535 BPrepared = true;
2536 }
2537 Type *OpTy = Op->getType();
2541
2542
2545 {OpTy, OpTyVal->getType()}, Op, OpTyVal, {}, B);
2546 }
2547 if (!IsConstComposite && isPointerTy(OpTy) && OpElemTy != nullptr &&
2548 OpElemTy != IntegerType::getInt8Ty(I->getContext())) {
2550 SmallVector<Value *, 2> Args = {
2553 CallInst *PtrCasted =
2554 B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
2556 NewOp = PtrCasted;
2557 }
2558 if (NewOp != Op)
2559 I->setOperand(OpNo, NewOp);
2560 }
2561 if (Named.insert(I).second)
2563}
2564
2565Type *SPIRVEmitIntrinsics::deduceFunParamElementType(Function *F,
2566 unsigned OpIdx) {
2567 std::unordered_set<Function *> FVisited;
2568 return deduceFunParamElementType(F, OpIdx, FVisited);
2569}
2570
2571Type *SPIRVEmitIntrinsics::deduceFunParamElementType(
2572 Function *F, unsigned OpIdx, std::unordered_set<Function *> &FVisited) {
2573
2574 if (!FVisited.insert(F).second)
2575 return nullptr;
2576
2577 std::unordered_set<Value *> Visited;
2579
2580 for (User *U : F->users()) {
2583 continue;
2586 continue;
2587
2589 return KnownTy;
2590
2591 Visited.clear();
2592 if (Type *Ty = deduceElementTypeHelper(OpArg, Visited, false))
2593 return Ty;
2594
2595 for (User *OpU : OpArg->users()) {
2597 if (!Inst || Inst == CI)
2598 continue;
2599 Visited.clear();
2600 if (Type *Ty = deduceElementTypeHelper(Inst, Visited, false))
2601 return Ty;
2602 }
2603
2605 continue;
2607 if (FVisited.find(OuterF) != FVisited.end())
2608 continue;
2609 for (unsigned i = 0; i < OuterF->arg_size(); ++i) {
2610 if (OuterF->getArg(i) == OpArg) {
2611 Lookup.push_back(std::make_pair(OuterF, i));
2612 break;
2613 }
2614 }
2615 }
2616
2617
2618 for (auto &Pair : Lookup) {
2619 if (Type *Ty = deduceFunParamElementType(Pair.first, Pair.second, FVisited))
2620 return Ty;
2621 }
2622
2623 return nullptr;
2624}
2625
2626void SPIRVEmitIntrinsics::processParamTypesByFunHeader(Function *F,
2628 B.SetInsertPointPastAllocas(F);
2632 continue;
2634 if (ElemTy)
2635 continue;
2639 continue;
2640 }
2641
2642 for (User *U : F->users()) {
2645 continue;
2648 continue;
2649
2651 break;
2652 }
2653 if (ElemTy) {
2655 continue;
2656 }
2657 if (HaveFunPtrs) {
2658 for (User *U : Arg->users()) {
2662 CI->getParent()->getParent() == CurrF) {
2664 deduceOperandElementTypeFunctionPointer(CI, Ops, ElemTy, false);
2665 if (ElemTy) {
2667 break;
2668 }
2669 }
2670 }
2671 }
2672 }
2673}
2674
2675void SPIRVEmitIntrinsics::processParamTypes(Function *F, IRBuilder<> &B) {
2676 B.SetInsertPointPastAllocas(F);
2680 continue;
2682 if (!ElemTy && (ElemTy = deduceFunParamElementType(F, OpIdx)) != nullptr) {
2684 DenseSet<std::pair<Value *, Value *>> VisitedSubst;
2686 propagateElemType(Arg, IntegerType::getInt8Ty(F->getContext()),
2687 VisitedSubst);
2688 } else {
2690 }
2691 }
2692 }
2693}
2694
2698 bool IsNewFTy = false;
2700 for (Argument &Arg : F->args()) {
2704 IsNewFTy = true;
2706 }
2708 }
2709 return IsNewFTy
2710 ? FunctionType::get(FTy->getReturnType(), ArgTys, FTy->isVarArg())
2711 : FTy;
2712}
2713
2714bool SPIRVEmitIntrinsics::processFunctionPointers(Module &M) {
2716 for (auto &F : M) {
2717 if (F.isIntrinsic())
2718 continue;
2719 if (F.isDeclaration()) {
2720 for (User *U : F.users()) {
2724 break;
2725 }
2726 }
2727 } else {
2728 if (F.user_empty())
2729 continue;
2731 if (!FPElemTy)
2733 for (User *U : F.users()) {
2735 if ( || II->arg_size() != 3 || II->getOperand(0) != &F)
2736 continue;
2737 if (II->getIntrinsicID() == Intrinsic::spv_assign_ptr_type ||
2738 II->getIntrinsicID() == Intrinsic::spv_ptrcast) {
2740 break;
2741 }
2742 }
2743 }
2744 }
2745 if (Worklist.empty())
2746 return false;
2747
2751 "cannot allocate a name for the internal service function");
2752 LLVMContext &Ctx = M.getContext();
2754 Function::Create(FunctionType::get(Type::getVoidTy(Ctx), {}, false),
2759
2760 for (Function *F : Worklist) {
2762 for (const auto &Arg : F->args())
2764 IRB.CreateCall(F, Args);
2765 }
2766 IRB.CreateRetVoid();
2767
2768 return true;
2769}
2770
2771
2772void SPIRVEmitIntrinsics::applyDemangledPtrArgTypes(IRBuilder<> &B) {
2773 DenseMap<Function *, CallInst *> Ptrcasts;
2774 for (auto It : FDeclPtrTys) {
2776 for (auto *U : F->users()) {
2779 continue;
2780 unsigned Sz = CI->arg_size();
2781 for (auto [Idx, ElemTy] : It.second) {
2782 if (Idx >= Sz)
2783 continue;
2786 continue;
2789 B.SetInsertPointPastAllocas(Arg->getParent());
2790 B.SetCurrentDebugLocation(DebugLoc());
2792 }
2794 replaceUsesOfWithSpvPtrcast(Param, normalizeType(ElemTy), CI,
2795 Ptrcasts);
2798
2799 } else {
2801 ->getParent()
2802 ->getEntryBlock()
2803 .getFirstNonPHIOrDbgOrAlloca());
2805 }
2807 if ()
2808 continue;
2809 Function *RefF = Ref->getCalledFunction();
2812 continue;
2818 }
2819 }
2820 }
2821}
2822
2823GetElementPtrInst *
2824SPIRVEmitIntrinsics::simplifyZeroLengthArrayGepInst(GetElementPtrInst *GEP) {
2825
2826
2827
2828
2829
2831 Type *SrcTy = GEP->getSourceElementType();
2832 SmallVector<Value *, 8> Indices(GEP->indices());
2834 if (ArrTy && ArrTy->getNumElements() == 0 &&
2836 Indices.erase(Indices.begin());
2837 SrcTy = ArrTy->getElementType();
2839 GEP->getNoWrapFlags(), "",
2840 GEP->getIterator());
2841 }
2842 return nullptr;
2843}
2844
2845bool SPIRVEmitIntrinsics::runOnFunction(Function &Func) {
2846 if (Func.isDeclaration())
2847 return false;
2848
2850 GR = ST.getSPIRVGlobalRegistry();
2851
2852 if (!CurrF)
2853 HaveFunPtrs =
2854 ST.canUseExtension(SPIRV::Extension::SPV_INTEL_function_pointers);
2855
2856 CurrF = &Func;
2858 AggrConsts.clear();
2859 AggrConstTypes.clear();
2860 AggrStores.clear();
2861
2862
2863
2864 SmallPtrSet<Instruction *, 4> DeadInsts;
2867 if ( || GR->findDeducedElementType(Ref))
2868 continue;
2869
2870 GetElementPtrInst *NewGEP = simplifyZeroLengthArrayGepInst(Ref);
2871 if (NewGEP) {
2872 Ref->replaceAllUsesWith(NewGEP);
2874 Ref = NewGEP;
2875 }
2876 if (Type *GepTy = getGEPType(Ref))
2878 }
2879
2880 for (auto *I : DeadInsts) {
2881 assert(I->use_empty() && "Dead instruction should not have any uses left");
2882 I->eraseFromParent();
2883 }
2884
2885 processParamTypesByFunHeader(CurrF, B);
2886
2887
2888
2889
2892 if (!SI)
2893 continue;
2894 Type *ElTy = SI->getValueOperand()->getType();
2897 }
2898
2899 B.SetInsertPoint(&Func.getEntryBlock(), Func.getEntryBlock().begin());
2900 for (auto &GV : Func.getParent()->globals())
2901 processGlobalValue(GV, B);
2902
2903 preprocessUndefs(B);
2904 preprocessCompositeConstants(B);
2907
2908 applyDemangledPtrArgTypes(B);
2909
2910
2911 for (auto &I : Worklist) {
2912
2913 if (isConvergenceIntrinsic(I))
2914 continue;
2915
2916 bool Postpone = insertAssignPtrTypeIntrs(I, B, false);
2917
2918 insertAssignTypeIntrs(I, B);
2919 insertPtrCastOrAssignTypeInstr(I, B);
2921
2922
2923 if (Postpone && !GR->findAssignPtrTypeInstr(I))
2924 insertAssignPtrTypeIntrs(I, B, true);
2925
2927 useRoundingMode(FPI, B);
2928 }
2929
2930
2931
2932 SmallPtrSet<Instruction *, 4> IncompleteRets;
2934 deduceOperandElementType(&I, &IncompleteRets);
2935
2936
2937
2938 for (BasicBlock &BB : Func)
2939 for (PHINode &Phi : BB.phis())
2941 deduceOperandElementType(&Phi, nullptr);
2942
2943 for (auto *I : Worklist) {
2944 TrackConstants = true;
2947
2948
2950 if ()
2951 continue;
2952
2953
2954 if (isConvergenceIntrinsic(I))
2955 continue;
2956
2958 processInstrAfterVisit(I, B);
2959 }
2960
2961 return true;
2962}
2963
2964
2965bool SPIRVEmitIntrinsics::postprocessTypes(Module &M) {
2966 if (!GR || TodoTypeSz == 0)
2967 return false;
2968
2969 unsigned SzTodo = TodoTypeSz;
2970 DenseMap<Value *, SmallPtrSet<Value *, 4>> ToProcess;
2971 for (auto [Op, Enabled] : TodoType) {
2972
2974 continue;
2975 CallInst *AssignCI = GR->findAssignPtrTypeInstr(Op);
2976 Type *KnownTy = GR->findDeducedElementType(Op);
2977 if (!KnownTy || !AssignCI)
2978 continue;
2980
2982 CurrF = CI->getParent()->getParent();
2983 std::unordered_set<Value *> Visited;
2984 if (Type *ElemTy = deduceElementTypeHelper(Op, Visited, false, true)) {
2985 if (ElemTy != KnownTy) {
2986 DenseSet<std::pair<Value *, Value *>> VisitedSubst;
2987 propagateElemType(CI, ElemTy, VisitedSubst);
2988 eraseTodoType(Op);
2989 continue;
2990 }
2991 }
2992 }
2993
2994 if (Op->hasUseList()) {
2995 for (User *U : Op->users()) {
2999 }
3000 }
3001 }
3002 if (TodoTypeSz == 0)
3003 return true;
3004
3005 for (auto &F : M) {
3006 CurrF = &F;
3007 SmallPtrSet<Instruction *, 4> IncompleteRets;
3009 auto It = ToProcess.find(&I);
3010 if (It == ToProcess.end())
3011 continue;
3012 It->second.remove_if([this](Value *V) { return !isTodoType(V); });
3013 if (It->second.size() == 0)
3014 continue;
3015 deduceOperandElementType(&I, &IncompleteRets, &It->second, true);
3016 if (TodoTypeSz == 0)
3017 return true;
3018 }
3019 }
3020
3021 return SzTodo > TodoTypeSz;
3022}
3023
3024
3025void SPIRVEmitIntrinsics::parseFunDeclarations(Module &M) {
3026 for (auto &F : M) {
3027 if (.isDeclaration() || F.isIntrinsic())
3028 continue;
3029
3031 if (DemangledName.empty())
3032 continue;
3033
3034 const SPIRVSubtarget &ST = TM->getSubtarget(F);
3035 auto [Grp, Opcode, ExtNo] = SPIRV::mapBuiltinToOpcode(
3036 DemangledName, ST.getPreferredInstructionSet());
3037 if (Opcode != SPIRV::OpGroupAsyncCopy)
3038 continue;
3039
3040 SmallVector Idxs;
3045 }
3046 if (!Idxs.size())
3047 continue;
3048
3049 LLVMContext &Ctx = F.getContext();
3051 SPIRV::parseBuiltinTypeStr(TypeStrs, DemangledName, Ctx);
3052 if (!TypeStrs.size())
3053 continue;
3054
3055 for (unsigned Idx : Idxs) {
3056 if (Idx >= TypeStrs.size())
3057 continue;
3058 if (Type *ElemTy =
3059 SPIRV::parseBuiltinCallArgumentType(TypeStrs[Idx].trim(), Ctx))
3062 FDeclPtrTys[&F].push_back(std::make_pair(Idx, ElemTy));
3063 }
3064 }
3065}
3066
3067bool SPIRVEmitIntrinsics::runOnModule(Module &M) {
3069
3070 parseFunDeclarations(M);
3071 insertConstantsForFPFastMathDefault(M);
3072
3073 TodoType.clear();
3074 for (auto &F : M)
3076
3077
3078 for (auto &F : M) {
3079
3080 CurrF = &F;
3081 if (.isDeclaration() &&
.isIntrinsic()) {
3083 processParamTypes(&F, B);
3084 }
3085 }
3086
3087 CanTodoType = false;
3088 Changed |= postprocessTypes(M);
3089
3090 if (HaveFunPtrs)
3091 Changed |= processFunctionPointers(M);
3092
3094}
3095
3097 return new SPIRVEmitIntrinsics(TM);
3098}
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
Expand Atomic instructions
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
Analysis containing CSE Info
static void replaceAllUsesWith(Value *Old, Value *New, SmallPtrSet< BasicBlock *, 32 > &FreshBBs, bool IsHuge)
Replace all old uses with new ones, and push the updated BBs into FreshBBs.
This file defines the DenseSet and SmallDenseSet classes.
static bool runOnFunction(Function &F, bool PostInlining)
iv Induction Variable Users
const AbstractManglingParser< Derived, Alloc >::OperatorInfo AbstractManglingParser< Derived, Alloc >::Ops[]
Machine Check Debug Module
MachineInstr unsigned OpIdx
uint64_t IntrinsicInst * II
#define INITIALIZE_PASS(passName, arg, name, cfg, analysis)
static unsigned getNumElements(Type *Ty)
static bool isMemInstrToReplace(Instruction *I)
Definition SPIRVEmitIntrinsics.cpp:315
static bool isAggrConstForceInt32(const Value *V)
Definition SPIRVEmitIntrinsics.cpp:320
static SPIRV::FPFastMathDefaultInfoVector & getOrCreateFPFastMathDefaultInfoVec(const Module &M, DenseMap< Function *, SPIRV::FPFastMathDefaultInfoVector > &FPFastMathDefaultInfoMap, Function *F)
Definition SPIRVEmitIntrinsics.cpp:2315
static Type * getAtomicElemTy(SPIRVGlobalRegistry *GR, Instruction *I, Value *PointerOperand)
Definition SPIRVEmitIntrinsics.cpp:988
static void reportFatalOnTokenType(const Instruction *I)
Definition SPIRVEmitIntrinsics.cpp:352
static void setInsertPointAfterDef(IRBuilder<> &B, Instruction *I)
Definition SPIRVEmitIntrinsics.cpp:333
static void emitAssignName(Instruction *I, IRBuilder<> &B)
Definition SPIRVEmitIntrinsics.cpp:359
static Type * getPointeeTypeByCallInst(StringRef DemangledName, Function *CalledF, unsigned OpIdx)
Definition SPIRVEmitIntrinsics.cpp:596
static void createRoundingModeDecoration(Instruction *I, unsigned RoundingModeDeco, IRBuilder<> &B)
Definition SPIRVEmitIntrinsics.cpp:1462
static void createDecorationIntrinsic(Instruction *I, MDNode *Node, IRBuilder<> &B)
Definition SPIRVEmitIntrinsics.cpp:1454
static SPIRV::FPFastMathDefaultInfo & getFPFastMathDefaultInfo(SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec, const Type *Ty)
Definition SPIRVEmitIntrinsics.cpp:2337
static bool IsKernelArgInt8(Function *F, StoreInst *SI)
Definition SPIRVEmitIntrinsics.cpp:408
static void addSaturatedDecorationToIntrinsic(Instruction *I, IRBuilder<> &B)
Definition SPIRVEmitIntrinsics.cpp:1485
static bool isFirstIndexZero(const GetElementPtrInst *GEP)
Definition SPIRVEmitIntrinsics.cpp:1590
static void setInsertPointSkippingPhis(IRBuilder<> &B, Instruction *I)
Definition SPIRVEmitIntrinsics.cpp:326
static FunctionType * getFunctionPointerElemType(Function *F, SPIRVGlobalRegistry *GR)
Definition SPIRVEmitIntrinsics.cpp:2695
static void createSaturatedConversionDecoration(Instruction *I, IRBuilder<> &B)
Definition SPIRVEmitIntrinsics.cpp:1475
static Type * restoreMutatedType(SPIRVGlobalRegistry *GR, Instruction *I, Type *Ty)
Definition SPIRVEmitIntrinsics.cpp:415
static bool requireAssignType(Instruction *I)
Definition SPIRVEmitIntrinsics.cpp:341
void visit(MachineFunction &MF, MachineBasicBlock &Start, std::function< void(MachineBasicBlock *)> op)
static void insertSpirvDecorations(MachineFunction &MF, SPIRVGlobalRegistry *GR, MachineIRBuilder MIB)
#define SPIRV_BACKEND_SERVICE_FUN_NAME
StringSet - A set-like wrapper for the StringMap.
static SymbolRef::Type getType(const Symbol *Sym)
static std::optional< unsigned > getOpcode(ArrayRef< VPValue * > Values)
Returns the opcode of Values or ~0 if they do not all agree.
static int Lookup(ArrayRef< TableEntry > Table, unsigned Opcode)
This class represents an incoming formal argument to a Function.
const Function * getParent() const
static unsigned getPointerOperandIndex()
static unsigned getPointerOperandIndex()
iterator_range< const_phi_iterator > phis() const
Returns a range that iterates over the phis in the basic block.
const Function * getParent() const
Return the enclosing method, or null if none.
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
LLVM_ABI LLVMContext & getContext() const
Get the context in which this basic block lives.
static LLVM_ABI BlockAddress * get(Function *F, BasicBlock *BB)
Return a BlockAddress for the specified function and basic block.
bool isInlineAsm() const
Check if this call is an inline asm statement.
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation or the function signa...
LLVM_ABI bool isIndirectCall() const
Return true if the callsite is an indirect call.
Value * getCalledOperand() const
Value * getArgOperand(unsigned i) const
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
unsigned arg_size() const
This class represents a function call, abstracting a target machine's calling convention.
uint64_t getZExtValue() const
Return the constant as a 64-bit unsigned integer value after it has been zero extended as appropriate...
static LLVM_ABI Constant * getNullValue(Type *Ty)
Constructor to create a '0' constant of arbitrary type.
LLVM_ABI std::optional< RoundingMode > getRoundingMode() const
iterator find(const_arg_type_t< KeyT > Val)
std::pair< iterator, bool > try_emplace(KeyT &&Key, Ts &&...Args)
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
static LLVM_ABI FunctionType * get(Type *Result, ArrayRef< Type * > Params, bool isVarArg)
This static method is the primary way of constructing a FunctionType.
void addFnAttr(Attribute::AttrKind Kind)
Add function attributes to this function.
static Function * Create(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace, const Twine &N="", Module *M=nullptr)
const DataLayout & getDataLayout() const
Get the data layout of the module this function belongs to.
Intrinsic::ID getIntrinsicID() const LLVM_READONLY
getIntrinsicID - This method returns the ID number of the specified function, or Intrinsic::not_intri...
bool isIntrinsic() const
isIntrinsic - Returns true if the function's name starts with "llvm.".
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function.
Type * getReturnType() const
Returns the type of the ret val.
Argument * getArg(unsigned i) const
an instruction for type-safe pointer arithmetic to access elements of arrays and structs
static LLVM_ABI Type * getTypeAtIndex(Type *Ty, Value *Idx)
Return the type of the element at the given index of an indexable type.
static GetElementPtrInst * Create(Type *PointeeType, Value *Ptr, ArrayRef< Value * > IdxList, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
static unsigned getPointerOperandIndex()
PointerType * getType() const
Global values are always pointers.
@ PrivateLinkage
Like Internal, but omit from symbol table.
@ InternalLinkage
Rename collisions when linking (static functions).
const Constant * getInitializer() const
getInitializer - Return the initializer for this global variable.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
LLVM_ABI void addDestination(BasicBlock *Dest)
Add a destination.
Base class for instruction visitors.
LLVM_ABI InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
Instruction * user_back()
Specialize the methods defined in Value, as we know that an instruction can only be used by other ins...
LLVM_ABI const Function * getFunction() const
Return the function this instruction belongs to.
LLVM_ABI void copyMetadata(const Instruction &SrcInst, ArrayRef< unsigned > WL=ArrayRef< unsigned >())
Copy metadata from SrcInst to this instruction.
This is an important class for using LLVM in a threaded context.
static unsigned getPointerOperandIndex()
const MDOperand & getOperand(unsigned I) const
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
unsigned getNumOperands() const
Return number of MDNode operands.
static LLVM_ABI MDString * get(LLVMContext &Context, StringRef Str)
Flags
Flags values. These may be or'd together.
ModulePass class - This class is used to implement unstructured interprocedural optimizations and ana...
A Module instance is used to store all the information related to an LLVM module.
void addAssignPtrTypeInstr(Value *Val, CallInst *AssignPtrTyCI)
void buildAssignPtr(IRBuilder<> &B, Type *ElemTy, Value *Arg)
Type * findDeducedCompositeType(const Value *Val)
void replaceAllUsesWith(Value *Old, Value *New, bool DeleteOld=true)
void addDeducedElementType(Value *Val, Type *Ty)
void addReturnType(const Function *ArgF, TypedPointerType *DerivedTy)
Type * findMutated(const Value *Val)
void addDeducedCompositeType(Value *Val, Type *Ty)
void buildAssignType(IRBuilder<> &B, Type *Ty, Value *Arg)
Type * findDeducedElementType(const Value *Val)
void updateAssignType(CallInst *AssignCI, Value *Arg, Value *OfType)
CallInst * findAssignPtrTypeInstr(const Value *Val)
const SPIRVTargetLowering * getTargetLowering() const override
bool isLogicalSPIRV() const
bool canUseExtension(SPIRV::Extension::Extension E) const
const SPIRVSubtarget * getSubtargetImpl() const
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
bool contains(ConstPtrType Ptr) const
reference emplace_back(ArgTypes &&... Args)
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
An instruction for storing to memory.
static unsigned getPointerOperandIndex()
iterator find(StringRef Key)
StringRef - Represent a constant reference to a string, i.e.
bool starts_with(StringRef Prefix) const
Check if this string starts with the given Prefix.
bool contains(StringRef key) const
Check if the set contains the given key.
static LLVM_ABI StructType * create(LLVMContext &Context, StringRef Name)
This creates an identified struct.
static LLVM_ABI TargetExtType * get(LLVMContext &Context, StringRef Name, ArrayRef< Type * > Types={}, ArrayRef< unsigned > Ints={})
Return a target extension type having the specified name and optional type and integer parameters.
const STC & getSubtarget(const Function &F) const
This method returns a pointer to the specified type of TargetSubtargetInfo.
The instances of the Type class are immutable: once they are created, they are never changed.
bool isVectorTy() const
True if this is an instance of VectorType.
bool isArrayTy() const
True if this is an instance of ArrayType.
static LLVM_ABI IntegerType * getInt32Ty(LLVMContext &C)
bool isPointerTy() const
True if this is an instance of PointerType.
Type * getArrayElementType() const
LLVM_ABI StringRef getTargetExtName() const
static LLVM_ABI IntegerType * getInt8Ty(LLVMContext &C)
bool isStructTy() const
True if this is an instance of StructType.
bool isTargetExtTy() const
Return true if this is a target extension type.
bool isAggregateType() const
Return true if the type is an aggregate type.
static LLVM_ABI Type * getDoubleTy(LLVMContext &C)
static LLVM_ABI Type * getFloatTy(LLVMContext &C)
static LLVM_ABI Type * getHalfTy(LLVMContext &C)
bool isVoidTy() const
Return true if this is 'void'.
static LLVM_ABI bool isValidElementType(Type *ElemTy)
Return true if the specified type is valid as a element type.
static LLVM_ABI TypedPointerType * get(Type *ElementType, unsigned AddressSpace)
This constructs a pointer to an object of the specified type in a numbered address space.
static LLVM_ABI UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
void setOperand(unsigned i, Value *Val)
LLVM_ABI bool replaceUsesOfWith(Value *From, Value *To)
Replace uses of one Value with another.
Value * getOperand(unsigned i) const
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
LLVM_ABI void setName(const Twine &Name)
Change the name of the value.
iterator_range< user_iterator > users()
LLVM_ABI StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
bool contains(const_arg_type_t< ValueT > V) const
Check if the set contains the given element.
const ParentTy * getParent() const
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
constexpr char Args[]
Key for Kernel::Metadata::mArgs.
@ SPIR_KERNEL
Used for SPIR kernel functions.
@ BasicBlock
Various leaf nodes.
bool match(Val *V, const Pattern &P)
is_zero m_Zero()
Match any null constant or a vector with all elements equal to 0.
DenseSetImpl< ValueT, DenseMap< ValueT, DenseSetEmpty, ValueInfoT, DenseSetPair< ValueT > >, ValueInfoT > DenseSet
ElementType
The element type of an SRV or UAV resource.
@ User
could "use" a pointer
NodeAddr< PhiNode * > Phi
NodeAddr< NodeBase * > Node
NodeAddr< FuncNode * > Func
friend class Instruction
Iterator for Instructions in a `BasicBlock`.
This is an optimization pass for GlobalISel generic memory operations.
auto drop_begin(T &&RangeOrContainer, size_t N=1)
Return a range covering RangeOrContainer with the first N elements excluded.
bool getVacantFunctionName(Module &M, std::string &Name)
FunctionAddr VTableAddr Value
bool isTypedPointerWrapper(const TargetExtType *ExtTy)
auto enumerate(FirstRange &&First, RestRanges &&...Rest)
Given two or more input ranges, returns a new range whose values are tuples (A, B,...
ModulePass * createSPIRVEmitIntrinsicsPass(SPIRVTargetMachine *TM)
Definition SPIRVEmitIntrinsics.cpp:3096
unsigned getPointerAddressSpace(const Type *T)
decltype(auto) dyn_cast(const From &Val)
dyn_cast - Return the argument parameter cast to the specified type.
FunctionAddr VTableAddr uintptr_t uintptr_t Int32Ty
CallInst * buildIntrWithMD(Intrinsic::ID IntrID, ArrayRef< Type * > Types, Value *Arg, Value *Arg2, ArrayRef< Constant * > Imms, IRBuilder<> &B)
void append_range(Container &C, Range &&R)
Wrapper function to append range R to container C.
bool isNestedPointer(const Type *Ty)
MetadataAsValue * buildMD(Value *Arg)
std::string getOclOrSpirvBuiltinDemangledName(StringRef Name)
auto reverse(ContainerTy &&C)
Type * getTypedPointerWrapper(Type *ElemTy, unsigned AS)
bool isPointerTy(const Type *T)
LLVM_ABI void report_fatal_error(Error Err, bool gen_crash_diag=true)
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...
bool isa(const From &Val)
isa - Return true if the parameter to the template is an instance of one of the template type argu...
SPIRV::Scope::Scope getMemScope(LLVMContext &Ctx, SyncScope::ID Id)
@ Ref
The access may reference the value stored in memory.
IRBuilder(LLVMContext &, FolderTy, InserterTy, MDNode *, ArrayRef< OperandBundleDef >) -> IRBuilder< FolderTy, InserterTy >
DWARFExpression::Operation Op
Type * getPointeeTypeByAttr(Argument *Arg)
bool hasPointeeTypeAttr(Argument *Arg)
constexpr unsigned BitWidth
bool isEquivalentTypes(Type *Ty1, Type *Ty2)
decltype(auto) cast(const From &Val)
cast - Return the argument parameter cast to the specified type.
iterator_range< pointer_iterator< WrappedIteratorT > > make_pointer_range(RangeT &&Range)
bool hasInitializer(const GlobalVariable *GV)
Type * normalizeType(Type *Ty)
@ Enabled
Convert any .debug_str_offsets tables to DWARF64 if needed.
bool isSpvIntrinsic(const MachineInstr &MI, Intrinsic::ID IntrinsicID)
Type * getPointeeType(const Type *Ty)
PoisonValue * getNormalizedPoisonValue(Type *Ty)
bool isUntypedPointerTy(const Type *T)
Type * reconstitutePeeledArrayType(Type *Ty)
SPIRV::MemorySemantics::MemorySemantics getMemSemantics(AtomicOrdering Ord)
static size_t computeFPFastMathDefaultInfoVecIndex(size_t BitWidth)