LLVM: lib/Transforms/Scalar/InferAddressSpaces.cpp Source File (original) (raw)

1

2

3

4

5

6

7

8

9

10

11

12

13

14

15

16

17

18

19

20

21

22

23

24

25

26

27

28

29

30

31

32

33

34

35

36

37

38

39

40

41

42

43

44

45

46

47

48

49

50

51

52

53

54

55

56

57

58

59

60

61

62

63

64

65

66

67

68

69

70

71

72

73

74

75

76

77

78

79

80

81

82

83

84

85

86

87

88

89

90

129#include

130#include

131#include

132#include

133#include

134

135#define DEBUG_TYPE "infer-address-spaces"

136

137using namespace llvm;

138

141 cl::desc("The default address space is assumed as the flat address space. "

142 "This is mainly for test purpose."));

143

145 std::numeric_limits::max();

146

147namespace {

148

150

151

152

153

154

155

156using PredicatedAddrSpaceMapTy =

159

160class InferAddressSpaces : public FunctionPass {

161 unsigned FlatAddrSpace = 0;

162

163public:

164 static char ID;

165

166 InferAddressSpaces()

169 }

170 InferAddressSpaces(unsigned AS) : FunctionPass(ID), FlatAddrSpace(AS) {

172 }

173

174 void getAnalysisUsage(AnalysisUsage &AU) const override {

177 AU.addRequired();

178 AU.addRequired();

179 }

180

182};

183

184class InferAddressSpacesImpl {

185 AssumptionCache ∾

187 const DominatorTree *DT = nullptr;

188 const TargetTransformInfo *TTI = nullptr;

189 const DataLayout *DL = nullptr;

190

191

192

193 unsigned FlatAddrSpace = 0;

194

195

196

197 bool updateAddressSpace(const Value &V,

198 ValueToAddrSpaceMapTy &InferredAddrSpace,

199 PredicatedAddrSpaceMapTy &PredicatedAS) const;

200

201

202

204 ValueToAddrSpaceMapTy &InferredAddrSpace,

205 PredicatedAddrSpaceMapTy &PredicatedAS) const;

206

207 bool isSafeToCastConstAddrSpace(Constant *C, unsigned NewAS) const;

208

209 Value *cloneInstructionWithNewAddressSpace(

210 Instruction *I, unsigned NewAddrSpace,

212 const PredicatedAddrSpaceMapTy &PredicatedAS,

213 SmallVectorImpl<const Use *> *PoisonUsesToFix) const;

214

215 void performPointerReplacement(

217 SmallVectorImpl<Instruction *> &DeadInstructions) const;

218

219

220

221

222 bool rewriteWithNewAddressSpaces(

224 const ValueToAddrSpaceMapTy &InferredAddrSpace,

225 const PredicatedAddrSpaceMapTy &PredicatedAS) const;

226

227 void appendsFlatAddressExpressionToPostorderStack(

228 Value *V, PostorderStackTy &PostorderStack,

229 DenseSet<Value *> &Visited) const;

230

231 bool rewriteIntrinsicOperands(IntrinsicInst *II, Value *OldV,

232 Value *NewV) const;

233 void collectRewritableIntrinsicOperands(IntrinsicInst *II,

234 PostorderStackTy &PostorderStack,

235 DenseSet<Value *> &Visited) const;

236

237 std::vector collectFlatAddressExpressions(Function &F) const;

238

239 Value *cloneValueWithNewAddressSpace(

240 Value *V, unsigned NewAddrSpace,

242 const PredicatedAddrSpaceMapTy &PredicatedAS,

243 SmallVectorImpl<const Use *> *PoisonUsesToFix) const;

244 unsigned joinAddressSpaces(unsigned AS1, unsigned AS2) const;

245

246 unsigned getPredicatedAddrSpace(const Value &PtrV,

247 const Value *UserCtx) const;

248

249public:

250 InferAddressSpacesImpl(AssumptionCache &AC, const DominatorTree *DT,

251 const TargetTransformInfo *TTI, unsigned FlatAddrSpace)

252 : AC(AC), DT(DT), TTI(TTI), FlatAddrSpace(FlatAddrSpace) {}

253 bool run(Function &F);

254};

255

256}

257

258char InferAddressSpaces::ID = 0;

259

261 false, false)

266

268 assert(Ty->isPtrOrPtrVectorTy());

270 return Ty->getWithNewType(NPT);

271}

272

273

274

275

280 if (!P2I || P2I->getOpcode() != Instruction::PtrToInt)

281 return false;

282

283

284

285

286

287

288

289

290

291

292

293

294

295

296 unsigned P2IOp0AS = P2I->getOperand(0)->getType()->getPointerAddressSpace();

300 DL) &&

302 P2I->getOperand(0)->getType(), P2I->getType(),

303 DL) &&

304 (P2IOp0AS == I2PAS || TTI->isNoopAddrSpaceCast(P2IOp0AS, I2PAS));

305}

306

307

308

309

310

313

315 return Arg->getType()->isPointerTy() &&

317

319 if (Op)

320 return false;

321

322 switch (Op->getOpcode()) {

323 case Instruction::PHI:

324 assert(Op->getType()->isPtrOrPtrVectorTy());

325 return true;

326 case Instruction::BitCast:

327 case Instruction::AddrSpaceCast:

328 case Instruction::GetElementPtr:

329 return true;

330 case Instruction::Select:

331 return Op->getType()->isPtrOrPtrVectorTy();

332 case Instruction::Call: {

334 return II && II->getIntrinsicID() == Intrinsic::ptrmask;

335 }

336 case Instruction::IntToPtr:

338 default:

339

341 }

342}

343

344

345

346

351 return {};

352

354 switch (Op.getOpcode()) {

355 case Instruction::PHI: {

356 auto IncomingValues = cast(Op).incoming_values();

357 return {IncomingValues.begin(), IncomingValues.end()};

358 }

359 case Instruction::BitCast:

360 case Instruction::AddrSpaceCast:

361 case Instruction::GetElementPtr:

362 return {Op.getOperand(0)};

363 case Instruction::Select:

364 return {Op.getOperand(1), Op.getOperand(2)};

365 case Instruction::Call: {

367 assert(II.getIntrinsicID() == Intrinsic::ptrmask &&

368 "unexpected intrinsic call");

369 return {II.getArgOperand(0)};

370 }

371 case Instruction::IntToPtr: {

374 return {P2I->getOperand(0)};

375 }

376 default:

378 }

379}

380

381bool InferAddressSpacesImpl::rewriteIntrinsicOperands(IntrinsicInst *II,

383 Value *NewV) const {

384 Module *M = II->getParent()->getParent()->getParent();

386 switch (IID) {

387 case Intrinsic::objectsize:

388 case Intrinsic::masked_load: {

389 Type *DestTy = II->getType();

393 II->setArgOperand(0, NewV);

394 II->setCalledFunction(NewDecl);

395 return true;

396 }

397 case Intrinsic::ptrmask:

398

399 return false;

400 case Intrinsic::masked_gather: {

401 Type *RetTy = II->getType();

405 II->setArgOperand(0, NewV);

406 II->setCalledFunction(NewDecl);

407 return true;

408 }

409 case Intrinsic::masked_store:

410 case Intrinsic::masked_scatter: {

411 Type *ValueTy = II->getOperand(0)->getType();

414 M, II->getIntrinsicID(), {ValueTy, NewPtrTy});

415 II->setArgOperand(1, NewV);

416 II->setCalledFunction(NewDecl);

417 return true;

418 }

419 case Intrinsic::prefetch:

420 case Intrinsic::is_constant: {

422 M, II->getIntrinsicID(), {NewV->getType()});

423 II->setArgOperand(0, NewV);

424 II->setCalledFunction(NewDecl);

425 return true;

426 }

427 case Intrinsic::fake_use: {

428 II->replaceUsesOfWith(OldV, NewV);

429 return true;

430 }

431 case Intrinsic::lifetime_start:

432 case Intrinsic::lifetime_end: {

433

436 M, II->getIntrinsicID(), {NewV->getType()});

437 II->setArgOperand(0, NewV);

438 II->setCalledFunction(NewDecl);

439 return true;

440 }

441 default: {

443 if (!Rewrite)

444 return false;

445 if (Rewrite != II)

446 II->replaceAllUsesWith(Rewrite);

447 return true;

448 }

449 }

450}

451

452void InferAddressSpacesImpl::collectRewritableIntrinsicOperands(

453 IntrinsicInst *II, PostorderStackTy &PostorderStack,

454 DenseSet<Value *> &Visited) const {

455 auto IID = II->getIntrinsicID();

456 switch (IID) {

457 case Intrinsic::ptrmask:

458 case Intrinsic::objectsize:

459 appendsFlatAddressExpressionToPostorderStack(II->getArgOperand(0),

460 PostorderStack, Visited);

461 break;

462 case Intrinsic::is_constant: {

463 Value *Ptr = II->getArgOperand(0);

465 appendsFlatAddressExpressionToPostorderStack(Ptr, PostorderStack,

466 Visited);

467 }

468

469 break;

470 }

471 case Intrinsic::masked_load:

472 case Intrinsic::masked_gather:

473 case Intrinsic::prefetch:

474 appendsFlatAddressExpressionToPostorderStack(II->getArgOperand(0),

475 PostorderStack, Visited);

476 break;

477 case Intrinsic::masked_store:

478 case Intrinsic::masked_scatter:

479 appendsFlatAddressExpressionToPostorderStack(II->getArgOperand(1),

480 PostorderStack, Visited);

481 break;

482 case Intrinsic::fake_use: {

483 for (Value *Op : II->operands()) {

484 if (Op->getType()->isPtrOrPtrVectorTy()) {

485 appendsFlatAddressExpressionToPostorderStack(Op, PostorderStack,

486 Visited);

487 }

488 }

489

490 break;

491 }

492 case Intrinsic::lifetime_start:

493 case Intrinsic::lifetime_end: {

494 appendsFlatAddressExpressionToPostorderStack(II->getArgOperand(0),

495 PostorderStack, Visited);

496 break;

497 }

498 default:

499 SmallVector<int, 2> OpIndexes;

501 for (int Idx : OpIndexes) {

502 appendsFlatAddressExpressionToPostorderStack(II->getArgOperand(Idx),

503 PostorderStack, Visited);

504 }

505 }

506 break;

507 }

508}

509

510

511

512

513void InferAddressSpacesImpl::appendsFlatAddressExpressionToPostorderStack(

514 Value *V, PostorderStackTy &PostorderStack,

515 DenseSet<Value *> &Visited) const {

516 assert(V->getType()->isPtrOrPtrVectorTy());

517

518

519

521

523 PostorderStack.emplace_back(CE, false);

524

525 return;

526 }

527

528 if (V->getType()->getPointerAddressSpace() == FlatAddrSpace &&

530 if (Visited.insert(V).second) {

531 PostorderStack.emplace_back(V, false);

532

534 for (auto &O : Op->operands())

537 PostorderStack.emplace_back(CE, false);

538 }

539 }

540}

541

542

543

544std::vector

545InferAddressSpacesImpl::collectFlatAddressExpressions(Function &F) const {

546

547

548 PostorderStackTy PostorderStack;

549

550 DenseSet<Value *> Visited;

551

552 auto PushPtrOperand = [&](Value *Ptr) {

553 appendsFlatAddressExpressionToPostorderStack(Ptr, PostorderStack, Visited);

554 };

555

556

557

558

561 PushPtrOperand(GEP->getPointerOperand());

563 PushPtrOperand(LI->getPointerOperand());

565 PushPtrOperand(SI->getPointerOperand());

567 PushPtrOperand(RMW->getPointerOperand());

569 PushPtrOperand(CmpX->getPointerOperand());

571

572 PushPtrOperand(MI->getRawDest());

573

574

576 PushPtrOperand(MTI->getRawSource());

578 collectRewritableIntrinsicOperands(II, PostorderStack, Visited);

580 if (Cmp->getOperand(0)->getType()->isPtrOrPtrVectorTy()) {

581 PushPtrOperand(Cmp->getOperand(0));

582 PushPtrOperand(Cmp->getOperand(1));

583 }

585 PushPtrOperand(ASC->getPointerOperand());

588 PushPtrOperand(cast(I2P->getOperand(0))->getOperand(0));

590 if (auto *RV = RI->getReturnValue();

591 RV && RV->getType()->isPtrOrPtrVectorTy())

592 PushPtrOperand(RV);

593 }

594 }

595

596 std::vector Postorder;

597 while (!PostorderStack.empty()) {

598 Value *TopVal = PostorderStack.back().getPointer();

599

600

601 if (PostorderStack.back().getInt()) {

603 Postorder.push_back(TopVal);

604 PostorderStack.pop_back();

605 continue;

606 }

607

608 PostorderStack.back().setInt(true);

609

612 appendsFlatAddressExpressionToPostorderStack(PtrOperand, PostorderStack,

613 Visited);

614 }

615 }

616 }

617 return Postorder;

618}

619

620

621

623 Value *Operand) {

624 auto InsertBefore = [NewI](auto It) {

627 return NewI;

628 };

629

631

632

634 auto InsertI = F->getEntryBlock().getFirstNonPHIIt();

635 return InsertBefore(InsertI);

636 }

637

638

640

643

644

645 auto InsertI = OpInst->getParent()->getFirstNonPHIIt();

646 return InsertBefore(InsertI);

647 }

648

649

652 return NewI;

653}

654

655

656

657

659 const Use &OperandUse, unsigned NewAddrSpace,

661 const PredicatedAddrSpaceMapTy &PredicatedAS,

663 Value *Operand = OperandUse.get();

664

666

669

670 if (Value *NewOperand = ValueWithNewAddrSpace.lookup(Operand))

671 return NewOperand;

672

674 auto I = PredicatedAS.find(std::make_pair(Inst, Operand));

675 if (I != PredicatedAS.end()) {

676

677 unsigned NewAS = I->second;

680

683

686 return NewI;

687 }

688

689 PoisonUsesToFix->push_back(&OperandUse);

691}

692

693

694

695

696

697

698

699

700

701

702

703

704

705Value *InferAddressSpacesImpl::cloneInstructionWithNewAddressSpace(

706 Instruction *I, unsigned NewAddrSpace,

708 const PredicatedAddrSpaceMapTy &PredicatedAS,

709 SmallVectorImpl<const Use *> *PoisonUsesToFix) const {

711

712 if (I->getOpcode() == Instruction::AddrSpaceCast) {

713 Value *Src = I->getOperand(0);

714

715

716

717 assert(Src->getType()->getPointerAddressSpace() == NewAddrSpace);

718 return Src;

719 }

720

722

723

724 assert(II->getIntrinsicID() == Intrinsic::ptrmask);

726 II->getArgOperandUse(0), NewAddrSpace, ValueWithNewAddrSpace,

727 PredicatedAS, PoisonUsesToFix);

730 if (Rewrite) {

731 assert(Rewrite != II && "cannot modify this pointer operation in place");

732 return Rewrite;

733 }

734

735 return nullptr;

736 }

737

740

741

743 auto *NewI = new AddrSpaceCastInst(I, NewPtrTy);

744 NewI->insertAfter(I->getIterator());

745 NewI->setDebugLoc(I->getDebugLoc());

746 return NewI;

747 }

748

749

751 for (const Use &OperandUse : I->operands()) {

752 if (!OperandUse.get()->getType()->isPtrOrPtrVectorTy())

753 NewPointerOperands.push_back(nullptr);

754 else

756 OperandUse, NewAddrSpace, ValueWithNewAddrSpace, PredicatedAS,

757 PoisonUsesToFix));

758 }

759

760 switch (I->getOpcode()) {

761 case Instruction::BitCast:

762 return new BitCastInst(NewPointerOperands[0], NewPtrType);

763 case Instruction::PHI: {

764 assert(I->getType()->isPtrOrPtrVectorTy());

766 PHINode *NewPHI = PHINode::Create(NewPtrType, PHI->getNumIncomingValues());

767 for (unsigned Index = 0; Index < PHI->getNumIncomingValues(); ++Index) {

769 NewPHI->addIncoming(NewPointerOperands[OperandNo],

770 PHI->getIncomingBlock(Index));

771 }

772 return NewPHI;

773 }

774 case Instruction::GetElementPtr: {

777 GEP->getSourceElementType(), NewPointerOperands[0],

780 return NewGEP;

781 }

782 case Instruction::Select:

783 assert(I->getType()->isPtrOrPtrVectorTy());

785 NewPointerOperands[2], "", nullptr, I);

786 case Instruction::IntToPtr: {

789 if (Src->getType() == NewPtrType)

790 return Src;

791

792

793

794

795 return new AddrSpaceCastInst(Src, NewPtrType);

796 }

797 default:

799 }

800}

801

802

803

804

809 Type *TargetType =

810 CE->getType()->isPtrOrPtrVectorTy()

812 : CE->getType();

813

814 if (CE->getOpcode() == Instruction::AddrSpaceCast) {

815

816

817

818 assert(CE->getOperand(0)->getType()->getPointerAddressSpace() ==

819 NewAddrSpace);

820 return CE->getOperand(0);

821 }

822

823 if (CE->getOpcode() == Instruction::BitCast) {

824 if (Value *NewOperand = ValueWithNewAddrSpace.lookup(CE->getOperand(0)))

827 }

828

829 if (CE->getOpcode() == Instruction::IntToPtr) {

832 assert(Src->getType()->getPointerAddressSpace() == NewAddrSpace);

833 return Src;

834 }

835

836

837 bool IsNew = false;

839 for (unsigned Index = 0; Index < CE->getNumOperands(); ++Index) {

840 Constant *Operand = CE->getOperand(Index);

841

842

843

844

845

846 if (Value *NewOperand = ValueWithNewAddrSpace.lookup(Operand)) {

847 IsNew = true;

849 continue;

850 }

853 CExpr, NewAddrSpace, ValueWithNewAddrSpace, DL, TTI)) {

854 IsNew = true;

856 continue;

857 }

858

860 }

861

862

863

864 if (!IsNew)

865 return nullptr;

866

867 if (CE->getOpcode() == Instruction::GetElementPtr) {

868

869

870 return CE->getWithOperands(NewOperands, TargetType, false,

872 }

873

874 return CE->getWithOperands(NewOperands, TargetType);

875}

876

877

878

879

880

881

882Value *InferAddressSpacesImpl::cloneValueWithNewAddressSpace(

883 Value *V, unsigned NewAddrSpace,

885 const PredicatedAddrSpaceMapTy &PredicatedAS,

886 SmallVectorImpl<const Use *> *PoisonUsesToFix) const {

887

888 assert(V->getType()->getPointerAddressSpace() == FlatAddrSpace &&

890

892

893

896

897 Type *NewPtrTy = PointerType::get(Arg->getContext(), NewAddrSpace);

898 auto *NewI = new AddrSpaceCastInst(Arg, NewPtrTy);

899 NewI->insertBefore(Insert);

900 return NewI;

901 }

902

904 Value *NewV = cloneInstructionWithNewAddressSpace(

905 I, NewAddrSpace, ValueWithNewAddrSpace, PredicatedAS, PoisonUsesToFix);

907 if (NewI->getParent() == nullptr) {

908 NewI->insertBefore(I->getIterator());

909 NewI->takeName(I);

910 NewI->setDebugLoc(I->getDebugLoc());

911 }

912 }

913 return NewV;

914 }

915

918}

919

920

921

922unsigned InferAddressSpacesImpl::joinAddressSpaces(unsigned AS1,

923 unsigned AS2) const {

924 if (AS1 == FlatAddrSpace || AS2 == FlatAddrSpace)

925 return FlatAddrSpace;

926

928 return AS2;

930 return AS1;

931

932

933 return (AS1 == AS2) ? AS1 : FlatAddrSpace;

934}

935

936bool InferAddressSpacesImpl::run(Function &CurFn) {

937 F = &CurFn;

938 DL = &F->getDataLayout();

939

941 FlatAddrSpace = 0;

942

946 return false;

947 }

948

949

950 std::vector Postorder = collectFlatAddressExpressions(*F);

951

952

953

954 ValueToAddrSpaceMapTy InferredAddrSpace;

955 PredicatedAddrSpaceMapTy PredicatedAS;

956 inferAddressSpaces(Postorder, InferredAddrSpace, PredicatedAS);

957

958

959

960 return rewriteWithNewAddressSpaces(Postorder, InferredAddrSpace,

961 PredicatedAS);

962}

963

964

965

966void InferAddressSpacesImpl::inferAddressSpaces(

968 ValueToAddrSpaceMapTy &InferredAddrSpace,

969 PredicatedAddrSpaceMapTy &PredicatedAS) const {

971

972 for (Value *V : Postorder)

974

975 while (!Worklist.empty()) {

976 Value *V = Worklist.pop_back_val();

977

978

979

980 if (!updateAddressSpace(*V, InferredAddrSpace, PredicatedAS))

981 continue;

982

983 for (Value *User : V->users()) {

984

985 if (Worklist.count(User))

986 continue;

987

988 auto Pos = InferredAddrSpace.find(User);

989

990

991 if (Pos == InferredAddrSpace.end())

992 continue;

993

994

995

996

997 if (Pos->second == FlatAddrSpace)

998 continue;

999

1000 Worklist.insert(User);

1001 }

1002 }

1003}

1004

1005unsigned

1006InferAddressSpacesImpl::getPredicatedAddrSpace(const Value &Ptr,

1007 const Value *UserCtx) const {

1009 if (!UserCtxI)

1011

1013 for (auto &AssumeVH : AC.assumptionsFor(StrippedPtr)) {

1014 if (!AssumeVH)

1015 continue;

1018 continue;

1019

1020 const Value *Ptr;

1021 unsigned AS;

1023 if (Ptr)

1024 return AS;

1025 }

1026

1028}

1029

1030bool InferAddressSpacesImpl::updateAddressSpace(

1031 const Value &V, ValueToAddrSpaceMapTy &InferredAddrSpace,

1032 PredicatedAddrSpaceMapTy &PredicatedAS) const {

1033 assert(InferredAddrSpace.count(&V));

1034

1035 LLVM_DEBUG(dbgs() << "Updating the address space of\n " << V << '\n');

1036

1037

1038

1040

1041

1043

1046

1047 NewAS = AS;

1048 } else {

1049

1052 auto I = InferredAddrSpace.find(PtrOperand);

1053 unsigned OperandAS;

1054 if (I == InferredAddrSpace.end()) {

1055 OperandAS = PtrOperand->getType()->getPointerAddressSpace();

1057 C && OperandAS == FlatAddrSpace) {

1058

1060 continue;

1061 }

1062 if (OperandAS == FlatAddrSpace) {

1063

1064 unsigned AS = getPredicatedAddrSpace(*PtrOperand, &V);

1067 << " deduce operand AS from the predicate addrspace "

1068 << AS << '\n');

1069 OperandAS = AS;

1070

1071 PredicatedAS[std::make_pair(&V, PtrOperand)] = OperandAS;

1072 }

1073 }

1074 } else

1075 OperandAS = I->second;

1076

1077

1078 NewAS = joinAddressSpaces(NewAS, OperandAS);

1079 if (NewAS == FlatAddrSpace)

1080 break;

1081 }

1083 if (any_of(ConstantPtrOps, [=](Constant *C) {

1084 return !isSafeToCastConstAddrSpace(C, NewAS);

1085 }))

1086 NewAS = FlatAddrSpace;

1087 }

1088 }

1089

1090 unsigned OldAS = InferredAddrSpace.lookup(&V);

1091 assert(OldAS != FlatAddrSpace);

1092 if (OldAS == NewAS)

1093 return false;

1094

1095

1096

1098 InferredAddrSpace[&V] = NewAS;

1099 return true;

1100}

1101

1102

1103

1107 if (U.get() == OldVal) {

1108 U.set(NewVal);

1109 return true;

1110 }

1111

1112 return false;

1113}

1114

1115template

1117 InstrType *MemInstr, unsigned AddrSpace,

1119 if (!MemInstr->isVolatile() || TTI.hasVolatileVariant(MemInstr, AddrSpace)) {

1121 OldV, NewV);

1122 }

1123

1124 return false;

1125}

1126

1127

1128

1129

1130

1131

1132

1133

1135 User *Inst, unsigned AddrSpace,

1139

1142

1145

1148

1149 return false;

1150}

1151

1152

1153

1154

1159 B.CreateMemSet(NewV, MSI->getValue(), MSI->getLength(), MSI->getDestAlign(),

1160 false,

1161 MI->getAAMetadata());

1163 Value *Src = MTI->getRawSource();

1164 Value *Dest = MTI->getRawDest();

1165

1166

1167 if (Src == OldV)

1168 Src = NewV;

1169

1170 if (Dest == OldV)

1171 Dest = NewV;

1172

1174 if (MCI->isForceInlined())

1175 B.CreateMemCpyInline(Dest, MTI->getDestAlign(), Src,

1176 MTI->getSourceAlign(), MTI->getLength(),

1177 false,

1178 MI->getAAMetadata());

1179 else

1180 B.CreateMemCpy(Dest, MTI->getDestAlign(), Src, MTI->getSourceAlign(),

1181 MTI->getLength(),

1182 false,

1183 MI->getAAMetadata());

1184 } else {

1186 B.CreateMemMove(Dest, MTI->getDestAlign(), Src, MTI->getSourceAlign(),

1187 MTI->getLength(),

1188 false,

1189 MI->getAAMetadata());

1190 }

1191 } else

1193

1194 MI->eraseFromParent();

1195 return true;

1196}

1197

1198

1199

1200bool InferAddressSpacesImpl::isSafeToCastConstAddrSpace(Constant *C,

1201 unsigned NewAS) const {

1203

1204 unsigned SrcAS = C->getType()->getPointerAddressSpace();

1206 return true;

1207

1208

1209 if (SrcAS != FlatAddrSpace && NewAS != FlatAddrSpace)

1210 return false;

1211

1213 return true;

1214

1216

1217

1218 if (Op->getOpcode() == Instruction::AddrSpaceCast)

1219 return isSafeToCastConstAddrSpace(cast(Op->getOperand(0)),

1220 NewAS);

1221

1222 if (Op->getOpcode() == Instruction::IntToPtr &&

1223 Op->getType()->getPointerAddressSpace() == FlatAddrSpace)

1224 return true;

1225 }

1226

1227 return false;

1228}

1229

1232 User *CurUser = I->getUser();

1233 ++I;

1234

1235 while (I != End && I->getUser() == CurUser)

1236 ++I;

1237

1238 return I;

1239}

1240

1241void InferAddressSpacesImpl::performPointerReplacement(

1243 SmallVectorImpl<Instruction *> &DeadInstructions) const {

1244

1245 User *CurUser = U.getUser();

1246

1247 unsigned AddrSpace = V->getType()->getPointerAddressSpace();

1249 return;

1250

1251

1252 if (CurUser == NewV)

1253 return;

1254

1256 if (!CurUserI || CurUserI->getFunction() != F)

1257 return;

1258

1259

1262 return;

1263 }

1264

1266 if (rewriteIntrinsicOperands(II, V, NewV))

1267 return;

1268 }

1269

1271

1272

1273

1274

1275

1276

1278 int SrcIdx = U.getOperandNo();

1279 int OtherIdx = (SrcIdx == 0) ? 1 : 0;

1280 Value *OtherSrc = Cmp->getOperand(OtherIdx);

1281

1282 if (Value *OtherNewV = ValueWithNewAddrSpace.lookup(OtherSrc)) {

1283 if (OtherNewV->getType()->getPointerAddressSpace() == NewAS) {

1284 Cmp->setOperand(OtherIdx, OtherNewV);

1285 Cmp->setOperand(SrcIdx, NewV);

1286 return;

1287 }

1288 }

1289

1290

1292 if (isSafeToCastConstAddrSpace(KOtherSrc, NewAS)) {

1293 Cmp->setOperand(SrcIdx, NewV);

1295 KOtherSrc, NewV->getType()));

1296 return;

1297 }

1298 }

1299 }

1300

1303 if (ASC->getDestAddressSpace() == NewAS) {

1304 ASC->replaceAllUsesWith(NewV);

1305 DeadInstructions.push_back(ASC);

1306 return;

1307 }

1308 }

1309

1310

1312

1314 return;

1315

1316

1319 InsertPos = std::next(NewVInst->getIterator());

1320 else

1322

1324 ++InsertPos;

1325

1327 V, new AddrSpaceCastInst(NewV, V->getType(), "", InsertPos));

1328 } else {

1329 CurUserI->replaceUsesOfWith(

1331 }

1332}

1333

1334bool InferAddressSpacesImpl::rewriteWithNewAddressSpaces(

1336 const ValueToAddrSpaceMapTy &InferredAddrSpace,

1337 const PredicatedAddrSpaceMapTy &PredicatedAS) const {

1338

1339

1340

1341

1344 for (Value *V : Postorder) {

1345 unsigned NewAddrSpace = InferredAddrSpace.lookup(V);

1346

1347

1348

1350 continue;

1351

1352 if (V->getType()->getPointerAddressSpace() != NewAddrSpace) {

1354 cloneValueWithNewAddressSpace(V, NewAddrSpace, ValueWithNewAddrSpace,

1355 PredicatedAS, &PoisonUsesToFix);

1356 if (New)

1357 ValueWithNewAddrSpace[V] = New;

1358 }

1359 }

1360

1361 if (ValueWithNewAddrSpace.empty())

1362 return false;

1363

1364

1365 for (const Use *PoisonUse : PoisonUsesToFix) {

1366 User *V = PoisonUse->getUser();

1368 if (!NewV)

1369 continue;

1370

1371 unsigned OperandNo = PoisonUse->getOperandNo();

1373 NewV->setOperand(OperandNo, ValueWithNewAddrSpace.lookup(PoisonUse->get()));

1374 }

1375

1376 SmallVector<Instruction *, 16> DeadInstructions;

1379

1380

1381 for (const WeakTrackingVH &WVH : Postorder) {

1382 assert(WVH && "value was unexpectedly deleted");

1384 Value *NewV = ValueWithNewAddrSpace.lookup(V);

1385 if (NewV == nullptr)

1386 continue;

1387

1388 LLVM_DEBUG(dbgs() << "Replacing the uses of " << *V << "\n with\n "

1389 << *NewV << '\n');

1390

1394 if (C != Replace) {

1395 LLVM_DEBUG(dbgs() << "Inserting replacement const cast: " << Replace

1396 << ": " << *Replace << '\n');

1400 if (I->getFunction() == F)

1401 I->replaceUsesOfWith(C, Replace);

1402 } else {

1403 WorkList.append(U->user_begin(), U->user_end());

1404 }

1405 }

1406 if (!WorkList.empty()) {

1407 VMap[C] = Replace;

1408 DenseSet<User *> Visited{WorkList.begin(), WorkList.end()};

1409 while (!WorkList.empty()) {

1412 if (I->getFunction() == F)

1413 VMapper.remapInstruction(*I);

1414 continue;

1415 }

1416 for (User *U2 : U->users())

1417 if (Visited.insert(U2).second)

1419 }

1420 }

1421 V = Replace;

1422 }

1423 }

1424

1425 Value::use_iterator I, E, Next;

1426 for (I = V->use_begin(), E = V->use_end(); I != E;) {

1428

1429

1430

1432

1433 performPointerReplacement(V, NewV, U, ValueWithNewAddrSpace,

1434 DeadInstructions);

1435 }

1436

1437 if (V->use_empty()) {

1440 }

1441 }

1442

1443 for (Instruction *I : DeadInstructions)

1445

1446 return true;

1447}

1448

1449bool InferAddressSpaces::runOnFunction(Function &F) {

1450 if (skipFunction(F))

1451 return false;

1452

1453 auto *DTWP = getAnalysisIfAvailable();

1454 DominatorTree *DT = DTWP ? &DTWP->getDomTree() : nullptr;

1455 return InferAddressSpacesImpl(

1456 getAnalysis().getAssumptionCache(F), DT,

1457 &getAnalysis().getTTI(F),

1458 FlatAddrSpace)

1459 .run(F);

1460}

1461

1465

1470

1477 .run(F);

1482 return PA;

1483 }

1485}

assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")

MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL

Expand Atomic instructions

static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")

static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")

#define LLVM_UNLIKELY(EXPR)

This file contains the declarations for the subclasses of Constant, which represent the different fla...

This file defines the DenseMap class.

This file defines the DenseSet and SmallDenseSet classes.

static bool runOnFunction(Function &F, bool PostInlining)

This header defines various interfaces for pass management in LLVM.

This defines the Use class.

static bool replaceIfSimplePointerUse(const TargetTransformInfo &TTI, User *Inst, unsigned AddrSpace, Value *OldV, Value *NewV)

If OldV is used as the pointer operand of a compatible memory operation Inst, replaces the pointer op...

Definition InferAddressSpaces.cpp:1134

static bool replaceOperandIfSame(Instruction *Inst, unsigned OpIdx, Value *OldVal, Value *NewVal)

Replace operand OpIdx in Inst, if the value is the same as OldVal with NewVal.

Definition InferAddressSpaces.cpp:1104

static cl::opt< bool > AssumeDefaultIsFlatAddressSpace("assume-default-is-flat-addrspace", cl::init(false), cl::ReallyHidden, cl::desc("The default address space is assumed as the flat address space. " "This is mainly for test purpose."))

static bool isNoopPtrIntCastPair(const Operator *I2P, const DataLayout &DL, const TargetTransformInfo *TTI)

Definition InferAddressSpaces.cpp:276

static Value * phiNodeOperandWithNewAddressSpace(AddrSpaceCastInst *NewI, Value *Operand)

Definition InferAddressSpaces.cpp:622

static bool isAddressExpression(const Value &V, const DataLayout &DL, const TargetTransformInfo *TTI)

Definition InferAddressSpaces.cpp:311

static bool handleMemIntrinsicPtrUse(MemIntrinsic *MI, Value *OldV, Value *NewV)

Update memory intrinsic uses that require more complex processing than simple memory instructions.

Definition InferAddressSpaces.cpp:1155

static SmallVector< Value *, 2 > getPointerOperands(const Value &V, const DataLayout &DL, const TargetTransformInfo *TTI)

Definition InferAddressSpaces.cpp:348

static Value * operandWithNewAddressSpaceOrCreatePoison(const Use &OperandUse, unsigned NewAddrSpace, const ValueToValueMapTy &ValueWithNewAddrSpace, const PredicatedAddrSpaceMapTy &PredicatedAS, SmallVectorImpl< const Use * > *PoisonUsesToFix)

Definition InferAddressSpaces.cpp:658

static Value * cloneConstantExprWithNewAddressSpace(ConstantExpr *CE, unsigned NewAddrSpace, const ValueToValueMapTy &ValueWithNewAddrSpace, const DataLayout *DL, const TargetTransformInfo *TTI)

Definition InferAddressSpaces.cpp:805

static Value::use_iterator skipToNextUser(Value::use_iterator I, Value::use_iterator End)

Definition InferAddressSpaces.cpp:1230

Infer address static false Type * getPtrOrVecOfPtrsWithNewAS(Type *Ty, unsigned NewAddrSpace)

Definition InferAddressSpaces.cpp:267

static bool replaceSimplePointerUse(const TargetTransformInfo &TTI, InstrType *MemInstr, unsigned AddrSpace, Value *OldV, Value *NewV)

Definition InferAddressSpaces.cpp:1116

static const unsigned UninitializedAddressSpace

Definition InferAddressSpaces.cpp:144

Machine Check Debug Module

MachineInstr unsigned OpIdx

uint64_t IntrinsicInst * II

#define INITIALIZE_PASS_DEPENDENCY(depName)

#define INITIALIZE_PASS_END(passName, arg, name, cfg, analysis)

#define INITIALIZE_PASS_BEGIN(passName, arg, name, cfg, analysis)

This file implements a set that has insertion order iteration characteristics.

This file defines the SmallVector class.

This pass exposes codegen information to IR-level passes.

This class represents a conversion between pointers from one address space to another.

PassT::Result * getCachedResult(IRUnitT &IR) const

Get the cached result of an analysis pass for a given IR unit.

PassT::Result & getResult(IRUnitT &IR, ExtraArgTs... ExtraArgs)

Get the result of an analysis pass for a given IR unit.

AnalysisUsage & addRequired()

AnalysisUsage & addPreserved()

Add the specified Pass class to the set of analyses preserved by this pass.

LLVM_ABI void setPreservesCFG()

This function should be called by the pass, iff they do not:

This class represents an incoming formal argument to a Function.

A function analysis which provides an AssumptionCache.

An immutable pass that tracks lazily created AssumptionCache objects.

MutableArrayRef< ResultElem > assumptionsFor(const Value *V)

Access the list of assumptions which affect this value.

InstListType::iterator iterator

Instruction iterators...

Represents analyses that only rely on functions' control flow.

Value * getArgOperand(unsigned i) const

static LLVM_ABI bool isNoopCast(Instruction::CastOps Opcode, Type *SrcTy, Type *DstTy, const DataLayout &DL)

A no-op cast is one that can be effected without changing any bits.

A constant value that is initialized with an expression using other constant values.

static LLVM_ABI Constant * getAddrSpaceCast(Constant *C, Type *Ty, bool OnlyIfReduced=false)

static LLVM_ABI Constant * getBitCast(Constant *C, Type *Ty, bool OnlyIfReduced=false)

This is an important base class in LLVM.

A parsed version of the target data layout string in and methods for querying it.

Analysis pass which computes a DominatorTree.

FunctionPass class - This class is used to implement most global optimizations.

static GetElementPtrInst * Create(Type *PointeeType, Value *Ptr, ArrayRef< Value * > IdxList, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)

LLVM_ABI void setIsInBounds(bool b=true)

Set or clear the inbounds flag on this GEP instruction.

This provides a uniform API for creating instructions and inserting them into a basic block: either a...

const DebugLoc & getDebugLoc() const

Return the debug location for this node as a DebugLoc.

LLVM_ABI void insertBefore(InstListType::iterator InsertPos)

Insert an unlinked instruction into a basic block immediately before the specified position.

unsigned getOpcode() const

Returns a member of one of the enums like Instruction::Add.

void setDebugLoc(DebugLoc Loc)

Set the debug location information for this instruction.

LLVM_ABI void insertAfter(Instruction *InsertPos)

Insert an unlinked instruction into a basic block immediately after the specified instruction.

A wrapper class for inspecting calls to intrinsic functions.

This is the common base class for memset/memcpy/memmove.

This is a utility class that provides an abstraction for the common functionality between Instruction...

unsigned getOpcode() const

Return the opcode for this Instruction or ConstantExpr.

void addIncoming(Value *V, BasicBlock *BB)

Add an incoming value to the end of the PHI list.

static unsigned getOperandNumForIncomingValue(unsigned i)

static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)

Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...

static LLVM_ABI PassRegistry * getPassRegistry()

getPassRegistry - Access the global registry object, which is automatically initialized at applicatio...

static LLVM_ABI PointerType * get(Type *ElementType, unsigned AddressSpace)

This constructs a pointer to an object of the specified type in a numbered address space.

static LLVM_ABI PoisonValue * get(Type *T)

Static factory methods - Return an 'poison' object of the specified type.

A set of analyses that are preserved following a run of a transformation pass.

static PreservedAnalyses all()

Construct a special preserved set that preserves all passes.

PreservedAnalyses & preserveSet()

Mark an analysis set as preserved.

PreservedAnalyses & preserve()

Mark an analysis as preserved.

static SelectInst * Create(Value *C, Value *S1, Value *S2, const Twine &NameStr="", InsertPosition InsertBefore=nullptr, const Instruction *MDFrom=nullptr)

This class consists of common code factored out of the SmallVector class to reduce code duplication b...

void append(ItTy in_start, ItTy in_end)

Add the specified range to the end of the SmallVector.

void push_back(const T &Elt)

This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.

Analysis pass providing the TargetTransformInfo.

Wrapper pass for TargetTransformInfo.

This pass provides access to the codegen interfaces that are needed for IR-level transformations.

LLVM_ABI unsigned getAssumedAddrSpace(const Value *V) const

LLVM_ABI std::pair< const Value *, unsigned > getPredicatedAddrSpace(const Value *V) const

LLVM_ABI bool collectFlatAddressOperands(SmallVectorImpl< int > &OpIndexes, Intrinsic::ID IID) const

Return any intrinsic address operand indexes which may be rewritten if they use a flat address space ...

LLVM_ABI Value * rewriteIntrinsicWithAddressSpace(IntrinsicInst *II, Value *OldV, Value *NewV) const

Rewrite intrinsic call II such that OldV will be replaced with NewV, which has a different address sp...

LLVM_ABI unsigned getFlatAddressSpace() const

Returns the address space ID for a target's 'flat' address space.

The instances of the Type class are immutable: once they are created, they are never changed.

LLVM_ABI unsigned getPointerAddressSpace() const

Get the address space of this pointer or pointer vector type.

bool isPtrOrPtrVectorTy() const

Return true if this is a pointer type or a vector of pointer types.

A Use represents the edge between a Value definition and its users.

User * getUser() const

Returns the User that contains this Use.

const Use & getOperandUse(unsigned i) const

void setOperand(unsigned i, Value *Val)

LLVM_ABI bool replaceUsesOfWith(Value *From, Value *To)

Replace uses of one Value with another.

Value * getOperand(unsigned i) const

ValueT lookup(const KeyT &Val) const

lookup - Return the entry for the specified key, or a default constructed value if no such entry exis...

LLVM Value Representation.

Type * getType() const

All values are typed, get the type of this value.

LLVM_ABI const Value * stripInBoundsOffsets(function_ref< void(const Value *)> Func=[](const Value *) {}) const

Strip off pointer casts and inbounds GEPs.

use_iterator_impl< Use > use_iterator

LLVM_ABI const Value * stripPointerCasts() const

Strip off pointer casts, all-zero GEPs and address space casts.

std::pair< iterator, bool > insert(const ValueT &V)

const ParentTy * getParent() const

self_iterator getIterator()

#define llvm_unreachable(msg)

Marks that the current location is not supposed to be reachable.

unsigned ID

LLVM IR allows the use of arbitrary numbers as calling convention identifiers.

@ C

The default llvm calling convention, compatible with C.

InstrType

This represents what is and is not supported when finding similarity in Instructions.

LLVM_ABI Function * getOrInsertDeclaration(Module *M, ID id, ArrayRef< Type * > Tys={})

Look up the Function declaration of the intrinsic id in the Module M.

initializer< Ty > init(const Ty &Val)

PointerTypeMap run(const Module &M)

Compute the PointerTypeMap for the module M.

@ User

could "use" a pointer

NodeAddr< UseNode * > Use

friend class Instruction

Iterator for Instructions in a `BasicBlock`.

This is an optimization pass for GlobalISel generic memory operations.

FunctionAddr VTableAddr Value

LLVM_ABI bool isValidAssumeForContext(const Instruction *I, const Instruction *CxtI, const DominatorTree *DT=nullptr, bool AllowEphemerals=false)

Return true if it is valid to use the assumptions provided by an assume intrinsic,...

LLVM_ABI bool RecursivelyDeleteTriviallyDeadInstructions(Value *V, const TargetLibraryInfo *TLI=nullptr, MemorySSAUpdater *MSSAU=nullptr, std::function< void(Value *)> AboutToDeleteCallback=std::function< void(Value *)>())

If the specified value is a trivially dead instruction, delete it.

LLVM_ABI void initializeInferAddressSpacesPass(PassRegistry &)

decltype(auto) dyn_cast(const From &Val)

dyn_cast - Return the argument parameter cast to the specified type.

constexpr from_range_t from_range

iterator_range< early_inc_iterator_impl< detail::IterOfRange< RangeT > > > make_early_inc_range(RangeT &&Range)

Make a range that does early increment to allow mutation of the underlying range without disrupting i...

auto cast_or_null(const Y &Val)

auto dyn_cast_or_null(const Y &Val)

bool any_of(R &&range, UnaryPredicate P)

Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.

@ RF_IgnoreMissingLocals

If this flag is set, the remapper ignores missing function-local entries (Argument,...

@ RF_NoModuleLevelChanges

If this flag is set, the remapper knows that only local values within a function (such as an instruct...

LLVM_ABI raw_ostream & dbgs()

dbgs() - This returns a reference to a raw_ostream for debugging messages.

class LLVM_GSL_OWNER SmallVector

Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...

bool isa(const From &Val)

isa - Return true if the parameter to the template is an instance of one of the template type argu...

FunctionAddr VTableAddr Next

DWARFExpression::Operation Op

ArrayRef(const T &OneElt) -> ArrayRef< T >

ValueMap< const Value *, WeakTrackingVH > ValueToValueMapTy

LLVM_ABI FunctionPass * createInferAddressSpacesPass(unsigned AddressSpace=~0u)

Definition InferAddressSpaces.cpp:1462

decltype(auto) cast(const From &Val)

cast - Return the argument parameter cast to the specified type.

AnalysisManager< Function > FunctionAnalysisManager

Convenience typedef for the Function analysis manager.

InferAddressSpacesPass()

Definition InferAddressSpaces.cpp:1466

PreservedAnalyses run(Function &F, FunctionAnalysisManager &AM)

Definition InferAddressSpaces.cpp:1471