LLVM: lib/Analysis/BasicAliasAnalysis.cpp Source File (original) (raw)

1

2

3

4

5

6

7

8

9

10

11

12

13

14

58#include

59#include

60#include

61#include

62#include

63

64#define DEBUG_TYPE "basicaa"

65

66using namespace llvm;

67

68

71

74

75

76

77

78STATISTIC(SearchLimitReached, "Number of times the limit to "

79 "decompose GEPs is reached");

80STATISTIC(SearchTimes, "Number of times a GEP is decomposed");

81

83 FunctionAnalysisManager::Invalidator &Inv) {

84

85

86

87

90 return true;

91

92

93 return false;

94}

95

96

97

98

99

100

104 bool NullIsValidLoc,

105 bool RoundToAlign = false) {

110

111 if (Size->isScalable())

112 return std::nullopt;

114 }

115 return std::nullopt;

116}

117

118

119

120

124 bool NullIsValidLoc) {

125

126

127

128

129

130

131

132

133

134

135

136

137

138

139

140

141

142

143

144

146 return false;

147

148

149

150 std::optional ObjectSize = getObjectSize(V, DL, TLI, NullIsValidLoc,

151 true);

152

154}

155

156

157

158

162 bool NullIsValidLoc) {

163

164

165

166

167 bool CanBeNull, CanBeFreed;

169 V.getPointerDereferenceableBytes(DL, CanBeNull, CanBeFreed);

170 DerefBytes = (CanBeNull && NullIsValidLoc) ? 0 : DerefBytes;

171

172

176}

177

178

181 std::optional ObjectSize =

183 return ObjectSize && *ObjectSize == Size;

184}

185

186

191

192

193

194

195

197

200 bool OrAt) {

203

204 auto [CacheIt, Inserted] =

206 if (!Inserted)

207 return CacheIt->second;

208

212 CacheIt->second = Ret;

213 return Ret;

214}

215

220 return Succs.empty() ||

222}

223

229

230 auto Iter = EarliestEscapes.try_emplace(Object);

231 if (Iter.second) {

232 std::pair<Instruction *, CaptureComponents> EarliestCapture =

234 false, DT,

236 if (EarliestCapture.first)

237 Inst2Obj[EarliestCapture.first].push_back(Object);

238 Iter.first->second = EarliestCapture;

239 }

240

241 auto IsNotCapturedBefore = [&]() {

242

243 Instruction *CaptureInst = Iter.first->second.first;

244 if (!CaptureInst)

245 return true;

246

247

248 if (I)

249 return false;

250

251 if (I == CaptureInst) {

252 if (OrAt)

253 return false;

255 }

256

258 };

259 if (IsNotCapturedBefore())

261 return Iter.first->second.second;

262}

263

265 auto Iter = Inst2Obj.find(I);

266 if (Iter != Inst2Obj.end()) {

267 for (const Value *Obj : Iter->second)

268 EarliestEscapes.erase(Obj);

269 Inst2Obj.erase(I);

270 }

271}

272

273

274

275

276

277namespace {

278

279struct CastedValue {

281 unsigned ZExtBits = 0;

282 unsigned SExtBits = 0;

283 unsigned TruncBits = 0;

284

285 bool IsNonNegative = false;

286

287 explicit CastedValue(const Value *V) : V(V) {}

288 explicit CastedValue(const Value *V, unsigned ZExtBits, unsigned SExtBits,

289 unsigned TruncBits, bool IsNonNegative)

290 : V(V), ZExtBits(ZExtBits), SExtBits(SExtBits), TruncBits(TruncBits),

291 IsNonNegative(IsNonNegative) {}

292

294 return V->getType()->getPrimitiveSizeInBits() - TruncBits + ZExtBits +

295 SExtBits;

296 }

297

298 CastedValue withValue(const Value *NewV, bool PreserveNonNeg) const {

299 return CastedValue(NewV, ZExtBits, SExtBits, TruncBits,

300 IsNonNegative && PreserveNonNeg);

301 }

302

303

304 CastedValue withZExtOfValue(const Value *NewV, bool ZExtNonNegative) const {

305 unsigned ExtendBy = V->getType()->getPrimitiveSizeInBits() -

307 if (ExtendBy <= TruncBits)

308

309

310 return CastedValue(NewV, ZExtBits, SExtBits, TruncBits - ExtendBy,

311 IsNonNegative);

312

313

314 ExtendBy -= TruncBits;

315

316

317

318

319 return CastedValue(NewV, ZExtBits + SExtBits + ExtendBy, 0, 0,

320 ZExtNonNegative);

321 }

322

323

324 CastedValue withSExtOfValue(const Value *NewV) const {

325 unsigned ExtendBy = V->getType()->getPrimitiveSizeInBits() -

327 if (ExtendBy <= TruncBits)

328

329

330 return CastedValue(NewV, ZExtBits, SExtBits, TruncBits - ExtendBy,

331 IsNonNegative);

332

333

334 ExtendBy -= TruncBits;

335

336

337 return CastedValue(NewV, ZExtBits, SExtBits + ExtendBy, 0, IsNonNegative);

338 }

339

340 APInt evaluateWith(APInt N) const {

341 assert(N.getBitWidth() == V->getType()->getPrimitiveSizeInBits() &&

342 "Incompatible bit width");

343 if (TruncBits) N = N.trunc(N.getBitWidth() - TruncBits);

344 if (SExtBits) N = N.sext(N.getBitWidth() + SExtBits);

345 if (ZExtBits) N = N.zext(N.getBitWidth() + ZExtBits);

346 return N;

347 }

348

349 ConstantRange evaluateWith(ConstantRange N) const {

350 assert(N.getBitWidth() == V->getType()->getPrimitiveSizeInBits() &&

351 "Incompatible bit width");

352 if (TruncBits) N = N.truncate(N.getBitWidth() - TruncBits);

353 if (IsNonNegative && N.isAllNonNegative())

354 N = N.intersectWith(

357 if (SExtBits) N = N.signExtend(N.getBitWidth() + SExtBits);

358 if (ZExtBits) N = N.zeroExtend(N.getBitWidth() + ZExtBits);

359 return N;

360 }

361

362 bool canDistributeOver(bool NUW, bool NSW) const {

363

364

365

366 return (!ZExtBits || NUW) && (!SExtBits || NSW);

367 }

368

369 bool hasSameCastsAs(const CastedValue &Other) const {

370 if (V->getType() != Other.V->getType())

371 return false;

372

373 if (ZExtBits == Other.ZExtBits && SExtBits == Other.SExtBits &&

374 TruncBits == Other.TruncBits)

375 return true;

376

377

378 if (IsNonNegative || Other.IsNonNegative)

379 return (ZExtBits + SExtBits == Other.ZExtBits + Other.SExtBits &&

380 TruncBits == Other.TruncBits);

381 return false;

382 }

383};

384

385

387 CastedValue Val;

390

391

392 bool IsNUW;

393

394 bool IsNSW;

395

397 const APInt &Offset, bool IsNUW, bool IsNSW)

399

401 : Val(Val), IsNUW(true), IsNSW(true) {

402 unsigned BitWidth = Val.getBitWidth();

405 }

406

408

409

410 bool NSW = IsNSW && (Other.isOne() || (MulIsNSW && Offset.isZero()));

411 bool NUW = IsNUW && (Other.isOne() || MulIsNUW);

413 }

414};

415}

416

417

418

422

424 return Val;

425

428 Val.evaluateWith(Const->getValue()), true, true);

429

432 APInt RHS = Val.evaluateWith(RHSC->getValue());

433

434

435 bool NUW = true, NSW = true;

437 NUW &= BOp->hasNoUnsignedWrap();

438 NSW &= BOp->hasNoSignedWrap();

439 }

440 if (!Val.canDistributeOver(NUW, NSW))

441 return Val;

442

443

444

445 if (Val.TruncBits)

446 NUW = NSW = false;

447

449 switch (BOp->getOpcode()) {

450 default:

451

452

453 return Val;

454 case Instruction::Or:

455

457 return Val;

458

459 [[fallthrough]];

460 case Instruction::Add: {

462 Depth + 1, AC, DT);

463 E.Offset += RHS;

464 E.IsNUW &= NUW;

465 E.IsNSW &= NSW;

466 break;

467 }

468 case Instruction::Sub: {

470 Depth + 1, AC, DT);

471 E.Offset -= RHS;

472 E.IsNUW = false;

473 E.IsNSW &= NSW;

474 break;

475 }

476 case Instruction::Mul:

478 Depth + 1, AC, DT)

479 .mul(RHS, NUW, NSW);

480 break;

481 case Instruction::Shl:

482

483

484

485

486

487 if (RHS.getLimitedValue() > Val.getBitWidth())

488 return Val;

489

491 Depth + 1, AC, DT);

492 E.Offset <<= RHS.getLimitedValue();

493 E.Scale <<= RHS.getLimitedValue();

494 E.IsNUW &= NUW;

495 E.IsNSW &= NSW;

496 break;

497 }

498 return E;

499 }

500 }

501

504 Val.withZExtOfValue(ZExt->getOperand(0), ZExt->hasNonNeg()), DL,

505 Depth + 1, AC, DT);

506

509 Val.withSExtOfValue(cast(Val.V)->getOperand(0)),

511

512 return Val;

513}

514

515namespace {

516

517

518struct VariableGEPIndex {

519 CastedValue Val;

520 APInt Scale;

521

522

524

525

526 bool IsNSW;

527

528

529

530

531 bool IsNegated;

532

533 bool hasNegatedScaleOf(const VariableGEPIndex &Other) const {

534 if (IsNegated == Other.IsNegated)

535 return Scale == -Other.Scale;

536 return Scale == Other.Scale;

537 }

538

539 void dump() const {

541 dbgs() << "\n";

542 }

543 void print(raw_ostream &OS) const {

544 OS << "(V=" << Val.V->getName()

545 << ", zextbits=" << Val.ZExtBits

546 << ", sextbits=" << Val.SExtBits

547 << ", truncbits=" << Val.TruncBits

548 << ", scale=" << Scale

549 << ", nsw=" << IsNSW

550 << ", negated=" << IsNegated << ")";

551 }

552};

553}

554

555

556

558

560

562

564

566

569 dbgs() << "\n";

570 }

572 OS << ", inbounds=" << (NWFlags.isInBounds() ? "1" : "0")

573 << ", nuw=" << (NWFlags.hasNoUnsignedWrap() ? "1" : "0")

574 << "(DecomposedGEP Base=" << Base->getName() << ", Offset=" << Offset

575 << ", VarIndices=[";

576 for (size_t i = 0; i < VarIndices.size(); i++) {

577 if (i != 0)

578 OS << ", ";

580 }

581 OS << "])";

582 }

583};

584

585

586

587

588

589

590

591

592

594BasicAAResult::DecomposeGEPExpression(const Value *V, const DataLayout &DL,

596

598 SearchTimes++;

600

601 unsigned IndexSize = DL.getIndexTypeSizeInBits(V->getType());

602 DecomposedGEP Decomposed;

603 Decomposed.Offset = APInt(IndexSize, 0);

604 do {

605

607 if (Op) {

608

610 if (!GA->isInterposable()) {

611 V = GA->getAliasee();

612 continue;

613 }

614 }

615 Decomposed.Base = V;

616 return Decomposed;

617 }

618

619 if (Op->getOpcode() == Instruction::BitCast ||

620 Op->getOpcode() == Instruction::AddrSpaceCast) {

621 Value *NewV = Op->getOperand(0);

622

623

624 if (DL.getIndexTypeSizeInBits(NewV->getType()) != IndexSize) {

625 Decomposed.Base = V;

626 return Decomposed;

627 }

628 V = NewV;

629 continue;

630 }

631

633 if (!GEPOp) {

635

636 if (PHI->getNumIncomingValues() == 1) {

637 V = PHI->getIncomingValue(0);

638 continue;

639 }

641

642

643

644

645

646

647

648

649

652 continue;

653 }

654 }

655

656 Decomposed.Base = V;

657 return Decomposed;

658 }

659

660

662

664

665

668 I != E; ++I, ++GTI) {

670

672

674 if (FieldNo == 0)

675 continue;

676

677 Decomposed.Offset += DL.getStructLayout(STy)->getElementOffset(FieldNo);

678 continue;

679 }

680

681

683 if (CIdx->isZero())

684 continue;

685

686

689 Decomposed.Base = V;

690 return Decomposed;

691 }

692

693 Decomposed.Offset += AllocTypeSize.getFixedValue() *

694 CIdx->getValue().sextOrTrunc(IndexSize);

695 continue;

696 }

697

700 Decomposed.Base = V;

701 return Decomposed;

702 }

703

704

705

708 bool NonNeg = NUSW && NUW;

709 unsigned Width = Index->getType()->getIntegerBitWidth();

710 unsigned SExtBits = IndexSize > Width ? IndexSize - Width : 0;

711 unsigned TruncBits = IndexSize < Width ? Width - IndexSize : 0;

713 CastedValue(Index, 0, SExtBits, TruncBits, NonNeg), DL, 0, AC, DT);

714

715

716 unsigned TypeSize = AllocTypeSize.getFixedValue();

717 LE = LE.mul(APInt(IndexSize, TypeSize), NUW, NUSW);

718 Decomposed.Offset += LE.Offset;

719 APInt Scale = LE.Scale;

720 if (LE.IsNUW)

721 Decomposed.NWFlags = Decomposed.NWFlags.withoutNoUnsignedWrap();

722

723

724

725

726

727 for (unsigned i = 0, e = Decomposed.VarIndices.size(); i != e; ++i) {

728 if ((Decomposed.VarIndices[i].Val.V == LE.Val.V ||

729 areBothVScale(Decomposed.VarIndices[i].Val.V, LE.Val.V)) &&

730 Decomposed.VarIndices[i].Val.hasSameCastsAs(LE.Val)) {

731 Scale += Decomposed.VarIndices[i].Scale;

732

733 LE.IsNSW = LE.IsNUW = false;

734 Decomposed.VarIndices.erase(Decomposed.VarIndices.begin() + i);

735 break;

736 }

737 }

738

739 if (!!Scale) {

740 VariableGEPIndex Entry = {LE.Val, Scale, CxtI, LE.IsNSW,

741 false};

742 Decomposed.VarIndices.push_back(Entry);

743 }

744 }

745

746

748 } while (--MaxLookup);

749

750

751 Decomposed.Base = V;

752 SearchLimitReached++;

753 return Decomposed;

754}

755

758 bool IgnoreLocals) {

759 assert(Visited.empty() && "Visited must be cleared after use!");

761

762 unsigned MaxLookup = 8;

766

767 do {

769 if (!Visited.insert(V).second)

770 continue;

771

772

774 continue;

775

776

777

778

779

780

781

783 if (Arg->hasNoAliasAttr() && Arg->onlyReadsMemory()) {

785 continue;

786 }

787 }

788

789

791

792

793

794 if (!GV->isConstant())

796 continue;

797 }

798

799

803 continue;

804 }

805

806

807

809

810 if (PN->getNumIncomingValues() > MaxLookup)

812 append_range(Worklist, PN->incoming_values());

813 continue;

814 }

815

816

818 } while (!Worklist.empty() && --MaxLookup);

819

820

821 if (!Worklist.empty())

823

824 return Result;

825}

826

829 return II && II->getIntrinsicID() == IID;

830}

831

832

836

839

840

841 if (Call->hasReadingOperandBundles())

843 if (Call->hasClobberingOperandBundles())

845 if (Call->isVolatile()) {

846

848 }

849 Min &= FuncME;

850 }

851

852 return Min;

853}

854

855

856

858 switch (F->getIntrinsicID()) {

859 case Intrinsic::experimental_guard:

860 case Intrinsic::experimental_deoptimize:

861

862

865 }

866

867 return F->getMemoryEffects();

868}

869

871 unsigned ArgIdx) {

872 if (Call->doesNotAccessMemory(ArgIdx))

874

875 if (Call->onlyWritesMemory(ArgIdx))

877

878 if (Call->onlyReadsMemory(ArgIdx))

880

882}

883

884#ifndef NDEBUG

887 if (!inst->getParent())

888 return nullptr;

889 return inst->getParent()->getParent();

890 }

891

894

895 return nullptr;

896}

897

899

902

903 return !F1 || !F2 || F1 == F2;

904}

905#endif

906

911 "BasicAliasAnalysis doesn't support interprocedural queries.");

912 return aliasCheck(LocA.Ptr, LocA.Size, LocB.Ptr, LocB.Size, AAQI, CtxI);

913}

914

915

916

917

918

919

920

925 "AliasAnalysis query involving multiple functions!");

926

928

929

930

931

932

933

936 if (CI->isTailCall() &&

937 !CI->getAttributes().hasAttrSomewhere(Attribute::ByVal))

939

940

941

943 if (!AI->isStaticAlloca() && isIntrinsicCall(Call, Intrinsic::stackrestore))

945

946

947

950 if (ME.doesNotAccessMemory())

952

956

957

958

959

960

961

962

963

964

966 (isa(Object) || Call->hasFnAttr(Attribute::ReturnsTwice))) {

973 }

974

975

976

977

978 if ((ArgMR | OtherMR) != OtherMR) {

980 for (const Use &U : Call->data_ops()) {

981 const Value *Arg = U;

983 continue;

984 unsigned ArgIdx = Call->getDataOperandNo(&U);

986 Call->isArgOperand(&U)

992

993

994 if (NewArgMR == ArgMR)

995 break;

996 }

997 ArgMR = NewArgMR;

998 }

999

1000 ModRefInfo Result = ArgMR | OtherMR;

1001

1002

1003 if ((ErrnoMR | Result) != Result) {

1005

1006 Result |= ErrnoMR;

1007 }

1008 }

1009

1011 return Result;

1012

1013

1014

1015

1016

1017

1018

1020

1021

1025 }

1026

1027

1028

1029

1030

1031

1032

1033

1034

1035

1036

1037

1038

1039

1040

1041

1042

1043

1044

1045

1046

1047

1048

1049

1052

1053

1055}

1056

1060

1061

1062

1063

1064

1065

1066

1067

1068

1069

1070

1071 if (isIntrinsicCall(Call1, Intrinsic::experimental_guard))

1075

1076 if (isIntrinsicCall(Call2, Intrinsic::experimental_guard))

1080

1081

1083}

1084

1085

1086

1087

1088

1089

1090

1095 auto BaseObjectsAlias = [&]() {

1101 };

1102

1104

1105

1108

1109

1110

1111 return BaseObjectsAlias();

1112 }

1113

1114 DominatorTree *DT = getDT(AAQI);

1115 DecomposedGEP DecompGEP1 = DecomposeGEPExpression(GEP1, DL, &AC, DT);

1116 DecomposedGEP DecompGEP2 = DecomposeGEPExpression(V2, DL, &AC, DT);

1117

1118

1119 if (DecompGEP1.Base == GEP1 && DecompGEP2.Base == V2)

1121

1122

1123 if (DecompGEP1.Offset.getBitWidth() != DecompGEP2.Offset.getBitWidth())

1124 return BaseObjectsAlias();

1125

1126

1127 if (DecompGEP1.VarIndices.size() < DecompGEP2.VarIndices.size()) {

1128 std::swap(DecompGEP1, DecompGEP2);

1130 std::swap(UnderlyingV1, UnderlyingV2);

1131 }

1132

1133

1134

1135 subtractDecomposedGEPs(DecompGEP1, DecompGEP2, AAQI);

1136

1137

1138

1139

1140

1141

1142 if (DecompGEP1.NWFlags.isInBounds() && DecompGEP1.VarIndices.empty() &&

1144 DecompGEP1.Offset.sge(V2Size.getValue()) &&

1147

1148

1149 if (DecompGEP2.NWFlags.isInBounds() && DecompGEP1.VarIndices.empty() &&

1151 DecompGEP1.Offset.sle(-V1Size.getValue()) &&

1154

1155

1156

1157 if (DecompGEP1.Offset == 0 && DecompGEP1.VarIndices.empty())

1158 return AAQI.AAR.alias(MemoryLocation(DecompGEP1.Base, V1Size),

1159 MemoryLocation(DecompGEP2.Base, V2Size), AAQI);

1160

1161

1162 AliasResult BaseAlias =

1165

1166

1167

1171 return BaseAlias;

1172 }

1173

1174

1175

1176

1177

1178 if (DecompGEP1.VarIndices.empty()) {

1179 APInt &Off = DecompGEP1.Offset;

1180

1181

1182 LocationSize VLeftSize = V2Size;

1183 LocationSize VRightSize = V1Size;

1184 const bool Swapped = Off.isNegative();

1185

1186 if (Swapped) {

1187

1188

1189

1190

1191

1192

1193 std::swap(VLeftSize, VRightSize);

1195 }

1196

1199

1200 const TypeSize LSize = VLeftSize.getValue();

1202 if (Off.ult(LSize)) {

1203

1204

1207 Off.ule(INT32_MAX) && (Off + VRightSize.getValue()).ule(LSize)) {

1208

1209

1210

1212 AR.swap(Swapped);

1213 }

1214 return AR;

1215 }

1217 } else {

1218

1220 bool Overflow;

1223 if (!Overflow && Off.uge(UpperRange))

1225 }

1226 }

1227

1228

1229

1230

1231 if (DecompGEP1.VarIndices.size() == 1 &&

1232 DecompGEP1.VarIndices[0].Val.TruncBits == 0 &&

1233 DecompGEP1.Offset.isZero() &&

1236 const VariableGEPIndex &ScalableVar = DecompGEP1.VarIndices[0];

1237 APInt Scale =

1238 ScalableVar.IsNegated ? -ScalableVar.Scale : ScalableVar.Scale;

1239 LocationSize VLeftSize = Scale.isNegative() ? V1Size : V2Size;

1240

1241

1242

1243 bool Overflows = !DecompGEP1.VarIndices[0].IsNSW;

1244 if (Overflows) {

1247 }

1248

1249 if (!Overflows) {

1250

1251

1252

1256 }

1257 }

1258

1259

1260

1261

1262

1263

1264

1265

1266

1267

1268 if (!DecompGEP1.VarIndices.empty() &&

1269 DecompGEP1.NWFlags.hasNoUnsignedWrap() && V2Size.hasValue() &&

1272

1273

1276

1277

1278

1279 unsigned BW = DecompGEP1.Offset.getBitWidth();

1283

1284 APInt GCD;

1285 ConstantRange OffsetRange = ConstantRange(DecompGEP1.Offset);

1286 for (unsigned i = 0, e = DecompGEP1.VarIndices.size(); i != e; ++i) {

1287 const VariableGEPIndex &Index = DecompGEP1.VarIndices[i];

1288 const APInt &Scale = Index.Scale;

1289 APInt ScaleForGCD = Scale;

1290 if (Index.IsNSW)

1291 ScaleForGCD =

1293

1294 if (i == 0)

1295 GCD = ScaleForGCD.abs();

1296 else

1298

1300 true, &AC, Index.CxtI);

1305 CR = Index.Val.evaluateWith(CR).sextOrTrunc(OffsetRange.getBitWidth());

1306

1308 "Bit widths are normalized to MaxIndexSize");

1309 if (Index.IsNSW)

1310 CR = CR.smul_sat(ConstantRange(Scale));

1311 else

1312 CR = CR.smul_fast(ConstantRange(Scale));

1313

1314 if (Index.IsNegated)

1315 OffsetRange = OffsetRange.sub(CR);

1316 else

1317 OffsetRange = OffsetRange.add(CR);

1318 }

1319

1320

1321

1322

1323

1324

1325

1326 APInt ModOffset = DecompGEP1.Offset.srem(GCD);

1328 ModOffset += GCD;

1330 (GCD - ModOffset).uge(V1Size.getValue()))

1332

1333

1334

1335 ConstantRange Range1 = OffsetRange.add(

1336 ConstantRange(APInt(BW, 0), APInt(BW, V1Size.getValue())));

1337 ConstantRange Range2 =

1338 ConstantRange(APInt(BW, 0), APInt(BW, V2Size.getValue()));

1341

1342

1343

1344 auto MultiplyByScaleNoWrap = [](const VariableGEPIndex &Var) {

1345 if (Var.IsNSW)

1346 return true;

1347

1348 int ValOrigBW = Var.Val.V->getType()->getPrimitiveSizeInBits();

1349

1350

1351

1352 int MaxScaleValueBW = Var.Val.getBitWidth() - ValOrigBW;

1353 if (MaxScaleValueBW <= 0)

1354 return false;

1355 return Var.Scale.ule(

1357 };

1358

1359

1360

1361 std::optional MinAbsVarIndex;

1362 if (DecompGEP1.VarIndices.size() == 1) {

1363

1364 const VariableGEPIndex &Var = DecompGEP1.VarIndices[0];

1365 if (Var.Val.TruncBits == 0 &&

1366 isKnownNonZero(Var.Val.V, SimplifyQuery(DL, DT, &AC, Var.CxtI))) {

1367

1368

1369 if (MultiplyByScaleNoWrap(Var)) {

1370

1371 MinAbsVarIndex = Var.Scale.abs();

1372 }

1373 }

1374 } else if (DecompGEP1.VarIndices.size() == 2) {

1375

1376

1377

1378

1379 const VariableGEPIndex &Var0 = DecompGEP1.VarIndices[0];

1380 const VariableGEPIndex &Var1 = DecompGEP1.VarIndices[1];

1381 if (Var0.hasNegatedScaleOf(Var1) && Var0.Val.TruncBits == 0 &&

1383 MultiplyByScaleNoWrap(Var0) && MultiplyByScaleNoWrap(Var1) &&

1385 SimplifyQuery(DL, DT, &AC, Var0.CxtI

1386 ? Var0.CxtI

1387 : Var1.CxtI)))

1388 MinAbsVarIndex = Var0.Scale.abs();

1389 }

1390

1391 if (MinAbsVarIndex) {

1392

1393 APInt OffsetLo = DecompGEP1.Offset - *MinAbsVarIndex;

1394 APInt OffsetHi = DecompGEP1.Offset + *MinAbsVarIndex;

1395

1399 }

1400

1401 if (constantOffsetHeuristic(DecompGEP1, V1Size, V2Size, &AC, DT, AAQI))

1403

1404

1405

1406

1408}

1409

1411

1412 if (A == B)

1413 return A;

1414

1418

1420}

1421

1422

1423

1428

1429

1431 if (isValueEqualInPotentialCycles(SI->getCondition(), SI2->getCondition(),

1432 AAQI)) {

1433 AliasResult Alias =

1434 AAQI.AAR.alias(MemoryLocation(SI->getTrueValue(), SISize),

1435 MemoryLocation(SI2->getTrueValue(), V2Size), AAQI);

1438 AliasResult ThisAlias =

1439 AAQI.AAR.alias(MemoryLocation(SI->getFalseValue(), SISize),

1440 MemoryLocation(SI2->getFalseValue(), V2Size), AAQI);

1442 }

1443

1444

1445

1446 AliasResult Alias = AAQI.AAR.alias(MemoryLocation(SI->getTrueValue(), SISize),

1447 MemoryLocation(V2, V2Size), AAQI);

1450

1451 AliasResult ThisAlias =

1452 AAQI.AAR.alias(MemoryLocation(SI->getFalseValue(), SISize),

1453 MemoryLocation(V2, V2Size), AAQI);

1455}

1456

1457

1458

1464

1465

1466

1467

1470 std::optional Alias;

1472 AliasResult ThisAlias = AAQI.AAR.alias(

1474 MemoryLocation(

1475 PN2->getIncomingValueForBlock(PN->getIncomingBlock(i)), V2Size),

1476 AAQI);

1477 if (Alias)

1479 else

1480 Alias = ThisAlias;

1482 break;

1483 }

1484 return *Alias;

1485 }

1486

1488

1489

1490

1491 bool isRecursive = false;

1492 auto CheckForRecPhi = [&](Value *PV) {

1494 return false;

1496 isRecursive = true;

1497 return true;

1498 }

1499 return false;

1500 };

1501

1502 SmallPtrSet<Value *, 4> UniqueSrc;

1503 Value *OnePhi = nullptr;

1505

1506 if (PV1 == PN)

1507 continue;

1508

1510 if (OnePhi && OnePhi != PV1) {

1511

1512

1513

1514

1515

1517 }

1518 OnePhi = PV1;

1519 }

1520

1521 if (CheckForRecPhi(PV1))

1522 continue;

1523

1524 if (UniqueSrc.insert(PV1).second)

1526 }

1527

1528 if (OnePhi && UniqueSrc.size() > 1)

1529

1530

1532

1533

1534

1535

1536 if (V1Srcs.empty())

1538

1539

1540

1541

1542 if (isRecursive)

1544

1545

1546

1548

1549 AliasResult Alias = AAQI.AAR.alias(MemoryLocation(V1Srcs[0], PNSize),

1550 MemoryLocation(V2, V2Size), AAQI);

1551

1552

1553

1556

1557

1560

1561

1562

1563 for (unsigned i = 1, e = V1Srcs.size(); i != e; ++i) {

1564 Value *V = V1Srcs[i];

1565

1566 AliasResult ThisAlias = AAQI.AAR.alias(

1567 MemoryLocation(V, PNSize), MemoryLocation(V2, V2Size), AAQI);

1570 break;

1571 }

1572

1573 return Alias;

1574}

1575

1576

1577

1578

1585

1586

1587

1592

1593

1596

1597

1600

1601

1602

1605

1606

1607

1608

1609

1610

1611

1612 if (isValueEqualInPotentialCycles(V1, V2, AAQI))

1614

1615

1618

1619

1620

1627

1628 if (O1 != O2) {

1629

1632

1633

1634

1638

1639

1640

1641

1642

1643

1644

1645

1646

1647

1656 }

1657

1658

1659

1663 TLI, NullIsValidLocation)) ||

1666 TLI, NullIsValidLocation)))

1668

1670 for (AssumptionCache::ResultElem &Elem : AC.assumptionsFor(O1)) {

1672 continue;

1673

1675 OperandBundleUse OBU = Assume->getOperandBundleAt(Elem.Index);

1676 if (OBU.getTagName() == "separate_storage") {

1678 const Value *Hint1 = OBU.Inputs[0].get();

1679 const Value *Hint2 = OBU.Inputs[1].get();

1680

1681

1684

1685 DominatorTree *DT = getDT(AAQI);

1686 auto ValidAssumeForPtrContext = [&](const Value *Ptr) {

1689 true);

1690 }

1693 &*PtrA->getParent()->getEntryBlock().begin();

1695 true);

1696 }

1697 return false;

1698 };

1699

1700 if ((O1 == HintO1 && O2 == HintO2) || (O1 == HintO2 && O2 == HintO1)) {

1701

1702

1703

1705 true)) ||

1706 ValidAssumeForPtrContext(V1) || ValidAssumeForPtrContext(V2)) {

1708 }

1709 }

1710 }

1711 }

1712 }

1713

1714

1715

1716

1717

1718

1719

1720

1724 }

1725

1726

1727

1728

1729

1730 if (AAQI.Depth >= 512)

1732

1733

1734

1735

1736

1739 const bool Swapped = V1 > V2;

1740 if (Swapped)

1741 std::swap(Locs.first, Locs.second);

1744 if (!Pair.second) {

1745 auto &Entry = Pair.first->second;

1746 if (Entry.isDefinitive()) {

1747

1748

1749

1751 if (Entry.isAssumption())

1752 ++Entry.NumAssumptionUses;

1753 }

1754

1756 Result.swap(Swapped);

1758 }

1759

1762 AliasResult Result =

1763 aliasCheckRecursive(V1, V1Size, V2, V2Size, AAQI, O1, O2);

1764

1767 auto &Entry = It->second;

1768

1769

1770 bool AssumptionDisproven =

1772 if (AssumptionDisproven)

1774

1775

1778

1779 Entry.Result.swap(Swapped);

1780

1781

1782

1783

1784 if (AssumptionDisproven)

1787

1788

1789

1794 } else {

1796 }

1797

1798

1799

1800 if (AAQI.Depth == 1) {

1801

1802

1807 }

1810 }

1812}

1813

1814AliasResult BasicAAResult::aliasCheckRecursive(

1819 AliasResult Result = aliasGEP(GV1, V1Size, V2, V2Size, O1, O2, AAQI);

1823 AliasResult Result = aliasGEP(GV2, V2Size, V1, V1Size, O2, O1, AAQI);

1827 }

1828

1830 AliasResult Result = aliasPHI(PN, V1Size, V2, V2Size, AAQI);

1834 AliasResult Result = aliasPHI(PN, V2Size, V1, V1Size, AAQI);

1838 }

1839

1841 AliasResult Result = aliasSelect(S1, V1Size, V2, V2Size, AAQI);

1845 AliasResult Result = aliasSelect(S2, V2Size, V1, V1Size, AAQI);

1849 }

1850

1851

1852

1853 if (O1 == O2) {

1859 }

1860

1862}

1863

1866

1867

1868

1869 if (Loc.Size.hasValue() &&

1870 Loc.Size.getValue().getKnownMinValue() * 8 > TLI.getIntSize())

1872

1876}

1877

1878

1879

1880

1881

1882

1883

1884bool BasicAAResult::isValueEqualInPotentialCycles(const Value *V,

1885 const Value *V2,

1887 if (V != V2)

1888 return false;

1889

1891 return true;

1892

1893

1894

1896 if (!Inst || Inst->getParent()->isEntryBlock())

1897 return true;

1898

1899 return isNotInCycle(Inst, getDT(AAQI), nullptr);

1900}

1901

1902

1903void BasicAAResult::subtractDecomposedGEPs(DecomposedGEP &DestGEP,

1904 const DecomposedGEP &SrcGEP,

1906

1907

1908 if (DestGEP.Offset.ult(SrcGEP.Offset))

1909 DestGEP.NWFlags = DestGEP.NWFlags.withoutNoUnsignedWrap();

1910

1911 DestGEP.Offset -= SrcGEP.Offset;

1912 for (const VariableGEPIndex &Src : SrcGEP.VarIndices) {

1913

1914

1915 bool Found = false;

1916 for (auto I : enumerate(DestGEP.VarIndices)) {

1917 VariableGEPIndex &Dest = I.value();

1918 if ((!isValueEqualInPotentialCycles(Dest.Val.V, Src.Val.V, AAQI) &&

1920 !Dest.Val.hasSameCastsAs(Src.Val))

1921 continue;

1922

1923

1924 if (Dest.IsNegated) {

1925 Dest.Scale = -Dest.Scale;

1926 Dest.IsNegated = false;

1927 Dest.IsNSW = false;

1928 }

1929

1930

1931

1932 if (Dest.Scale != Src.Scale) {

1933

1934

1935 if (Dest.Scale.ult(Src.Scale))

1936 DestGEP.NWFlags = DestGEP.NWFlags.withoutNoUnsignedWrap();

1937

1938 Dest.Scale -= Src.Scale;

1939 Dest.IsNSW = false;

1940 } else {

1941 DestGEP.VarIndices.erase(DestGEP.VarIndices.begin() + I.index());

1942 }

1943 Found = true;

1944 break;

1945 }

1946

1947

1948 if (!Found) {

1949 VariableGEPIndex Entry = {Src.Val, Src.Scale, Src.CxtI, Src.IsNSW,

1950 true};

1951 DestGEP.VarIndices.push_back(Entry);

1952

1953

1954 DestGEP.NWFlags = DestGEP.NWFlags.withoutNoUnsignedWrap();

1955 }

1956 }

1957}

1958

1959bool BasicAAResult::constantOffsetHeuristic(const DecomposedGEP &GEP,

1965 if (GEP.VarIndices.size() != 2 || !MaybeV1Size.hasValue() ||

1967 return false;

1968

1969 const uint64_t V1Size = MaybeV1Size.getValue();

1970 const uint64_t V2Size = MaybeV2Size.getValue();

1971

1972 const VariableGEPIndex &Var0 = GEP.VarIndices[0], &Var1 = GEP.VarIndices[1];

1973

1974 if (Var0.Val.TruncBits != 0 || !Var0.Val.hasSameCastsAs(Var1.Val) ||

1975 !Var0.hasNegatedScaleOf(Var1) ||

1977 return false;

1978

1979

1980

1981

1982

1983 LinearExpression E0 =

1985 LinearExpression E1 =

1987 if (E0.Scale != E1.Scale || !E0.Val.hasSameCastsAs(E1.Val) ||

1988 !isValueEqualInPotentialCycles(E0.Val.V, E1.Val.V, AAQI))

1989 return false;

1990

1991

1992

1993

1994

1995

1996

1997

1998 APInt MinDiff = E0.Offset - E1.Offset, Wrapped = -MinDiff;

2000 APInt MinDiffBytes =

2002

2003

2004

2005

2006

2007 return MinDiffBytes.uge(V1Size + GEP.Offset.abs()) &&

2008 MinDiffBytes.uge(V2Size + GEP.Offset.abs());

2009}

2010

2011

2012

2013

2014

2016

2023

2025

2027

2028void BasicAAWrapperPass::anchor() {}

2029

2031 "Basic Alias Analysis (stateless AA impl)", true, true)

2036 "Basic Alias Analysis (stateless AA impl)", true, true)

2037

2041

2046

2048 TLIWP.getTLI(F), ACT.getAssumptionCache(F),

2049 &DTWP.getDomTree()));

2050

2051 return false;

2052}

2053

assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")

This file implements a class to represent arbitrary precision integral constant values and operations...

MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL

static void print(raw_ostream &Out, object::Archive::Kind Kind, T Val)

This file contains the simple types necessary to represent the attributes associated with functions a...

static cl::opt< bool > EnableRecPhiAnalysis("basic-aa-recphi", cl::Hidden, cl::init(true))

Enable analysis of recursive PHI nodes.

static const Function * getParent(const Value *V)

Definition BasicAliasAnalysis.cpp:885

static bool isObjectSmallerThan(const Value *V, TypeSize Size, const DataLayout &DL, const TargetLibraryInfo &TLI, bool NullIsValidLoc)

Returns true if we can prove that the object specified by V is smaller than Size.

Definition BasicAliasAnalysis.cpp:121

static bool isObjectSize(const Value *V, TypeSize Size, const DataLayout &DL, const TargetLibraryInfo &TLI, bool NullIsValidLoc)

Returns true if we can prove that the object specified by V has size Size.

Definition BasicAliasAnalysis.cpp:179

static cl::opt< bool > EnableSeparateStorageAnalysis("basic-aa-separate-storage", cl::Hidden, cl::init(true))

static bool isArgumentOrArgumentLike(const Value *V)

Definition BasicAliasAnalysis.cpp:1579

static bool notDifferentParent(const Value *O1, const Value *O2)

Definition BasicAliasAnalysis.cpp:898

static LinearExpression GetLinearExpression(const CastedValue &Val, const DataLayout &DL, unsigned Depth, AssumptionCache *AC, DominatorTree *DT)

Analyzes the specified value as a linear expression: "A*V + B", where A and B are constant integers.

Definition BasicAliasAnalysis.cpp:419

static bool isNotInCycle(const Instruction *I, const DominatorTree *DT, const LoopInfo *LI)

Definition BasicAliasAnalysis.cpp:216

static bool areBothVScale(const Value *V1, const Value *V2)

Return true if both V1 and V2 are VScale.

Definition BasicAliasAnalysis.cpp:187

basic Basic Alias true

Definition BasicAliasAnalysis.cpp:2036

static TypeSize getMinimalExtentFrom(const Value &V, const LocationSize &LocSize, const DataLayout &DL, bool NullIsValidLoc)

Return the minimal extent from V to the end of the underlying object, assuming the result is used in ...

Definition BasicAliasAnalysis.cpp:159

static AliasResult MergeAliasResults(AliasResult A, AliasResult B)

Definition BasicAliasAnalysis.cpp:1410

static bool isIntrinsicCall(const CallBase *Call, Intrinsic::ID IID)

Definition BasicAliasAnalysis.cpp:827

This is the interface for LLVM's primary stateless and local alias analysis.

static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")

static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")

static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")

This file contains the declarations for the subclasses of Constant, which represent the different fla...

This file provides utility analysis objects describing memory locations.

uint64_t IntrinsicInst * II

#define INITIALIZE_PASS_DEPENDENCY(depName)

#define INITIALIZE_PASS_END(passName, arg, name, cfg, analysis)

#define INITIALIZE_PASS_BEGIN(passName, arg, name, cfg, analysis)

This file provides utility classes that use RAII to save and restore values.

This file defines the make_scope_exit function, which executes user-defined cleanup logic at scope ex...

This file defines the SmallPtrSet class.

This file defines the SmallVector class.

This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...

#define STATISTIC(VARNAME, DESC)

static unsigned getBitWidth(Type *Ty, const DataLayout &DL)

Returns the bitwidth of the given scalar or pointer type.

This class stores info we want to provide to or retain within an alias query.

SmallVector< AAQueryInfo::LocPair, 4 > AssumptionBasedResults

Location pairs for which an assumption based result is currently stored.

unsigned Depth

Query depth used to distinguish recursive queries.

int NumAssumptionUses

How many active NoAlias assumption uses there are.

std::pair< AACacheLoc, AACacheLoc > LocPair

bool MayBeCrossIteration

Tracks whether the accesses may be on different cycle iterations.

LLVM_ABI AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB)

The main low level interface to the alias analysis implementation.

LLVM_ABI MemoryEffects getMemoryEffects(const CallBase *Call)

Return the behavior of the given call site.

LLVM_ABI ModRefInfo getArgModRefInfo(const CallBase *Call, unsigned ArgIdx)

Get the ModRef info associated with a pointer argument of a call.

LLVM_ABI AliasResult aliasErrno(const MemoryLocation &Loc, const Module *M)

Class for arbitrary precision integers.

LLVM_ABI APInt umul_ov(const APInt &RHS, bool &Overflow) const

LLVM_ABI APInt zextOrTrunc(unsigned width) const

Zero extend or truncate to width.

static APInt getMaxValue(unsigned numBits)

Gets maximum unsigned value of APInt for specific bit width.

APInt abs() const

Get the absolute value.

unsigned getBitWidth() const

Return the number of bits in the APInt.

bool ult(const APInt &RHS) const

Unsigned less than comparison.

bool isNegative() const

Determine sign of this APInt.

unsigned countr_zero() const

Count the number of trailing zero bits.

static APInt getSignedMinValue(unsigned numBits)

Gets minimum signed value of APInt for a specific bit width.

LLVM_ABI APInt smul_ov(const APInt &RHS, bool &Overflow) const

bool isNonNegative() const

Determine if this APInt Value is non-negative (>= 0)

static APInt getZero(unsigned numBits)

Get the '0' value for the specified bit-width.

static APInt getOneBitSet(unsigned numBits, unsigned BitNo)

Return an APInt with exactly one bit set in the result.

bool uge(const APInt &RHS) const

Unsigned greater or equal comparison.

The possible results of an alias query.

void swap(bool DoSwap=true)

Helper for processing AliasResult for swapped memory location pairs.

@ MayAlias

The two locations may or may not alias.

@ NoAlias

The two locations do not alias at all.

@ PartialAlias

The two locations alias, but only due to a partial overlap.

@ MustAlias

The two locations precisely alias each other.

void setOffset(int32_t NewOffset)

PassT::Result & getResult(IRUnitT &IR, ExtraArgTs... ExtraArgs)

Get the result of an analysis pass for a given IR unit.

Represent the analysis usage information of a pass.

void setPreservesAll()

Set by analyses that do not transform their input at all.

AnalysisUsage & addRequiredTransitive()

This class represents an incoming formal argument to a Function.

A function analysis which provides an AssumptionCache.

An immutable pass that tracks lazily created AssumptionCache objects.

A cache of @llvm.assume calls within a function.

This is the AA result object for the basic, local, and stateless alias analysis.

LLVM_ABI AliasResult aliasErrno(const MemoryLocation &Loc, const Module *M)

Definition BasicAliasAnalysis.cpp:1864

LLVM_ABI ModRefInfo getModRefInfo(const CallBase *Call, const MemoryLocation &Loc, AAQueryInfo &AAQI)

Checks to see if the specified callsite can clobber the specified memory object.

Definition BasicAliasAnalysis.cpp:921

LLVM_ABI ModRefInfo getArgModRefInfo(const CallBase *Call, unsigned ArgIdx)

Get the location associated with a pointer argument of a callsite.

Definition BasicAliasAnalysis.cpp:870

LLVM_ABI MemoryEffects getMemoryEffects(const CallBase *Call, AAQueryInfo &AAQI)

Returns the behavior when calling the given call site.

Definition BasicAliasAnalysis.cpp:833

LLVM_ABI ModRefInfo getModRefInfoMask(const MemoryLocation &Loc, AAQueryInfo &AAQI, bool IgnoreLocals=false)

Returns a bitmask that should be unconditionally applied to the ModRef info of a memory location.

Definition BasicAliasAnalysis.cpp:756

LLVM_ABI bool invalidate(Function &Fn, const PreservedAnalyses &PA, FunctionAnalysisManager::Invalidator &Inv)

Handle invalidation events in the new pass manager.

Definition BasicAliasAnalysis.cpp:82

LLVM_ABI AliasResult alias(const MemoryLocation &LocA, const MemoryLocation &LocB, AAQueryInfo &AAQI, const Instruction *CtxI)

Definition BasicAliasAnalysis.cpp:907

Legacy wrapper pass to provide the BasicAAResult object.

bool runOnFunction(Function &F) override

runOnFunction - Virtual method overridden by subclasses to do the per-function processing of the pass.

Definition BasicAliasAnalysis.cpp:2042

void getAnalysisUsage(AnalysisUsage &AU) const override

getAnalysisUsage - This function should be overridden by passes that need analysis information to do t...

Definition BasicAliasAnalysis.cpp:2054

BasicAAWrapperPass()

Definition BasicAliasAnalysis.cpp:2024

LLVM_ABI BasicAAResult run(Function &F, FunctionAnalysisManager &AM)

Definition BasicAliasAnalysis.cpp:2017

LLVM Basic Block Representation.

Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...

This class represents a function call, abstracting a target machine's calling convention.

This is the shared class of boolean and integer constants.

LLVM_ABI ConstantRange add(const ConstantRange &Other) const

Return a new range representing the possible values resulting from an addition of a value in this ran...

static LLVM_ABI ConstantRange fromKnownBits(const KnownBits &Known, bool IsSigned)

Initialize a range based on a known bits constraint.

LLVM_ABI ConstantRange smul_fast(const ConstantRange &Other) const

Return range of possible values for a signed multiplication of this and Other.

LLVM_ABI bool isEmptySet() const

Return true if this set contains no members.

LLVM_ABI ConstantRange smul_sat(const ConstantRange &Other) const

Perform a signed saturating multiplication of two constant ranges.

LLVM_ABI APInt getUnsignedMax() const

Return the largest unsigned value contained in the ConstantRange.

LLVM_ABI ConstantRange intersectWith(const ConstantRange &CR, PreferredRangeType Type=Smallest) const

Return the range that results from the intersection of this range with another range.

LLVM_ABI APInt getSignedMax() const

Return the largest signed value contained in the ConstantRange.

uint32_t getBitWidth() const

Get the bit width of this ConstantRange.

LLVM_ABI ConstantRange sub(const ConstantRange &Other) const

Return a new range representing the possible values resulting from a subtraction of a value in this r...

A parsed version of the target data layout string in and methods for querying it.

iterator find(const_arg_type_t< KeyT > Val)

std::pair< iterator, bool > try_emplace(KeyT &&Key, Ts &&...Args)

bool erase(const KeyT &Val)

Analysis pass which computes a DominatorTree.

Legacy analysis pass which computes a DominatorTree.

Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.

void removeInstruction(Instruction *I)

Definition BasicAliasAnalysis.cpp:264

CaptureComponents getCapturesBefore(const Value *Object, const Instruction *I, bool OrAt) override

Return how Object may be captured before instruction I, considering only provenance captures.

Definition BasicAliasAnalysis.cpp:225

FunctionPass class - This class is used to implement most global optimizations.

Represents flags for the getelementptr instruction/expression.

static GEPNoWrapFlags all()

bool hasNoUnsignedSignedWrap() const

bool hasNoUnsignedWrap() const

LLVM_ABI Type * getSourceElementType() const

GEPNoWrapFlags getNoWrapFlags() const

Module * getParent()

Get the module that this global value is contained inside of...

A wrapper class for inspecting calls to intrinsic functions.

bool mayBeBeforePointer() const

Whether accesses before the base pointer are possible.

static constexpr LocationSize beforeOrAfterPointer()

Any location before or after the base pointer (but still within the underlying object).

TypeSize getValue() const

static constexpr LocationSize afterPointer()

Any location after the base pointer (but still within the underlying object).

static MemoryEffectsBase readOnly()

MemoryEffectsBase getWithoutLoc(Location Loc) const

Get new MemoryEffectsBase with NoModRef on the given Loc.

static MemoryEffectsBase inaccessibleMemOnly(ModRefInfo MR=ModRefInfo::ModRef)

static MemoryEffectsBase writeOnly()

Representation for a specific memory location.

LocationSize Size

The maximum size of the location, in address-units, or UnknownSize if the size is not known.

static MemoryLocation getBeforeOrAfter(const Value *Ptr, const AAMDNodes &AATags=AAMDNodes())

Return a location that may access any location before or after Ptr, while remaining within the underl...

const Value * Ptr

The address of the start of the location.

static LLVM_ABI MemoryLocation getForArgument(const CallBase *Call, unsigned ArgIdx, const TargetLibraryInfo *TLI)

Return a location representing a particular argument of a call.

A Module instance is used to store all the information related to an LLVM module.

This is a utility class that provides an abstraction for the common functionality between Instruction...

op_range incoming_values()

BasicBlock * getIncomingBlock(unsigned i) const

Return incoming basic block number i.

Value * getIncomingValue(unsigned i) const

Return incoming value number x.

unsigned getNumIncomingValues() const

Return the number of incoming edges.

AnalysisType & getAnalysis() const

getAnalysis() - This function is used by subclasses to get to the analysis information ...

A set of analyses that are preserved following a run of a transformation pass.

This class represents the LLVM 'select' instruction.

CaptureComponents getCapturesBefore(const Value *Object, const Instruction *I, bool OrAt) override

Return how Object may be captured before instruction I, considering only provenance captures.

Definition BasicAliasAnalysis.cpp:198

std::pair< iterator, bool > insert(PtrType Ptr)

Inserts Ptr if and only if there is no element in the container equal to Ptr.

void push_back(const T &Elt)

This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.

Analysis pass providing the TargetLibraryInfo.

Provides information about what library functions are available for the current target.

static constexpr TypeSize getFixed(ScalarTy ExactSize)

bool isPointerTy() const

True if this is an instance of PointerType.

LLVM_ABI TypeSize getPrimitiveSizeInBits() const LLVM_READONLY

Return the basic size of this type if it is a primitive type.

bool isSized(SmallPtrSetImpl< Type * > *Visited=nullptr) const

Return true if it makes sense to take the size of this type.

A Use represents the edge between a Value definition and its users.

const Use * const_op_iterator

Value * getOperand(unsigned i) const

LLVM Value Representation.

Type * getType() const

All values are typed, get the type of this value.

LLVM_ABI StringRef getName() const

Return a constant reference to the value's name.

LLVM_ABI const Value * stripPointerCastsForAliasAnalysis() const

Strip off pointer casts, all-zero GEPs, single-argument phi nodes and invariant group info.

constexpr ScalarTy getFixedValue() const

static constexpr bool isKnownLT(const FixedOrScalableQuantity &LHS, const FixedOrScalableQuantity &RHS)

constexpr bool isScalable() const

Returns whether the quantity is scaled by a runtime quantity (vscale).

constexpr ScalarTy getKnownMinValue() const

Returns the minimum value this quantity can represent.

StructType * getStructTypeOrNull() const

TypeSize getSequentialElementStride(const DataLayout &DL) const

const ParentTy * getParent() const

This class implements an extremely fast bulk output stream that can only output to a stream.

const APInt & umin(const APInt &A, const APInt &B)

Determine the smaller of two APInts considered to be unsigned.

LLVM_ABI APInt GreatestCommonDivisor(APInt A, APInt B)

Compute GCD of two unsigned APInt values.

bool match(Val *V, const Pattern &P)

IntrinsicID_match m_VScale()

Matches a call to llvm.vscale().

initializer< Ty > init(const Ty &Val)

@ Assume

Do not drop type tests (default).

friend class Instruction

Iterator for Instructions in a `BasicBlock.

This is an optimization pass for GlobalISel generic memory operations.

void dump(const SparseBitVector< ElementSize > &LHS, raw_ostream &out)

bool capturesReadProvenanceOnly(CaptureComponents CC)

FunctionAddr VTableAddr Value

LLVM_ABI bool isValidAssumeForContext(const Instruction *I, const Instruction *CxtI, const DominatorTree *DT=nullptr, bool AllowEphemerals=false)

Return true if it is valid to use the assumptions provided by an assume intrinsic,...

SaveAndRestore(T &) -> SaveAndRestore< T >

detail::scope_exit< std::decay_t< Callable > > make_scope_exit(Callable &&F)

LLVM_ABI const Value * getArgumentAliasingToReturnedPointer(const CallBase *Call, bool MustPreserveNullness)

This function returns the call pointer argument that is considered the same by aliasing rules.

auto enumerate(FirstRange &&First, RestRanges &&...Rest)

Given two or more input ranges, returns a new range whose values are tuples (A, B,...

LLVM_ABI bool isPotentiallyReachableFromMany(SmallVectorImpl< BasicBlock * > &Worklist, const BasicBlock *StopBB, const SmallPtrSetImpl< BasicBlock * > *ExclusionSet, const DominatorTree *DT=nullptr, const LoopInfo *LI=nullptr)

Determine whether there is at least one path from a block in 'Worklist' to 'StopBB' without passing t...

decltype(auto) dyn_cast(const From &Val)

dyn_cast - Return the argument parameter cast to the specified type.

auto successors(const MachineBasicBlock *BB)

LLVM_ABI bool isBaseOfObject(const Value *V)

Return true if we know V to the base address of the corresponding memory object.

void append_range(Container &C, Range &&R)

Wrapper function to append range R to container C.

constexpr bool isUIntN(unsigned N, uint64_t x)

Checks if an unsigned integer fits into the given (dynamic) bit width.

LLVM_ABI std::pair< Instruction *, CaptureComponents > FindEarliestCapture(const Value *V, Function &F, bool ReturnCaptures, const DominatorTree &DT, CaptureComponents Mask, unsigned MaxUsesToExplore=0)

MemoryEffectsBase< IRMemLocation > MemoryEffects

Summary of how a function affects memory in the program.

LLVM_ABI std::optional< TypeSize > getBaseObjectSize(const Value *Ptr, const DataLayout &DL, const TargetLibraryInfo *TLI, ObjectSizeOpts Opts={})

Like getObjectSize(), but only returns the size of base objects (like allocas, global variables and a...

LLVM_ABI ConstantRange computeConstantRange(const Value *V, bool ForSigned, bool UseInstrInfo=true, AssumptionCache *AC=nullptr, const Instruction *CtxI=nullptr, const DominatorTree *DT=nullptr, unsigned Depth=0)

Determine the possible constant range of an integer or vector of integer value.

LLVM_ABI bool getObjectSize(const Value *Ptr, uint64_t &Size, const DataLayout &DL, const TargetLibraryInfo *TLI, ObjectSizeOpts Opts={})

Compute the size of the object pointed by Ptr.

bool capturesFullProvenance(CaptureComponents CC)

bool isModSet(const ModRefInfo MRI)

LLVM_ABI void computeKnownBits(const Value *V, KnownBits &Known, const DataLayout &DL, AssumptionCache *AC=nullptr, const Instruction *CxtI=nullptr, const DominatorTree *DT=nullptr, bool UseInstrInfo=true, unsigned Depth=0)

Determine which bits of V are known to be either zero or one and return them in the KnownZero/KnownOn...

LLVM_ABI bool NullPointerIsDefined(const Function *F, unsigned AS=0)

Check whether null pointer dereferencing is considered undefined behavior for a given function or an ...

LLVM_ABI raw_ostream & dbgs()

dbgs() - This returns a reference to a raw_ostream for debugging messages.

generic_gep_type_iterator<> gep_type_iterator

bool isModOrRefSet(const ModRefInfo MRI)

constexpr unsigned MaxLookupSearchDepth

The max limit of the search depth in DecomposeGEPExpression() and getUnderlyingObject().

LLVM_ABI ConstantRange getVScaleRange(const Function *F, unsigned BitWidth)

Determine the possible constant range of vscale with the given bit width, based on the vscale_range f...

LLVM_ABI FunctionPass * createBasicAAWrapperPass()

Definition BasicAliasAnalysis.cpp:2038

LLVM_ABI bool isMallocOrCallocLikeFn(const Value *V, const TargetLibraryInfo *TLI)

Tests if a value is a call or invoke to a library function that allocates memory similar to malloc or...

CaptureComponents

Components of the pointer that may be captured.

class LLVM_GSL_OWNER SmallVector

Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...

bool isa(const From &Val)

isa - Return true if the parameter to the template is an instance of one of the template type argu...

LLVM_ABI bool isKnownNonZero(const Value *V, const SimplifyQuery &Q, unsigned Depth=0)

Return true if the given value is known to be non-zero when defined.

ModRefInfo

Flags indicating whether a memory access modifies or references memory.

@ Ref

The access may reference the value stored in memory.

@ ModRef

The access may reference and may modify the value stored in memory.

@ Mod

The access may modify the value stored in memory.

@ NoModRef

The access neither references nor modifies the value stored in memory.

@ ArgMem

Access to memory via argument pointers.

@ InaccessibleMem

Memory that is inaccessible via LLVM IR.

LLVM_ABI bool isKnownNonEqual(const Value *V1, const Value *V2, const SimplifyQuery &SQ, unsigned Depth=0)

Return true if the given values are known to be non-equal when defined.

DWARFExpression::Operation Op

LLVM_ABI bool PointerMayBeCaptured(const Value *V, bool ReturnCaptures, unsigned MaxUsesToExplore=0)

PointerMayBeCaptured - Return true if this pointer value may be captured by the enclosing function (w...

bool isModAndRefSet(const ModRefInfo MRI)

LLVM_ABI bool isIdentifiedFunctionLocal(const Value *V)

Return true if V is unambiguously identified at the function-level.

constexpr unsigned BitWidth

LLVM_ABI bool isEscapeSource(const Value *V)

Returns true if the pointer is one which would have been considered an escape by isNotCapturedBefore.

decltype(auto) cast(const From &Val)

cast - Return the argument parameter cast to the specified type.

gep_type_iterator gep_type_begin(const User *GEP)

AnalysisManager< Function > FunctionAnalysisManager

Convenience typedef for the Function analysis manager.

LLVM_ABI const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=MaxLookupSearchDepth)

This method strips off any GEP address adjustments, pointer casts or llvm.threadlocal....

bool capturesNothing(CaptureComponents CC)

LLVM_ABI bool isIdentifiedObject(const Value *V)

Return true if this pointer refers to a distinct and identifiable object.

LLVM_ABI bool isPotentiallyReachable(const Instruction *From, const Instruction *To, const SmallPtrSetImpl< BasicBlock * > *ExclusionSet=nullptr, const DominatorTree *DT=nullptr, const LoopInfo *LI=nullptr)

Determine whether instruction 'To' is reachable from 'From', without passing through any blocks in Ex...

void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)

Implement std::swap in terms of BitVector swap.

APInt Offset

Definition BasicAliasAnalysis.cpp:561

void dump() const

Definition BasicAliasAnalysis.cpp:567

SmallVector< VariableGEPIndex, 4 > VarIndices

Definition BasicAliasAnalysis.cpp:563

const Value * Base

Definition BasicAliasAnalysis.cpp:559

void print(raw_ostream &OS) const

Definition BasicAliasAnalysis.cpp:571

GEPNoWrapFlags NWFlags

Definition BasicAliasAnalysis.cpp:565

static constexpr int Definitive

Cache entry is neither an assumption nor does it use a (non-definitive) assumption.

static constexpr int AssumptionBased

Cache entry is not an assumption itself, but may be using an assumption from higher up the stack.

A special type used by analysis passes to provide an address that identifies that particular analysis...

virtual CaptureComponents getCapturesBefore(const Value *Object, const Instruction *I, bool OrAt)=0

Return how Object may be captured before instruction I, considering only provenance captures.

virtual ~CaptureAnalysis()=0

Linear expression BasePtr + Index * Scale + Offset.

LinearExpression(Value *BasePtr, unsigned BitWidth)

Various options to control the behavior of getObjectSize.

bool NullIsUnknownSize

If this is true, null pointers in address space 0 will be treated as though they can't be evaluated.

bool RoundToAlign

Whether to round the result up to the alignment of allocas, byval arguments, and global variables.

StringRef getTagName() const

Return the tag of this operand bundle as a string.