clang: lib/CodeGen/CGRecordLayoutBuilder.cpp Source File (original) (raw)
1
2
3
4
5
6
7
8
9
10
11
12
24#include "llvm/IR/DataLayout.h"
25#include "llvm/IR/DerivedTypes.h"
26#include "llvm/IR/Type.h"
27#include "llvm/Support/Debug.h"
28#include "llvm/Support/MathExtras.h"
29#include "llvm/Support/raw_ostream.h"
30using namespace clang;
32
33namespace {
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
// NOTE(review): this file is an HTML-scraped copy of a clang CodeGen source
// file. The original line numbers are fused onto each line (e.g. "73struct")
// and the extraction dropped many lines outright (several method headers and
// template arguments below are visibly missing). Comments here describe only
// what the surviving text shows; anything else is hedged. Verify every detail
// against upstream llvm-project before editing.
//
// CGRecordLowering: lowers one RecordDecl's AST layout into an LLVM struct
// layout. It accumulates MemberInfo entries (fields, bitfield storage, bases,
// vptr/vbptr slots), sorts them by offset, inserts padding, and fills the
// output maps (FieldTypes, Fields, BitFields, NonVirtualBases, VirtualBases).
73struct CGRecordLowering {
 74
 75
 76
// One layout entry keyed by its byte offset within the record. Data is the
// LLVM type occupying that slot (nullptr for entries that share storage,
// e.g. individual bitfields inside an access unit). FD/RD overlap in a union
// because an entry describes either a field or a (v)base, never both.
 struct MemberInfo {
 78 CharUnits Offset;
 79 enum InfoKind { VFPtr, VBPtr, Field, Base, VBase } Kind;
 80 llvm::Type *Data;
 81 union {
 82 const FieldDecl *FD;
 83 const CXXRecordDecl *RD;
 84 };
 85 MemberInfo(CharUnits Offset, InfoKind Kind, llvm::Type *Data,
 86 const FieldDecl *FD = nullptr)
 87 : Offset(Offset), Kind(Kind), Data(Data), FD(FD) {}
 88 MemberInfo(CharUnits Offset, InfoKind Kind, llvm::Type *Data,
 89 const CXXRecordDecl *RD)
 90 : Offset(Offset), Kind(Kind), Data(Data), RD(RD) {}
 91
 // Ordering by offset only; used with llvm::stable_sort in lower() so
 // entries at equal offsets keep insertion order.
 92 bool operator <(const MemberInfo& a) const { return Offset < a.Offset; }
 93 };
 94
 95 CGRecordLowering(CodeGenTypes &Types, const RecordDecl *D, bool Packed);
 96
 97
 // Helper for a raw storage entry: a Field-kind MemberInfo with no FieldDecl.
 98 static MemberInfo StorageInfo(CharUnits Offset, llvm::Type *Data) {
 99 return MemberInfo(Offset, MemberInfo::Field, Data);
 100 }
 101
 102
 103
 104
 105
 106
 // True for MS ABI or ms_struct records — bitfield layout there never merges
 // runs across storage-type boundaries (see the discrete branch in
 // accumulateBitFields).
 107 bool isDiscreteBitFieldABI() const {
 108 return Context.getTargetInfo().getCXXABI().isMicrosoft() ||
 109 D->isMsStruct(Context);
 110 }
 111
 112
 // NOTE(review): the method header for this accessor was dropped by the
 // scrape — upstream this is presumably `bool isAAPCS() const {`; confirm.
 114 return Context.getTargetInfo().getABI().starts_with("aapcs");
 115 }
 116
 117
 118 bool isBE() const { return Context.getTargetInfo().isBigEndian(); }
 119
 120
 121
 122
 123
 // Itanium-family ABIs allow a virtual base to overlap the tail padding of
 // the containing object; MS ABI does not.
 124 bool isOverlappingVBaseABI() const {
 125 return !Context.getTargetInfo().getCXXABI().isMicrosoft();
 126 }
 127
 128
 // Integer type of NumBits rounded up to a whole number of chars.
 129 llvm::Type *getIntNType(uint64_t NumBits) const {
 130 unsigned AlignedBits = llvm::alignTo(NumBits, Context.getCharWidth());
 131 return llvm::Type::getIntNTy(Types.getLLVMContext(), AlignedBits);
 132 }
 133
 134 llvm::Type *getCharType() const {
 135 return llvm::Type::getIntNTy(Types.getLLVMContext(),
 136 Context.getCharWidth());
 137 }
 138
 // [i8 x N] used for padding and clipped bitfield storage.
 // NOTE(review): the `return` line is missing here in the scrape.
 139 llvm::Type *getByteArrayType(CharUnits NumChars) const {
 140 assert(!NumChars.isZero() && "Empty byte arrays aren't allowed.");
 141 llvm::Type *Type = getCharType();
 143 (llvm::Type *)llvm::ArrayType::get(Type, NumChars.getQuantity());
 144 }
 145
 146
 // Memory type for a field; under discrete-bitfield ABIs the declared type
 // is used as-is. NOTE(review): a line (149/151 upstream) is missing between
 // the early return and the toBits(...) expression — confirm upstream.
 147 llvm::Type *getStorageType(const FieldDecl *FD) const {
 148 llvm::Type *Type = Types.ConvertTypeForMem(FD->getType());
 150 if (isDiscreteBitFieldABI()) return Type;
 152 (unsigned)Context.toBits(getSize(Type))));
 153 }
 154
 155 llvm::Type *getStorageType(const CXXRecordDecl *RD) const {
 156 return Types.getCGRecordLayout(RD).getBaseSubobjectLLVMType();
 157 }
 158 CharUnits bitsToCharUnits(uint64_t BitOffset) const {
 159 return Context.toCharUnitsFromBits(BitOffset);
 160 }
 // NOTE(review): the bodies of getSize/getAlignment (DataLayout queries,
 // presumably) were dropped by the scrape.
 161 CharUnits getSize(llvm::Type *Type) const {
 163 }
 164 CharUnits getAlignment(llvm::Type *Type) const {
 166 }
 167 bool isZeroInitializable(const FieldDecl *FD) const {
 168 return Types.isZeroInitializable(FD->getType());
 169 }
 170 bool isZeroInitializable(const RecordDecl *RD) const {
 171 return Types.isZeroInitializable(RD);
 172 }
 // Appends a [i8 x Size] tail if Size is non-zero. NOTE(review): the
 // condition reads `if (.isZero())` — the `!Size` token was eaten by the
 // extraction; upstream is presumably `if (!Size.isZero())`.
 173 void appendPaddingBytes(CharUnits Size) {
 174 if (.isZero())
 175 FieldTypes.push_back(getByteArrayType(Size));
 176 }
 177 uint64_t getFieldBitOffset(const FieldDecl *FD) const {
 178 return Layout.getFieldOffset(FD->getFieldIndex());
 179 }
 180
 181 void setBitFieldInfo(const FieldDecl *FD, CharUnits StartOffset,
 182 llvm::Type *StorageType);
 183
 184 void lower(bool NonVirtualBaseType);
 185 void lowerUnion(bool isNonVirtualBaseType);
 186 void accumulateFields(bool isNonVirtualBaseType);
 // NOTE(review): the return type and remaining parameters of
 // accumulateBitFields were dropped (upstream returns a field_iterator and
 // takes Field/FieldEnd iterators — confirm).
 188 accumulateBitFields(bool isNonVirtualBaseType,
 191 void computeVolatileBitfields();
 192 void accumulateBases();
 193 void accumulateVPtrs();
 194 void accumulateVBases();
 195
 196
 197 bool hasOwnStorage(const CXXRecordDecl *Decl,
 198 const CXXRecordDecl *Query) const;
 199 void calculateZeroInit();
 200 CharUnits calculateTailClippingOffset(bool isNonVirtualBaseType) const;
 201 void checkBitfieldClipping(bool isNonVirtualBaseType) const;
 202
 203 void determinePacked(bool NVBaseType);
 204
 205 void insertPadding();
 206
 207 void fillOutputFields();
 208
 // Inputs/context for the lowering; Layout is the AST-side record layout.
 209 CodeGenTypes &Types;
 210 const ASTContext &Context;
 211 const RecordDecl *D;
 212 const CXXRecordDecl *RD;
 213 const ASTRecordLayout &Layout;
 214 const llvm::DataLayout &DataLayout;
 215
 // NOTE(review): template argument lost in extraction — presumably
 // std::vector<MemberInfo>; confirm upstream.
 216 std::vector Members;
 217
 // Outputs consumed by ComputeRecordLayout below.
 218 SmallVector<llvm::Type *, 16> FieldTypes;
 219 llvm::DenseMap<const FieldDecl *, unsigned> Fields;
 220 llvm::DenseMap<const FieldDecl *, CGBitFieldInfo> BitFields;
 221 llvm::DenseMap<const CXXRecordDecl *, unsigned> NonVirtualBases;
 222 llvm::DenseMap<const CXXRecordDecl *, unsigned> VirtualBases;
 223 bool IsZeroInitializable : 1;
 224 bool IsZeroInitializableAsBase : 1;
 225 bool Packed : 1;
 226private:
 // Non-copyable: holds references to shared CodeGen state.
 227 CGRecordLowering(const CGRecordLowering &) = delete;
 228 void operator =(const CGRecordLowering &) = delete;
 229};
230}
231
// NOTE(review): constructor member-initializer list only — the signature line
// (upstream ~line 232-233) and parts of the init list (235, 238) were dropped
// by the scrape. Visible: caches Context, the AST record layout for D, and the
// DataLayout, and starts with IsZeroInitializable = true.
 234 : Types(Types), Context(Types.getContext()), D(D),
 236 Layout(Types.getContext().getASTRecordLayout(D)),
 237 DataLayout(Types.getDataLayout()), IsZeroInitializable(true),
 239
// Records access information for the bitfield FD whose storage unit begins at
// StartOffset with LLVM type StorageType.
// NOTE(review): several lines dropped by the scrape (the CGBitFieldInfo
// declaration/lookup at 242-243, Size/IsSigned at 245, StorageOffset at ~247,
// and the big-endian offset flip body after line 254). Confirm upstream.
240void CGRecordLowering::setBitFieldInfo(
 241 const FieldDecl *FD, CharUnits StartOffset, llvm::Type *StorageType) {
 // Offset of the bitfield within its storage unit, in bits.
 244 Info.Offset = (unsigned)(getFieldBitOffset(FD) - Context.toBits(StartOffset));
 246 Info.StorageSize = (unsigned)DataLayout.getTypeAllocSizeInBits(StorageType);
 250
 251
 252
 253
 // Big-endian targets number bits from the other end; the adjustment body
 // that followed this guard is missing from the scrape.
 254 if (DataLayout.isBigEndian())
 256
 260}
261
// Main lowering driver. Visible flow: unions are delegated to lowerUnion();
// otherwise fields are accumulated, then (for C++ records) vptrs/bases/vbases,
// members are stable-sorted by offset, clipping is checked, a one-byte
// sentinel at Size is pushed so determinePacked/insertPadding can see the
// record end, and finally zero-init and the output maps are computed.
// NOTE(review): upstream lines 263-284 (the long explanatory comment and the
// `if (D->isUnion()) {` guard that the stray `}` at "288" closes) were dropped
// by the scrape — confirm against upstream before relying on this text.
262void CGRecordLowering::lower(bool NVBaseType) {
 263
 264
 265
 266
 267
 268
 269
 270
 271
 272
 273
 274
 275
 276
 277
 278
 279
 280
 281
 282
 // Union records take the dedicated path and return early.
 285 lowerUnion(NVBaseType);
 286 computeVolatileBitfields();
 287 return;
 288 }
 289 accumulateFields(NVBaseType);
 290
 // C++-only members: vtable/vbtable pointers and base subobjects.
 291 if (RD) {
 292 accumulateVPtrs();
 293 accumulateBases();
 294 if (Members.empty()) {
 295 appendPaddingBytes(Size);
 296 computeVolatileBitfields();
 297 return;
 298 }
 // Virtual bases only belong in the complete-object layout.
 299 if (!NVBaseType)
 300 accumulateVBases();
 301 }
 302 llvm::stable_sort(Members);
 303 checkBitfieldClipping(NVBaseType);
 // Temporary sentinel marking the record end; popped after padding insertion.
 304 Members.push_back(StorageInfo(Size, getIntNType(8)));
 305 determinePacked(NVBaseType);
 306 insertPadding();
 307 Members.pop_back();
 308 calculateZeroInit();
 309 fillOutputFields();
 310 computeVolatileBitfields();
 311}
312
// Lowers a union: picks a single "storage" member type (preferring higher
// alignment, then larger size) plus trailing padding up to the layout size.
// NOTE(review): scrape dropped several lines, including the RHS of LayoutSize
// (line 315), the bitfield-entry bookkeeping inside the first branch (330),
// and the trailing asserts' conditions (372-373, 376-377). Confirm upstream.
313void CGRecordLowering::lowerUnion(bool isNonVirtualBaseType) {
 314 CharUnits LayoutSize =
 316 llvm::Type *StorageType = nullptr;
 317 bool SeenNamedMember = false;
 318
 319
 320
 321
 322
 323 for (const auto *Field : D->fields()) {
 // Bitfield members: zero-length ones are skipped; oversized storage is
 // clipped to a byte array of the union's size.
 324 if (Field->isBitField()) {
 325 if (Field->isZeroLengthBitField())
 326 continue;
 327 llvm::Type *FieldType = getStorageType(Field);
 328 if (LayoutSize < getSize(FieldType))
 329 FieldType = getByteArrayType(LayoutSize);
 331 }
 // All union members live at index 0 of the LLVM struct.
 332 Fields[Field->getCanonicalDecl()] = 0;
 333 llvm::Type *FieldType = getStorageType(Field);
 334
 335
 336
 337
 338
 339
 // The first named member (directly or via an anonymous record) decides
 // zero-initializability of the union.
 340 if (!SeenNamedMember) {
 341 SeenNamedMember = Field->getIdentifier();
 342 if (!SeenNamedMember)
 343 if (const auto *FieldRD = Field->getType()->getAsRecordDecl())
 344 SeenNamedMember = FieldRD->findFirstNamedDataMember();
 345 if (SeenNamedMember && !isZeroInitializable(Field)) {
 346 IsZeroInitializable = IsZeroInitializableAsBase = false;
 347 StorageType = FieldType;
 348 }
 349 }
 350
 351
 // Once non-zero-initializable, the chosen StorageType is pinned.
 352 if (!IsZeroInitializable)
 353 continue;
 354
 // Prefer the member with the strictest alignment, breaking ties by size.
 355 if (!StorageType ||
 356 getAlignment(FieldType) > getAlignment(StorageType) ||
 357 (getAlignment(FieldType) == getAlignment(StorageType) &&
 358 getSize(FieldType) > getSize(StorageType)))
 359 StorageType = FieldType;
 360 }
 361
 // Empty union: just pad out to the layout size.
 362 if (!StorageType)
 363 return appendPaddingBytes(LayoutSize);
 364
 365
 366 if (LayoutSize < getSize(StorageType))
 367 StorageType = getByteArrayType(LayoutSize);
 368 FieldTypes.push_back(StorageType);
 369 appendPaddingBytes(LayoutSize - getSize(StorageType));
 370
 // NOTE(review): the assert conditions around this message were dropped.
 371 const auto StorageAlignment = getAlignment(StorageType);
 374 "Union's standard layout and no_unique_address layout must agree on "
 375 "packedness");
 378}
379
// Walks the record's fields, delegating runs of bitfields to
// accumulateBitFields and pushing one MemberInfo per ordinary field.
// NOTE(review): the loop header's first line (field_begin/field_end init,
// upstream 381-382) and the assert's `!Field->isBitField()` token were
// dropped by the scrape; lines 388/390/400 are also missing. Confirm upstream.
380void CGRecordLowering::accumulateFields(bool isNonVirtualBaseType) {
 383 Field != FieldEnd;) {
 384 if (Field->isBitField()) {
 385 Field = accumulateBitFields(isNonVirtualBaseType, Field, FieldEnd);
 386 assert((Field == FieldEnd || ->isBitField()) &&
 387 "Failed to accumulate all the bitfields");
 389
 391 } else {
 392
 393
 // Potentially-overlapping fields ([[no_unique_address]]) use the base
 // subobject type so tail padding can be reused.
 394 Members.push_back(MemberInfo(
 395 bitsToCharUnits(getFieldBitOffset(*Field)), MemberInfo::Field,
 396 Field->isPotentiallyOverlapping()
 397 ? getStorageType(Field->getType()->getAsCXXRecordDecl())
 398 : getStorageType(*Field),
 399 *Field));
 401 }
 402 }
 403}
404
405
406
407
408
// Accumulates a contiguous run of bitfield members into "access units" of
// storage, returning the iterator past the run. Two strategies are visible:
// (1) a discrete per-storage-type path for MS/ms_struct ABIs, and (2) a
// greedy best-fit search that grows a candidate access unit and installs the
// best one at aligned boundaries or barriers.
// NOTE(review): this function is the most heavily damaged by the scrape —
// the signature's iterator parameters, the declarations of Run/Begin/BestEnd/
// BitSizeSinceBegin/CharBits/ScissorOffset, and many guard conditions
// (e.g. at "434-435", "614-621", "637", "659", "705", "713", "717") are
// missing. Do not edit this block without diffing against upstream.
410CGRecordLowering::accumulateBitFields(bool isNonVirtualBaseType,
 // Discrete ABI path: each run of bitfields shares the storage type of the
 // run's first member; a new run starts when an offset passes the tail.
 413 if (isDiscreteBitFieldABI()) {
 414
 415
 416
 417
 418
 420
 421
 422
 423 uint64_t StartBitOffset, Tail = 0;
 424 for (; Field != FieldEnd && Field->isBitField(); ++Field) {
 // Zero-length bitfields end the current run under this ABI.
 425
 426 if (Field->isZeroLengthBitField()) {
 427 Run = FieldEnd;
 428 continue;
 429 }
 430 uint64_t BitOffset = getFieldBitOffset(*Field);
 432
 433
 434 if (Run == FieldEnd || BitOffset >= Tail) {
 436 StartBitOffset = BitOffset;
 437 Tail = StartBitOffset + DataLayout.getTypeAllocSizeInBits(Type);
 438
 439
 440
 441 Members.push_back(StorageInfo(bitsToCharUnits(StartBitOffset), Type));
 442 }
 443
 444
 // Each bitfield in the run shares the storage entry (Data == nullptr).
 445 Members.push_back(MemberInfo(bitsToCharUnits(StartBitOffset),
 446 MemberInfo::Field, nullptr, *Field));
 447 }
 449 }
 450
 451
 452
 453
 454
 455
 456
 457
 458
 459
 460
 461
 462
 463
 464
 465
 466
 467
 468
 469
 470
 471
 472
 473
 474
 475
 476
 477
 478
 479
 480
 481
 482
 483
 484
 485
 486
 487
 488
 489
 490
 491
 492
 493
 494
 495
 496
 497
 498
 499
 500
 501
 502
 503
 504
 // Upper bound for a single access unit (target register width; RHS lost
 // in the scrape — presumably from getRegisterWidth; confirm upstream).
 505 CharUnits RegSize =
 508
 509
 510
 512
 513
 514
 515
 516
 517
 519 CharUnits BeginOffset;
 521
 522
 523
 524
 525
 526
 528 CharUnits BestEndOffset;
 529 bool BestClipped;
 530
 // Greedy search loop: extend the candidate unit field by field until an
 // aligned boundary or barrier forces a decision.
 531 for (;;) {
 532
 533
 534
 535
 536 bool AtAlignedBoundary = false;
 537 bool Barrier = false;
 538
 539 if (Field != FieldEnd && Field->isBitField()) {
 540 uint64_t BitOffset = getFieldBitOffset(*Field);
 // Start a fresh span at a char-aligned offset.
 541 if (Begin == FieldEnd) {
 542
 544 BestEnd = Begin;
 545
 546 assert((BitOffset % CharBits) == 0 && "Not at start of char");
 547 BeginOffset = bitsToCharUnits(BitOffset);
 548 BitSizeSinceBegin = 0;
 549 } else if ((BitOffset % CharBits) != 0) {
 550
 551
 552
 553
 554
 555
 // Mid-char offsets must continue the current contiguous span.
 556 assert(BitOffset == Context.toBits(BeginOffset) + BitSizeSinceBegin &&
 557 "Concatenating non-contiguous bitfields");
 558 } else {
 559
 560
 561
 // Zero-length bitfields are barriers: nothing may span them.
 562 if (Field->isZeroLengthBitField())
 563 Barrier = true;
 564 AtAlignedBoundary = true;
 565 }
 566 } else {
 567
 568
 // Ran off the end of the bitfield run.
 569 if (Begin == FieldEnd)
 570 break;
 571
 572 Barrier = true;
 573 AtAlignedBoundary = true;
 574 }
 575
 576
 577
 578
 579 bool InstallBest = false;
 580 if (AtAlignedBoundary) {
 581
 582
 583
 584
 585
 // Bits accumulated so far, rounded up to whole chars.
 586 CharUnits AccessSize = bitsToCharUnits(BitSizeSinceBegin + CharBits - 1);
 587 if (BestEnd == Begin) {
 588
 589
 // First candidate: provisionally clipped to a byte array.
 591 BestEndOffset = BeginOffset + AccessSize;
 592
 593 BestClipped = true;
 594 if (!BitSizeSinceBegin)
 595
 596
 597 InstallBest = true;
 598 } else if (AccessSize > RegSize)
 599
 600
 601 InstallBest = true;
 602
 603 if (!InstallBest) {
 604
 605
 // Try a naturally-sized integer access for the candidate.
 606 llvm::Type *Type = getIntNType(Context.toBits(AccessSize));
 608
 609
 610
 611
 612
 // NOTE(review): the alignment/volatility checks that set InstallBest
 // here (upstream 614-621) lost their conditions in the scrape.
 613 CharUnits Align = getAlignment(Type);
 615
 616
 617 InstallBest = true;
 619
 620
 621 InstallBest = true;
 622
 623 if (InstallBest && BestEnd == Field)
 624
 625
 // An exactly-sized integer access needs no byte-array clipping.
 626 if (getSize(Type) == AccessSize)
 627 BestClipped = false;
 628 }
 629
 630 if (!InstallBest) {
 631
 632
 633
 634
 // Find how far the unit may legally extend: up to the next
 // byte-aligned storage, or the tail-clipping scissor offset.
 635 CharUnits LimitOffset;
 636 for (auto Probe = Field; Probe != FieldEnd; ++Probe)
 638
 639 assert((getFieldBitOffset(*Probe) % CharBits) == 0 &&
 640 "Next storage is not byte-aligned");
 641 LimitOffset = bitsToCharUnits(getFieldBitOffset(*Probe));
 642 goto FoundLimit;
 643 }
 644
 645
 // Scissor offset is computed lazily, once.
 646 if (ScissorOffset.isZero()) {
 647 ScissorOffset = calculateTailClippingOffset(isNonVirtualBaseType);
 648 assert(!ScissorOffset.isZero() && "Tail clipping at zero");
 649 }
 650
 651 LimitOffset = ScissorOffset;
 652 FoundLimit:;
 653
 // A full naturally-sized unit that fits before the limit becomes the
 // new best candidate, unclipped.
 654 CharUnits TypeSize = getSize(Type);
 655 if (BeginOffset + TypeSize <= LimitOffset) {
 656
 657
 658 BestEndOffset = BeginOffset + TypeSize;
 660 BestClipped = false;
 661 }
 662
 663 if (Barrier)
 664
 665 InstallBest = true;
 // -ffine-grained-bitfield-accesses: never merge across boundaries.
 666 else if (Types.getCodeGenOpts().FineGrainedBitfieldAccesses)
 667
 668 InstallBest = true;
 669 else
 670
 671
 672
 // Keep growing: pretend the span reaches the limit.
 673 BitSizeSinceBegin = Context.toBits(LimitOffset - BeginOffset);
 674 }
 675 }
 676 }
 677
 // Commit the best candidate: one storage entry plus one (storage-less)
 // member entry per non-zero-length bitfield it covers.
 678 if (InstallBest) {
 679 assert((Field == FieldEnd || ->isBitField() ||
 680 (getFieldBitOffset(*Field) % CharBits) == 0) &&
 681 "Installing but not at an aligned bitfield or limit");
 682 CharUnits AccessSize = BestEndOffset - BeginOffset;
 683 if (!AccessSize.isZero()) {
 684
 685
 686
 687 llvm::Type *Type;
 688 if (BestClipped) {
 689 assert(getSize(getIntNType(Context.toBits(AccessSize))) >
 690 AccessSize &&
 691 "Clipped access need not be clipped");
 692 Type = getByteArrayType(AccessSize);
 693 } else {
 694 Type = getIntNType(Context.toBits(AccessSize));
 695 assert(getSize(Type) == AccessSize &&
 696 "Unclipped access must be clipped");
 697 }
 698 Members.push_back(StorageInfo(BeginOffset, Type));
 699 for (; Begin != BestEnd; ++Begin)
 700 if (!Begin->isZeroLengthBitField())
 701 Members.push_back(
 702 MemberInfo(BeginOffset, MemberInfo::Field, nullptr, *Begin));
 703 }
 704
 // Restart the search at the first uninstalled field.
 706 Begin = FieldEnd;
 707 } else {
 708 assert(Field != FieldEnd && Field->isBitField() &&
 709 "Accumulating past end of bitfields");
 710 assert(!Barrier && "Accumulating across barrier");
 711
 712 BitSizeSinceBegin += Field->getBitWidthValue();
 714 }
 715 }
 716
 718}
719
// Adds the primary base (at offset zero) and all non-virtual bases to
// Members. NOTE(review): the guard opening the first block (upstream 722,
// presumably checking for a non-virtual primary base) and the filter/push
// lines 735-737 inside the loop were dropped by the scrape.
720void CGRecordLowering::accumulateBases() {
 721
 // Primary base lives at offset zero of the derived object.
 723 const CXXRecordDecl *BaseDecl = Layout.getPrimaryBase();
 724 Members.push_back(MemberInfo(CharUnits::Zero(), MemberInfo::Base,
 725 getStorageType(BaseDecl), BaseDecl));
 726 }
 727
 728 for (const auto &Base : RD->bases()) {
 729 if (Base.isVirtual())
 730 continue;
 731
 732
 733
 734 const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
 738 MemberInfo::Base, getStorageType(BaseDecl), BaseDecl));
 739 }
 740}
741
742
743
744
745
746
747
748
749
750
751
752
753
754
// AAPCS volatile bitfield handling: for each recorded bitfield, try to widen
// the access to the declared type's container size, rejecting the rewrite if
// it would read past the record or overlap a non-bitfield member.
// NOTE(review): badly damaged by the scrape — the enabling guard (756), the
// ResLTy declaration (762/765), the RHSs of OldOffset/AbsoluteOffset/
// StorageOffset/End/FOffset, and the final Info rewrite (852-855) are all
// missing. Treat every detail here as "confirm upstream".
755void CGRecordLowering::computeVolatileBitfields() {
 757 return;
 758
 759 for (auto &I : BitFields) {
 760 const FieldDecl *Field = I.first;
 761 CGBitFieldInfo &Info = I.second;
 763
 764
 // Skip if the storage is already no smaller than the declared type.
 766 ResLTy->getPrimitiveSizeInBits())
 767 continue;
 768
 769
 770
 771
 772
 773 const unsigned OldOffset =
 775
 776 const unsigned AbsoluteOffset =
 778
 779
 // Target container size = primitive size of the declared type.
 780 const unsigned StorageSize = ResLTy->getPrimitiveSizeInBits();
 781
 782
 783 if (Info.StorageSize == StorageSize && (OldOffset % StorageSize == 0))
 784 continue;
 785
 786
 // Offset within the naturally-aligned container.
 787 unsigned Offset = AbsoluteOffset & (StorageSize - 1);
 788
 789
 790
 791
 // The widened container must still hold the whole bitfield.
 792 if (Offset + Info.Size > StorageSize)
 793 continue;
 794
 795
 // Big-endian bit numbering is mirrored.
 796 if (isBE())
 797 Offset = StorageSize - (Offset + Info.Size);
 798
 799 const CharUnits StorageOffset =
 801 const CharUnits End = StorageOffset +
 804
 805 const ASTRecordLayout &Layout =
 807
 // Never read past the end of the record.
 808 const CharUnits RecordSize = Layout.getSize();
 809 if (End >= RecordSize)
 810 continue;
 811
 812
 // The widened access must not overlap any non-bitfield member.
 813 bool Conflict = false;
 814 for (const auto *F : D->fields()) {
 815
 // Other (non-zero-length) bitfields may share the container.
 816 if (F->isBitField() && !F->isZeroLengthBitField())
 817 continue;
 818
 821
 822
 823
 824
 825
 // Zero-length bitfields act as padding barriers.
 826 if (F->isZeroLengthBitField()) {
 827 if (End > FOffset && StorageOffset < FOffset) {
 828 Conflict = true;
 829 break;
 830 }
 831 }
 832
 833 const CharUnits FEnd =
 834 FOffset +
 836 Types.ConvertTypeForMem(F->getType())->getPrimitiveSizeInBits()) -
 838
 // Disjoint ranges are fine.
 839 if (End < FOffset || FEnd < StorageOffset)
 840 continue;
 841
 842
 843 Conflict = true;
 844 break;
 845 }
 846
 847 if (Conflict)
 848 continue;
 849
 850
 // NOTE(review): the lines writing the widened Volatile* fields into
 // Info (upstream 852-855) are missing here.
 851
 856 }
 857}
858
// Adds vtable/vbtable pointer slots as opaque pointers.
// NOTE(review): the guards (upstream 860 `if (Layout.hasOwnVFPtr())` and 864
// `if (Layout.hasOwnVBPtr())` — per the Doxygen index appended to this
// scrape) and the first MemberInfo's offset/kind line (862) were dropped.
859void CGRecordLowering::accumulateVPtrs() {
 861 Members.push_back(
 863 llvm::PointerType::getUnqual(Types.getLLVMContext())));
 865 Members.push_back(
 866 MemberInfo(Layout.getVBPtrOffset(), MemberInfo::VBPtr,
 867 llvm::PointerType::getUnqual(Types.getLLVMContext())));
 868}
869
// Computes the "scissor" offset past which bitfield storage must not extend:
// on overlapping-vbase ABIs, the lowest virtual-base offset that occupies its
// own storage. NOTE(review): the initial ScissorOffset value (upstream 873/875,
// presumably the data size or record size), the empty-base filter at 883, and
// the min() second argument (890, presumably getVBaseClassOffset) were
// dropped by the scrape.
870CharUnits
871CGRecordLowering::calculateTailClippingOffset(bool isNonVirtualBaseType) const {
 // Plain C structs have no virtual bases to clip against.
 872 if (!RD)
 874
 876
 877
 878
 879
 // Only the complete-object layout on Itanium-style ABIs can place vbases
 // into tail padding.
 880 if (!isNonVirtualBaseType && isOverlappingVBaseABI())
 881 for (const auto &Base : RD->vbases()) {
 882 const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
 884 continue;
 885
 886
 // Nearly-empty vbases that share storage don't constrain the tail.
 887 if (Context.isNearlyEmpty(BaseDecl) && !hasOwnStorage(RD, BaseDecl))
 888 continue;
 889 ScissorOffset = std::min(ScissorOffset,
 891 }
 892
 893 return ScissorOffset;
 894}
895
// Adds virtual-base entries to Members. Nearly-empty vbases without their own
// storage get a storage-less (Data == nullptr) entry; others get their base
// subobject type, preceded by an i32 vtordisp slot when the vbase offsets map
// says one is required. NOTE(review): the empty-base filter (899), the Offset
// declaration (901, presumably Layout.getVBaseClassOffset(BaseDecl)), the
// isNearlyEmpty condition (905), and the vtordisp entry's offset expression
// (913) were dropped by the scrape.
896void CGRecordLowering::accumulateVBases() {
 897 for (const auto &Base : RD->vbases()) {
 898 const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
 900 continue;
 902
 903
 // Overlapping-ABI nearly-empty vbases may share storage: record the
 // index mapping but emit no LLVM field for them.
 904 if (isOverlappingVBaseABI() &&
 906 !hasOwnStorage(RD, BaseDecl)) {
 907 Members.push_back(MemberInfo(Offset, MemberInfo::VBase, nullptr,
 908 BaseDecl));
 909 continue;
 910 }
 911
 // MS-style vtordisp: a 32-bit slot immediately before the vbase.
 912 if (Layout.getVBaseOffsetsMap().find(BaseDecl)->second.hasVtorDisp())
 914 getIntNType(32)));
 915 Members.push_back(MemberInfo(Offset, MemberInfo::VBase,
 916 getStorageType(BaseDecl), BaseDecl));
 917 }
 918}
919
// Returns false if Query is (transitively) the primary base of Decl — i.e.
// Query shares storage with some class in Decl's hierarchy — true otherwise.
// NOTE(review): the statements establishing the `return false` condition
// (upstream 922-923, presumably fetching Decl's layout and comparing its
// virtual primary base against Query) were dropped by the scrape.
920bool CGRecordLowering::hasOwnStorage(const CXXRecordDecl *Decl,
 921 const CXXRecordDecl *Query) const {
 924 return false;
 // Recurse through all direct bases.
 925 for (const auto &Base : Decl->bases())
 926 if (!hasOwnStorage(Base.getType()->getAsCXXRecordDecl(), Query))
 927 return false;
 928 return true;
 929}
930
// Scans Members and clears the zero-initializability flags: a non-zero-
// initializable field clears both flags; a non-zero-initializable (v)base
// clears IsZeroInitializable, and additionally IsZeroInitializableAsBase when
// it is a non-virtual base. The loop stops once IsZeroInitializableAsBase is
// false. NOTE(review): scrape damage — the iterator's template argument
// (presumably std::vector<MemberInfo>) and the `!Member->FD` token in the
// first condition were dropped.
931void CGRecordLowering::calculateZeroInit() {
 932 for (std::vector::const_iterator Member = Members.begin(),
 933 MemberEnd = Members.end();
 934 IsZeroInitializableAsBase && Member != MemberEnd; ++Member) {
 935 if (Member->Kind == MemberInfo::Field) {
 936 if (->FD || isZeroInitializable(Member->FD))
 937 continue;
 938 IsZeroInitializable = IsZeroInitializableAsBase = false;
 939 } else if (Member->Kind == MemberInfo::Base ||
 940 Member->Kind == MemberInfo::VBase) {
 941 if (isZeroInitializable(Member->RD))
 942 continue;
 943 IsZeroInitializable = false;
 // Virtual bases don't affect the base-subobject flag.
 944 if (Member->Kind == MemberInfo::Base)
 945 IsZeroInitializableAsBase = false;
 946 }
 947 }
 948}
949
950
// Debug-only invariant check: storage entries must be non-overlapping
// (each starts at or after the previous tail) and no entry may straddle the
// tail-clipping scissor offset. NOTE(review): the declaration/initialization
// of `Tail` (upstream line 954) was dropped by the scrape.
951void CGRecordLowering::checkBitfieldClipping(bool IsNonVirtualBaseType) const {
952#ifndef NDEBUG
 953 auto ScissorOffset = calculateTailClippingOffset(IsNonVirtualBaseType);
 955 for (const auto &M : Members) {
 // Only entries that own storage participate.
 956
 957 if (!M.Data)
 958 continue;
 959
 960 assert(M.Offset >= Tail && "Bitfield access unit is not clipped");
 961 Tail = M.Offset + getSize(M.Data);
 962 assert((Tail <= ScissorOffset || M.Offset >= ScissorOffset) &&
 963 "Bitfield straddles scissor offset");
 964 }
965#endif
966}
967
// Decides whether the LLVM struct must be packed: any member whose offset is
// not a multiple of its natural alignment forces packing, as does a record
// size that is not a multiple of the accumulated alignment. Finally the
// trailing sentinel member's type is set to an integer of the record
// alignment so insertPadding sees the padded size. NOTE(review): scrape
// damage — the early-return guard (969), Alignment/NVAlignment declarations
// (971-972, 974), the `!Member.Data` skip (976), the `!Member` token at
// "980", and the Packed assignments (981, 989, 993-996) are all missing.
968void CGRecordLowering::determinePacked(bool NVBaseType) {
 970 return;
 973 CharUnits NVSize =
 975 for (const MemberInfo &Member : Members) {
 977 continue;
 978
 979
 // A misaligned member forces the packed representation.
 980 if (.Offset.isMultipleOf(getAlignment(Member.Data)))
 // Track the non-virtual-subobject alignment separately.
 982 if (Member.Offset < NVSize)
 983 NVAlignment = std::max(NVAlignment, getAlignment(Member.Data));
 984 Alignment = std::max(Alignment, getAlignment(Member.Data));
 985 }
 986
 987
 // The sentinel (record size) must also be alignment-compatible.
 988 if (!Members.back().Offset.isMultipleOf(Alignment))
 990
 991
 992
 995
 997 Members.back().Data = getIntNType(Context.toBits(Alignment));
998}
999
// Inserts explicit byte-array padding entries into gaps between consecutive
// storage-owning members, then re-sorts Members by offset. NOTE(review):
// scrape damage — the running `Size` declaration (1002), the `!Member.Data`
// skip (1004), and the alignment term of the gap test (1010) are missing.
1000void CGRecordLowering::insertPadding() {
 // (offset, size) pairs for each gap found.
 1001 std::vector<std::pair<CharUnits, CharUnits> > Padding;
 1003 for (const MemberInfo &Member : Members) {
 1005 continue;
 1006 CharUnits Offset = Member.Offset;
 1007 assert(Offset >= Size);
 1008
 // Record the gap between the previous member's end and this offset.
 1009 if (Offset !=
 1011 Padding.push_back(std::make_pair(Size, Offset - Size));
 1012 Size = Offset + getSize(Member.Data);
 1013 }
 1014 if (Padding.empty())
 1015 return;
 1016
 // Materialize the gaps as byte arrays and restore offset order.
 1017 for (const auto &Pad : Padding)
 1018 Members.push_back(StorageInfo(Pad.first, getByteArrayType(Pad.second)));
 1019 llvm::stable_sort(Members);
1020}
1021
// Converts the sorted Members list into the output maps: FieldTypes gets one
// entry per storage-owning member, and Fields/NonVirtualBases/VirtualBases
// record each declaration's LLVM field index. Bitfields additionally get
// their CGBitFieldInfo computed via setBitFieldInfo. NOTE(review): scrape
// damage — the `if (Member.Data)` guard (1024), the `if (Member.FD)` guard
// (1027), and the bitfield branch's opening lines (1030-1031, including the
// assert condition for the quoted message) are missing.
1022void CGRecordLowering::fillOutputFields() {
 1023 for (const MemberInfo &Member : Members) {
 1025 FieldTypes.push_back(Member.Data);
 1026 if (Member.Kind == MemberInfo::Field) {
 1028 Fields[Member.FD->getCanonicalDecl()] = FieldTypes.size() - 1;
 1029
 // Storage-less members are bitfields; record their access info
 // against the storage type just pushed.
 1032 "Member.Data is a nullptr so Member.FD should not be");
 1033 setBitFieldInfo(Member.FD, Member.Offset, FieldTypes.back());
 1034 }
 1035 } else if (Member.Kind == MemberInfo::Base)
 1036 NonVirtualBases[Member.RD] = FieldTypes.size() - 1;
 1037 else if (Member.Kind == MemberInfo::VBase)
 1038 VirtualBases[Member.RD] = FieldTypes.size() - 1;
 1039 }
1040}
1041
// NOTE(review): body fragment of CGBitFieldInfo::MakeInfo — per the Doxygen
// index appended to this scrape, the (missing) signature is:
//   static CGBitFieldInfo MakeInfo(CodeGenTypes &Types, const FieldDecl *FD,
//       uint64_t Offset, uint64_t Size, uint64_t StorageSize,
//       CharUnits StorageOffset)
// The signature (upstream 1042-1046), TypeSizeInBytes/IsSigned setup, the
// big-endian offset adjustment (1070-1075), and the final return (1078) were
// all dropped. Visible: the declared bit-width is clamped to the type size.
 1047
 1048
 1049
 1053 uint64_t TypeSizeInBits = Types.getContext().toBits(TypeSizeInBytes);
 1054
 1056
 // Clamp over-wide bitfields (width > type size) to the type size.
 1057 if (Size > TypeSizeInBits) {
 1058
 1059
 1060
 1061
 1062
 1063
 1064
 1065
 1066
 1067 Size = TypeSizeInBits;
 1068 }
 1069
 1070
 1071
 1072
 1073
 1076 }
 1077
1079}
1080
// CodeGenTypes::ComputeRecordLayout: runs CGRecordLowering for the complete
// object (and, when the non-virtual size differs, again for the base
// subobject), installs the field types into Ty, and packages the results in
// a CGRecordLayout. The NDEBUG block cross-checks LLVM offsets/sizes against
// the AST layout. NOTE(review): scrape damage — the return type's template
// argument (presumably std::unique_ptr<CGRecordLayout>), the function
// parameter list (upstream 1082), the `if (D->isCXXRecordDecl())`-style guard
// before "1090", and several loop/branch headers in the NDEBUG section
// (1154-1157, 1160, 1165, 1173, 1182, 1187, 1189, 1197, 1199, 1204) are
// missing. Confirm all of these upstream before editing.
1081std::unique_ptr
 1083 CGRecordLowering Builder(*this, D, false);
 1084
 1085 Builder.lower(false);
 1086
 1087
 // Base-subobject type: identical to Ty unless the record has virtual
 // bases (non-virtual size != complete size).
 1088 llvm::StructType *BaseTy = nullptr;
 1090 BaseTy = Ty;
 1091 if (Builder.Layout.getNonVirtualSize() != Builder.Layout.getSize()) {
 1092 CGRecordLowering BaseBuilder(*this, D, Builder.Packed);
 1093 BaseBuilder.lower(true);
 1094 BaseTy = llvm::StructType::create(
 1095 getLLVMContext(), BaseBuilder.FieldTypes, "", BaseBuilder.Packed);
 1097
 1098
 1099 assert(Builder.Packed == BaseBuilder.Packed &&
 1100 "Non-virtual and complete types must agree on packedness");
 1101 }
 1102 }
 1103
 1104
 1105
 1106
 1107 Ty->setBody(Builder.FieldTypes, Builder.Packed);
 1108
 // NOTE(review): make_unique's template argument (<CGRecordLayout>) was
 // eaten by the HTML extraction.
 1109 auto RL = std::make_unique(
 1110 Ty, BaseTy, (bool)Builder.IsZeroInitializable,
 1111 (bool)Builder.IsZeroInitializableAsBase);
 1112
 // Transfer the index maps out of the (throwaway) builder.
 1113 RL->NonVirtualBases.swap(Builder.NonVirtualBases);
 1114 RL->CompleteObjectVirtualBases.swap(Builder.VirtualBases);
 1115
 1116
 1117 RL->FieldInfo.swap(Builder.Fields);
 1118
 1119
 1120 RL->BitFields.swap(Builder.BitFields);
 1121
 1122
 // -fdump-record-layouts support.
 1123 if (getContext().getLangOpts().DumpRecordLayouts) {
 1124 llvm::outs() << "\n*** Dumping IRgen Record Layout\n";
 1125 llvm::outs() << "Record: ";
 1126 D->dump(llvm::outs());
 1127 llvm::outs() << "\nLayout: ";
 1128 RL->print(llvm::outs());
 1129 }
 1130
1131#ifndef NDEBUG
 // Consistency checks: LLVM type sizes and field offsets must match the
 // AST record layout exactly.
 1132
 1134
 1135 uint64_t TypeSizeInBits = getContext().toBits(Layout.getSize());
 1136 assert(TypeSizeInBits == getDataLayout().getTypeAllocSizeInBits(Ty) &&
 1137 "Type size mismatch!");
 1138
 1139 if (BaseTy) {
 1141
 1142 uint64_t AlignedNonVirtualTypeSizeInBits =
 1143 getContext().toBits(NonVirtualSize);
 1144
 1145 assert(AlignedNonVirtualTypeSizeInBits ==
 1146 getDataLayout().getTypeAllocSizeInBits(BaseTy) &&
 1147 "Type size mismatch!");
 1148 }
 1149
 1150
 1151 llvm::StructType *ST = RL->getLLVMType();
 1152 const llvm::StructLayout *SL = getDataLayout().getStructLayout(ST);
 1153
 // Per-field checks: non-bitfields must sit at their AST offset; the
 // bitfield checks below validate CGBitFieldInfo against the storage.
 1156 for (unsigned i = 0, e = AST_RL.getFieldCount(); i != e; ++i, ++it) {
 1158
 1159
 1161 continue;
 1162
 1163
 1164
 1166 unsigned FieldNo = RL->getLLVMFieldNo(FD);
 1167 assert(AST_RL.getFieldOffset(i) == SL->getElementOffsetInBits(FieldNo) &&
 1168 "Invalid field offset!");
 1169 continue;
 1170 }
 1171
 1172
 1174 continue;
 1175
 1176 const CGBitFieldInfo &Info = RL->getBitFieldInfo(FD);
 1177 llvm::Type *ElementTy = ST->getTypeAtIndex(RL->getLLVMFieldNo(FD));
 1178
 1179
 1180
 1181
 1183
 1184
 1185
 1186
 // Union bitfields: must end flush with storage (BE) or start at bit 0
 // (LE), and fit inside the union.
 1188 assert(static_cast<unsigned>(Info.Offset + Info.Size) ==
 1190 "Big endian union bitfield does not end at the back");
 1191 else
 1192 assert(Info.Offset == 0 &&
 1193 "Little endian union bitfield with a non-zero offset");
 1194 assert(Info.StorageSize <= SL->getSizeInBits() &&
 1195 "Union not large enough for bitfield storage");
 1196 } else {
 1198 getDataLayout().getTypeAllocSizeInBits(ElementTy) ||
 1200 getDataLayout().getTypeAllocSizeInBits(ElementTy)) &&
 1201 "Storage size does not match the element type size");
 1202 }
 1203 assert(Info.Size > 0 && "Empty bitfield!");
 1205 "Bitfield outside of its allocated storage");
 1206 }
1207#endif
 1208
 1209 return RL;
1210}
1211
// NOTE(review): body of CGRecordLayout::print(raw_ostream &OS) const — the
// signature line (upstream 1212, per the Doxygen index appended to this
// scrape) was dropped. Dumps the LLVM types, zero-init flag, and the bitfield
// infos sorted by their LLVM field index.
 1213 OS << "<CGRecordLayout\n";
 1214 OS << " LLVMType:" << *CompleteObjectType << "\n";
 1215 if (BaseSubobjectType)
 1216 OS << " NonVirtualBaseLLVMType:" << *BaseSubobjectType << "\n";
 1217 OS << " IsZeroInitializable:" << IsZeroInitializable << "\n";
 1218 OS << " BitFields:[\n";
 1219
 1220
 // Pair each bitfield with its field index so output order is stable.
 // NOTE(review): the index-computation loop header (upstream 1223/1225,
 // walking the record's fields to find BitField.first) was dropped.
 1221 std::vector<std::pair<unsigned, const CGBitFieldInfo*> > BFIs;
 1222 for (const auto &BitField : BitFields) {
 1224 unsigned Index = 0;
 1226 *it2 != BitField.first; ++it2)
 1227 ++Index;
 1228 BFIs.push_back(std::make_pair(Index, &BitField.second));
 1229 }
 1230 llvm::array_pod_sort(BFIs.begin(), BFIs.end());
 1231 for (auto &BFI : BFIs) {
 1232 OS.indent(4);
 1233 BFI.second->print(OS);
 1234 OS << "\n";
 1235 }
 1236
 1237 OS << "]>\n";
1238}
1239
// NOTE(review): body of CGRecordLayout::dump() const (signature at upstream
// 1240 dropped by the scrape) — prints to stderr.
 1241 print(llvm::errs());
1242}
1243
// NOTE(review): body of CGBitFieldInfo::print(raw_ostream &OS) const — the
// signature (upstream 1244) and the lines printing StorageSize and the
// closing ">" (1247, 1249-1251) were dropped by the scrape.
 1245 OS << "<CGBitFieldInfo"
 1246 << " Offset:" << Offset << " Size:" << Size << " IsSigned:" << IsSigned
 1248 << " StorageOffset:" << StorageOffset.getQuantity()
1252}
1253
// NOTE(review): body of CGBitFieldInfo::dump() const (signature at upstream
// 1254 dropped by the scrape) — prints to stderr.
 1255 print(llvm::errs());
1256}
Defines the clang::ASTContext interface.
static bool isAAPCS(const TargetInfo &targetInfo)
Helper method to check if the underlying ABI is AAPCS.
Defines the C++ Decl subclasses, other than those for templates (found in DeclTemplate....
static void print(llvm::raw_ostream &OS, const T &V, ASTContext &ASTCtx, QualType Ty)
const ASTRecordLayout & getASTRecordLayout(const RecordDecl *D) const
Get or compute information about the layout of the specified record (struct/union/class) D,...
bool isNearlyEmpty(const CXXRecordDecl *RD) const
int64_t toBits(CharUnits CharSize) const
Convert a size in characters to a size in bits.
const TargetInfo & getTargetInfo() const
CharUnits toCharUnitsFromBits(int64_t BitSize) const
Convert a size in bits to a size in characters.
uint64_t getCharWidth() const
Return the size of the character type, in bits.
ASTRecordLayout - This class contains layout information for one RecordDecl, which is a struct/union/...
bool hasOwnVFPtr() const
hasOwnVFPtr - Does this class provide its own virtual-function table pointer, rather than inheriting ...
CharUnits getAlignment() const
getAlignment - Get the record alignment in characters.
bool hasOwnVBPtr() const
hasOwnVBPtr - Does this class provide its own virtual-base table pointer, rather than inheriting one ...
CharUnits getSize() const
getSize - Get the record size in characters.
unsigned getFieldCount() const
getFieldCount - Get the number of fields in the layout.
uint64_t getFieldOffset(unsigned FieldNo) const
getFieldOffset - Get the offset of the given field index, in bits.
CharUnits getVBPtrOffset() const
getVBPtrOffset - Get the offset for virtual base table pointer.
CharUnits getDataSize() const
getDataSize() - Get the record data size, which is the record size without tail padding,...
CharUnits getBaseClassOffset(const CXXRecordDecl *Base) const
getBaseClassOffset - Get the offset, in chars, for the given base class.
CharUnits getVBaseClassOffset(const CXXRecordDecl *VBase) const
getVBaseClassOffset - Get the offset, in chars, for the given base class.
const VBaseOffsetsMapTy & getVBaseOffsetsMap() const
const CXXRecordDecl * getPrimaryBase() const
getPrimaryBase - Get the primary base for this record.
bool isPrimaryBaseVirtual() const
isPrimaryBaseVirtual - Get whether the primary base for this record is virtual or not.
CharUnits getNonVirtualSize() const
getNonVirtualSize - Get the non-virtual size (in chars) of an object, which is the size of the object...
Represents a C++ struct/union/class.
CharUnits - This is an opaque type for sizes expressed in character units.
bool isZero() const
isZero - Test whether the quantity equals zero.
QuantityType getQuantity() const
getQuantity - Get the raw integer representation of this quantity.
static CharUnits One()
One - Construct a CharUnits quantity of one.
bool isMultipleOf(CharUnits N) const
Test whether this is a multiple of the other value.
static CharUnits fromQuantity(QuantityType Quantity)
fromQuantity - Construct a CharUnits quantity from a raw integer type.
static CharUnits Zero()
Zero - Construct a CharUnits quantity of zero.
void dump() const
Definition CGRecordLayoutBuilder.cpp:1240
void print(raw_ostream &OS) const
Definition CGRecordLayoutBuilder.cpp:1212
This class organizes the cross-module state that is used while lowering AST types to LLVM types.
const CodeGenOptions & getCodeGenOpts() const
ASTContext & getContext() const
std::unique_ptr< CGRecordLayout > ComputeRecordLayout(const RecordDecl *D, llvm::StructType *Ty)
Compute a new LLVM record layout object for the given record.
Definition CGRecordLayoutBuilder.cpp:1082
llvm::Type * ConvertTypeForMem(QualType T)
ConvertTypeForMem - Convert type T into a llvm::Type.
llvm::LLVMContext & getLLVMContext()
const llvm::DataLayout & getDataLayout() const
void addRecordTypeName(const RecordDecl *RD, llvm::StructType *Ty, StringRef suffix)
addRecordTypeName - Compute a name from the given record decl with an optional suffix and name the gi...
DeclContext * getParent()
getParent - Returns the containing DeclContext.
Represents a member of a struct/union/class.
bool isBitField() const
Determines whether this field is a bitfield.
unsigned getBitWidthValue() const
Computes the bit width of this field, if this is a bit field.
unsigned getFieldIndex() const
Returns the index of this field within its record, as appropriate for passing to ASTRecordLayout::get...
FieldDecl * getCanonicalDecl() override
Retrieves the canonical declaration of this field.
DeclarationName getDeclName() const
Get the actual, stored name of the declaration, which may be a special name.
Represents a struct/union/class.
field_iterator field_end() const
field_range fields() const
specific_decl_iterator< FieldDecl > field_iterator
field_iterator field_begin() const
virtual unsigned getRegisterWidth() const
Return the "preferred" register width on this target.
bool hasCheapUnalignedBitFieldAccess() const
Return true iff unaligned accesses are cheap.
bool isSignedIntegerOrEnumerationType() const
Determines whether this is an integer type that is signed or an enumeration types whose underlying ty...
bool isEmptyRecordForLayout(const ASTContext &Context, QualType T)
isEmptyRecordForLayout - Return true iff a structure contains only empty base classes (per isEmptyRec...
@ Decl
The l-value was an access to a declared entity or something equivalently strong, like the address of ...
bool isEmptyFieldForLayout(const ASTContext &Context, const FieldDecl *FD)
isEmptyFieldForLayout - Return true iff the field is "empty", that is, either a zero-width bit-field ...
The JSON file list parser is used to communicate input to InstallAPI.
bool isa(CodeGen::Address addr)
bool operator<(DeclarationName LHS, DeclarationName RHS)
Ordering on two declaration names.
@ Type
The name was classified as a type.
Structure with information about how a bitfield should be accessed.
CharUnits StorageOffset
The offset of the bitfield storage from the start of the struct.
CharUnits VolatileStorageOffset
The offset of the bitfield storage from the start of the struct.
void dump() const
Definition CGRecordLayoutBuilder.cpp:1254
unsigned VolatileOffset
The offset within a contiguous run of bitfields that are represented as a single "field" within the L...
unsigned Offset
The offset within a contiguous run of bitfields that are represented as a single "field" within the L...
unsigned VolatileStorageSize
The storage size in bits which should be used when accessing this bitfield.
void print(raw_ostream &OS) const
Definition CGRecordLayoutBuilder.cpp:1244
unsigned Size
The total size of the bit-field, in bits.
unsigned StorageSize
The storage size in bits which should be used when accessing this bitfield.
unsigned IsSigned
Whether the bit-field is signed.
static CGBitFieldInfo MakeInfo(class CodeGenTypes &Types, const FieldDecl *FD, uint64_t Offset, uint64_t Size, uint64_t StorageSize, CharUnits StorageOffset)
Given a bit-field decl, build an appropriate helper object for accessing that field (which is expecte...
Definition CGRecordLayoutBuilder.cpp:1042