clang: lib/CodeGen/CGRecordLayoutBuilder.cpp Source File
//===--- CGRecordLayoutBuilder.cpp - CGRecordLayout builder ----*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// Builder implementation for CGRecordLayout objects.
//
//===----------------------------------------------------------------------===//

#include "ABIInfoImpl.h"
#include "CGCXXABI.h"
#include "CGRecordLayout.h"
#include "CodeGenTypes.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/Attr.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/Expr.h"
#include "clang/AST/RecordLayout.h"
#include "clang/Basic/CodeGenOptions.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Type.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
using namespace clang;
using namespace CodeGen;

namespace {
//  The CGRecordLowering is responsible for lowering an ASTRecordLayout to an
//  llvm::Type.  Some of the lowering is straightforward, some is not.
//  * The MS C++ ABI ("discrete" bitfield layout) allocates bitfield storage
//    units of the field's declared type, while the Itanium ABI packs
//    contiguous bitfields into shared access units.
//  * Virtual bases in the Itanium ABI may be placed into the tail padding of
//    other members, so bitfield storage that would otherwise extend into
//    reusable tail padding must be clipped.
//  * The lowering produces, for each record, a complete-object LLVM type and,
//    for C++ classes whose non-virtual size differs from the complete size, a
//    base-subobject type used when the class appears as a base.
struct CGRecordLowering {
  // MemberInfo is a helper structure that contains information about a record
  // member.  In addition to the standard member types, there exist sentinel
  // members that mark the position of virtual table pointers and virtual base
  // pointers.
  struct MemberInfo {
    CharUnits Offset;
    enum InfoKind { VFPtr, VBPtr, Field, Base, VBase } Kind;
    llvm::Type *Data;
    union {
      const FieldDecl *FD;
      const CXXRecordDecl *RD;
    };
    MemberInfo(CharUnits Offset, InfoKind Kind, llvm::Type *Data,
               const FieldDecl *FD = nullptr)
        : Offset(Offset), Kind(Kind), Data(Data), FD(FD) {}
    MemberInfo(CharUnits Offset, InfoKind Kind, llvm::Type *Data,
               const CXXRecordDecl *RD)
        : Offset(Offset), Kind(Kind), Data(Data), RD(RD) {}
    // MemberInfos are sorted so we define a < operator.
    bool operator <(const MemberInfo& a) const { return Offset < a.Offset; }
  };

  // The constructor.
  CGRecordLowering(CodeGenTypes &Types, const RecordDecl *D, bool Packed);

  /// Constructs a MemberInfo instance from an offset and an llvm::Type *.
  static MemberInfo StorageInfo(CharUnits Offset, llvm::Type *Data) {
    return MemberInfo(Offset, MemberInfo::Field, Data);
  }

  /// The Microsoft bit-field layout rule allocates discrete storage units of
  /// the field's declared type, and only packs adjacent bit-fields of the
  /// same declared type into the same storage unit.
  bool isDiscreteBitFieldABI() const {
    return Context.getTargetInfo().getCXXABI().isMicrosoft() ||
           D->isMsStruct(Context);
  }

  /// Helper function to check if we are targeting AAPCS.
  bool isAAPCS() const {
    return Context.getTargetInfo().getABI().starts_with("aapcs");
  }

  /// The Itanium base layout rule allows virtual bases to overlap
  /// other bases, which complicates layout in specific ways.
  bool isOverlappingVBaseABI() const {
    return !Context.getTargetInfo().getCXXABI().isMicrosoft();
  }

  /// Wraps llvm::Type::getIntNTy with some implicit arguments.
  llvm::Type *getIntNType(uint64_t NumBits) const {
    unsigned AlignedBits = llvm::alignTo(NumBits, Context.getCharWidth());
    return llvm::Type::getIntNTy(Types.getLLVMContext(), AlignedBits);
  }

  /// Gets an llvm type the size of one character unit.
  llvm::Type *getCharType() const {
    return llvm::Type::getIntNTy(Types.getLLVMContext(),
                                 Context.getCharWidth());
  }

  /// Gets an llvm type of size NumChars and alignment 1.
  llvm::Type *getByteArrayType(CharUnits NumChars) const {
    assert(!NumChars.isZero() && "Empty byte arrays aren't allowed.");
    llvm::Type *Type = getCharType();
    return NumChars == CharUnits::One() ? Type :
        (llvm::Type *)llvm::ArrayType::get(Type, NumChars.getQuantity());
  }

  /// Gets the storage type for a field decl and handles storage
  /// for itanium bitfields that are smaller than their declared type.
  llvm::Type *getStorageType(const FieldDecl *FD) const {
    llvm::Type *Type = Types.ConvertTypeForMem(FD->getType());
    if (!FD->isBitField()) return Type;
    if (isDiscreteBitFieldABI()) return Type;
    return getIntNType(std::min(FD->getBitWidthValue(),
                                (unsigned)Context.toBits(getSize(Type))));
  }

  /// Gets the llvm base subobject type from a CXXRecordDecl.
  llvm::Type *getStorageType(const CXXRecordDecl *RD) const {
    return Types.getCGRecordLayout(RD).getBaseSubobjectLLVMType();
  }
  CharUnits bitsToCharUnits(uint64_t BitOffset) const {
    return Context.toCharUnitsFromBits(BitOffset);
  }
  CharUnits getSize(llvm::Type *Type) const {
    return CharUnits::fromQuantity(DataLayout.getTypeAllocSize(Type));
  }
  CharUnits getAlignment(llvm::Type *Type) const {
    return CharUnits::fromQuantity(DataLayout.getABITypeAlign(Type));
  }
  bool isBE() const { return DataLayout.isBigEndian(); }
  bool isZeroInitializable(const FieldDecl *FD) const {
    return Types.isZeroInitializable(FD->getType());
  }
  bool isZeroInitializable(const RecordDecl *RD) const {
    return Types.isZeroInitializable(RD);
  }
  void appendPaddingBytes(CharUnits Size) {
    if (!Size.isZero())
      FieldTypes.push_back(getByteArrayType(Size));
  }
  uint64_t getFieldBitOffset(const FieldDecl *FD) const {
    return Layout.getFieldOffset(FD->getFieldIndex());
  }

  // Layout routines.
  void setBitFieldInfo(const FieldDecl *FD, CharUnits StartOffset,
                       llvm::Type *StorageType);

  void lower(bool NonVirtualBaseType);
  void lowerUnion(bool isNonVirtualBaseType);
  void accumulateFields(bool isNonVirtualBaseType);
  RecordDecl::field_iterator
  accumulateBitFields(bool isNonVirtualBaseType,
                      RecordDecl::field_iterator Field,
                      RecordDecl::field_iterator FieldEnd);
  void computeVolatileBitfields();
  void accumulateBases();
  void accumulateVPtrs();
  void accumulateVBases();

  bool hasOwnStorage(const CXXRecordDecl *Decl,
                     const CXXRecordDecl *Query) const;
  void calculateZeroInit();
  CharUnits calculateTailClippingOffset(bool isNonVirtualBaseType) const;
  void checkBitfieldClipping(bool isNonVirtualBaseType) const;
  /// Determines if we need a packed llvm struct.
  void determinePacked(bool NVBaseType);
  /// Inserts padding everywhere it's needed.
  void insertPadding();
  /// Fills out the structures that are ultimately consumed.
  void fillOutputFields();

  // Input memoization fields.
  CodeGenTypes &Types;
  const ASTContext &Context;
  const RecordDecl *D;
  const CXXRecordDecl *RD;
  const ASTRecordLayout &Layout;
  const llvm::DataLayout &DataLayout;
  // Helpful intermediate data-structure.
  std::vector<MemberInfo> Members;
  // Output fields, consumed by CodeGenTypes::ComputeRecordLayout.
  SmallVector<llvm::Type *, 16> FieldTypes;
  llvm::DenseMap<const FieldDecl *, unsigned> Fields;
  llvm::DenseMap<const FieldDecl *, CGBitFieldInfo> BitFields;
  llvm::DenseMap<const CXXRecordDecl *, unsigned> NonVirtualBases;
  llvm::DenseMap<const CXXRecordDecl *, unsigned> VirtualBases;
  bool IsZeroInitializable : 1;
  bool IsZeroInitializableAsBase : 1;
  bool Packed : 1;
private:
  CGRecordLowering(const CGRecordLowering &) = delete;
  void operator =(const CGRecordLowering &) = delete;
};
} // namespace

CGRecordLowering::CGRecordLowering(CodeGenTypes &Types, const RecordDecl *D,
                                   bool Packed)
    : Types(Types), Context(Types.getContext()), D(D),
      RD(dyn_cast<CXXRecordDecl>(D)),
      Layout(Types.getContext().getASTRecordLayout(D)),
      DataLayout(Types.getDataLayout()), IsZeroInitializable(true),
      IsZeroInitializableAsBase(true), Packed(Packed) {}

void CGRecordLowering::setBitFieldInfo(
    const FieldDecl *FD, CharUnits StartOffset, llvm::Type *StorageType) {
  CGBitFieldInfo &Info = BitFields[FD->getCanonicalDecl()];
  Info.IsSigned = FD->getType()->isSignedIntegerOrEnumerationType();
  Info.Offset = (unsigned)(getFieldBitOffset(FD) - Context.toBits(StartOffset));
  Info.Size = FD->getBitWidthValue();
  Info.StorageSize = (unsigned)DataLayout.getTypeAllocSizeInBits(StorageType);
  Info.StorageOffset = StartOffset;

  if (Info.Size > Info.StorageSize)
    Info.Size = Info.StorageSize;
  // Reverse the bit offsets for big endian machines. Because we represent
  // a bitfield as a single large integer load, we can imagine the bits
  // counting from the most-significant-bit instead of the
  // least-significant-bit.
  if (DataLayout.isBigEndian())
    Info.Offset = Info.StorageSize - (Info.Offset + Info.Size);

  Info.VolatileOffset = 0;
  Info.VolatileStorageSize = 0;
  Info.VolatileStorageOffset = CharUnits::Zero();
}
void CGRecordLowering::lower(bool NVBaseType) {
  // The lowering process implemented in this function takes a number of
  // carefully ordered phases.
  // 1) Store all members (fields and bases) in a list and sort them by offset.
  // 2) Add a 1-byte capstone member at the Size of the structure.
  // 3) Clip bitfield storage members if their tail padding is or might be
  //    used by another field or base.  The clipping process uses the capstone
  //    by treating it as another object that occurs after the record.
  // 4) Determine if the llvm-struct requires packing.  It's important that
  //    this phase occur after clipping, because clipping changes the llvm
  //    type.  This phase reads the offset of the capstone when determining
  //    packedness and updates the alignment of the capstone accordingly.
  // 5) Insert padding everywhere it is needed.  This phase requires 'Packed'
  //    to have been computed and needs to know the alignment of the record in
  //    order to understand if explicit tail padding is needed.
  // 6) Remove the capstone; it is no longer needed.
  // 7) Determine if this record can be zero-initialized.  This phase could
  //    have been placed anywhere after phase 1.
  // 8) Format the complete list of members in a way that can be consumed by
  //    CodeGenTypes::ComputeRecordLayout.
  CharUnits Size = NVBaseType ? Layout.getNonVirtualSize() : Layout.getSize();
  if (D->isUnion()) {
    lowerUnion(NVBaseType);
    computeVolatileBitfields();
    return;
  }
  accumulateFields(NVBaseType);
  // RD implies C++.
  if (RD) {
    accumulateVPtrs();
    accumulateBases();
    if (Members.empty()) {
      appendPaddingBytes(Size);
      computeVolatileBitfields();
      return;
    }
    if (!NVBaseType)
      accumulateVBases();
  }
  llvm::stable_sort(Members);
  checkBitfieldClipping(NVBaseType);
  Members.push_back(StorageInfo(Size, getIntNType(8)));
  determinePacked(NVBaseType);
  insertPadding();
  Members.pop_back();
  calculateZeroInit();
  fillOutputFields();
  computeVolatileBitfields();
}
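
// Worked example of the phases above -- an illustrative sketch, assuming the
// x86-64 SysV ABI (8-bit chars, cheap unaligned access).  For:
//
//   struct S { int a : 3; int b : 5; char c; };
//
// accumulateFields adds one i8 bitfield storage unit at offset 0 covering
// 'a' and 'b', plus the char field at offset 1.  The capstone lands at
// offset 4 (the record size, since the int-typed bitfields align the record
// to 4 bytes).  No member is misaligned, so the struct is not packed, and
// insertPadding adds explicit tail padding, yielding:
//
//   %struct.S = type { i8, i8, [2 x i8] }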

void CGRecordLowering::lowerUnion(bool isNonVirtualBaseType) {
  CharUnits LayoutSize =
      isNonVirtualBaseType ? Layout.getDataSize() : Layout.getSize();
  llvm::Type *StorageType = nullptr;
  bool SeenNamedMember = false;
  // Iterate through the fields setting bitFieldInfo and the Fields array. Also
  // locate the "most appropriate" storage type.  The heuristic for finding the
  // storage type isn't necessarily the best, but it works well enough for the
  // common cases.
  for (const auto *Field : D->fields()) {
    if (Field->isBitField()) {
      if (Field->isZeroLengthBitField())
        continue;
      llvm::Type *FieldType = getStorageType(Field);
      if (LayoutSize < getSize(FieldType))
        FieldType = getByteArrayType(LayoutSize);
      setBitFieldInfo(Field, CharUnits::Zero(), FieldType);
    }
    Fields[Field->getCanonicalDecl()] = 0;
    llvm::Type *FieldType = getStorageType(Field);
    // Compute zero-initializable status.
    // This union might not be zero initialized: it may contain a pointer to
    // data member which might have some exotic initialization sequence.
    // If this is the case, then we ought not to try and come up with a
    // "better" type, it might not be very easy to come up with a Constant
    // which correctly initializes it.
    if (!SeenNamedMember) {
      SeenNamedMember = Field->getIdentifier();
      if (!SeenNamedMember)
        if (const auto *FieldRD = Field->getType()->getAsRecordDecl())
          SeenNamedMember = FieldRD->findFirstNamedDataMember();
      if (SeenNamedMember && !isZeroInitializable(Field)) {
        IsZeroInitializable = IsZeroInitializableAsBase = false;
        StorageType = FieldType;
      }
    }
    // Because our union isn't zero initializable, we won't be getting a better
    // storage type.
    if (!IsZeroInitializable)
      continue;
    // Conditionally update our storage type if we've got a new "better" one.
    if (!StorageType ||
        getAlignment(FieldType) > getAlignment(StorageType) ||
        (getAlignment(FieldType) == getAlignment(StorageType) &&
         getSize(FieldType) > getSize(StorageType)))
      StorageType = FieldType;
  }
  // If we have no storage type just pad to the appropriate size and return.
  if (!StorageType)
    return appendPaddingBytes(LayoutSize);
  // If our storage size was bigger than our required size (can happen in the
  // case of packed bitfields on Itanium-destined structs) then make sure we
  // don't propagate the wrong information.
  if (LayoutSize < getSize(StorageType))
    StorageType = getByteArrayType(LayoutSize);
  FieldTypes.push_back(StorageType);
  appendPaddingBytes(LayoutSize - getSize(StorageType));

  const auto StorageAlignment = getAlignment(StorageType);
  assert((Layout.getSize() % StorageAlignment == 0 ||
          Layout.getDataSize() % StorageAlignment) &&
         "Union's standard layout and no_unique_address layout must agree on "
         "packedness");
  if (Layout.getDataSize() % StorageAlignment)
    Packed = true;
}
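
// Illustrative sketch of the storage-type heuristic above (assuming the
// x86-64 SysV ABI).  For:
//
//   union U { int i; char c[6]; };
//
// the layout size is 8 (6 bytes of data rounded up to the 4-byte alignment
// of 'i').  'i' wins as the storage type because i32 has stricter alignment
// than [6 x i8], and the remaining bytes become padding:
//
//   %union.U = type { i32, [4 x i8] }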

void CGRecordLowering::accumulateFields(bool isNonVirtualBaseType) {
  for (RecordDecl::field_iterator Field = D->field_begin(),
                                  FieldEnd = D->field_end();
       Field != FieldEnd;) {
    if (Field->isBitField()) {
      Field = accumulateBitFields(isNonVirtualBaseType, Field, FieldEnd);
      assert((Field == FieldEnd || !Field->isBitField()) &&
             "Failed to accumulate all the bitfields");
    } else if (isEmptyFieldForLayout(Context, *Field)) {
      // Empty fields have no storage.
      ++Field;
    } else {
      // Use base subobject layout for the potentially-overlapping field,
      // as it is done in RecordLayoutBuilder.
      Members.push_back(MemberInfo(
          bitsToCharUnits(getFieldBitOffset(*Field)), MemberInfo::Field,
          Field->isPotentiallyOverlapping()
              ? getStorageType(Field->getType()->getAsCXXRecordDecl())
              : getStorageType(*Field),
          *Field));
      ++Field;
    }
  }
}

// Create members for bitfields. Field is a bitfield, and FieldEnd is the end
// iterator of the record. Return the first non-bitfield encountered.
RecordDecl::field_iterator
CGRecordLowering::accumulateBitFields(bool isNonVirtualBaseType,
                                      RecordDecl::field_iterator Field,
                                      RecordDecl::field_iterator FieldEnd) {
  if (isDiscreteBitFieldABI()) {
    // Run stores the first element of the current run of bitfields. FieldEnd
    // is used as a special value to note that we don't have a current run. A
    // bitfield run is a contiguous collection of bitfields that can be stored
    // in the same storage block. Zero-sized bitfields and bitfields that would
    // cross an alignment boundary break a run and start a new one.
    RecordDecl::field_iterator Run = FieldEnd;
    // Tail is the offset of the first bit off the end of the current run. It's
    // used to determine if the ASTRecordLayout is treating these two bitfields
    // as contiguous. StartBitOffset is the offset of the beginning of the Run.
    uint64_t StartBitOffset, Tail = 0;
    for (; Field != FieldEnd && Field->isBitField(); ++Field) {
      // Zero-width bitfields end runs.
      if (Field->isZeroLengthBitField()) {
        Run = FieldEnd;
        continue;
      }
      uint64_t BitOffset = getFieldBitOffset(*Field);
      llvm::Type *Type = Types.ConvertTypeForMem(Field->getType());
      // If we don't have a run yet, or don't live within the previous run's
      // allocated storage then we allocate some storage and start a new run.
      if (Run == FieldEnd || BitOffset >= Tail) {
        Run = Field;
        StartBitOffset = BitOffset;
        Tail = StartBitOffset + DataLayout.getTypeAllocSizeInBits(Type);
        // Add the storage member to the record.  This must be added to the
        // record before the bitfield members so that it gets laid out before
        // the bitfields it contains get laid out.
        Members.push_back(StorageInfo(bitsToCharUnits(StartBitOffset), Type));
      }
      // Bitfields get the offset of their storage but come afterward and
      // remain there after a stable sort.
      Members.push_back(MemberInfo(bitsToCharUnits(StartBitOffset),
                                   MemberInfo::Field, nullptr, *Field));
    }
    return Field;
  }

  // The SysV ABI can overlap bitfield storage units with both other bitfield
  // storage units and other non-bitfield data members.  Here we form "access
  // units" of bitfields: each access unit is a span of bitfields loaded and
  // stored together.  We prefer the largest cheaply-accessible unit that does
  // not overlap storage belonging to other members and does not extend into
  // tail padding that another object might reuse.  Barriers (zero-length
  // bitfields at character boundaries, and non-bitfield members) end an
  // access unit, as do spans that would require a multi-register access or,
  // on targets without cheap unaligned access, a misaligned one.

  // The accumulation is done in CharUnit-sized chunks.
  CharUnits RegSize =
      bitsToCharUnits(Context.getTargetInfo().getRegisterWidth());
  unsigned CharBits = Context.getCharWidth();

  // Limit of useable tail padding at the end of the record.  Computed lazily
  // and cached.
  CharUnits ScissorOffset = CharUnits::Zero();

  // Data about the start of the span we're accumulating to create an access
  // unit from.  Begin is the first bitfield of the span.  If Begin is
  // FieldEnd, we've not got a current span.  The span starts at the
  // BeginOffset character boundary.  BitSizeSinceBegin is the size (in bits)
  // of the span -- this might include padding when we've advanced to a
  // subsequent bitfield run.
  RecordDecl::field_iterator Begin = FieldEnd;
  CharUnits BeginOffset;
  uint64_t BitSizeSinceBegin;

  // The (non-inclusive) end of the largest acceptable access unit we've found
  // since Begin.  If this is Begin, we're gathering the initial set of
  // bitfields of a new span.  BestEndOffset is the end of that acceptable
  // access unit -- it might extend beyond the last character of the bitfield
  // run, using available padding characters.
  RecordDecl::field_iterator BestEnd = Begin;
  CharUnits BestEndOffset;
  bool BestClipped; // Whether the representation must be in a byte array.

  for (;;) {
    // AtAlignedBoundary is true if Field is the (potential) start of a new
    // span (or the end of the bitfields).  When true, LimitOffset is the
    // character offset of that span and Barrier indicates whether the new
    // span cannot be merged into the current one.
    bool AtAlignedBoundary = false;
    bool Barrier = false;

    if (Field != FieldEnd && Field->isBitField()) {
      uint64_t BitOffset = getFieldBitOffset(*Field);
      if (Begin == FieldEnd) {
        // Beginning a new span.
        Begin = Field;
        BestEnd = Begin;

        assert((BitOffset % CharBits) == 0 && "Not at start of char");
        BeginOffset = bitsToCharUnits(BitOffset);
        BitSizeSinceBegin = 0;
      } else if ((BitOffset % CharBits) != 0) {
        // Bitfield occupies the same character as previous bitfield, it must
        // be part of the same span.  This can include zero-length bitfields,
        // should the target not align them to character boundaries.  Such
        // non-alignment is at variance with the standards, which require
        // zero-length bitfields be a barrier between access units.  But of
        // course we can't achieve that in the middle of a character.
        assert(BitOffset == Context.toBits(BeginOffset) + BitSizeSinceBegin &&
               "Concatenating non-contiguous bitfields");
      } else {
        // Bitfield potentially begins a new span.  This includes zero-length
        // bitfields on non-aligning targets that lie at character boundaries
        // (they can end a span, but can't start one).
        if (Field->isZeroLengthBitField())
          Barrier = true;
        AtAlignedBoundary = true;
      }
    } else {
      // We've reached the end of the bitfield run.  Either we're done, or this
      // is a barrier for the current span.
      if (Begin == FieldEnd)
        break;

      Barrier = true;
      AtAlignedBoundary = true;
    }

    // InstallBest indicates whether we should create an access unit for the
    // current best span: fields [Begin, BestEnd) occupying characters
    // [BeginOffset, BestEndOffset).
    bool InstallBest = false;
    if (AtAlignedBoundary) {
      // Field is the start of a new span or the end of the bitfields.  The
      // just-seen span now extends to BitSizeSinceBegin bits.

      // Determine if we can accumulate that just-seen span into the current
      // accumulation.
      CharUnits AccessSize = bitsToCharUnits(BitSizeSinceBegin + CharBits - 1);
      if (BestEnd == Begin) {
        // This is the initial run at the start of a new span.  By definition,
        // this is the best seen so far.
        BestEnd = Field;
        BestEndOffset = BeginOffset + AccessSize;
        // Assume clipped until proven not below.
        BestClipped = true;
        if (!BitSizeSinceBegin)
          // A zero-sized initial span -- this will install nothing and reset
          // for another.
          InstallBest = true;
      } else if (AccessSize > RegSize)
        // Accumulating the just-seen span would create a multi-register
        // access unit, which would increase register pressure.
        InstallBest = true;

      if (!InstallBest) {
        // Determine if accumulating the just-seen span will create an
        // expensive access unit or not.
        llvm::Type *Type = getIntNType(Context.toBits(AccessSize));
        if (!Context.getTargetInfo().hasCheapUnalignedBitFieldAccess()) {
          // Unaligned accesses are expensive.  Only accumulate if the new
          // unit is naturally aligned, otherwise install the best we have.
          CharUnits Align = getAlignment(Type);
          if (Align > Layout.getAlignment())
            // The alignment required is greater than the containing structure
            // itself.
            InstallBest = true;
          else if (!BeginOffset.isMultipleOf(Align))
            // The access unit is not at a naturally aligned offset within the
            // structure.
            InstallBest = true;

          if (InstallBest && BestEnd == Field)
            // The just-seen span is the best seen so far; it can avoid the
            // byte-array representation if it is exactly sized.
            if (getSize(Type) == AccessSize)
              BestClipped = false;
        }

        if (!InstallBest) {
          // Find the next used storage offset to determine what the limit of
          // the current span is.  That's either the offset of the next field
          // with storage (which might be Field itself) or the end of the
          // non-reusable tail padding.
          CharUnits LimitOffset;
          for (auto Probe = Field; Probe != FieldEnd; ++Probe)
            if (!isEmptyFieldForLayout(Context, *Probe)) {
              // A member with storage sets the limit.
              assert((getFieldBitOffset(*Probe) % CharBits) == 0 &&
                     "Next storage is not byte-aligned");
              LimitOffset = bitsToCharUnits(getFieldBitOffset(*Probe));
              goto FoundLimit;
            }
          // We reached the end of the fields; determine the bounds of useable
          // tail padding.  As this can be complex for C++, we cache the
          // result.
          if (ScissorOffset.isZero()) {
            ScissorOffset = calculateTailClippingOffset(isNonVirtualBaseType);
            assert(!ScissorOffset.isZero() && "Tail clipping at zero");
          }

          LimitOffset = ScissorOffset;
        FoundLimit:;

          CharUnits TypeSize = getSize(Type);
          if (BeginOffset + TypeSize <= LimitOffset) {
            // There is space before LimitOffset to create a naturally-sized
            // access unit.
            BestEndOffset = BeginOffset + TypeSize;
            BestEnd = Field;
            BestClipped = false;
          }

          if (Barrier)
            // The next field is a barrier that we cannot merge across.
            InstallBest = true;
          else if (Types.getCodeGenOpts().FineGrainedBitfieldAccesses)
            // Fine-grained access, so no merging of spans.
            InstallBest = true;
          else
            // Otherwise, we're not installing.  Update the bit size of the
            // current span to go all the way to LimitOffset, which is the
            // (aligned) offset of the next bitfield to consider.
            BitSizeSinceBegin = Context.toBits(LimitOffset - BeginOffset);
        }
      }
    }

    if (InstallBest) {
      assert((Field == FieldEnd || !Field->isBitField() ||
              (getFieldBitOffset(*Field) % CharBits) == 0) &&
             "Installing but not at an aligned bitfield or limit");
      CharUnits AccessSize = BestEndOffset - BeginOffset;
      if (!AccessSize.isZero()) {
        // Add the storage member for the access unit to the record.  The
        // bitfields get the offset of their storage but come afterward and
        // remain there after a stable sort.
        llvm::Type *Type;
        if (BestClipped) {
          assert(getSize(getIntNType(Context.toBits(AccessSize))) >
                     AccessSize &&
                 "Clipped access need not be clipped");
          Type = getByteArrayType(AccessSize);
        } else {
          Type = getIntNType(Context.toBits(AccessSize));
          assert(getSize(Type) == AccessSize &&
                 "Unclipped access must not be clipped");
        }
        Members.push_back(StorageInfo(BeginOffset, Type));
        for (; Begin != BestEnd; ++Begin)
          if (!Begin->isZeroLengthBitField())
            Members.push_back(
                MemberInfo(BeginOffset, MemberInfo::Field, nullptr, *Begin));
      }
      // Reset to start a new span.
      Field = BestEnd;
      Begin = FieldEnd;
    } else {
      assert(Field != FieldEnd && Field->isBitField() &&
             "Accumulating past end of bitfields");
      assert(!Barrier && "Accumulating across barrier");
      // Accumulate this bitfield into the current (potential) span.
      BitSizeSinceBegin += Field->getBitWidthValue();
      ++Field;
    }
  }

  return Field;
}
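
// Illustrative sketch of access-unit formation (assuming the x86-64 SysV
// ABI).  For:
//
//   struct T { int a : 17; int b : 7; char c; };
//
// the bitfields occupy bits [0, 24).  The integer type for a 24-bit span
// (i24) has a 4-byte allocation size, but byte 3 already belongs to 'c', so
// the access unit is installed clipped, as a [3 x i8] byte array at offset 0,
// with 'c' following at offset 3.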

void CGRecordLowering::accumulateBases() {
  // If we've got a primary virtual base, we need to add it with the bases.
  if (Layout.isPrimaryBaseVirtual()) {
    const CXXRecordDecl *BaseDecl = Layout.getPrimaryBase();
    Members.push_back(MemberInfo(CharUnits::Zero(), MemberInfo::Base,
                                 getStorageType(BaseDecl), BaseDecl));
  }
  // Accumulate the non-virtual bases.
  for (const auto &Base : RD->bases()) {
    if (Base.isVirtual())
      continue;
    // Bases can be zero-sized even if not technically empty if they
    // contain only a trailing array member.
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
    if (!isEmptyRecordForLayout(Context, Base.getType()) &&
        !Context.getASTRecordLayout(BaseDecl).getNonVirtualSize().isZero())
      Members.push_back(MemberInfo(Layout.getBaseClassOffset(BaseDecl),
          MemberInfo::Base, getStorageType(BaseDecl), BaseDecl));
  }
}

/// The AAPCS defines that, when possible, bit-fields should
/// be accessed using containers of the declared type width:
/// When a volatile bit-field is read, and its container does not overlap with
/// any non-bit-field member or any zero length bit-field member, its container
/// must be read exactly once using the access width appropriate to the type of
/// the container. When a volatile bit-field is written, and its container does
/// not overlap with any non-bit-field member or any zero-length bit-field
/// member, its container must be read exactly once and written exactly once
/// using the access width appropriate to the type of the container. The two
/// accesses are not atomic.
///
/// Enforcing the width restriction can be disabled using
/// -fno-aapcs-bitfield-width.
void CGRecordLowering::computeVolatileBitfields() {
  if (!isAAPCS() || !Types.getCodeGenOpts().AAPCSBitfieldWidth)
    return;

  for (auto &I : BitFields) {
    const FieldDecl *Field = I.first;
    CGBitFieldInfo &Info = I.second;
    llvm::Type *ResLTy = Types.ConvertTypeForMem(Field->getType());
    // If the record alignment is less than the type width, we can't enforce
    // an aligned load, bail out.
    if ((uint64_t)(Context.toBits(Layout.getAlignment())) <
        ResLTy->getPrimitiveSizeInBits())
      continue;

    // CGRecordLowering::setBitFieldInfo() pre-adjusts the bit-field offsets
    // for big-endian targets, but it assumes a container of width
    // Info.StorageSize.  Since AAPCS uses a different container size (width
    // of the type), we first undo that calculation here and redo it once
    // the bit-field offset within the new container is calculated.
    const unsigned OldOffset =
        isBE() ? Info.StorageSize - (Info.Offset + Info.Size) : Info.Offset;
    // Offset to the bit-field from the beginning of the struct.
    const unsigned AbsoluteOffset =
        Context.toBits(Info.StorageOffset) + OldOffset;

    // Container size is the width of the bit-field type.
    const unsigned StorageSize = ResLTy->getPrimitiveSizeInBits();
    // Nothing to do if the access uses the desired
    // container width and is naturally aligned.
    if (Info.StorageSize == StorageSize && (OldOffset % StorageSize == 0))
      continue;

    // Offset within the container.
    unsigned Offset = AbsoluteOffset & (StorageSize - 1);
    // Bail out if an aligned load of the container cannot cover the entire
    // bit-field.  This can happen, for example, if the bit-field is part of a
    // packed struct.  AAPCS does not define access rules for such cases, so
    // we let clang follow its own rules.
    if (Offset + Info.Size > StorageSize)
      continue;

    // Re-adjust offsets for big-endian targets.
    if (isBE())
      Offset = StorageSize - (Offset + Info.Size);

    const CharUnits StorageOffset =
        Context.toCharUnitsFromBits(AbsoluteOffset & ~(StorageSize - 1));
    const CharUnits End = StorageOffset +
                          Context.toCharUnitsFromBits(StorageSize) -
                          CharUnits::One();

    const ASTRecordLayout &Layout =
        Context.getASTRecordLayout(Field->getParent());
    // If the access is outside the record, bail out.
    const CharUnits RecordSize = Layout.getSize();
    if (End >= RecordSize)
      continue;
    // Bail out if performing this load would access non-bit-field members.
    bool Conflict = false;
    for (const auto *F : D->fields()) {
      // Allow sized bit-fields overlaps.
      if (F->isBitField() && !F->isZeroLengthBitField())
        continue;

      const CharUnits FOffset = Context.toCharUnitsFromBits(
          Layout.getFieldOffset(F->getFieldIndex()));

      // As C11 defines, a zero sized bit-field defines a barrier, so
      // fields after and before it should be race condition free.
      // The AAPCS acknowledges it and imposes no restrictions when the
      // natural container overlaps a zero-sized bit-field.
      if (F->isZeroLengthBitField()) {
        if (End > FOffset && StorageOffset < FOffset) {
          Conflict = true;
          break;
        }
      }

      const CharUnits FEnd =
          FOffset +
          Context.toCharUnitsFromBits(
              Types.ConvertTypeForMem(F->getType())->getPrimitiveSizeInBits()) -
          CharUnits::One();
      // If no overlap, continue.
      if (End < FOffset || FEnd < StorageOffset)
        continue;

      // The desired load overlaps a non-bit-field member, bail out.
      Conflict = true;
      break;
    }

    if (Conflict)
      continue;

    // Write the new bit-field access parameters.
    // As the storage offset now is defined as the number of elements from the
    // start of the structure, we should divide the Offset by the element
    // size.
    Info.VolatileStorageOffset =
        StorageOffset / Context.toCharUnitsFromBits(StorageSize).getQuantity();
    Info.VolatileStorageSize = StorageSize;
    Info.VolatileOffset = Offset;
  }
}
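
// Illustrative sketch of the AAPCS widening above (assuming a 32-bit ARM
// AAPCS target with bit-field width enforcement enabled).  For:
//
//   struct V { volatile int a : 8; int b; };
//
// the access unit computed for 'a' is a single byte, but the AAPCS container
// is the declared type's width: a 32-bit load covering bytes [0, 4).  That
// container does not overlap the non-bit-field 'b' at offset 4, so
// Info.VolatileStorageSize becomes 32 and volatile accesses to 'a' use the
// wider container.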

void CGRecordLowering::accumulateVPtrs() {
  if (Layout.hasOwnVFPtr())
    Members.push_back(
        MemberInfo(CharUnits::Zero(), MemberInfo::VFPtr,
                   llvm::PointerType::getUnqual(Types.getLLVMContext())));
  if (Layout.hasOwnVBPtr())
    Members.push_back(
        MemberInfo(Layout.getVBPtrOffset(), MemberInfo::VBPtr,
                   llvm::PointerType::getUnqual(Types.getLLVMContext())));
}

CharUnits
CGRecordLowering::calculateTailClippingOffset(bool isNonVirtualBaseType) const {
  if (!RD)
    return Layout.getDataSize();

  CharUnits ScissorOffset = Layout.getNonVirtualSize();
  // In the itanium ABI, it's possible to place a vbase at a dsize that is
  // smaller than the nvsize.  Here we check to see if such a base is placed
  // before the nvsize and set the scissor offset to that, instead of the
  // nvsize.
  if (!isNonVirtualBaseType && isOverlappingVBaseABI())
    for (const auto &Base : RD->vbases()) {
      const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
      if (isEmptyRecordForLayout(Context, Base.getType()))
        continue;
      // If the vbase is a primary virtual base of some base, then it doesn't
      // get its own storage location but instead lives inside of that base.
      if (Context.isNearlyEmpty(BaseDecl) && !hasOwnStorage(RD, BaseDecl))
        continue;
      ScissorOffset = std::min(ScissorOffset,
                               Layout.getVBaseClassOffset(BaseDecl));
    }

  return ScissorOffset;
}

void CGRecordLowering::accumulateVBases() {
  for (const auto &Base : RD->vbases()) {
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
    if (isEmptyRecordForLayout(Context, Base.getType()))
      continue;
    CharUnits Offset = Layout.getVBaseClassOffset(BaseDecl);
    // If the vbase is a primary virtual base of some base, then it doesn't
    // get its own storage location but instead lives inside of that base.
    if (isOverlappingVBaseABI() &&
        Context.isNearlyEmpty(BaseDecl) &&
        !hasOwnStorage(RD, BaseDecl)) {
      Members.push_back(MemberInfo(Offset, MemberInfo::VBase, nullptr,
                                   BaseDecl));
      continue;
    }
    // If we've got a vtordisp, add it as a storage type.
    if (Layout.getVBaseOffsetsMap().find(BaseDecl)->second.hasVtorDisp())
      Members.push_back(StorageInfo(Offset - CharUnits::fromQuantity(4),
                                    getIntNType(32)));
    Members.push_back(MemberInfo(Offset, MemberInfo::VBase,
                                 getStorageType(BaseDecl), BaseDecl));
  }
}

bool CGRecordLowering::hasOwnStorage(const CXXRecordDecl *Decl,
                                     const CXXRecordDecl *Query) const {
  const ASTRecordLayout &DeclLayout = Context.getASTRecordLayout(Decl);
  if (DeclLayout.isPrimaryBaseVirtual() && DeclLayout.getPrimaryBase() == Query)
    return false;
  for (const auto &Base : Decl->bases())
    if (!hasOwnStorage(Base.getType()->getAsCXXRecordDecl(), Query))
      return false;
  return true;
}

void CGRecordLowering::calculateZeroInit() {
  for (std::vector<MemberInfo>::const_iterator Member = Members.begin(),
                                               MemberEnd = Members.end();
       IsZeroInitializableAsBase && Member != MemberEnd; ++Member) {
    if (Member->Kind == MemberInfo::Field) {
      if (!Member->FD || isZeroInitializable(Member->FD))
        continue;
      IsZeroInitializable = IsZeroInitializableAsBase = false;
    } else if (Member->Kind == MemberInfo::Base ||
               Member->Kind == MemberInfo::VBase) {
      if (isZeroInitializable(Member->RD))
        continue;
      IsZeroInitializable = false;
      if (Member->Kind == MemberInfo::Base)
        IsZeroInitializableAsBase = false;
    }
  }
}

// Verify accumulateBitfields computed the correct storage representations:
// access units must not overlap other members and must not straddle the
// tail-clipping scissor offset.
void CGRecordLowering::checkBitfieldClipping(bool IsNonVirtualBaseType) const {
#ifndef NDEBUG
  auto ScissorOffset = calculateTailClippingOffset(IsNonVirtualBaseType);
  auto Tail = CharUnits::Zero();
  for (const auto &M : Members) {
    // Only members with storage could possibly overlap.
    if (!M.Data)
      continue;

    assert(M.Offset >= Tail && "Bitfield access unit is not clipped");
    Tail = M.Offset + getSize(M.Data);
    assert((Tail <= ScissorOffset || M.Offset >= ScissorOffset) &&
           "Bitfield straddles scissor offset");
  }
#endif
}

void CGRecordLowering::determinePacked(bool NVBaseType) {
  if (Packed)
    return;
  CharUnits Alignment = CharUnits::One();
  CharUnits NVAlignment = CharUnits::One();
  CharUnits NVSize =
      !NVBaseType && RD ? Layout.getNonVirtualSize() : CharUnits::Zero();
  for (std::vector<MemberInfo>::const_iterator Member = Members.begin(),
                                               MemberEnd = Members.end();
       Member != MemberEnd; ++Member) {
    if (!Member->Data)
      continue;
    // If any member falls at an offset that it not a multiple of its
    // alignment, then the entire record must be packed.
    if (Member->Offset % getAlignment(Member->Data))
      Packed = true;
    if (Member->Offset < NVSize)
      NVAlignment = std::max(NVAlignment, getAlignment(Member->Data));
    Alignment = std::max(Alignment, getAlignment(Member->Data));
  }
  // If the size of the record (the capstone's offset) is not a multiple of
  // the record's alignment, it must be packed.
  if (Members.back().Offset % Alignment)
    Packed = true;
  // If the non-virtual sub-object is not a multiple of the non-virtual
  // sub-object's alignment, it must be packed.  We cannot have a packed
  // non-virtual sub-object and an unpacked complete object or vice versa.
  if (NVSize % NVAlignment)
    Packed = true;
  // Update the alignment of the sentinel.
  if (!Packed)
    Members.back().Data = getIntNType(Context.toBits(Alignment));
}
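
// Illustrative sketch (assuming the x86-64 SysV ABI).  For:
//
//   struct __attribute__((packed)) P { char c; int i; };
//
// the i32 member lands at offset 1, which is not a multiple of its 4-byte
// ABI alignment, so Packed becomes true and the struct is emitted as the
// packed type <{ i8, i32 }>.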

void CGRecordLowering::insertPadding() {
  std::vector<std::pair<CharUnits, CharUnits> > Padding;
  CharUnits Size = CharUnits::Zero();
  for (std::vector<MemberInfo>::const_iterator Member = Members.begin(),
                                               MemberEnd = Members.end();
       Member != MemberEnd; ++Member) {
    if (!Member->Data)
      continue;
    CharUnits Offset = Member->Offset;
    assert(Offset >= Size);
    // Insert padding if we need to.
    if (Offset !=
        Size.alignTo(Packed ? CharUnits::One() : getAlignment(Member->Data)))
      Padding.push_back(std::make_pair(Size, Offset - Size));
    Size = Offset + getSize(Member->Data);
  }
  if (Padding.empty())
    return;
  // Add the padding to the Members list and sort it.
  for (std::vector<std::pair<CharUnits, CharUnits> >::const_iterator
         Pad = Padding.begin(), PadEnd = Padding.end();
       Pad != PadEnd; ++Pad)
    Members.push_back(StorageInfo(Pad->first, getByteArrayType(Pad->second)));
  llvm::stable_sort(Members);
}

void CGRecordLowering::fillOutputFields() {
  for (std::vector<MemberInfo>::const_iterator Member = Members.begin(),
                                               MemberEnd = Members.end();
       Member != MemberEnd; ++Member) {
    if (Member->Data)
      FieldTypes.push_back(Member->Data);
    if (Member->Kind == MemberInfo::Field) {
      if (Member->FD)
        Fields[Member->FD->getCanonicalDecl()] = FieldTypes.size() - 1;
      // A field without storage must be a bitfield.
      if (!Member->Data)
        setBitFieldInfo(Member->FD, Member->Offset, FieldTypes.back());
    } else if (Member->Kind == MemberInfo::Base)
      NonVirtualBases[Member->RD] = FieldTypes.size() - 1;
    else if (Member->Kind == MemberInfo::VBase)
      VirtualBases[Member->RD] = FieldTypes.size() - 1;
  }
}

CGBitFieldInfo CGBitFieldInfo::MakeInfo(CodeGenTypes &Types,
                                        const FieldDecl *FD,
                                        uint64_t Offset, uint64_t Size,
                                        uint64_t StorageSize,
                                        CharUnits StorageOffset) {
  // This function is vestigial from CGRecordLayoutBuilder days, but it is
  // still used in GCObjCRuntime.cpp.  That usage has a "fixme" attached to it
  // that when addressed will allow this function to be removed.
  llvm::Type *Ty = Types.ConvertTypeForMem(FD->getType());
  CharUnits TypeSizeInBytes =
      CharUnits::fromQuantity(Types.getDataLayout().getTypeAllocSize(Ty));
  uint64_t TypeSizeInBits = Types.getContext().toBits(TypeSizeInBytes);

  bool IsSigned = FD->getType()->isSignedIntegerOrEnumerationType();

  if (Size > TypeSizeInBits) {
    // We have a wide bit-field.  The extra bits are only used for padding, so
    // if we have a bitfield of type T, with size N:
    //
    // T t : N;
    //
    // We can just assume that the bitfield is of size N and has the same
    // representation as a bitfield of type T with size min(N, sizeof(T)).
    Size = TypeSizeInBits;
  }

  // Reverse the bit offsets for big endian machines.  Because we represent
  // a bitfield as a single large integer load, we can imagine the bits
  // counting from the most-significant-bit instead of the
  // least-significant-bit.
  if (Types.getDataLayout().isBigEndian()) {
    Offset = StorageSize - (Offset + Size);
  }

  return CGBitFieldInfo(Offset, Size, IsSigned, StorageSize, StorageOffset);
}

std::unique_ptr<CGRecordLayout>
CodeGenTypes::ComputeRecordLayout(const RecordDecl *D, llvm::StructType *Ty) {
  CGRecordLowering Builder(*this, D, /*Packed=*/false);

  Builder.lower(/*NonVirtualBaseType=*/false);

  // If we're in C++, compute the base subobject type.
  llvm::StructType *BaseTy = nullptr;
  if (isa<CXXRecordDecl>(D)) {
    BaseTy = Ty;
    if (Builder.Layout.getNonVirtualSize() != Builder.Layout.getSize()) {
      CGRecordLowering BaseBuilder(*this, D, /*Packed=*/Builder.Packed);
      BaseBuilder.lower(/*NonVirtualBaseType=*/true);
      BaseTy = llvm::StructType::create(
          getLLVMContext(), BaseBuilder.FieldTypes, "", BaseBuilder.Packed);
      addRecordTypeName(D, BaseTy, ".base");
      // BaseTy and Ty must agree on their packedness for getLLVMFieldNo to
      // work on both of them with the same index.
      assert(Builder.Packed == BaseBuilder.Packed &&
             "Non-virtual and complete types must agree on packedness");
    }
  }

  // Fill in the struct *after* computing the base type.  Filling in the body
  // signifies that the type is no longer opaque and record layout is complete,
  // but we may need to recursively layout D while laying D out as a base type.
  Ty->setBody(Builder.FieldTypes, Builder.Packed);

  auto RL = std::make_unique<CGRecordLayout>(
      Ty, BaseTy, (bool)Builder.IsZeroInitializable,
      (bool)Builder.IsZeroInitializableAsBase);

  RL->NonVirtualBases.swap(Builder.NonVirtualBases);
  RL->CompleteObjectVirtualBases.swap(Builder.VirtualBases);

  // Add all the field numbers.
  RL->FieldInfo.swap(Builder.Fields);

  // Add bitfield info.
  RL->BitFields.swap(Builder.BitFields);

  // Dump the layout, if requested.
  if (getContext().getLangOpts().DumpRecordLayouts) {
    llvm::outs() << "\n*** Dumping IRgen Record Layout\n";
    llvm::outs() << "Record: ";
    D->dump(llvm::outs());
    llvm::outs() << "\nLayout: ";
    RL->print(llvm::outs());
  }

#ifndef NDEBUG
  // Verify that the computed LLVM struct size matches the AST layout size.
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(D);

  uint64_t TypeSizeInBits = getContext().toBits(Layout.getSize());
  assert(TypeSizeInBits == getDataLayout().getTypeAllocSizeInBits(Ty) &&
         "Type size mismatch!");

  if (BaseTy) {
    CharUnits NonVirtualSize = Layout.getNonVirtualSize();

    uint64_t AlignedNonVirtualTypeSizeInBits =
        getContext().toBits(NonVirtualSize);

    assert(AlignedNonVirtualTypeSizeInBits ==
           getDataLayout().getTypeAllocSizeInBits(BaseTy) &&
           "Type size mismatch!");
  }

  // Verify that the LLVM and AST field offsets agree.
  llvm::StructType *ST = RL->getLLVMType();
  const llvm::StructLayout *SL = getDataLayout().getStructLayout(ST);

  const ASTRecordLayout &AST_RL = getContext().getASTRecordLayout(D);
  RecordDecl::field_iterator it = D->field_begin();
  for (unsigned i = 0, e = AST_RL.getFieldCount(); i != e; ++i, ++it) {
    const FieldDecl *FD = *it;

    // Ignore zero-sized fields.
    if (isEmptyFieldForLayout(getContext(), FD))
      continue;

    // For non-bit-fields, just check that the LLVM struct offset matches the
    // AST offset.
    if (!FD->isBitField()) {
      unsigned FieldNo = RL->getLLVMFieldNo(FD);
      assert(AST_RL.getFieldOffset(i) == SL->getElementOffsetInBits(FieldNo) &&
             "Invalid field offset!");
      continue;
    }

    // Ignore unnamed bit-fields.
    if (!FD->getDeclName())
      continue;
    const CGBitFieldInfo &Info = RL->getBitFieldInfo(FD);
    llvm::Type *ElementTy = ST->getTypeAtIndex(RL->getLLVMFieldNo(FD));

    // Unions have overlapping elements dictating their layout, but for
    // non-unions we can verify that this section of the layout is the exact
    // expected size.
    if (D->isUnion()) {
      // For unions we verify that the start is zero and the size
      // is in-bounds.  However, on BE systems, the offset may be non-zero,
      // but the size + offset should match the storage size in that case as
      // it "starts" at the back.
      if (getDataLayout().isBigEndian())
        assert(static_cast<unsigned>(Info.Offset + Info.Size) ==
               Info.StorageSize &&
               "Big endian union bitfield does not end at the back");
      else
        assert(Info.Offset == 0 &&
               "Little endian union bitfield with a non-zero offset");
      assert(Info.StorageSize <= SL->getSizeInBits() &&
             "Union not large enough for bitfield storage");
    } else {
      assert((Info.StorageSize ==
                  getDataLayout().getTypeAllocSizeInBits(ElementTy) ||
              Info.VolatileStorageSize ==
                  getDataLayout().getTypeAllocSizeInBits(ElementTy)) &&
             "Storage size does not match the element type size");
    }
    assert(Info.Size > 0 && "Empty bitfield!");
    assert(static_cast<unsigned>(Info.Offset) + Info.Size <=
               Info.StorageSize &&
           "Bitfield outside of its allocated storage");
  }
#endif

  return RL;
}
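
// The record-layout dump guarded by DumpRecordLayouts above can be requested
// from the driver, for example:
//
//   clang -Xclang -fdump-record-layouts -c test.c
//
// which prints the AST record layout and the CGRecordLayout computed here
// (via RL->print(llvm::outs())).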

void CGRecordLayout::print(raw_ostream &OS) const {
  OS << "<CGRecordLayout\n";
  OS << "  LLVMType:" << *CompleteObjectType << "\n";
  if (BaseSubobjectType)
    OS << "  NonVirtualBaseLLVMType:" << *BaseSubobjectType << "\n";
  OS << "  IsZeroInitializable:" << IsZeroInitializable << "\n";
  OS << "  BitFields:[\n";

  // Print bit-field infos in declaration order.
  std::vector<std::pair<unsigned, const CGBitFieldInfo*> > BFIs;
  for (llvm::DenseMap<const FieldDecl*, CGBitFieldInfo>::const_iterator
         it = BitFields.begin(), ie = BitFields.end();
       it != ie; ++it) {
    const RecordDecl *RD = it->first->getParent();
    unsigned Index = 0;
    for (RecordDecl::field_iterator
           it2 = RD->field_begin(); *it2 != it->first; ++it2)
      ++Index;
    BFIs.push_back(std::make_pair(Index, &it->second));
  }
  llvm::array_pod_sort(BFIs.begin(), BFIs.end());
  for (unsigned i = 0, e = BFIs.size(); i != e; ++i) {
    OS.indent(4);
    BFIs[i].second->print(OS);
    OS << "\n";
  }

  OS << "]>\n";
}

LLVM_DUMP_METHOD void CGRecordLayout::dump() const {
  print(llvm::errs());
}

void CGBitFieldInfo::print(raw_ostream &OS) const {
  OS << "<CGBitFieldInfo"
     << " Offset:" << Offset << " Size:" << Size << " IsSigned:" << IsSigned
     << " StorageSize:" << StorageSize
     << " StorageOffset:" << StorageOffset.getQuantity()
     << " VolatileOffset:" << VolatileOffset
     << " VolatileStorageSize:" << VolatileStorageSize
     << " VolatileStorageOffset:" << VolatileStorageOffset.getQuantity() << ">";
}

LLVM_DUMP_METHOD void CGBitFieldInfo::dump() const {
  print(llvm::errs());
}