#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Type.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"

using namespace clang;
using namespace CodeGen;
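// CGRecordLowering lowers an ASTRecordLayout (Clang's semantic view of a
// record's layout) to an llvm::StructType, recording where each field, base
// class, vptr, and bitfield ended up. Its results back CGRecordLayout, which
// the rest of IRGen queries for field indices and bitfield access info.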
struct CGRecordLowering {
  // MemberInfo is a helper structure that contains information about a record
  // member: its offset, what kind of member it is, and the LLVM type (if any)
  // used to store it.
  struct MemberInfo {
    CharUnits Offset;
    enum InfoKind { VFPtr, VBPtr, Field, Base, VBase } Kind;
    llvm::Type *Data;
    union {
      const FieldDecl *FD;
      const CXXRecordDecl *RD;
    };
    MemberInfo(CharUnits Offset, InfoKind Kind, llvm::Type *Data,
               const FieldDecl *FD = nullptr)
        : Offset(Offset), Kind(Kind), Data(Data), FD(FD) {}
    MemberInfo(CharUnits Offset, InfoKind Kind, llvm::Type *Data,
               const CXXRecordDecl *RD)
        : Offset(Offset), Kind(Kind), Data(Data), RD(RD) {}
    // MemberInfos are sorted, so we define a < operator.
    bool operator<(const MemberInfo &a) const { return Offset < a.Offset; }
  };

  // Constructs a MemberInfo instance from an offset and an llvm::Type *.
  static MemberInfo StorageInfo(CharUnits Offset, llvm::Type *Data) {
    return MemberInfo(Offset, MemberInfo::Field, Data);
  }
  // The Microsoft bitfield layout rule allocates discrete storage units of
  // the field's formal type, rather than merging adjacent bitfields into
  // shared storage units.
  bool isDiscreteBitFieldABI() const {
    return Context.getTargetInfo().getCXXABI().isMicrosoft() ||
           D->isMsStruct(Context);
  }
  // The Itanium base layout rule allows virtual bases to overlap
  // other bases, which complicates layout in specific ways.
  bool isOverlappingVBaseABI() const {
    return !Context.getTargetInfo().getCXXABI().isMicrosoft();
  }
  // Wraps llvm::Type::getIntNTy with some implicit arguments.
  llvm::Type *getIntNType(uint64_t NumBits) const {
    unsigned AlignedBits = llvm::alignTo(NumBits, Context.getCharWidth());
    return llvm::Type::getIntNTy(Types.getLLVMContext(), AlignedBits);
  }
  // Gets an LLVM type that is sized as one character unit.
  llvm::Type *getCharType() const {
    return llvm::Type::getIntNTy(Types.getLLVMContext(),
                                 Context.getCharWidth());
  }
  // Gets an LLVM type of size NumChars and alignment 1.
  llvm::Type *getByteArrayType(CharUnits NumChars) const {
    assert(!NumChars.isZero() && "Empty byte arrays aren't allowed.");
    llvm::Type *Type = getCharType();
    return NumChars == CharUnits::One()
               ? Type
               : (llvm::Type *)llvm::ArrayType::get(Type,
                                                    NumChars.getQuantity());
  }
  // Gets the storage type for a field decl; handles storage for Itanium
  // bitfields that are smaller than their declared type.
  llvm::Type *getStorageType(const FieldDecl *FD) const {
    llvm::Type *Type = Types.ConvertTypeForMem(FD->getType());
    if (!FD->isBitField())
      return Type;
    if (isDiscreteBitFieldABI())
      return Type;
    return getIntNType(std::min(FD->getBitWidthValue(Context),
                                (unsigned)Context.toBits(getSize(Type))));
  }

  // Gets the LLVM base-subobject type for a CXXRecordDecl.
  llvm::Type *getStorageType(const CXXRecordDecl *RD) const {
    return Types.getCGRecordLayout(RD).getBaseSubobjectLLVMType();
  }
  CharUnits bitsToCharUnits(uint64_t BitOffset) const {
    return Context.toCharUnitsFromBits(BitOffset);
  }
  // Check whether the field, or the record it refers to, is zero-initializable.
  bool isZeroInitializable(const FieldDecl *FD) const {
    return Types.isZeroInitializable(FD->getType());
  }
  bool isZeroInitializable(const RecordDecl *RD) const {
    return Types.isZeroInitializable(RD);
  }
  // Appends Size bytes of padding, if any, to FieldTypes.
  void appendPaddingBytes(CharUnits Size) {
    if (!Size.isZero())
      FieldTypes.push_back(getByteArrayType(Size));
  }
  // Layout routines.
  void setBitFieldInfo(const FieldDecl *FD, CharUnits StartOffset,
                       llvm::Type *StorageType);
  // Lowers an ASTRecordLayout to an llvm type.
  void lower(bool NonVirtualBaseType);
  void lowerUnion(bool isNoUniqueAddress);
  void accumulateFields(bool isNonVirtualBaseType);
  RecordDecl::field_iterator
  accumulateBitFields(bool isNonVirtualBaseType,
                      RecordDecl::field_iterator Field,
                      RecordDecl::field_iterator FieldEnd);
  void computeVolatileBitfields();
  void accumulateBases();
  void accumulateVPtrs();
  void accumulateVBases();
  // Recursively searches all of the bases to find out if a vbase is
  // not the primary vbase of some base class.
  bool hasOwnStorage(const CXXRecordDecl *Decl,
                     const CXXRecordDecl *Query) const;
  void calculateZeroInit();
  CharUnits calculateTailClippingOffset(bool isNonVirtualBaseType) const;
  void checkBitfieldClipping(bool isNonVirtualBaseType) const;
  // Determines if we need a packed llvm struct.
  void determinePacked(bool NVBaseType);
  // Inserts padding everywhere it's needed.
  void insertPadding();
  // Fills out the structures that are ultimately consumed.
  void fillOutputFields();
  // Input memoization fields.
  CodeGenTypes &Types;
  const ASTContext &Context;
  const RecordDecl *D;
  const CXXRecordDecl *RD;
  const ASTRecordLayout &Layout;
  const llvm::DataLayout &DataLayout;
  // Helpful intermediate data structures.
  std::vector<MemberInfo> Members;
  // Output fields, consumed by CodeGenTypes::ComputeRecordLayout.
  SmallVector<llvm::Type *, 16> FieldTypes;
  llvm::DenseMap<const FieldDecl *, unsigned> Fields;
  llvm::DenseMap<const FieldDecl *, CGBitFieldInfo> BitFields;
  llvm::DenseMap<const CXXRecordDecl *, unsigned> NonVirtualBases;
  llvm::DenseMap<const CXXRecordDecl *, unsigned> VirtualBases;
  bool IsZeroInitializable : 1;
  bool IsZeroInitializableAsBase : 1;
  bool Packed : 1;
private:
  CGRecordLowering(const CGRecordLowering &) = delete;
  void operator=(const CGRecordLowering &) = delete;
};
CGRecordLowering::CGRecordLowering(CodeGenTypes &Types, const RecordDecl *D,
                                   bool Packed)
    : Types(Types), Context(Types.getContext()), D(D),
      RD(dyn_cast<CXXRecordDecl>(D)),
      Layout(Types.getContext().getASTRecordLayout(D)),
      DataLayout(Types.getDataLayout()), IsZeroInitializable(true),
      IsZeroInitializableAsBase(true), Packed(Packed) {}
void CGRecordLowering::setBitFieldInfo(
    const FieldDecl *FD, CharUnits StartOffset, llvm::Type *StorageType) {
  CGBitFieldInfo &Info = BitFields[FD->getCanonicalDecl()];
  // ... (fills in Info.IsSigned, Info.Offset, Info.Size, Info.StorageSize,
  // and Info.StorageOffset from FD and the storage unit)
  // Reverse the bit offsets for big endian machines.
  if (DataLayout.isBigEndian())
    Info.Offset = Info.StorageSize - (Info.Offset + Info.Size);
  // ...
}
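// The lowering proceeds in carefully ordered phases: accumulate all members
// (fields, bases, and vptrs) with their offsets; sort them; verify bitfield
// access units were clipped against the tail-padding scissor; append a
// one-byte "capstone" at the record size; decide packedness; insert explicit
// padding; and finally fill in the output side tables.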
void CGRecordLowering::lower(bool NVBaseType) {
  CharUnits Size = NVBaseType ? Layout.getNonVirtualSize() : Layout.getSize();
  if (D->isUnion()) {
    lowerUnion(NVBaseType);
    computeVolatileBitfields();
    return;
  }
  accumulateFields(NVBaseType);
  // RD implies a C++ record.
  if (RD) {
    accumulateVPtrs();
    accumulateBases();
    if (Members.empty()) {
      appendPaddingBytes(Size);
      computeVolatileBitfields();
      return;
    }
    if (!NVBaseType)
      accumulateVBases();
  }
  llvm::stable_sort(Members);
  checkBitfieldClipping(NVBaseType);
  Members.push_back(StorageInfo(Size, getIntNType(8)));
  determinePacked(NVBaseType);
  insertPadding();
  Members.pop_back();
  calculateZeroInit();
  fillOutputFields();
  computeVolatileBitfields();
}
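// Unions lower to a single storage member plus padding: walk the fields,
// compute each bitfield's info against offset zero, and pick the "best"
// storage type (highest alignment, then largest size) among members that
// keep the union zero-initializable.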
void CGRecordLowering::lowerUnion(bool isNoUniqueAddress) {
  CharUnits LayoutSize =
      isNoUniqueAddress ? Layout.getDataSize() : Layout.getSize();
  llvm::Type *StorageType = nullptr;
  bool SeenNamedMember = false;
  // Iterate through the fields, setting bitfield info and the Fields array,
  // and locate the "most appropriate" storage type for the union.
  for (const auto *Field : D->fields()) {
    if (Field->isBitField()) {
      if (Field->isZeroLengthBitField(Context))
        continue;
      llvm::Type *FieldType = getStorageType(Field);
      if (LayoutSize < getSize(FieldType))
        FieldType = getByteArrayType(LayoutSize);
      setBitFieldInfo(Field, CharUnits::Zero(), FieldType);
    }
    Fields[Field->getCanonicalDecl()] = 0;
    llvm::Type *FieldType = getStorageType(Field);
    // This union might not be zero initialized: it may contain a pointer to
    // data member which might have some exotic initialization sequence. If
    // so, do not try to come up with a "better" storage type.
    if (!SeenNamedMember) {
      SeenNamedMember = Field->getIdentifier();
      if (!SeenNamedMember)
        if (const auto *FieldRD = Field->getType()->getAsRecordDecl())
          SeenNamedMember = FieldRD->findFirstNamedDataMember();
      if (SeenNamedMember && !isZeroInitializable(Field)) {
        IsZeroInitializable = IsZeroInitializableAsBase = false;
        StorageType = FieldType;
      }
    }
    // Because our union isn't zero initializable, we won't be getting a
    // better storage type.
    if (!IsZeroInitializable)
      continue;
    // Conditionally update our storage type if we've got a new "better" one.
    if (!StorageType ||
        getAlignment(FieldType) > getAlignment(StorageType) ||
        (getAlignment(FieldType) == getAlignment(StorageType) &&
         getSize(FieldType) > getSize(StorageType)))
      StorageType = FieldType;
  }
  // If we have no storage type just pad to the appropriate size and return.
  if (!StorageType)
    return appendPaddingBytes(LayoutSize);
  // If our storage size was bigger than our required size (can happen in the
  // case of packed bitfields on Itanium) then just use an i8 array.
  if (LayoutSize < getSize(StorageType))
    StorageType = getByteArrayType(LayoutSize);
  FieldTypes.push_back(StorageType);
  appendPaddingBytes(LayoutSize - getSize(StorageType));
  // Set packed if we need it.
  const auto StorageAlignment = getAlignment(StorageType);
  assert((Layout.getSize() % StorageAlignment == 0 ||
          Layout.getDataSize() % StorageAlignment) &&
         "Union's standard layout and no_unique_address layout must agree on "
         "packedness");
  if (Layout.getDataSize() % StorageAlignment)
    Packed = true;
}
void CGRecordLowering::accumulateFields(bool isNonVirtualBaseType) {
  for (RecordDecl::field_iterator Field = D->field_begin(),
                                  FieldEnd = D->field_end();
       Field != FieldEnd;) {
    if (Field->isBitField()) {
      Field = accumulateBitFields(isNonVirtualBaseType, Field, FieldEnd);
      assert((Field == FieldEnd || !Field->isBitField()) &&
             "Failed to accumulate all the bitfields");
    } else if (isEmptyFieldForLayout(Context, *Field)) {
      // Empty fields have no storage.
      ++Field;
    } else {
      // Use base subobject layout for potentially-overlapping fields, as is
      // done in RecordLayoutBuilder.
      Members.push_back(MemberInfo(
          bitsToCharUnits(getFieldBitOffset(*Field)), MemberInfo::Field,
          Field->isPotentiallyOverlapping()
              ? getStorageType(Field->getType()->getAsCXXRecordDecl())
              : getStorageType(*Field),
          *Field));
      ++Field;
    }
  }
}
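// Bitfields are accumulated in one of two ways. Under the Microsoft ABI (and
// ms_struct), each run of bitfields gets discrete storage of the declared
// type. Otherwise, contiguous bitfields are greedily merged into "access
// units": the largest conveniently sized and aligned loads and stores that
// do not overlap other members or extend past the tail-padding scissor.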
RecordDecl::field_iterator
CGRecordLowering::accumulateBitFields(bool isNonVirtualBaseType,
                                      RecordDecl::field_iterator Field,
                                      RecordDecl::field_iterator FieldEnd) {
  if (isDiscreteBitFieldABI()) {
    // Run stores the first element of the current run of bitfields; FieldEnd
    // means there is no current run. Zero-width bitfields and offsets beyond
    // the current storage end a run.
    RecordDecl::field_iterator Run = FieldEnd;
    uint64_t StartBitOffset, Tail = 0;
    for (; Field != FieldEnd && Field->isBitField(); ++Field) {
      // Zero-width bitfields end runs.
      if (Field->isZeroLengthBitField(Context)) {
        Run = FieldEnd;
        continue;
      }
      uint64_t BitOffset = getFieldBitOffset(*Field);
      llvm::Type *Type = Types.ConvertTypeForMem(Field->getType());
      // If we don't have a run yet, or the bitfield doesn't fit in the
      // previous run's storage, allocate storage and start a new run.
      if (Run == FieldEnd || BitOffset >= Tail) {
        Run = Field;
        StartBitOffset = BitOffset;
        Tail = StartBitOffset + DataLayout.getTypeAllocSizeInBits(Type);
        // The storage member must precede the bitfield members in the list
        // so that it gets laid out first.
        Members.push_back(StorageInfo(bitsToCharUnits(StartBitOffset), Type));
      }
      // Bitfields get the offset of their storage but come afterward, and
      // remain there after a stable sort.
      Members.push_back(MemberInfo(bitsToCharUnits(StartBitOffset),
                                   MemberInfo::Field, nullptr, *Field));
    }
    return Field;
  }
  // Otherwise, greedily gather contiguous bitfields into access units.
  // ... (setup elided: CharBits and RegSize; the candidate span [Begin,
  // BestEnd) with BeginOffset, BestEndOffset, and BestClipped;
  // BitSizeSinceBegin; and the cached ScissorOffset)
  for (;;) {
    // AtAlignedBoundary is true iff Field is the (potential) start of a new
    // span, or the end of the bitfields.
    bool AtAlignedBoundary = false;
    bool Barrier = false;

    if (Field != FieldEnd && Field->isBitField()) {
      uint64_t BitOffset = getFieldBitOffset(*Field);
      if (Begin == FieldEnd) {
        // Beginning a new span.
        Begin = Field;
        BestEnd = Begin;
        assert((BitOffset % CharBits) == 0 && "Not at start of char");
        BeginOffset = bitsToCharUnits(BitOffset);
        BitSizeSinceBegin = 0;
      } else if ((BitOffset % CharBits) != 0) {
        // This bitfield occupies the same character as the previous one, so
        // it must be part of the same span.
        assert(BitOffset == Context.toBits(BeginOffset) + BitSizeSinceBegin &&
               "Concatenating non-contiguous bitfields");
      } else {
        // This bitfield potentially begins a new span.
        if (Field->isZeroLengthBitField(Context))
          Barrier = true;
        AtAlignedBoundary = true;
      }
    } else {
      // We've reached the end of the bitfield run.
      if (Begin == FieldEnd)
        break;
      Barrier = true;
      AtAlignedBoundary = true;
    }

    // InstallBest indicates whether we should create an access unit for the
    // current best span: fields [Begin, BestEnd) occupying characters
    // [BeginOffset, BestEndOffset).
    bool InstallBest = false;
    if (AtAlignedBoundary) {
      // The just-seen span now extends to BitSizeSinceBegin bits; its access
      // size is that rounded up to a whole number of characters.
      CharUnits AccessSize = bitsToCharUnits(BitSizeSinceBegin + CharBits - 1);
      if (BestEnd == Begin) {
        // This is the initial access unit; by definition the best so far.
        BestEnd = Field;
        BestEndOffset = BeginOffset + AccessSize;
        BestClipped = true;
        if (!BitSizeSinceBegin)
          // An empty access unit: install nothing and reset for another.
          InstallBest = true;
      } else if (AccessSize > RegSize)
        // Accumulating the just-seen span would create a multi-register
        // access unit, increasing register pressure.
        InstallBest = true;

      if (!InstallBest) {
        // Would accumulating the just-seen span create an expensive unit?
        llvm::Type *Type = getIntNType(Context.toBits(AccessSize));
        if (!Context.getTargetInfo().hasCheapUnalignedBitFieldAccess()) {
          // Unaligned accesses are expensive; only merge naturally aligned
          // access units.
          CharUnits Align = getAlignment(Type);
          if (Align > Layout.getAlignment())
            // The required alignment exceeds the containing structure's.
            InstallBest = true;
          else if (!BeginOffset.isMultipleOf(Align))
            // The access unit would not be naturally aligned.
            InstallBest = true;
          if (InstallBest && BestEnd == Field)
            // Installing the span just scanned; recompute whether it must be
            // clipped to a byte array.
            if (getSize(Type) == AccessSize)
              BestClipped = false;
        }

        if (!InstallBest) {
          // Find the offset that limits the current span: the next member
          // with storage, or the tail-clipping scissor.
          CharUnits LimitOffset;
          for (auto Probe = Field; Probe != FieldEnd; ++Probe)
            if (!isEmptyFieldForLayout(Context, *Probe)) {
              assert((getFieldBitOffset(*Probe) % CharBits) == 0 &&
                     "Next storage is not byte-aligned");
              LimitOffset = bitsToCharUnits(getFieldBitOffset(*Probe));
              goto FoundLimit;
            }
          // We reached the end of the fields; compute (and cache) the bounds
          // of the usable tail padding.
          if (ScissorOffset.isZero()) {
            ScissorOffset = calculateTailClippingOffset(isNonVirtualBaseType);
            assert(!ScissorOffset.isZero() && "Tail clipping at zero");
          }
          LimitOffset = ScissorOffset;
        FoundLimit:
          CharUnits TypeSize = getSize(Type);
          if (BeginOffset + TypeSize <= LimitOffset) {
            // There is space before LimitOffset for a naturally sized access
            // unit.
            BestEndOffset = BeginOffset + TypeSize;
            BestEnd = Field;
            BestClipped = false;
          }
          if (Barrier)
            // The next field is a barrier we cannot merge across.
            InstallBest = true;
          else if (Types.getCodeGenOpts().FineGrainedBitfieldAccesses)
            // Fine-grained access: no merging of spans.
            InstallBest = true;
          else
            // Not installing; extend the current span to LimitOffset.
            BitSizeSinceBegin = Context.toBits(LimitOffset - BeginOffset);
        }
      }
    }

    if (InstallBest) {
      assert((Field == FieldEnd || !Field->isBitField() ||
              (getFieldBitOffset(*Field) % CharBits) == 0) &&
             "Installing but not at an aligned bitfield or limit");
      CharUnits AccessSize = BestEndOffset - BeginOffset;
      if (!AccessSize.isZero()) {
        // Add the storage member for the access unit to the record. The
        // bitfields get the offset of their storage but come afterward, and
        // remain there after a stable sort.
        llvm::Type *Type;
        if (BestClipped) {
          assert(getSize(getIntNType(Context.toBits(AccessSize))) >
                     AccessSize &&
                 "Clipped access need not be clipped");
          Type = getByteArrayType(AccessSize);
        } else {
          Type = getIntNType(Context.toBits(AccessSize));
          assert(getSize(Type) == AccessSize &&
                 "Unclipped access must be clipped");
        }
        Members.push_back(StorageInfo(BeginOffset, Type));
        for (; Begin != BestEnd; ++Begin)
          if (!Begin->isZeroLengthBitField(Context))
            Members.push_back(
                MemberInfo(BeginOffset, MemberInfo::Field, nullptr, *Begin));
      }
      // Reset to start a new span.
      Field = BestEnd;
      Begin = FieldEnd;
    } else {
      assert(Field != FieldEnd && Field->isBitField() &&
             "Accumulating past end of bitfields");
      assert(!Barrier && "Accumulating across barrier");
      // Accumulate this bitfield into the current (potential) span.
      BitSizeSinceBegin += Field->getBitWidthValue(Context);
      ++Field;
    }
  }

  return Field;
}
void CGRecordLowering::accumulateBases() {
  // If we've got a primary virtual base, we need to add it with the bases.
  if (Layout.isPrimaryBaseVirtual()) {
    const CXXRecordDecl *BaseDecl = Layout.getPrimaryBase();
    Members.push_back(MemberInfo(CharUnits::Zero(), MemberInfo::Base,
                                 getStorageType(BaseDecl), BaseDecl));
  }
  // Accumulate the non-virtual bases.
  for (const auto &Base : RD->bases()) {
    if (Base.isVirtual())
      continue;
    // Bases can be zero-sized even if not technically empty if they
    // contain only a trailing array member.
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
    if (!isEmptyRecordForLayout(Context, Base.getType()) &&
        !Context.getASTRecordLayout(BaseDecl).getNonVirtualSize().isZero())
      Members.push_back(MemberInfo(Layout.getBaseClassOffset(BaseDecl),
                                   MemberInfo::Base, getStorageType(BaseDecl),
                                   BaseDecl));
  }
}
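// The AAPCS requires volatile bitfield accesses to use the width of the
// declared container type. The routine below recomputes each bitfield's
// "volatile" access parameters against that container, bailing out whenever
// the wider access would leave the record or touch a non-bitfield member.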
void CGRecordLowering::computeVolatileBitfields() {
  if (!isAAPCS() || !Types.getCodeGenOpts().AAPCSBitfieldWidth)
    return;

  for (auto &I : BitFields) {
    const FieldDecl *Field = I.first;
    CGBitFieldInfo &Info = I.second;
    llvm::Type *ResLTy = Types.ConvertTypeForMem(Field->getType());
    // If the record alignment is less than the type width, we can't straddle
    // the register.
    if ((uint64_t)(Context.toBits(Layout.getAlignment())) <
        ResLTy->getPrimitiveSizeInBits())
      continue;
    // setBitFieldInfo() pre-adjusted big-endian offsets against a container
    // of Info.StorageSize bits; undo that before recomputing.
    const unsigned OldOffset =
        isBE() ? Info.StorageSize - (Info.Offset + Info.Size) : Info.Offset;
    // Offset of the bitfield from the beginning of the struct.
    const unsigned AbsoluteOffset =
        Context.toBits(Info.StorageOffset) + OldOffset;
    // The storage size of the AAPCS access is the width of the declared type.
    const unsigned StorageSize = ResLTy->getPrimitiveSizeInBits();
    // Nothing to do if the access already uses the desired container width
    // and is naturally aligned.
    if (Info.StorageSize == StorageSize && (OldOffset % StorageSize == 0))
      continue;
    // Offset within the container.
    unsigned Offset = AbsoluteOffset & (StorageSize - 1);
    // Bail out if an aligned load of the container cannot cover the entire
    // bitfield (e.g. in a packed struct).
    if (Offset + Info.Size > StorageSize)
      continue;
    // Re-adjust offsets for big-endian targets.
    if (isBE())
      Offset = StorageSize - (Offset + Info.Size);
    const CharUnits StorageOffset =
        Context.toCharUnitsFromBits(AbsoluteOffset & ~(StorageSize - 1));
    const CharUnits End = StorageOffset +
                          Context.toCharUnitsFromBits(StorageSize) -
                          CharUnits::One();
    // If the access would run past the end of the record, bail out.
    const CharUnits RecordSize = Layout.getSize();
    if (End >= RecordSize)
      continue;
    // Bail out if the access would touch non-bitfield members.
    bool Conflict = false;
    for (const auto *F : D->fields()) {
      // Allow sized bitfields to overlap.
      if (F->isBitField() && !F->isZeroLengthBitField(Context))
        continue;
      const CharUnits FOffset = Context.toCharUnitsFromBits(
          Layout.getFieldOffset(F->getFieldIndex()));
      // Per C11, a zero-sized bitfield defines a barrier; the AAPCS imposes
      // no restriction when the natural container overlaps one.
      if (F->isZeroLengthBitField(Context)) {
        if (End > FOffset && StorageOffset < FOffset) {
          Conflict = true;
          break;
        }
        continue;
      }
      const CharUnits FEnd =
          FOffset +
          Context.toCharUnitsFromBits(
              Types.ConvertTypeForMem(F->getType())->getPrimitiveSizeInBits()) -
          CharUnits::One();
      // If the ranges don't overlap, this member is fine.
      if (End < FOffset || FEnd < StorageOffset)
        continue;
      Conflict = true;
      break;
    }
    if (Conflict)
      continue;
    // ... (record the volatile access parameters in Info)
  }
}
void CGRecordLowering::accumulateVPtrs() {
  if (Layout.hasOwnVFPtr())
    Members.push_back(
        MemberInfo(CharUnits::Zero(), MemberInfo::VFPtr,
                   llvm::PointerType::getUnqual(Types.getLLVMContext())));
  if (Layout.hasOwnVBPtr())
    Members.push_back(
        MemberInfo(Layout.getVBPtrOffset(), MemberInfo::VBPtr,
                   llvm::PointerType::getUnqual(Types.getLLVMContext())));
}
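// Under the Itanium ABI a virtual base may be placed below the non-virtual
// size of the record, so the "scissor" past which bitfield storage must not
// extend is the lowest such vbase offset rather than the non-virtual size.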
CharUnits
CGRecordLowering::calculateTailClippingOffset(bool isNonVirtualBaseType) const {
  if (!RD)
    return Layout.getDataSize();

  CharUnits ScissorOffset = Layout.getNonVirtualSize();
  // In the Itanium ABI, it's possible to place a vbase at a dsize that is
  // smaller than the nvsize. Check for such a base and clip at its offset
  // instead of the nvsize.
  if (!isNonVirtualBaseType && isOverlappingVBaseABI())
    for (const auto &Base : RD->vbases()) {
      const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
      if (isEmptyRecordForLayout(Context, Base.getType()))
        continue;
      // If the vbase is a primary virtual base of some base, then it doesn't
      // get its own storage location but instead lives inside of that base.
      if (Context.isNearlyEmpty(BaseDecl) && !hasOwnStorage(RD, BaseDecl))
        continue;
      ScissorOffset = std::min(ScissorOffset,
                               Layout.getVBaseClassOffset(BaseDecl));
    }
  return ScissorOffset;
}
void CGRecordLowering::accumulateVBases() {
  for (const auto &Base : RD->vbases()) {
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
    if (isEmptyRecordForLayout(Context, Base.getType()))
      continue;
    CharUnits Offset = Layout.getVBaseClassOffset(BaseDecl);
    // If the vbase is a primary virtual base of some base, then it doesn't
    // get its own storage location but instead lives inside of that base.
    if (isOverlappingVBaseABI() && Context.isNearlyEmpty(BaseDecl) &&
        !hasOwnStorage(RD, BaseDecl)) {
      Members.push_back(
          MemberInfo(Offset, MemberInfo::VBase, nullptr, BaseDecl));
      continue;
    }
    // If we've got a vtordisp, add it as a storage type.
    if (Layout.getVBaseOffsetsMap().find(BaseDecl)->second.hasVtorDisp())
      Members.push_back(StorageInfo(Offset - CharUnits::fromQuantity(4),
                                    getIntNType(32)));
    Members.push_back(MemberInfo(Offset, MemberInfo::VBase,
                                 getStorageType(BaseDecl), BaseDecl));
  }
}
// Recursively searches all of the bases to find out if a vbase is
// not the primary vbase of some base class.
bool CGRecordLowering::hasOwnStorage(const CXXRecordDecl *Decl,
                                     const CXXRecordDecl *Query) const {
  const ASTRecordLayout &DeclLayout = Context.getASTRecordLayout(Decl);
  if (DeclLayout.isPrimaryBaseVirtual() && DeclLayout.getPrimaryBase() == Query)
    return false;
  for (const auto &Base : Decl->bases())
    if (!hasOwnStorage(Base.getType()->getAsCXXRecordDecl(), Query))
      return false;
  return true;
}
void CGRecordLowering::calculateZeroInit() {
  for (std::vector<MemberInfo>::const_iterator Member = Members.begin(),
                                               MemberEnd = Members.end();
       IsZeroInitializableAsBase && Member != MemberEnd; ++Member) {
    if (Member->Kind == MemberInfo::Field) {
      if (!Member->FD || isZeroInitializable(Member->FD))
        continue;
      IsZeroInitializable = IsZeroInitializableAsBase = false;
    } else if (Member->Kind == MemberInfo::Base ||
               Member->Kind == MemberInfo::VBase) {
      if (isZeroInitializable(Member->RD))
        continue;
      IsZeroInitializable = false;
      if (Member->Kind == MemberInfo::Base)
        IsZeroInitializableAsBase = false;
    }
  }
}
// Verify that the bitfield access units were computed and clipped correctly.
void CGRecordLowering::checkBitfieldClipping(bool IsNonVirtualBaseType) const {
#ifndef NDEBUG
  auto ScissorOffset = calculateTailClippingOffset(IsNonVirtualBaseType);
  auto Tail = CharUnits::Zero();
  for (const auto &M : Members) {
    // Only members with storage could possibly overlap.
    if (!M.Data)
      continue;

    assert(M.Offset >= Tail && "Bitfield access unit is not clipped");
    Tail = M.Offset + getSize(M.Data);
    assert((Tail <= ScissorOffset || M.Offset >= ScissorOffset) &&
           "Bitfield straddles scissor offset");
  }
#endif
}
void CGRecordLowering::determinePacked(bool NVBaseType) {
  if (Packed)
    return;
  CharUnits Alignment = CharUnits::One();
  CharUnits NVAlignment = CharUnits::One();
  CharUnits NVSize =
      !NVBaseType && RD ? Layout.getNonVirtualSize() : CharUnits::Zero();
  for (std::vector<MemberInfo>::const_iterator Member = Members.begin(),
                                               MemberEnd = Members.end();
       Member != MemberEnd; ++Member) {
    if (!Member->Data)
      continue;
    // If any member falls at an offset that is not a multiple of its
    // alignment, then the entire record must be packed.
    if (Member->Offset % getAlignment(Member->Data))
      Packed = true;
    if (Member->Offset < NVSize)
      NVAlignment = std::max(NVAlignment, getAlignment(Member->Data));
    Alignment = std::max(Alignment, getAlignment(Member->Data));
  }
  // If the size of the record (the capstone's offset) is not a multiple of
  // the record's alignment, it must be packed.
  if (Members.back().Offset % Alignment)
    Packed = true;
  // If the non-virtual sub-object is not a multiple of its own alignment, it
  // must be packed; we cannot have a packed non-virtual sub-object and an
  // unpacked complete object, or vice versa.
  if (NVSize % NVAlignment)
    Packed = true;
  // Update the alignment of the sentinel.
  if (!Packed)
    Members.back().Data = getIntNType(Context.toBits(Alignment));
}
void CGRecordLowering::insertPadding() {
  std::vector<std::pair<CharUnits, CharUnits>> Padding;
  CharUnits Size = CharUnits::Zero();
  for (std::vector<MemberInfo>::const_iterator Member = Members.begin(),
                                               MemberEnd = Members.end();
       Member != MemberEnd; ++Member) {
    if (!Member->Data)
      continue;
    CharUnits Offset = Member->Offset;
    assert(Offset >= Size);
    // Insert padding if we need to.
    if (Offset !=
        Size.alignTo(Packed ? CharUnits::One() : getAlignment(Member->Data)))
      Padding.push_back(std::make_pair(Size, Offset - Size));
    Size = Offset + getSize(Member->Data);
  }
  if (Padding.empty())
    return;
  // Add the padding to the Members list and sort it.
  for (std::vector<std::pair<CharUnits, CharUnits>>::const_iterator
           Pad = Padding.begin(),
           PadEnd = Padding.end();
       Pad != PadEnd; ++Pad)
    Members.push_back(StorageInfo(Pad->first, getByteArrayType(Pad->second)));
  llvm::stable_sort(Members);
}
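// With Members final, emit the parallel FieldTypes array and the maps from
// AST decls to LLVM field indices that CGRecordLayout exposes.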
void CGRecordLowering::fillOutputFields() {
  for (std::vector<MemberInfo>::const_iterator Member = Members.begin(),
                                               MemberEnd = Members.end();
       Member != MemberEnd; ++Member) {
    if (Member->Data)
      FieldTypes.push_back(Member->Data);
    if (Member->Kind == MemberInfo::Field) {
      if (Member->FD)
        Fields[Member->FD->getCanonicalDecl()] = FieldTypes.size() - 1;
      // A field without storage must be a bitfield.
      if (!Member->Data)
        setBitFieldInfo(Member->FD, Member->Offset, FieldTypes.back());
    } else if (Member->Kind == MemberInfo::Base)
      NonVirtualBases[Member->RD] = FieldTypes.size() - 1;
    else if (Member->Kind == MemberInfo::VBase)
      VirtualBases[Member->RD] = FieldTypes.size() - 1;
  }
}
CGBitFieldInfo CGBitFieldInfo::MakeInfo(CodeGenTypes &Types,
                                        const FieldDecl *FD, uint64_t Offset,
                                        uint64_t Size, uint64_t StorageSize,
                                        CharUnits StorageOffset) {
  llvm::Type *Ty = Types.ConvertTypeForMem(FD->getType());
  CharUnits TypeSizeInBytes =
      CharUnits::fromQuantity(Types.getDataLayout().getTypeAllocSize(Ty));
  uint64_t TypeSizeInBits = Types.getContext().toBits(TypeSizeInBytes);

  bool IsSigned = FD->getType()->isSignedIntegerOrEnumerationType();

  if (Size > TypeSizeInBits) {
    // A wide bitfield: the extra bits are only padding, so treat "T t : N"
    // with N greater than the width of T as "T t : bitsof(T)".
    Size = TypeSizeInBits;
  }

  // Reverse the bit offsets for big endian machines.
  if (Types.getDataLayout().isBigEndian()) {
    Offset = StorageSize - (Offset + Size);
  }

  return CGBitFieldInfo(Offset, Size, IsSigned, StorageSize, StorageOffset);
}
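// ComputeRecordLayout is the CodeGenTypes entry point: it runs the lowering
// once for the complete object and, for C++ records whose non-virtual size
// differs from the complete size, a second time for the base subobject, then
// packages the results into a CGRecordLayout.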
std::unique_ptr<CGRecordLayout>
CodeGenTypes::ComputeRecordLayout(const RecordDecl *D, llvm::StructType *Ty) {
  CGRecordLowering Builder(*this, D, /*Packed=*/false);

  Builder.lower(/*NonVirtualBaseType=*/false);

  // If we're in C++, compute the base subobject type.
  llvm::StructType *BaseTy = nullptr;
  if (isa<CXXRecordDecl>(D)) {
    BaseTy = Ty;
    if (Builder.Layout.getNonVirtualSize() != Builder.Layout.getSize()) {
      CGRecordLowering BaseBuilder(*this, D, /*Packed=*/Builder.Packed);
      BaseBuilder.lower(/*NonVirtualBaseType=*/true);
      BaseTy = llvm::StructType::create(
          getLLVMContext(), BaseBuilder.FieldTypes, "", BaseBuilder.Packed);
      addRecordTypeName(D, BaseTy, ".base");
      // BaseTy and Ty must agree on their packedness for getLLVMFieldNo to
      // work on both of them with the same index.
      assert(Builder.Packed == BaseBuilder.Packed &&
             "Non-virtual and complete types must agree on packedness");
    }
  }

  // Fill in the struct *after* computing the base type. Filling in the body
  // signifies that the type is no longer opaque and record layout is
  // complete, but we may need to recursively layout D while laying D out as
  // a base type.
  Ty->setBody(Builder.FieldTypes, Builder.Packed);

  auto RL = std::make_unique<CGRecordLayout>(
      Ty, BaseTy, (bool)Builder.IsZeroInitializable,
      (bool)Builder.IsZeroInitializableAsBase);

  RL->NonVirtualBases.swap(Builder.NonVirtualBases);
  RL->CompleteObjectVirtualBases.swap(Builder.VirtualBases);

  // Add all the field numbers.
  RL->FieldInfo.swap(Builder.Fields);

  // Add bitfield info.
  RL->BitFields.swap(Builder.BitFields);

  // Dump the layout, if requested.
  if (getContext().getLangOpts().DumpRecordLayouts) {
    llvm::outs() << "\n*** Dumping IRgen Record Layout\n";
    llvm::outs() << "Record: ";
    D->dump(llvm::outs());
    llvm::outs() << "\nLayout: ";
    RL->print(llvm::outs());
  }
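  // The dump above is reachable via clang's cc1 flag -fdump-record-layouts,
  // which sets LangOpts.DumpRecordLayouts.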
#ifndef NDEBUG
  // Verify that the computed LLVM struct size matches the AST layout size.
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(D);

  uint64_t TypeSizeInBits = getContext().toBits(Layout.getSize());
  assert(TypeSizeInBits == getDataLayout().getTypeAllocSizeInBits(Ty) &&
         "Type size mismatch!");

  if (BaseTy) {
    CharUnits NonVirtualSize = Layout.getNonVirtualSize();
    uint64_t AlignedNonVirtualTypeSizeInBits =
        getContext().toBits(NonVirtualSize);
    assert(AlignedNonVirtualTypeSizeInBits ==
               getDataLayout().getTypeAllocSizeInBits(BaseTy) &&
           "Type size mismatch!");
  }

  // Verify that the LLVM and AST field offsets agree.
  llvm::StructType *ST = RL->getLLVMType();
  const llvm::StructLayout *SL = getDataLayout().getStructLayout(ST);

  const ASTRecordLayout &AST_RL = getContext().getASTRecordLayout(D);
  RecordDecl::field_iterator it = D->field_begin();
  for (unsigned i = 0, e = AST_RL.getFieldCount(); i != e; ++i, ++it) {
    const FieldDecl *FD = *it;

    // Ignore zero-sized fields.
    if (isEmptyFieldForLayout(getContext(), FD))
      continue;

    // For non-bitfields, just check that the LLVM struct offset matches the
    // AST offset.
    if (!FD->isBitField()) {
      unsigned FieldNo = RL->getLLVMFieldNo(FD);
      assert(AST_RL.getFieldOffset(i) == SL->getElementOffsetInBits(FieldNo) &&
             "Invalid field offset!");
      continue;
    }

    // Ignore unnamed bitfields.
    if (!FD->getDeclName())
      continue;

    const CGBitFieldInfo &Info = RL->getBitFieldInfo(FD);
    llvm::Type *ElementTy = ST->getTypeAtIndex(RL->getLLVMFieldNo(FD));

    // Unions have overlapping elements dictating their layout, but for
    // non-unions we can verify that this section of the layout is the exact
    // expected size.
    if (D->isUnion()) {
      // For unions, verify that the start is zero and the size is in bounds.
      // On big-endian systems the offset may be non-zero, but offset + size
      // must match the storage size, as the field "starts" at the back.
      if (getDataLayout().isBigEndian())
        assert(static_cast<unsigned>(Info.Offset + Info.Size) ==
                   Info.StorageSize &&
               "Big endian union bitfield does not end at the back");
      else
        assert(Info.Offset == 0 &&
               "Little endian union bitfield with a non-zero offset");
      assert(Info.StorageSize <= SL->getSizeInBits() &&
             "Union not large enough for bitfield storage");
    } else {
      assert((Info.StorageSize ==
                  getDataLayout().getTypeAllocSizeInBits(ElementTy) ||
              Info.VolatileStorageSize ==
                  getDataLayout().getTypeAllocSizeInBits(ElementTy)) &&
             "Storage size does not match the element type size");
    }
    assert(Info.Size > 0 && "Empty bitfield!");
    assert(static_cast<unsigned>(Info.Offset) + Info.Size <=
               Info.StorageSize &&
           "Bitfield outside of its allocated storage");
  }
#endif

  return RL;
}
void CGRecordLayout::print(raw_ostream &OS) const {
  OS << "<CGRecordLayout\n";
  OS << "  LLVMType:" << *CompleteObjectType << "\n";
  if (BaseSubobjectType)
    OS << "  NonVirtualBaseLLVMType:" << *BaseSubobjectType << "\n";
  OS << "  IsZeroInitializable:" << IsZeroInitializable << "\n";
  OS << "  BitFields:[\n";

  // Print bitfield infos in declaration order.
  std::vector<std::pair<unsigned, const CGBitFieldInfo *>> BFIs;
  for (llvm::DenseMap<const FieldDecl *, CGBitFieldInfo>::const_iterator
           it = BitFields.begin(),
           ie = BitFields.end();
       it != ie; ++it) {
    const RecordDecl *RD = it->first->getParent();
    unsigned Index = 0;
    for (RecordDecl::field_iterator it2 = RD->field_begin();
         *it2 != it->first; ++it2)
      ++Index;
    BFIs.push_back(std::make_pair(Index, &it->second));
  }
  llvm::array_pod_sort(BFIs.begin(), BFIs.end());
  for (unsigned i = 0, e = BFIs.size(); i != e; ++i) {
    OS.indent(4);
    BFIs[i].second->print(OS);
    OS << "\n";
  }

  OS << "]>\n";
}

LLVM_DUMP_METHOD void CGRecordLayout::dump() const {
  print(llvm::errs());
}

void CGBitFieldInfo::print(raw_ostream &OS) const {
  OS << "<CGBitFieldInfo"
     << " Offset:" << Offset << " Size:" << Size << " IsSigned:" << IsSigned
     << " StorageSize:" << StorageSize
     << " StorageOffset:" << StorageOffset.getQuantity()
     << " VolatileOffset:" << VolatileOffset
     << " VolatileStorageSize:" << VolatileStorageSize
     << " VolatileStorageOffset:" << VolatileStorageOffset.getQuantity()
     << ">";
}

LLVM_DUMP_METHOD void CGBitFieldInfo::dump() const {
  print(llvm::errs());
}