24#include "llvm/Support/Casting.h"
37struct CIRRecordLowering final {
42 struct MemberInfo final {
44 enum class InfoKind { VFPtr,
Field,
Base, VBase } kind;
50 MemberInfo(CharUnits offset, InfoKind kind, mlir::Type data,
53 MemberInfo(CharUnits offset, InfoKind kind, mlir::Type data,
54 const CXXRecordDecl *rd)
55 : offset{offset}, kind{kind}, data{data},
cxxRecordDecl{rd} {}
57 bool operator<(
const MemberInfo &other)
const {
58 return offset < other.offset;
62 CIRRecordLowering(CIRGenTypes &cirGenTypes,
const RecordDecl *recordDecl,
66 MemberInfo makeStorageInfo(CharUnits offset, mlir::Type data) {
67 return MemberInfo(offset, MemberInfo::InfoKind::Field, data);
71 void setBitFieldInfo(
const FieldDecl *fd, CharUnits startOffset,
72 mlir::Type storageType);
74 void lower(
bool NonVirtualBaseType);
78 void determinePacked(
bool nvBaseType);
82 void computeVolatileBitfields();
83 void accumulateBases();
84 void accumulateVPtrs();
85 void accumulateVBases();
86 void accumulateFields();
91 mlir::Type getVFPtrType();
94 return astContext.getTargetInfo().getABI().starts_with(
"aapcs");
98 bool isBigEndian()
const {
return astContext.getTargetInfo().isBigEndian(); }
104 bool isOverlappingVBaseABI() {
105 return !astContext.getTargetInfo().getCXXABI().isMicrosoft();
109 bool hasOwnStorage(
const CXXRecordDecl *
decl,
const CXXRecordDecl *query);
116 bool isDiscreteBitFieldABI() {
117 return astContext.getTargetInfo().getCXXABI().isMicrosoft() ||
118 recordDecl->isMsStruct(astContext);
121 CharUnits bitsToCharUnits(uint64_t bitOffset) {
122 return astContext.toCharUnitsFromBits(bitOffset);
125 void calculateZeroInit();
127 CharUnits getSize(mlir::Type Ty) {
130 CharUnits getSizeInBits(mlir::Type ty) {
133 CharUnits getAlignment(mlir::Type Ty) {
137 bool isZeroInitializable(
const FieldDecl *fd) {
138 return cirGenTypes.isZeroInitializable(fd->
getType());
140 bool isZeroInitializable(
const RecordDecl *rd) {
141 return cirGenTypes.isZeroInitializable(rd);
145 mlir::Type getUIntNType(uint64_t numBits) {
146 unsigned alignedBits = llvm::PowerOf2Ceil(numBits);
147 alignedBits = std::max(8u, alignedBits);
148 return cir::IntType::get(&cirGenTypes.getMLIRContext(), alignedBits,
152 mlir::Type getCharType() {
153 return cir::IntType::get(&cirGenTypes.getMLIRContext(),
154 astContext.getCharWidth(),
158 mlir::Type getByteArrayType(CharUnits numberOfChars) {
159 assert(!numberOfChars.
isZero() &&
"Empty byte arrays aren't allowed.");
160 mlir::Type
type = getCharType();
167 mlir::Type getStorageType(
const CXXRecordDecl *RD) {
168 return cirGenTypes.getCIRGenRecordLayout(RD).getBaseSubobjectCIRType();
173 mlir::Type getBitfieldStorageType(
unsigned numBits) {
174 unsigned alignedBits = llvm::alignTo(numBits, astContext.getCharWidth());
176 return builder.getUIntNTy(alignedBits);
178 mlir::Type
type = getCharType();
179 return cir::ArrayType::get(
type, alignedBits / astContext.getCharWidth());
182 mlir::Type getStorageType(
const FieldDecl *
fieldDecl) {
183 mlir::Type
type = cirGenTypes.convertTypeForMem(
fieldDecl->getType());
185 cirGenTypes.getCGModule().errorNYI(recordDecl->getSourceRange(),
186 "getStorageType for bitfields");
192 return astRecordLayout.getFieldOffset(
fieldDecl->getFieldIndex());
196 void fillOutputFields();
198 void appendPaddingBytes(CharUnits size) {
200 fieldTypes.push_back(getByteArrayType(size));
205 CIRGenTypes &cirGenTypes;
206 CIRGenBuilderTy &builder;
207 const ASTContext &astContext;
208 const RecordDecl *recordDecl;
209 const CXXRecordDecl *cxxRecordDecl;
210 const ASTRecordLayout &astRecordLayout;
212 std::vector<MemberInfo> members;
214 llvm::SmallVector<mlir::Type, 16> fieldTypes;
215 llvm::DenseMap<const FieldDecl *, CIRGenBitFieldInfo> bitFields;
216 llvm::DenseMap<const FieldDecl *, unsigned> fieldIdxMap;
217 llvm::DenseMap<const CXXRecordDecl *, unsigned> nonVirtualBases;
218 llvm::DenseMap<const CXXRecordDecl *, unsigned> virtualBases;
219 cir::CIRDataLayout dataLayout;
221 LLVM_PREFERRED_TYPE(
bool)
222 unsigned zeroInitializable : 1;
223 LLVM_PREFERRED_TYPE(
bool)
224 unsigned zeroInitializableAsBase : 1;
225 LLVM_PREFERRED_TYPE(
bool)
227 LLVM_PREFERRED_TYPE(
bool)
231 CIRRecordLowering(
const CIRRecordLowering &) =
delete;
232 void operator=(
const CIRRecordLowering &) =
delete;
236CIRRecordLowering::CIRRecordLowering(
CIRGenTypes &cirGenTypes,
238 : cirGenTypes{cirGenTypes}, builder{cirGenTypes.getBuilder()},
242 cirGenTypes.getASTContext().getASTRecordLayout(
recordDecl)},
243 dataLayout{cirGenTypes.getCGModule().getModule()},
244 zeroInitializable{
true}, zeroInitializableAsBase{
true}, packed{packed},
247void CIRRecordLowering::setBitFieldInfo(
const FieldDecl *fd,
249 mlir::Type storageType) {
253 (unsigned)(getFieldBitOffset(fd) - astContext.
toBits(startOffset));
255 info.
storageSize = getSizeInBits(storageType).getQuantity();
274void CIRRecordLowering::lower(
bool nonVirtualBaseType) {
277 computeVolatileBitfields();
289 if (members.empty()) {
290 appendPaddingBytes(size);
291 computeVolatileBitfields();
294 if (!nonVirtualBaseType)
298 llvm::stable_sort(members);
302 members.push_back(makeStorageInfo(size, getUIntNType(8)));
303 determinePacked(nonVirtualBaseType);
309 computeVolatileBitfields();
312void CIRRecordLowering::fillOutputFields() {
313 for (
const MemberInfo &member : members) {
315 fieldTypes.push_back(
member.data);
316 if (
member.kind == MemberInfo::InfoKind::Field) {
318 fieldIdxMap[
member.fieldDecl->getCanonicalDecl()] =
319 fieldTypes.size() - 1;
322 assert(
member.fieldDecl &&
323 "member.data is a nullptr so member.fieldDecl should not be");
324 setBitFieldInfo(
member.fieldDecl,
member.offset, fieldTypes.back());
326 }
else if (
member.kind == MemberInfo::InfoKind::Base) {
327 nonVirtualBases[
member.cxxRecordDecl] = fieldTypes.size() - 1;
328 }
else if (
member.kind == MemberInfo::InfoKind::VBase) {
329 virtualBases[
member.cxxRecordDecl] = fieldTypes.size() - 1;
337 if (isDiscreteBitFieldABI()) {
348 for (; field != fieldEnd && field->isBitField(); ++field) {
350 if (field->isZeroLengthBitField()) {
354 uint64_t bitOffset = getFieldBitOffset(*field);
358 if (run == fieldEnd || bitOffset >= tail) {
360 startBitOffset = bitOffset;
366 makeStorageInfo(bitsToCharUnits(startBitOffset),
type));
370 members.push_back(MemberInfo(bitsToCharUnits(startBitOffset),
371 MemberInfo::InfoKind::Field,
nullptr,
388 CharUnits beginOffset;
397 CharUnits bestEndOffset;
405 bool atAlignedBoundary =
false;
407 if (field != fieldEnd && field->isBitField()) {
408 uint64_t bitOffset = getFieldBitOffset(*field);
409 if (begin == fieldEnd) {
414 assert((bitOffset % charBits) == 0 &&
"Not at start of char");
415 beginOffset = bitsToCharUnits(bitOffset);
416 bitSizeSinceBegin = 0;
417 }
else if ((bitOffset % charBits) != 0) {
425 astContext.
toBits(beginOffset) + bitSizeSinceBegin &&
426 "Concatenating non-contiguous bitfields");
431 if (field->isZeroLengthBitField())
433 atAlignedBoundary =
true;
438 if (begin == fieldEnd)
442 atAlignedBoundary =
true;
448 bool installBest =
false;
449 if (atAlignedBoundary) {
455 CharUnits accessSize = bitsToCharUnits(bitSizeSinceBegin + charBits - 1);
456 if (bestEnd == begin) {
460 bestEndOffset = beginOffset + accessSize;
463 if (!bitSizeSinceBegin)
467 }
else if (accessSize > regSize) {
476 mlir::Type
type = getUIntNType(astContext.
toBits(accessSize));
479 field->getSourceRange(),
"NYI CheapUnalignedBitFieldAccess");
486 CharUnits limitOffset;
487 for (
auto probe = field; probe != fieldEnd; ++probe)
490 assert((getFieldBitOffset(*probe) % charBits) == 0 &&
491 "Next storage is not byte-aligned");
492 limitOffset = bitsToCharUnits(getFieldBitOffset(*probe));
499 CharUnits typeSize = getSize(
type);
500 if (beginOffset + typeSize <= limitOffset) {
503 bestEndOffset = beginOffset + typeSize;
512 .FineGrainedBitfieldAccesses) {
518 bitSizeSinceBegin = astContext.
toBits(limitOffset - beginOffset);
525 assert((field == fieldEnd || !field->isBitField() ||
526 (getFieldBitOffset(*field) % charBits) == 0) &&
527 "Installing but not at an aligned bitfield or limit");
528 CharUnits accessSize = bestEndOffset - beginOffset;
529 if (!accessSize.
isZero()) {
535 assert(getSize(getUIntNType(astContext.
toBits(accessSize))) >
537 "Clipped access need not be clipped");
538 type = getByteArrayType(accessSize);
540 type = getUIntNType(astContext.
toBits(accessSize));
541 assert(getSize(
type) == accessSize &&
542 "Unclipped access must be clipped");
544 members.push_back(makeStorageInfo(beginOffset,
type));
545 for (; begin != bestEnd; ++begin)
546 if (!begin->isZeroLengthBitField())
547 members.push_back(MemberInfo(
548 beginOffset, MemberInfo::InfoKind::Field,
nullptr, *begin));
554 assert(field != fieldEnd && field->isBitField() &&
555 "Accumulating past end of bitfields");
556 assert(!
barrier &&
"Accumulating across barrier");
558 bitSizeSinceBegin += field->getBitWidthValue();
566void CIRRecordLowering::accumulateFields() {
569 field != fieldEnd;) {
570 if (field->isBitField()) {
571 field = accumulateBitFields(field, fieldEnd);
572 assert((field == fieldEnd || !field->isBitField()) &&
573 "Failed to accumulate all the bitfields");
581 members.push_back(MemberInfo(
582 bitsToCharUnits(getFieldBitOffset(*field)),
583 MemberInfo::InfoKind::Field,
584 field->isPotentiallyOverlapping()
585 ? getStorageType(field->getType()->getAsCXXRecordDecl())
586 : getStorageType(*field),
593void CIRRecordLowering::calculateZeroInit() {
594 for (
const MemberInfo &member : members) {
595 if (
member.kind == MemberInfo::InfoKind::Field) {
596 if (!
member.fieldDecl || isZeroInitializable(
member.fieldDecl))
598 zeroInitializable = zeroInitializableAsBase =
false;
600 }
else if (
member.kind == MemberInfo::InfoKind::Base ||
601 member.kind == MemberInfo::InfoKind::VBase) {
602 if (isZeroInitializable(
member.cxxRecordDecl))
604 zeroInitializable =
false;
605 if (
member.kind == MemberInfo::InfoKind::Base)
606 zeroInitializableAsBase =
false;
611void CIRRecordLowering::determinePacked(
bool nvBaseType) {
620 for (
const MemberInfo &member : members) {
625 if (!
member.offset.isMultipleOf(getAlignment(
member.data)))
627 if (
member.offset < nvSize)
628 nvAlignment = std::max(nvAlignment, getAlignment(
member.data));
629 alignment = std::max(alignment, getAlignment(
member.data));
633 if (!members.back().offset.isMultipleOf(alignment))
642 members.back().data = getUIntNType(astContext.
toBits(alignment));
645void CIRRecordLowering::insertPadding() {
646 std::vector<std::pair<CharUnits, CharUnits>> padding;
648 for (
const MemberInfo &member : members) {
651 CharUnits offset =
member.offset;
652 assert(offset >= size);
656 padding.push_back(std::make_pair(size, offset - size));
657 size = offset + getSize(
member.data);
663 for (
const std::pair<CharUnits, CharUnits> &paddingPair : padding)
664 members.push_back(makeStorageInfo(paddingPair.first,
665 getByteArrayType(paddingPair.second)));
666 llvm::stable_sort(members);
669static cir::ArgPassingKind
673 return cir::ArgPassingKind::CanPassInRegs;
675 return cir::ArgPassingKind::CannotPassInRegs;
677 return cir::ArgPassingKind::CanNeverPassInRegs;
679 llvm_unreachable(
"unknown RecordArgPassingKind");
682std::unique_ptr<CIRGenRecordLayout>
684 CIRRecordLowering lowering(*
this, rd,
false);
685 assert(ty->isIncomplete() &&
"recomputing record layout?");
686 lowering.lower(
false);
689 cir::RecordType baseTy;
690 if (llvm::isa<CXXRecordDecl>(rd) && !rd->
isUnion() &&
694 lowering.astRecordLayout.
getSize()) {
695 CIRRecordLowering baseLowering(*
this, rd, lowering.packed);
696 baseLowering.lower(
true);
698 baseTy = builder.getCompleteNamedRecordType(
699 baseLowering.fieldTypes, baseLowering.packed, baseLowering.padded,
705 assert(lowering.packed == baseLowering.packed &&
706 "Non-virtual and complete types must agree on packedness");
714 ty->complete(lowering.fieldTypes, lowering.packed, lowering.padded);
718 mlir::MLIRContext *mlirCtx = ty->getContext();
719 cir::ArgPassingKind apk =
722 bool hasTrivialDestructor =
true;
723 if (
auto *cxxRD = dyn_cast<CXXRecordDecl>(rd))
724 hasTrivialDestructor = cxxRD->hasTrivialDestructor();
725 const auto &astLayout = astContext.getASTRecordLayout(rd);
726 uint64_t recordAlignInBytes = astLayout.getAlignment().getQuantity();
728 cgm.addRecordLayout(ty->getName(), cir::RecordLayoutAttr::get(
729 mlirCtx, apk, hasTrivialDestructor,
730 recordAlignInBytes));
733 auto rl = std::make_unique<CIRGenRecordLayout>(
734 ty ? *ty : cir::RecordType{}, baseTy ? baseTy : cir::RecordType{},
735 (bool)lowering.zeroInitializable, (bool)lowering.zeroInitializableAsBase);
737 rl->nonVirtualBases.swap(lowering.nonVirtualBases);
738 rl->completeObjectVirtualBases.swap(lowering.virtualBases);
741 rl->fieldIdxMap.swap(lowering.fieldIdxMap);
743 rl->bitFields.swap(lowering.bitFields);
747 llvm::outs() <<
"\n*** Dumping CIRgen Record Layout\n";
748 llvm::outs() <<
"Record: ";
749 rd->
dump(llvm::outs());
750 llvm::outs() <<
"\nLayout: ";
751 rl->print(llvm::outs());
759 os <<
"<CIRecordLayout\n";
760 os <<
" CIR Type:" << completeObjectType <<
"\n";
761 if (baseSubobjectType)
762 os <<
" NonVirtualBaseCIRType:" << baseSubobjectType <<
"\n";
763 os <<
" IsZeroInitializable:" << zeroInitializable <<
"\n";
764 os <<
" BitFields:[\n";
765 std::vector<std::pair<unsigned, const CIRGenBitFieldInfo *>> bitInfo;
766 for (
auto &[
decl, info] : bitFields) {
771 bitInfo.push_back(std::make_pair(
index, &info));
773 llvm::array_pod_sort(bitInfo.begin(), bitInfo.end());
774 for (std::pair<unsigned, const CIRGenBitFieldInfo *> &info : bitInfo) {
776 info.second->
print(os);
783 os <<
"<CIRBitFieldInfo" <<
" name:" <<
name <<
" offset:" <<
offset
796void CIRRecordLowering::lowerUnion() {
798 mlir::Type storageType =
nullptr;
799 bool seenNamedMember =
false;
804 mlir::Type fieldType;
805 if (field->isBitField()) {
806 if (field->isZeroLengthBitField())
808 fieldType = getBitfieldStorageType(field->getBitWidthValue());
811 fieldType = getStorageType(field);
815 fieldIdxMap[field->getCanonicalDecl()] = 0;
823 if (!seenNamedMember) {
824 seenNamedMember = field->getIdentifier();
825 if (!seenNamedMember)
826 if (
const RecordDecl *fieldRD = field->getType()->getAsRecordDecl())
827 seenNamedMember = fieldRD->findFirstNamedDataMember();
828 if (seenNamedMember && !isZeroInitializable(field)) {
829 zeroInitializable = zeroInitializableAsBase =
false;
830 storageType = fieldType;
836 if (!zeroInitializable)
840 if (!storageType || getAlignment(fieldType) > getAlignment(storageType) ||
841 (getAlignment(fieldType) == getAlignment(storageType) &&
842 getSize(fieldType) > getSize(storageType)))
843 storageType = fieldType;
847 fieldTypes.push_back(fieldType);
851 appendPaddingBytes(layoutSize);
855 if (layoutSize < getSize(storageType))
856 storageType = getByteArrayType(layoutSize);
858 appendPaddingBytes(layoutSize - getSize(storageType));
861 if (!layoutSize.
isMultipleOf(getAlignment(storageType)))
865bool CIRRecordLowering::hasOwnStorage(
const CXXRecordDecl *
decl,
866 const CXXRecordDecl *query) {
870 for (
const auto &base :
decl->bases())
871 if (!hasOwnStorage(base.getType()->getAsCXXRecordDecl(), query))
889void CIRRecordLowering::computeVolatileBitfields() {
894 for (
auto &[field, info] : bitFields) {
898 getSizeInBits(resLTy).getQuantity())
906 const unsigned oldOffset =
910 const unsigned absoluteOffset =
914 const unsigned storageSize = getSizeInBits(resLTy).getQuantity();
917 if (info.
storageSize == storageSize && (oldOffset % storageSize == 0))
921 unsigned offset = absoluteOffset & (storageSize - 1);
926 if (offset + info.
size > storageSize)
931 offset = storageSize - (offset + info.
size);
933 const CharUnits storageOffset =
935 const CharUnits end = storageOffset +
939 const ASTRecordLayout &layout =
942 const CharUnits recordSize = layout.
getSize();
943 if (end >= recordSize)
947 bool conflict =
false;
950 if (f->isBitField() && !f->isZeroLengthBitField())
960 if (f->isZeroLengthBitField()) {
961 if (end > fOffset && storageOffset < fOffset) {
967 const CharUnits fEnd =
974 if (end < fOffset || fEnd < storageOffset)
995void CIRRecordLowering::accumulateBases() {
999 members.push_back(MemberInfo(
CharUnits::Zero(), MemberInfo::InfoKind::Base,
1000 getStorageType(baseDecl), baseDecl));
1005 if (base.isVirtual())
1009 const CXXRecordDecl *baseDecl = base.getType()->getAsCXXRecordDecl();
1013 MemberInfo::InfoKind::Base,
1014 getStorageType(baseDecl), baseDecl));
1019void CIRRecordLowering::accumulateVBases() {
1021 const CXXRecordDecl *baseDecl = base.getType()->getAsCXXRecordDecl();
1027 if (isOverlappingVBaseABI() && astContext.
isNearlyEmpty(baseDecl) &&
1030 MemberInfo(offset, MemberInfo::InfoKind::VBase,
nullptr, baseDecl));
1036 ->second.hasVtorDisp())
1039 members.push_back(MemberInfo(offset, MemberInfo::InfoKind::VBase,
1040 getStorageType(baseDecl), baseDecl));
1044void CIRRecordLowering::accumulateVPtrs() {
1046 members.push_back(MemberInfo(
CharUnits::Zero(), MemberInfo::InfoKind::VFPtr,
1051 "accumulateVPtrs: hasOwnVBPtr");
1054mlir::Type CIRRecordLowering::getVFPtrType() {
1055 return cir::VPtrType::get(builder.getContext());
Defines the clang::ASTContext interface.
static bool isAAPCS(const TargetInfo &targetInfo)
Helper method to check if the underlying ABI is AAPCS.
static cir::ArgPassingKind convertRecordArgPassingKind(RecordArgPassingKind kind)
Defines the C++ Decl subclasses, other than those for templates (found in DeclTemplate.h) and friends (in DeclFriend.h).
static void print(llvm::raw_ostream &OS, const T &V, ASTContext &ASTCtx, QualType Ty)
A collection of selectors, each with an associated kind, and an ordered collection of selectors. A selector has a kind.
llvm::TypeSize getTypeAllocSizeInBits(mlir::Type ty) const
Returns the offset in bits between successive objects of the specified type, including alignment padding.
const ASTRecordLayout & getASTRecordLayout(const RecordDecl *D) const
Get or compute information about the layout of the specified record (struct/union/class) D, which indicates its size and field position information.
bool isNearlyEmpty(const CXXRecordDecl *RD) const
int64_t toBits(CharUnits CharSize) const
Convert a size in characters to a size in bits.
const TargetInfo & getTargetInfo() const
CharUnits toCharUnitsFromBits(int64_t BitSize) const
Convert a size in bits to a size in characters.
uint64_t getCharWidth() const
Return the size of the character type, in bits.
bool hasOwnVFPtr() const
hasOwnVFPtr - Does this class provide its own virtual-function table pointer, rather than inheriting one from a primary base class?
CharUnits getAlignment() const
getAlignment - Get the record alignment in characters.
bool hasOwnVBPtr() const
hasOwnVBPtr - Does this class provide its own virtual-base table pointer, rather than inheriting one from a primary base class?
CharUnits getSize() const
getSize - Get the record size in characters.
uint64_t getFieldOffset(unsigned FieldNo) const
getFieldOffset - Get the offset of the given field index, in bits.
CharUnits getDataSize() const
getDataSize() - Get the record data size, which is the record size without tail padding, in characters.
CharUnits getBaseClassOffset(const CXXRecordDecl *Base) const
getBaseClassOffset - Get the offset, in chars, for the given base class.
CharUnits getVBaseClassOffset(const CXXRecordDecl *VBase) const
getVBaseClassOffset - Get the offset, in chars, for the given base class.
const VBaseOffsetsMapTy & getVBaseOffsetsMap() const
const CXXRecordDecl * getPrimaryBase() const
getPrimaryBase - Get the primary base for this record.
bool isPrimaryBaseVirtual() const
isPrimaryBaseVirtual - Get whether the primary base for this record is virtual or not.
CharUnits getNonVirtualSize() const
getNonVirtualSize - Get the non-virtual size (in chars) of an object, which is the size of the object without virtual bases.
DiagnosticBuilder errorNYI(SourceLocation, llvm::StringRef)
Helpers to emit "not yet implemented" error diagnostics.
const clang::CodeGenOptions & getCodeGenOpts() const
LLVM_DUMP_METHOD void dump() const
void print(raw_ostream &os) const
This class organizes the cross-module state that is used while lowering AST types to CIR types.
CIRGenModule & getCGModule() const
std::string getRecordTypeName(const clang::RecordDecl *, llvm::StringRef suffix)
clang::ASTContext & getASTContext() const
std::unique_ptr< CIRGenRecordLayout > computeRecordLayout(const clang::RecordDecl *rd, cir::RecordType *ty)
mlir::Type convertTypeForMem(clang::QualType, bool forBitField=false)
Convert type T into an mlir::Type.
Represents a C++ struct/union/class.
bool isEmpty() const
Determine whether this is an empty class in the sense of (C++11 [meta.unary.prop]).
CharUnits - This is an opaque type for sizes expressed in character units.
bool isZero() const
isZero - Test whether the quantity equals zero.
QuantityType getQuantity() const
getQuantity - Get the raw integer representation of this quantity.
static CharUnits One()
One - Construct a CharUnits quantity of one.
bool isMultipleOf(CharUnits N) const
Test whether this is a multiple of the other value.
static CharUnits fromQuantity(QuantityType Quantity)
fromQuantity - Construct a CharUnits quantity from a raw integer type.
CharUnits alignTo(const CharUnits &Align) const
alignTo - Returns the next integer (mod 2**64) that is greater than or equal to this quantity and is ...
static CharUnits Zero()
Zero - Construct a CharUnits quantity of zero.
Represents a member of a struct/union/class.
unsigned getBitWidthValue() const
Computes the bit width of this field, if this is a bit field.
FieldDecl * getCanonicalDecl() override
Retrieves the canonical declaration of this field.
StringRef getName() const
Get the name of identifier for this declaration as a StringRef.
Represents a struct/union/class.
RecordArgPassingKind getArgPassingRestrictions() const
specific_decl_iterator< FieldDecl > field_iterator
field_iterator field_begin() const
virtual unsigned getRegisterWidth() const
Return the "preferred" register width on this target.
bool hasCheapUnalignedBitFieldAccess() const
Return true iff unaligned accesses are cheap.
bool isSignedIntegerOrEnumerationType() const
Determines whether this is an integer type that is signed or an enumeration types whose underlying ty...
bool isValidFundamentalIntWidth(unsigned width)
bool isEmptyFieldForLayout(const ASTContext &context, const FieldDecl *fd)
isEmptyFieldForLayout - Return true if the field is "empty", that is, either a zero-width bit-field or a field of record type that is itself empty for layout purposes.
bool isEmptyRecordForLayout(const ASTContext &context, QualType t)
isEmptyRecordForLayout - Return true if a structure contains only empty base classes (per isEmptyRecordForLayout) and fields which are empty (per isEmptyFieldForLayout).
const internal::VariadicAllOfMatcher< Type > type
Matches Types in the clang AST.
const internal::VariadicDynCastAllOfMatcher< Decl, FieldDecl > fieldDecl
Matches field declarations.
const internal::VariadicDynCastAllOfMatcher< Decl, CXXRecordDecl > cxxRecordDecl
Matches C++ class declarations.
const internal::VariadicAllOfMatcher< Decl > decl
Matches declarations.
const internal::VariadicDynCastAllOfMatcher< Decl, RecordDecl > recordDecl
Matches class, struct, and union declarations.
The JSON file list parser is used to communicate input to InstallAPI.
bool operator<(DeclarationName LHS, DeclarationName RHS)
Ordering on two declaration names.
RecordArgPassingKind
Enum that represents the different ways arguments are passed to and returned from function calls.
@ CanPassInRegs
The argument of this type can be passed directly in registers.
@ CanNeverPassInRegs
The argument of this type cannot be passed directly in registers.
@ CannotPassInRegs
The argument of this type cannot be passed directly in registers.
Diagnostic wrappers for TextAPI types for error reporting.
void __ovld __conv barrier(cl_mem_fence_flags)
All work-items in a work-group executing the kernel on a processor must execute this function before any are allowed to continue execution beyond the barrier.
static bool zeroSizeRecordMembers()
static bool checkBitfieldClipping()
static bool astRecordDeclAttr()
unsigned offset
The offset within a contiguous run of bitfields that are represented as a single "field" within the c...
void print(llvm::raw_ostream &os) const
LLVM_DUMP_METHOD void dump() const
unsigned storageSize
The storage size in bits which should be used when accessing this bitfield.
unsigned volatileStorageSize
The storage size in bits which should be used when accessing this bitfield.
clang::CharUnits storageOffset
The offset of the bitfield storage from the start of the record.
unsigned size
The total size of the bit-field, in bits.
unsigned isSigned
Whether the bit-field is signed.
clang::CharUnits volatileStorageOffset
The offset of the bitfield storage from the start of the record.
unsigned volatileOffset
The offset within a contiguous run of bitfields that are represented as a single "field" within the c...
llvm::StringRef name
The name of a bitfield.