//===--- CIRGenExprCXX.cpp - Emit CIR Code for C++ expressions ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with code generation of C++ expressions
//
//===----------------------------------------------------------------------===//

#include "CIRGenCXXABI.h"
#include "CIRGenConstantEmitter.h"
#include "CIRGenFunction.h"

#include "clang/AST/DeclCXX.h"
#include "clang/AST/ExprCXX.h"
#include "clang/CIR/MissingFeatures.h"

using namespace clang;
using namespace clang::CIRGen;

namespace {
struct MemberCallInfo {
  RequiredArgs reqArgs;
  // Number of prefix arguments for the call. Ignores the `this` pointer.
  unsigned prefixSize;
};
} // namespace
31
33 CIRGenFunction &cgf, const CXXMethodDecl *md, mlir::Value thisPtr,
34 mlir::Value implicitParam, QualType implicitParamTy, const CallExpr *ce,
35 CallArgList &args, CallArgList *rtlArgs) {
36 assert(ce == nullptr || isa<CXXMemberCallExpr>(ce) ||
38 assert(md->isInstance() &&
39 "Trying to emit a member or operator call expr on a static method!");
40
41 // Push the this ptr.
42 const CXXRecordDecl *rd =
44 args.add(RValue::get(thisPtr), cgf.getTypes().deriveThisType(rd, md));
45
46 // If there is an implicit parameter (e.g. VTT), emit it.
47 if (implicitParam) {
48 args.add(RValue::get(implicitParam), implicitParamTy);
49 }
50
51 const auto *fpt = md->getType()->castAs<FunctionProtoType>();
52 RequiredArgs required =
54 unsigned prefixSize = args.size() - 1;
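  // Note: at this point `args` holds the `this` pointer plus any implicit
  // parameter (such as a VTT), so subtracting one yields the number of
  // implicit prefix arguments that precede the explicit call arguments.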

  // Add the rest of the call args
  if (rtlArgs) {
    // Special case: if the caller emitted the arguments right-to-left already
    // (prior to emitting the *this argument), we're done. This happens for
    // assignment operators.
    args.addFrom(*rtlArgs);
  } else if (ce) {
    // Special case: skip first argument of CXXOperatorCall (it is "this").
    unsigned argsToSkip = isa<CXXOperatorCallExpr>(ce) ? 1 : 0;
    cgf.emitCallArgs(args, fpt, drop_begin(ce->arguments(), argsToSkip),
                     ce->getDirectCallee());
  } else {
    assert(
        fpt->getNumParams() == 0 &&
        "No CallExpr specified for function with non-zero number of arguments");
  }

  return {required, prefixSize};
}

RValue CIRGenFunction::emitCXXMemberOrOperatorMemberCallExpr(
    const CallExpr *ce, const CXXMethodDecl *md, ReturnValueSlot returnValue,
    bool hasQualifier, NestedNameSpecifier qualifier, bool isArrow,
    const Expr *base) {
  assert(isa<CXXMemberCallExpr>(ce) || isa<CXXOperatorCallExpr>(ce));

  // Compute the object pointer.
  bool canUseVirtualCall = md->isVirtual() && !hasQualifier;
  const CXXMethodDecl *devirtualizedMethod = nullptr;
  assert(!cir::MissingFeatures::devirtualizeMemberFunction());

  // Note on trivial assignment
  // --------------------------
  // Classic codegen avoids generating the trivial copy/move assignment operator
  // when it isn't necessary, choosing instead to just produce IR with an
  // equivalent effect. We have chosen not to do that in CIR, instead emitting
  // trivial copy/move assignment operators and allowing later transformations
  // to optimize them away if appropriate.

  // C++17 demands that we evaluate the RHS of a (possibly-compound) assignment
  // operator before the LHS.
  CallArgList rtlArgStorage;
  CallArgList *rtlArgs = nullptr;
  if (auto *oce = dyn_cast<CXXOperatorCallExpr>(ce)) {
    if (oce->isAssignmentOp()) {
      rtlArgs = &rtlArgStorage;
      emitCallArgs(*rtlArgs, md->getType()->castAs<FunctionProtoType>(),
                   drop_begin(ce->arguments(), 1), ce->getDirectCallee(),
                   /*ParamsToSkip*/ 0);
    }
  }

  LValue thisPtr;
  if (isArrow) {
    LValueBaseInfo baseInfo;
    Address thisValue = emitPointerWithAlignment(base, &baseInfo);
    thisPtr = makeAddrLValue(thisValue, base->getType(), baseInfo);
  } else {
    thisPtr = emitLValue(base);
  }

  if (isa<CXXConstructorDecl>(md)) {
    cgm.errorNYI(ce->getSourceRange(),
                 "emitCXXMemberOrOperatorMemberCallExpr: constructor call");
    return RValue::get(nullptr);
  }

  if ((md->isTrivial() || (md->isDefaulted() && md->getParent()->isUnion())) &&
      isa<CXXDestructorDecl>(md))
    return RValue::get(nullptr);

  // Compute the function type we're calling
  const CXXMethodDecl *calleeDecl =
      devirtualizedMethod ? devirtualizedMethod : md;
  const CIRGenFunctionInfo *fInfo = nullptr;
  if (const auto *dtor = dyn_cast<CXXDestructorDecl>(calleeDecl))
    fInfo = &cgm.getTypes().arrangeCXXStructorDeclaration(
        GlobalDecl(dtor, Dtor_Complete));
  else
    fInfo = &cgm.getTypes().arrangeCXXMethodDeclaration(calleeDecl);

  cir::FuncType ty = cgm.getTypes().getFunctionType(*fInfo);


  // C++ [class.virtual]p12:
  //   Explicit qualification with the scope operator (5.1) suppresses the
  //   virtual call mechanism.
  //
  // We also don't emit a virtual call if the base expression has a record type
  // because then we know what the type is.
  bool useVirtualCall = canUseVirtualCall && !devirtualizedMethod;

  if (const auto *dtor = dyn_cast<CXXDestructorDecl>(calleeDecl)) {
    assert(ce->arg_begin() == ce->arg_end() &&
           "Destructor shouldn't have explicit parameters");
    assert(returnValue.isNull() && "Destructor shouldn't have return value");
    if (useVirtualCall) {
      cgm.getCXXABI().emitVirtualDestructorCall(*this, dtor, Dtor_Complete,
                                                thisPtr.getAddress(),
                                                cast<CXXMemberCallExpr>(ce));
    } else {
      GlobalDecl globalDecl(dtor, Dtor_Complete);
      CIRGenCallee callee;
      assert(!cir::MissingFeatures::devirtualizeDestructor());
      if (!devirtualizedMethod) {
        callee = CIRGenCallee::forDirect(
            cgm.getAddrOfCXXStructor(globalDecl, fInfo, ty), globalDecl);
      } else {
        cgm.errorNYI(ce->getSourceRange(), "devirtualized destructor call");
        return RValue::get(nullptr);
      }

      QualType thisTy =
          isArrow ? base->getType()->getPointeeType() : base->getType();
      // CIRGen does not pass CallOrInvoke here (different from OG LLVM codegen)
      // because in practice it is always null even in OG.
      emitCXXDestructorCall(globalDecl, callee, thisPtr.getPointer(), thisTy,
                            /*implicitParam=*/nullptr,
                            /*implicitParamTy=*/QualType(), ce);
    }
    return RValue::get(nullptr);
  }

  CIRGenCallee callee;
  if (useVirtualCall) {
    callee = CIRGenCallee::forVirtual(ce, md, thisPtr.getAddress(), ty);
  } else {
    if (getLangOpts().AppleKext) {
      cgm.errorNYI(ce->getSourceRange(),
                   "emitCXXMemberOrOperatorMemberCallExpr: AppleKext");
      return RValue::get(nullptr);
    }

    callee = CIRGenCallee::forDirect(cgm.getAddrOfFunction(calleeDecl, ty),
                                     GlobalDecl(calleeDecl));
  }

  if (md->isVirtual()) {
    Address newThisAddr =
        cgm.getCXXABI().adjustThisArgumentForVirtualFunctionCall(
            *this, calleeDecl, thisPtr.getAddress(), useVirtualCall);
    thisPtr.setAddress(newThisAddr);
  }

  return emitCXXMemberOrOperatorCall(
      calleeDecl, callee, returnValue, thisPtr.getPointer(),
      /*ImplicitParam=*/nullptr, QualType(), ce, rtlArgs);
}

RValue
CIRGenFunction::emitCXXOperatorMemberCallExpr(const CXXOperatorCallExpr *e,
                                              const CXXMethodDecl *md,
                                              ReturnValueSlot returnValue) {
  assert(md->isInstance() &&
         "Trying to emit a member call expr on a static method!");
  return emitCXXMemberOrOperatorMemberCallExpr(
      e, md, returnValue, /*HasQualifier=*/false, /*Qualifier=*/std::nullopt,
      /*IsArrow=*/false, e->getArg(0));
}

RValue CIRGenFunction::emitCXXMemberOrOperatorCall(
    const CXXMethodDecl *md, const CIRGenCallee &callee,
    ReturnValueSlot returnValue, mlir::Value thisPtr, mlir::Value implicitParam,
    QualType implicitParamTy, const CallExpr *ce, CallArgList *rtlArgs) {
  const auto *fpt = md->getType()->castAs<FunctionProtoType>();
  CallArgList args;
  MemberCallInfo callInfo = commonBuildCXXMemberOrOperatorCall(
      *this, md, thisPtr, implicitParam, implicitParamTy, ce, args, rtlArgs);
  auto &fnInfo = cgm.getTypes().arrangeCXXMethodCall(
      args, fpt, callInfo.reqArgs, callInfo.prefixSize);
  assert((ce || currSrcLoc) && "expected source location");
  mlir::Location loc = ce ? getLoc(ce->getExprLoc()) : *currSrcLoc;
  assert(!cir::MissingFeatures::opCallMustTail());
  return emitCall(fnInfo, callee, returnValue, args, nullptr, loc);
}

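// An array cookie is the extra storage the ABI places in front of an array
// allocated with `new[]` (typically holding the element count) so that
// `delete[]` can later run the correct number of destructors.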
static CharUnits calculateCookiePadding(CIRGenFunction &cgf,
                                        const CXXNewExpr *e) {
  if (!e->isArray())
    return CharUnits::Zero();

  // No cookie is required if the operator new[] being used is the
  // reserved placement operator new[].
  if (e->getOperatorNew()->isReservedGlobalPlacementOperator())
    return CharUnits::Zero();

  return cgf.cgm.getCXXABI().getArrayCookieSize(e);
}

static mlir::Value emitCXXNewAllocSize(CIRGenFunction &cgf, const CXXNewExpr *e,
                                       unsigned minElements,
                                       mlir::Value &numElements,
                                       mlir::Value &sizeWithoutCookie) {
  QualType type = e->getAllocatedType();
  mlir::Location loc = cgf.getLoc(e->getSourceRange());

  if (!e->isArray()) {
    CharUnits typeSize = cgf.getContext().getTypeSizeInChars(type);
    sizeWithoutCookie = cgf.getBuilder().getConstant(
        loc, cir::IntAttr::get(cgf.sizeTy, typeSize.getQuantity()));
    return sizeWithoutCookie;
  }

  // The width of size_t.
  unsigned sizeWidth = cgf.cgm.getDataLayout().getTypeSizeInBits(cgf.sizeTy);

  // The number of elements can have an arbitrary integer type;
  // essentially, we need to multiply it by a constant factor, add a
  // cookie size, and verify that the result is representable as a
  // size_t. That's just a gloss, though, and it's wrong in one
  // important way: if the count is negative, it's an error even if
  // the cookie size would bring the total size >= 0.
  //
  // If the array size is constant, Sema will have prevented negative
  // values and size overflow.

  // Compute the constant factor.
  llvm::APInt arraySizeMultiplier(sizeWidth, 1);
  while (const ConstantArrayType *cat =
             cgf.getContext().getAsConstantArrayType(type)) {
    type = cat->getElementType();
    arraySizeMultiplier *= cat->getSize();
  }

  CharUnits typeSize = cgf.getContext().getTypeSizeInChars(type);
  llvm::APInt typeSizeMultiplier(sizeWidth, typeSize.getQuantity());
  typeSizeMultiplier *= arraySizeMultiplier;
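  // For example, for `new int[n][2][3]` the allocated type is `int[2][3]`,
  // so arraySizeMultiplier is 6 and typeSizeMultiplier is 6 * sizeof(int);
  // the requested allocation is roughly n * 6 * sizeof(int) plus any cookie.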

  // Figure out the cookie size.
  llvm::APInt cookieSize(sizeWidth,
                         calculateCookiePadding(cgf, e).getQuantity());

  // This will be a size_t.
  mlir::Value size;

  // Emit the array size expression.
  // We multiply the size of all dimensions for NumElements.
  // e.g. for 'int[2][3]', ElemType is 'int' and NumElements is 6.
  const Expr *arraySize = *e->getArraySize();
  mlir::Attribute constNumElements =
      ConstantEmitter(cgf.cgm, &cgf)
          .emitAbstract(arraySize, arraySize->getType());
  if (constNumElements) {
    // Get an APInt from the constant.
    const llvm::APInt &count =
        mlir::cast<cir::IntAttr>(constNumElements).getValue();

    unsigned numElementsWidth = count.getBitWidth();
    bool hasAnyOverflow = false;

    // The equivalent code in CodeGen/CGExprCXX.cpp handles these cases as
    // overflow, but that should never happen. The size argument is implicitly
    // cast to a size_t, so it can never be negative and numElementsWidth will
    // always equal sizeWidth.
    assert(!count.isNegative() && "Expected non-negative array size");
    assert(numElementsWidth == sizeWidth &&
           "Expected a size_t array size constant");

    // Okay, compute a count at the right width.
    llvm::APInt adjustedCount = count.zextOrTrunc(sizeWidth);

    // Scale numElements by that. This might overflow, but we don't
    // care because it only overflows if allocationSize does too, and
    // if that overflows then we shouldn't use this.
    // This emits a constant that may not be used, but we can't tell here
    // whether it will be needed or not.
    numElements =
        cgf.getBuilder().getConstInt(loc, adjustedCount * arraySizeMultiplier);

    // Compute the size before cookie, and track whether it overflowed.
    bool overflow;
    llvm::APInt allocationSize =
        adjustedCount.umul_ov(typeSizeMultiplier, overflow);

    // Sema prevents us from hitting this case.
    assert(!overflow && "Overflow in array allocation size");

    // Add in the cookie, and check whether it's overflowed.
    if (cookieSize != 0) {
      // Save the current size without a cookie. This shouldn't be
      // used if there was overflow.
      sizeWithoutCookie = cgf.getBuilder().getConstInt(
          loc, allocationSize.zextOrTrunc(sizeWidth));

      allocationSize = allocationSize.uadd_ov(cookieSize, overflow);
      hasAnyOverflow |= overflow;
    }

    // On overflow, produce a -1 so operator new will fail.
    if (hasAnyOverflow) {
      size =
          cgf.getBuilder().getConstInt(loc, llvm::APInt::getAllOnes(sizeWidth));
    } else {
      size = cgf.getBuilder().getConstInt(loc, allocationSize);
    }
  } else {
    // TODO: Handle the variable size case
    cgf.cgm.errorNYI(e->getSourceRange(),
                     "emitCXXNewAllocSize: variable array size");
  }

  if (cookieSize == 0)
    sizeWithoutCookie = size;
  else
    assert(sizeWithoutCookie && "didn't set sizeWithoutCookie?");

  return size;
}

static void storeAnyExprIntoOneUnit(CIRGenFunction &cgf, const Expr *init,
                                    QualType allocType, Address newPtr,
                                    AggValueSlot::Overlap_t mayOverlap) {
  // FIXME: Refactor with emitExprAsInit.
  switch (cgf.getEvaluationKind(allocType)) {
  case cir::TEK_Scalar:
    cgf.emitScalarInit(init, cgf.getLoc(init->getSourceRange()),
                       cgf.makeAddrLValue(newPtr, allocType), false);
    return;
  case cir::TEK_Complex:
    cgf.emitComplexExprIntoLValue(init, cgf.makeAddrLValue(newPtr, allocType),
                                  /*isInit*/ true);
    return;
  case cir::TEK_Aggregate: {
    assert(!cir::MissingFeatures::aggValueSlotGC());
    AggValueSlot slot = AggValueSlot::forAddr(
        newPtr, allocType.getQualifiers(), AggValueSlot::IsDestructed,
        AggValueSlot::IsNotAliased, mayOverlap);
    cgf.emitAggExpr(init, slot);
    return;
  }
  }
  llvm_unreachable("bad evaluation kind");
}

void CIRGenFunction::emitNewArrayInitializer(
    const CXXNewExpr *e, QualType elementType, mlir::Type elementTy,
    Address beginPtr, mlir::Value numElements,
    mlir::Value allocSizeWithoutCookie) {
  // If we have a type with trivial initialization and no initializer,
  // there's nothing to do.
  if (!e->hasInitializer())
    return;

  unsigned initListElements = 0;

  const Expr *init = e->getInitializer();
  const InitListExpr *ile = dyn_cast<InitListExpr>(init);
  if (ile) {
    cgm.errorNYI(ile->getSourceRange(), "emitNewArrayInitializer: init list");
    return;
  }

  // If all elements have already been initialized, skip any further
  // initialization.
  auto constOp = mlir::dyn_cast<cir::ConstantOp>(numElements.getDefiningOp());
  if (constOp) {
    auto constIntAttr = mlir::dyn_cast<cir::IntAttr>(constOp.getValue());
    // Just skip out if the constant count is zero.
    if (constIntAttr && constIntAttr.getUInt() <= initListElements)
      return;
  }

  assert(init && "have trailing elements to initialize but no initializer");

  // If this is a constructor call, try to optimize it out, and failing that
  // emit a single loop to initialize all remaining elements.
  if (const CXXConstructExpr *cce = dyn_cast<CXXConstructExpr>(init)) {
    CXXConstructorDecl *ctor = cce->getConstructor();
    if (ctor->isTrivial()) {
      // If the new expression did not specify value-initialization, then there
      // is no initialization.
      if (!cce->requiresZeroInitialization())
        return;

      cgm.errorNYI(cce->getSourceRange(),
                   "emitNewArrayInitializer: trivial ctor zero-init");
      return;
    }

    cgm.errorNYI(cce->getSourceRange(),
                 "emitNewArrayInitializer: ctor initializer");
    return;
  }

  cgm.errorNYI(init->getSourceRange(),
               "emitNewArrayInitializer: unsupported initializer");
  return;
}

static void emitNewInitializer(CIRGenFunction &cgf, const CXXNewExpr *e,
                               QualType elementType, mlir::Type elementTy,
                               Address newPtr, mlir::Value numElements,
                               mlir::Value allocSizeWithoutCookie) {
  assert(!cir::MissingFeatures::generateDebugInfo());
  if (e->isArray()) {
    cgf.emitNewArrayInitializer(e, elementType, elementTy, newPtr, numElements,
                                allocSizeWithoutCookie);
  } else if (const Expr *init = e->getInitializer()) {
    storeAnyExprIntoOneUnit(cgf, init, e->getAllocatedType(), newPtr,
                            AggValueSlot::DoesNotOverlap);
  }
}

RValue CIRGenFunction::emitCXXDestructorCall(
    GlobalDecl dtor, const CIRGenCallee &callee, mlir::Value thisVal,
    QualType thisTy, mlir::Value implicitParam, QualType implicitParamTy,
    const CallExpr *ce) {
  const CXXMethodDecl *dtorDecl = cast<CXXMethodDecl>(dtor.getDecl());

  assert(!thisTy.isNull());
  assert(thisTy->getAsCXXRecordDecl() == dtorDecl->getParent() &&
         "Pointer/Object mixup");

  assert(!cir::MissingFeatures::addressSpace());

  CallArgList args;
  commonBuildCXXMemberOrOperatorCall(*this, dtorDecl, thisVal, implicitParam,
                                     implicitParamTy, ce, args, nullptr);
  assert((ce || dtor.getDecl()) && "expected source location provider");
  return emitCall(cgm.getTypes().arrangeCXXStructorDeclaration(dtor), callee,
                  ReturnValueSlot(), args, nullptr,
                  ce ? getLoc(ce->getExprLoc())
                     : getLoc(dtor.getDecl()->getSourceRange()));
}

RValue CIRGenFunction::emitCXXPseudoDestructorExpr(
    const CXXPseudoDestructorExpr *expr) {
  QualType destroyedType = expr->getDestroyedType();
  if (destroyedType.hasStrongOrWeakObjCLifetime()) {
    assert(!cir::MissingFeatures::objCLifetime());
    cgm.errorNYI(expr->getExprLoc(),
                 "emitCXXPseudoDestructorExpr: Objective-C lifetime is NYI");
  } else {
    // C++ [expr.pseudo]p1:
    //   The result shall only be used as the operand for the function call
    //   operator (), and the result of such a call has type void. The only
    //   effect is the evaluation of the postfix-expression before the dot or
    //   arrow.
    emitIgnoredExpr(expr->getBase());
  }

  return RValue::get(nullptr);
}

/// Emit a call to an operator new or operator delete function, as implicitly
/// created by new-expressions and delete-expressions.
static RValue emitNewDeleteCall(CIRGenFunction &cgf,
                                const FunctionDecl *calleeDecl,
                                const FunctionProtoType *calleeType,
                                const CallArgList &args) {
  cir::CIRCallOpInterface callOrTryCall;
  cir::FuncOp calleePtr = cgf.cgm.getAddrOfFunction(calleeDecl);
  CIRGenCallee callee =
      CIRGenCallee::forDirect(calleePtr, GlobalDecl(calleeDecl));
  RValue rv =
      cgf.emitCall(cgf.cgm.getTypes().arrangeFreeFunctionCall(args, calleeType),
                   callee, ReturnValueSlot(), args, &callOrTryCall);

  /// C++1y [expr.new]p10:
  ///   [In a new-expression,] an implementation is allowed to omit a call
  ///   to a replaceable global allocation function.
  ///
  /// We model such elidable calls with the 'builtin' attribute.
  assert(!cir::MissingFeatures::attributeBuiltin());
  return rv;
}

namespace {
/// Calls the given 'operator delete' on a single object.
struct CallObjectDelete final : EHScopeStack::Cleanup {
  mlir::Value ptr;
  const FunctionDecl *operatorDelete;
  QualType elementType;

  CallObjectDelete(mlir::Value ptr, const FunctionDecl *operatorDelete,
                   QualType elementType)
      : ptr(ptr), operatorDelete(operatorDelete), elementType(elementType) {}

  void emit(CIRGenFunction &cgf) override {
    cgf.emitDeleteCall(operatorDelete, ptr, elementType);
  }
};
} // namespace

/// Emit the code for deleting a single object.
static void emitObjectDelete(CIRGenFunction &cgf, const CXXDeleteExpr *de,
                             Address ptr, QualType elementType) {
  // C++11 [expr.delete]p3:
  //   If the static type of the object to be deleted is different from its
  //   dynamic type, the static type shall be a base class of the dynamic type
  //   of the object to be deleted and the static type shall have a virtual
  //   destructor or the behavior is undefined.
  assert(!cir::MissingFeatures::emitTypeCheck());

  const FunctionDecl *operatorDelete = de->getOperatorDelete();
  assert(!operatorDelete->isDestroyingOperatorDelete());

  // Find the destructor for the type, if applicable. If the
  // destructor is virtual, we'll just emit the vcall and return.
  const CXXDestructorDecl *dtor = nullptr;
  if (const auto *rd = elementType->getAsCXXRecordDecl()) {
    if (rd->hasDefinition() && !rd->hasTrivialDestructor()) {
      dtor = rd->getDestructor();

      if (dtor->isVirtual()) {
        cgf.cgm.getCXXABI().emitVirtualObjectDelete(cgf, de, ptr, elementType,
                                                    dtor);
        return;
      }
    }
  }

  // Make sure that we call delete even if the dtor throws.
  // This doesn't have to be a conditional cleanup because we're going
  // to pop it off in a second.
  cgf.ehStack.pushCleanup<CallObjectDelete>(
      NormalAndEHCleanup, ptr.getPointer(), operatorDelete, elementType);

  if (dtor) {
    cgf.emitCXXDestructorCall(dtor, Dtor_Complete,
                              /*ForVirtualBase=*/false,
                              /*Delegating=*/false, ptr, elementType);
  } else if (elementType.getObjCLifetime()) {
    assert(!cir::MissingFeatures::objCLifetime());
    cgf.cgm.errorNYI(de->getSourceRange(), "emitObjectDelete: ObjCLifetime");
  }

  // In traditional LLVM codegen, null checks are emitted to save a delete
  // call. In CIR we optimize for size by default; the null check should be
  // added by this function's callers.
  assert(!cir::MissingFeatures::emitNullCheckForDeleteCalls());

  cgf.popCleanupBlock();
}

void CIRGenFunction::emitCXXDeleteExpr(const CXXDeleteExpr *e) {
  const Expr *arg = e->getArgument();
  Address ptr = emitPointerWithAlignment(arg);

  // Null check the pointer.
  //
  // We could avoid this null check if we can determine that the object
  // destruction is trivial and doesn't require an array cookie; we can
  // unconditionally perform the operator delete call in that case. For now, we
  // assume that deleted pointers are null rarely enough that it's better to
  // keep the branch. This might be worth revisiting for a -O0 code size win.
  //
  // CIR note: emit the code-size-friendly form by default for now, as
  // mentioned in `emitObjectDelete`.
  assert(!cir::MissingFeatures::emitNullCheckForDeleteCalls());
  QualType deleteTy = e->getDestroyedType();

  // A destroying operator delete overrides the entire operation of the
  // delete expression.
  if (e->getOperatorDelete()->isDestroyingOperatorDelete()) {
    cgm.errorNYI(e->getSourceRange(),
                 "emitCXXDeleteExpr: destroying operator delete");
    return;
  }

  // We might be deleting a pointer to array.
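  // For example, when deleting an `int (*)[10]`, the destroyed type is
  // `int[10]` and its base element type is `int`.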
  deleteTy = getContext().getBaseElementType(deleteTy);
  ptr = ptr.withElementType(builder, convertTypeForMem(deleteTy));

  if (e->isArrayForm()) {
    assert(!cir::MissingFeatures::deleteArray());
    cgm.errorNYI(e->getSourceRange(), "emitCXXDeleteExpr: array delete");
    return;
  } else {
    emitObjectDelete(*this, e, ptr, deleteTy);
  }
}

mlir::Value CIRGenFunction::emitCXXNewExpr(const CXXNewExpr *e) {
  // The element type being allocated.
  QualType allocType = getContext().getBaseElementType(e->getAllocatedType());

  // 1. Build a call to the allocation function.
  FunctionDecl *allocator = e->getOperatorNew();

  // If there is a brace-initializer, we cannot allocate fewer elements than
  // there are initializers.
  unsigned minElements = 0;

  mlir::Value numElements = nullptr;
  mlir::Value allocSizeWithoutCookie = nullptr;
  mlir::Value allocSize = emitCXXNewAllocSize(
      *this, e, minElements, numElements, allocSizeWithoutCookie);
  CharUnits allocAlign = getContext().getTypeAlignInChars(allocType);

  // Emit the allocation call.
  Address allocation = Address::invalid();
  CallArgList allocatorArgs;
  if (allocator->isReservedGlobalPlacementOperator()) {
    cgm.errorNYI(e->getSourceRange(),
                 "emitCXXNewExpr: reserved global placement operator");
  } else {
    const FunctionProtoType *allocatorType =
        allocator->getType()->castAs<FunctionProtoType>();
    unsigned paramsToSkip = 0;

    // The allocation size is the first argument.
    QualType sizeType = getContext().getSizeType();
    allocatorArgs.add(RValue::get(allocSize), sizeType);
    ++paramsToSkip;
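    // paramsToSkip counts the leading allocator parameters (the size, and the
    // alignment when it is passed) that have no corresponding placement
    // argument written in the source.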

    if (allocSize != allocSizeWithoutCookie) {
      CharUnits cookieAlign = getSizeAlign(); // FIXME: Ask the ABI.
      allocAlign = std::max(allocAlign, cookieAlign);
    }

    // The allocation alignment may be passed as the second argument.
    if (e->passAlignment()) {
      cgm.errorNYI(e->getSourceRange(), "emitCXXNewExpr: pass alignment");
    }

    // FIXME: Why do we not pass a CalleeDecl here?
    emitCallArgs(allocatorArgs, allocatorType, e->placement_arguments(),
                 AbstractCallee(), paramsToSkip);
    RValue rv =
        emitNewDeleteCall(*this, allocator, allocatorType, allocatorArgs);

    // Set !heapallocsite metadata on the call to operator new.
    assert(!cir::MissingFeatures::generateDebugInfo());

    // If this was a call to a global replaceable allocation function that does
    // not take an alignment argument, the allocator is known to produce storage
    // that's suitably aligned for any object that fits, up to a known
    // threshold. Otherwise assume it's suitably aligned for the allocated type.
    CharUnits allocationAlign = allocAlign;
    if (!e->passAlignment() &&
        allocator->isReplaceableGlobalAllocationFunction()) {
      const TargetInfo &target = cgm.getASTContext().getTargetInfo();
      unsigned allocatorAlign = llvm::bit_floor(std::min<uint64_t>(
          target.getNewAlign(), getContext().getTypeSize(allocType)));
      allocationAlign = std::max(
          allocationAlign, getContext().toCharUnitsFromBits(allocatorAlign));
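      // For example, if `operator new(size_t)` guarantees 16-byte alignment,
      // an allocation of a 4-byte type is only assumed to be 4-byte aligned
      // here, while an allocation of a 32-byte type may be assumed to be
      // 16-byte aligned.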
    }

    mlir::Value allocPtr = rv.getValue();
    allocation = Address(
        allocPtr, mlir::cast<cir::PointerType>(allocPtr.getType()).getPointee(),
        allocationAlign);
  }

  // Emit a null check on the allocation result if the allocation
  // function is allowed to return null (because it has a non-throwing
  // exception spec or is the reserved placement new) and we have an
  // interesting initializer or will be running sanitizers on the
  // initialization.
  bool nullCheck = e->shouldNullCheckAllocation() &&
                   (!allocType.isPODType(getContext()) || e->hasInitializer());
  assert(!cir::MissingFeatures::exprNewNullCheck());
  if (nullCheck)
    cgm.errorNYI(e->getSourceRange(), "emitCXXNewExpr: null check");

  // If there's an operator delete, enter a cleanup to call it if an
  // exception is thrown.
  if (e->getOperatorDelete() &&
      !e->getOperatorDelete()->isReservedGlobalPlacementOperator())
    cgm.errorNYI(e->getSourceRange(), "emitCXXNewExpr: operator delete");

  if (allocSize != allocSizeWithoutCookie) {
    assert(e->isArray());
    allocation = cgm.getCXXABI().initializeArrayCookie(
        *this, allocation, numElements, e, allocType);
  }

  mlir::Type elementTy;
  if (e->isArray()) {
    // For array new, use the allocated type to handle multidimensional arrays
    // correctly.
    elementTy = convertTypeForMem(e->getAllocatedType());
  } else {
    elementTy = convertTypeForMem(allocType);
  }
  Address result = builder.createElementBitCast(getLoc(e->getSourceRange()),
                                                allocation, elementTy);

  // Pass the pointer through launder.invariant.group to avoid propagating
  // vptr information that may be included in the previous type.
  // To avoid breaking LTO with different optimization levels, we do this
  // regardless of optimization level.
  if (cgm.getCodeGenOpts().StrictVTablePointers &&
      allocator->isReservedGlobalPlacementOperator())
    cgm.errorNYI(e->getSourceRange(), "emitCXXNewExpr: strict vtable pointers");

  emitNewInitializer(*this, e, allocType, elementTy, result, numElements,
                     allocSizeWithoutCookie);
  return result.getPointer();
}

void CIRGenFunction::emitDeleteCall(const FunctionDecl *deleteFD,
                                    mlir::Value ptr, QualType deleteTy) {

  const auto *deleteFTy = deleteFD->getType()->castAs<FunctionProtoType>();
  CallArgList deleteArgs;

  UsualDeleteParams params = deleteFD->getUsualDeleteParams();
  auto paramTypeIt = deleteFTy->param_type_begin();
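  // A usual `operator delete` may take, in this order: a type-identity tag
  // (for type-aware allocation), the pointer itself, a
  // std::destroying_delete_t tag, a size_t size, and a std::align_val_t
  // alignment; the checks below walk the prototype's parameters in that order.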

  // Pass std::type_identity tag if present
  if (isTypeAwareAllocation(params.TypeAwareDelete))
    cgm.errorNYI(deleteFD->getSourceRange(),
                 "emitDeleteCall: type aware delete");

  // Pass the pointer itself.
  QualType argTy = *paramTypeIt++;
  mlir::Value deletePtr =
      builder.createBitcast(ptr.getLoc(), ptr, convertType(argTy));
  deleteArgs.add(RValue::get(deletePtr), argTy);

  // Pass the std::destroying_delete tag if present.
  if (params.DestroyingDelete)
    cgm.errorNYI(deleteFD->getSourceRange(),
                 "emitDeleteCall: destroying delete");

  // Pass the size if the delete function has a size_t parameter.
  if (params.Size) {
    QualType sizeType = *paramTypeIt++;
    CharUnits deleteTypeSize = getContext().getTypeSizeInChars(deleteTy);
    assert(mlir::isa<cir::IntType>(convertType(sizeType)) &&
           "expected cir::IntType");
    cir::ConstantOp size = builder.getConstInt(
        *currSrcLoc, convertType(sizeType), deleteTypeSize.getQuantity());

    deleteArgs.add(RValue::get(size), sizeType);
  }

  // Pass the alignment if the delete function has an align_val_t parameter.
  if (isAlignedAllocation(params.Alignment))
    cgm.errorNYI(deleteFD->getSourceRange(),
                 "emitDeleteCall: aligned allocation");

  assert(paramTypeIt == deleteFTy->param_type_end() &&
         "unknown parameter to usual delete function");

  // Emit the call to delete.
  emitNewDeleteCall(*this, deleteFD, deleteFTy, deleteArgs);
}

static mlir::Value emitDynamicCastToNull(CIRGenFunction &cgf,
                                         mlir::Location loc, QualType destTy) {
  mlir::Type destCIRTy = cgf.convertType(destTy);
  assert(mlir::isa<cir::PointerType>(destCIRTy) &&
         "result of dynamic_cast should be a ptr");

  if (!destTy->isPointerType()) {
    mlir::Region *currentRegion = cgf.getBuilder().getBlock()->getParent();
    /// C++ [expr.dynamic.cast]p9:
    ///   A failed cast to reference type throws std::bad_cast
    cgf.cgm.getCXXABI().emitBadCastCall(cgf, loc);

    // The call to bad_cast will terminate the current block. Create a new block
    // to hold any follow up code.
    cgf.getBuilder().createBlock(currentRegion, currentRegion->end());
  }

  return cgf.getBuilder().getNullPtr(destCIRTy, loc);
}

mlir::Value CIRGenFunction::emitDynamicCast(Address thisAddr,
                                            const CXXDynamicCastExpr *dce) {
  mlir::Location loc = getLoc(dce->getSourceRange());

  cgm.emitExplicitCastExprType(dce, this);
  QualType destTy = dce->getTypeAsWritten();
  QualType srcTy = dce->getSubExpr()->getType();

  // C++ [expr.dynamic.cast]p7:
  //   If T is "pointer to cv void," then the result is a pointer to the most
  //   derived object pointed to by v.
  bool isDynCastToVoid = destTy->isVoidPointerType();
  bool isRefCast = destTy->isReferenceType();

  QualType srcRecordTy;
  QualType destRecordTy;
  if (isDynCastToVoid) {
    srcRecordTy = srcTy->getPointeeType();
    // No destRecordTy.
  } else if (const PointerType *destPTy = destTy->getAs<PointerType>()) {
    srcRecordTy = srcTy->castAs<PointerType>()->getPointeeType();
    destRecordTy = destPTy->getPointeeType();
  } else {
    srcRecordTy = srcTy;
    destRecordTy = destTy->castAs<ReferenceType>()->getPointeeType();
  }
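  // For example, `dynamic_cast<Derived *>(basePtr)` takes the pointer branch
  // above, while `dynamic_cast<Derived &>(baseRef)` takes the reference branch
  // and must throw std::bad_cast on failure.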

  assert(srcRecordTy->isRecordType() && "source type must be a record type!");

  if (dce->isAlwaysNull())
    return emitDynamicCastToNull(*this, loc, destTy);

  auto destCirTy = mlir::cast<cir::PointerType>(convertType(destTy));
  return cgm.getCXXABI().emitDynamicCast(*this, loc, srcRecordTy, destRecordTy,
                                         destCirTy, isRefCast, thisAddr);
}