clang  3.9.0svn
CGBlocks.cpp
00001 //===--- CGBlocks.cpp - Emit LLVM Code for declarations ---------*- C++ -*-===//
00002 //
00003 //                     The LLVM Compiler Infrastructure
00004 //
00005 // This file is distributed under the University of Illinois Open Source
00006 // License. See LICENSE.TXT for details.
00007 //
00008 //===----------------------------------------------------------------------===//
00009 //
00010 // This contains code to emit blocks.
00011 //
00012 //===----------------------------------------------------------------------===//
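//
// For orientation, a minimal sketch of the source-level construct this file
// lowers, using standard blocks syntax (the names are illustrative only):
//
//   int multiplier = 7;
//   int (^scale)(int) = ^(int x) { return x * multiplier; };
//   int fortyTwo = scale(6);  // invoking the block; 'multiplier' was captured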
00013 
00014 #include "CGBlocks.h"
00015 #include "CGDebugInfo.h"
00016 #include "CGObjCRuntime.h"
00017 #include "CodeGenFunction.h"
00018 #include "CodeGenModule.h"
00019 #include "clang/AST/DeclObjC.h"
00020 #include "llvm/ADT/SmallSet.h"
00021 #include "llvm/IR/CallSite.h"
00022 #include "llvm/IR/DataLayout.h"
00023 #include "llvm/IR/Module.h"
00024 #include <algorithm>
00025 #include <cstdio>
00026 
00027 using namespace clang;
00028 using namespace CodeGen;
00029 
00030 CGBlockInfo::CGBlockInfo(const BlockDecl *block, StringRef name)
00031   : Name(name), CXXThisIndex(0), CanBeGlobal(false), NeedsCopyDispose(false),
00032     HasCXXObject(false), UsesStret(false), HasCapturedVariableLayout(false),
00033     LocalAddress(Address::invalid()), StructureType(nullptr), Block(block),
00034     DominatingIP(nullptr) {
00035 
00036   // Skip asm prefix, if any.  'name' is usually taken directly from
00037   // the mangled name of the enclosing function.
00038   if (!name.empty() && name[0] == '\01')
00039     name = name.substr(1);
00040 }
00041 
00042 // Anchor the vtable to this translation unit.
00043 BlockByrefHelpers::~BlockByrefHelpers() {}
00044 
00045 /// Build the given block as a global block.
00046 static llvm::Constant *buildGlobalBlock(CodeGenModule &CGM,
00047                                         const CGBlockInfo &blockInfo,
00048                                         llvm::Constant *blockFn);
00049 
00050 /// Build the helper function to copy a block.
00051 static llvm::Constant *buildCopyHelper(CodeGenModule &CGM,
00052                                        const CGBlockInfo &blockInfo) {
00053   return CodeGenFunction(CGM).GenerateCopyHelperFunction(blockInfo);
00054 }
00055 
00056 /// Build the helper function to dispose of a block.
00057 static llvm::Constant *buildDisposeHelper(CodeGenModule &CGM,
00058                                           const CGBlockInfo &blockInfo) {
00059   return CodeGenFunction(CGM).GenerateDestroyHelperFunction(blockInfo);
00060 }
00061 
00062 /// buildBlockDescriptor - Build the block descriptor meta-data for a block.
00063 /// buildBlockDescriptor is accessed from the 5th field of the Block_literal
00064 /// meta-data and contains stationary information about the block literal.
00065 /// Its definition will have 4 (or optionally 6) words.
00066 /// \code
00067 /// struct Block_descriptor {
00068 ///   unsigned long reserved;
00069 ///   unsigned long size;  // size of Block_literal metadata in bytes.
00070 ///   void *copy_func_helper_decl;  // optional copy helper.
00071 ///   void *destroy_func_decl; // optional destructor helper.
00072 ///   void *block_method_encoding_address; // @encode for block literal signature.
00073 ///   void *block_layout_info; // encoding of captured block variables.
00074 /// };
00075 /// \endcode
00076 static llvm::Constant *buildBlockDescriptor(CodeGenModule &CGM,
00077                                             const CGBlockInfo &blockInfo) {
00078   ASTContext &C = CGM.getContext();
00079 
00080   llvm::Type *ulong = CGM.getTypes().ConvertType(C.UnsignedLongTy);
00081   llvm::Type *i8p = nullptr;
00082   if (CGM.getLangOpts().OpenCL)
00083     i8p = 
00084       llvm::Type::getInt8PtrTy(
00085            CGM.getLLVMContext(), C.getTargetAddressSpace(LangAS::opencl_constant));
00086   else
00087     i8p = CGM.getTypes().ConvertType(C.VoidPtrTy);
00088 
00089   SmallVector<llvm::Constant*, 6> elements;
00090 
00091   // reserved
00092   elements.push_back(llvm::ConstantInt::get(ulong, 0));
00093 
00094   // Size
00095   // FIXME: What is the right way to say this doesn't fit?  We should give
00096   // a user diagnostic in that case.  A better fix would be to change the
00097   // API to size_t.
00098   elements.push_back(llvm::ConstantInt::get(ulong,
00099                                             blockInfo.BlockSize.getQuantity()));
00100 
00101   // Optional copy/dispose helpers.
00102   if (blockInfo.NeedsCopyDispose) {
00103     // copy_func_helper_decl
00104     elements.push_back(buildCopyHelper(CGM, blockInfo));
00105 
00106     // destroy_func_decl
00107     elements.push_back(buildDisposeHelper(CGM, blockInfo));
00108   }
00109 
00110   // Signature.  Mandatory ObjC-style method descriptor @encode sequence.
00111   std::string typeAtEncoding =
00112     CGM.getContext().getObjCEncodingForBlock(blockInfo.getBlockExpr());
00113   elements.push_back(llvm::ConstantExpr::getBitCast(
00114     CGM.GetAddrOfConstantCString(typeAtEncoding).getPointer(), i8p));
00115   
00116   // GC layout.
00117   if (C.getLangOpts().ObjC1) {
00118     if (CGM.getLangOpts().getGC() != LangOptions::NonGC)
00119       elements.push_back(CGM.getObjCRuntime().BuildGCBlockLayout(CGM, blockInfo));
00120     else
00121       elements.push_back(CGM.getObjCRuntime().BuildRCBlockLayout(CGM, blockInfo));
00122   }
00123   else
00124     elements.push_back(llvm::Constant::getNullValue(i8p));
00125 
00126   llvm::Constant *init = llvm::ConstantStruct::getAnon(elements);
00127 
00128   llvm::GlobalVariable *global =
00129     new llvm::GlobalVariable(CGM.getModule(), init->getType(), true,
00130                              llvm::GlobalValue::InternalLinkage,
00131                              init, "__block_descriptor_tmp");
00132 
00133   return llvm::ConstantExpr::getBitCast(global, CGM.getBlockDescriptorType());
00134 }
00135 
00136 /*
00137   Purely notional variadic template describing the layout of a block.
00138 
00139   template <class _ResultType, class... _ParamTypes, class... _CaptureTypes>
00140   struct Block_literal {
00141     /// Initialized to one of:
00142     ///   extern void *_NSConcreteStackBlock[];
00143     ///   extern void *_NSConcreteGlobalBlock[];
00144     ///
00145     /// In theory, we could start one off malloc'ed by setting
00146     /// BLOCK_NEEDS_FREE, giving it a refcount of 1, and using
00147     /// this isa:
00148     ///   extern void *_NSConcreteMallocBlock[];
00149     struct objc_class *isa;
00150 
00151     /// These are the flags (with corresponding bit number) that the
00152     /// compiler is actually supposed to know about.
00153     ///  25. BLOCK_HAS_COPY_DISPOSE - indicates that the block
00154     ///   descriptor provides copy and dispose helper functions
00155     ///  26. BLOCK_HAS_CXX_OBJ - indicates that there's a captured
00156     ///   object with a nontrivial destructor or copy constructor
00157     ///  28. BLOCK_IS_GLOBAL - indicates that the block is allocated
00158     ///   as global memory
00159     ///  29. BLOCK_USE_STRET - indicates that the block function
00160     ///   uses stret, which objc_msgSend needs to know about
00161     ///  30. BLOCK_HAS_SIGNATURE - indicates that the block has an
00162     ///   @encoded signature string
00163     /// And we're not supposed to manipulate these:
00164     ///  24. BLOCK_NEEDS_FREE - indicates that the block has been moved
00165     ///   to malloc'ed memory
00166     ///  27. BLOCK_IS_GC - indicates that the block has been moved
00167     ///   to GC-allocated memory
00168     /// Additionally, the bottom 16 bits are a reference count which
00169     /// should be zero on the stack.
00170     int flags;
00171 
00172     /// Reserved;  should be zero-initialized.
00173     int reserved;
00174 
00175     /// Function pointer generated from block literal.
00176     _ResultType (*invoke)(Block_literal *, _ParamTypes...);
00177 
00178     /// Block description metadata generated from block literal.
00179     struct Block_descriptor *block_descriptor;
00180 
00181     /// Captured values follow.
00182     _CaptureTypes captures...;
00183   };
00184  */
00185 
00186 /// The number of fields in a block header.
00187 const unsigned BlockHeaderSize = 5;
00188 
00189 namespace {
00190   /// A chunk of data that we actually have to capture in the block.
00191   struct BlockLayoutChunk {
00192     CharUnits Alignment;
00193     CharUnits Size;
00194     Qualifiers::ObjCLifetime Lifetime;
00195     const BlockDecl::Capture *Capture; // null for 'this'
00196     llvm::Type *Type;
00197 
00198     BlockLayoutChunk(CharUnits align, CharUnits size,
00199                      Qualifiers::ObjCLifetime lifetime,
00200                      const BlockDecl::Capture *capture,
00201                      llvm::Type *type)
00202       : Alignment(align), Size(size), Lifetime(lifetime),
00203         Capture(capture), Type(type) {}
00204 
00205     /// Tell the block info that this chunk has the given field index.
00206     void setIndex(CGBlockInfo &info, unsigned index, CharUnits offset) {
00207       if (!Capture) {
00208         info.CXXThisIndex = index;
00209         info.CXXThisOffset = offset;
00210       } else {
00211         info.Captures.insert({Capture->getVariable(),
00212                               CGBlockInfo::Capture::makeIndex(index, offset)});
00213       }
00214     }
00215   };
00216 
00217   /// Order by 1) all __strong together 2) next, all byref together 3) next,
00218   /// all __weak together. Preserve descending alignment in all situations.
00219   bool operator<(const BlockLayoutChunk &left, const BlockLayoutChunk &right) {
00220     if (left.Alignment != right.Alignment)
00221       return left.Alignment > right.Alignment;
00222 
00223     auto getPrefOrder = [](const BlockLayoutChunk &chunk) {
00224       if (chunk.Capture && chunk.Capture->isByRef())
00225         return 1;
00226       if (chunk.Lifetime == Qualifiers::OCL_Strong)
00227         return 0;
00228       if (chunk.Lifetime == Qualifiers::OCL_Weak)
00229         return 2;
00230       return 3;
00231     };
00232 
00233     return getPrefOrder(left) < getPrefOrder(right);
00234   }
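  // For example, among captures with equal alignment this yields the order
  // __strong, then __block (byref), then __weak, then everything else;
  // higher alignment always sorts first.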
00235 } // end anonymous namespace
00236 
00237 /// Determines if the given type is safe for constant capture in C++.
00238 static bool isSafeForCXXConstantCapture(QualType type) {
00239   const RecordType *recordType =
00240     type->getBaseElementTypeUnsafe()->getAs<RecordType>();
00241 
00242   // Only records can be unsafe.
00243   if (!recordType) return true;
00244 
00245   const auto *record = cast<CXXRecordDecl>(recordType->getDecl());
00246 
00247   // Maintain semantics for classes with non-trivial dtors or copy ctors.
00248   if (!record->hasTrivialDestructor()) return false;
00249   if (record->hasNonTrivialCopyConstructor()) return false;
00250 
00251   // Otherwise, we just have to make sure there aren't any mutable
00252   // fields that might have changed since initialization.
00253   return !record->hasMutableFields();
00254 }
00255 
00256 /// It is illegal to modify a const object after initialization.
00257 /// Therefore, if a const object has a constant initializer, we don't
00258 /// actually need to keep storage for it in the block; we'll just
00259 /// rematerialize it at the start of the block function.  This is
00260 /// acceptable because we make no promises about address stability of
00261 /// captured variables.
00262 static llvm::Constant *tryCaptureAsConstant(CodeGenModule &CGM,
00263                                             CodeGenFunction *CGF,
00264                                             const VarDecl *var) {
00265   QualType type = var->getType();
00266 
00267   // We can only do this if the variable is const.
00268   if (!type.isConstQualified()) return nullptr;
00269 
00270   // Furthermore, in C++ we have to worry about mutable fields:
00271   // C++ [dcl.type.cv]p4:
00272   //   Except that any class member declared mutable can be
00273   //   modified, any attempt to modify a const object during its
00274   //   lifetime results in undefined behavior.
00275   if (CGM.getLangOpts().CPlusPlus && !isSafeForCXXConstantCapture(type))
00276     return nullptr;
00277 
00278   // If the variable doesn't have any initializer (shouldn't this be
00279   // invalid?), it's not clear what we should do.  Maybe capture as
00280   // zero?
00281   const Expr *init = var->getInit();
00282   if (!init) return nullptr;
00283 
00284   return CGM.EmitConstantInit(*var, CGF);
00285 }
00286 
00287 /// Get the low bit of a nonzero character count.  This is the
00288 /// alignment of the nth byte if the 0th byte is universally aligned.
00289 static CharUnits getLowBit(CharUnits v) {
00290   return CharUnits::fromQuantity(v.getQuantity() & (~v.getQuantity() + 1));
00291 }
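// For example, getLowBit(CharUnits::fromQuantity(12)) is 4: if byte 0 is
// maximally aligned, byte 12 (binary 1100) can only be assumed to have the
// alignment of its lowest set bit, i.e. 4.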
00292 
00293 static void initializeForBlockHeader(CodeGenModule &CGM, CGBlockInfo &info,
00294                              SmallVectorImpl<llvm::Type*> &elementTypes) {
00295   // The header is basically 'struct { void *; int; int; void *; void *; }'.
00296   // Assert that that struct is packed.
00297   assert(CGM.getIntSize() <= CGM.getPointerSize());
00298   assert(CGM.getIntAlign() <= CGM.getPointerAlign());
00299   assert((2 * CGM.getIntSize()).isMultipleOf(CGM.getPointerAlign()));
00300 
00301   info.BlockAlign = CGM.getPointerAlign();
00302   info.BlockSize = 3 * CGM.getPointerSize() + 2 * CGM.getIntSize();
00303 
00304   assert(elementTypes.empty());
00305   elementTypes.push_back(CGM.VoidPtrTy);
00306   elementTypes.push_back(CGM.IntTy);
00307   elementTypes.push_back(CGM.IntTy);
00308   elementTypes.push_back(CGM.VoidPtrTy);
00309   elementTypes.push_back(CGM.getBlockDescriptorType());
00310 
00311   assert(elementTypes.size() == BlockHeaderSize);
00312 }
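// For example, on a typical LP64 target (8-byte pointers, 4-byte ints) the
// header laid out above is 3*8 + 2*4 = 32 bytes: isa, flags, reserved, the
// invoke pointer, and the descriptor pointer, in that order.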
00313 
00314 /// Compute the layout of the given block.  Attempts to lay the block
00315 /// out with minimal space requirements.
00316 static void computeBlockInfo(CodeGenModule &CGM, CodeGenFunction *CGF,
00317                              CGBlockInfo &info) {
00318   ASTContext &C = CGM.getContext();
00319   const BlockDecl *block = info.getBlockDecl();
00320 
00321   SmallVector<llvm::Type*, 8> elementTypes;
00322   initializeForBlockHeader(CGM, info, elementTypes);
00323 
00324   if (!block->hasCaptures()) {
00325     info.StructureType =
00326       llvm::StructType::get(CGM.getLLVMContext(), elementTypes, true);
00327     info.CanBeGlobal = true;
00328     return;
00329   }
00330   else if (C.getLangOpts().ObjC1 &&
00331            CGM.getLangOpts().getGC() == LangOptions::NonGC)
00332     info.HasCapturedVariableLayout = true;
00333   
00334   // Collect the layout chunks.
00335   SmallVector<BlockLayoutChunk, 16> layout;
00336   layout.reserve(block->capturesCXXThis() +
00337                  (block->capture_end() - block->capture_begin()));
00338 
00339   CharUnits maxFieldAlign;
00340 
00341   // First, 'this'.
00342   if (block->capturesCXXThis()) {
00343     assert(CGF && CGF->CurFuncDecl && isa<CXXMethodDecl>(CGF->CurFuncDecl) &&
00344            "Can't capture 'this' outside a method");
00345     QualType thisType = cast<CXXMethodDecl>(CGF->CurFuncDecl)->getThisType(C);
00346 
00347     // Theoretically, this could be in a different address space, so
00348     // don't assume standard pointer size/align.
00349     llvm::Type *llvmType = CGM.getTypes().ConvertType(thisType);
00350     std::pair<CharUnits,CharUnits> tinfo
00351       = CGM.getContext().getTypeInfoInChars(thisType);
00352     maxFieldAlign = std::max(maxFieldAlign, tinfo.second);
00353 
00354     layout.push_back(BlockLayoutChunk(tinfo.second, tinfo.first,
00355                                       Qualifiers::OCL_None,
00356                                       nullptr, llvmType));
00357   }
00358 
00359   // Next, all the block captures.
00360   for (const auto &CI : block->captures()) {
00361     const VarDecl *variable = CI.getVariable();
00362 
00363     if (CI.isByRef()) {
00364       // We have to copy/dispose of the __block reference.
00365       info.NeedsCopyDispose = true;
00366 
00367       // Just use void* instead of a pointer to the byref type.
00368       CharUnits align = CGM.getPointerAlign();
00369       maxFieldAlign = std::max(maxFieldAlign, align);
00370 
00371       layout.push_back(BlockLayoutChunk(align, CGM.getPointerSize(),
00372                                         Qualifiers::OCL_None, &CI,
00373                                         CGM.VoidPtrTy));
00374       continue;
00375     }
00376 
00377     // Otherwise, build a layout chunk with the size and alignment of
00378     // the declaration.
00379     if (llvm::Constant *constant = tryCaptureAsConstant(CGM, CGF, variable)) {
00380       info.Captures[variable] = CGBlockInfo::Capture::makeConstant(constant);
00381       continue;
00382     }
00383 
00384     // If we have a lifetime qualifier, honor it for capture purposes.
00385     // That includes *not* copying it if it's __unsafe_unretained.
00386     Qualifiers::ObjCLifetime lifetime =
00387       variable->getType().getObjCLifetime();
00388     if (lifetime) {
00389       switch (lifetime) {
00390       case Qualifiers::OCL_None: llvm_unreachable("impossible");
00391       case Qualifiers::OCL_ExplicitNone:
00392       case Qualifiers::OCL_Autoreleasing:
00393         break;
00394 
00395       case Qualifiers::OCL_Strong:
00396       case Qualifiers::OCL_Weak:
00397         info.NeedsCopyDispose = true;
00398       }
00399 
00400     // Block pointers require copy/dispose.  So do Objective-C pointers.
00401     } else if (variable->getType()->isObjCRetainableType()) {
00402       // But honor the inert __unsafe_unretained qualifier, which doesn't
00403       // actually make it into the type system.
00404       if (variable->getType()->isObjCInertUnsafeUnretainedType()) {
00405         lifetime = Qualifiers::OCL_ExplicitNone;
00406       } else {
00407         info.NeedsCopyDispose = true;
00408         // Used for MRR (manual retain/release) below.
00409         lifetime = Qualifiers::OCL_Strong;
00410       }
00411 
00412     // So do types that require non-trivial copy construction.
00413     } else if (CI.hasCopyExpr()) {
00414       info.NeedsCopyDispose = true;
00415       info.HasCXXObject = true;
00416 
00417     // And so do types with destructors.
00418     } else if (CGM.getLangOpts().CPlusPlus) {
00419       if (const CXXRecordDecl *record =
00420             variable->getType()->getAsCXXRecordDecl()) {
00421         if (!record->hasTrivialDestructor()) {
00422           info.HasCXXObject = true;
00423           info.NeedsCopyDispose = true;
00424         }
00425       }
00426     }
00427 
00428     QualType VT = variable->getType();
00429     CharUnits size = C.getTypeSizeInChars(VT);
00430     CharUnits align = C.getDeclAlign(variable);
00431     
00432     maxFieldAlign = std::max(maxFieldAlign, align);
00433 
00434     llvm::Type *llvmType =
00435       CGM.getTypes().ConvertTypeForMem(VT);
00436     
00437     layout.push_back(BlockLayoutChunk(align, size, lifetime, &CI, llvmType));
00438   }
00439 
00440   // If that was everything, we're done here.
00441   if (layout.empty()) {
00442     info.StructureType =
00443       llvm::StructType::get(CGM.getLLVMContext(), elementTypes, true);
00444     info.CanBeGlobal = true;
00445     return;
00446   }
00447 
00448   // Sort the layout by alignment.  We have to use a stable sort here
00449   // to get reproducible results.  There should probably be an
00450   // llvm::array_pod_stable_sort.
00451   std::stable_sort(layout.begin(), layout.end());
00452   
00453   // Needed for blocks layout info.
00454   info.BlockHeaderForcedGapOffset = info.BlockSize;
00455   info.BlockHeaderForcedGapSize = CharUnits::Zero();
00456   
00457   CharUnits &blockSize = info.BlockSize;
00458   info.BlockAlign = std::max(maxFieldAlign, info.BlockAlign);
00459 
00460   // Assuming that the first byte in the header is maximally aligned,
00461   // get the alignment of the first byte following the header.
00462   CharUnits endAlign = getLowBit(blockSize);
00463 
00464   // If the end of the header isn't satisfactorily aligned for the
00465   // maximum thing, look for things that are okay with the header-end
00466   // alignment, and keep appending them until we get something that's
00467   // aligned right.  This algorithm is only guaranteed optimal if
00468   // that condition is satisfied at some point; otherwise we can get
00469   // things like:
00470   //   header                 // next byte has alignment 4
00471   //   something_with_size_5; // next byte has alignment 1
00472   //   something_with_alignment_8;
00473   // which has 7 bytes of padding, as opposed to the naive solution
00474   // which might have less (?).
00475   if (endAlign < maxFieldAlign) {
00476     SmallVectorImpl<BlockLayoutChunk>::iterator
00477       li = layout.begin() + 1, le = layout.end();
00478 
00479     // Look for something that the header end is already
00480     // satisfactorily aligned for.
00481     for (; li != le && endAlign < li->Alignment; ++li)
00482       ;
00483 
00484     // If we found something that's naturally aligned for the end of
00485     // the header, keep adding things...
00486     if (li != le) {
00487       SmallVectorImpl<BlockLayoutChunk>::iterator first = li;
00488       for (; li != le; ++li) {
00489         assert(endAlign >= li->Alignment);
00490 
00491         li->setIndex(info, elementTypes.size(), blockSize);
00492         elementTypes.push_back(li->Type);
00493         blockSize += li->Size;
00494         endAlign = getLowBit(blockSize);
00495 
00496         // ...until we get to the alignment of the maximum field.
00497         if (endAlign >= maxFieldAlign) {
00498           break;
00499         }
00500       }
00501       // Don't re-append everything we just appended.
00502       layout.erase(first, li);
00503     }
00504   }
00505 
00506   assert(endAlign == getLowBit(blockSize));
00507   
00508   // At this point, we just have to add padding if the end align still
00509   // isn't aligned right.
00510   if (endAlign < maxFieldAlign) {
00511     CharUnits newBlockSize = blockSize.alignTo(maxFieldAlign);
00512     CharUnits padding = newBlockSize - blockSize;
00513 
00514     // If we haven't yet added any fields, remember that there was an
00515     // initial gap; this needs to go into the block layout bitmap.
00516     if (blockSize == info.BlockHeaderForcedGapOffset) {
00517       info.BlockHeaderForcedGapSize = padding;
00518     }
00519 
00520     elementTypes.push_back(llvm::ArrayType::get(CGM.Int8Ty,
00521                                                 padding.getQuantity()));
00522     blockSize = newBlockSize;
00523     endAlign = getLowBit(blockSize); // might be > maxFieldAlign
00524   }
00525 
00526   assert(endAlign >= maxFieldAlign);
00527   assert(endAlign == getLowBit(blockSize));
00528   // Slam everything else on now.  This works because they have
00529   // strictly decreasing alignment and we expect that size is always a
00530   // multiple of alignment.
00531   for (SmallVectorImpl<BlockLayoutChunk>::iterator
00532          li = layout.begin(), le = layout.end(); li != le; ++li) {
00533     if (endAlign < li->Alignment) {
00534       // The size may not be a multiple of the alignment.  This can only
00535       // happen with an over-aligned variable.  We will be adding a padding
00536       // field to make the size a multiple of the alignment.
00537       CharUnits padding = li->Alignment - endAlign;
00538       elementTypes.push_back(llvm::ArrayType::get(CGM.Int8Ty,
00539                                                   padding.getQuantity()));
00540       blockSize += padding;
00541       endAlign = getLowBit(blockSize);
00542     }
00543     assert(endAlign >= li->Alignment);
00544     li->setIndex(info, elementTypes.size(), blockSize);
00545     elementTypes.push_back(li->Type);
00546     blockSize += li->Size;
00547     endAlign = getLowBit(blockSize);
00548   }
00549 
00550   info.StructureType =
00551     llvm::StructType::get(CGM.getLLVMContext(), elementTypes, true);
00552 }
00553 
00554 /// Enter the scope of a block.  This should be run at the entrance to
00555 /// a full-expression so that the block's cleanups are pushed at the
00556 /// right place in the stack.
00557 static void enterBlockScope(CodeGenFunction &CGF, BlockDecl *block) {
00558   assert(CGF.HaveInsertPoint());
00559 
00560   // Allocate the block info and place it at the head of the list.
00561   CGBlockInfo &blockInfo =
00562     *new CGBlockInfo(block, CGF.CurFn->getName());
00563   blockInfo.NextBlockInfo = CGF.FirstBlockInfo;
00564   CGF.FirstBlockInfo = &blockInfo;
00565 
00566   // Compute information about the layout, etc., of this block,
00567   // pushing cleanups as necessary.
00568   computeBlockInfo(CGF.CGM, &CGF, blockInfo);
00569 
00570   // Nothing else to do if it can be global.
00571   if (blockInfo.CanBeGlobal) return;
00572 
00573   // Make the allocation for the block.
00574   blockInfo.LocalAddress = CGF.CreateTempAlloca(blockInfo.StructureType,
00575                                                 blockInfo.BlockAlign, "block");
00576 
00577   // If there are cleanups to emit, enter them (but inactive).
00578   if (!blockInfo.NeedsCopyDispose) return;
00579 
00580   // Walk through the captures (in order) and find the ones not
00581   // captured by constant.
00582   for (const auto &CI : block->captures()) {
00583     // Ignore __block captures; there's nothing special in the
00584     // on-stack block that we need to do for them.
00585     if (CI.isByRef()) continue;
00586 
00587     // Ignore variables that are constant-captured.
00588     const VarDecl *variable = CI.getVariable();
00589     CGBlockInfo::Capture &capture = blockInfo.getCapture(variable);
00590     if (capture.isConstant()) continue;
00591 
00592     // Ignore objects that aren't destructed.
00593     QualType::DestructionKind dtorKind =
00594       variable->getType().isDestructedType();
00595     if (dtorKind == QualType::DK_none) continue;
00596 
00597     CodeGenFunction::Destroyer *destroyer;
00598 
00599     // Block captures count as local values and have imprecise semantics.
00600     // They also can't be arrays, so we don't need to worry about that.
00601     if (dtorKind == QualType::DK_objc_strong_lifetime) {
00602       destroyer = CodeGenFunction::destroyARCStrongImprecise;
00603     } else {
00604       destroyer = CGF.getDestroyer(dtorKind);
00605     }
00606 
00607     // GEP down to the address.
00608     Address addr = CGF.Builder.CreateStructGEP(blockInfo.LocalAddress,
00609                                                capture.getIndex(),
00610                                                capture.getOffset());
00611 
00612     // We can use that GEP as the dominating IP.
00613     if (!blockInfo.DominatingIP)
00614       blockInfo.DominatingIP = cast<llvm::Instruction>(addr.getPointer());
00615 
00616     CleanupKind cleanupKind = InactiveNormalCleanup;
00617     bool useArrayEHCleanup = CGF.needsEHCleanup(dtorKind);
00618     if (useArrayEHCleanup) 
00619       cleanupKind = InactiveNormalAndEHCleanup;
00620 
00621     CGF.pushDestroy(cleanupKind, addr, variable->getType(),
00622                     destroyer, useArrayEHCleanup);
00623 
00624     // Remember where that cleanup was.
00625     capture.setCleanup(CGF.EHStack.stable_begin());
00626   }
00627 }
00628 
00629 /// Enter a full-expression with a non-trivial number of objects to
00630 /// clean up.  This is in this file because, at the moment, the only
00631 /// kind of cleanup object is a BlockDecl*.
00632 void CodeGenFunction::enterNonTrivialFullExpression(const ExprWithCleanups *E) {
00633   assert(E->getNumObjects() != 0);
00634   ArrayRef<ExprWithCleanups::CleanupObject> cleanups = E->getObjects();
00635   for (ArrayRef<ExprWithCleanups::CleanupObject>::iterator
00636          i = cleanups.begin(), e = cleanups.end(); i != e; ++i) {
00637     enterBlockScope(*this, *i);
00638   }
00639 }
00640 
00641 /// Find the layout for the given block in a linked list and remove it.
00642 static CGBlockInfo *findAndRemoveBlockInfo(CGBlockInfo **head,
00643                                            const BlockDecl *block) {
00644   while (true) {
00645     assert(head && *head);
00646     CGBlockInfo *cur = *head;
00647 
00648     // If this is the block we're looking for, splice it out of the list.
00649     if (cur->getBlockDecl() == block) {
00650       *head = cur->NextBlockInfo;
00651       return cur;
00652     }
00653 
00654     head = &cur->NextBlockInfo;
00655   }
00656 }
00657 
00658 /// Destroy a chain of block layouts.
00659 void CodeGenFunction::destroyBlockInfos(CGBlockInfo *head) {
00660   assert(head && "destroying an empty chain");
00661   do {
00662     CGBlockInfo *cur = head;
00663     head = cur->NextBlockInfo;
00664     delete cur;
00665   } while (head != nullptr);
00666 }
00667 
00668 /// Emit a block literal expression in the current function.
00669 llvm::Value *CodeGenFunction::EmitBlockLiteral(const BlockExpr *blockExpr) {
00670   // If the block has no captures, we won't have a pre-computed
00671   // layout for it.
00672   if (!blockExpr->getBlockDecl()->hasCaptures()) {
00673     CGBlockInfo blockInfo(blockExpr->getBlockDecl(), CurFn->getName());
00674     computeBlockInfo(CGM, this, blockInfo);
00675     blockInfo.BlockExpression = blockExpr;
00676     return EmitBlockLiteral(blockInfo);
00677   }
00678 
00679   // Find the block info for this block and take ownership of it.
00680   std::unique_ptr<CGBlockInfo> blockInfo;
00681   blockInfo.reset(findAndRemoveBlockInfo(&FirstBlockInfo,
00682                                          blockExpr->getBlockDecl()));
00683 
00684   blockInfo->BlockExpression = blockExpr;
00685   return EmitBlockLiteral(*blockInfo);
00686 }
00687 
00688 llvm::Value *CodeGenFunction::EmitBlockLiteral(const CGBlockInfo &blockInfo) {
00689   // Using the computed layout, generate the actual block function.
00690   bool isLambdaConv = blockInfo.getBlockDecl()->isConversionFromLambda();
00691   llvm::Constant *blockFn
00692     = CodeGenFunction(CGM, true).GenerateBlockFunction(CurGD, blockInfo,
00693                                                        LocalDeclMap,
00694                                                        isLambdaConv);
00695   blockFn = llvm::ConstantExpr::getBitCast(blockFn, VoidPtrTy);
00696 
00697   // If there is nothing to capture, we can emit this as a global block.
00698   if (blockInfo.CanBeGlobal)
00699     return buildGlobalBlock(CGM, blockInfo, blockFn);
00700 
00701   // Otherwise, we have to emit this as a local block.
00702 
00703   llvm::Constant *isa = CGM.getNSConcreteStackBlock();
00704   isa = llvm::ConstantExpr::getBitCast(isa, VoidPtrTy);
00705 
00706   // Build the block descriptor.
00707   llvm::Constant *descriptor = buildBlockDescriptor(CGM, blockInfo);
00708 
00709   Address blockAddr = blockInfo.LocalAddress;
00710   assert(blockAddr.isValid() && "block has no address!");
00711 
00712   // Compute the initial on-stack block flags.
00713   BlockFlags flags = BLOCK_HAS_SIGNATURE;
00714   if (blockInfo.HasCapturedVariableLayout) flags |= BLOCK_HAS_EXTENDED_LAYOUT;
00715   if (blockInfo.NeedsCopyDispose) flags |= BLOCK_HAS_COPY_DISPOSE;
00716   if (blockInfo.HasCXXObject) flags |= BLOCK_HAS_CXX_OBJ;
00717   if (blockInfo.UsesStret) flags |= BLOCK_USE_STRET;
00718 
00719   auto projectField =
00720     [&](unsigned index, CharUnits offset, const Twine &name) -> Address {
00721       return Builder.CreateStructGEP(blockAddr, index, offset, name);
00722     };
00723   auto storeField =
00724     [&](llvm::Value *value, unsigned index, CharUnits offset,
00725         const Twine &name) {
00726       Builder.CreateStore(value, projectField(index, offset, name));
00727     };
00728 
00729   // Initialize the block header.
00730   {
00731     // We assume all the header fields are densely packed.
00732     unsigned index = 0;
00733     CharUnits offset;
00734     auto addHeaderField =
00735       [&](llvm::Value *value, CharUnits size, const Twine &name) {
00736         storeField(value, index, offset, name);
00737         offset += size;
00738         index++;
00739       };
00740 
00741     addHeaderField(isa, getPointerSize(), "block.isa");
00742     addHeaderField(llvm::ConstantInt::get(IntTy, flags.getBitMask()),
00743                    getIntSize(), "block.flags");
00744     addHeaderField(llvm::ConstantInt::get(IntTy, 0),
00745                    getIntSize(), "block.reserved");
00746     addHeaderField(blockFn, getPointerSize(), "block.invoke");
00747     addHeaderField(descriptor, getPointerSize(), "block.descriptor");
00748   }
00749 
00750   // Finally, capture all the values into the block.
00751   const BlockDecl *blockDecl = blockInfo.getBlockDecl();
00752 
00753   // First, 'this'.
00754   if (blockDecl->capturesCXXThis()) {
00755     Address addr = projectField(blockInfo.CXXThisIndex, blockInfo.CXXThisOffset,
00756                                 "block.captured-this.addr");
00757     Builder.CreateStore(LoadCXXThis(), addr);
00758   }
00759 
00760   // Next, captured variables.
00761   for (const auto &CI : blockDecl->captures()) {
00762     const VarDecl *variable = CI.getVariable();
00763     const CGBlockInfo::Capture &capture = blockInfo.getCapture(variable);
00764 
00765     // Ignore constant captures.
00766     if (capture.isConstant()) continue;
00767 
00768     QualType type = variable->getType();
00769 
00770     // This will be a [[type]]*, except that a byref entry will just be
00771     // an i8**.
00772     Address blockField =
00773       projectField(capture.getIndex(), capture.getOffset(), "block.captured");
00774 
00775     // Compute the address of the thing we're going to move into the
00776     // block literal.
00777     Address src = Address::invalid();
00778     if (BlockInfo && CI.isNested()) {
00779       // We need to use the capture from the enclosing block.
00780       const CGBlockInfo::Capture &enclosingCapture =
00781         BlockInfo->getCapture(variable);
00782 
00783       // This is a [[type]]*, except that a byref entry will just be an i8**.
00784       src = Builder.CreateStructGEP(LoadBlockStruct(),
00785                                     enclosingCapture.getIndex(),
00786                                     enclosingCapture.getOffset(),
00787                                     "block.capture.addr");
00788     } else if (blockDecl->isConversionFromLambda()) {
00789       // The lambda capture in a lambda's conversion-to-block-pointer is
00790       // special; we'll simply emit it directly.
00791       src = Address::invalid();
00792     } else {
00793       // Just look it up in the locals map, which will give us back a
00794       // [[type]]*.  If that doesn't work, do the more elaborate DRE
00795       // emission.
00796       auto it = LocalDeclMap.find(variable);
00797       if (it != LocalDeclMap.end()) {
00798         src = it->second;
00799       } else {
00800         DeclRefExpr declRef(
00801             const_cast<VarDecl *>(variable),
00802             /*RefersToEnclosingVariableOrCapture*/ CI.isNested(), type,
00803             VK_LValue, SourceLocation());
00804         src = EmitDeclRefLValue(&declRef).getAddress();
00805       }
00806     }
00807 
00808     // For byrefs, we just write the pointer to the byref struct into
00809     // the block field.  There's no need to chase the forwarding
00810     // pointer at this point, since we're building something that will
00811     // live a shorter life than the stack byref anyway.
00812     if (CI.isByRef()) {
00813       // Get a void* that points to the byref struct.
00814       llvm::Value *byrefPointer;
00815       if (CI.isNested())
00816         byrefPointer = Builder.CreateLoad(src, "byref.capture");
00817       else
00818         byrefPointer = Builder.CreateBitCast(src.getPointer(), VoidPtrTy);
00819 
00820       // Write that void* into the capture field.
00821       Builder.CreateStore(byrefPointer, blockField);
00822 
00823     // If we have a copy constructor, evaluate that into the block field.
00824     } else if (const Expr *copyExpr = CI.getCopyExpr()) {
00825       if (blockDecl->isConversionFromLambda()) {
00826         // If we have a lambda conversion, emit the expression
00827         // directly into the block instead.
00828         AggValueSlot Slot =
00829             AggValueSlot::forAddr(blockField, Qualifiers(),
00830                                   AggValueSlot::IsDestructed,
00831                                   AggValueSlot::DoesNotNeedGCBarriers,
00832                                   AggValueSlot::IsNotAliased);
00833         EmitAggExpr(copyExpr, Slot);
00834       } else {
00835         EmitSynthesizedCXXCopyCtor(blockField, src, copyExpr);
00836       }
00837 
00838     // If it's a reference variable, copy the reference into the block field.
00839     } else if (type->isReferenceType()) {
00840       llvm::Value *ref = Builder.CreateLoad(src, "ref.val");
00841       Builder.CreateStore(ref, blockField);
00842 
00843     // If this is an ARC __strong block-pointer variable, don't do a
00844     // block copy.
00845     //
00846     // TODO: this can be generalized into the normal initialization logic:
00847     // we should never need to do a block-copy when initializing a local
00848     // variable, because the local variable's lifetime should be strictly
00849     // contained within the stack block's.
00850     } else if (type.getObjCLifetime() == Qualifiers::OCL_Strong &&
00851                type->isBlockPointerType()) {
00852       // Load the block and do a simple retain.
00853       llvm::Value *value = Builder.CreateLoad(src, "block.captured_block");
00854       value = EmitARCRetainNonBlock(value);
00855 
00856       // Do a primitive store to the block field.
00857       Builder.CreateStore(value, blockField);
00858 
00859     // Otherwise, fake up a POD copy into the block field.
00860     } else {
00861       // Fake up a new variable so that EmitScalarInit doesn't think
00862       // we're referring to the variable in its own initializer.
00863       ImplicitParamDecl blockFieldPseudoVar(getContext(), /*DC*/ nullptr,
00864                                             SourceLocation(), /*name*/ nullptr,
00865                                             type);
00866 
00867       // We use one of these or the other depending on whether the
00868       // reference is nested.
00869       DeclRefExpr declRef(const_cast<VarDecl *>(variable),
00870                           /*RefersToEnclosingVariableOrCapture*/ CI.isNested(),
00871                           type, VK_LValue, SourceLocation());
00872 
00873       ImplicitCastExpr l2r(ImplicitCastExpr::OnStack, type, CK_LValueToRValue,
00874                            &declRef, VK_RValue);
00875       // FIXME: Pass a specific location for the expr init so that the store is
00876       // attributed to a reasonable location - otherwise it may be attributed to
00877       // locations of subexpressions in the initialization.
00878       EmitExprAsInit(&l2r, &blockFieldPseudoVar,
00879                      MakeAddrLValue(blockField, type, AlignmentSource::Decl),
00880                      /*captured by init*/ false);
00881     }
00882 
00883     // Activate the cleanup if layout pushed one.
00884     if (!CI.isByRef()) {
00885       EHScopeStack::stable_iterator cleanup = capture.getCleanup();
00886       if (cleanup.isValid())
00887         ActivateCleanupBlock(cleanup, blockInfo.DominatingIP);
00888     }
00889   }
00890 
00891   // Cast to the converted block-pointer type, which happens (somewhat
00892   // unfortunately) to be a pointer to function type.
00893   llvm::Value *result =
00894     Builder.CreateBitCast(blockAddr.getPointer(),
00895                           ConvertType(blockInfo.getBlockExpr()->getType()));
00896 
00897   return result;
00898 }
00899 
00900 
00901 llvm::Type *CodeGenModule::getBlockDescriptorType() {
00902   if (BlockDescriptorType)
00903     return BlockDescriptorType;
00904 
00905   llvm::Type *UnsignedLongTy =
00906     getTypes().ConvertType(getContext().UnsignedLongTy);
00907 
00908   // struct __block_descriptor {
00909   //   unsigned long reserved;
00910   //   unsigned long block_size;
00911   //
00912   //   // later, the following will be added
00913   //
00914   //   struct {
00915   //     void (*copyHelper)();
00916   //     void (*disposeHelper)();
00917   //   } helpers;                // !!! optional
00918   //
00919   //   const char *signature;   // the block signature
00920   //   const char *layout;      // reserved
00921   // };
00922   BlockDescriptorType =
00923     llvm::StructType::create("struct.__block_descriptor",
00924                              UnsignedLongTy, UnsignedLongTy, nullptr);
00925 
00926   // Now form a pointer to that.
00927   BlockDescriptorType = llvm::PointerType::getUnqual(BlockDescriptorType);
00928   return BlockDescriptorType;
00929 }
00930 
00931 llvm::Type *CodeGenModule::getGenericBlockLiteralType() {
00932   if (GenericBlockLiteralType)
00933     return GenericBlockLiteralType;
00934 
00935   llvm::Type *BlockDescPtrTy = getBlockDescriptorType();
00936 
00937   // struct __block_literal_generic {
00938   //   void *__isa;
00939   //   int __flags;
00940   //   int __reserved;
00941   //   void (*__invoke)(void *);
00942   //   struct __block_descriptor *__descriptor;
00943   // };
00944   GenericBlockLiteralType =
00945     llvm::StructType::create("struct.__block_literal_generic",
00946                              VoidPtrTy, IntTy, IntTy, VoidPtrTy,
00947                              BlockDescPtrTy, nullptr);
00948 
00949   return GenericBlockLiteralType;
00950 }
00951 
00952 RValue CodeGenFunction::EmitBlockCallExpr(const CallExpr *E, 
00953                                           ReturnValueSlot ReturnValue) {
00954   const BlockPointerType *BPT =
00955     E->getCallee()->getType()->getAs<BlockPointerType>();
00956 
00957   llvm::Value *Callee = EmitScalarExpr(E->getCallee());
00958 
00959   // Get a pointer to the generic block literal.
00960   llvm::Type *BlockLiteralTy =
00961     llvm::PointerType::getUnqual(CGM.getGenericBlockLiteralType());
00962 
00963   // Bitcast the callee to a block literal.
00964   llvm::Value *BlockLiteral =
00965     Builder.CreateBitCast(Callee, BlockLiteralTy, "block.literal");
00966 
00967   // Get the function pointer from the literal.
00968   llvm::Value *FuncPtr =
00969     Builder.CreateStructGEP(CGM.getGenericBlockLiteralType(), BlockLiteral, 3);
00970 
00971   BlockLiteral = Builder.CreateBitCast(BlockLiteral, VoidPtrTy);
00972 
00973   // Add the block literal.
00974   CallArgList Args;
00975   Args.add(RValue::get(BlockLiteral), getContext().VoidPtrTy);
00976 
00977   QualType FnType = BPT->getPointeeType();
00978 
00979   // And the rest of the arguments.
00980   EmitCallArgs(Args, FnType->getAs<FunctionProtoType>(), E->arguments());
00981 
00982   // Load the function.
00983   llvm::Value *Func = Builder.CreateAlignedLoad(FuncPtr, getPointerAlign());
00984 
00985   const FunctionType *FuncTy = FnType->castAs<FunctionType>();
00986   const CGFunctionInfo &FnInfo =
00987     CGM.getTypes().arrangeBlockFunctionCall(Args, FuncTy);
00988 
00989   // Cast the function pointer to the right type.
00990   llvm::Type *BlockFTy = CGM.getTypes().GetFunctionType(FnInfo);
00991 
00992   llvm::Type *BlockFTyPtr = llvm::PointerType::getUnqual(BlockFTy);
00993   Func = Builder.CreateBitCast(Func, BlockFTyPtr);
00994 
00995   // And call the block.
00996   return EmitCall(FnInfo, Func, ReturnValue, Args);
00997 }
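// Roughly, the sequence emitted above corresponds to this source-level
// sketch (the local names 'bl' and 'fn' are illustrative only):
//
//   struct __block_literal_generic *bl =
//       (struct __block_literal_generic *) blockPtr;
//   ReturnType (*fn)(void *, ...) =
//       (ReturnType (*)(void *, ...)) bl->__invoke;
//   result = fn(bl, args...);  // the literal itself is the first argument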
00998 
00999 Address CodeGenFunction::GetAddrOfBlockDecl(const VarDecl *variable,
01000                                             bool isByRef) {
01001   assert(BlockInfo && "evaluating block ref without block information?");
01002   const CGBlockInfo::Capture &capture = BlockInfo->getCapture(variable);
01003 
01004   // Handle constant captures.
01005   if (capture.isConstant()) return LocalDeclMap.find(variable)->second;
01006 
01007   Address addr =
01008     Builder.CreateStructGEP(LoadBlockStruct(), capture.getIndex(),
01009                             capture.getOffset(), "block.capture.addr");
01010 
01011   if (isByRef) {
01012     // addr should be a void** right now.  Load, then cast the result
01013     // to byref*.
01014 
01015     auto &byrefInfo = getBlockByrefInfo(variable);
01016     addr = Address(Builder.CreateLoad(addr), byrefInfo.ByrefAlignment);
01017 
01018     auto byrefPointerType = llvm::PointerType::get(byrefInfo.Type, 0);
01019     addr = Builder.CreateBitCast(addr, byrefPointerType, "byref.addr");
01020 
01021     addr = emitBlockByrefAddress(addr, byrefInfo, /*follow*/ true,
01022                                  variable->getName());
01023   }
01024 
01025   if (auto refType = variable->getType()->getAs<ReferenceType>()) {
01026     addr = EmitLoadOfReference(addr, refType);
01027   }
01028 
01029   return addr;
01030 }
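// For reference, the byref structure being chased above has roughly this
// shape under the blocks ABI (a sketch; the exact layout, including the
// optional helpers, is computed when the __block variable is emitted):
//
//   struct __block_byref_x {
//     void *isa;
//     struct __block_byref_x *forwarding;  // followed to reach the real copy
//     int flags;
//     int size;
//     /* optional copy/dispose helpers */
//     T x;                                 // the __block variable itself
//   };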
01031 
01032 llvm::Constant *
01033 CodeGenModule::GetAddrOfGlobalBlock(const BlockExpr *blockExpr,
01034                                     const char *name) {
01035   CGBlockInfo blockInfo(blockExpr->getBlockDecl(), name);
01036   blockInfo.BlockExpression = blockExpr;
01037 
01038   // Compute information about the layout, etc., of this block.
01039   computeBlockInfo(*this, nullptr, blockInfo);
01040 
01041   // Using that metadata, generate the actual block function.
01042   llvm::Constant *blockFn;
01043   {
01044     CodeGenFunction::DeclMapTy LocalDeclMap;
01045     blockFn = CodeGenFunction(*this).GenerateBlockFunction(GlobalDecl(),
01046                                                            blockInfo,
01047                                                            LocalDeclMap,
01048                                                            false);
01049   }
01050   blockFn = llvm::ConstantExpr::getBitCast(blockFn, VoidPtrTy);
01051 
01052   return buildGlobalBlock(*this, blockInfo, blockFn);
01053 }
01054 
01055 static llvm::Constant *buildGlobalBlock(CodeGenModule &CGM,
01056                                         const CGBlockInfo &blockInfo,
01057                                         llvm::Constant *blockFn) {
01058   assert(blockInfo.CanBeGlobal);
01059 
01060   // Generate the constants for the block literal initializer.
01061   llvm::Constant *fields[BlockHeaderSize];
01062 
01063   // isa
01064   fields[0] = CGM.getNSConcreteGlobalBlock();
01065 
01066   // __flags
01067   BlockFlags flags = BLOCK_IS_GLOBAL | BLOCK_HAS_SIGNATURE;
01068   if (blockInfo.UsesStret) flags |= BLOCK_USE_STRET;
01069                                       
01070   fields[1] = llvm::ConstantInt::get(CGM.IntTy, flags.getBitMask());
01071 
01072   // Reserved
01073   fields[2] = llvm::Constant::getNullValue(CGM.IntTy);
01074 
01075   // Function
01076   fields[3] = blockFn;
01077 
01078   // Descriptor
01079   fields[4] = buildBlockDescriptor(CGM, blockInfo);
01080 
01081   llvm::Constant *init = llvm::ConstantStruct::getAnon(fields);
01082 
01083   llvm::GlobalVariable *literal =
01084     new llvm::GlobalVariable(CGM.getModule(),
01085                              init->getType(),
01086                              /*constant*/ true,
01087                              llvm::GlobalVariable::InternalLinkage,
01088                              init,
01089                              "__block_literal_global");
01090   literal->setAlignment(blockInfo.BlockAlign.getQuantity());
01091 
01092   // Return a constant of the appropriately-casted type.
01093   llvm::Type *requiredType =
01094     CGM.getTypes().ConvertType(blockInfo.getBlockExpr()->getType());
01095   return llvm::ConstantExpr::getBitCast(literal, requiredType);
01096 }
01097 
01098 void CodeGenFunction::setBlockContextParameter(const ImplicitParamDecl *D,
01099                                                unsigned argNum,
01100                                                llvm::Value *arg) {
01101   assert(BlockInfo && "not emitting prologue of block invocation function?!");
01102 
01103   llvm::Value *localAddr = nullptr;
01104   if (CGM.getCodeGenOpts().OptimizationLevel == 0) {
01105     // Allocate a stack slot to let the debug info survive the RA.
01106     Address alloc = CreateMemTemp(D->getType(), D->getName() + ".addr");
01107     Builder.CreateStore(arg, alloc);
01108     localAddr = Builder.CreateLoad(alloc);
01109   }
01110 
01111   if (CGDebugInfo *DI = getDebugInfo()) {
01112     if (CGM.getCodeGenOpts().getDebugInfo() >=
01113         codegenoptions::LimitedDebugInfo) {
01114       DI->setLocation(D->getLocation());
01115       DI->EmitDeclareOfBlockLiteralArgVariable(*BlockInfo, arg, argNum,
01116                                                localAddr, Builder);
01117     }
01118   }
01119 
01120   SourceLocation StartLoc = BlockInfo->getBlockExpr()->getBody()->getLocStart();
01121   ApplyDebugLocation Scope(*this, StartLoc);
01122 
01123   // Instead of messing around with LocalDeclMap, just set the value
01124   // directly as BlockPointer.
01125   BlockPointer = Builder.CreateBitCast(arg,
01126                                        BlockInfo->StructureType->getPointerTo(),
01127                                        "block");
01128 }
01129 
01130 Address CodeGenFunction::LoadBlockStruct() {
01131   assert(BlockInfo && "not in a block invocation function!");
01132   assert(BlockPointer && "no block pointer set!");
01133   return Address(BlockPointer, BlockInfo->BlockAlign);
01134 }
01135 
01136 llvm::Function *
01137 CodeGenFunction::GenerateBlockFunction(GlobalDecl GD,
01138                                        const CGBlockInfo &blockInfo,
01139                                        const DeclMapTy &ldm,
01140                                        bool IsLambdaConversionToBlock) {
01141   const BlockDecl *blockDecl = blockInfo.getBlockDecl();
01142 
01143   CurGD = GD;
01144 
01145   CurEHLocation = blockInfo.getBlockExpr()->getLocEnd();
01146   
01147   BlockInfo = &blockInfo;
01148 
01149   // Arrange for local static and local extern declarations to appear
01150   // to be local to this function as well, in case they're directly
01151   // referenced in a block.
01152   for (DeclMapTy::const_iterator i = ldm.begin(), e = ldm.end(); i != e; ++i) {
01153     const auto *var = dyn_cast<VarDecl>(i->first);
01154     if (var && !var->hasLocalStorage())
01155       setAddrOfLocalVar(var, i->second);
01156   }
01157 
01158   // Begin building the function declaration.
01159 
01160   // Build the argument list.
01161   FunctionArgList args;
01162 
01163   // The first argument is the block pointer.  Just take it as a void*
01164   // and cast it later.
01165   QualType selfTy = getContext().VoidPtrTy;
01166   IdentifierInfo *II = &CGM.getContext().Idents.get(".block_descriptor");
01167 
01168   ImplicitParamDecl selfDecl(getContext(), const_cast<BlockDecl*>(blockDecl),
01169                              SourceLocation(), II, selfTy);
01170   args.push_back(&selfDecl);
01171 
01172   // Now add the rest of the parameters.
01173   args.append(blockDecl->param_begin(), blockDecl->param_end());
01174 
01175   // Create the function declaration.
01176   const FunctionProtoType *fnType = blockInfo.getBlockExpr()->getFunctionType();
01177   const CGFunctionInfo &fnInfo = CGM.getTypes().arrangeFreeFunctionDeclaration(
01178       fnType->getReturnType(), args, fnType->getExtInfo(),
01179       fnType->isVariadic());
01180   if (CGM.ReturnSlotInterferesWithArgs(fnInfo))
01181     blockInfo.UsesStret = true;
01182 
01183   llvm::FunctionType *fnLLVMType = CGM.getTypes().GetFunctionType(fnInfo);
01184 
01185   StringRef name = CGM.getBlockMangledName(GD, blockDecl);
01186   llvm::Function *fn = llvm::Function::Create(
01187       fnLLVMType, llvm::GlobalValue::InternalLinkage, name, &CGM.getModule());
01188   CGM.SetInternalFunctionAttributes(blockDecl, fn, fnInfo);
01189 
01190   // Begin generating the function.
01191   StartFunction(blockDecl, fnType->getReturnType(), fn, fnInfo, args,
01192                 blockDecl->getLocation(),
01193                 blockInfo.getBlockExpr()->getBody()->getLocStart());
01194 
01195   // Okay.  Undo some of what StartFunction did.
01196 
01197   // At -O0 we generate an explicit alloca for the BlockPointer, so the RA
01198   // won't delete the dbg.declare intrinsics for captured variables.
01199   llvm::Value *BlockPointerDbgLoc = BlockPointer;
01200   if (CGM.getCodeGenOpts().OptimizationLevel == 0) {
01201     // Allocate a stack slot for it, so we can point the debugger to it
01202     Address Alloca = CreateTempAlloca(BlockPointer->getType(),
01203                                       getPointerAlign(),
01204                                       "block.addr");
01205     // Set the DebugLocation to empty, so the store is recognized as a
01206     // frame setup instruction by llvm::DwarfDebug::beginFunction().
01207     auto NL = ApplyDebugLocation::CreateEmpty(*this);
01208     Builder.CreateStore(BlockPointer, Alloca);
01209     BlockPointerDbgLoc = Alloca.getPointer();
01210   }
01211 
01212   // If we have a C++ 'this' reference, go ahead and force it into
01213   // existence now.
01214   if (blockDecl->capturesCXXThis()) {
01215     Address addr =
01216       Builder.CreateStructGEP(LoadBlockStruct(), blockInfo.CXXThisIndex,
01217                               blockInfo.CXXThisOffset, "block.captured-this");
01218     CXXThisValue = Builder.CreateLoad(addr, "this");
01219   }
01220 
01221   // Also force all the constant captures.
01222   for (const auto &CI : blockDecl->captures()) {
01223     const VarDecl *variable = CI.getVariable();
01224     const CGBlockInfo::Capture &capture = blockInfo.getCapture(variable);
01225     if (!capture.isConstant()) continue;
01226 
01227     CharUnits align = getContext().getDeclAlign(variable);
01228     Address alloca =
01229       CreateMemTemp(variable->getType(), align, "block.captured-const");
01230 
01231     Builder.CreateStore(capture.getConstant(), alloca);
01232 
01233     setAddrOfLocalVar(variable, alloca);
01234   }
01235 
01236   // Save a spot to insert the debug information for all the DeclRefExprs.
01237   llvm::BasicBlock *entry = Builder.GetInsertBlock();
01238   llvm::BasicBlock::iterator entry_ptr = Builder.GetInsertPoint();
01239   --entry_ptr;
01240 
01241   if (IsLambdaConversionToBlock)
01242     EmitLambdaBlockInvokeBody();
01243   else {
01244     PGO.assignRegionCounters(GlobalDecl(blockDecl), fn);
01245     incrementProfileCounter(blockDecl->getBody());
01246     EmitStmt(blockDecl->getBody());
01247   }
01248 
01249   // Remember where we were...
01250   llvm::BasicBlock *resume = Builder.GetInsertBlock();
01251 
01252   // Go back to the entry.
01253   ++entry_ptr;
01254   Builder.SetInsertPoint(entry, entry_ptr);
01255 
01256   // Emit debug information for all the DeclRefExprs.
01257   // FIXME: also for 'this'
01258   if (CGDebugInfo *DI = getDebugInfo()) {
01259     for (const auto &CI : blockDecl->captures()) {
01260       const VarDecl *variable = CI.getVariable();
01261       DI->EmitLocation(Builder, variable->getLocation());
01262 
01263       if (CGM.getCodeGenOpts().getDebugInfo() >=
01264           codegenoptions::LimitedDebugInfo) {
01265         const CGBlockInfo::Capture &capture = blockInfo.getCapture(variable);
01266         if (capture.isConstant()) {
01267           auto addr = LocalDeclMap.find(variable)->second;
01268           DI->EmitDeclareOfAutoVariable(variable, addr.getPointer(),
01269                                         Builder);
01270           continue;
01271         }
01272 
01273         DI->EmitDeclareOfBlockDeclRefVariable(
01274             variable, BlockPointerDbgLoc, Builder, blockInfo,
01275             entry_ptr == entry->end() ? nullptr : &*entry_ptr);
01276       }
01277     }
01278     // Recover location if it was changed in the above loop.
01279     DI->EmitLocation(Builder,
01280                      cast<CompoundStmt>(blockDecl->getBody())->getRBracLoc());
01281   }
01282 
01283   // And resume where we left off.
01284   if (resume == nullptr)
01285     Builder.ClearInsertionPoint();
01286   else
01287     Builder.SetInsertPoint(resume);
01288 
01289   FinishFunction(cast<CompoundStmt>(blockDecl->getBody())->getRBracLoc());
01290 
01291   return fn;
01292 }
01293 
01294 /*
01295     notes.push_back(HelperInfo());
01296     HelperInfo &note = notes.back();
01297     note.index = capture.getIndex();
01298     note.RequiresCopying = (ci->hasCopyExpr() || BlockRequiresCopying(type));
01299     note.cxxbar_import = ci->getCopyExpr();
01300 
01301     if (ci->isByRef()) {
01302       note.flag = BLOCK_FIELD_IS_BYREF;
01303       if (type.isObjCGCWeak())
01304         note.flag |= BLOCK_FIELD_IS_WEAK;
01305     } else if (type->isBlockPointerType()) {
01306       note.flag = BLOCK_FIELD_IS_BLOCK;
01307     } else {
01308       note.flag = BLOCK_FIELD_IS_OBJECT;
01309     }
01310  */
01311 
01312 /// Generate the copy-helper function for a block closure object:
01313 ///   static void block_copy_helper(block_t *dst, block_t *src);
01314 /// The runtime will have previously initialized 'dst' by doing a
01315 /// bit-copy of 'src'.
01316 ///
01317 /// Note that this copies an entire block closure object to the heap;
01318 /// it should not be confused with a 'byref copy helper', which moves
01319 /// the contents of an individual __block variable to the heap.
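///
/// As a rough, purely illustrative pseudo-C sketch (the actual body is
/// emitted as LLVM IR below; 'Block_literal' and 'capture' are stand-in
/// names), a block capturing one retainable ObjC object outside ARC gets
/// a helper that behaves like:
/// \code
/// static void __copy_helper_block_(void *dst, void *src) {
///   struct Block_literal *d = dst, *s = src;
///   // Per capture: run the C++ copy constructor, copy a __weak
///   // reference, retain a __strong capture, or fall back to
///   // _Block_object_assign with the appropriate BLOCK_FIELD flags.
///   _Block_object_assign(&d->capture, s->capture, BLOCK_FIELD_IS_OBJECT);
/// }
/// \endcode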
01320 llvm::Constant *
01321 CodeGenFunction::GenerateCopyHelperFunction(const CGBlockInfo &blockInfo) {
01322   ASTContext &C = getContext();
01323 
01324   FunctionArgList args;
01325   ImplicitParamDecl dstDecl(getContext(), nullptr, SourceLocation(), nullptr,
01326                             C.VoidPtrTy);
01327   args.push_back(&dstDecl);
01328   ImplicitParamDecl srcDecl(getContext(), nullptr, SourceLocation(), nullptr,
01329                             C.VoidPtrTy);
01330   args.push_back(&srcDecl);
01331 
01332   const CGFunctionInfo &FI = CGM.getTypes().arrangeFreeFunctionDeclaration(
01333       C.VoidTy, args, FunctionType::ExtInfo(), /*variadic=*/false);
01334 
01335   // FIXME: it would be nice if these were mergeable with things with
01336   // identical semantics.
01337   llvm::FunctionType *LTy = CGM.getTypes().GetFunctionType(FI);
01338 
01339   llvm::Function *Fn =
01340     llvm::Function::Create(LTy, llvm::GlobalValue::InternalLinkage,
01341                            "__copy_helper_block_", &CGM.getModule());
01342 
01343   IdentifierInfo *II
01344     = &CGM.getContext().Idents.get("__copy_helper_block_");
01345 
01346   FunctionDecl *FD = FunctionDecl::Create(C,
01347                                           C.getTranslationUnitDecl(),
01348                                           SourceLocation(),
01349                                           SourceLocation(), II, C.VoidTy,
01350                                           nullptr, SC_Static,
01351                                           false,
01352                                           false);
01353 
01354   CGM.SetInternalFunctionAttributes(nullptr, Fn, FI);
01355 
01356   auto NL = ApplyDebugLocation::CreateEmpty(*this);
01357   StartFunction(FD, C.VoidTy, Fn, FI, args);
01358   // Create a scope with an artificial location for the body of this function.
01359   auto AL = ApplyDebugLocation::CreateArtificial(*this);
01360   llvm::Type *structPtrTy = blockInfo.StructureType->getPointerTo();
01361 
01362   Address src = GetAddrOfLocalVar(&srcDecl);
01363   src = Address(Builder.CreateLoad(src), blockInfo.BlockAlign);
01364   src = Builder.CreateBitCast(src, structPtrTy, "block.source");
01365 
01366   Address dst = GetAddrOfLocalVar(&dstDecl);
01367   dst = Address(Builder.CreateLoad(dst), blockInfo.BlockAlign);
01368   dst = Builder.CreateBitCast(dst, structPtrTy, "block.dest");
01369 
01370   const BlockDecl *blockDecl = blockInfo.getBlockDecl();
01371 
01372   for (const auto &CI : blockDecl->captures()) {
01373     const VarDecl *variable = CI.getVariable();
01374     QualType type = variable->getType();
01375 
01376     const CGBlockInfo::Capture &capture = blockInfo.getCapture(variable);
01377     if (capture.isConstant()) continue;
01378 
01379     const Expr *copyExpr = CI.getCopyExpr();
01380     BlockFieldFlags flags;
01381 
01382     bool useARCWeakCopy = false;
01383     bool useARCStrongCopy = false;
01384 
01385     if (copyExpr) {
01386       assert(!CI.isByRef());
01387       // don't bother computing flags
01388 
01389     } else if (CI.isByRef()) {
01390       flags = BLOCK_FIELD_IS_BYREF;
01391       if (type.isObjCGCWeak())
01392         flags |= BLOCK_FIELD_IS_WEAK;
01393 
01394     } else if (type->isObjCRetainableType()) {
01395       flags = BLOCK_FIELD_IS_OBJECT;
01396       bool isBlockPointer = type->isBlockPointerType();
01397       if (isBlockPointer)
01398         flags = BLOCK_FIELD_IS_BLOCK;
01399 
01400       // Special rules for ARC captures:
01401       Qualifiers qs = type.getQualifiers();
01402 
01403       // We need to register __weak direct captures with the runtime.
01404       if (qs.getObjCLifetime() == Qualifiers::OCL_Weak) {
01405         useARCWeakCopy = true;
01406 
01407       // We need to retain the copied value for __strong direct captures.
01408       } else if (qs.getObjCLifetime() == Qualifiers::OCL_Strong) {
01409         // If it's a block pointer, we have to copy the block and
01410         // assign that to the destination pointer, so we might as
01411         // well use _Block_object_assign.  Otherwise we can avoid that.
01412         if (!isBlockPointer)
01413           useARCStrongCopy = true;
01414 
01415       // Non-ARC captures of retainable pointers are strong and
01416       // therefore require a call to _Block_object_assign.
01417       } else if (!qs.getObjCLifetime() && !getLangOpts().ObjCAutoRefCount) {
01418         // fall through
01419 
01420       // Otherwise the memcpy is fine.
01421       } else {
01422         continue;
01423       }
01424 
01425     // For all other types, the memcpy is fine.
01426     } else {
01427       continue;
01428     }
01429 
01430     unsigned index = capture.getIndex();
01431     Address srcField = Builder.CreateStructGEP(src, index, capture.getOffset());
01432     Address dstField = Builder.CreateStructGEP(dst, index, capture.getOffset());
01433 
01434     // If there's an explicit copy expression, we do that.
01435     if (copyExpr) {
01436       EmitSynthesizedCXXCopyCtor(dstField, srcField, copyExpr);
01437     } else if (useARCWeakCopy) {
01438       EmitARCCopyWeak(dstField, srcField);
01439     } else {
01440       llvm::Value *srcValue = Builder.CreateLoad(srcField, "blockcopy.src");
01441       if (useARCStrongCopy) {
01442         // At -O0, store null into the destination field (so that the
01443         // storeStrong doesn't over-release) and then call storeStrong.
01444         // This is a workaround to not having an initStrong call.
01445         if (CGM.getCodeGenOpts().OptimizationLevel == 0) {
01446           auto *ty = cast<llvm::PointerType>(srcValue->getType());
01447           llvm::Value *null = llvm::ConstantPointerNull::get(ty);
01448           Builder.CreateStore(null, dstField);
01449           EmitARCStoreStrongCall(dstField, srcValue, true);
01450 
01451         // With optimization enabled, take advantage of the fact that
01452         // the blocks runtime guarantees a memcpy of the block data, and
01453         // just emit a retain of the src field.
01454         } else {
01455           EmitARCRetainNonBlock(srcValue);
01456 
01457           // We don't need this anymore, so kill it.  It's not quite
01458           // worth the annoyance to avoid creating it in the first place.
01459           cast<llvm::Instruction>(dstField.getPointer())->eraseFromParent();
01460         }
01461       } else {
01462         srcValue = Builder.CreateBitCast(srcValue, VoidPtrTy);
01463         llvm::Value *dstAddr =
01464           Builder.CreateBitCast(dstField.getPointer(), VoidPtrTy);
01465         llvm::Value *args[] = {
01466           dstAddr, srcValue, llvm::ConstantInt::get(Int32Ty, flags.getBitMask())
01467         };
01468 
01469         bool copyCanThrow = false;
01470         if (CI.isByRef() && variable->getType()->getAsCXXRecordDecl()) {
01471           const Expr *copyExpr =
01472             CGM.getContext().getBlockVarCopyInits(variable);
01473           if (copyExpr) {
01474             copyCanThrow = true; // FIXME: reuse the noexcept logic
01475           }
01476         }
01477 
01478         if (copyCanThrow) {
01479           EmitRuntimeCallOrInvoke(CGM.getBlockObjectAssign(), args);
01480         } else {
01481           EmitNounwindRuntimeCall(CGM.getBlockObjectAssign(), args);
01482         }
01483       }
01484     }
01485   }
01486 
01487   FinishFunction();
01488 
01489   return llvm::ConstantExpr::getBitCast(Fn, VoidPtrTy);
01490 }
01491 
01492 /// Generate the destroy-helper function for a block closure object:
01493 ///   static void block_destroy_helper(block_t *theBlock);
01494 ///
01495 /// Note that this destroys a heap-allocated block closure object;
01496 /// it should not be confused with a 'byref destroy helper', which
01497 /// destroys the heap-allocated contents of an individual __block
01498 /// variable.
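///
/// As a rough, purely illustrative pseudo-C sketch ('Block_literal' and
/// 'capture' are stand-in names), a block capturing a retainable ObjC
/// object outside ARC gets a destroy helper that behaves like:
/// \code
/// static void __destroy_helper_block_(void *src) {
///   struct Block_literal *s = src;
///   // Per capture: run the C++ destructor, release a __strong or
///   // __weak capture, or fall back to _Block_object_dispose with the
///   // appropriate BLOCK_FIELD flags.
///   _Block_object_dispose(s->capture, BLOCK_FIELD_IS_OBJECT);
/// }
/// \endcode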
01499 llvm::Constant *
01500 CodeGenFunction::GenerateDestroyHelperFunction(const CGBlockInfo &blockInfo) {
01501   ASTContext &C = getContext();
01502 
01503   FunctionArgList args;
01504   ImplicitParamDecl srcDecl(getContext(), nullptr, SourceLocation(), nullptr,
01505                             C.VoidPtrTy);
01506   args.push_back(&srcDecl);
01507 
01508   const CGFunctionInfo &FI = CGM.getTypes().arrangeFreeFunctionDeclaration(
01509       C.VoidTy, args, FunctionType::ExtInfo(), /*variadic=*/false);
01510 
01511   // FIXME: We'd like to make these mergeable by content, with
01512   // internal linkage.
01513   llvm::FunctionType *LTy = CGM.getTypes().GetFunctionType(FI);
01514 
01515   llvm::Function *Fn =
01516     llvm::Function::Create(LTy, llvm::GlobalValue::InternalLinkage,
01517                            "__destroy_helper_block_", &CGM.getModule());
01518 
01519   IdentifierInfo *II
01520     = &CGM.getContext().Idents.get("__destroy_helper_block_");
01521 
01522   FunctionDecl *FD = FunctionDecl::Create(C, C.getTranslationUnitDecl(),
01523                                           SourceLocation(),
01524                                           SourceLocation(), II, C.VoidTy,
01525                                           nullptr, SC_Static,
01526                                           false, false);
01527 
01528   CGM.SetInternalFunctionAttributes(nullptr, Fn, FI);
01529 
01530   // Create a scope with an artificial location for the body of this function.
01531   auto NL = ApplyDebugLocation::CreateEmpty(*this);
01532   StartFunction(FD, C.VoidTy, Fn, FI, args);
01533   auto AL = ApplyDebugLocation::CreateArtificial(*this);
01534 
01535   llvm::Type *structPtrTy = blockInfo.StructureType->getPointerTo();
01536 
01537   Address src = GetAddrOfLocalVar(&srcDecl);
01538   src = Address(Builder.CreateLoad(src), blockInfo.BlockAlign);
01539   src = Builder.CreateBitCast(src, structPtrTy, "block");
01540 
01541   const BlockDecl *blockDecl = blockInfo.getBlockDecl();
01542 
01543   CodeGenFunction::RunCleanupsScope cleanups(*this);
01544 
01545   for (const auto &CI : blockDecl->captures()) {
01546     const VarDecl *variable = CI.getVariable();
01547     QualType type = variable->getType();
01548 
01549     const CGBlockInfo::Capture &capture = blockInfo.getCapture(variable);
01550     if (capture.isConstant()) continue;
01551 
01552     BlockFieldFlags flags;
01553     const CXXDestructorDecl *dtor = nullptr;
01554 
01555     bool useARCWeakDestroy = false;
01556     bool useARCStrongDestroy = false;
01557 
01558     if (CI.isByRef()) {
01559       flags = BLOCK_FIELD_IS_BYREF;
01560       if (type.isObjCGCWeak())
01561         flags |= BLOCK_FIELD_IS_WEAK;
01562     } else if (const CXXRecordDecl *record = type->getAsCXXRecordDecl()) {
01563       if (record->hasTrivialDestructor())
01564         continue;
01565       dtor = record->getDestructor();
01566     } else if (type->isObjCRetainableType()) {
01567       flags = BLOCK_FIELD_IS_OBJECT;
01568       if (type->isBlockPointerType())
01569         flags = BLOCK_FIELD_IS_BLOCK;
01570 
01571       // Special rules for ARC captures.
01572       Qualifiers qs = type.getQualifiers();
01573 
01574       // Use objc_storeStrong for __strong direct captures; the
01575       // dynamic tools really like it when we do this.
01576       if (qs.getObjCLifetime() == Qualifiers::OCL_Strong) {
01577         useARCStrongDestroy = true;
01578 
01579       // Support __weak direct captures.
01580       } else if (qs.getObjCLifetime() == Qualifiers::OCL_Weak) {
01581         useARCWeakDestroy = true;
01582 
01583       // Non-ARC captures are strong, and we need to use _Block_object_dispose.
01584       } else if (!qs.hasObjCLifetime() && !getLangOpts().ObjCAutoRefCount) {
01585         // fall through
01586 
01587       // Otherwise, we have nothing to do.
01588       } else {
01589         continue;
01590       }
01591     } else {
01592       continue;
01593     }
01594 
01595     Address srcField =
01596       Builder.CreateStructGEP(src, capture.getIndex(), capture.getOffset());
01597 
01598     // If the captured object has a nontrivial destructor, push a cleanup to run it.
01599     if (dtor) {
01600       PushDestructorCleanup(dtor, srcField);
01601 
01602     // If this is a __weak capture, emit the release directly.
01603     } else if (useARCWeakDestroy) {
01604       EmitARCDestroyWeak(srcField);
01605 
01606     // Destroy strong objects with a call if requested.
01607     } else if (useARCStrongDestroy) {
01608       EmitARCDestroyStrong(srcField, ARCImpreciseLifetime);
01609 
01610     // Otherwise we call _Block_object_dispose.  It wouldn't be too
01611     // hard to just emit this as a cleanup if we wanted to make sure
01612     // that things were done in reverse.
01613     } else {
01614       llvm::Value *value = Builder.CreateLoad(srcField);
01615       value = Builder.CreateBitCast(value, VoidPtrTy);
01616       BuildBlockRelease(value, flags);
01617     }
01618   }
01619 
01620   cleanups.ForceCleanup();
01621 
01622   FinishFunction();
01623 
01624   return llvm::ConstantExpr::getBitCast(Fn, VoidPtrTy);
01625 }
01626 
01627 namespace {
01628 
01629 /// Emits the copy/dispose helper functions for a __block object of id type.
01630 class ObjectByrefHelpers final : public BlockByrefHelpers {
01631   BlockFieldFlags Flags;
01632 
01633 public:
01634   ObjectByrefHelpers(CharUnits alignment, BlockFieldFlags flags)
01635     : BlockByrefHelpers(alignment), Flags(flags) {}
01636 
01637   void emitCopy(CodeGenFunction &CGF, Address destField,
01638                 Address srcField) override {
01639     destField = CGF.Builder.CreateBitCast(destField, CGF.VoidPtrTy);
01640 
01641     srcField = CGF.Builder.CreateBitCast(srcField, CGF.VoidPtrPtrTy);
01642     llvm::Value *srcValue = CGF.Builder.CreateLoad(srcField);
01643 
01644     unsigned flags = (Flags | BLOCK_BYREF_CALLER).getBitMask();
01645 
01646     llvm::Value *flagsVal = llvm::ConstantInt::get(CGF.Int32Ty, flags);
01647     llvm::Value *fn = CGF.CGM.getBlockObjectAssign();
01648 
01649     llvm::Value *args[] = { destField.getPointer(), srcValue, flagsVal };
01650     CGF.EmitNounwindRuntimeCall(fn, args);
01651   }
01652 
01653   void emitDispose(CodeGenFunction &CGF, Address field) override {
01654     field = CGF.Builder.CreateBitCast(field, CGF.Int8PtrTy->getPointerTo(0));
01655     llvm::Value *value = CGF.Builder.CreateLoad(field);
01656 
01657     CGF.BuildBlockRelease(value, Flags | BLOCK_BYREF_CALLER);
01658   }
01659 
01660   void profileImpl(llvm::FoldingSetNodeID &id) const override {
01661     id.AddInteger(Flags.getBitMask());
01662   }
01663 };
01664 
01665 /// Emits the copy/dispose helpers for an ARC __block __weak variable.
01666 class ARCWeakByrefHelpers final : public BlockByrefHelpers {
01667 public:
01668   ARCWeakByrefHelpers(CharUnits alignment) : BlockByrefHelpers(alignment) {}
01669 
01670   void emitCopy(CodeGenFunction &CGF, Address destField,
01671                 Address srcField) override {
01672     CGF.EmitARCMoveWeak(destField, srcField);
01673   }
01674 
01675   void emitDispose(CodeGenFunction &CGF, Address field) override {
01676     CGF.EmitARCDestroyWeak(field);
01677   }
01678 
01679   void profileImpl(llvm::FoldingSetNodeID &id) const override {
01680     // 0 is distinguishable from all pointers and byref flags
01681     id.AddInteger(0);
01682   }
01683 };
01684 
01685 /// Emits the copy/dispose helpers for an ARC __block __strong variable
01686 /// that's not of block-pointer type.
01687 class ARCStrongByrefHelpers final : public BlockByrefHelpers {
01688 public:
01689   ARCStrongByrefHelpers(CharUnits alignment) : BlockByrefHelpers(alignment) {}
01690 
01691   void emitCopy(CodeGenFunction &CGF, Address destField,
01692                 Address srcField) override {
01693     // Do a "move" by copying the value and then zeroing out the old
01694     // variable.
01695 
01696     llvm::Value *value = CGF.Builder.CreateLoad(srcField);
01697     
01698     llvm::Value *null =
01699       llvm::ConstantPointerNull::get(cast<llvm::PointerType>(value->getType()));
01700 
01701     if (CGF.CGM.getCodeGenOpts().OptimizationLevel == 0) {
01702       CGF.Builder.CreateStore(null, destField);
01703       CGF.EmitARCStoreStrongCall(destField, value, /*ignored*/ true);
01704       CGF.EmitARCStoreStrongCall(srcField, null, /*ignored*/ true);
01705       return;
01706     }
01707     CGF.Builder.CreateStore(value, destField);
01708     CGF.Builder.CreateStore(null, srcField);
01709   }
01710 
01711   void emitDispose(CodeGenFunction &CGF, Address field) override {
01712     CGF.EmitARCDestroyStrong(field, ARCImpreciseLifetime);
01713   }
01714 
01715   void profileImpl(llvm::FoldingSetNodeID &id) const override {
01716     // 1 is distinguishable from all pointers and byref flags
01717     id.AddInteger(1);
01718   }
01719 };
01720 
01721 /// Emits the copy/dispose helpers for an ARC __block __strong
01722 /// variable that's of block-pointer type.
01723 class ARCStrongBlockByrefHelpers final : public BlockByrefHelpers {
01724 public:
01725   ARCStrongBlockByrefHelpers(CharUnits alignment)
01726     : BlockByrefHelpers(alignment) {}
01727 
01728   void emitCopy(CodeGenFunction &CGF, Address destField,
01729                 Address srcField) override {
01730     // Do the copy with objc_retainBlock; that's all that
01731     // _Block_object_assign would do anyway, and we'd have to pass the
01732     // right arguments to make sure it doesn't get no-op'ed.
01733     llvm::Value *oldValue = CGF.Builder.CreateLoad(srcField);
01734     llvm::Value *copy = CGF.EmitARCRetainBlock(oldValue, /*mandatory*/ true);
01735     CGF.Builder.CreateStore(copy, destField);
01736   }
01737 
01738   void emitDispose(CodeGenFunction &CGF, Address field) override {
01739     CGF.EmitARCDestroyStrong(field, ARCImpreciseLifetime);
01740   }
01741 
01742   void profileImpl(llvm::FoldingSetNodeID &id) const override {
01743     // 2 is distinguishable from all pointers and byref flags
01744     id.AddInteger(2);
01745   }
01746 };
01747 
01748 /// Emits the copy/dispose helpers for a __block variable with a
01749 /// nontrivial copy constructor or destructor.
01750 class CXXByrefHelpers final : public BlockByrefHelpers {
01751   QualType VarType;
01752   const Expr *CopyExpr;
01753 
01754 public:
01755   CXXByrefHelpers(CharUnits alignment, QualType type,
01756                   const Expr *copyExpr)
01757     : BlockByrefHelpers(alignment), VarType(type), CopyExpr(copyExpr) {}
01758 
01759   bool needsCopy() const override { return CopyExpr != nullptr; }
01760   void emitCopy(CodeGenFunction &CGF, Address destField,
01761                 Address srcField) override {
01762     if (!CopyExpr) return;
01763     CGF.EmitSynthesizedCXXCopyCtor(destField, srcField, CopyExpr);
01764   }
01765 
01766   void emitDispose(CodeGenFunction &CGF, Address field) override {
01767     EHScopeStack::stable_iterator cleanupDepth = CGF.EHStack.stable_begin();
01768     CGF.PushDestructorCleanup(VarType, field);
01769     CGF.PopCleanupBlocks(cleanupDepth);
01770   }
01771 
01772   void profileImpl(llvm::FoldingSetNodeID &id) const override {
01773     id.AddPointer(VarType.getCanonicalType().getAsOpaquePtr());
01774   }
01775 };
01776 } // end anonymous namespace
01777 
01778 static llvm::Constant *
01779 generateByrefCopyHelper(CodeGenFunction &CGF, const BlockByrefInfo &byrefInfo,
01780                         BlockByrefHelpers &generator) {
01781   ASTContext &Context = CGF.getContext();
01782 
01783   QualType R = Context.VoidTy;
01784 
01785   FunctionArgList args;
01786   ImplicitParamDecl dst(CGF.getContext(), nullptr, SourceLocation(), nullptr,
01787                         Context.VoidPtrTy);
01788   args.push_back(&dst);
01789 
01790   ImplicitParamDecl src(CGF.getContext(), nullptr, SourceLocation(), nullptr,
01791                         Context.VoidPtrTy);
01792   args.push_back(&src);
01793 
01794   const CGFunctionInfo &FI = CGF.CGM.getTypes().arrangeFreeFunctionDeclaration(
01795       R, args, FunctionType::ExtInfo(), /*variadic=*/false);
01796 
01797   llvm::FunctionType *LTy = CGF.CGM.getTypes().GetFunctionType(FI);
01798 
01799   // FIXME: We'd like to make these mergeable by content, with
01800   // internal linkage.
01801   llvm::Function *Fn =
01802     llvm::Function::Create(LTy, llvm::GlobalValue::InternalLinkage,
01803                            "__Block_byref_object_copy_", &CGF.CGM.getModule());
01804 
01805   IdentifierInfo *II
01806     = &Context.Idents.get("__Block_byref_object_copy_");
01807 
01808   FunctionDecl *FD = FunctionDecl::Create(Context,
01809                                           Context.getTranslationUnitDecl(),
01810                                           SourceLocation(),
01811                                           SourceLocation(), II, R, nullptr,
01812                                           SC_Static,
01813                                           false, false);
01814 
01815   CGF.CGM.SetInternalFunctionAttributes(nullptr, Fn, FI);
01816 
01817   CGF.StartFunction(FD, R, Fn, FI, args);
01818 
01819   if (generator.needsCopy()) {
01820     llvm::Type *byrefPtrType = byrefInfo.Type->getPointerTo(0);
01821 
01822     // dst->x
01823     Address destField = CGF.GetAddrOfLocalVar(&dst);
01824     destField = Address(CGF.Builder.CreateLoad(destField),
01825                         byrefInfo.ByrefAlignment);
01826     destField = CGF.Builder.CreateBitCast(destField, byrefPtrType);
01827     destField = CGF.emitBlockByrefAddress(destField, byrefInfo, false,
01828                                           "dest-object");
01829 
01830     // src->x
01831     Address srcField = CGF.GetAddrOfLocalVar(&src);
01832     srcField = Address(CGF.Builder.CreateLoad(srcField),
01833                        byrefInfo.ByrefAlignment);
01834     srcField = CGF.Builder.CreateBitCast(srcField, byrefPtrType);
01835     srcField = CGF.emitBlockByrefAddress(srcField, byrefInfo, false,
01836                                          "src-object");
01837 
01838     generator.emitCopy(CGF, destField, srcField);
01839   }  
01840 
01841   CGF.FinishFunction();
01842 
01843   return llvm::ConstantExpr::getBitCast(Fn, CGF.Int8PtrTy);
01844 }
01845 
01846 /// Build the copy helper for a __block variable.
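///
/// A rough pseudo-C sketch of what this produces for a __block variable
/// 'x' holding a plain ObjC object (illustrative only; '__block_byref_x'
/// is a stand-in name, and the exact body depends on the
/// BlockByrefHelpers generator):
/// \code
/// static void __Block_byref_object_copy_(void *dst, void *src) {
///   struct __block_byref_x *d = dst, *s = src;
///   _Block_object_assign(&d->x, s->x,
///                        BLOCK_FIELD_IS_OBJECT | BLOCK_BYREF_CALLER);
/// }
/// \endcode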
01847 static llvm::Constant *buildByrefCopyHelper(CodeGenModule &CGM,
01848                                             const BlockByrefInfo &byrefInfo,
01849                                             BlockByrefHelpers &generator) {
01850   CodeGenFunction CGF(CGM);
01851   return generateByrefCopyHelper(CGF, byrefInfo, generator);
01852 }
01853 
01854 /// Generate code for a __block variable's dispose helper.
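///
/// For a __block variable 'x' holding a plain ObjC object, the dispose
/// helper is roughly (illustrative only; '__block_byref_x' is a stand-in
/// name):
/// \code
/// static void __Block_byref_object_dispose_(void *src) {
///   struct __block_byref_x *s = src;
///   _Block_object_dispose(s->x, BLOCK_FIELD_IS_OBJECT | BLOCK_BYREF_CALLER);
/// }
/// \endcode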
01855 static llvm::Constant *
01856 generateByrefDisposeHelper(CodeGenFunction &CGF,
01857                            const BlockByrefInfo &byrefInfo,
01858                            BlockByrefHelpers &generator) {
01859   ASTContext &Context = CGF.getContext();
01860   QualType R = Context.VoidTy;
01861 
01862   FunctionArgList args;
01863   ImplicitParamDecl src(CGF.getContext(), nullptr, SourceLocation(), nullptr,
01864                         Context.VoidPtrTy);
01865   args.push_back(&src);
01866 
01867   const CGFunctionInfo &FI = CGF.CGM.getTypes().arrangeFreeFunctionDeclaration(
01868       R, args, FunctionType::ExtInfo(), /*variadic=*/false);
01869 
01870   llvm::FunctionType *LTy = CGF.CGM.getTypes().GetFunctionType(FI);
01871 
01872   // FIXME: We'd like to make these mergeable by content, with
01873   // internal linkage.
01874   llvm::Function *Fn =
01875     llvm::Function::Create(LTy, llvm::GlobalValue::InternalLinkage,
01876                            "__Block_byref_object_dispose_",
01877                            &CGF.CGM.getModule());
01878 
01879   IdentifierInfo *II
01880     = &Context.Idents.get("__Block_byref_object_dispose_");
01881 
01882   FunctionDecl *FD = FunctionDecl::Create(Context,
01883                                           Context.getTranslationUnitDecl(),
01884                                           SourceLocation(),
01885                                           SourceLocation(), II, R, nullptr,
01886                                           SC_Static,
01887                                           false, false);
01888 
01889   CGF.CGM.SetInternalFunctionAttributes(nullptr, Fn, FI);
01890 
01891   CGF.StartFunction(FD, R, Fn, FI, args);
01892 
01893   if (generator.needsDispose()) {
01894     Address addr = CGF.GetAddrOfLocalVar(&src);
01895     addr = Address(CGF.Builder.CreateLoad(addr), byrefInfo.ByrefAlignment);
01896     auto byrefPtrType = byrefInfo.Type->getPointerTo(0);
01897     addr = CGF.Builder.CreateBitCast(addr, byrefPtrType);
01898     addr = CGF.emitBlockByrefAddress(addr, byrefInfo, false, "object");
01899 
01900     generator.emitDispose(CGF, addr);
01901   }
01902 
01903   CGF.FinishFunction();
01904 
01905   return llvm::ConstantExpr::getBitCast(Fn, CGF.Int8PtrTy);
01906 }
01907 
01908 /// Build the dispose helper for a __block variable.
01909 static llvm::Constant *buildByrefDisposeHelper(CodeGenModule &CGM,
01910                                                const BlockByrefInfo &byrefInfo,
01911                                                BlockByrefHelpers &generator) {
01912   CodeGenFunction CGF(CGM);
01913   return generateByrefDisposeHelper(CGF, byrefInfo, generator);
01914 }
01915 
01916 /// Lazily build the copy and dispose helpers for a __block variable
01917 /// with the given information.
01918 template <class T>
01919 static T *buildByrefHelpers(CodeGenModule &CGM, const BlockByrefInfo &byrefInfo,
01920                             T &&generator) {
01921   llvm::FoldingSetNodeID id;
01922   generator.Profile(id);
01923 
01924   void *insertPos;
01925   BlockByrefHelpers *node
01926     = CGM.ByrefHelpersCache.FindNodeOrInsertPos(id, insertPos);
01927   if (node) return static_cast<T*>(node);
01928 
01929   generator.CopyHelper = buildByrefCopyHelper(CGM, byrefInfo, generator);
01930   generator.DisposeHelper = buildByrefDisposeHelper(CGM, byrefInfo, generator);
01931 
01932   T *copy = new (CGM.getContext()) T(std::move(generator));
01933   CGM.ByrefHelpersCache.InsertNode(copy, insertPos);
01934   return copy;
01935 }
01936 
01937 /// Build the copy and dispose helpers for the given __block variable
01938 /// emission.  Places the helpers in the global cache.  Returns null
01939 /// if no helpers are required.
01940 BlockByrefHelpers *
01941 CodeGenFunction::buildByrefHelpers(llvm::StructType &byrefType,
01942                                    const AutoVarEmission &emission) {
01943   const VarDecl &var = *emission.Variable;
01944   QualType type = var.getType();
01945 
01946   auto &byrefInfo = getBlockByrefInfo(&var);
01947 
01948   // The alignment we care about for the purposes of uniquing byref
01949   // helpers is the alignment of the actual byref value field.
01950   CharUnits valueAlignment =
01951     byrefInfo.ByrefAlignment.alignmentAtOffset(byrefInfo.FieldOffset);
01952 
01953   if (const CXXRecordDecl *record = type->getAsCXXRecordDecl()) {
01954     const Expr *copyExpr = CGM.getContext().getBlockVarCopyInits(&var);
01955     if (!copyExpr && record->hasTrivialDestructor()) return nullptr;
01956 
01957     return ::buildByrefHelpers(
01958         CGM, byrefInfo, CXXByrefHelpers(valueAlignment, type, copyExpr));
01959   }
01960 
01961   // Otherwise, if we don't have a retainable type, there's nothing to do
01962   // beyond the default bitwise copy performed by the runtime.
01963   if (!type->isObjCRetainableType()) return nullptr;
01964 
01965   Qualifiers qs = type.getQualifiers();
01966 
01967   // If we have lifetime, that dominates.
01968   if (Qualifiers::ObjCLifetime lifetime = qs.getObjCLifetime()) {
01969     switch (lifetime) {
01970     case Qualifiers::OCL_None: llvm_unreachable("impossible");
01971 
01972     // These are just bits as far as the runtime is concerned.
01973     case Qualifiers::OCL_ExplicitNone:
01974     case Qualifiers::OCL_Autoreleasing:
01975       return nullptr;
01976 
01977     // Tell the runtime that this is ARC __weak, called by the
01978     // byref routines.
01979     case Qualifiers::OCL_Weak:
01980       return ::buildByrefHelpers(CGM, byrefInfo,
01981                                  ARCWeakByrefHelpers(valueAlignment));
01982 
01983     // ARC __strong __block variables need to be retained.
01984     case Qualifiers::OCL_Strong:
01985       // Block pointers need to be copied, and there's no direct
01986       // transfer possible.
01987       if (type->isBlockPointerType()) {
01988         return ::buildByrefHelpers(CGM, byrefInfo,
01989                                    ARCStrongBlockByrefHelpers(valueAlignment));
01990 
01991       // Otherwise, we transfer ownership of the retain from the stack
01992       // to the heap.
01993       } else {
01994         return ::buildByrefHelpers(CGM, byrefInfo,
01995                                    ARCStrongByrefHelpers(valueAlignment));
01996       }
01997     }
01998     llvm_unreachable("fell out of lifetime switch!");
01999   }
02000 
02001   BlockFieldFlags flags;
02002   if (type->isBlockPointerType()) {
02003     flags |= BLOCK_FIELD_IS_BLOCK;
02004   } else if (CGM.getContext().isObjCNSObjectType(type) || 
02005              type->isObjCObjectPointerType()) {
02006     flags |= BLOCK_FIELD_IS_OBJECT;
02007   } else {
02008     return nullptr;
02009   }
02010 
02011   if (type.isObjCGCWeak())
02012     flags |= BLOCK_FIELD_IS_WEAK;
02013 
02014   return ::buildByrefHelpers(CGM, byrefInfo,
02015                              ObjectByrefHelpers(valueAlignment, flags));
02016 }
02017 
02018 Address CodeGenFunction::emitBlockByrefAddress(Address baseAddr,
02019                                                const VarDecl *var,
02020                                                bool followForward) {
02021   auto &info = getBlockByrefInfo(var);
02022   return emitBlockByrefAddress(baseAddr, info, followForward, var->getName());
02023 }
02024 
02025 Address CodeGenFunction::emitBlockByrefAddress(Address baseAddr,
02026                                                const BlockByrefInfo &info,
02027                                                bool followForward,
02028                                                const llvm::Twine &name) {
02029   // Chase the forwarding address if requested.
02030   if (followForward) {
02031     Address forwardingAddr =
02032       Builder.CreateStructGEP(baseAddr, 1, getPointerSize(), "forwarding");
02033     baseAddr = Address(Builder.CreateLoad(forwardingAddr), info.ByrefAlignment);
02034   }
02035 
02036   return Builder.CreateStructGEP(baseAddr, info.FieldIndex,
02037                                  info.FieldOffset, name);
02038 }
02039 
02040 /// getBlockByrefInfo - This routine changes a __block variable declared as T x
02041 ///   into:
02042 ///
02043 ///      struct {
02044 ///        void *__isa;
02045 ///        void *__forwarding;
02046 ///        int32_t __flags;
02047 ///        int32_t __size;
02048 ///        void *__copy_helper;       // only if needed
02049 ///        void *__destroy_helper;    // only if needed
02050 ///        void *__byref_variable_layout;// only if needed
02051 ///        char padding[X];           // only if needed
02052 ///        T x;
02053 ///      } x
02054 ///
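/// For example, a simple '__block int x' on a typical 64-bit target needs
/// neither helpers nor a layout word, so it becomes roughly (offsets are
/// illustrative):
/// \code
///      struct __block_byref_x {
///        void *__isa;         // offset 0
///        void *__forwarding;  // offset 8
///        int32_t __flags;     // offset 16
///        int32_t __size;      // offset 20
///        int x;               // offset 24
///      } x;
/// \endcode
///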
02055 const BlockByrefInfo &CodeGenFunction::getBlockByrefInfo(const VarDecl *D) {
02056   auto it = BlockByrefInfos.find(D);
02057   if (it != BlockByrefInfos.end())
02058     return it->second;
02059 
02060   llvm::StructType *byrefType =
02061     llvm::StructType::create(getLLVMContext(),
02062                              "struct.__block_byref_" + D->getNameAsString());
02063   
02064   QualType Ty = D->getType();
02065 
02066   CharUnits size;
02067   SmallVector<llvm::Type *, 8> types;
02068   
02069   // void *__isa;
02070   types.push_back(Int8PtrTy);
02071   size += getPointerSize();
02072   
02073   // void *__forwarding;
02074   types.push_back(llvm::PointerType::getUnqual(byrefType));
02075   size += getPointerSize();
02076   
02077   // int32_t __flags;
02078   types.push_back(Int32Ty);
02079   size += CharUnits::fromQuantity(4);
02080     
02081   // int32_t __size;
02082   types.push_back(Int32Ty);
02083   size += CharUnits::fromQuantity(4);
02084 
02085   // Note that this must match *exactly* the logic in buildByrefHelpers.
02086   bool hasCopyAndDispose = getContext().BlockRequiresCopying(Ty, D);
02087   if (hasCopyAndDispose) {
02088     /// void *__copy_helper;
02089     types.push_back(Int8PtrTy);
02090     size += getPointerSize();
02091     
02092     /// void *__destroy_helper;
02093     types.push_back(Int8PtrTy);
02094     size += getPointerSize();
02095   }
02096 
02097   bool HasByrefExtendedLayout = false;
02098   Qualifiers::ObjCLifetime Lifetime;
02099   if (getContext().getByrefLifetime(Ty, Lifetime, HasByrefExtendedLayout) &&
02100       HasByrefExtendedLayout) {
02101     /// void *__byref_variable_layout;
02102     types.push_back(Int8PtrTy);
02103     size += CharUnits::fromQuantity(PointerSizeInBytes);
02104   }
02105 
02106   // T x;
02107   llvm::Type *varTy = ConvertTypeForMem(Ty);
02108 
02109   bool packed = false;
02110   CharUnits varAlign = getContext().getDeclAlign(D);
02111   CharUnits varOffset = size.alignTo(varAlign);
02112 
02113   // We may have to insert padding.
02114   if (varOffset != size) {
02115     llvm::Type *paddingTy =
02116       llvm::ArrayType::get(Int8Ty, (varOffset - size).getQuantity());
02117 
02118     types.push_back(paddingTy);
02119     size = varOffset;
02120 
02121   // Conversely, we might have to prevent LLVM from inserting padding.
02122   } else if (CGM.getDataLayout().getABITypeAlignment(varTy)
02123                > varAlign.getQuantity()) {
02124     packed = true;
02125   }
02126   types.push_back(varTy);
02127 
02128   byrefType->setBody(types, packed);
02129 
02130   BlockByrefInfo info;
02131   info.Type = byrefType;
02132   info.FieldIndex = types.size() - 1;
02133   info.FieldOffset = varOffset;
02134   info.ByrefAlignment = std::max(varAlign, getPointerAlign());
02135 
02136   auto pair = BlockByrefInfos.insert({D, info});
02137   assert(pair.second && "info was inserted recursively?");
02138   return pair.first->second;
02139 }
02140 
02141 /// Initialize the structural components of a __block variable, i.e.
02142 /// everything but the actual object.
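///
/// In pseudo-C terms, for a __block variable 'x' that needs no helpers
/// and no extended layout, this amounts to roughly (illustrative only):
/// \code
///   x.__isa = 0;              // 1 only for a __weak variable under ObjC GC
///   x.__forwarding = &x;      // initially points at the stack copy
///   x.__flags = 0;            // BLOCK_BYREF_* bits are OR'd in when needed
///   x.__size = sizeof(x);
/// \endcode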
02143 void CodeGenFunction::emitByrefStructureInit(const AutoVarEmission &emission) {
02144   // Find the address of the local.
02145   Address addr = emission.Addr;
02146 
02147   // That's an alloca of the byref structure type.
02148   llvm::StructType *byrefType = cast<llvm::StructType>(
02149     cast<llvm::PointerType>(addr.getPointer()->getType())->getElementType());
02150 
02151   unsigned nextHeaderIndex = 0;
02152   CharUnits nextHeaderOffset;
02153   auto storeHeaderField = [&](llvm::Value *value, CharUnits fieldSize,
02154                               const Twine &name) {
02155     auto fieldAddr = Builder.CreateStructGEP(addr, nextHeaderIndex,
02156                                              nextHeaderOffset, name);
02157     Builder.CreateStore(value, fieldAddr);
02158 
02159     nextHeaderIndex++;
02160     nextHeaderOffset += fieldSize;
02161   };
02162 
02163   // Build the byref helpers if necessary.  This is null if we don't need any.
02164   BlockByrefHelpers *helpers = buildByrefHelpers(*byrefType, emission);
02165 
02166   const VarDecl &D = *emission.Variable;
02167   QualType type = D.getType();
02168 
02169   bool HasByrefExtendedLayout;
02170   Qualifiers::ObjCLifetime ByrefLifetime;
02171   bool ByRefHasLifetime =
02172     getContext().getByrefLifetime(type, ByrefLifetime, HasByrefExtendedLayout);
02173 
02174   llvm::Value *V;
02175 
02176   // Initialize the 'isa', which is just 0 or 1.
02177   int isa = 0;
02178   if (type.isObjCGCWeak())
02179     isa = 1;
02180   V = Builder.CreateIntToPtr(Builder.getInt32(isa), Int8PtrTy, "isa");
02181   storeHeaderField(V, getPointerSize(), "byref.isa");
02182 
02183   // Store the address of the variable into its own forwarding pointer.
02184   storeHeaderField(addr.getPointer(), getPointerSize(), "byref.forwarding");
02185 
02186   // Blocks ABI:
02187   //   c) the flags field is set to either 0 if no helper functions are
02188   //      needed or BLOCK_BYREF_HAS_COPY_DISPOSE if they are,
02189   BlockFlags flags;
02190   if (helpers) flags |= BLOCK_BYREF_HAS_COPY_DISPOSE;
02191   if (ByRefHasLifetime) {
02192     if (HasByrefExtendedLayout) flags |= BLOCK_BYREF_LAYOUT_EXTENDED;
02193       else switch (ByrefLifetime) {
02194         case Qualifiers::OCL_Strong:
02195           flags |= BLOCK_BYREF_LAYOUT_STRONG;
02196           break;
02197         case Qualifiers::OCL_Weak:
02198           flags |= BLOCK_BYREF_LAYOUT_WEAK;
02199           break;
02200         case Qualifiers::OCL_ExplicitNone:
02201           flags |= BLOCK_BYREF_LAYOUT_UNRETAINED;
02202           break;
02203         case Qualifiers::OCL_None:
02204           if (!type->isObjCObjectPointerType() && !type->isBlockPointerType())
02205             flags |= BLOCK_BYREF_LAYOUT_NON_OBJECT;
02206           break;
02207         default:
02208           break;
02209       }
02210     if (CGM.getLangOpts().ObjCGCBitmapPrint) {
02211       printf("\n Inline flag for BYREF variable layout (%d):", flags.getBitMask());
02212       if (flags & BLOCK_BYREF_HAS_COPY_DISPOSE)
02213         printf(" BLOCK_BYREF_HAS_COPY_DISPOSE");
02214       if (flags & BLOCK_BYREF_LAYOUT_MASK) {
02215         BlockFlags ThisFlag(flags.getBitMask() & BLOCK_BYREF_LAYOUT_MASK);
02216         if (ThisFlag ==  BLOCK_BYREF_LAYOUT_EXTENDED)
02217           printf(" BLOCK_BYREF_LAYOUT_EXTENDED");
02218         if (ThisFlag ==  BLOCK_BYREF_LAYOUT_STRONG)
02219           printf(" BLOCK_BYREF_LAYOUT_STRONG");
02220         if (ThisFlag == BLOCK_BYREF_LAYOUT_WEAK)
02221           printf(" BLOCK_BYREF_LAYOUT_WEAK");
02222         if (ThisFlag == BLOCK_BYREF_LAYOUT_UNRETAINED)
02223           printf(" BLOCK_BYREF_LAYOUT_UNRETAINED");
02224         if (ThisFlag == BLOCK_BYREF_LAYOUT_NON_OBJECT)
02225           printf(" BLOCK_BYREF_LAYOUT_NON_OBJECT");
02226       }
02227       printf("\n");
02228     }
02229   }
02230   storeHeaderField(llvm::ConstantInt::get(IntTy, flags.getBitMask()),
02231                    getIntSize(), "byref.flags");
02232 
02233   CharUnits byrefSize = CGM.GetTargetTypeStoreSize(byrefType);
02234   V = llvm::ConstantInt::get(IntTy, byrefSize.getQuantity());
02235   storeHeaderField(V, getIntSize(), "byref.size");
02236 
02237   if (helpers) {
02238     storeHeaderField(helpers->CopyHelper, getPointerSize(),
02239                      "byref.copyHelper");
02240     storeHeaderField(helpers->DisposeHelper, getPointerSize(),
02241                      "byref.disposeHelper");
02242   }
02243 
02244   if (ByRefHasLifetime && HasByrefExtendedLayout) {
02245     auto layoutInfo = CGM.getObjCRuntime().BuildByrefLayout(CGM, type);
02246     storeHeaderField(layoutInfo, getPointerSize(), "byref.layout");
02247   }
02248 }
02249 
02250 void CodeGenFunction::BuildBlockRelease(llvm::Value *V, BlockFieldFlags flags) {
02251   llvm::Value *F = CGM.getBlockObjectDispose();
02252   llvm::Value *args[] = {
02253     Builder.CreateBitCast(V, Int8PtrTy),
02254     llvm::ConstantInt::get(Int32Ty, flags.getBitMask())
02255   };
02256   EmitNounwindRuntimeCall(F, args); // FIXME: throwing destructors?
02257 }
02258 
02259 namespace {
02260   /// Release a __block variable.
02261   struct CallBlockRelease final : EHScopeStack::Cleanup {
02262     llvm::Value *Addr;
02263     CallBlockRelease(llvm::Value *Addr) : Addr(Addr) {}
02264 
02265     void Emit(CodeGenFunction &CGF, Flags flags) override {
02266       // Should we be passing FIELD_IS_WEAK here?
02267       CGF.BuildBlockRelease(Addr, BLOCK_FIELD_IS_BYREF);
02268     }
02269   };
02270 } // end anonymous namespace
02271 
02272 /// Enter a cleanup to destroy a __block variable.  Note that this
02273 /// cleanup should be a no-op if the variable hasn't left the stack
02274 /// yet; if a cleanup is required for the variable itself, that needs
02275 /// to be done externally.
02276 void CodeGenFunction::enterByrefCleanup(const AutoVarEmission &emission) {
02277   // We don't enter this cleanup if we're in pure-GC mode.
02278   if (CGM.getLangOpts().getGC() == LangOptions::GCOnly)
02279     return;
02280 
02281   EHStack.pushCleanup<CallBlockRelease>(NormalAndEHCleanup,
02282                                         emission.Addr.getPointer());
02283 }
02284 
02285 /// Adjust the declaration of something from the blocks API.
02286 static void configureBlocksRuntimeObject(CodeGenModule &CGM,
02287                                          llvm::Constant *C) {
02288   if (!CGM.getLangOpts().BlocksRuntimeOptional) return;
02289 
02290   auto *GV = cast<llvm::GlobalValue>(C->stripPointerCasts());
02291   if (GV->isDeclaration() && GV->hasExternalLinkage())
02292     GV->setLinkage(llvm::GlobalValue::ExternalWeakLinkage);
02293 }
02294 
02295 llvm::Constant *CodeGenModule::getBlockObjectDispose() {
02296   if (BlockObjectDispose)
02297     return BlockObjectDispose;
02298 
02299   llvm::Type *args[] = { Int8PtrTy, Int32Ty };
02300   llvm::FunctionType *fty
02301     = llvm::FunctionType::get(VoidTy, args, false);
02302   BlockObjectDispose = CreateRuntimeFunction(fty, "_Block_object_dispose");
02303   configureBlocksRuntimeObject(*this, BlockObjectDispose);
02304   return BlockObjectDispose;
02305 }
02306 
02307 llvm::Constant *CodeGenModule::getBlockObjectAssign() {
02308   if (BlockObjectAssign)
02309     return BlockObjectAssign;
02310 
02311   llvm::Type *args[] = { Int8PtrTy, Int8PtrTy, Int32Ty };
02312   llvm::FunctionType *fty
02313     = llvm::FunctionType::get(VoidTy, args, false);
02314   BlockObjectAssign = CreateRuntimeFunction(fty, "_Block_object_assign");
02315   configureBlocksRuntimeObject(*this, BlockObjectAssign);
02316   return BlockObjectAssign;
02317 }
02318 
02319 llvm::Constant *CodeGenModule::getNSConcreteGlobalBlock() {
02320   if (NSConcreteGlobalBlock)
02321     return NSConcreteGlobalBlock;
02322 
02323   NSConcreteGlobalBlock = GetOrCreateLLVMGlobal("_NSConcreteGlobalBlock",
02324                                                 Int8PtrTy->getPointerTo(),
02325                                                 nullptr);
02326   configureBlocksRuntimeObject(*this, NSConcreteGlobalBlock);
02327   return NSConcreteGlobalBlock;
02328 }
02329 
02330 llvm::Constant *CodeGenModule::getNSConcreteStackBlock() {
02331   if (NSConcreteStackBlock)
02332     return NSConcreteStackBlock;
02333 
02334   NSConcreteStackBlock = GetOrCreateLLVMGlobal("_NSConcreteStackBlock",
02335                                                Int8PtrTy->getPointerTo(),
02336                                                nullptr);
02337   configureBlocksRuntimeObject(*this, NSConcreteStackBlock);
02338   return NSConcreteStackBlock;  
02339 }