CodeGenFunction.h revision d26bc76c98006609002d9930f8840490e88ac5b5
//===-- CodeGenFunction.h - Per-Function state for LLVM CodeGen -*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This is the internal per-function state used for llvm translation.
//
//===----------------------------------------------------------------------===//

#ifndef CLANG_CODEGEN_CODEGENFUNCTION_H
#define CLANG_CODEGEN_CODEGENFUNCTION_H

#include "clang/AST/Type.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/ExprObjC.h"
#include "clang/AST/CharUnits.h"
#include "clang/Basic/ABI.h"
#include "clang/Basic/TargetInfo.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Support/ValueHandle.h"
#include "CodeGenModule.h"
#include "CGBuilder.h"
#include "CGValue.h"

namespace llvm {
  class BasicBlock;
  class LLVMContext;
  class MDNode;
  class Module;
  class SwitchInst;
  class Twine;
  class Value;
  class CallSite;
}

namespace clang {
  class APValue;
  class ASTContext;
  class CXXDestructorDecl;
  class CXXTryStmt;
  class Decl;
  class LabelDecl;
  class EnumConstantDecl;
  class FunctionDecl;
  class FunctionProtoType;
  class LabelStmt;
  class ObjCContainerDecl;
  class ObjCInterfaceDecl;
  class ObjCIvarDecl;
  class ObjCMethodDecl;
  class ObjCImplementationDecl;
  class ObjCPropertyImplDecl;
  class TargetInfo;
  class TargetCodeGenInfo;
  class VarDecl;
  class ObjCForCollectionStmt;
  class ObjCAtTryStmt;
  class ObjCAtThrowStmt;
  class ObjCAtSynchronizedStmt;

namespace CodeGen {
  class CodeGenTypes;
  class CGDebugInfo;
  class CGFunctionInfo;
  class CGRecordLayout;
  class CGBlockInfo;
  class CGCXXABI;
  class BlockFlags;
  class BlockFieldFlags;

/// A branch fixup.  These are required when emitting a goto to a
/// label which hasn't been emitted yet.  The goto is optimistically
/// emitted as a branch to the basic block for the label, and (if it
/// occurs in a scope with non-trivial cleanups) a fixup is added to
/// the innermost cleanup.  When a (normal) cleanup is popped, any
/// unresolved fixups in that scope are threaded through the cleanup.
struct BranchFixup {
  /// The block containing the terminator which needs to be modified
  /// into a switch if this fixup is resolved into the current scope.
  /// If null, LatestBranch points directly to the destination.
  llvm::BasicBlock *OptimisticBranchBlock;

  /// The ultimate destination of the branch.
  ///
  /// This can be set to null to indicate that this fixup was
  /// successfully resolved.
  llvm::BasicBlock *Destination;

  /// The destination index value.
  unsigned DestinationIndex;

  /// The initial branch of the fixup.
  llvm::BranchInst *InitialBranch;
};

template <class T> struct InvariantValue {
  typedef T type;
  typedef T saved_type;
  static bool needsSaving(type value) { return false; }
  static saved_type save(CodeGenFunction &CGF, type value) { return value; }
  static type restore(CodeGenFunction &CGF, saved_type value) { return value; }
};

/// A metaprogramming class for ensuring that a value will dominate an
/// arbitrary position in a function.
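///
/// A minimal usage sketch; the DominatingPointer<T,true> specialization
/// that makes this useful for instruction values is declared at the end
/// of this file, and the names below are only illustrative:
/// \code
///   // Save a value now so a conditionally-emitted cleanup can reuse it
///   // from a block the original definition might not dominate.
///   DominatingValue<llvm::Value*>::saved_type saved =
///       DominatingValue<llvm::Value*>::save(CGF, ptr);
///   // ...
///   llvm::Value *restored = DominatingValue<llvm::Value*>::restore(CGF, saved);
/// \endcode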
template <class T> struct DominatingValue : InvariantValue<T> {};

template <class T, bool mightBeInstruction =
            llvm::is_base_of<llvm::Value, T>::value &&
            !llvm::is_base_of<llvm::Constant, T>::value &&
            !llvm::is_base_of<llvm::BasicBlock, T>::value>
struct DominatingPointer;
template <class T> struct DominatingPointer<T,false> : InvariantValue<T*> {};
// template <class T> struct DominatingPointer<T,true> at end of file

template <class T> struct DominatingValue<T*> : DominatingPointer<T> {};

enum CleanupKind {
  EHCleanup = 0x1,
  NormalCleanup = 0x2,
  NormalAndEHCleanup = EHCleanup | NormalCleanup,

  InactiveCleanup = 0x4,
  InactiveEHCleanup = EHCleanup | InactiveCleanup,
  InactiveNormalCleanup = NormalCleanup | InactiveCleanup,
  InactiveNormalAndEHCleanup = NormalAndEHCleanup | InactiveCleanup
};

/// A stack of scopes which respond to exceptions, including cleanups
/// and catch blocks.
class EHScopeStack {
public:
  /// A saved depth on the scope stack.  This is necessary because
  /// pushing scopes onto the stack invalidates iterators.
  class stable_iterator {
    friend class EHScopeStack;

    /// Offset from StartOfData to EndOfBuffer.
    ptrdiff_t Size;

    stable_iterator(ptrdiff_t Size) : Size(Size) {}

  public:
    static stable_iterator invalid() { return stable_iterator(-1); }
    stable_iterator() : Size(-1) {}

    bool isValid() const { return Size >= 0; }

    /// Returns true if this scope encloses I.
    /// Returns false if I is invalid.
    /// This scope must be valid.
    bool encloses(stable_iterator I) const { return Size <= I.Size; }

    /// Returns true if this scope strictly encloses I: that is,
    /// if it encloses I and is not I.
    /// Returns false if I is invalid.
    /// This scope must be valid.
    bool strictlyEncloses(stable_iterator I) const { return Size < I.Size; }

    friend bool operator==(stable_iterator A, stable_iterator B) {
      return A.Size == B.Size;
    }
    friend bool operator!=(stable_iterator A, stable_iterator B) {
      return A.Size != B.Size;
    }
  };

  /// Information for lazily generating a cleanup.  Subclasses must be
  /// POD-like: cleanups will not be destructed, and they will be
  /// allocated on the cleanup stack and freely copied and moved
  /// around.
  ///
  /// Cleanup implementations should generally be declared in an
  /// anonymous namespace.
  class Cleanup {
  public:
    // Anchor the construction vtable.  We use the destructor because
    // gcc gives an obnoxious warning if there are virtual methods
    // with an accessible non-virtual destructor.  Unfortunately,
    // declaring this destructor makes it non-trivial, but there
    // doesn't seem to be any other way around this warning.
    //
    // This destructor will never be called.
    virtual ~Cleanup();

    /// Emit the cleanup.  For normal cleanups, this is run in the
    /// same EH context as when the cleanup was pushed, i.e. the
    /// immediately-enclosing context of the cleanup scope.  For
    /// EH cleanups, this is run in a terminate context.
    ///
    /// \param IsForEHCleanup true if this is for an EH cleanup, false
    /// if for a normal cleanup.
    virtual void Emit(CodeGenFunction &CGF, bool IsForEHCleanup) = 0;
  };

  /// UnconditionalCleanupN stores its N parameters and just passes
  /// them to the real cleanup function.
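  ///
  /// A sketch of the cleanup class these adapters expect; the name and
  /// body here are purely illustrative:
  /// \code
  ///   struct MyCleanup {
  ///     // Signature that UnconditionalCleanup1 forwards to.
  ///     static void Emit(CodeGenFunction &CGF, bool IsForEHCleanup,
  ///                      llvm::Value *Ptr) { /* emit the cleanup IR */ }
  ///   };
  ///   typedef EHScopeStack::UnconditionalCleanup1<MyCleanup, llvm::Value*>
  ///     MyCleanupType;
  /// \endcode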
  template <class T, class A0>
  class UnconditionalCleanup1 : public Cleanup {
    A0 a0;
  public:
    UnconditionalCleanup1(A0 a0) : a0(a0) {}
    void Emit(CodeGenFunction &CGF, bool IsForEHCleanup) {
      T::Emit(CGF, IsForEHCleanup, a0);
    }
  };

  template <class T, class A0, class A1>
  class UnconditionalCleanup2 : public Cleanup {
    A0 a0; A1 a1;
  public:
    UnconditionalCleanup2(A0 a0, A1 a1) : a0(a0), a1(a1) {}
    void Emit(CodeGenFunction &CGF, bool IsForEHCleanup) {
      T::Emit(CGF, IsForEHCleanup, a0, a1);
    }
  };

  /// ConditionalCleanupN stores the saved form of its N parameters,
  /// then restores them and performs the cleanup.
  template <class T, class A0>
  class ConditionalCleanup1 : public Cleanup {
    typedef typename DominatingValue<A0>::saved_type A0_saved;
    A0_saved a0_saved;

    void Emit(CodeGenFunction &CGF, bool IsForEHCleanup) {
      A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
      T::Emit(CGF, IsForEHCleanup, a0);
    }

  public:
    ConditionalCleanup1(A0_saved a0)
      : a0_saved(a0) {}
  };

  template <class T, class A0, class A1>
  class ConditionalCleanup2 : public Cleanup {
    typedef typename DominatingValue<A0>::saved_type A0_saved;
    typedef typename DominatingValue<A1>::saved_type A1_saved;
    A0_saved a0_saved;
    A1_saved a1_saved;

    void Emit(CodeGenFunction &CGF, bool IsForEHCleanup) {
      A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
      A1 a1 = DominatingValue<A1>::restore(CGF, a1_saved);
      T::Emit(CGF, IsForEHCleanup, a0, a1);
    }

  public:
    ConditionalCleanup2(A0_saved a0, A1_saved a1)
      : a0_saved(a0), a1_saved(a1) {}
  };

private:
  // The implementation for this class is in CGException.h and
  // CGException.cpp; the definition is here because it's used as a
  // member of CodeGenFunction.

  /// The start of the scope-stack buffer, i.e. the allocated pointer
  /// for the buffer.  All of these pointers are either simultaneously
  /// null or simultaneously valid.
  char *StartOfBuffer;

  /// The end of the buffer.
  char *EndOfBuffer;

  /// The first valid entry in the buffer.
  char *StartOfData;

  /// The innermost normal cleanup on the stack.
  stable_iterator InnermostNormalCleanup;

  /// The innermost EH cleanup on the stack.
  stable_iterator InnermostEHCleanup;

  /// The number of catches on the stack.
  unsigned CatchDepth;

  /// The current EH destination index.  Reset to FirstEHDestIndex
  /// whenever the last EH cleanup is popped.
  unsigned NextEHDestIndex;
  enum { FirstEHDestIndex = 1 };

  /// The current set of branch fixups.  A branch fixup is a jump to
  /// an as-yet unemitted label, i.e. a label for which we don't yet
  /// know the EH stack depth.  Whenever we pop a cleanup, we have
  /// to thread all the current branch fixups through it.
  ///
  /// Fixups are recorded as the Use of the respective branch or
  /// switch statement.  The use points to the final destination.
  /// When popping out of a cleanup, these uses are threaded through
  /// the cleanup and adjusted to point to the new cleanup.
  ///
  /// Note that branches are allowed to jump into protected scopes
  /// in certain situations; e.g. the following code is legal:
  ///     struct A { ~A(); }; // trivial ctor, non-trivial dtor
  ///     goto foo;
  ///     A a;
  ///    foo:
  ///     bar();
  llvm::SmallVector<BranchFixup, 8> BranchFixups;

  char *allocate(size_t Size);

  void *pushCleanup(CleanupKind K, size_t DataSize);

public:
  EHScopeStack() : StartOfBuffer(0), EndOfBuffer(0), StartOfData(0),
                   InnermostNormalCleanup(stable_end()),
                   InnermostEHCleanup(stable_end()),
                   CatchDepth(0), NextEHDestIndex(FirstEHDestIndex) {}
  ~EHScopeStack() { delete[] StartOfBuffer; }

  // Variadic templates would make this not terrible.

  /// Push a lazily-created cleanup on the stack.
  template <class T>
  void pushCleanup(CleanupKind Kind) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T();
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0>
  void pushCleanup(CleanupKind Kind, A0 a0) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T(a0);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0, class A1>
  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T(a0, a1);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0, class A1, class A2>
  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T(a0, a1, a2);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0, class A1, class A2, class A3>
  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0, class A1, class A2, class A3, class A4>
  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3, A4 a4) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3, a4);
    (void) Obj;
  }

  // Feel free to add more variants of the following:

  /// Push a cleanup with non-constant storage requirements on the
  /// stack.  The cleanup type must provide an additional static method:
  ///   static size_t getExtraSize(size_t);
  /// The argument to this method will be the value N, which will also
  /// be passed as the first argument to the constructor.
  ///
  /// The data stored in the extra storage must obey the same
  /// restrictions as normal cleanup member data.
  ///
  /// The pointer returned from this method is valid until the cleanup
  /// stack is modified.
  template <class T, class A0, class A1, class A2>
  T *pushCleanupWithExtra(CleanupKind Kind, size_t N, A0 a0, A1 a1, A2 a2) {
    void *Buffer = pushCleanup(Kind, sizeof(T) + T::getExtraSize(N));
    return new (Buffer) T(N, a0, a1, a2);
  }

  /// Pops a cleanup scope off the stack.  This should only be called
  /// by CodeGenFunction::PopCleanupBlock.
  void popCleanup();

  /// Push a set of catch handlers on the stack.  The catch is
  /// uninitialized and will need to have the given number of handlers
  /// set on it.
  class EHCatchScope *pushCatch(unsigned NumHandlers);

  /// Pops a catch scope off the stack.
  void popCatch();

  /// Push an exception filter on the stack.
  class EHFilterScope *pushFilter(unsigned NumFilters);

  /// Pops an exception filter off the stack.
  void popFilter();

  /// Push a terminate handler on the stack.
  void pushTerminate();

  /// Pops a terminate handler off the stack.
  void popTerminate();

  /// Determines whether the exception-scopes stack is empty.
  bool empty() const { return StartOfData == EndOfBuffer; }

  bool requiresLandingPad() const {
    return (CatchDepth || hasEHCleanups());
  }

  /// Determines whether there are any normal cleanups on the stack.
  bool hasNormalCleanups() const {
    return InnermostNormalCleanup != stable_end();
  }

  /// Returns the innermost normal cleanup on the stack, or
  /// stable_end() if there are no normal cleanups.
  stable_iterator getInnermostNormalCleanup() const {
    return InnermostNormalCleanup;
  }
  stable_iterator getInnermostActiveNormalCleanup() const; // CGException.h

  /// Determines whether there are any EH cleanups on the stack.
  bool hasEHCleanups() const {
    return InnermostEHCleanup != stable_end();
  }

  /// Returns the innermost EH cleanup on the stack, or stable_end()
  /// if there are no EH cleanups.
  stable_iterator getInnermostEHCleanup() const {
    return InnermostEHCleanup;
  }
  stable_iterator getInnermostActiveEHCleanup() const; // CGException.h

  /// An unstable reference to a scope-stack depth.  Invalidated by
  /// pushes but not pops.
  class iterator;

  /// Returns an iterator pointing to the innermost EH scope.
  iterator begin() const;

  /// Returns an iterator pointing to the outermost EH scope.
  iterator end() const;

  /// Create a stable reference to the top of the EH stack.  The
  /// returned reference is valid until that scope is popped off the
  /// stack.
  stable_iterator stable_begin() const {
    return stable_iterator(EndOfBuffer - StartOfData);
  }

  /// Create a stable reference to the bottom of the EH stack.
  static stable_iterator stable_end() {
    return stable_iterator(0);
  }

  /// Translates an iterator into a stable_iterator.
  stable_iterator stabilize(iterator it) const;

  /// Finds the nearest cleanup enclosing the given iterator.
  /// Returns stable_iterator::invalid() if there are no such cleanups.
  stable_iterator getEnclosingEHCleanup(iterator it) const;

  /// Turn a stable reference to a scope depth into an unstable pointer
  /// to the EH stack.
  iterator find(stable_iterator save) const;

  /// Removes the cleanup pointed to by the given stable_iterator.
  void removeCleanup(stable_iterator save);

  /// Add a branch fixup to the current cleanup scope.
  BranchFixup &addBranchFixup() {
    assert(hasNormalCleanups() && "adding fixup in scope without cleanups");
    BranchFixups.push_back(BranchFixup());
    return BranchFixups.back();
  }

  unsigned getNumBranchFixups() const { return BranchFixups.size(); }
  BranchFixup &getBranchFixup(unsigned I) {
    assert(I < getNumBranchFixups());
    return BranchFixups[I];
  }

  /// Pops lazily-removed fixups from the end of the list.  This
  /// should only be called by procedures which have just popped a
  /// cleanup or resolved one or more fixups.
  void popNullFixups();

  /// Clears the branch-fixups list.  This should only be called by
  /// ResolveAllBranchFixups.
  void clearFixups() { BranchFixups.clear(); }

  /// Gets the next EH destination index.
  unsigned getNextEHDestIndex() { return NextEHDestIndex++; }
};

/// CodeGenFunction - This class organizes the per-function state that is used
/// while generating LLVM code.
class CodeGenFunction : public CodeGenTypeCache {
  CodeGenFunction(const CodeGenFunction&);  // DO NOT IMPLEMENT
  void operator=(const CodeGenFunction&);   // DO NOT IMPLEMENT

  friend class CGCXXABI;
public:
  /// A jump destination is an abstract label, branching to which may
  /// require a jump out through normal cleanups.
  struct JumpDest {
    JumpDest() : Block(0), ScopeDepth(), Index(0) {}
    JumpDest(llvm::BasicBlock *Block,
             EHScopeStack::stable_iterator Depth,
             unsigned Index)
      : Block(Block), ScopeDepth(Depth), Index(Index) {}

    bool isValid() const { return Block != 0; }
    llvm::BasicBlock *getBlock() const { return Block; }
    EHScopeStack::stable_iterator getScopeDepth() const { return ScopeDepth; }
    unsigned getDestIndex() const { return Index; }

  private:
    llvm::BasicBlock *Block;
    EHScopeStack::stable_iterator ScopeDepth;
    unsigned Index;
  };

  /// An unwind destination is an abstract label, branching to which
  /// may require a jump out through EH cleanups.
  struct UnwindDest {
    UnwindDest() : Block(0), ScopeDepth(), Index(0) {}
    UnwindDest(llvm::BasicBlock *Block,
               EHScopeStack::stable_iterator Depth,
               unsigned Index)
      : Block(Block), ScopeDepth(Depth), Index(Index) {}

    bool isValid() const { return Block != 0; }
    llvm::BasicBlock *getBlock() const { return Block; }
    EHScopeStack::stable_iterator getScopeDepth() const { return ScopeDepth; }
    unsigned getDestIndex() const { return Index; }

  private:
    llvm::BasicBlock *Block;
    EHScopeStack::stable_iterator ScopeDepth;
    unsigned Index;
  };

  CodeGenModule &CGM;  // Per-module state.
  const TargetInfo &Target;

  typedef std::pair<llvm::Value *, llvm::Value *> ComplexPairTy;
  CGBuilderTy Builder;

  /// CurFuncDecl - Holds the Decl for the current function or ObjC method.
  /// This excludes BlockDecls.
  const Decl *CurFuncDecl;
  /// CurCodeDecl - This is the inner-most code context, which includes blocks.
  const Decl *CurCodeDecl;
  const CGFunctionInfo *CurFnInfo;
  QualType FnRetTy;
  llvm::Function *CurFn;

  /// CurGD - The GlobalDecl for the current function being compiled.
  GlobalDecl CurGD;

  /// ReturnBlock - Unified return block.
  JumpDest ReturnBlock;

  /// ReturnValue - The temporary alloca to hold the return value. This is null
  /// iff the function has no return value.
  llvm::Value *ReturnValue;

  /// RethrowBlock - Unified rethrow block.
  UnwindDest RethrowBlock;

  /// AllocaInsertPoint - This is an instruction in the entry block before which
  /// we prefer to insert allocas.
  llvm::AssertingVH<llvm::Instruction> AllocaInsertPt;

  bool CatchUndefined;

  const CodeGen::CGBlockInfo *BlockInfo;
  llvm::Value *BlockPointer;

  /// \brief A mapping from NRVO variables to the flags used to indicate
  /// when the NRVO has been applied to this variable.
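  ///
  /// (Each flag is expected to be a boolean slot that is set when the
  /// NRVO'd return statement actually executes, so the variable's
  /// destructor cleanup can be skipped on that path; this is a
  /// descriptive sketch of the intent, not a guarantee of the
  /// representation.)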
  llvm::DenseMap<const VarDecl *, llvm::Value *> NRVOFlags;

  EHScopeStack EHStack;

  /// i32s containing the indexes of the cleanup destinations.
  llvm::AllocaInst *NormalCleanupDest;
  llvm::AllocaInst *EHCleanupDest;

  unsigned NextCleanupDestIndex;

  /// The exception slot.  All landing pads write the current
  /// exception pointer into this alloca.
  llvm::Value *ExceptionSlot;

  /// Emits a landing pad for the current EH stack.
  llvm::BasicBlock *EmitLandingPad();

  llvm::BasicBlock *getInvokeDestImpl();

  /// Set up the last cleanup that was pushed as a conditional
  /// full-expression cleanup.
  void initFullExprCleanup();

  template <class T>
  typename DominatingValue<T>::saved_type saveValueInCond(T value) {
    return DominatingValue<T>::save(*this, value);
  }

public:
  /// ObjCEHValueStack - Stack of Objective-C exception values, used for
  /// rethrows.
  llvm::SmallVector<llvm::Value*, 8> ObjCEHValueStack;

  // A struct holding information about a finally block's IR
  // generation.  For now, doesn't actually hold anything.
  struct FinallyInfo {
  };

  FinallyInfo EnterFinallyBlock(const Stmt *Stmt,
                                llvm::Constant *BeginCatchFn,
                                llvm::Constant *EndCatchFn,
                                llvm::Constant *RethrowFn);
  void ExitFinallyBlock(FinallyInfo &FinallyInfo);

  /// pushFullExprCleanup - Push a cleanup to be run at the end of the
  /// current full-expression.  Safe against the possibility that
  /// we're currently inside a conditionally-evaluated expression.
  template <class T, class A0>
  void pushFullExprCleanup(CleanupKind kind, A0 a0) {
    // If we're not in a conditional branch, or if none of the
    // arguments requires saving, then use the unconditional cleanup.
    if (!isInConditionalBranch()) {
      typedef EHScopeStack::UnconditionalCleanup1<T, A0> CleanupType;
      return EHStack.pushCleanup<CleanupType>(kind, a0);
    }

    typename DominatingValue<A0>::saved_type a0_saved = saveValueInCond(a0);

    typedef EHScopeStack::ConditionalCleanup1<T, A0> CleanupType;
    EHStack.pushCleanup<CleanupType>(kind, a0_saved);
    initFullExprCleanup();
  }

  /// pushFullExprCleanup - Push a cleanup to be run at the end of the
  /// current full-expression.  Safe against the possibility that
  /// we're currently inside a conditionally-evaluated expression.
  template <class T, class A0, class A1>
  void pushFullExprCleanup(CleanupKind kind, A0 a0, A1 a1) {
    // If we're not in a conditional branch, or if none of the
    // arguments requires saving, then use the unconditional cleanup.
    if (!isInConditionalBranch()) {
      typedef EHScopeStack::UnconditionalCleanup2<T, A0, A1> CleanupType;
      return EHStack.pushCleanup<CleanupType>(kind, a0, a1);
    }

    typename DominatingValue<A0>::saved_type a0_saved = saveValueInCond(a0);
    typename DominatingValue<A1>::saved_type a1_saved = saveValueInCond(a1);

    typedef EHScopeStack::ConditionalCleanup2<T, A0, A1> CleanupType;
    EHStack.pushCleanup<CleanupType>(kind, a0_saved, a1_saved);
    initFullExprCleanup();
  }

  /// PushDestructorCleanup - Push a cleanup to call the
  /// complete-object destructor of an object of the given type at the
  /// given address.  Does nothing if T is not a C++ class type with a
  /// non-trivial destructor.
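  ///
  /// A hypothetical call site, assuming Addr already holds the address
  /// of a fully-initialized temporary and E is its expression:
  /// \code
  ///   CGF.PushDestructorCleanup(E->getType(), Addr);
  /// \endcode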
  void PushDestructorCleanup(QualType T, llvm::Value *Addr);

  /// PushDestructorCleanup - Push a cleanup to call the
  /// complete-object variant of the given destructor on the object at
  /// the given address.
  void PushDestructorCleanup(const CXXDestructorDecl *Dtor,
                             llvm::Value *Addr);

  /// PopCleanupBlock - Will pop the cleanup entry on the stack and
  /// process all branch fixups.
  void PopCleanupBlock(bool FallThroughIsBranchThrough = false);

  /// DeactivateCleanupBlock - Deactivates the given cleanup block.
  /// The block cannot be reactivated.  Pops it if it's the top of the
  /// stack.
  void DeactivateCleanupBlock(EHScopeStack::stable_iterator Cleanup);

  /// ActivateCleanupBlock - Activates an initially-inactive cleanup.
  /// Cannot be used to resurrect a deactivated cleanup.
  void ActivateCleanupBlock(EHScopeStack::stable_iterator Cleanup);

  /// \brief Enters a new scope for capturing cleanups, all of which
  /// will be executed once the scope is exited.
  class RunCleanupsScope {
    CodeGenFunction& CGF;
    EHScopeStack::stable_iterator CleanupStackDepth;
    bool OldDidCallStackSave;
    bool PerformCleanup;

    RunCleanupsScope(const RunCleanupsScope &); // DO NOT IMPLEMENT
    RunCleanupsScope &operator=(const RunCleanupsScope &); // DO NOT IMPLEMENT

  public:
    /// \brief Enter a new cleanup scope.
    explicit RunCleanupsScope(CodeGenFunction &CGF)
      : CGF(CGF), PerformCleanup(true)
    {
      CleanupStackDepth = CGF.EHStack.stable_begin();
      OldDidCallStackSave = CGF.DidCallStackSave;
      CGF.DidCallStackSave = false;
    }

    /// \brief Exit this cleanup scope, emitting any accumulated
    /// cleanups.
    ~RunCleanupsScope() {
      if (PerformCleanup) {
        CGF.DidCallStackSave = OldDidCallStackSave;
        CGF.PopCleanupBlocks(CleanupStackDepth);
      }
    }

    /// \brief Determine whether this scope requires any cleanups.
    bool requiresCleanups() const {
      return CGF.EHStack.stable_begin() != CleanupStackDepth;
    }

    /// \brief Force the emission of cleanups now, instead of waiting
    /// until this object is destroyed.
    void ForceCleanup() {
      assert(PerformCleanup && "Already forced cleanup");
      CGF.DidCallStackSave = OldDidCallStackSave;
      CGF.PopCleanupBlocks(CleanupStackDepth);
      PerformCleanup = false;
    }
  };


  /// PopCleanupBlocks - Takes the old cleanup stack size and emits
  /// the cleanup blocks that have been added.
  void PopCleanupBlocks(EHScopeStack::stable_iterator OldCleanupStackSize);

  void ResolveBranchFixups(llvm::BasicBlock *Target);

  /// The given basic block lies in the current EH scope, but may be a
  /// target of a potentially scope-crossing jump; get a stable handle
  /// to which we can perform this jump later.
  JumpDest getJumpDestInCurrentScope(llvm::BasicBlock *Target) {
    return JumpDest(Target,
                    EHStack.getInnermostNormalCleanup(),
                    NextCleanupDestIndex++);
  }

  /// The given basic block lies in the current EH scope, but may be a
  /// target of a potentially scope-crossing jump; get a stable handle
  /// to which we can perform this jump later.
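  ///
  /// For example, statement emission can create the destination up front
  /// and later branch to it through any intervening cleanups (a sketch;
  /// the block name is arbitrary):
  /// \code
  ///   JumpDest LoopExit = getJumpDestInCurrentScope("for.end");
  ///   // ... emit the body, possibly pushing cleanups ...
  ///   EmitBranchThroughCleanup(LoopExit);
  /// \endcode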
  JumpDest getJumpDestInCurrentScope(llvm::StringRef Name = llvm::StringRef()) {
    return getJumpDestInCurrentScope(createBasicBlock(Name));
  }

  /// EmitBranchThroughCleanup - Emit a branch from the current insert
  /// block through the normal cleanup handling code (if any) and then
  /// on to \arg Dest.
  void EmitBranchThroughCleanup(JumpDest Dest);

  /// EmitBranchThroughEHCleanup - Emit a branch from the current
  /// insert block through the EH cleanup handling code (if any) and
  /// then on to \arg Dest.
  void EmitBranchThroughEHCleanup(UnwindDest Dest);

  /// getRethrowDest - Returns the unified outermost-scope rethrow
  /// destination.
  UnwindDest getRethrowDest();

  /// An object to manage conditionally-evaluated expressions.
  class ConditionalEvaluation {
    llvm::BasicBlock *StartBB;

  public:
    ConditionalEvaluation(CodeGenFunction &CGF)
      : StartBB(CGF.Builder.GetInsertBlock()) {}

    void begin(CodeGenFunction &CGF) {
      assert(CGF.OutermostConditional != this);
      if (!CGF.OutermostConditional)
        CGF.OutermostConditional = this;
    }

    void end(CodeGenFunction &CGF) {
      assert(CGF.OutermostConditional != 0);
      if (CGF.OutermostConditional == this)
        CGF.OutermostConditional = 0;
    }

    /// Returns a block which will be executed prior to each
    /// evaluation of the conditional code.
    llvm::BasicBlock *getStartingBlock() const {
      return StartBB;
    }
  };

  /// isInConditionalBranch - Return true if we're currently emitting
  /// one branch or the other of a conditional expression.
  bool isInConditionalBranch() const { return OutermostConditional != 0; }

  /// An RAII object to record that we're evaluating a statement
  /// expression.
  class StmtExprEvaluation {
    CodeGenFunction &CGF;

    /// We have to save the outermost conditional: cleanups in a
    /// statement expression aren't conditional just because the
    /// StmtExpr is.
    ConditionalEvaluation *SavedOutermostConditional;

  public:
    StmtExprEvaluation(CodeGenFunction &CGF)
      : CGF(CGF), SavedOutermostConditional(CGF.OutermostConditional) {
      CGF.OutermostConditional = 0;
    }

    ~StmtExprEvaluation() {
      CGF.OutermostConditional = SavedOutermostConditional;
      CGF.EnsureInsertPoint();
    }
  };

  /// An object which temporarily prevents a value from being
  /// destroyed by aggressive peephole optimizations that assume that
  /// all uses of a value have been realized in the IR.
  class PeepholeProtection {
    llvm::Instruction *Inst;
    friend class CodeGenFunction;

  public:
    PeepholeProtection() : Inst(0) {}
  };

  /// An RAII object to set (and then clear) a mapping for an OpaqueValueExpr.
  class OpaqueValueMapping {
    CodeGenFunction &CGF;
    const OpaqueValueExpr *OpaqueValue;
    bool BoundLValue;
    CodeGenFunction::PeepholeProtection Protection;

  public:
    static bool shouldBindAsLValue(const Expr *expr) {
      return expr->isGLValue() || expr->getType()->isRecordType();
    }

    /// Build the opaque value mapping for the given conditional
    /// operator if it's the GNU ?: extension.  This is a common
    /// enough pattern that the convenience operator is really
    /// helpful.
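    ///
    /// A sketch of the intended pattern when emitting the extension
    /// (E names an AbstractConditionalOperator; purely illustrative):
    /// \code
    ///   OpaqueValueMapping binding(CGF, E);
    ///   // ... emit the condition and both branches as usual ...
    /// \endcode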
    ///
    OpaqueValueMapping(CodeGenFunction &CGF,
                       const AbstractConditionalOperator *op) : CGF(CGF) {
      if (isa<ConditionalOperator>(op)) {
        OpaqueValue = 0;
        BoundLValue = false;
        return;
      }

      const BinaryConditionalOperator *e = cast<BinaryConditionalOperator>(op);
      init(e->getOpaqueValue(), e->getCommon());
    }

    OpaqueValueMapping(CodeGenFunction &CGF,
                       const OpaqueValueExpr *opaqueValue,
                       LValue lvalue)
      : CGF(CGF), OpaqueValue(opaqueValue), BoundLValue(true) {
      assert(opaqueValue && "no opaque value expression!");
      assert(shouldBindAsLValue(opaqueValue));
      initLValue(lvalue);
    }

    OpaqueValueMapping(CodeGenFunction &CGF,
                       const OpaqueValueExpr *opaqueValue,
                       RValue rvalue)
      : CGF(CGF), OpaqueValue(opaqueValue), BoundLValue(false) {
      assert(opaqueValue && "no opaque value expression!");
      assert(!shouldBindAsLValue(opaqueValue));
      initRValue(rvalue);
    }

    void pop() {
      assert(OpaqueValue && "mapping already popped!");
      popImpl();
      OpaqueValue = 0;
    }

    ~OpaqueValueMapping() {
      if (OpaqueValue) popImpl();
    }

  private:
    void popImpl() {
      if (BoundLValue)
        CGF.OpaqueLValues.erase(OpaqueValue);
      else {
        CGF.OpaqueRValues.erase(OpaqueValue);
        CGF.unprotectFromPeepholes(Protection);
      }
    }

    void init(const OpaqueValueExpr *ov, const Expr *e) {
      OpaqueValue = ov;
      BoundLValue = shouldBindAsLValue(ov);
      assert(BoundLValue == shouldBindAsLValue(e)
             && "inconsistent expression value kinds!");
      if (BoundLValue)
        initLValue(CGF.EmitLValue(e));
      else
        initRValue(CGF.EmitAnyExpr(e));
    }

    void initLValue(const LValue &lv) {
      CGF.OpaqueLValues.insert(std::make_pair(OpaqueValue, lv));
    }

    void initRValue(const RValue &rv) {
      // Work around an extremely aggressive peephole optimization in
      // EmitScalarConversion which assumes that all other uses of a
      // value are extant.
      Protection = CGF.protectFromPeepholes(rv);
      CGF.OpaqueRValues.insert(std::make_pair(OpaqueValue, rv));
    }
  };

  /// getByRefValueLLVMField - Given a declaration, returns the LLVM field
  /// number that holds the value.
  unsigned getByRefValueLLVMField(const ValueDecl *VD) const;

  /// BuildBlockByrefAddress - Computes address location of the
  /// variable which is declared as __block.
  llvm::Value *BuildBlockByrefAddress(llvm::Value *BaseAddr,
                                      const VarDecl *V);
private:
  CGDebugInfo *DebugInfo;
  bool DisableDebugInfo;

  /// IndirectBranch - The first time an indirect goto is seen we create a block
  /// with an indirect branch.  Every time we see the address of a label taken,
  /// we add the label to the indirect goto.  Every subsequent indirect goto is
  /// codegen'd as a jump to the IndirectBranch's basic block.
  llvm::IndirectBrInst *IndirectBranch;

  /// LocalDeclMap - This keeps track of the LLVM allocas or globals for local C
  /// decls.
  typedef llvm::DenseMap<const Decl*, llvm::Value*> DeclMapTy;
  DeclMapTy LocalDeclMap;

  /// LabelMap - This keeps track of the LLVM basic block for each C label.
  llvm::DenseMap<const LabelDecl*, JumpDest> LabelMap;

  // BreakContinueStack - This keeps track of where break and continue
  // statements should jump to.
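  // For instance, while a loop body is being emitted the top of this
  // stack pairs the loop's exit block with its continue destination, so
  // that 'break' and 'continue' can be routed through any intervening
  // cleanups.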
  struct BreakContinue {
    BreakContinue(JumpDest Break, JumpDest Continue)
      : BreakBlock(Break), ContinueBlock(Continue) {}

    JumpDest BreakBlock;
    JumpDest ContinueBlock;
  };
  llvm::SmallVector<BreakContinue, 8> BreakContinueStack;

  /// SwitchInsn - This is the nearest current switch instruction. It is null if
  /// the current context is not in a switch.
  llvm::SwitchInst *SwitchInsn;

  /// CaseRangeBlock - This block holds the if-condition check for the last case
  /// statement range in the current switch instruction.
  llvm::BasicBlock *CaseRangeBlock;

  /// OpaqueLValues - Keeps track of the current set of opaque value
  /// expressions.
  llvm::DenseMap<const OpaqueValueExpr *, LValue> OpaqueLValues;
  llvm::DenseMap<const OpaqueValueExpr *, RValue> OpaqueRValues;

  // VLASizeMap - This keeps track of the associated size for each VLA type.
  // We track this by the size expression rather than the type itself because
  // in certain situations, like a const qualifier applied to a VLA typedef,
  // multiple VLA types can share the same size expression.
  // FIXME: Maybe this could be a stack of maps that is pushed/popped as we
  // enter/leave scopes.
  llvm::DenseMap<const Expr*, llvm::Value*> VLASizeMap;

  /// DidCallStackSave - Whether llvm.stacksave has been called. Used to avoid
  /// calling llvm.stacksave for multiple VLAs in the same scope.
  bool DidCallStackSave;

  /// A block containing a single 'unreachable' instruction.  Created
  /// lazily by getUnreachableBlock().
  llvm::BasicBlock *UnreachableBlock;

  /// CXXThisDecl - When generating code for a C++ member function,
  /// this will hold the implicit 'this' declaration.
  ImplicitParamDecl *CXXThisDecl;
  llvm::Value *CXXThisValue;

  /// CXXVTTDecl - When generating code for a base object constructor or
  /// base object destructor with virtual bases, this will hold the implicit
  /// VTT parameter.
  ImplicitParamDecl *CXXVTTDecl;
  llvm::Value *CXXVTTValue;

  /// OutermostConditional - Points to the outermost active
  /// conditional control.  This is used so that we know if a
  /// temporary should be destroyed conditionally.
  ConditionalEvaluation *OutermostConditional;


  /// ByRefValueInfo - For each __block variable, contains a pair of the LLVM
  /// type as well as the field number that contains the actual data.
  llvm::DenseMap<const ValueDecl *, std::pair<const llvm::Type *,
                                              unsigned> > ByRefValueInfo;

  llvm::BasicBlock *TerminateLandingPad;
  llvm::BasicBlock *TerminateHandler;
  llvm::BasicBlock *TrapBB;

public:
  CodeGenFunction(CodeGenModule &cgm);

  CodeGenTypes &getTypes() const { return CGM.getTypes(); }
  ASTContext &getContext() const;
  CGDebugInfo *getDebugInfo() {
    if (DisableDebugInfo)
      return NULL;
    return DebugInfo;
  }
  void disableDebugInfo() { DisableDebugInfo = true; }
  void enableDebugInfo() { DisableDebugInfo = false; }


  const LangOptions &getLangOptions() const { return CGM.getLangOptions(); }

  /// Returns a pointer to the function's exception object slot, which
  /// is assigned in every landing pad.
  llvm::Value *getExceptionSlot();

  llvm::Value *getNormalCleanupDestSlot();
  llvm::Value *getEHCleanupDestSlot();

  llvm::BasicBlock *getUnreachableBlock() {
    if (!UnreachableBlock) {
      UnreachableBlock = createBasicBlock("unreachable");
      new llvm::UnreachableInst(getLLVMContext(), UnreachableBlock);
    }
    return UnreachableBlock;
  }

  llvm::BasicBlock *getInvokeDest() {
    if (!EHStack.requiresLandingPad()) return 0;
    return getInvokeDestImpl();
  }

  llvm::LLVMContext &getLLVMContext() { return CGM.getLLVMContext(); }

  //===--------------------------------------------------------------------===//
  //                                  Objective-C
  //===--------------------------------------------------------------------===//

  void GenerateObjCMethod(const ObjCMethodDecl *OMD);

  void StartObjCMethod(const ObjCMethodDecl *MD,
                       const ObjCContainerDecl *CD);

  /// GenerateObjCGetter - Synthesize an Objective-C property getter function.
  void GenerateObjCGetter(ObjCImplementationDecl *IMP,
                          const ObjCPropertyImplDecl *PID);
  void GenerateObjCGetterBody(ObjCIvarDecl *Ivar, bool IsAtomic, bool IsStrong);
  void GenerateObjCAtomicSetterBody(ObjCMethodDecl *OMD,
                                    ObjCIvarDecl *Ivar);

  void GenerateObjCCtorDtorMethod(ObjCImplementationDecl *IMP,
                                  ObjCMethodDecl *MD, bool ctor);

  /// GenerateObjCSetter - Synthesize an Objective-C property setter function
  /// for the given property.
  void GenerateObjCSetter(ObjCImplementationDecl *IMP,
                          const ObjCPropertyImplDecl *PID);
  bool IndirectObjCSetterArg(const CGFunctionInfo &FI);
  bool IvarTypeWithAggrGCObjects(QualType Ty);

  //===--------------------------------------------------------------------===//
  //                                  Block Bits
  //===--------------------------------------------------------------------===//

  llvm::Value *EmitBlockLiteral(const BlockExpr *);
  llvm::Constant *BuildDescriptorBlockDecl(const BlockExpr *,
                                           const CGBlockInfo &Info,
                                           const llvm::StructType *,
                                           llvm::Constant *BlockVarLayout);

  llvm::Function *GenerateBlockFunction(GlobalDecl GD,
                                        const CGBlockInfo &Info,
                                        const Decl *OuterFuncDecl,
                                        const DeclMapTy &ldm);

  llvm::Constant *GenerateCopyHelperFunction(const CGBlockInfo &blockInfo);
  llvm::Constant *GenerateDestroyHelperFunction(const CGBlockInfo &blockInfo);

  llvm::Constant *GeneratebyrefCopyHelperFunction(const llvm::Type *,
                                                  BlockFieldFlags flags,
                                                  const VarDecl *BD);
  llvm::Constant *GeneratebyrefDestroyHelperFunction(const llvm::Type *T,
                                                     BlockFieldFlags flags,
                                                     const VarDecl *BD);

  void BuildBlockRelease(llvm::Value *DeclPtr, BlockFieldFlags flags);

  llvm::Value *LoadBlockStruct() {
    assert(BlockPointer && "no block pointer set!");
    return BlockPointer;
  }

  void AllocateBlockCXXThisPointer(const CXXThisExpr *E);
  void AllocateBlockDecl(const BlockDeclRefExpr *E);
  llvm::Value *GetAddrOfBlockDecl(const BlockDeclRefExpr *E) {
    return GetAddrOfBlockDecl(E->getDecl(), E->isByRef());
  }
  llvm::Value *GetAddrOfBlockDecl(const VarDecl *var, bool ByRef);
  const llvm::Type *BuildByRefType(const VarDecl *var);

  void GenerateCode(GlobalDecl GD, llvm::Function *Fn,
                    const CGFunctionInfo &FnInfo);
  void StartFunction(GlobalDecl GD, QualType RetTy,
                     llvm::Function *Fn,
                     const CGFunctionInfo &FnInfo,
                     const FunctionArgList &Args,
                     SourceLocation StartLoc);
  void EmitConstructorBody(FunctionArgList &Args);
  void EmitDestructorBody(FunctionArgList &Args);
  void EmitFunctionBody(FunctionArgList &Args);

  /// EmitReturnBlock - Emit the unified return block, trying to avoid its
  /// emission when possible.
  void EmitReturnBlock();

  /// FinishFunction - Complete IR generation of the current function. It is
  /// legal to call this function even if there is no current insertion point.
  void FinishFunction(SourceLocation EndLoc=SourceLocation());

  /// GenerateThunk - Generate a thunk for the given method.
  void GenerateThunk(llvm::Function *Fn, const CGFunctionInfo &FnInfo,
                     GlobalDecl GD, const ThunkInfo &Thunk);

  void EmitCtorPrologue(const CXXConstructorDecl *CD, CXXCtorType Type,
                        FunctionArgList &Args);

  /// InitializeVTablePointer - Initialize the vtable pointer of the given
  /// subobject.
  ///
  void InitializeVTablePointer(BaseSubobject Base,
                               const CXXRecordDecl *NearestVBase,
                               uint64_t OffsetFromNearestVBase,
                               llvm::Constant *VTable,
                               const CXXRecordDecl *VTableClass);

  typedef llvm::SmallPtrSet<const CXXRecordDecl *, 4> VisitedVirtualBasesSetTy;
  void InitializeVTablePointers(BaseSubobject Base,
                                const CXXRecordDecl *NearestVBase,
                                uint64_t OffsetFromNearestVBase,
                                bool BaseIsNonVirtualPrimaryBase,
                                llvm::Constant *VTable,
                                const CXXRecordDecl *VTableClass,
                                VisitedVirtualBasesSetTy& VBases);

  void InitializeVTablePointers(const CXXRecordDecl *ClassDecl);

  /// GetVTablePtr - Return the Value of the vtable pointer member pointed
  /// to by This.
  llvm::Value *GetVTablePtr(llvm::Value *This, const llvm::Type *Ty);

  /// EnterDtorCleanups - Enter the cleanups necessary to complete the
  /// given phase of destruction for a destructor.  The end result
  /// should call destructors on members and base classes in reverse
  /// order of their construction.
  void EnterDtorCleanups(const CXXDestructorDecl *Dtor, CXXDtorType Type);

  /// ShouldInstrumentFunction - Return true if the current function should be
  /// instrumented with __cyg_profile_func_* calls.
  bool ShouldInstrumentFunction();

  /// EmitFunctionInstrumentation - Emit LLVM code to call the specified
  /// instrumentation function with the current function and the call site, if
  /// function instrumentation is enabled.
  void EmitFunctionInstrumentation(const char *Fn);

  /// EmitMCountInstrumentation - Emit call to .mcount.
  void EmitMCountInstrumentation();

  /// EmitFunctionProlog - Emit the target specific LLVM code to load the
  /// arguments for the given function. This is also responsible for naming the
  /// LLVM function arguments.
  void EmitFunctionProlog(const CGFunctionInfo &FI,
                          llvm::Function *Fn,
                          const FunctionArgList &Args);

  /// EmitFunctionEpilog - Emit the target specific LLVM code to return the
  /// given temporary.
  void EmitFunctionEpilog(const CGFunctionInfo &FI);

  /// EmitStartEHSpec - Emit the start of the exception spec.
  void EmitStartEHSpec(const Decl *D);

  /// EmitEndEHSpec - Emit the end of the exception spec.
  void EmitEndEHSpec(const Decl *D);

  /// getTerminateLandingPad - Return a landing pad that just calls terminate.
  llvm::BasicBlock *getTerminateLandingPad();

  /// getTerminateHandler - Return a handler (not a landing pad, just
  /// a catch handler) that just calls terminate.  This is used when
  /// a terminate scope encloses a try.
  llvm::BasicBlock *getTerminateHandler();

  const llvm::Type *ConvertTypeForMem(QualType T);
  const llvm::Type *ConvertType(QualType T);
  const llvm::Type *ConvertType(const TypeDecl *T) {
    return ConvertType(getContext().getTypeDeclType(T));
  }

  /// LoadObjCSelf - Load the value of self. This function is only valid while
  /// generating code for an Objective-C method.
  llvm::Value *LoadObjCSelf();

  /// TypeOfSelfObject - Return type of object that this self represents.
  QualType TypeOfSelfObject();

  /// hasAggregateLLVMType - Return true if the specified AST type will map into
  /// an aggregate LLVM type or is void.
  static bool hasAggregateLLVMType(QualType T);

  /// createBasicBlock - Create an LLVM basic block.
  llvm::BasicBlock *createBasicBlock(llvm::StringRef name = "",
                                     llvm::Function *parent = 0,
                                     llvm::BasicBlock *before = 0) {
#ifdef NDEBUG
    return llvm::BasicBlock::Create(getLLVMContext(), "", parent, before);
#else
    return llvm::BasicBlock::Create(getLLVMContext(), name, parent, before);
#endif
  }

  /// getJumpDestForLabel - Return the JumpDest that the specified label
  /// maps to.
  JumpDest getJumpDestForLabel(const LabelDecl *S);

  /// SimplifyForwardingBlocks - If the given basic block is only a branch to
  /// another basic block, simplify it. This assumes that no other code could
  /// potentially reference the basic block.
  void SimplifyForwardingBlocks(llvm::BasicBlock *BB);

  /// EmitBlock - Emit the given block \arg BB and set it as the insert point,
  /// adding a fall-through branch from the current insert block if
  /// necessary. It is legal to call this function even if there is no current
  /// insertion point.
  ///
  /// IsFinished - If true, indicates that the caller has finished emitting
  /// branches to the given block and does not expect to emit code into it. This
  /// means the block can be ignored if it is unreachable.
  void EmitBlock(llvm::BasicBlock *BB, bool IsFinished=false);

  /// EmitBranch - Emit a branch to the specified basic block from the current
  /// insert block, taking care to avoid creation of branches from dummy
  /// blocks. It is legal to call this function even if there is no current
  /// insertion point.
  ///
  /// This function clears the current insertion point. The caller should follow
  /// calls to this function with calls to Emit*Block prior to generating new
  /// code.
  void EmitBranch(llvm::BasicBlock *Block);

  /// HaveInsertPoint - True if an insertion point is defined. If not, this
  /// indicates that the current code being emitted is unreachable.
  bool HaveInsertPoint() const {
    return Builder.GetInsertBlock() != 0;
  }

  /// EnsureInsertPoint - Ensure that an insertion point is defined so that
  /// emitted IR has a place to go. Note that by definition, if this function
  /// creates a block then that block is unreachable; callers may do better to
  /// detect when no insertion point is defined and simply skip IR generation.
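  ///
  /// The caller-side alternative suggested above looks roughly like
  /// this (a sketch):
  /// \code
  ///   if (!HaveInsertPoint())
  ///     return;  // the code we would emit is unreachable anyway
  ///   // ... otherwise emit IR normally ...
  /// \endcode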
  void EnsureInsertPoint() {
    if (!HaveInsertPoint())
      EmitBlock(createBasicBlock());
  }

  /// ErrorUnsupported - Print out an error that codegen doesn't support the
  /// specified stmt yet.
  void ErrorUnsupported(const Stmt *S, const char *Type,
                        bool OmitOnError=false);

  //===--------------------------------------------------------------------===//
  //                                  Helpers
  //===--------------------------------------------------------------------===//

  LValue MakeAddrLValue(llvm::Value *V, QualType T, unsigned Alignment = 0) {
    return LValue::MakeAddr(V, T, Alignment, getContext(),
                            CGM.getTBAAInfo(T));
  }

  /// CreateTempAlloca - This creates an alloca and inserts it into the entry
  /// block. The caller is responsible for setting an appropriate alignment on
  /// the alloca.
  llvm::AllocaInst *CreateTempAlloca(const llvm::Type *Ty,
                                     const llvm::Twine &Name = "tmp");

  /// InitTempAlloca - Provide an initial value for the given alloca.
  void InitTempAlloca(llvm::AllocaInst *Alloca, llvm::Value *Value);

  /// CreateIRTemp - Create a temporary IR object of the given type, with
  /// appropriate alignment. This routine should only be used when a temporary
  /// value needs to be stored into an alloca (for example, to avoid explicit
  /// PHI construction), but the type is the IR type, not the type appropriate
  /// for storing in memory.
  llvm::AllocaInst *CreateIRTemp(QualType T, const llvm::Twine &Name = "tmp");

  /// CreateMemTemp - Create a temporary memory object of the given type, with
  /// appropriate alignment.
  llvm::AllocaInst *CreateMemTemp(QualType T, const llvm::Twine &Name = "tmp");

  /// CreateAggTemp - Create a temporary memory object for the given
  /// aggregate type.
  AggValueSlot CreateAggTemp(QualType T, const llvm::Twine &Name = "tmp") {
    return AggValueSlot::forAddr(CreateMemTemp(T, Name), false, false);
  }

  /// Emit a cast to void* in the appropriate address space.
  llvm::Value *EmitCastToVoidPtr(llvm::Value *value);

  /// EvaluateExprAsBool - Perform the usual unary conversions on the specified
  /// expression and compare the result against zero, returning an Int1Ty value.
  llvm::Value *EvaluateExprAsBool(const Expr *E);

  /// EmitIgnoredExpr - Emit an expression in a context which ignores the result.
  void EmitIgnoredExpr(const Expr *E);

  /// EmitAnyExpr - Emit code to compute the specified expression which can have
  /// any type. The result is returned as an RValue struct. If this is an
  /// aggregate expression, the aggloc/agglocvolatile arguments indicate where
  /// the result should be returned.
  ///
  /// \param IgnoreResult - True if the resulting value isn't used.
  RValue EmitAnyExpr(const Expr *E,
                     AggValueSlot AggSlot = AggValueSlot::ignored(),
                     bool IgnoreResult = false);

  // EmitVAListRef - Emit a "reference" to a va_list; this is either the address
  // or the value of the expression, depending on how va_list is defined.
  llvm::Value *EmitVAListRef(const Expr *E);

  /// EmitAnyExprToTemp - Similar to EmitAnyExpr(), however, the result will
  /// always be accessible even if no aggregate location is provided.
  RValue EmitAnyExprToTemp(const Expr *E);

  /// EmitAnyExprToMem - Emits the code necessary to evaluate an
  /// arbitrary expression into the given memory location.
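  ///
  /// A hypothetical use, evaluating an initializer into a fresh
  /// temporary:
  /// \code
  ///   llvm::Value *Loc = CGF.CreateMemTemp(E->getType());
  ///   CGF.EmitAnyExprToMem(E, Loc, /*IsLocationVolatile=*/false,
  ///                        /*IsInitializer=*/true);
  /// \endcode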
  void EmitAnyExprToMem(const Expr *E, llvm::Value *Location,
                        bool IsLocationVolatile,
                        bool IsInitializer);

  /// EmitExprAsInit - Emits the code necessary to initialize a
  /// location in memory with the given initializer.
  void EmitExprAsInit(const Expr *init, const VarDecl *var,
                      llvm::Value *loc, CharUnits alignment,
                      bool capturedByInit);

  /// EmitAggregateCopy - Emit an aggregate copy.
  ///
  /// \param isVolatile - True iff either the source or the destination is
  /// volatile.
  void EmitAggregateCopy(llvm::Value *DestPtr, llvm::Value *SrcPtr,
                         QualType EltTy, bool isVolatile=false);

  /// StartBlock - Start a new block named N. If the insert block is a dummy
  /// block then reuse it.
  void StartBlock(const char *N);

  /// GetAddrOfStaticLocalVar - Return the address of a static local variable.
  llvm::Constant *GetAddrOfStaticLocalVar(const VarDecl *BVD) {
    return cast<llvm::Constant>(GetAddrOfLocalVar(BVD));
  }

  /// GetAddrOfLocalVar - Return the address of a local variable.
  llvm::Value *GetAddrOfLocalVar(const VarDecl *VD) {
    llvm::Value *Res = LocalDeclMap[VD];
    assert(Res && "Invalid argument to GetAddrOfLocalVar(), no decl!");
    return Res;
  }

  /// getOpaqueLValueMapping - Given an opaque value expression (which
  /// must be mapped to an l-value), return its mapping.
  const LValue &getOpaqueLValueMapping(const OpaqueValueExpr *e) {
    assert(OpaqueValueMapping::shouldBindAsLValue(e));

    llvm::DenseMap<const OpaqueValueExpr*,LValue>::iterator
      it = OpaqueLValues.find(e);
    assert(it != OpaqueLValues.end() && "no mapping for opaque value!");
    return it->second;
  }

  /// getOpaqueRValueMapping - Given an opaque value expression (which
  /// must be mapped to an r-value), return its mapping.
  const RValue &getOpaqueRValueMapping(const OpaqueValueExpr *e) {
    assert(!OpaqueValueMapping::shouldBindAsLValue(e));

    llvm::DenseMap<const OpaqueValueExpr*,RValue>::iterator
      it = OpaqueRValues.find(e);
    assert(it != OpaqueRValues.end() && "no mapping for opaque value!");
    return it->second;
  }

  /// getAccessedFieldNo - Given an encoded value and a result number, return
  /// the input field number being accessed.
  static unsigned getAccessedFieldNo(unsigned Idx, const llvm::Constant *Elts);

  llvm::BlockAddress *GetAddrOfLabel(const LabelDecl *L);
  llvm::BasicBlock *GetIndirectGotoBlock();

  /// EmitNullInitialization - Generate code to set a value of the given type to
  /// null. If the type contains data member pointers, they will be initialized
  /// to -1 in accordance with the Itanium C++ ABI.
  void EmitNullInitialization(llvm::Value *DestPtr, QualType Ty);

  // EmitVAArg - Generate code to get an argument from the passed in pointer
  // and update it accordingly. The return value is a pointer to the argument.
  // FIXME: We should be able to get rid of this method and use the va_arg
  // instruction in LLVM instead once it works well enough.
  llvm::Value *EmitVAArg(llvm::Value *VAListAddr, QualType Ty);

  /// EmitVLASize - Generate code for any VLA size expressions that might occur
  /// in a variably modified type. If Ty is a VLA, will return the value that
  /// corresponds to the size in bytes of the VLA type. Will return 0 otherwise.
  ///
  /// This function can be called with a null (unreachable) insert point.
  llvm::Value *EmitVLASize(QualType Ty);

  // GetVLASize - Returns an LLVM value that corresponds to the size in bytes
  // of a variable length array type.
  llvm::Value *GetVLASize(const VariableArrayType *);

  /// LoadCXXThis - Load the value of 'this'. This function is only valid while
  /// generating code for a C++ member function.
  llvm::Value *LoadCXXThis() {
    assert(CXXThisValue && "no 'this' value for this function");
    return CXXThisValue;
  }

  /// LoadCXXVTT - Load the VTT parameter to base constructors/destructors that
  /// have virtual bases.
  llvm::Value *LoadCXXVTT() {
    assert(CXXVTTValue && "no VTT value for this function");
    return CXXVTTValue;
  }

  /// GetAddressOfDirectBaseInCompleteClass - Convert the given pointer to a
  /// complete class to the given direct base.
  llvm::Value *
  GetAddressOfDirectBaseInCompleteClass(llvm::Value *Value,
                                        const CXXRecordDecl *Derived,
                                        const CXXRecordDecl *Base,
                                        bool BaseIsVirtual);

  /// GetAddressOfBaseClass - This function will add the necessary delta to the
  /// load of 'this' and returns address of the base class.
  llvm::Value *GetAddressOfBaseClass(llvm::Value *Value,
                                     const CXXRecordDecl *Derived,
                                     CastExpr::path_const_iterator PathBegin,
                                     CastExpr::path_const_iterator PathEnd,
                                     bool NullCheckValue);

  llvm::Value *GetAddressOfDerivedClass(llvm::Value *Value,
                                        const CXXRecordDecl *Derived,
                                        CastExpr::path_const_iterator PathBegin,
                                        CastExpr::path_const_iterator PathEnd,
                                        bool NullCheckValue);

  llvm::Value *GetVirtualBaseClassOffset(llvm::Value *This,
                                         const CXXRecordDecl *ClassDecl,
                                         const CXXRecordDecl *BaseClassDecl);

  void EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                      CXXCtorType CtorType,
                                      const FunctionArgList &Args);
  void EmitCXXConstructorCall(const CXXConstructorDecl *D, CXXCtorType Type,
                              bool ForVirtualBase, llvm::Value *This,
                              CallExpr::const_arg_iterator ArgBeg,
                              CallExpr::const_arg_iterator ArgEnd);

  void EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
                                      llvm::Value *This, llvm::Value *Src,
                                      CallExpr::const_arg_iterator ArgBeg,
                                      CallExpr::const_arg_iterator ArgEnd);

  void EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                  const ConstantArrayType *ArrayTy,
                                  llvm::Value *ArrayPtr,
                                  CallExpr::const_arg_iterator ArgBeg,
                                  CallExpr::const_arg_iterator ArgEnd,
                                  bool ZeroInitialization = false);

  void EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                  llvm::Value *NumElements,
                                  llvm::Value *ArrayPtr,
                                  CallExpr::const_arg_iterator ArgBeg,
                                  CallExpr::const_arg_iterator ArgEnd,
                                  bool ZeroInitialization = false);

  void EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
                                 const ArrayType *Array,
                                 llvm::Value *This);

  void EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
                                 llvm::Value *NumElements,
                                 llvm::Value *This);

  llvm::Function *GenerateCXXAggrDestructorHelper(const CXXDestructorDecl *D,
                                                  const ArrayType *Array,
                                                  llvm::Value *This);

  void EmitCXXDestructorCall(const CXXDestructorDecl *D, CXXDtorType Type,
                             bool ForVirtualBase, llvm::Value *This);

  void EmitNewArrayInitializer(const CXXNewExpr *E, llvm::Value *NewPtr,
                               llvm::Value *NumElements);
*NumElements); 1536 1537 void EmitCXXTemporary(const CXXTemporary *Temporary, llvm::Value *Ptr); 1538 1539 llvm::Value *EmitCXXNewExpr(const CXXNewExpr *E); 1540 void EmitCXXDeleteExpr(const CXXDeleteExpr *E); 1541 1542 void EmitDeleteCall(const FunctionDecl *DeleteFD, llvm::Value *Ptr, 1543 QualType DeleteTy); 1544 1545 llvm::Value* EmitCXXTypeidExpr(const CXXTypeidExpr *E); 1546 llvm::Value *EmitDynamicCast(llvm::Value *V, const CXXDynamicCastExpr *DCE); 1547 1548 void EmitCheck(llvm::Value *, unsigned Size); 1549 1550 llvm::Value *EmitScalarPrePostIncDec(const UnaryOperator *E, LValue LV, 1551 bool isInc, bool isPre); 1552 ComplexPairTy EmitComplexPrePostIncDec(const UnaryOperator *E, LValue LV, 1553 bool isInc, bool isPre); 1554 //===--------------------------------------------------------------------===// 1555 // Declaration Emission 1556 //===--------------------------------------------------------------------===// 1557 1558 /// EmitDecl - Emit a declaration. 1559 /// 1560 /// This function can be called with a null (unreachable) insert point. 1561 void EmitDecl(const Decl &D); 1562 1563 /// EmitVarDecl - Emit a local variable declaration. 1564 /// 1565 /// This function can be called with a null (unreachable) insert point. 1566 void EmitVarDecl(const VarDecl &D); 1567 1568 typedef void SpecialInitFn(CodeGenFunction &Init, const VarDecl &D, 1569 llvm::Value *Address); 1570 1571 /// EmitAutoVarDecl - Emit an auto variable declaration. 1572 /// 1573 /// This function can be called with a null (unreachable) insert point. 1574 void EmitAutoVarDecl(const VarDecl &D); 1575 1576 class AutoVarEmission { 1577 friend class CodeGenFunction; 1578 1579 const VarDecl *Variable; 1580 1581 /// The alignment of the variable. 1582 CharUnits Alignment; 1583 1584 /// The address of the alloca. Null if the variable was emitted 1585 /// as a global constant. 1586 llvm::Value *Address; 1587 1588 llvm::Value *NRVOFlag; 1589 1590 /// True if the variable is a __block variable. 1591 bool IsByRef; 1592 1593 /// True if the variable is of aggregate type and has a constant 1594 /// initializer. 1595 bool IsConstantAggregate; 1596 1597 struct Invalid {}; 1598 AutoVarEmission(Invalid) : Variable(0) {} 1599 1600 AutoVarEmission(const VarDecl &variable) 1601 : Variable(&variable), Address(0), NRVOFlag(0), 1602 IsByRef(false), IsConstantAggregate(false) {} 1603 1604 bool wasEmittedAsGlobal() const { return Address == 0; } 1605 1606 public: 1607 static AutoVarEmission invalid() { return AutoVarEmission(Invalid()); } 1608 1609 /// Returns the address of the object within this declaration. 1610 /// Note that this does not chase the forwarding pointer for 1611 /// __block decls. 1612 llvm::Value *getObjectAddress(CodeGenFunction &CGF) const { 1613 if (!IsByRef) return Address; 1614 1615 return CGF.Builder.CreateStructGEP(Address, 1616 CGF.getByRefValueLLVMField(Variable), 1617 Variable->getNameAsString()); 1618 } 1619 }; 1620 AutoVarEmission EmitAutoVarAlloca(const VarDecl &var); 1621 void EmitAutoVarInit(const AutoVarEmission &emission); 1622 void EmitAutoVarCleanups(const AutoVarEmission &emission); 1623 1624 void EmitStaticVarDecl(const VarDecl &D, 1625 llvm::GlobalValue::LinkageTypes Linkage); 1626 1627 /// EmitParmDecl - Emit a ParmVarDecl or an ImplicitParamDecl. 
1628 void EmitParmDecl(const VarDecl &D, llvm::Value *Arg, unsigned ArgNo); 1629 1630 /// protectFromPeepholes - Protect a value that we're intending to 1631 /// store to the side, but which will probably be used later, from 1632 /// aggressive peepholing optimizations that might delete it. 1633 /// 1634 /// Pass the result to unprotectFromPeepholes to declare that 1635 /// protection is no longer required. 1636 /// 1637 /// There's no particular reason why this shouldn't apply to 1638 /// l-values, it's just that no existing peepholes work on pointers. 1639 PeepholeProtection protectFromPeepholes(RValue rvalue); 1640 void unprotectFromPeepholes(PeepholeProtection protection); 1641 1642 //===--------------------------------------------------------------------===// 1643 // Statement Emission 1644 //===--------------------------------------------------------------------===// 1645 1646 /// EmitStopPoint - Emit a debug stoppoint if we are emitting debug info. 1647 void EmitStopPoint(const Stmt *S); 1648 1649 /// EmitStmt - Emit the code for the statement \arg S. It is legal to call 1650 /// this function even if there is no current insertion point. 1651 /// 1652 /// This function may clear the current insertion point; callers should use 1653 /// EnsureInsertPoint if they wish to subsequently generate code without first 1654 /// calling EmitBlock, EmitBranch, or EmitStmt. 1655 void EmitStmt(const Stmt *S); 1656 1657 /// EmitSimpleStmt - Try to emit a "simple" statement which does not 1658 /// necessarily require an insertion point or debug information; typically 1659 /// because the statement amounts to a jump or a container of other 1660 /// statements. 1661 /// 1662 /// \return True if the statement was handled. 1663 bool EmitSimpleStmt(const Stmt *S); 1664 1665 RValue EmitCompoundStmt(const CompoundStmt &S, bool GetLast = false, 1666 AggValueSlot AVS = AggValueSlot::ignored()); 1667 1668 /// EmitLabel - Emit the block for the given label. It is legal to call this 1669 /// function even if there is no current insertion point. 1670 void EmitLabel(const LabelDecl *D); // helper for EmitLabelStmt. 
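// Illustration only, not part of the original interface documentation: a
// typical caller pattern for statement emission, assuming 'S' is some
// sub-statement being lowered. EmitStmt may clear the insertion point, so a
// caller that wants to keep emitting straight-line code afterwards
// re-establishes one:
//
//   EmitStmt(S);          // may leave the builder without an insert point
//   EnsureInsertPoint();  // guarantees a block to continue emitting into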
1671 1672 void EmitLabelStmt(const LabelStmt &S); 1673 void EmitGotoStmt(const GotoStmt &S); 1674 void EmitIndirectGotoStmt(const IndirectGotoStmt &S); 1675 void EmitIfStmt(const IfStmt &S); 1676 void EmitWhileStmt(const WhileStmt &S); 1677 void EmitDoStmt(const DoStmt &S); 1678 void EmitForStmt(const ForStmt &S); 1679 void EmitReturnStmt(const ReturnStmt &S); 1680 void EmitDeclStmt(const DeclStmt &S); 1681 void EmitBreakStmt(const BreakStmt &S); 1682 void EmitContinueStmt(const ContinueStmt &S); 1683 void EmitSwitchStmt(const SwitchStmt &S); 1684 void EmitDefaultStmt(const DefaultStmt &S); 1685 void EmitCaseStmt(const CaseStmt &S); 1686 void EmitCaseStmtRange(const CaseStmt &S); 1687 void EmitAsmStmt(const AsmStmt &S); 1688 1689 void EmitObjCForCollectionStmt(const ObjCForCollectionStmt &S); 1690 void EmitObjCAtTryStmt(const ObjCAtTryStmt &S); 1691 void EmitObjCAtThrowStmt(const ObjCAtThrowStmt &S); 1692 void EmitObjCAtSynchronizedStmt(const ObjCAtSynchronizedStmt &S); 1693 1694 llvm::Constant *getUnwindResumeOrRethrowFn(); 1695 void EnterCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock = false); 1696 void ExitCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock = false); 1697 1698 void EmitCXXTryStmt(const CXXTryStmt &S); 1699 1700 //===--------------------------------------------------------------------===// 1701 // LValue Expression Emission 1702 //===--------------------------------------------------------------------===// 1703 1704 /// GetUndefRValue - Get an appropriate 'undef' rvalue for the given type. 1705 RValue GetUndefRValue(QualType Ty); 1706 1707 /// EmitUnsupportedRValue - Emit a dummy r-value using the type of E 1708 /// and issue an ErrorUnsupported style diagnostic (using the 1709 /// provided Name). 1710 RValue EmitUnsupportedRValue(const Expr *E, 1711 const char *Name); 1712 1713 /// EmitUnsupportedLValue - Emit a dummy l-value using the type of E and issue 1714 /// an ErrorUnsupported style diagnostic (using the provided Name). 1715 LValue EmitUnsupportedLValue(const Expr *E, 1716 const char *Name); 1717 1718 /// EmitLValue - Emit code to compute a designator that specifies the location 1719 /// of the expression. 1720 /// 1721 /// This can return one of two things: a simple address or a bitfield 1722 /// reference. In either case, the LLVM Value* in the LValue structure is 1723 /// guaranteed to be an LLVM pointer type. 1724 /// 1725 /// If this returns a bitfield reference, nothing about the pointee type of 1726 /// the LLVM value is known: For example, it may not be a pointer to an 1727 /// integer. 1728 /// 1729 /// If this returns a normal address, and if the lvalue's C type is fixed 1730 /// size, this method guarantees that the returned pointer type will point to 1731 /// an LLVM type of the same size as the lvalue's type. If the lvalue has a 1732 /// variable length type, this is not possible. 1733 /// 1734 LValue EmitLValue(const Expr *E); 1735 1736 /// EmitCheckedLValue - Same as EmitLValue but additionally we generate 1737 /// checking code to guard against undefined behavior. This is only 1738 /// suitable when we know that the address will be used to access the 1739 /// object. 1740 LValue EmitCheckedLValue(const Expr *E); 1741 1742 /// EmitToMemory - Change a scalar value from its value 1743 /// representation to its in-memory representation. 1744 llvm::Value *EmitToMemory(llvm::Value *Value, QualType Ty); 1745 1746 /// EmitFromMemory - Change a scalar value from its memory 1747 /// representation to its value representation.
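/// For illustration (a sketch, not part of the original comment): booleans
/// are stored in memory as i8 but used as i1 values, so a caller that has
/// just loaded the raw in-memory value converts it back explicitly. The names
/// 'Addr' and 'Ty' below are assumed:
///   llvm::Value *Raw = Builder.CreateLoad(Addr); // e.g. an i8 load
///   llvm::Value *V = EmitFromMemory(Raw, Ty);    // i1 value for a _Bool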
1748 llvm::Value *EmitFromMemory(llvm::Value *Value, QualType Ty); 1749 1750 /// EmitLoadOfScalar - Load a scalar value from an address, taking 1751 /// care to appropriately convert from the memory representation to 1752 /// the LLVM value representation. 1753 llvm::Value *EmitLoadOfScalar(llvm::Value *Addr, bool Volatile, 1754 unsigned Alignment, QualType Ty, 1755 llvm::MDNode *TBAAInfo = 0); 1756 1757 /// EmitStoreOfScalar - Store a scalar value to an address, taking 1758 /// care to appropriately convert from the LLVM value representation to 1759 /// the memory representation. 1760 void EmitStoreOfScalar(llvm::Value *Value, llvm::Value *Addr, 1761 bool Volatile, unsigned Alignment, QualType Ty, 1762 llvm::MDNode *TBAAInfo = 0); 1763 1764 /// EmitLoadOfLValue - Given an expression that represents a value lvalue, 1765 /// this method emits the address of the lvalue, then loads the result as an 1766 /// rvalue, returning the rvalue. 1767 RValue EmitLoadOfLValue(LValue V, QualType LVType); 1768 RValue EmitLoadOfExtVectorElementLValue(LValue V, QualType LVType); 1769 RValue EmitLoadOfBitfieldLValue(LValue LV, QualType ExprType); 1770 RValue EmitLoadOfPropertyRefLValue(LValue LV, 1771 ReturnValueSlot Return = ReturnValueSlot()); 1772 1773 /// EmitStoreThroughLValue - Store the specified rvalue into the specified 1774 /// lvalue, where both are guaranteed to have the same type, and that type 1775 /// is 'Ty'. 1776 void EmitStoreThroughLValue(RValue Src, LValue Dst, QualType Ty); 1777 void EmitStoreThroughExtVectorComponentLValue(RValue Src, LValue Dst, 1778 QualType Ty); 1779 void EmitStoreThroughPropertyRefLValue(RValue Src, LValue Dst); 1780 1781 /// EmitStoreThroughBitfieldLValue - Store Src into Dst with the same 1782 /// constraints as EmitStoreThroughLValue. 1783 /// 1784 /// \param Result [out] - If non-null, this will be set to a Value* for the 1785 /// bit-field contents after the store, appropriate for use as the result of 1786 /// an assignment to the bit-field. 1787 void EmitStoreThroughBitfieldLValue(RValue Src, LValue Dst, QualType Ty, 1788 llvm::Value **Result=0); 1789 1790 /// Emit an l-value for an assignment (simple or compound) of complex type.
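/// A hypothetical use (names assumed), where 'BO' is a BinaryOperator such as
/// 'c = z' with '_Complex double c':
///   LValue LV = EmitComplexAssignmentLValue(BO);
///   // LV now designates 'c' and can be reused without re-evaluating it.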
1791 LValue EmitComplexAssignmentLValue(const BinaryOperator *E); 1792 LValue EmitComplexCompoundAssignmentLValue(const CompoundAssignOperator *E); 1793 1794 // Note: only available for agg return types 1795 LValue EmitBinaryOperatorLValue(const BinaryOperator *E); 1796 LValue EmitCompoundAssignmentLValue(const CompoundAssignOperator *E); 1797 // Note: only available for agg return types 1798 LValue EmitCallExprLValue(const CallExpr *E); 1799 // Note: only available for agg return types 1800 LValue EmitVAArgExprLValue(const VAArgExpr *E); 1801 LValue EmitDeclRefLValue(const DeclRefExpr *E); 1802 LValue EmitStringLiteralLValue(const StringLiteral *E); 1803 LValue EmitObjCEncodeExprLValue(const ObjCEncodeExpr *E); 1804 LValue EmitPredefinedLValue(const PredefinedExpr *E); 1805 LValue EmitUnaryOpLValue(const UnaryOperator *E); 1806 LValue EmitArraySubscriptExpr(const ArraySubscriptExpr *E); 1807 LValue EmitExtVectorElementExpr(const ExtVectorElementExpr *E); 1808 LValue EmitMemberExpr(const MemberExpr *E); 1809 LValue EmitObjCIsaExpr(const ObjCIsaExpr *E); 1810 LValue EmitCompoundLiteralLValue(const CompoundLiteralExpr *E); 1811 LValue EmitConditionalOperatorLValue(const AbstractConditionalOperator *E); 1812 LValue EmitCastLValue(const CastExpr *E); 1813 LValue EmitNullInitializationLValue(const CXXScalarValueInitExpr *E); 1814 LValue EmitOpaqueValueLValue(const OpaqueValueExpr *e); 1815 1816 llvm::Value *EmitIvarOffset(const ObjCInterfaceDecl *Interface, 1817 const ObjCIvarDecl *Ivar); 1818 LValue EmitLValueForAnonRecordField(llvm::Value* Base, 1819 const IndirectFieldDecl* Field, 1820 unsigned CVRQualifiers); 1821 LValue EmitLValueForField(llvm::Value* Base, const FieldDecl* Field, 1822 unsigned CVRQualifiers); 1823 1824 /// EmitLValueForFieldInitialization - Like EmitLValueForField, except that 1825 /// if the Field is a reference, this will return the address of the reference 1826 /// and not the address of the value stored in the reference.
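/// A sketch of the distinction (names assumed): given
///   struct A { int &r; A(int &x) : r(x) {} };
/// the mem-initializer for 'r' needs the slot of the reference itself,
///   LValue Slot = EmitLValueForFieldInitialization(This, RField, 0);
/// whereas EmitLValueForField(This, RField, 0) would designate the 'int'
/// that 'r' is bound to.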
1827 LValue EmitLValueForFieldInitialization(llvm::Value* Base, 1828 const FieldDecl* Field, 1829 unsigned CVRQualifiers); 1830 1831 LValue EmitLValueForIvar(QualType ObjectTy, 1832 llvm::Value* Base, const ObjCIvarDecl *Ivar, 1833 unsigned CVRQualifiers); 1834 1835 LValue EmitLValueForBitfield(llvm::Value* Base, const FieldDecl* Field, 1836 unsigned CVRQualifiers); 1837 1838 LValue EmitBlockDeclRefLValue(const BlockDeclRefExpr *E); 1839 1840 LValue EmitCXXConstructLValue(const CXXConstructExpr *E); 1841 LValue EmitCXXBindTemporaryLValue(const CXXBindTemporaryExpr *E); 1842 LValue EmitExprWithCleanupsLValue(const ExprWithCleanups *E); 1843 LValue EmitCXXTypeidLValue(const CXXTypeidExpr *E); 1844 1845 LValue EmitObjCMessageExprLValue(const ObjCMessageExpr *E); 1846 LValue EmitObjCIvarRefLValue(const ObjCIvarRefExpr *E); 1847 LValue EmitObjCPropertyRefLValue(const ObjCPropertyRefExpr *E); 1848 LValue EmitStmtExprLValue(const StmtExpr *E); 1849 LValue EmitPointerToDataMemberBinaryExpr(const BinaryOperator *E); 1850 LValue EmitObjCSelectorLValue(const ObjCSelectorExpr *E); 1851 void EmitDeclRefExprDbgValue(const DeclRefExpr *E, llvm::Constant *Init); 1852 1853 //===--------------------------------------------------------------------===// 1854 // Scalar Expression Emission 1855 //===--------------------------------------------------------------------===// 1856 1857 /// EmitCall - Generate a call of the given function, expecting the given 1858 /// result type, and using the given argument list which specifies both the 1859 /// LLVM arguments and the types they were derived from. 1860 /// 1861 /// \param TargetDecl - If given, the decl of the function in a direct call; 1862 /// used to set attributes on the call (noreturn, etc.). 1863 RValue EmitCall(const CGFunctionInfo &FnInfo, 1864 llvm::Value *Callee, 1865 ReturnValueSlot ReturnValue, 1866 const CallArgList &Args, 1867 const Decl *TargetDecl = 0, 1868 llvm::Instruction **callOrInvoke = 0); 1869 1870 RValue EmitCall(QualType FnType, llvm::Value *Callee, 1871 ReturnValueSlot ReturnValue, 1872 CallExpr::const_arg_iterator ArgBeg, 1873 CallExpr::const_arg_iterator ArgEnd, 1874 const Decl *TargetDecl = 0); 1875 RValue EmitCallExpr(const CallExpr *E, 1876 ReturnValueSlot ReturnValue = ReturnValueSlot()); 1877 1878 llvm::CallSite EmitCallOrInvoke(llvm::Value *Callee, 1879 llvm::Value * const *ArgBegin, 1880 llvm::Value * const *ArgEnd, 1881 const llvm::Twine &Name = ""); 1882 1883 llvm::Value *BuildVirtualCall(const CXXMethodDecl *MD, llvm::Value *This, 1884 const llvm::Type *Ty); 1885 llvm::Value *BuildVirtualCall(const CXXDestructorDecl *DD, CXXDtorType Type, 1886 llvm::Value *This, const llvm::Type *Ty); 1887 llvm::Value *BuildAppleKextVirtualCall(const CXXMethodDecl *MD, 1888 NestedNameSpecifier *Qual, 1889 const llvm::Type *Ty); 1890 1891 llvm::Value *BuildAppleKextVirtualDestructorCall(const CXXDestructorDecl *DD, 1892 CXXDtorType Type, 1893 const CXXRecordDecl *RD); 1894 1895 RValue EmitCXXMemberCall(const CXXMethodDecl *MD, 1896 llvm::Value *Callee, 1897 ReturnValueSlot ReturnValue, 1898 llvm::Value *This, 1899 llvm::Value *VTT, 1900 CallExpr::const_arg_iterator ArgBeg, 1901 CallExpr::const_arg_iterator ArgEnd); 1902 RValue EmitCXXMemberCallExpr(const CXXMemberCallExpr *E, 1903 ReturnValueSlot ReturnValue); 1904 RValue EmitCXXMemberPointerCallExpr(const CXXMemberCallExpr *E, 1905 ReturnValueSlot ReturnValue); 1906 1907 RValue EmitCXXOperatorMemberCallExpr(const CXXOperatorCallExpr *E, 1908 const CXXMethodDecl *MD, 1909 ReturnValueSlot 
ReturnValue); 1910 1911 1912 RValue EmitBuiltinExpr(const FunctionDecl *FD, 1913 unsigned BuiltinID, const CallExpr *E); 1914 1915 RValue EmitBlockCallExpr(const CallExpr *E, ReturnValueSlot ReturnValue); 1916 1917 /// EmitTargetBuiltinExpr - Emit the given builtin call. Returns 0 if the call 1918 /// is unhandled by the current target. 1919 llvm::Value *EmitTargetBuiltinExpr(unsigned BuiltinID, const CallExpr *E); 1920 1921 llvm::Value *EmitARMBuiltinExpr(unsigned BuiltinID, const CallExpr *E); 1922 llvm::Value *EmitNeonCall(llvm::Function *F, 1923 llvm::SmallVectorImpl<llvm::Value*> &O, 1924 const char *name, 1925 unsigned shift = 0, bool rightshift = false); 1926 llvm::Value *EmitNeonSplat(llvm::Value *V, llvm::Constant *Idx); 1927 llvm::Value *EmitNeonShiftVector(llvm::Value *V, const llvm::Type *Ty, 1928 bool negateForRightShift); 1929 1930 llvm::Value *BuildVector(const llvm::SmallVectorImpl<llvm::Value*> &Ops); 1931 llvm::Value *EmitX86BuiltinExpr(unsigned BuiltinID, const CallExpr *E); 1932 llvm::Value *EmitPPCBuiltinExpr(unsigned BuiltinID, const CallExpr *E); 1933 1934 llvm::Value *EmitObjCProtocolExpr(const ObjCProtocolExpr *E); 1935 llvm::Value *EmitObjCStringLiteral(const ObjCStringLiteral *E); 1936 llvm::Value *EmitObjCSelectorExpr(const ObjCSelectorExpr *E); 1937 RValue EmitObjCMessageExpr(const ObjCMessageExpr *E, 1938 ReturnValueSlot Return = ReturnValueSlot()); 1939 1940 /// EmitReferenceBindingToExpr - Emits a reference binding to the passed in 1941 /// expression. Will emit a temporary variable if E is not an LValue. 1942 RValue EmitReferenceBindingToExpr(const Expr* E, 1943 const NamedDecl *InitializedDecl); 1944 1945 //===--------------------------------------------------------------------===// 1946 // Expression Emission 1947 //===--------------------------------------------------------------------===// 1948 1949 // Expressions are broken into three classes: scalar, complex, aggregate. 1950 1951 /// EmitScalarExpr - Emit the computation of the specified expression of LLVM 1952 /// scalar type, returning the result. 1953 llvm::Value *EmitScalarExpr(const Expr *E , bool IgnoreResultAssign = false); 1954 1955 /// EmitScalarConversion - Emit a conversion from the specified type to the 1956 /// specified destination type, both of which are LLVM scalar types. 1957 llvm::Value *EmitScalarConversion(llvm::Value *Src, QualType SrcTy, 1958 QualType DstTy); 1959 1960 /// EmitComplexToScalarConversion - Emit a conversion from the specified 1961 /// complex type to the specified destination type, where the destination type 1962 /// is an LLVM scalar type. 1963 llvm::Value *EmitComplexToScalarConversion(ComplexPairTy Src, QualType SrcTy, 1964 QualType DstTy); 1965 1966 1967 /// EmitAggExpr - Emit the computation of the specified expression 1968 /// of aggregate type. The result is computed into the given slot, 1969 /// which may be null to indicate that the value is not needed. 1970 void EmitAggExpr(const Expr *E, AggValueSlot AS, bool IgnoreResult = false); 1971 1972 /// EmitAggExprToLValue - Emit the computation of the specified expression of 1973 /// aggregate type into a temporary LValue. 1974 LValue EmitAggExprToLValue(const Expr *E); 1975 1976 /// EmitGCMemmoveCollectable - Emit special API for structs with object 1977 /// pointers. 1978 void EmitGCMemmoveCollectable(llvm::Value *DestPtr, llvm::Value *SrcPtr, 1979 QualType Ty); 1980 1981 /// EmitComplexExpr - Emit the computation of the specified expression of 1982 /// complex type, returning the result. 
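/// A minimal usage sketch (names assumed): evaluate a '_Complex' expression
/// and write the result to a destination address:
///   ComplexPairTy CV = EmitComplexExpr(E);
///   StoreComplexToAddr(CV, DestAddr, /*DestIsVolatile=*/false);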
1983 ComplexPairTy EmitComplexExpr(const Expr *E, 1984 bool IgnoreReal = false, 1985 bool IgnoreImag = false); 1986 1987 /// EmitComplexExprIntoAddr - Emit the computation of the specified expression 1988 /// of complex type, storing into the specified Value*. 1989 void EmitComplexExprIntoAddr(const Expr *E, llvm::Value *DestAddr, 1990 bool DestIsVolatile); 1991 1992 /// StoreComplexToAddr - Store a complex number into the specified address. 1993 void StoreComplexToAddr(ComplexPairTy V, llvm::Value *DestAddr, 1994 bool DestIsVolatile); 1995 /// LoadComplexFromAddr - Load a complex number from the specified address. 1996 ComplexPairTy LoadComplexFromAddr(llvm::Value *SrcAddr, bool SrcIsVolatile); 1997 1998 /// CreateStaticVarDecl - Create a zero-initialized LLVM global for 1999 /// a static local variable. 2000 llvm::GlobalVariable *CreateStaticVarDecl(const VarDecl &D, 2001 const char *Separator, 2002 llvm::GlobalValue::LinkageTypes Linkage); 2003 2004 /// AddInitializerToStaticVarDecl - Add the initializer for 'D' to the 2005 /// global variable that has already been created for it. If the initializer 2006 /// has a different type than GV does, this may free GV and return a different 2007 /// one. Otherwise it just returns GV. 2008 llvm::GlobalVariable * 2009 AddInitializerToStaticVarDecl(const VarDecl &D, 2010 llvm::GlobalVariable *GV); 2011 2012 2013 /// EmitCXXGlobalVarDeclInit - Create the initializer for a C++ 2014 /// variable with global storage. 2015 void EmitCXXGlobalVarDeclInit(const VarDecl &D, llvm::Constant *DeclPtr); 2016 2017 /// EmitCXXGlobalDtorRegistration - Emits a call to register the global ptr 2018 /// with the C++ runtime so that its destructor will be called at exit. 2019 void EmitCXXGlobalDtorRegistration(llvm::Constant *DtorFn, 2020 llvm::Constant *DeclPtr); 2021 2022 /// Emit code in this function to perform a guarded variable 2023 /// initialization. Guarded initializations are used when it's not 2024 /// possible to prove that an initialization will be done exactly 2025 /// once, e.g. with a static local variable or a static data member 2026 /// of a class template. 2027 void EmitCXXGuardedInit(const VarDecl &D, llvm::GlobalVariable *DeclPtr); 2028 2029 /// GenerateCXXGlobalInitFunc - Generates code for initializing global 2030 /// variables. 2031 void GenerateCXXGlobalInitFunc(llvm::Function *Fn, 2032 llvm::Constant **Decls, 2033 unsigned NumDecls); 2034 2035 /// GenerateCXXGlobalDtorFunc - Generates code for destroying global 2036 /// variables. 2037 void GenerateCXXGlobalDtorFunc(llvm::Function *Fn, 2038 const std::vector<std::pair<llvm::WeakVH, 2039 llvm::Constant*> > &DtorsAndObjects); 2040 2041 void GenerateCXXGlobalVarDeclInitFunc(llvm::Function *Fn, 2042 const VarDecl *D, 2043 llvm::GlobalVariable *Addr); 2044 2045 void EmitCXXConstructExpr(const CXXConstructExpr *E, AggValueSlot Dest); 2046 2047 void EmitSynthesizedCXXCopyCtor(llvm::Value *Dest, llvm::Value *Src, 2048 const Expr *Exp); 2049 2050 RValue EmitExprWithCleanups(const ExprWithCleanups *E, 2051 AggValueSlot Slot =AggValueSlot::ignored()); 2052 2053 void EmitCXXThrowExpr(const CXXThrowExpr *E); 2054 2055 //===--------------------------------------------------------------------===// 2056 // Internal Helpers 2057 //===--------------------------------------------------------------------===// 2058 2059 /// ContainsLabel - Return true if the statement contains a label in it. 
If 2060 /// this statement is not executed normally, and it does not contain a label, 2061 /// then we can just remove the code. 2062 static bool ContainsLabel(const Stmt *S, bool IgnoreCaseStmts = false); 2063 2064 /// containsBreak - Return true if the statement contains a break out of it. 2065 /// If the statement (recursively) contains a switch or loop with a break 2066 /// inside of it, this is fine. 2067 static bool containsBreak(const Stmt *S); 2068 2069 /// ConstantFoldsToSimpleInteger - If the specified expression does not fold 2070 /// to a constant, or if it does but contains a label, return false. If it 2071 /// constant folds, return true and set the boolean result in Result. 2072 bool ConstantFoldsToSimpleInteger(const Expr *Cond, bool &Result); 2073 2074 /// ConstantFoldsToSimpleInteger - If the specified expression does not fold 2075 /// to a constant, or if it does but contains a label, return false. If it 2076 /// constant folds, return true and set the folded value. 2077 bool ConstantFoldsToSimpleInteger(const Expr *Cond, llvm::APInt &Result); 2078 2079 /// EmitBranchOnBoolExpr - Emit a branch on a boolean condition (e.g. for an 2080 /// if statement) to the specified blocks. Based on the condition, this might 2081 /// try to simplify the codegen of the conditional based on the branch. 2082 void EmitBranchOnBoolExpr(const Expr *Cond, llvm::BasicBlock *TrueBlock, 2083 llvm::BasicBlock *FalseBlock); 2084 2085 /// getTrapBB - Create a basic block that will call the trap intrinsic. We'll 2086 /// generate a branch around the created basic block as necessary. 2087 llvm::BasicBlock *getTrapBB(); 2088 2089 /// EmitCallArg - Emit a single call argument. 2090 RValue EmitCallArg(const Expr *E, QualType ArgType); 2091 2092 /// EmitDelegateCallArg - We are performing a delegate call; that 2093 /// is, the current function is delegating to another one. Produce 2094 /// an r-value suitable for passing the given parameter. 2095 RValue EmitDelegateCallArg(const VarDecl *Param); 2096 2097private: 2098 void EmitReturnOfRValue(RValue RV, QualType Ty); 2099 2100 /// ExpandTypeFromArgs - Reconstruct a structure of type \arg Ty 2101 /// from function arguments into \arg Dst. See ABIArgInfo::Expand. 2102 /// 2103 /// \param AI - The first function argument of the expansion. 2104 /// \return The argument following the last expanded function 2105 /// argument. 2106 llvm::Function::arg_iterator 2107 ExpandTypeFromArgs(QualType Ty, LValue Dst, 2108 llvm::Function::arg_iterator AI); 2109 2110 /// ExpandTypeToArgs - Expand an RValue \arg Src, with the LLVM type for \arg 2111 /// Ty, into individual arguments on the provided vector \arg Args. See 2112 /// ABIArgInfo::Expand. 2113 void ExpandTypeToArgs(QualType Ty, RValue Src, 2114 llvm::SmallVector<llvm::Value*, 16> &Args); 2115 2116 llvm::Value* EmitAsmInput(const AsmStmt &S, 2117 const TargetInfo::ConstraintInfo &Info, 2118 const Expr *InputExpr, std::string &ConstraintStr); 2119 2120 llvm::Value* EmitAsmInputLValue(const AsmStmt &S, 2121 const TargetInfo::ConstraintInfo &Info, 2122 LValue InputValue, QualType InputType, 2123 std::string &ConstraintStr); 2124 2125 /// EmitCallArgs - Emit call arguments for a function. 2126 /// The CallArgTypeInfo parameter is used for iterating over the known 2127 /// argument types of the function being called.
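/// A hypothetical call site (sketch only), assuming 'CE' is the CallExpr
/// being lowered and 'FPT' is the callee's FunctionProtoType, or null for an
/// unprototyped function:
///   CallArgList Args;
///   EmitCallArgs(Args, FPT, CE->arg_begin(), CE->arg_end());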
2128 template<typename T> 2129 void EmitCallArgs(CallArgList& Args, const T* CallArgTypeInfo, 2130 CallExpr::const_arg_iterator ArgBeg, 2131 CallExpr::const_arg_iterator ArgEnd) { 2132 CallExpr::const_arg_iterator Arg = ArgBeg; 2133 2134 // First, use the argument types that the type info knows about 2135 if (CallArgTypeInfo) { 2136 for (typename T::arg_type_iterator I = CallArgTypeInfo->arg_type_begin(), 2137 E = CallArgTypeInfo->arg_type_end(); I != E; ++I, ++Arg) { 2138 assert(Arg != ArgEnd && "Running over edge of argument list!"); 2139 QualType ArgType = *I; 2140#ifndef NDEBUG 2141 QualType ActualArgType = Arg->getType(); 2142 if (ArgType->isPointerType() && ActualArgType->isPointerType()) { 2143 QualType ActualBaseType = 2144 ActualArgType->getAs<PointerType>()->getPointeeType(); 2145 QualType ArgBaseType = 2146 ArgType->getAs<PointerType>()->getPointeeType(); 2147 if (ArgBaseType->isVariableArrayType()) { 2148 if (const VariableArrayType *VAT = 2149 getContext().getAsVariableArrayType(ActualBaseType)) { 2150 if (!VAT->getSizeExpr()) 2151 ActualArgType = ArgType; 2152 } 2153 } 2154 } 2155 assert(getContext().getCanonicalType(ArgType.getNonReferenceType()). 2156 getTypePtr() == 2157 getContext().getCanonicalType(ActualArgType).getTypePtr() && 2158 "type mismatch in call argument!"); 2159#endif 2160 Args.push_back(std::make_pair(EmitCallArg(*Arg, ArgType), 2161 ArgType)); 2162 } 2163 2164 // Either we've emitted all the call args, or we have a call to a 2165 // variadic function. 2166 assert((Arg == ArgEnd || CallArgTypeInfo->isVariadic()) && 2167 "Extra arguments in non-variadic function!"); 2168 2169 } 2170 2171 // If we still have any arguments, emit them using the type of the argument. 2172 for (; Arg != ArgEnd; ++Arg) { 2173 QualType ArgType = Arg->getType(); 2174 Args.push_back(std::make_pair(EmitCallArg(*Arg, ArgType), 2175 ArgType)); 2176 } 2177 } 2178 2179 const TargetCodeGenInfo &getTargetHooks() const { 2180 return CGM.getTargetCodeGenInfo(); 2181 } 2182 2183 void EmitDeclMetadata(); 2184}; 2185 2186/// Helper class with most of the code for saving a value for a 2187/// conditional expression cleanup. 2188struct DominatingLLVMValue { 2189 typedef llvm::PointerIntPair<llvm::Value*, 1, bool> saved_type; 2190 2191 /// Answer whether the given value needs extra work to be saved. 2192 static bool needsSaving(llvm::Value *value) { 2193 // If it's not an instruction, we don't need to save. 2194 if (!isa<llvm::Instruction>(value)) return false; 2195 2196 // If it's an instruction in the entry block, we don't need to save. 2197 llvm::BasicBlock *block = cast<llvm::Instruction>(value)->getParent(); 2198 return (block != &block->getParent()->getEntryBlock()); 2199 } 2200 2201 /// Try to save the given value. 2202 static saved_type save(CodeGenFunction &CGF, llvm::Value *value) { 2203 if (!needsSaving(value)) return saved_type(value, false); 2204 2205 // Otherwise we need an alloca. 2206 llvm::Value *alloca = 2207 CGF.CreateTempAlloca(value->getType(), "cond-cleanup.save"); 2208 CGF.Builder.CreateStore(value, alloca); 2209 2210 return saved_type(alloca, true); 2211 } 2212 2213 static llvm::Value *restore(CodeGenFunction &CGF, saved_type value) { 2214 if (!value.getInt()) return value.getPointer(); 2215 return CGF.Builder.CreateLoad(value.getPointer()); 2216 } 2217}; 2218 2219/// A partial specialization of DominatingValue for llvm::Values that 2220/// might be llvm::Instructions. 
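/// For illustration (a sketch, not from the original header): saving and
/// restoring a value that may be an instruction which does not dominate the
/// restore point, assuming 'CGF' is the current CodeGenFunction and 'V' is an
/// llvm::Value* computed before a conditionally-emitted region:
///   DominatingValue<llvm::Value*>::saved_type Saved =
///       DominatingValue<llvm::Value*>::save(CGF, V);
///   // ... conditionally-emitted code ...
///   llvm::Value *Restored = DominatingValue<llvm::Value*>::restore(CGF, Saved);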
2221template <class T> struct DominatingPointer<T,true> : DominatingLLVMValue { 2222 typedef T *type; 2223 static type restore(CodeGenFunction &CGF, saved_type value) { 2224 return static_cast<T*>(DominatingLLVMValue::restore(CGF, value)); 2225 } 2226}; 2227 2228/// A specialization of DominatingValue for RValue. 2229template <> struct DominatingValue<RValue> { 2230 typedef RValue type; 2231 class saved_type { 2232 enum Kind { ScalarLiteral, ScalarAddress, AggregateLiteral, 2233 AggregateAddress, ComplexAddress }; 2234 2235 llvm::Value *Value; 2236 Kind K; 2237 saved_type(llvm::Value *v, Kind k) : Value(v), K(k) {} 2238 2239 public: 2240 static bool needsSaving(RValue value); 2241 static saved_type save(CodeGenFunction &CGF, RValue value); 2242 RValue restore(CodeGenFunction &CGF); 2243 2244 // implementations in CGExprCXX.cpp 2245 }; 2246 2247 static bool needsSaving(type value) { 2248 return saved_type::needsSaving(value); 2249 } 2250 static saved_type save(CodeGenFunction &CGF, type value) { 2251 return saved_type::save(CGF, value); 2252 } 2253 static type restore(CodeGenFunction &CGF, saved_type value) { 2254 return value.restore(CGF); 2255 } 2256}; 2257 2258} // end namespace CodeGen 2259} // end namespace clang 2260 2261#endif 2262