CodeGenFunction.h revision aa3b57ee9e36a805371e2a543383225cdd2a5d83
1//===-- CodeGenFunction.h - Per-Function state for LLVM CodeGen -*- C++ -*-===// 2// 3// The LLVM Compiler Infrastructure 4// 5// This file is distributed under the University of Illinois Open Source 6// License. See LICENSE.TXT for details. 7// 8//===----------------------------------------------------------------------===// 9// 10// This is the internal per-function state used for llvm translation. 11// 12//===----------------------------------------------------------------------===// 13 14#ifndef CLANG_CODEGEN_CODEGENFUNCTION_H 15#define CLANG_CODEGEN_CODEGENFUNCTION_H 16 17#include "clang/AST/Type.h" 18#include "clang/AST/ExprCXX.h" 19#include "clang/AST/ExprObjC.h" 20#include "clang/AST/CharUnits.h" 21#include "clang/Basic/TargetInfo.h" 22#include "llvm/ADT/DenseMap.h" 23#include "llvm/ADT/SmallVector.h" 24#include "llvm/Support/ValueHandle.h" 25#include "CodeGenModule.h" 26#include "CGBlocks.h" 27#include "CGBuilder.h" 28#include "CGCall.h" 29#include "CGCXX.h" 30#include "CGValue.h" 31 32namespace llvm { 33 class BasicBlock; 34 class LLVMContext; 35 class MDNode; 36 class Module; 37 class SwitchInst; 38 class Twine; 39 class Value; 40 class CallSite; 41} 42 43namespace clang { 44 class APValue; 45 class ASTContext; 46 class CXXDestructorDecl; 47 class CXXTryStmt; 48 class Decl; 49 class EnumConstantDecl; 50 class FunctionDecl; 51 class FunctionProtoType; 52 class LabelStmt; 53 class ObjCContainerDecl; 54 class ObjCInterfaceDecl; 55 class ObjCIvarDecl; 56 class ObjCMethodDecl; 57 class ObjCImplementationDecl; 58 class ObjCPropertyImplDecl; 59 class TargetInfo; 60 class TargetCodeGenInfo; 61 class VarDecl; 62 class ObjCForCollectionStmt; 63 class ObjCAtTryStmt; 64 class ObjCAtThrowStmt; 65 class ObjCAtSynchronizedStmt; 66 67namespace CodeGen { 68 class CodeGenTypes; 69 class CGDebugInfo; 70 class CGFunctionInfo; 71 class CGRecordLayout; 72 class CGBlockInfo; 73 class CGCXXABI; 74 75/// A branch fixup. These are required when emitting a goto to a 76/// label which hasn't been emitted yet. The goto is optimistically 77/// emitted as a branch to the basic block for the label, and (if it 78/// occurs in a scope with non-trivial cleanups) a fixup is added to 79/// the innermost cleanup. When a (normal) cleanup is popped, any 80/// unresolved fixups in that scope are threaded through the cleanup. 81struct BranchFixup { 82 /// The block containing the terminator which needs to be modified 83 /// into a switch if this fixup is resolved into the current scope. 84 /// If null, LatestBranch points directly to the destination. 85 llvm::BasicBlock *OptimisticBranchBlock; 86 87 /// The ultimate destination of the branch. 88 /// 89 /// This can be set to null to indicate that this fixup was 90 /// successfully resolved. 91 llvm::BasicBlock *Destination; 92 93 /// The destination index value. 94 unsigned DestinationIndex; 95 96 /// The initial branch of the fixup. 97 llvm::BranchInst *InitialBranch; 98}; 99 100enum CleanupKind { 101 EHCleanup = 0x1, 102 NormalCleanup = 0x2, 103 NormalAndEHCleanup = EHCleanup | NormalCleanup, 104 105 InactiveCleanup = 0x4, 106 InactiveEHCleanup = EHCleanup | InactiveCleanup, 107 InactiveNormalCleanup = NormalCleanup | InactiveCleanup, 108 InactiveNormalAndEHCleanup = NormalAndEHCleanup | InactiveCleanup 109}; 110 111/// A stack of scopes which respond to exceptions, including cleanups 112/// and catch blocks. 113class EHScopeStack { 114public: 115 /// A saved depth on the scope stack. 
This is necessary because 116 /// pushing scopes onto the stack invalidates iterators. 117 class stable_iterator { 118 friend class EHScopeStack; 119 120 /// Offset from StartOfData to EndOfBuffer. 121 ptrdiff_t Size; 122 123 stable_iterator(ptrdiff_t Size) : Size(Size) {} 124 125 public: 126 static stable_iterator invalid() { return stable_iterator(-1); } 127 stable_iterator() : Size(-1) {} 128 129 bool isValid() const { return Size >= 0; } 130 131 /// Returns true if this scope encloses I. 132 /// Returns false if I is invalid. 133 /// This scope must be valid. 134 bool encloses(stable_iterator I) const { return Size <= I.Size; } 135 136 /// Returns true if this scope strictly encloses I: that is, 137 /// if it encloses I and is not I. 138 /// Returns false is I is invalid. 139 /// This scope must be valid. 140 bool strictlyEncloses(stable_iterator I) const { return Size < I.Size; } 141 142 friend bool operator==(stable_iterator A, stable_iterator B) { 143 return A.Size == B.Size; 144 } 145 friend bool operator!=(stable_iterator A, stable_iterator B) { 146 return A.Size != B.Size; 147 } 148 }; 149 150 /// Information for lazily generating a cleanup. Subclasses must be 151 /// POD-like: cleanups will not be destructed, and they will be 152 /// allocated on the cleanup stack and freely copied and moved 153 /// around. 154 /// 155 /// Cleanup implementations should generally be declared in an 156 /// anonymous namespace. 157 class Cleanup { 158 public: 159 // Anchor the construction vtable. We use the destructor because 160 // gcc gives an obnoxious warning if there are virtual methods 161 // with an accessible non-virtual destructor. Unfortunately, 162 // declaring this destructor makes it non-trivial, but there 163 // doesn't seem to be any other way around this warning. 164 // 165 // This destructor will never be called. 166 virtual ~Cleanup(); 167 168 /// Emit the cleanup. For normal cleanups, this is run in the 169 /// same EH context as when the cleanup was pushed, i.e. the 170 /// immediately-enclosing context of the cleanup scope. For 171 /// EH cleanups, this is run in a terminate context. 172 /// 173 // \param IsForEHCleanup true if this is for an EH cleanup, false 174 /// if for a normal cleanup. 175 virtual void Emit(CodeGenFunction &CGF, bool IsForEHCleanup) = 0; 176 }; 177 178private: 179 // The implementation for this class is in CGException.h and 180 // CGException.cpp; the definition is here because it's used as a 181 // member of CodeGenFunction. 182 183 /// The start of the scope-stack buffer, i.e. the allocated pointer 184 /// for the buffer. All of these pointers are either simultaneously 185 /// null or simultaneously valid. 186 char *StartOfBuffer; 187 188 /// The end of the buffer. 189 char *EndOfBuffer; 190 191 /// The first valid entry in the buffer. 192 char *StartOfData; 193 194 /// The innermost normal cleanup on the stack. 195 stable_iterator InnermostNormalCleanup; 196 197 /// The innermost EH cleanup on the stack. 198 stable_iterator InnermostEHCleanup; 199 200 /// The number of catches on the stack. 201 unsigned CatchDepth; 202 203 /// The current EH destination index. Reset to FirstCatchIndex 204 /// whenever the last EH cleanup is popped. 205 unsigned NextEHDestIndex; 206 enum { FirstEHDestIndex = 1 }; 207 208 /// The current set of branch fixups. A branch fixup is a jump to 209 /// an as-yet unemitted label, i.e. a label for which we don't yet 210 /// know the EH stack depth. 
Whenever we pop a cleanup, we have 211 /// to thread all the current branch fixups through it. 212 /// 213 /// Fixups are recorded as the Use of the respective branch or 214 /// switch statement. The use points to the final destination. 215 /// When popping out of a cleanup, these uses are threaded through 216 /// the cleanup and adjusted to point to the new cleanup. 217 /// 218 /// Note that branches are allowed to jump into protected scopes 219 /// in certain situations; e.g. the following code is legal: 220 /// struct A { ~A(); }; // trivial ctor, non-trivial dtor 221 /// goto foo; 222 /// A a; 223 /// foo: 224 /// bar(); 225 llvm::SmallVector<BranchFixup, 8> BranchFixups; 226 227 char *allocate(size_t Size); 228 229 void *pushCleanup(CleanupKind K, size_t DataSize); 230 231public: 232 EHScopeStack() : StartOfBuffer(0), EndOfBuffer(0), StartOfData(0), 233 InnermostNormalCleanup(stable_end()), 234 InnermostEHCleanup(stable_end()), 235 CatchDepth(0), NextEHDestIndex(FirstEHDestIndex) {} 236 ~EHScopeStack() { delete[] StartOfBuffer; } 237 238 // Variadic templates would make this not terrible. 239 240 /// Push a lazily-created cleanup on the stack. 241 template <class T> 242 void pushCleanup(CleanupKind Kind) { 243 void *Buffer = pushCleanup(Kind, sizeof(T)); 244 Cleanup *Obj = new(Buffer) T(); 245 (void) Obj; 246 } 247 248 /// Push a lazily-created cleanup on the stack. 249 template <class T, class A0> 250 void pushCleanup(CleanupKind Kind, A0 a0) { 251 void *Buffer = pushCleanup(Kind, sizeof(T)); 252 Cleanup *Obj = new(Buffer) T(a0); 253 (void) Obj; 254 } 255 256 /// Push a lazily-created cleanup on the stack. 257 template <class T, class A0, class A1> 258 void pushCleanup(CleanupKind Kind, A0 a0, A1 a1) { 259 void *Buffer = pushCleanup(Kind, sizeof(T)); 260 Cleanup *Obj = new(Buffer) T(a0, a1); 261 (void) Obj; 262 } 263 264 /// Push a lazily-created cleanup on the stack. 265 template <class T, class A0, class A1, class A2> 266 void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2) { 267 void *Buffer = pushCleanup(Kind, sizeof(T)); 268 Cleanup *Obj = new(Buffer) T(a0, a1, a2); 269 (void) Obj; 270 } 271 272 /// Push a lazily-created cleanup on the stack. 273 template <class T, class A0, class A1, class A2, class A3> 274 void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3) { 275 void *Buffer = pushCleanup(Kind, sizeof(T)); 276 Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3); 277 (void) Obj; 278 } 279 280 /// Push a lazily-created cleanup on the stack. 281 template <class T, class A0, class A1, class A2, class A3, class A4> 282 void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3, A4 a4) { 283 void *Buffer = pushCleanup(Kind, sizeof(T)); 284 Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3, a4); 285 (void) Obj; 286 } 287 288 // Feel free to add more variants of the following: 289 290 /// Push a cleanup with non-constant storage requirements on the 291 /// stack. The cleanup type must provide an additional static method: 292 /// static size_t getExtraSize(size_t); 293 /// The argument to this method will be the value N, which will also 294 /// be passed as the first argument to the constructor. 295 /// 296 /// The data stored in the extra storage must obey the same 297 /// restrictions as normal cleanup member data. 298 /// 299 /// The pointer returned from this method is valid until the cleanup 300 /// stack is modified. 
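  ///
  /// Illustrative sketch (added by the editor, not part of the original
  /// header): a cleanup type with trailing variable-length storage could be
  /// declared like this, where 'DestroyObjects' and its members are made-up
  /// names:
  ///
  ///   struct DestroyObjects : EHScopeStack::Cleanup {
  ///     size_t NumObjects;
  ///     llvm::Value **getObjects() {
  ///       return reinterpret_cast<llvm::Value**>(this + 1);
  ///     }
  ///     static size_t getExtraSize(size_t N) {
  ///       return N * sizeof(llvm::Value*);
  ///     }
  ///     DestroyObjects(size_t N, QualType Ty, CodeGenModule &CGM, bool Flag);
  ///     void Emit(CodeGenFunction &CGF, bool IsForEHCleanup);
  ///   };
  ///
  ///   // Three constructor arguments follow N, matching the A0..A2 overload:
  ///   // EHStack.pushCleanupWithExtra<DestroyObjects>(NormalAndEHCleanup,
  ///   //                                              N, Ty, CGM, Flag);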
  template <class T, class A0, class A1, class A2>
  T *pushCleanupWithExtra(CleanupKind Kind, size_t N, A0 a0, A1 a1, A2 a2) {
    void *Buffer = pushCleanup(Kind, sizeof(T) + T::getExtraSize(N));
    return new (Buffer) T(N, a0, a1, a2);
  }

  /// Pops a cleanup scope off the stack. This should only be called
  /// by CodeGenFunction::PopCleanupBlock.
  void popCleanup();

  /// Push a set of catch handlers on the stack. The catch is
  /// uninitialized and will need to have the given number of handlers
  /// set on it.
  class EHCatchScope *pushCatch(unsigned NumHandlers);

  /// Pops a catch scope off the stack.
  void popCatch();

  /// Push an exceptions filter on the stack.
  class EHFilterScope *pushFilter(unsigned NumFilters);

  /// Pops an exceptions filter off the stack.
  void popFilter();

  /// Push a terminate handler on the stack.
  void pushTerminate();

  /// Pops a terminate handler off the stack.
  void popTerminate();

  /// Determines whether the exception-scopes stack is empty.
  bool empty() const { return StartOfData == EndOfBuffer; }

  bool requiresLandingPad() const {
    return (CatchDepth || hasEHCleanups());
  }

  /// Determines whether there are any normal cleanups on the stack.
  bool hasNormalCleanups() const {
    return InnermostNormalCleanup != stable_end();
  }

  /// Returns the innermost normal cleanup on the stack, or
  /// stable_end() if there are no normal cleanups.
  stable_iterator getInnermostNormalCleanup() const {
    return InnermostNormalCleanup;
  }
  stable_iterator getInnermostActiveNormalCleanup() const; // CGException.h

  /// Determines whether there are any EH cleanups on the stack.
  bool hasEHCleanups() const {
    return InnermostEHCleanup != stable_end();
  }

  /// Returns the innermost EH cleanup on the stack, or stable_end()
  /// if there are no EH cleanups.
  stable_iterator getInnermostEHCleanup() const {
    return InnermostEHCleanup;
  }
  stable_iterator getInnermostActiveEHCleanup() const; // CGException.h

  /// An unstable reference to a scope-stack depth. Invalidated by
  /// pushes but not pops.
  class iterator;

  /// Returns an iterator pointing to the innermost EH scope.
  iterator begin() const;

  /// Returns an iterator pointing to the outermost EH scope.
  iterator end() const;

  /// Create a stable reference to the top of the EH stack. The
  /// returned reference is valid until that scope is popped off the
  /// stack.
  stable_iterator stable_begin() const {
    return stable_iterator(EndOfBuffer - StartOfData);
  }

  /// Create a stable reference to the bottom of the EH stack.
  static stable_iterator stable_end() {
    return stable_iterator(0);
  }

  /// Translates an iterator into a stable_iterator.
  stable_iterator stabilize(iterator it) const;

  /// Finds the nearest cleanup enclosing the given iterator.
  /// Returns stable_iterator::invalid() if there are no such cleanups.
  stable_iterator getEnclosingEHCleanup(iterator it) const;

  /// Turn a stable reference to a scope depth into an unstable pointer
  /// to the EH stack.
  iterator find(stable_iterator save) const;

  /// Removes the cleanup pointed to by the given stable_iterator.
  void removeCleanup(stable_iterator save);

  /// Add a branch fixup to the current cleanup scope.
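  ///
  /// (Editor's illustration, not from the original source: a caller that has
  /// just emitted an optimistic branch to a not-yet-resolved destination
  /// would typically fill in the returned record, e.g.
  ///
  ///   BranchFixup &Fixup = EHStack.addBranchFixup();
  ///   Fixup.OptimisticBranchBlock = 0;          // not yet threaded
  ///   Fixup.Destination = Dest.getBlock();
  ///   Fixup.DestinationIndex = Dest.getDestIndex();
  ///   Fixup.InitialBranch = BI;                 // the branch just emitted
  ///
  /// where 'Dest' and 'BI' stand for the caller's JumpDest and branch
  /// instruction.)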
399 BranchFixup &addBranchFixup() { 400 assert(hasNormalCleanups() && "adding fixup in scope without cleanups"); 401 BranchFixups.push_back(BranchFixup()); 402 return BranchFixups.back(); 403 } 404 405 unsigned getNumBranchFixups() const { return BranchFixups.size(); } 406 BranchFixup &getBranchFixup(unsigned I) { 407 assert(I < getNumBranchFixups()); 408 return BranchFixups[I]; 409 } 410 411 /// Pops lazily-removed fixups from the end of the list. This 412 /// should only be called by procedures which have just popped a 413 /// cleanup or resolved one or more fixups. 414 void popNullFixups(); 415 416 /// Clears the branch-fixups list. This should only be called by 417 /// ResolveAllBranchFixups. 418 void clearFixups() { BranchFixups.clear(); } 419 420 /// Gets the next EH destination index. 421 unsigned getNextEHDestIndex() { return NextEHDestIndex++; } 422}; 423 424/// CodeGenFunction - This class organizes the per-function state that is used 425/// while generating LLVM code. 426class CodeGenFunction : public BlockFunction { 427 CodeGenFunction(const CodeGenFunction&); // DO NOT IMPLEMENT 428 void operator=(const CodeGenFunction&); // DO NOT IMPLEMENT 429 430 friend class CGCXXABI; 431public: 432 /// A jump destination is an abstract label, branching to which may 433 /// require a jump out through normal cleanups. 434 struct JumpDest { 435 JumpDest() : Block(0), ScopeDepth(), Index(0) {} 436 JumpDest(llvm::BasicBlock *Block, 437 EHScopeStack::stable_iterator Depth, 438 unsigned Index) 439 : Block(Block), ScopeDepth(Depth), Index(Index) {} 440 441 bool isValid() const { return Block != 0; } 442 llvm::BasicBlock *getBlock() const { return Block; } 443 EHScopeStack::stable_iterator getScopeDepth() const { return ScopeDepth; } 444 unsigned getDestIndex() const { return Index; } 445 446 private: 447 llvm::BasicBlock *Block; 448 EHScopeStack::stable_iterator ScopeDepth; 449 unsigned Index; 450 }; 451 452 /// An unwind destination is an abstract label, branching to which 453 /// may require a jump out through EH cleanups. 454 struct UnwindDest { 455 UnwindDest() : Block(0), ScopeDepth(), Index(0) {} 456 UnwindDest(llvm::BasicBlock *Block, 457 EHScopeStack::stable_iterator Depth, 458 unsigned Index) 459 : Block(Block), ScopeDepth(Depth), Index(Index) {} 460 461 bool isValid() const { return Block != 0; } 462 llvm::BasicBlock *getBlock() const { return Block; } 463 EHScopeStack::stable_iterator getScopeDepth() const { return ScopeDepth; } 464 unsigned getDestIndex() const { return Index; } 465 466 private: 467 llvm::BasicBlock *Block; 468 EHScopeStack::stable_iterator ScopeDepth; 469 unsigned Index; 470 }; 471 472 CodeGenModule &CGM; // Per-module state. 473 const TargetInfo &Target; 474 475 typedef std::pair<llvm::Value *, llvm::Value *> ComplexPairTy; 476 CGBuilderTy Builder; 477 478 /// CurFuncDecl - Holds the Decl for the current function or ObjC method. 479 /// This excludes BlockDecls. 480 const Decl *CurFuncDecl; 481 /// CurCodeDecl - This is the inner-most code context, which includes blocks. 482 const Decl *CurCodeDecl; 483 const CGFunctionInfo *CurFnInfo; 484 QualType FnRetTy; 485 llvm::Function *CurFn; 486 487 /// CurGD - The GlobalDecl for the current function being compiled. 488 GlobalDecl CurGD; 489 490 /// ReturnBlock - Unified return block. 491 JumpDest ReturnBlock; 492 493 /// ReturnValue - The temporary alloca to hold the return value. This is null 494 /// iff the function has no return value. 495 llvm::Value *ReturnValue; 496 497 /// RethrowBlock - Unified rethrow block. 
498 UnwindDest RethrowBlock; 499 500 /// AllocaInsertPoint - This is an instruction in the entry block before which 501 /// we prefer to insert allocas. 502 llvm::AssertingVH<llvm::Instruction> AllocaInsertPt; 503 504 // intptr_t, i32, i64 505 const llvm::IntegerType *IntPtrTy, *Int32Ty, *Int64Ty; 506 uint32_t LLVMPointerWidth; 507 508 bool Exceptions; 509 bool CatchUndefined; 510 511 /// \brief A mapping from NRVO variables to the flags used to indicate 512 /// when the NRVO has been applied to this variable. 513 llvm::DenseMap<const VarDecl *, llvm::Value *> NRVOFlags; 514 515 /// \brief A mapping from 'Save' expression in a conditional expression 516 /// to the IR for this expression. Used to implement IR gen. for Gnu 517 /// extension's missing LHS expression in a conditional operator expression. 518 llvm::DenseMap<const Expr *, llvm::Value *> ConditionalSaveExprs; 519 llvm::DenseMap<const Expr *, ComplexPairTy> ConditionalSaveComplexExprs; 520 521 EHScopeStack EHStack; 522 523 /// i32s containing the indexes of the cleanup destinations. 524 llvm::AllocaInst *NormalCleanupDest; 525 llvm::AllocaInst *EHCleanupDest; 526 527 unsigned NextCleanupDestIndex; 528 529 /// The exception slot. All landing pads write the current 530 /// exception pointer into this alloca. 531 llvm::Value *ExceptionSlot; 532 533 /// Emits a landing pad for the current EH stack. 534 llvm::BasicBlock *EmitLandingPad(); 535 536 llvm::BasicBlock *getInvokeDestImpl(); 537 538public: 539 /// ObjCEHValueStack - Stack of Objective-C exception values, used for 540 /// rethrows. 541 llvm::SmallVector<llvm::Value*, 8> ObjCEHValueStack; 542 543 // A struct holding information about a finally block's IR 544 // generation. For now, doesn't actually hold anything. 545 struct FinallyInfo { 546 }; 547 548 FinallyInfo EnterFinallyBlock(const Stmt *Stmt, 549 llvm::Constant *BeginCatchFn, 550 llvm::Constant *EndCatchFn, 551 llvm::Constant *RethrowFn); 552 void ExitFinallyBlock(FinallyInfo &FinallyInfo); 553 554 /// PushDestructorCleanup - Push a cleanup to call the 555 /// complete-object destructor of an object of the given type at the 556 /// given address. Does nothing if T is not a C++ class type with a 557 /// non-trivial destructor. 558 void PushDestructorCleanup(QualType T, llvm::Value *Addr); 559 560 /// PushDestructorCleanup - Push a cleanup to call the 561 /// complete-object variant of the given destructor on the object at 562 /// the given address. 563 void PushDestructorCleanup(const CXXDestructorDecl *Dtor, 564 llvm::Value *Addr); 565 566 /// PopCleanupBlock - Will pop the cleanup entry on the stack and 567 /// process all branch fixups. 568 void PopCleanupBlock(bool FallThroughIsBranchThrough = false); 569 570 /// DeactivateCleanupBlock - Deactivates the given cleanup block. 571 /// The block cannot be reactivated. Pops it if it's the top of the 572 /// stack. 573 void DeactivateCleanupBlock(EHScopeStack::stable_iterator Cleanup); 574 575 /// ActivateCleanupBlock - Activates an initially-inactive cleanup. 576 /// Cannot be used to resurrect a deactivated cleanup. 577 void ActivateCleanupBlock(EHScopeStack::stable_iterator Cleanup); 578 579 /// \brief Enters a new scope for capturing cleanups, all of which 580 /// will be executed once the scope is exited. 
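  ///
  /// Illustrative usage (editor's sketch, not part of the original header):
  ///
  ///   void CodeGenFunction::EmitSomeScopedConstruct(const Stmt &S) {
  ///     RunCleanupsScope Scope(*this);  // cleanups pushed below belong here
  ///     // ... emit declarations; their destructors are pushed as cleanups ...
  ///   }                                 // ~RunCleanupsScope emits/pops them
  ///
  /// 'EmitSomeScopedConstruct' is a made-up name; the statement emitters use
  /// this same pattern.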
581 class RunCleanupsScope { 582 CodeGenFunction& CGF; 583 EHScopeStack::stable_iterator CleanupStackDepth; 584 bool OldDidCallStackSave; 585 bool PerformCleanup; 586 587 RunCleanupsScope(const RunCleanupsScope &); // DO NOT IMPLEMENT 588 RunCleanupsScope &operator=(const RunCleanupsScope &); // DO NOT IMPLEMENT 589 590 public: 591 /// \brief Enter a new cleanup scope. 592 explicit RunCleanupsScope(CodeGenFunction &CGF) 593 : CGF(CGF), PerformCleanup(true) 594 { 595 CleanupStackDepth = CGF.EHStack.stable_begin(); 596 OldDidCallStackSave = CGF.DidCallStackSave; 597 CGF.DidCallStackSave = false; 598 } 599 600 /// \brief Exit this cleanup scope, emitting any accumulated 601 /// cleanups. 602 ~RunCleanupsScope() { 603 if (PerformCleanup) { 604 CGF.DidCallStackSave = OldDidCallStackSave; 605 CGF.PopCleanupBlocks(CleanupStackDepth); 606 } 607 } 608 609 /// \brief Determine whether this scope requires any cleanups. 610 bool requiresCleanups() const { 611 return CGF.EHStack.stable_begin() != CleanupStackDepth; 612 } 613 614 /// \brief Force the emission of cleanups now, instead of waiting 615 /// until this object is destroyed. 616 void ForceCleanup() { 617 assert(PerformCleanup && "Already forced cleanup"); 618 CGF.DidCallStackSave = OldDidCallStackSave; 619 CGF.PopCleanupBlocks(CleanupStackDepth); 620 PerformCleanup = false; 621 } 622 }; 623 624 625 /// PopCleanupBlocks - Takes the old cleanup stack size and emits 626 /// the cleanup blocks that have been added. 627 void PopCleanupBlocks(EHScopeStack::stable_iterator OldCleanupStackSize); 628 629 void ResolveBranchFixups(llvm::BasicBlock *Target); 630 631 /// The given basic block lies in the current EH scope, but may be a 632 /// target of a potentially scope-crossing jump; get a stable handle 633 /// to which we can perform this jump later. 634 JumpDest getJumpDestInCurrentScope(llvm::BasicBlock *Target) { 635 return JumpDest(Target, 636 EHStack.getInnermostNormalCleanup(), 637 NextCleanupDestIndex++); 638 } 639 640 /// The given basic block lies in the current EH scope, but may be a 641 /// target of a potentially scope-crossing jump; get a stable handle 642 /// to which we can perform this jump later. 643 JumpDest getJumpDestInCurrentScope(const char *Name = 0) { 644 return getJumpDestInCurrentScope(createBasicBlock(Name)); 645 } 646 647 /// EmitBranchThroughCleanup - Emit a branch from the current insert 648 /// block through the normal cleanup handling code (if any) and then 649 /// on to \arg Dest. 650 void EmitBranchThroughCleanup(JumpDest Dest); 651 652 /// EmitBranchThroughEHCleanup - Emit a branch from the current 653 /// insert block through the EH cleanup handling code (if any) and 654 /// then on to \arg Dest. 655 void EmitBranchThroughEHCleanup(UnwindDest Dest); 656 657 /// getRethrowDest - Returns the unified outermost-scope rethrow 658 /// destination. 659 UnwindDest getRethrowDest(); 660 661 /// BeginConditionalBranch - Should be called before a conditional part of an 662 /// expression is emitted. For example, before the RHS of the expression below 663 /// is emitted: 664 /// 665 /// b && f(T()); 666 /// 667 /// This is used to make sure that any temporaries created in the conditional 668 /// branch are only destroyed if the branch is taken. 669 void BeginConditionalBranch() { 670 ++ConditionalBranchLevel; 671 } 672 673 /// EndConditionalBranch - Should be called after a conditional part of an 674 /// expression has been emitted. 
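  ///
  /// For example (editor's illustration, not in the original header), the
  /// right-hand side of '&&' might be bracketed like this so that any
  /// temporaries it creates are known to be conditionally constructed:
  ///
  ///   BeginConditionalBranch();
  ///   llvm::Value *RHS = EvaluateExprAsBool(E->getRHS());
  ///   EndConditionalBranch();
  ///
  /// where 'E' stands for the enclosing BinaryOperator.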
  void EndConditionalBranch() {
    assert(ConditionalBranchLevel != 0 &&
           "Conditional branch mismatch!");

    --ConditionalBranchLevel;
  }

  /// isInConditionalBranch - Return true if we're currently emitting
  /// one branch or the other of a conditional expression.
  bool isInConditionalBranch() const { return ConditionalBranchLevel != 0; }

private:
  CGDebugInfo *DebugInfo;

  /// IndirectBranch - The first time an indirect goto is seen we create a block
  /// with an indirect branch. Every time we see the address of a label taken,
  /// we add the label to the indirect goto. Every subsequent indirect goto is
  /// codegen'd as a jump to the IndirectBranch's basic block.
  llvm::IndirectBrInst *IndirectBranch;

  /// LocalDeclMap - This keeps track of the LLVM allocas or globals for local C
  /// decls.
  llvm::DenseMap<const Decl*, llvm::Value*> LocalDeclMap;

  /// LabelMap - This keeps track of the LLVM basic block for each C label.
  llvm::DenseMap<const LabelStmt*, JumpDest> LabelMap;

  // BreakContinueStack - This keeps track of where break and continue
  // statements should jump to.
  struct BreakContinue {
    BreakContinue(JumpDest Break, JumpDest Continue)
      : BreakBlock(Break), ContinueBlock(Continue) {}

    JumpDest BreakBlock;
    JumpDest ContinueBlock;
  };
  llvm::SmallVector<BreakContinue, 8> BreakContinueStack;

  /// SwitchInsn - This is the nearest enclosing switch instruction. It is
  /// null if the current context is not in a switch.
  llvm::SwitchInst *SwitchInsn;

  /// CaseRangeBlock - This block holds the if-condition check for the last
  /// case statement range in the current switch instruction.
  llvm::BasicBlock *CaseRangeBlock;

  // VLASizeMap - This keeps track of the associated size for each VLA type.
  // We track this by the size expression rather than the type itself because
  // in certain situations, like a const qualifier applied to a VLA typedef,
  // multiple VLA types can share the same size expression.
  // FIXME: Maybe this could be a stack of maps that is pushed/popped as we
  // enter/leave scopes.
  llvm::DenseMap<const Expr*, llvm::Value*> VLASizeMap;

  /// DidCallStackSave - Whether llvm.stacksave has been called. Used to avoid
  /// calling llvm.stacksave for multiple VLAs in the same scope.
  bool DidCallStackSave;

  /// A block containing a single 'unreachable' instruction. Created
  /// lazily by getUnreachableBlock().
  llvm::BasicBlock *UnreachableBlock;

  /// CXXThisDecl - When generating code for a C++ member function,
  /// this will hold the implicit 'this' declaration.
  ImplicitParamDecl *CXXThisDecl;
  llvm::Value *CXXThisValue;

  /// CXXVTTDecl - When generating code for a base object constructor or
  /// base object destructor with virtual bases, this will hold the implicit
  /// VTT parameter.
  ImplicitParamDecl *CXXVTTDecl;
  llvm::Value *CXXVTTValue;

  /// ConditionalBranchLevel - Contains the nesting level of the current
  /// conditional branch. This is used so that we know if a temporary should
  /// be destroyed conditionally.
  unsigned ConditionalBranchLevel;

  /// ByRefValueInfo - For each __block variable, contains a pair of the LLVM
  /// type as well as the field number that contains the actual data.
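  ///
  /// (Editor's note, illustrative only: for a variable '__block int x;' the
  /// cached pair would look roughly like
  ///
  ///   { /*LLVM type*/ %struct.__block_byref_x, /*field index of 'x'*/ N }
  ///
  /// where N depends on whether copy/dispose helpers and alignment padding
  /// are present in the byref struct; see BuildByRefType below.)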
  llvm::DenseMap<const ValueDecl *, std::pair<const llvm::Type *,
                                              unsigned> > ByRefValueInfo;

  /// getByRefValueLLVMField - Given a declaration, returns the LLVM field
  /// number that holds the value.
  unsigned getByRefValueLLVMField(const ValueDecl *VD) const;

  llvm::BasicBlock *TerminateLandingPad;
  llvm::BasicBlock *TerminateHandler;
  llvm::BasicBlock *TrapBB;

public:
  CodeGenFunction(CodeGenModule &cgm);

  CodeGenTypes &getTypes() const { return CGM.getTypes(); }
  ASTContext &getContext() const;
  CGDebugInfo *getDebugInfo() { return DebugInfo; }

  /// Returns a pointer to the function's exception object slot, which
  /// is assigned in every landing pad.
  llvm::Value *getExceptionSlot();

  llvm::Value *getNormalCleanupDestSlot();
  llvm::Value *getEHCleanupDestSlot();

  llvm::BasicBlock *getUnreachableBlock() {
    if (!UnreachableBlock) {
      UnreachableBlock = createBasicBlock("unreachable");
      new llvm::UnreachableInst(getLLVMContext(), UnreachableBlock);
    }
    return UnreachableBlock;
  }

  llvm::BasicBlock *getInvokeDest() {
    if (!EHStack.requiresLandingPad()) return 0;
    return getInvokeDestImpl();
  }

  llvm::LLVMContext &getLLVMContext() { return VMContext; }

  //===--------------------------------------------------------------------===//
  //                                  Objective-C
  //===--------------------------------------------------------------------===//

  void GenerateObjCMethod(const ObjCMethodDecl *OMD);

  void StartObjCMethod(const ObjCMethodDecl *MD,
                       const ObjCContainerDecl *CD);

  /// GenerateObjCGetter - Synthesize an Objective-C property getter function.
  void GenerateObjCGetter(ObjCImplementationDecl *IMP,
                          const ObjCPropertyImplDecl *PID);
  void GenerateObjCCtorDtorMethod(ObjCImplementationDecl *IMP,
                                  ObjCMethodDecl *MD, bool ctor);

  /// GenerateObjCSetter - Synthesize an Objective-C property setter function
  /// for the given property.
813 void GenerateObjCSetter(ObjCImplementationDecl *IMP, 814 const ObjCPropertyImplDecl *PID); 815 bool IndirectObjCSetterArg(const CGFunctionInfo &FI); 816 bool IvarTypeWithAggrGCObjects(QualType Ty); 817 818 //===--------------------------------------------------------------------===// 819 // Block Bits 820 //===--------------------------------------------------------------------===// 821 822 llvm::Value *BuildBlockLiteralTmp(const BlockExpr *); 823 llvm::Constant *BuildDescriptorBlockDecl(const BlockExpr *, 824 const CGBlockInfo &Info, 825 const llvm::StructType *, 826 llvm::Constant *BlockVarLayout, 827 std::vector<HelperInfo> *); 828 829 llvm::Function *GenerateBlockFunction(GlobalDecl GD, 830 const BlockExpr *BExpr, 831 CGBlockInfo &Info, 832 const Decl *OuterFuncDecl, 833 llvm::Constant *& BlockVarLayout, 834 llvm::DenseMap<const Decl*, llvm::Value*> ldm); 835 836 llvm::Value *LoadBlockStruct(); 837 838 void AllocateBlockCXXThisPointer(const CXXThisExpr *E); 839 void AllocateBlockDecl(const BlockDeclRefExpr *E); 840 llvm::Value *GetAddrOfBlockDecl(const BlockDeclRefExpr *E) { 841 return GetAddrOfBlockDecl(E->getDecl(), E->isByRef()); 842 } 843 llvm::Value *GetAddrOfBlockDecl(const ValueDecl *D, bool ByRef); 844 const llvm::Type *BuildByRefType(const ValueDecl *D); 845 846 void GenerateCode(GlobalDecl GD, llvm::Function *Fn); 847 void StartFunction(GlobalDecl GD, QualType RetTy, 848 llvm::Function *Fn, 849 const FunctionArgList &Args, 850 SourceLocation StartLoc); 851 852 void EmitConstructorBody(FunctionArgList &Args); 853 void EmitDestructorBody(FunctionArgList &Args); 854 void EmitFunctionBody(FunctionArgList &Args); 855 856 /// EmitReturnBlock - Emit the unified return block, trying to avoid its 857 /// emission when possible. 858 void EmitReturnBlock(); 859 860 /// FinishFunction - Complete IR generation of the current function. It is 861 /// legal to call this function even if there is no current insertion point. 862 void FinishFunction(SourceLocation EndLoc=SourceLocation()); 863 864 /// GenerateThunk - Generate a thunk for the given method. 865 void GenerateThunk(llvm::Function *Fn, GlobalDecl GD, const ThunkInfo &Thunk); 866 867 void EmitCtorPrologue(const CXXConstructorDecl *CD, CXXCtorType Type, 868 FunctionArgList &Args); 869 870 /// InitializeVTablePointer - Initialize the vtable pointer of the given 871 /// subobject. 872 /// 873 void InitializeVTablePointer(BaseSubobject Base, 874 const CXXRecordDecl *NearestVBase, 875 uint64_t OffsetFromNearestVBase, 876 llvm::Constant *VTable, 877 const CXXRecordDecl *VTableClass); 878 879 typedef llvm::SmallPtrSet<const CXXRecordDecl *, 4> VisitedVirtualBasesSetTy; 880 void InitializeVTablePointers(BaseSubobject Base, 881 const CXXRecordDecl *NearestVBase, 882 uint64_t OffsetFromNearestVBase, 883 bool BaseIsNonVirtualPrimaryBase, 884 llvm::Constant *VTable, 885 const CXXRecordDecl *VTableClass, 886 VisitedVirtualBasesSetTy& VBases); 887 888 void InitializeVTablePointers(const CXXRecordDecl *ClassDecl); 889 890 891 /// EnterDtorCleanups - Enter the cleanups necessary to complete the 892 /// given phase of destruction for a destructor. The end result 893 /// should call destructors on members and base classes in reverse 894 /// order of their construction. 
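  ///
  /// Illustrative call pattern (editor's sketch, not from this header): the
  /// destructor-body emitter would presumably do something like
  ///
  ///   EnterDtorCleanups(Dtor, Dtor_Base);
  ///   EmitStmt(Dtor->getBody());
  ///   // popping the enclosing cleanup scope then runs the member and base
  ///   // destructors in reverse construction order
  ///
  /// with 'Dtor' being the destructor currently being emitted.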
895 void EnterDtorCleanups(const CXXDestructorDecl *Dtor, CXXDtorType Type); 896 897 /// ShouldInstrumentFunction - Return true if the current function should be 898 /// instrumented with __cyg_profile_func_* calls 899 bool ShouldInstrumentFunction(); 900 901 /// EmitFunctionInstrumentation - Emit LLVM code to call the specified 902 /// instrumentation function with the current function and the call site, if 903 /// function instrumentation is enabled. 904 void EmitFunctionInstrumentation(const char *Fn); 905 906 /// EmitFunctionProlog - Emit the target specific LLVM code to load the 907 /// arguments for the given function. This is also responsible for naming the 908 /// LLVM function arguments. 909 void EmitFunctionProlog(const CGFunctionInfo &FI, 910 llvm::Function *Fn, 911 const FunctionArgList &Args); 912 913 /// EmitFunctionEpilog - Emit the target specific LLVM code to return the 914 /// given temporary. 915 void EmitFunctionEpilog(const CGFunctionInfo &FI); 916 917 /// EmitStartEHSpec - Emit the start of the exception spec. 918 void EmitStartEHSpec(const Decl *D); 919 920 /// EmitEndEHSpec - Emit the end of the exception spec. 921 void EmitEndEHSpec(const Decl *D); 922 923 /// getTerminateLandingPad - Return a landing pad that just calls terminate. 924 llvm::BasicBlock *getTerminateLandingPad(); 925 926 /// getTerminateHandler - Return a handler (not a landing pad, just 927 /// a catch handler) that just calls terminate. This is used when 928 /// a terminate scope encloses a try. 929 llvm::BasicBlock *getTerminateHandler(); 930 931 const llvm::Type *ConvertTypeForMem(QualType T); 932 const llvm::Type *ConvertType(QualType T); 933 const llvm::Type *ConvertType(const TypeDecl *T) { 934 return ConvertType(getContext().getTypeDeclType(T)); 935 } 936 937 /// LoadObjCSelf - Load the value of self. This function is only valid while 938 /// generating code for an Objective-C method. 939 llvm::Value *LoadObjCSelf(); 940 941 /// TypeOfSelfObject - Return type of object that this self represents. 942 QualType TypeOfSelfObject(); 943 944 /// hasAggregateLLVMType - Return true if the specified AST type will map into 945 /// an aggregate LLVM type or is void. 946 static bool hasAggregateLLVMType(QualType T); 947 948 /// createBasicBlock - Create an LLVM basic block. 949 llvm::BasicBlock *createBasicBlock(const char *Name="", 950 llvm::Function *Parent=0, 951 llvm::BasicBlock *InsertBefore=0) { 952#ifdef NDEBUG 953 return llvm::BasicBlock::Create(VMContext, "", Parent, InsertBefore); 954#else 955 return llvm::BasicBlock::Create(VMContext, Name, Parent, InsertBefore); 956#endif 957 } 958 959 /// getBasicBlockForLabel - Return the LLVM basicblock that the specified 960 /// label maps to. 961 JumpDest getJumpDestForLabel(const LabelStmt *S); 962 963 /// SimplifyForwardingBlocks - If the given basic block is only a branch to 964 /// another basic block, simplify it. This assumes that no other code could 965 /// potentially reference the basic block. 966 void SimplifyForwardingBlocks(llvm::BasicBlock *BB); 967 968 /// EmitBlock - Emit the given block \arg BB and set it as the insert point, 969 /// adding a fall-through branch from the current insert block if 970 /// necessary. It is legal to call this function even if there is no current 971 /// insertion point. 972 /// 973 /// IsFinished - If true, indicates that the caller has finished emitting 974 /// branches to the given block and does not expect to emit code into it. This 975 /// means the block can be ignored if it is unreachable. 
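  ///
  /// Typical use (editor's illustration, not part of the original header):
  ///
  ///   llvm::BasicBlock *ThenBB = createBasicBlock("if.then");
  ///   llvm::BasicBlock *EndBB  = createBasicBlock("if.end");
  ///   Builder.CreateCondBr(CondV, ThenBB, EndBB);
  ///   EmitBlock(ThenBB);
  ///   // ... emit the 'then' body ...
  ///   EmitBranch(EndBB);
  ///   EmitBlock(EndBB, /*IsFinished=*/true);
  ///
  /// where 'CondV' stands for an already-emitted i1 condition value.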
  void EmitBlock(llvm::BasicBlock *BB, bool IsFinished=false);

  /// EmitBranch - Emit a branch to the specified basic block from the current
  /// insert block, taking care to avoid creation of branches from dummy
  /// blocks. It is legal to call this function even if there is no current
  /// insertion point.
  ///
  /// This function clears the current insertion point. The caller should
  /// follow calls to this function with calls to Emit*Block prior to
  /// generating new code.
  void EmitBranch(llvm::BasicBlock *Block);

  /// HaveInsertPoint - True if an insertion point is defined. If not, this
  /// indicates that the current code being emitted is unreachable.
  bool HaveInsertPoint() const {
    return Builder.GetInsertBlock() != 0;
  }

  /// EnsureInsertPoint - Ensure that an insertion point is defined so that
  /// emitted IR has a place to go. Note that by definition, if this function
  /// creates a block then that block is unreachable; callers may do better to
  /// detect when no insertion point is defined and simply skip IR generation.
  void EnsureInsertPoint() {
    if (!HaveInsertPoint())
      EmitBlock(createBasicBlock());
  }

  /// ErrorUnsupported - Print out an error that codegen doesn't support the
  /// specified stmt yet.
  void ErrorUnsupported(const Stmt *S, const char *Type,
                        bool OmitOnError=false);

  //===--------------------------------------------------------------------===//
  //                                  Helpers
  //===--------------------------------------------------------------------===//

  LValue MakeAddrLValue(llvm::Value *V, QualType T, unsigned Alignment = 0) {
    return LValue::MakeAddr(V, T, Alignment, getContext());
  }

  /// CreateTempAlloca - This creates an alloca and inserts it into the entry
  /// block. The caller is responsible for setting an appropriate alignment on
  /// the alloca.
  llvm::AllocaInst *CreateTempAlloca(const llvm::Type *Ty,
                                     const llvm::Twine &Name = "tmp");

  /// InitTempAlloca - Provide an initial value for the given alloca.
  void InitTempAlloca(llvm::AllocaInst *Alloca, llvm::Value *Value);

  /// CreateIRTemp - Create a temporary IR object of the given type, with
  /// appropriate alignment. This routine should only be used when a temporary
  /// value needs to be stored into an alloca (for example, to avoid explicit
  /// PHI construction), but the type is the IR type, not the type appropriate
  /// for storing in memory.
  llvm::AllocaInst *CreateIRTemp(QualType T, const llvm::Twine &Name = "tmp");

  /// CreateMemTemp - Create a temporary memory object of the given type, with
  /// appropriate alignment.
  llvm::AllocaInst *CreateMemTemp(QualType T, const llvm::Twine &Name = "tmp");

  /// CreateAggTemp - Create a temporary memory object for the given
  /// aggregate type.
  AggValueSlot CreateAggTemp(QualType T, const llvm::Twine &Name = "tmp") {
    return AggValueSlot::forAddr(CreateMemTemp(T, Name), false, false);
  }

  /// EvaluateExprAsBool - Perform the usual unary conversions on the specified
  /// expression and compare the result against zero, returning an Int1Ty value.
  llvm::Value *EvaluateExprAsBool(const Expr *E);

  /// EmitAnyExpr - Emit code to compute the specified expression which can
  /// have any type. The result is returned as an RValue struct. If this is an
  /// aggregate expression, the aggloc/agglocvolatile arguments indicate where
  /// the result should be returned.
  ///
  /// \param IgnoreResult - True if the resulting value isn't used.
  RValue EmitAnyExpr(const Expr *E,
                     AggValueSlot AggSlot = AggValueSlot::ignored(),
                     bool IgnoreResult = false);

  // EmitVAListRef - Emit a "reference" to a va_list; this is either the address
  // or the value of the expression, depending on how va_list is defined.
  llvm::Value *EmitVAListRef(const Expr *E);

  /// EmitAnyExprToTemp - Similar to EmitAnyExpr(); however, the result will
  /// always be accessible even if no aggregate location is provided.
  RValue EmitAnyExprToTemp(const Expr *E);

  /// EmitAnyExprToMem - Emits the code necessary to evaluate an
  /// arbitrary expression into the given memory location.
  void EmitAnyExprToMem(const Expr *E, llvm::Value *Location,
                        bool IsLocationVolatile,
                        bool IsInitializer);

  /// EmitAggregateCopy - Emit an aggregate copy.
  ///
  /// \param isVolatile - True iff either the source or the destination is
  /// volatile.
  void EmitAggregateCopy(llvm::Value *DestPtr, llvm::Value *SrcPtr,
                         QualType EltTy, bool isVolatile=false);

  /// StartBlock - Start a new block named N. If the insert block is a dummy
  /// block, reuse it.
  void StartBlock(const char *N);

  /// GetAddrOfStaticLocalVar - Return the address of a static local variable.
  llvm::Constant *GetAddrOfStaticLocalVar(const VarDecl *BVD) {
    return cast<llvm::Constant>(GetAddrOfLocalVar(BVD));
  }

  /// GetAddrOfLocalVar - Return the address of a local variable.
  llvm::Value *GetAddrOfLocalVar(const VarDecl *VD) {
    llvm::Value *Res = LocalDeclMap[VD];
    assert(Res && "Invalid argument to GetAddrOfLocalVar(), no decl!");
    return Res;
  }

  /// getAccessedFieldNo - Given an encoded value and a result number, return
  /// the input field number being accessed.
  static unsigned getAccessedFieldNo(unsigned Idx, const llvm::Constant *Elts);

  llvm::BlockAddress *GetAddrOfLabel(const LabelStmt *L);
  llvm::BasicBlock *GetIndirectGotoBlock();

  /// EmitNullInitialization - Generate code to set a value of the given type
  /// to null. If the type contains data member pointers, they will be
  /// initialized to -1 in accordance with the Itanium C++ ABI.
  void EmitNullInitialization(llvm::Value *DestPtr, QualType Ty);

  // EmitVAArg - Generate code to get an argument from the passed in pointer
  // and update it accordingly. The return value is a pointer to the argument.
  // FIXME: We should be able to get rid of this method and use the va_arg
  // instruction in LLVM instead once it works well enough.
  llvm::Value *EmitVAArg(llvm::Value *VAListAddr, QualType Ty);

  /// EmitVLASize - Generate code for any VLA size expressions that might occur
  /// in a variably modified type. If Ty is a VLA, will return the value that
  /// corresponds to the size in bytes of the VLA type. Will return 0 otherwise.
  ///
  /// This function can be called with a null (unreachable) insert point.
  llvm::Value *EmitVLASize(QualType Ty);

  // GetVLASize - Returns an LLVM value that corresponds to the size in bytes
  // of a variable length array type.
  llvm::Value *GetVLASize(const VariableArrayType *);

  /// LoadCXXThis - Load the value of 'this'. This function is only valid
  /// while generating code for a C++ member function.
  llvm::Value *LoadCXXThis() {
    assert(CXXThisValue && "no 'this' value for this function");
    return CXXThisValue;
  }

  /// LoadCXXVTT - Load the VTT parameter passed to base constructors and
  /// destructors that have virtual bases.
  llvm::Value *LoadCXXVTT() {
    assert(CXXVTTValue && "no VTT value for this function");
    return CXXVTTValue;
  }

  /// GetAddressOfDirectBaseInCompleteClass - Convert the given pointer to a
  /// complete class to the given direct base.
  llvm::Value *
  GetAddressOfDirectBaseInCompleteClass(llvm::Value *Value,
                                        const CXXRecordDecl *Derived,
                                        const CXXRecordDecl *Base,
                                        bool BaseIsVirtual);

  /// GetAddressOfBaseClass - This function will add the necessary delta to the
  /// load of 'this' and return the address of the base class.
  llvm::Value *GetAddressOfBaseClass(llvm::Value *Value,
                                     const CXXRecordDecl *Derived,
                                     CastExpr::path_const_iterator PathBegin,
                                     CastExpr::path_const_iterator PathEnd,
                                     bool NullCheckValue);

  llvm::Value *GetAddressOfDerivedClass(llvm::Value *Value,
                                        const CXXRecordDecl *Derived,
                                        CastExpr::path_const_iterator PathBegin,
                                        CastExpr::path_const_iterator PathEnd,
                                        bool NullCheckValue);

  llvm::Value *GetVirtualBaseClassOffset(llvm::Value *This,
                                         const CXXRecordDecl *ClassDecl,
                                         const CXXRecordDecl *BaseClassDecl);

  void EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                      CXXCtorType CtorType,
                                      const FunctionArgList &Args);
  void EmitCXXConstructorCall(const CXXConstructorDecl *D, CXXCtorType Type,
                              bool ForVirtualBase, llvm::Value *This,
                              CallExpr::const_arg_iterator ArgBeg,
                              CallExpr::const_arg_iterator ArgEnd);

  void EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                  const ConstantArrayType *ArrayTy,
                                  llvm::Value *ArrayPtr,
                                  CallExpr::const_arg_iterator ArgBeg,
                                  CallExpr::const_arg_iterator ArgEnd,
                                  bool ZeroInitialization = false);

  void EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                  llvm::Value *NumElements,
                                  llvm::Value *ArrayPtr,
                                  CallExpr::const_arg_iterator ArgBeg,
                                  CallExpr::const_arg_iterator ArgEnd,
                                  bool ZeroInitialization = false);

  void EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
                                 const ArrayType *Array,
                                 llvm::Value *This);

  void EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
                                 llvm::Value *NumElements,
                                 llvm::Value *This);

  llvm::Function *GenerateCXXAggrDestructorHelper(const CXXDestructorDecl *D,
                                                  const ArrayType *Array,
                                                  llvm::Value *This);

  void EmitCXXDestructorCall(const CXXDestructorDecl *D, CXXDtorType Type,
                             bool ForVirtualBase, llvm::Value *This);

  void EmitNewArrayInitializer(const CXXNewExpr *E, llvm::Value *NewPtr,
                               llvm::Value *NumElements);

  void EmitCXXTemporary(const CXXTemporary *Temporary, llvm::Value *Ptr);

  llvm::Value *EmitCXXNewExpr(const CXXNewExpr *E);
  void EmitCXXDeleteExpr(const CXXDeleteExpr *E);

  void EmitDeleteCall(const FunctionDecl *DeleteFD, llvm::Value *Ptr,
                      QualType DeleteTy);

  llvm::Value* EmitCXXTypeidExpr(const CXXTypeidExpr *E);
  llvm::Value *EmitDynamicCast(llvm::Value *V, const CXXDynamicCastExpr *DCE);

  void EmitCheck(llvm::Value *, unsigned Size);

  llvm::Value *EmitScalarPrePostIncDec(const
UnaryOperator *E, LValue LV, 1216 bool isInc, bool isPre); 1217 ComplexPairTy EmitComplexPrePostIncDec(const UnaryOperator *E, LValue LV, 1218 bool isInc, bool isPre); 1219 //===--------------------------------------------------------------------===// 1220 // Declaration Emission 1221 //===--------------------------------------------------------------------===// 1222 1223 /// EmitDecl - Emit a declaration. 1224 /// 1225 /// This function can be called with a null (unreachable) insert point. 1226 void EmitDecl(const Decl &D); 1227 1228 /// EmitBlockVarDecl - Emit a block variable declaration. 1229 /// 1230 /// This function can be called with a null (unreachable) insert point. 1231 void EmitBlockVarDecl(const VarDecl &D); 1232 1233 typedef void SpecialInitFn(CodeGenFunction &Init, const VarDecl &D, 1234 llvm::Value *Address); 1235 1236 /// EmitLocalBlockVarDecl - Emit a local block variable declaration. 1237 /// 1238 /// This function can be called with a null (unreachable) insert point. 1239 void EmitLocalBlockVarDecl(const VarDecl &D, SpecialInitFn *SpecialInit = 0); 1240 1241 void EmitStaticBlockVarDecl(const VarDecl &D, 1242 llvm::GlobalValue::LinkageTypes Linkage); 1243 1244 /// EmitParmDecl - Emit a ParmVarDecl or an ImplicitParamDecl. 1245 void EmitParmDecl(const VarDecl &D, llvm::Value *Arg); 1246 1247 //===--------------------------------------------------------------------===// 1248 // Statement Emission 1249 //===--------------------------------------------------------------------===// 1250 1251 /// EmitStopPoint - Emit a debug stoppoint if we are emitting debug info. 1252 void EmitStopPoint(const Stmt *S); 1253 1254 /// EmitStmt - Emit the code for the statement \arg S. It is legal to call 1255 /// this function even if there is no current insertion point. 1256 /// 1257 /// This function may clear the current insertion point; callers should use 1258 /// EnsureInsertPoint if they wish to subsequently generate code without first 1259 /// calling EmitBlock, EmitBranch, or EmitStmt. 1260 void EmitStmt(const Stmt *S); 1261 1262 /// EmitSimpleStmt - Try to emit a "simple" statement which does not 1263 /// necessarily require an insertion point or debug information; typically 1264 /// because the statement amounts to a jump or a container of other 1265 /// statements. 1266 /// 1267 /// \return True if the statement was handled. 1268 bool EmitSimpleStmt(const Stmt *S); 1269 1270 RValue EmitCompoundStmt(const CompoundStmt &S, bool GetLast = false, 1271 AggValueSlot AVS = AggValueSlot::ignored()); 1272 1273 /// EmitLabel - Emit the block for the given label. It is legal to call this 1274 /// function even if there is no current insertion point. 1275 void EmitLabel(const LabelStmt &S); // helper for EmitLabelStmt. 
1276 1277 void EmitLabelStmt(const LabelStmt &S); 1278 void EmitGotoStmt(const GotoStmt &S); 1279 void EmitIndirectGotoStmt(const IndirectGotoStmt &S); 1280 void EmitIfStmt(const IfStmt &S); 1281 void EmitWhileStmt(const WhileStmt &S); 1282 void EmitDoStmt(const DoStmt &S); 1283 void EmitForStmt(const ForStmt &S); 1284 void EmitReturnStmt(const ReturnStmt &S); 1285 void EmitDeclStmt(const DeclStmt &S); 1286 void EmitBreakStmt(const BreakStmt &S); 1287 void EmitContinueStmt(const ContinueStmt &S); 1288 void EmitSwitchStmt(const SwitchStmt &S); 1289 void EmitDefaultStmt(const DefaultStmt &S); 1290 void EmitCaseStmt(const CaseStmt &S); 1291 void EmitCaseStmtRange(const CaseStmt &S); 1292 void EmitAsmStmt(const AsmStmt &S); 1293 1294 void EmitObjCForCollectionStmt(const ObjCForCollectionStmt &S); 1295 void EmitObjCAtTryStmt(const ObjCAtTryStmt &S); 1296 void EmitObjCAtThrowStmt(const ObjCAtThrowStmt &S); 1297 void EmitObjCAtSynchronizedStmt(const ObjCAtSynchronizedStmt &S); 1298 1299 llvm::Constant *getUnwindResumeOrRethrowFn(); 1300 void EnterCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock = false); 1301 void ExitCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock = false); 1302 1303 void EmitCXXTryStmt(const CXXTryStmt &S); 1304 1305 //===--------------------------------------------------------------------===// 1306 // LValue Expression Emission 1307 //===--------------------------------------------------------------------===// 1308 1309 /// GetUndefRValue - Get an appropriate 'undef' rvalue for the given type. 1310 RValue GetUndefRValue(QualType Ty); 1311 1312 /// EmitUnsupportedRValue - Emit a dummy r-value using the type of E 1313 /// and issue an ErrorUnsupported style diagnostic (using the 1314 /// provided Name). 1315 RValue EmitUnsupportedRValue(const Expr *E, 1316 const char *Name); 1317 1318 /// EmitUnsupportedLValue - Emit a dummy l-value using the type of E and issue 1319 /// an ErrorUnsupported style diagnostic (using the provided Name). 1320 LValue EmitUnsupportedLValue(const Expr *E, 1321 const char *Name); 1322 1323 /// EmitLValue - Emit code to compute a designator that specifies the location 1324 /// of the expression. 1325 /// 1326 /// This can return one of two things: a simple address or a bitfield 1327 /// reference. In either case, the LLVM Value* in the LValue structure is 1328 /// guaranteed to be an LLVM pointer type. 1329 /// 1330 /// If this returns a bitfield reference, nothing about the pointee type of 1331 /// the LLVM value is known: For example, it may not be a pointer to an 1332 /// integer. 1333 /// 1334 /// If this returns a normal address, and if the lvalue's C type is fixed 1335 /// size, this method guarantees that the returned pointer type will point to 1336 /// an LLVM type of the same size of the lvalue's type. If the lvalue has a 1337 /// variable length type, this is not possible. 1338 /// 1339 LValue EmitLValue(const Expr *E); 1340 1341 /// EmitCheckedLValue - Same as EmitLValue but additionally we generate 1342 /// checking code to guard against undefined behavior. This is only 1343 /// suitable when we know that the address will be used to access the 1344 /// object. 1345 LValue EmitCheckedLValue(const Expr *E); 1346 1347 /// EmitLoadOfScalar - Load a scalar value from an address, taking 1348 /// care to appropriately convert from the memory representation to 1349 /// the LLVM value representation. 
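  ///
  /// For example (editor's illustration, not in the original header), a
  /// scalar copy from 'Src' to 'Dst' of type 'Ty' could be written as
  ///
  ///   llvm::Value *V = EmitLoadOfScalar(Src, /*Volatile=*/false, Align, Ty);
  ///   EmitStoreOfScalar(V, Dst, /*Volatile=*/false, Align, Ty);
  ///
  /// with 'Src', 'Dst', 'Align', and 'Ty' supplied by the caller.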
  llvm::Value *EmitLoadOfScalar(llvm::Value *Addr, bool Volatile,
                                unsigned Alignment, QualType Ty);

  /// EmitStoreOfScalar - Store a scalar value to an address, taking
  /// care to appropriately convert from the memory representation to
  /// the LLVM value representation.
  void EmitStoreOfScalar(llvm::Value *Value, llvm::Value *Addr,
                         bool Volatile, unsigned Alignment, QualType Ty);

  /// EmitLoadOfLValue - Given an expression that represents a value lvalue,
  /// this method emits the address of the lvalue, then loads the result as an
  /// rvalue, returning the rvalue.
  RValue EmitLoadOfLValue(LValue V, QualType LVType);
  RValue EmitLoadOfExtVectorElementLValue(LValue V, QualType LVType);
  RValue EmitLoadOfBitfieldLValue(LValue LV, QualType ExprType);
  RValue EmitLoadOfPropertyRefLValue(LValue LV, QualType ExprType);
  RValue EmitLoadOfKVCRefLValue(LValue LV, QualType ExprType);


  /// EmitStoreThroughLValue - Store the specified rvalue into the specified
  /// lvalue, where both are guaranteed to have the same type, and that type
  /// is 'Ty'.
  void EmitStoreThroughLValue(RValue Src, LValue Dst, QualType Ty);
  void EmitStoreThroughExtVectorComponentLValue(RValue Src, LValue Dst,
                                                QualType Ty);
  void EmitStoreThroughPropertyRefLValue(RValue Src, LValue Dst, QualType Ty);
  void EmitStoreThroughKVCRefLValue(RValue Src, LValue Dst, QualType Ty);

  /// EmitStoreThroughBitfieldLValue - Store Src into Dst with the same
  /// constraints as EmitStoreThroughLValue.
  ///
  /// \param Result [out] - If non-null, this will be set to a Value* for the
  /// bit-field contents after the store, appropriate for use as the result of
  /// an assignment to the bit-field.
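  ///
  /// Illustrative use (editor's sketch): when the value of a bit-field
  /// assignment such as 's.bits = v' is itself needed, the store might be
  /// emitted as
  ///
  ///   llvm::Value *ResultVal;
  ///   EmitStoreThroughBitfieldLValue(RValue::get(V), BitfieldLV, Ty,
  ///                                  &ResultVal);
  ///   // ResultVal now holds the value of the assignment expression
  ///
  /// where 'V', 'BitfieldLV', and 'Ty' stand for caller-provided values.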
  void EmitStoreThroughBitfieldLValue(RValue Src, LValue Dst, QualType Ty,
                                      llvm::Value **Result=0);

  // Note: only available for agg return types
  LValue EmitBinaryOperatorLValue(const BinaryOperator *E);
  LValue EmitCompoundAssignOperatorLValue(const CompoundAssignOperator *E);
  // Note: only available for agg return types
  LValue EmitCallExprLValue(const CallExpr *E);
  // Note: only available for agg return types
  LValue EmitVAArgExprLValue(const VAArgExpr *E);
  LValue EmitDeclRefLValue(const DeclRefExpr *E);
  LValue EmitStringLiteralLValue(const StringLiteral *E);
  LValue EmitObjCEncodeExprLValue(const ObjCEncodeExpr *E);
  LValue EmitPredefinedLValue(const PredefinedExpr *E);
  LValue EmitUnaryOpLValue(const UnaryOperator *E);
  LValue EmitArraySubscriptExpr(const ArraySubscriptExpr *E);
  LValue EmitExtVectorElementExpr(const ExtVectorElementExpr *E);
  LValue EmitMemberExpr(const MemberExpr *E);
  LValue EmitObjCIsaExpr(const ObjCIsaExpr *E);
  LValue EmitCompoundLiteralLValue(const CompoundLiteralExpr *E);
  LValue EmitConditionalOperatorLValue(const ConditionalOperator *E);
  LValue EmitCastLValue(const CastExpr *E);
  LValue EmitNullInitializationLValue(const CXXScalarValueInitExpr *E);

  llvm::Value *EmitIvarOffset(const ObjCInterfaceDecl *Interface,
                              const ObjCIvarDecl *Ivar);
  LValue EmitLValueForAnonRecordField(llvm::Value* Base,
                                      const FieldDecl* Field,
                                      unsigned CVRQualifiers);
  LValue EmitLValueForField(llvm::Value* Base, const FieldDecl* Field,
                            unsigned CVRQualifiers);

  /// EmitLValueForFieldInitialization - Like EmitLValueForField, except that
  /// if the Field is a reference, this will return the address of the reference
  /// and not the address of the value stored in the reference.
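  ///
  /// For instance (editor's note, illustrative only), given
  ///
  ///   struct S { int &r; S(int &i) : r(i) {} };
  ///
  /// emitting the member initializer for 'r' needs the address of the
  /// reference member itself, which is what this entry point returns;
  /// EmitLValueForField would instead form an lvalue for the referenced int.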
  LValue EmitLValueForFieldInitialization(llvm::Value* Base,
                                          const FieldDecl* Field,
                                          unsigned CVRQualifiers);

  LValue EmitLValueForIvar(QualType ObjectTy,
                           llvm::Value* Base, const ObjCIvarDecl *Ivar,
                           unsigned CVRQualifiers);

  LValue EmitLValueForBitfield(llvm::Value* Base, const FieldDecl* Field,
                               unsigned CVRQualifiers);

  LValue EmitBlockDeclRefLValue(const BlockDeclRefExpr *E);

  LValue EmitCXXConstructLValue(const CXXConstructExpr *E);
  LValue EmitCXXBindTemporaryLValue(const CXXBindTemporaryExpr *E);
  LValue EmitCXXExprWithTemporariesLValue(const CXXExprWithTemporaries *E);
  LValue EmitCXXTypeidLValue(const CXXTypeidExpr *E);

  LValue EmitObjCMessageExprLValue(const ObjCMessageExpr *E);
  LValue EmitObjCIvarRefLValue(const ObjCIvarRefExpr *E);
  LValue EmitObjCPropertyRefLValue(const ObjCPropertyRefExpr *E);
  LValue EmitObjCKVCRefLValue(const ObjCImplicitSetterGetterRefExpr *E);
  LValue EmitObjCSuperExprLValue(const ObjCSuperExpr *E);
  LValue EmitStmtExprLValue(const StmtExpr *E);
  LValue EmitPointerToDataMemberBinaryExpr(const BinaryOperator *E);
  LValue EmitObjCSelectorLValue(const ObjCSelectorExpr *E);
  void EmitDeclRefExprDbgValue(const DeclRefExpr *E, llvm::ConstantInt *Init);

  //===--------------------------------------------------------------------===//
  //                        Scalar Expression Emission
  //===--------------------------------------------------------------------===//

  /// EmitCall - Generate a call of the given function, expecting the given
  /// result type, and using the given argument list which specifies both the
  /// LLVM arguments and the types they were derived from.
  ///
  /// \param TargetDecl - If given, the decl of the function in a direct call;
  /// used to set attributes on the call (noreturn, etc.).
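  ///
  /// Hedged sketch of a direct call site ('CGF', 'FnType', 'CalleeFn',
  /// 'TargetDecl', and the CallExpr 'E' are hypothetical locals, not part of
  /// this header), using the QualType overload declared below:
  ///
  ///   RValue RV = CGF.EmitCall(FnType, CalleeFn, ReturnValueSlot(),
  ///                            E->arg_begin(), E->arg_end(), TargetDecl);
  ///   // RV holds the call result in the representation chosen by the ABI.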
  RValue EmitCall(const CGFunctionInfo &FnInfo,
                  llvm::Value *Callee,
                  ReturnValueSlot ReturnValue,
                  const CallArgList &Args,
                  const Decl *TargetDecl = 0,
                  llvm::Instruction **callOrInvoke = 0);

  RValue EmitCall(QualType FnType, llvm::Value *Callee,
                  ReturnValueSlot ReturnValue,
                  CallExpr::const_arg_iterator ArgBeg,
                  CallExpr::const_arg_iterator ArgEnd,
                  const Decl *TargetDecl = 0);
  RValue EmitCallExpr(const CallExpr *E,
                      ReturnValueSlot ReturnValue = ReturnValueSlot());

  llvm::CallSite EmitCallOrInvoke(llvm::Value *Callee,
                                  llvm::Value * const *ArgBegin,
                                  llvm::Value * const *ArgEnd,
                                  const llvm::Twine &Name = "");

  llvm::Value *BuildVirtualCall(const CXXMethodDecl *MD, llvm::Value *This,
                                const llvm::Type *Ty);
  llvm::Value *BuildVirtualCall(const CXXDestructorDecl *DD, CXXDtorType Type,
                                llvm::Value *&This, const llvm::Type *Ty);

  RValue EmitCXXMemberCall(const CXXMethodDecl *MD,
                           llvm::Value *Callee,
                           ReturnValueSlot ReturnValue,
                           llvm::Value *This,
                           llvm::Value *VTT,
                           CallExpr::const_arg_iterator ArgBeg,
                           CallExpr::const_arg_iterator ArgEnd);
  RValue EmitCXXMemberCallExpr(const CXXMemberCallExpr *E,
                               ReturnValueSlot ReturnValue);
  RValue EmitCXXMemberPointerCallExpr(const CXXMemberCallExpr *E,
                                      ReturnValueSlot ReturnValue);

  RValue EmitCXXOperatorMemberCallExpr(const CXXOperatorCallExpr *E,
                                       const CXXMethodDecl *MD,
                                       ReturnValueSlot ReturnValue);

  RValue EmitBuiltinExpr(const FunctionDecl *FD,
                         unsigned BuiltinID, const CallExpr *E);

  RValue EmitBlockCallExpr(const CallExpr *E, ReturnValueSlot ReturnValue);

  /// EmitTargetBuiltinExpr - Emit the given builtin call. Returns 0 if the call
  /// is unhandled by the current target.
  llvm::Value *EmitTargetBuiltinExpr(unsigned BuiltinID, const CallExpr *E);

  llvm::Value *EmitARMBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
  llvm::Value *EmitNeonCall(llvm::Function *F,
                            llvm::SmallVectorImpl<llvm::Value*> &O,
                            const char *name, bool splat = false,
                            unsigned shift = 0, bool rightshift = false);
  llvm::Value *EmitNeonSplat(llvm::Value *V, llvm::Constant *Idx,
                             bool widen = false);
  llvm::Value *EmitNeonShiftVector(llvm::Value *V, const llvm::Type *Ty,
                                   bool negateForRightShift);

  llvm::Value *EmitX86BuiltinExpr(unsigned BuiltinID, const CallExpr *E);
  llvm::Value *EmitPPCBuiltinExpr(unsigned BuiltinID, const CallExpr *E);

  llvm::Value *EmitObjCProtocolExpr(const ObjCProtocolExpr *E);
  llvm::Value *EmitObjCStringLiteral(const ObjCStringLiteral *E);
  llvm::Value *EmitObjCSelectorExpr(const ObjCSelectorExpr *E);
  RValue EmitObjCMessageExpr(const ObjCMessageExpr *E,
                             ReturnValueSlot Return = ReturnValueSlot());
  RValue EmitObjCPropertyGet(const Expr *E,
                             ReturnValueSlot Return = ReturnValueSlot());
  RValue EmitObjCSuperPropertyGet(const Expr *Exp, const Selector &S,
                                  ReturnValueSlot Return = ReturnValueSlot());
  void EmitObjCPropertySet(const Expr *E, RValue Src);
  void EmitObjCSuperPropertySet(const Expr *E, const Selector &S, RValue Src);

  /// EmitReferenceBindingToExpr - Emits a reference binding to the passed in
  /// expression. Will emit a temporary variable if E is not an LValue.
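  ///
  /// Hedged sketch (hypothetical caller emitting 'const T &Ref = Init;';
  /// 'CGF', 'Init', and 'RefVarDecl' are assumed locals):
  ///
  ///   RValue RV = CGF.EmitReferenceBindingToExpr(Init, &RefVarDecl);
  ///   // RV carries the address the reference binds to; if Init was not an
  ///   // lvalue, a temporary has been emitted to hold its value.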
  RValue EmitReferenceBindingToExpr(const Expr* E,
                                    const NamedDecl *InitializedDecl);

  //===--------------------------------------------------------------------===//
  //                           Expression Emission
  //===--------------------------------------------------------------------===//

  // Expressions are broken into three classes: scalar, complex, aggregate.

  /// EmitScalarExpr - Emit the computation of the specified expression of LLVM
  /// scalar type, returning the result.
  llvm::Value *EmitScalarExpr(const Expr *E, bool IgnoreResultAssign = false);

  /// EmitScalarConversion - Emit a conversion from the specified type to the
  /// specified destination type, both of which are LLVM scalar types.
  llvm::Value *EmitScalarConversion(llvm::Value *Src, QualType SrcTy,
                                    QualType DstTy);

  /// EmitComplexToScalarConversion - Emit a conversion from the specified
  /// complex type to the specified destination type, where the destination type
  /// is an LLVM scalar type.
  llvm::Value *EmitComplexToScalarConversion(ComplexPairTy Src, QualType SrcTy,
                                             QualType DstTy);

  /// EmitAggExpr - Emit the computation of the specified expression
  /// of aggregate type. The result is computed into the given slot,
  /// which may be null to indicate that the value is not needed.
  void EmitAggExpr(const Expr *E, AggValueSlot AS, bool IgnoreResult = false);

  /// EmitAggExprToLValue - Emit the computation of the specified expression of
  /// aggregate type into a temporary LValue.
  LValue EmitAggExprToLValue(const Expr *E);

  /// EmitGCMemmoveCollectable - Emit special API for structs with object
  /// pointers.
  void EmitGCMemmoveCollectable(llvm::Value *DestPtr, llvm::Value *SrcPtr,
                                QualType Ty);

  /// EmitComplexExpr - Emit the computation of the specified expression of
  /// complex type, returning the result.
  ComplexPairTy EmitComplexExpr(const Expr *E, bool IgnoreReal = false,
                                bool IgnoreImag = false,
                                bool IgnoreRealAssign = false,
                                bool IgnoreImagAssign = false);

  /// EmitComplexExprIntoAddr - Emit the computation of the specified expression
  /// of complex type, storing into the specified Value*.
  void EmitComplexExprIntoAddr(const Expr *E, llvm::Value *DestAddr,
                               bool DestIsVolatile);

  /// StoreComplexToAddr - Store a complex number into the specified address.
  void StoreComplexToAddr(ComplexPairTy V, llvm::Value *DestAddr,
                          bool DestIsVolatile);
  /// LoadComplexFromAddr - Load a complex number from the specified address.
  ComplexPairTy LoadComplexFromAddr(llvm::Value *SrcAddr, bool SrcIsVolatile);

  /// CreateStaticBlockVarDecl - Create a zero-initialized LLVM global for a
  /// static block var decl.
  llvm::GlobalVariable *CreateStaticBlockVarDecl(const VarDecl &D,
                                                 const char *Separator,
                                                 llvm::GlobalValue::LinkageTypes Linkage);

  /// AddInitializerToGlobalBlockVarDecl - Add the initializer for 'D' to the
  /// global variable that has already been created for it. If the initializer
  /// has a different type than GV does, this may free GV and return a different
  /// one. Otherwise it just returns GV.
  llvm::GlobalVariable *
  AddInitializerToGlobalBlockVarDecl(const VarDecl &D,
                                     llvm::GlobalVariable *GV);

  /// EmitCXXGlobalVarDeclInit - Create the initializer for a C++
  /// variable with global storage.
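  ///
  /// Hedged sketch of the assumed call pattern (inferred only from the
  /// surrounding declarations; 'CGF', 'D', and 'DeclPtr' are hypothetical
  /// locals): the per-variable init function hands the variable and its
  /// already-created global to this entry point, and any at-exit destruction
  /// is presumably routed through EmitCXXGlobalDtorRegistration (declared
  /// below):
  ///
  ///   CGF.EmitCXXGlobalVarDeclInit(D, DeclPtr);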
  void EmitCXXGlobalVarDeclInit(const VarDecl &D, llvm::Constant *DeclPtr);

  /// EmitCXXGlobalDtorRegistration - Emits a call to register the global ptr
  /// with the C++ runtime so that its destructor will be called at exit.
  void EmitCXXGlobalDtorRegistration(llvm::Constant *DtorFn,
                                     llvm::Constant *DeclPtr);

  void EmitCXXStaticLocalInit(const VarDecl &D, llvm::GlobalVariable *DeclPtr);

  /// GenerateCXXGlobalInitFunc - Generates code for initializing global
  /// variables.
  void GenerateCXXGlobalInitFunc(llvm::Function *Fn,
                                 llvm::Constant **Decls,
                                 unsigned NumDecls);

  /// GenerateCXXGlobalDtorFunc - Generates code for destroying global
  /// variables.
  void GenerateCXXGlobalDtorFunc(llvm::Function *Fn,
                                 const std::vector<std::pair<llvm::WeakVH,
                                 llvm::Constant*> > &DtorsAndObjects);

  void GenerateCXXGlobalVarDeclInitFunc(llvm::Function *Fn, const VarDecl *D);

  void EmitCXXConstructExpr(const CXXConstructExpr *E, AggValueSlot Dest);

  RValue EmitCXXExprWithTemporaries(const CXXExprWithTemporaries *E,
                                    AggValueSlot Slot = AggValueSlot::ignored());

  void EmitCXXThrowExpr(const CXXThrowExpr *E);

  //===--------------------------------------------------------------------===//
  //                             Internal Helpers
  //===--------------------------------------------------------------------===//

  /// ContainsLabel - Return true if the statement contains a label in it. If
  /// this statement is not executed normally, then the absence of a label
  /// means that we can just remove the code.
  static bool ContainsLabel(const Stmt *S, bool IgnoreCaseStmts = false);

  /// ConstantFoldsToSimpleInteger - If the specified expression does not fold
  /// to a constant, or if it does but contains a label, return 0. If it
  /// constant folds to 'true' and does not contain a label, return 1; if it
  /// constant folds to 'false' and does not contain a label, return -1.
  int ConstantFoldsToSimpleInteger(const Expr *Cond);

  /// EmitBranchOnBoolExpr - Emit a branch on a boolean condition (e.g. for an
  /// if statement) to the specified blocks. Based on the condition, this might
  /// try to simplify the codegen of the conditional based on the branch.
  void EmitBranchOnBoolExpr(const Expr *Cond, llvm::BasicBlock *TrueBlock,
                            llvm::BasicBlock *FalseBlock);

  /// getTrapBB - Create a basic block that will call the trap intrinsic. We'll
  /// generate a branch around the created basic block as necessary.
  llvm::BasicBlock *getTrapBB();

  /// EmitCallArg - Emit a single call argument.
  RValue EmitCallArg(const Expr *E, QualType ArgType);

  /// EmitDelegateCallArg - We are performing a delegate call; that
  /// is, the current function is delegating to another one. Produce
  /// an r-value suitable for passing the given parameter.
  RValue EmitDelegateCallArg(const VarDecl *Param);

private:
  void EmitReturnOfRValue(RValue RV, QualType Ty);

  /// ExpandTypeFromArgs - Reconstruct a structure of type \arg Ty
  /// from function arguments into \arg Dst. See ABIArgInfo::Expand.
  ///
  /// \param AI - The first function argument of the expansion.
  /// \return The argument following the last expanded function
  /// argument.
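  ///
  /// Hedged sketch of the expected calling pattern (a hypothetical prologue
  /// loop mapping IR arguments back to parameter declarations; 'ParamTy' and
  /// 'ParamLV' are assumed locals):
  ///
  ///   llvm::Function::arg_iterator AI = CurFn->arg_begin();
  ///   // Each expanded parameter consumes one or more IR arguments; the
  ///   // returned iterator is where the next parameter resumes.
  ///   AI = ExpandTypeFromArgs(ParamTy, ParamLV, AI);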
  llvm::Function::arg_iterator
  ExpandTypeFromArgs(QualType Ty, LValue Dst,
                     llvm::Function::arg_iterator AI);

  /// ExpandTypeToArgs - Expand an RValue \arg Src, with the LLVM type for \arg
  /// Ty, into individual arguments on the provided vector \arg Args. See
  /// ABIArgInfo::Expand.
  void ExpandTypeToArgs(QualType Ty, RValue Src,
                        llvm::SmallVector<llvm::Value*, 16> &Args);

  llvm::Value* EmitAsmInput(const AsmStmt &S,
                            const TargetInfo::ConstraintInfo &Info,
                            const Expr *InputExpr, std::string &ConstraintStr);

  llvm::Value* EmitAsmInputLValue(const AsmStmt &S,
                                  const TargetInfo::ConstraintInfo &Info,
                                  LValue InputValue, QualType InputType,
                                  std::string &ConstraintStr);

  /// EmitCallArgs - Emit call arguments for a function.
  /// The CallArgTypeInfo parameter is used for iterating over the known
  /// argument types of the function being called.
  template<typename T>
  void EmitCallArgs(CallArgList& Args, const T* CallArgTypeInfo,
                    CallExpr::const_arg_iterator ArgBeg,
                    CallExpr::const_arg_iterator ArgEnd) {
    CallExpr::const_arg_iterator Arg = ArgBeg;

    // First, use the argument types that the type info knows about.
    if (CallArgTypeInfo) {
      for (typename T::arg_type_iterator I = CallArgTypeInfo->arg_type_begin(),
           E = CallArgTypeInfo->arg_type_end(); I != E; ++I, ++Arg) {
        assert(Arg != ArgEnd && "Running over edge of argument list!");
        QualType ArgType = *I;

        assert(getContext().getCanonicalType(ArgType.getNonReferenceType()).
               getTypePtr() ==
               getContext().getCanonicalType(Arg->getType()).getTypePtr() &&
               "type mismatch in call argument!");

        Args.push_back(std::make_pair(EmitCallArg(*Arg, ArgType),
                                      ArgType));
      }

      // Either we've emitted all the call args, or we have a call to a
      // variadic function.
      assert((Arg == ArgEnd || CallArgTypeInfo->isVariadic()) &&
             "Extra arguments in non-variadic function!");
    }

    // If we still have any arguments, emit them using the type of the argument.
    for (; Arg != ArgEnd; ++Arg) {
      QualType ArgType = Arg->getType();
      Args.push_back(std::make_pair(EmitCallArg(*Arg, ArgType),
                                    ArgType));
    }
  }

  const TargetCodeGenInfo &getTargetHooks() const {
    return CGM.getTargetCodeGenInfo();
  }

  void EmitDeclMetadata();
};

/// CGBlockInfo - Information to generate a block literal.
class CGBlockInfo {
public:
  /// Name - The name of the block, kind of.
  const char *Name;

  /// DeclRefs - Variables from parent scopes that have been
  /// imported into this block.
  llvm::SmallVector<const BlockDeclRefExpr *, 8> DeclRefs;

  /// InnerBlocks - This block and the blocks it encloses.
  llvm::SmallPtrSet<const DeclContext *, 4> InnerBlocks;

  /// CXXThisRef - Non-null if 'this' was required somewhere, in
  /// which case this is that expression.
  const CXXThisExpr *CXXThisRef;

  /// NeedsObjCSelf - True if something in this block has an implicit
  /// reference to 'self'.
  bool NeedsObjCSelf;

  /// These are initialized by GenerateBlockFunction.
  bool BlockHasCopyDispose;
  CharUnits BlockSize;
  CharUnits BlockAlign;
  llvm::SmallVector<const Expr*, 8> BlockLayout;

  CGBlockInfo(const char *Name);
};

}  // end namespace CodeGen
}  // end namespace clang

#endif