DeadStoreElimination.cpp revision 98016511932510bd5ec5dad1319922d9daf18991
//===- DeadStoreElimination.cpp - Fast Dead Store Elimination -------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements a trivial dead store elimination that only considers
// basic-block local redundant stores.
//
// FIXME: This should eventually be extended to be a post-dominator tree
// traversal.  Doing so would be pretty trivial.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "dse"
#include "llvm/Transforms/Scalar.h"
#include "llvm/Constants.h"
#include "llvm/Function.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Pass.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Target/TargetData.h"
#include "llvm/Transforms/Utils/Local.h"
using namespace llvm;

STATISTIC(NumFastStores, "Number of stores deleted");
STATISTIC(NumFastOther , "Number of other instrs removed");

namespace {
  /// DSE - Basic-block-local dead store elimination.  Walks each reachable
  /// block, uses MemoryDependenceAnalysis to find stores whose memory is
  /// overwritten before being read, and deletes them (plus any instruction
  /// trees that become trivially dead as a result).
  struct DSE : public FunctionPass {
    static char ID; // Pass identification, replacement for typeid
    DSE() : FunctionPass(ID) {
      initializeDSEPass(*PassRegistry::getPassRegistry());
    }

    /// runOnFunction - Run DSE over every block that is reachable from the
    /// entry.  Returns true if any store or instruction was deleted.
    virtual bool runOnFunction(Function &F) {
      bool Changed = false;

      DominatorTree &DT = getAnalysis<DominatorTree>();

      for (Function::iterator I = F.begin(), E = F.end(); I != E; ++I)
        // Only check non-dead blocks.  Dead blocks may have strange pointer
        // cycles that will confuse alias analysis.
        if (DT.isReachableFromEntry(I))
          Changed |= runOnBasicBlock(*I);
      return Changed;
    }

    bool runOnBasicBlock(BasicBlock &BB);
    bool HandleFree(CallInst *F);
    bool handleEndBlock(BasicBlock &BB);
    bool RemoveUndeadPointers(Value *Ptr, uint64_t killPointerSize,
                              BasicBlock::iterator &BBI,
                              SmallPtrSet<Value*, 64> &deadPointers);
    void DeleteDeadInstruction(Instruction *I,
                               SmallPtrSet<Value*, 64> *deadPointers = 0);


    // getAnalysisUsage - We require dominators, alias analysis, and memory
    // dependence; we preserve the CFG, dominators, and memdep.
    // (The pass used to be phrased in terms of post-dominance frontiers /
    // the Control Dependence Graph; that requirement is gone.)
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.setPreservesCFG();
      AU.addRequired<DominatorTree>();
      AU.addRequired<AliasAnalysis>();
      AU.addRequired<MemoryDependenceAnalysis>();
      AU.addPreserved<DominatorTree>();
      AU.addPreserved<MemoryDependenceAnalysis>();
    }

    uint64_t getPointerSize(Value *V, AliasAnalysis &AA) const;
  };
}

char DSE::ID = 0;
INITIALIZE_PASS_BEGIN(DSE, "dse", "Dead Store Elimination", false, false)
INITIALIZE_PASS_DEPENDENCY(DominatorTree)
INITIALIZE_PASS_DEPENDENCY(MemoryDependenceAnalysis)
INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
INITIALIZE_PASS_END(DSE, "dse", "Dead Store Elimination", false, false)

FunctionPass *llvm::createDeadStoreEliminationPass() { return new DSE(); }

/// hasMemoryWrite - Does this instruction write some memory?  This only
/// returns true for things that we can analyze with other helpers below
/// (getLocForWrite, isRemovable, getPointerOperand).
static bool hasMemoryWrite(Instruction *I) {
  if (isa<StoreInst>(I))
    return true;
  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
    switch (II->getIntrinsicID()) {
    default:
      return false;
    case Intrinsic::memset:
    case Intrinsic::memmove:
    case Intrinsic::memcpy:
    case Intrinsic::init_trampoline:
    case Intrinsic::lifetime_end:
      return true;
    }
  }
  return false;
}

/// getLocForWrite - Return a Location stored to by the specified instruction.
static AliasAnalysis::Location
getLocForWrite(Instruction *Inst, AliasAnalysis &AA) {
  // Plain stores: AA knows how to build the written Location directly.
  if (StoreInst *SI = dyn_cast<StoreInst>(Inst))
    return AA.getLocation(SI);

  if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(Inst)) {
    // memcpy/memmove/memset.
    AliasAnalysis::Location Loc = AA.getLocationForDest(MI);
    // If we don't have target data around, an unknown size in Location means
    // that we should use the size of the pointee type.  This isn't valid for
    // memset/memcpy, which writes more than an i8.
    if (Loc.Size == AliasAnalysis::UnknownSize && AA.getTargetData() == 0)
      return AliasAnalysis::Location();  // Null-Ptr Location == "don't know".
    return Loc;
  }

  IntrinsicInst *II = dyn_cast<IntrinsicInst>(Inst);
  if (II == 0) return AliasAnalysis::Location();

  switch (II->getIntrinsicID()) {
  default: return AliasAnalysis::Location(); // Unhandled intrinsic.
  case Intrinsic::init_trampoline:
    // If we don't have target data around, an unknown size in Location means
    // that we should use the size of the pointee type.  This isn't valid for
    // init.trampoline, which writes more than an i8.
    if (AA.getTargetData() == 0) return AliasAnalysis::Location();

    // FIXME: We don't know the size of the trampoline, so we can't really
    // handle it here.
    return AliasAnalysis::Location(II->getArgOperand(0));
  case Intrinsic::lifetime_end: {
    // lifetime.end(len, ptr): the "written" location is ptr with the given
    // constant length.
    uint64_t Len = cast<ConstantInt>(II->getArgOperand(0))->getZExtValue();
    return AliasAnalysis::Location(II->getArgOperand(1), Len);
  }
  }
}

/// isRemovable - If the value of this instruction and the memory it writes to
/// is unused, may we delete this instruction?
static bool isRemovable(Instruction *I) {
  // Don't remove volatile stores.
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return !SI->isVolatile();

  // Everything else that satisfies hasMemoryWrite() is an intrinsic.
  IntrinsicInst *II = cast<IntrinsicInst>(I);
  switch (II->getIntrinsicID()) {
  default: assert(0 && "doesn't pass 'hasMemoryWrite' predicate");
  case Intrinsic::lifetime_end:
    // Never remove dead lifetime_end's, e.g. because it is followed by a
    // free.
    return false;
  case Intrinsic::init_trampoline:
    // Always safe to remove init_trampoline.
    return true;

  case Intrinsic::memset:
  case Intrinsic::memmove:
  case Intrinsic::memcpy:
    // Don't remove volatile memory intrinsics.
    return !cast<MemIntrinsic>(II)->isVolatile();
  }
}

/// getPointerOperand - Return the pointer that is being written to by the
/// given memory-writing instruction (see hasMemoryWrite).
static Value *getPointerOperand(Instruction *I) {
  assert(hasMemoryWrite(I));
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->getPointerOperand();
  if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I))
    return MI->getArgOperand(0);  // Destination of memset/memcpy/memmove.

  IntrinsicInst *II = cast<IntrinsicInst>(I);
  switch (II->getIntrinsicID()) {
  default: assert(false && "Unexpected intrinsic!");
  case Intrinsic::init_trampoline:
    return II->getArgOperand(0);
  case Intrinsic::lifetime_end:
    return II->getArgOperand(1);  // Arg 0 is the length, arg 1 the pointer.
  }
}

/// isCompleteOverwrite - Return true if a store to the 'Later' location
/// completely overwrites a store to the 'Earlier' location.
static bool isCompleteOverwrite(const AliasAnalysis::Location &Later,
                                const AliasAnalysis::Location &Earlier,
                                AliasAnalysis &AA) {
  const Value *P1 = Later.Ptr->stripPointerCasts();
  const Value *P2 = Earlier.Ptr->stripPointerCasts();

  // Make sure that the start pointers are the same.  Note this only handles
  // identical base pointers; offset stores into the same object are not
  // recognized here.
  if (P1 != P2)
    return false;

  // If we don't know the sizes of either access, then we can't do a comparison.
  if (Later.Size == AliasAnalysis::UnknownSize ||
      Earlier.Size == AliasAnalysis::UnknownSize) {
    // If we have no TargetData information around, then the size of the store
    // is inferrable from the pointee type.  If they are the same type, then we
    // know that the store is safe.
    if (AA.getTargetData() == 0)
      return Later.Ptr->getType() == Earlier.Ptr->getType();
    return false;
  }

  // Make sure that the Later size is >= the Earlier size: same start pointer
  // plus at-least-as-large a write means the earlier store is fully covered.
  if (Later.Size < Earlier.Size)
    return false;

  return true;
}

/// runOnBasicBlock - Top-down scan of one block looking for dead stores:
/// stores whose location is completely overwritten before any intervening
/// read, trivially-redundant store-of-just-loaded-value, and stores made
/// dead by a following free().  Returns true if anything was deleted.
bool DSE::runOnBasicBlock(BasicBlock &BB) {
  MemoryDependenceAnalysis &MD = getAnalysis<MemoryDependenceAnalysis>();
  AliasAnalysis &AA = getAnalysis<AliasAnalysis>();

  bool MadeChange = false;

  // Do a top-down walk on the BB.
  for (BasicBlock::iterator BBI = BB.begin(), BBE = BB.end(); BBI != BBE; ) {
    Instruction *Inst = BBI++;  // Advance early; Inst may be deleted below.

    // Handle 'free' calls specially.
    if (CallInst *F = isFreeCall(Inst)) {
      MadeChange |= HandleFree(F);
      continue;
    }

    // If we find something that writes memory, get its memory dependence.
    if (!hasMemoryWrite(Inst))
      continue;

    MemDepResult InstDep = MD.getDependency(Inst);

    // Ignore non-local store liveness.
    // FIXME: cross-block DSE would be fun. :)
    if (InstDep.isNonLocal() ||
        // Ignore self dependence, which happens in the entry block of the
        // function.
        InstDep.getInst() == Inst)
      continue;

    // If we're storing the same value back to a pointer that we just
    // loaded from, then the store can be removed.
    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      if (LoadInst *DepLoad = dyn_cast<LoadInst>(InstDep.getInst())) {
        if (SI->getPointerOperand() == DepLoad->getPointerOperand() &&
            SI->getOperand(0) == DepLoad && !SI->isVolatile()) {
          // DeleteDeadInstruction can delete the current instruction.  Save BBI
          // in case we need it.
          // A WeakVH nulls itself out if the instruction it tracks is deleted.
          WeakVH NextInst(BBI);

          DeleteDeadInstruction(SI);

          if (NextInst == 0)  // Next instruction deleted.
            BBI = BB.begin();
          else if (BBI != BB.begin())  // Revisit this instruction if possible.
            --BBI;
          ++NumFastStores;
          MadeChange = true;
          continue;
        }
      }
    }

    // Figure out what location is being stored to.
    AliasAnalysis::Location Loc = getLocForWrite(Inst, AA);

    // If we didn't get a useful location, fail.
    if (Loc.Ptr == 0)
      continue;

    while (!InstDep.isNonLocal()) {
      // Get the memory clobbered by the instruction we depend on.  MemDep will
      // skip any instructions that 'Loc' clearly doesn't interact with.  If we
      // end up depending on a may- or must-aliased load, then we can't optimize
      // away the store and we bail out.  However, if we depend on something
      // that overwrites the memory location we *can* potentially optimize it.
      //
      // Find out what memory location the dependent instruction stores.
      Instruction *DepWrite = InstDep.getInst();
      AliasAnalysis::Location DepLoc = getLocForWrite(DepWrite, AA);
      // If we didn't get a useful location, bail out.
      if (DepLoc.Ptr == 0)
        break;

      // If we find a removable write that is completely obliterated by the
      // store to 'Loc' then we can remove it.
      if (isRemovable(DepWrite) && isCompleteOverwrite(Loc, DepLoc, AA)) {
        // Delete the store and now-dead instructions that feed it.
        DeleteDeadInstruction(DepWrite);
        ++NumFastStores;
        MadeChange = true;

        // DeleteDeadInstruction can delete the current instruction in loop
        // cases, reset BBI.
        BBI = Inst;
        if (BBI != BB.begin())
          --BBI;
        break;
      }

      // If this is a may-aliased store that is clobbering the store value, we
      // can keep searching past it for another must-aliased pointer that stores
      // to the same location.  For example, in:
      //   store -> P
      //   store -> Q
      //   store -> P
      // we can remove the first store to P even though we don't know if P and Q
      // alias.
      if (DepWrite == &BB.front()) break;

      // Can't look past this instruction if it might read 'Loc'.
      if (AA.getModRefInfo(DepWrite, Loc) & AliasAnalysis::Ref)
        break;

      InstDep = MD.getPointerDependencyFrom(Loc, false, DepWrite, &BB);
    }
  }

  // If this block ends in a return, unwind, or unreachable, all allocas are
  // dead at its end, which means stores to them are also dead.
  if (BB.getTerminator()->getNumSuccessors() == 0)
    MadeChange |= handleEndBlock(BB);

  return MadeChange;
}

/// HandleFree - Handle frees of entire structures whose dependency is a store
/// to a field of that structure.  Repeatedly deletes stores that are the
/// dependency of the free and must-alias the freed pointer; returns true if
/// every dependency encountered was deletable.
bool DSE::HandleFree(CallInst *F) {
  AliasAnalysis &AA = getAnalysis<AliasAnalysis>();
  MemoryDependenceAnalysis &MD = getAnalysis<MemoryDependenceAnalysis>();

  MemDepResult Dep = MD.getDependency(F);
  do {
    if (Dep.isNonLocal()) return false;

    Instruction *Dependency = Dep.getInst();
    if (!hasMemoryWrite(Dependency) || !isRemovable(Dependency))
      return false;

    Value *DepPointer = getPointerOperand(Dependency)->getUnderlyingObject();

    // Check for aliasing.  Size 1 is used for both queries: if even one byte
    // must-aliases the freed pointer's first byte, the store targets the
    // freed object.
    if (AA.alias(F->getArgOperand(0), 1, DepPointer, 1) !=
        AliasAnalysis::MustAlias)
      return false;

    // DCE instructions only used to calculate that store
    DeleteDeadInstruction(Dependency);
    ++NumFastStores;

    // Inst's old Dependency is now deleted.  Compute the next dependency,
    // which may also be dead, as in
    //   s[0] = 0;
    //   s[1] = 0; // This has just been deleted.
    //   free(s);
    Dep = MD.getDependency(F);
  } while (!Dep.isNonLocal());

  return true;
}

/// handleEndBlock - Remove dead stores to stack-allocated locations in the
/// function end block.  Ex:
///   %A = alloca i32
///   ...
///   store i32 1, i32* %A
///   ret void
bool DSE::handleEndBlock(BasicBlock &BB) {
  AliasAnalysis &AA = getAnalysis<AliasAnalysis>();

  bool MadeChange = false;

  // Pointers alloca'd in this function are dead in the end block.
  SmallPtrSet<Value*, 64> deadPointers;

  // Find all of the alloca'd pointers in the entry block.
  // NOTE(review): only entry-block allocas are collected; allocas created in
  // other blocks are not considered candidates here.
  BasicBlock *Entry = BB.getParent()->begin();
  for (BasicBlock::iterator I = Entry->begin(), E = Entry->end(); I != E; ++I)
    if (AllocaInst *AI = dyn_cast<AllocaInst>(I))
      deadPointers.insert(AI);

  // Treat byval arguments the same, stores to them are dead at the end of the
  // function.
  for (Function::arg_iterator AI = BB.getParent()->arg_begin(),
       AE = BB.getParent()->arg_end(); AI != AE; ++AI)
    if (AI->hasByValAttr())
      deadPointers.insert(AI);

  // Scan the basic block backwards, shrinking the dead set as uses are seen.
  for (BasicBlock::iterator BBI = BB.end(); BBI != BB.begin(); ){
    --BBI;

    // If we find a store whose pointer is dead.
    if (hasMemoryWrite(BBI)) {
      if (isRemovable(BBI)) {
        // See through pointer-to-pointer bitcasts.
        Value *pointerOperand = getPointerOperand(BBI)->getUnderlyingObject();

        // Alloca'd pointers or byval arguments (which are functionally like
        // alloca's) are valid candidates for removal.
        if (deadPointers.count(pointerOperand)) {
          // DCE instructions only used to calculate that store.
          // Bump BBI past the instruction before deleting it, so the reverse
          // walk resumes correctly.
          Instruction *Dead = BBI;
          ++BBI;
          DeleteDeadInstruction(Dead, &deadPointers);
          ++NumFastStores;
          MadeChange = true;
          continue;
        }
      }

      // Because a memcpy or memmove is also a load, we can't skip it if we
      // didn't remove it.
      if (!isa<MemTransferInst>(BBI))
        continue;
    }

    // 'killPointer' records a pointer read by this instruction; any dead
    // candidate it may alias must be resurrected below.
    Value *killPointer = 0;
    uint64_t killPointerSize = AliasAnalysis::UnknownSize;

    // If we encounter a use of the pointer, it is no longer considered dead.
    if (LoadInst *L = dyn_cast<LoadInst>(BBI)) {
      // However, if this load is unused and not volatile, we can go ahead and
      // remove it, and not have to worry about it making our pointer undead!
      if (L->use_empty() && !L->isVolatile()) {
        ++BBI;
        DeleteDeadInstruction(L, &deadPointers);
        ++NumFastOther;
        MadeChange = true;
        continue;
      }

      killPointer = L->getPointerOperand();
    } else if (VAArgInst *V = dyn_cast<VAArgInst>(BBI)) {
      killPointer = V->getOperand(0);
    } else if (isa<MemTransferInst>(BBI) &&
               isa<ConstantInt>(cast<MemTransferInst>(BBI)->getLength())) {
      // A memcpy/memmove reads its source; treat the source as the kill
      // pointer with the (constant) transfer length as its size.
      killPointer = cast<MemTransferInst>(BBI)->getSource();
      killPointerSize = cast<ConstantInt>(
                          cast<MemTransferInst>(BBI)->getLength())->getZExtValue();
    } else if (AllocaInst *A = dyn_cast<AllocaInst>(BBI)) {
      // Walking backwards past the alloca: stores above this point cannot
      // target it, so stop tracking it.
      deadPointers.erase(A);

      // Dead alloca's can be DCE'd when we reach them.
      if (A->use_empty()) {
        ++BBI;
        DeleteDeadInstruction(A, &deadPointers);
        ++NumFastOther;
        MadeChange = true;
      }

      continue;
    } else if (CallSite CS = cast<Value>(BBI)) {
      // If this call does not access memory, it can't
      // be undeadifying any of our pointers.
      if (AA.doesNotAccessMemory(CS))
        continue;

      unsigned modRef = 0;
      unsigned other = 0;

      // Remove any pointers made undead by the call from the dead set.
      std::vector<Value*> dead;
      for (SmallPtrSet<Value*, 64>::iterator I = deadPointers.begin(),
           E = deadPointers.end(); I != E; ++I) {
        // HACK: if we detect that our AA is imprecise, it's not
        // worth it to scan the rest of the deadPointers set.  Just
        // assume that the AA will return ModRef for everything, and
        // go ahead and bail.
        // 16 consecutive ModRef answers with no other answer is the
        // imprecision heuristic's threshold.
        if (modRef >= 16 && other == 0) {
          deadPointers.clear();
          return MadeChange;
        }

        // See if the call site touches it.
        AliasAnalysis::ModRefResult A =
          AA.getModRefInfo(CS, *I, getPointerSize(*I, AA));

        if (A == AliasAnalysis::ModRef)
          ++modRef;
        else
          ++other;

        // Any read of the pointer by the call makes it undead.
        if (A == AliasAnalysis::ModRef || A == AliasAnalysis::Ref)
          dead.push_back(*I);
      }

      // Erase outside the loop: erasing while iterating a SmallPtrSet would
      // invalidate the iterators.
      for (std::vector<Value*>::iterator I = dead.begin(), E = dead.end();
           I != E; ++I)
        deadPointers.erase(*I);

      continue;
    } else if (isInstructionTriviallyDead(BBI)) {
      // For any non-memory-affecting non-terminators, DCE them as we reach
      // them.
      Instruction *Inst = BBI;
      ++BBI;
      DeleteDeadInstruction(Inst, &deadPointers);
      ++NumFastOther;
      MadeChange = true;
      continue;
    }

    if (!killPointer)
      continue;

    killPointer = killPointer->getUnderlyingObject();

    // Deal with undead pointers.
    MadeChange |= RemoveUndeadPointers(killPointer, killPointerSize, BBI,
                                       deadPointers);
  }

  return MadeChange;
}

/// RemoveUndeadPointers - check for uses of a pointer that make it
/// undead when scanning for dead stores to alloca's.  May also delete the
/// store at BBI when it must-alias a dead candidate; returns true if it did.
bool DSE::RemoveUndeadPointers(Value *killPointer, uint64_t killPointerSize,
                               BasicBlock::iterator &BBI,
                               SmallPtrSet<Value*, 64> &deadPointers) {
  AliasAnalysis &AA = getAnalysis<AliasAnalysis>();

  // If the kill pointer can be easily reduced to an alloca,
  // don't bother doing extraneous AA queries.
  if (deadPointers.count(killPointer)) {
    deadPointers.erase(killPointer);
    return false;
  }

  // A global can't be in the dead pointer set.
  if (isa<GlobalValue>(killPointer))
    return false;

  bool MadeChange = false;

  // Candidates that this use may alias; they become undead below.
  SmallVector<Value*, 16> undead;

  for (SmallPtrSet<Value*, 64>::iterator I = deadPointers.begin(),
       E = deadPointers.end(); I != E; ++I) {
    // See if this pointer could alias it.
    AliasAnalysis::AliasResult A = AA.alias(*I, getPointerSize(*I, AA),
                                            killPointer, killPointerSize);

    // If it must-alias and a store, we can delete it.
    if (isa<StoreInst>(BBI) && A == AliasAnalysis::MustAlias) {
      StoreInst *S = cast<StoreInst>(BBI);

      // Remove it!  (Advance BBI first; the caller's iterator must stay
      // valid after the deletion.)
      ++BBI;
      DeleteDeadInstruction(S, &deadPointers);
      ++NumFastStores;
      MadeChange = true;

      continue;
    } else if (A != AliasAnalysis::NoAlias)
      // Otherwise, it is undead.
      undead.push_back(*I);
  }

  // Erase outside the loop to avoid invalidating the set's iterators.
  for (SmallVector<Value*, 16>::iterator I = undead.begin(), E = undead.end();
       I != E; ++I)
    deadPointers.erase(*I);

  return MadeChange;
}

/// DeleteDeadInstruction - Delete this instruction.  Before we do, go through
/// and zero out all the operands of this instruction.  If any of them become
/// dead, delete them and the computation tree that feeds them.
///
/// If ValueSet is non-null, remove any deleted instructions from it as well.
///
void DSE::DeleteDeadInstruction(Instruction *I,
                                SmallPtrSet<Value*, 64> *ValueSet) {
  SmallVector<Instruction*, 32> NowDeadInsts;

  NowDeadInsts.push_back(I);
  // Pre-decrement so the unconditional ++ in the loop below doesn't count
  // the root instruction as an "other instr removed"; callers bump
  // NumFastStores for it themselves.
  --NumFastOther;

  // Before we touch this instruction, remove it from memdep!
  MemoryDependenceAnalysis &MDA = getAnalysis<MemoryDependenceAnalysis>();
  do {
    Instruction *DeadInst = NowDeadInsts.pop_back_val();

    ++NumFastOther;

    // This instruction is dead, zap it, in stages.  Start by removing it from
    // MemDep, which needs to know the operands and needs it to be in the
    // function.
    MDA.removeInstruction(DeadInst);

    for (unsigned op = 0, e = DeadInst->getNumOperands(); op != e; ++op) {
      Value *Op = DeadInst->getOperand(op);
      DeadInst->setOperand(op, 0);  // Drop the use before checking use_empty.

      // If this operand just became dead, add it to the NowDeadInsts list.
      if (!Op->use_empty()) continue;

      if (Instruction *OpI = dyn_cast<Instruction>(Op))
        if (isInstructionTriviallyDead(OpI))
          NowDeadInsts.push_back(OpI);
    }

    DeadInst->eraseFromParent();

    if (ValueSet) ValueSet->erase(DeadInst);
  } while (!NowDeadInsts.empty());
}

/// getPointerSize - Return the number of bytes addressable through V, which
/// must be an AllocaInst or an Argument.  Returns UnknownSize when no
/// TargetData is available or the alloca has a non-constant array size.
uint64_t DSE::getPointerSize(Value *V, AliasAnalysis &AA) const {
  const TargetData *TD = AA.getTargetData();
  if (TD == 0)
    return AliasAnalysis::UnknownSize;

  if (AllocaInst *A = dyn_cast<AllocaInst>(V)) {
    // Get size information for the alloca: element size times the (constant)
    // array length.
    if (ConstantInt *C = dyn_cast<ConstantInt>(A->getArraySize()))
      return C->getZExtValue() * TD->getTypeAllocSize(A->getAllocatedType());
    return AliasAnalysis::UnknownSize;
  }

  // Byval argument: size of the pointed-to type.
  assert(isa<Argument>(V) && "Expected AllocaInst or Argument!");
  const PointerType *PT = cast<PointerType>(V->getType());
  return TD->getTypeAllocSize(PT->getElementType());
}