AddressSanitizer.cpp revision 3e1d45bf44f882f3ee139d452dd50305d831a341
//===-- AddressSanitizer.cpp - memory error detector ------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of AddressSanitizer, an address sanity checker.
// Details of the algorithm:
//  http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "asan"

#include "llvm/Transforms/Instrumentation.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/OwningPtr.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/Triple.h"
#include "llvm/DIBuilder.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/InstVisitor.h"
#include "llvm/Support/CallSite.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/DataTypes.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/Endian.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Support/system_error.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/BlackList.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"
#include <algorithm>
#include <string>

using namespace llvm;

static const uint64_t kDefaultShadowScale = 3;
static const uint64_t kDefaultShadowOffset32 = 1ULL << 29;
static const uint64_t kDefaultShadowOffset64 = 1ULL << 44;
static const uint64_t kDefaultShort64bitShadowOffset = 0x7FFF8000;  // < 2G.
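// A worked example of the mapping these constants feed (see getShadowMapping
// and memToShadow below), assuming the default Scale = 3 and the 32-bit
// offset above:
//   Shadow(0x40000000) = (0x40000000 >> 3) + 0x20000000 = 0x28000000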
static const uint64_t kPPC64_ShadowOffset64 = 1ULL << 41;
static const uint64_t kMIPS32_ShadowOffset32 = 0x0aaa8000;

static const size_t kMaxStackMallocSize = 1 << 16;  // 64K
static const uintptr_t kCurrentStackFrameMagic = 0x41B58AB3;
static const uintptr_t kRetiredStackFrameMagic = 0x45E0360E;

static const char *kAsanModuleCtorName = "asan.module_ctor";
static const char *kAsanModuleDtorName = "asan.module_dtor";
static const int kAsanCtorAndCtorPriority = 1;
static const char *kAsanReportErrorTemplate = "__asan_report_";
static const char *kAsanReportLoadN = "__asan_report_load_n";
static const char *kAsanReportStoreN = "__asan_report_store_n";
static const char *kAsanRegisterGlobalsName = "__asan_register_globals";
static const char *kAsanUnregisterGlobalsName = "__asan_unregister_globals";
static const char *kAsanPoisonGlobalsName = "__asan_before_dynamic_init";
static const char *kAsanUnpoisonGlobalsName = "__asan_after_dynamic_init";
static const char *kAsanInitName = "__asan_init_v3";
static const char *kAsanHandleNoReturnName = "__asan_handle_no_return";
static const char *kAsanMappingOffsetName = "__asan_mapping_offset";
static const char *kAsanMappingScaleName = "__asan_mapping_scale";
static const char *kAsanStackMallocName = "__asan_stack_malloc";
static const char *kAsanStackFreeName = "__asan_stack_free";
static const char *kAsanGenPrefix = "__asan_gen_";
static const char *kAsanPoisonStackMemoryName = "__asan_poison_stack_memory";
static const char *kAsanUnpoisonStackMemoryName =
    "__asan_unpoison_stack_memory";

static const int kAsanStackLeftRedzoneMagic = 0xf1;
static const int kAsanStackMidRedzoneMagic = 0xf2;
static const int kAsanStackRightRedzoneMagic = 0xf3;
static const int kAsanStackPartialRedzoneMagic = 0xf4;

// Access sizes are powers of two: 1, 2, 4, 8, 16.
static const size_t kNumberOfAccessSizes = 5;

// Command-line flags.

// This flag may need to be replaced with -f[no-]asan-reads.
static cl::opt<bool> ClInstrumentReads("asan-instrument-reads",
       cl::desc("instrument read instructions"), cl::Hidden, cl::init(true));
static cl::opt<bool> ClInstrumentWrites("asan-instrument-writes",
       cl::desc("instrument write instructions"), cl::Hidden, cl::init(true));
static cl::opt<bool> ClInstrumentAtomics("asan-instrument-atomics",
       cl::desc("instrument atomic instructions (rmw, cmpxchg)"),
       cl::Hidden, cl::init(true));
static cl::opt<bool> ClAlwaysSlowPath("asan-always-slow-path",
       cl::desc("use instrumentation with slow path for all accesses"),
       cl::Hidden, cl::init(false));
// This flag limits the number of instructions to be instrumented
// in any given BB. Normally, this should be set to unlimited (INT_MAX),
// but due to http://llvm.org/bugs/show_bug.cgi?id=12652 we temporarily
// set it to 10000.
static cl::opt<int> ClMaxInsnsToInstrumentPerBB("asan-max-ins-per-bb",
       cl::init(10000),
       cl::desc("maximal number of instructions to instrument in any given BB"),
       cl::Hidden);
// This flag may need to be replaced with -f[no]asan-stack.
static cl::opt<bool> ClStack("asan-stack",
       cl::desc("Handle stack memory"), cl::Hidden, cl::init(true));
// This flag may need to be replaced with -f[no]asan-use-after-return.
static cl::opt<bool> ClUseAfterReturn("asan-use-after-return",
       cl::desc("Check return-after-free"), cl::Hidden, cl::init(false));
// This flag may need to be replaced with -f[no]asan-globals.
static cl::opt<bool> ClGlobals("asan-globals",
       cl::desc("Handle global objects"), cl::Hidden, cl::init(true));
static cl::opt<bool> ClInitializers("asan-initialization-order",
       cl::desc("Handle C++ initializer order"), cl::Hidden, cl::init(false));
static cl::opt<bool> ClMemIntrin("asan-memintrin",
       cl::desc("Handle memset/memcpy/memmove"), cl::Hidden, cl::init(true));
static cl::opt<bool> ClRealignStack("asan-realign-stack",
       cl::desc("Realign stack to 32"), cl::Hidden, cl::init(true));
static cl::opt<std::string> ClBlacklistFile("asan-blacklist",
       cl::desc("File containing the list of objects to ignore "
                "during instrumentation"), cl::Hidden);

// These flags allow changing the shadow mapping.
// The shadow mapping looks like
//   Shadow = (Mem >> scale) + (1 << offset_log)
static cl::opt<int> ClMappingScale("asan-mapping-scale",
       cl::desc("scale of asan shadow mapping"), cl::Hidden, cl::init(0));
static cl::opt<int> ClMappingOffsetLog("asan-mapping-offset-log",
       cl::desc("offset of asan shadow mapping"), cl::Hidden, cl::init(-1));
static cl::opt<bool> ClShort64BitOffset("asan-short-64bit-mapping-offset",
       cl::desc("Use short immediate constant as the mapping offset for 64bit"),
       cl::Hidden, cl::init(true));

// Optimization flags. Not user visible, used mostly for testing
// and benchmarking the tool.
static cl::opt<bool> ClOpt("asan-opt",
       cl::desc("Optimize instrumentation"), cl::Hidden, cl::init(true));
static cl::opt<bool> ClOptSameTemp("asan-opt-same-temp",
       cl::desc("Instrument the same temp just once"), cl::Hidden,
       cl::init(true));
static cl::opt<bool> ClOptGlobals("asan-opt-globals",
       cl::desc("Don't instrument scalar globals"), cl::Hidden, cl::init(true));

static cl::opt<bool> ClCheckLifetime("asan-check-lifetime",
       cl::desc("Use llvm.lifetime intrinsics to insert extra checks"),
       cl::Hidden, cl::init(false));

// Debug flags.
static cl::opt<int> ClDebug("asan-debug", cl::desc("debug"), cl::Hidden,
                            cl::init(0));
static cl::opt<int> ClDebugStack("asan-debug-stack", cl::desc("debug stack"),
                                 cl::Hidden, cl::init(0));
static cl::opt<std::string> ClDebugFunc("asan-debug-func",
                                        cl::Hidden, cl::desc("Debug func"));
static cl::opt<int> ClDebugMin("asan-debug-min", cl::desc("Debug min inst"),
                               cl::Hidden, cl::init(-1));
static cl::opt<int> ClDebugMax("asan-debug-max", cl::desc("Debug max inst"),
                               cl::Hidden, cl::init(-1));

namespace {
/// A set of dynamically initialized globals extracted from metadata.
class SetOfDynamicallyInitializedGlobals {
 public:
  void Init(Module& M) {
    // Clang generates metadata identifying all dynamically initialized
    // globals.
    NamedMDNode *DynamicGlobals =
        M.getNamedMetadata("llvm.asan.dynamically_initialized_globals");
    if (!DynamicGlobals)
      return;
    for (int i = 0, n = DynamicGlobals->getNumOperands(); i < n; ++i) {
      MDNode *MDN = DynamicGlobals->getOperand(i);
      assert(MDN->getNumOperands() == 1);
      Value *VG = MDN->getOperand(0);
      // The optimizer may optimize away a global entirely, in which case we
      // cannot instrument access to it.
      if (!VG)
        continue;
      DynInitGlobals.insert(cast<GlobalVariable>(VG));
    }
  }
  bool Contains(GlobalVariable *G) { return DynInitGlobals.count(G) != 0; }
 private:
  SmallSet<GlobalValue*, 32> DynInitGlobals;
};

/// This struct defines the shadow mapping using the rule:
///   shadow = (mem >> Scale) ADD-or-OR Offset.
struct ShadowMapping {
  int Scale;
  uint64_t Offset;
  bool OrShadowOffset;
};

static ShadowMapping getShadowMapping(const Module &M, int LongSize,
                                      bool ZeroBaseShadow) {
  llvm::Triple TargetTriple(M.getTargetTriple());
  bool IsAndroid = TargetTriple.getEnvironment() == llvm::Triple::Android;
  bool IsMacOSX = TargetTriple.getOS() == llvm::Triple::MacOSX;
  bool IsPPC64 = TargetTriple.getArch() == llvm::Triple::ppc64;
  bool IsX86_64 = TargetTriple.getArch() == llvm::Triple::x86_64;
  bool IsMIPS32 = TargetTriple.getArch() == llvm::Triple::mips ||
                  TargetTriple.getArch() == llvm::Triple::mipsel;

  ShadowMapping Mapping;

  // OR-ing the shadow offset is more efficient (at least on x86), but on
  // ppc64 we have to use add, since the shadow offset is not necessarily
  // 1/8-th of the address space.
  Mapping.OrShadowOffset = !IsPPC64 && !ClShort64BitOffset;

  Mapping.Offset = (IsAndroid || ZeroBaseShadow) ? 0 :
      (LongSize == 32 ?
        (IsMIPS32 ? kMIPS32_ShadowOffset32 : kDefaultShadowOffset32) :
       IsPPC64 ? kPPC64_ShadowOffset64 : kDefaultShadowOffset64);
  if (!ZeroBaseShadow && ClShort64BitOffset && IsX86_64 && !IsMacOSX) {
    assert(LongSize == 64);
    Mapping.Offset = kDefaultShort64bitShadowOffset;
  }
  if (!ZeroBaseShadow && ClMappingOffsetLog >= 0) {
    // Zero offset log is the special case.
    Mapping.Offset = (ClMappingOffsetLog == 0) ? 0 : 1ULL << ClMappingOffsetLog;
  }

  Mapping.Scale = kDefaultShadowScale;
  if (ClMappingScale) {
    Mapping.Scale = ClMappingScale;
  }

  return Mapping;
}

static size_t RedzoneSizeForScale(int MappingScale) {
  // Redzone used for stack and globals is at least 32 bytes.
  // For scales 6 and 7, the redzone has to be 64 and 128 bytes respectively.
  return std::max(32U, 1U << MappingScale);
}

/// AddressSanitizer: instrument the code in module to find memory bugs.
struct AddressSanitizer : public FunctionPass {
  AddressSanitizer(bool CheckInitOrder = true,
                   bool CheckUseAfterReturn = false,
                   bool CheckLifetime = false,
                   StringRef BlacklistFile = StringRef(),
                   bool ZeroBaseShadow = false)
      : FunctionPass(ID),
        CheckInitOrder(CheckInitOrder || ClInitializers),
        CheckUseAfterReturn(CheckUseAfterReturn || ClUseAfterReturn),
        CheckLifetime(CheckLifetime || ClCheckLifetime),
        BlacklistFile(BlacklistFile.empty() ? ClBlacklistFile
                                            : BlacklistFile),
        ZeroBaseShadow(ZeroBaseShadow) {}
  virtual const char *getPassName() const {
    return "AddressSanitizerFunctionPass";
  }
  void instrumentMop(Instruction *I);
  void instrumentAddress(Instruction *OrigIns, Instruction *InsertBefore,
                         Value *Addr, uint32_t TypeSize, bool IsWrite,
                         Value *SizeArgument);
  Value *createSlowPathCmp(IRBuilder<> &IRB, Value *AddrLong,
                           Value *ShadowValue, uint32_t TypeSize);
  Instruction *generateCrashCode(Instruction *InsertBefore, Value *Addr,
                                 bool IsWrite, size_t AccessSizeIndex,
                                 Value *SizeArgument);
  bool instrumentMemIntrinsic(MemIntrinsic *MI);
  void instrumentMemIntrinsicParam(Instruction *OrigIns, Value *Addr,
                                   Value *Size,
                                   Instruction *InsertBefore, bool IsWrite);
  Value *memToShadow(Value *Shadow, IRBuilder<> &IRB);
  bool runOnFunction(Function &F);
  bool maybeInsertAsanInitAtFunctionEntry(Function &F);
  void emitShadowMapping(Module &M, IRBuilder<> &IRB) const;
  virtual bool doInitialization(Module &M);
  static char ID;  // Pass identification, replacement for typeid

 private:
  void initializeCallbacks(Module &M);

  bool ShouldInstrumentGlobal(GlobalVariable *G);
  bool LooksLikeCodeInBug11395(Instruction *I);
  void FindDynamicInitializers(Module &M);

  bool CheckInitOrder;
  bool CheckUseAfterReturn;
  bool CheckLifetime;
  SmallString<64> BlacklistFile;
  bool ZeroBaseShadow;

  LLVMContext *C;
  DataLayout *TD;
  int LongSize;
  Type *IntptrTy;
  ShadowMapping Mapping;
  Function *AsanCtorFunction;
  Function *AsanInitFunction;
  Function *AsanHandleNoReturnFunc;
  OwningPtr<BlackList> BL;
  // This array is indexed by AccessIsWrite and log2(AccessSize).
  Function *AsanErrorCallback[2][kNumberOfAccessSizes];
  // This array is indexed by AccessIsWrite.
  Function *AsanErrorCallbackSized[2];
  InlineAsm *EmptyAsm;
  SetOfDynamicallyInitializedGlobals DynamicallyInitializedGlobals;

  friend struct FunctionStackPoisoner;
};

class AddressSanitizerModule : public ModulePass {
 public:
  AddressSanitizerModule(bool CheckInitOrder = true,
                         StringRef BlacklistFile = StringRef(),
                         bool ZeroBaseShadow = false)
      : ModulePass(ID),
        CheckInitOrder(CheckInitOrder || ClInitializers),
        BlacklistFile(BlacklistFile.empty() ? ClBlacklistFile
                                            : BlacklistFile),
        ZeroBaseShadow(ZeroBaseShadow) {}
  bool runOnModule(Module &M);
  static char ID;  // Pass identification, replacement for typeid
  virtual const char *getPassName() const {
    return "AddressSanitizerModule";
  }

 private:
  void initializeCallbacks(Module &M);

  bool ShouldInstrumentGlobal(GlobalVariable *G);
  void createInitializerPoisonCalls(Module &M, GlobalValue *ModuleName);
  size_t RedzoneSize() const {
    return RedzoneSizeForScale(Mapping.Scale);
  }

  bool CheckInitOrder;
  SmallString<64> BlacklistFile;
  bool ZeroBaseShadow;

  OwningPtr<BlackList> BL;
  SetOfDynamicallyInitializedGlobals DynamicallyInitializedGlobals;
  Type *IntptrTy;
  LLVMContext *C;
  DataLayout *TD;
  ShadowMapping Mapping;
  Function *AsanPoisonGlobals;
  Function *AsanUnpoisonGlobals;
  Function *AsanRegisterGlobals;
  Function *AsanUnregisterGlobals;
};

// Stack poisoning does not play well with exception handling.
// When an exception is thrown, we essentially bypass the code
// that unpoisons the stack.
// This is why the run-time library has to intercept __cxa_throw (as well as
// longjmp, etc) and unpoison the entire stack in the interceptor.
// However, this does not work inside the function that actually catches the
// exception, most likely because the compiler hoists the load of the shadow
// value somewhere too high.
// This causes asan to report a non-existing bug on 453.povray.
// It sounds like an LLVM bug.
struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> {
  Function &F;
  AddressSanitizer &ASan;
  DIBuilder DIB;
  LLVMContext *C;
  Type *IntptrTy;
  Type *IntptrPtrTy;
  ShadowMapping Mapping;

  SmallVector<AllocaInst*, 16> AllocaVec;
  SmallVector<Instruction*, 8> RetVec;
  uint64_t TotalStackSize;
  unsigned StackAlignment;

  Function *AsanStackMallocFunc, *AsanStackFreeFunc;
  Function *AsanPoisonStackMemoryFunc, *AsanUnpoisonStackMemoryFunc;

  // Stores the place and arguments of a poisoning/unpoisoning call for an
  // alloca.
  struct AllocaPoisonCall {
    IntrinsicInst *InsBefore;
    uint64_t Size;
    bool DoPoison;
  };
  SmallVector<AllocaPoisonCall, 8> AllocaPoisonCallVec;

  // Maps a Value to the AllocaInst from which the Value originates.
  typedef DenseMap<Value*, AllocaInst*> AllocaForValueMapTy;
  AllocaForValueMapTy AllocaForValue;

  FunctionStackPoisoner(Function &F, AddressSanitizer &ASan)
      : F(F), ASan(ASan), DIB(*F.getParent()), C(ASan.C),
        IntptrTy(ASan.IntptrTy), IntptrPtrTy(PointerType::get(IntptrTy, 0)),
        Mapping(ASan.Mapping),
        TotalStackSize(0), StackAlignment(1 << Mapping.Scale) {}

  bool runOnFunction() {
    if (!ClStack) return false;
    // Collect alloca, ret, lifetime instructions etc.
    for (df_iterator<BasicBlock*> DI = df_begin(&F.getEntryBlock()),
         DE = df_end(&F.getEntryBlock()); DI != DE; ++DI) {
      BasicBlock *BB = *DI;
      visit(*BB);
    }
    if (AllocaVec.empty()) return false;

    initializeCallbacks(*F.getParent());

    poisonStack();

    if (ClDebugStack) {
      DEBUG(dbgs() << F);
    }
    return true;
  }

  // Finds all static Alloca instructions and puts
  // poisoned red zones around all of them.
  // Then unpoison everything back before the function returns.
  void poisonStack();

  // ----------------------- Visitors.
  /// \brief Collect all Ret instructions.
  void visitReturnInst(ReturnInst &RI) {
    RetVec.push_back(&RI);
  }

  /// \brief Collect Alloca instructions we want (and can) handle.
  void visitAllocaInst(AllocaInst &AI) {
    if (!isInterestingAlloca(AI)) return;

    StackAlignment = std::max(StackAlignment, AI.getAlignment());
    AllocaVec.push_back(&AI);
    uint64_t AlignedSize = getAlignedAllocaSize(&AI);
    TotalStackSize += AlignedSize;
  }

  /// \brief Collect lifetime intrinsic calls to check for use-after-scope
  /// errors.
  void visitIntrinsicInst(IntrinsicInst &II) {
    if (!ASan.CheckLifetime) return;
    Intrinsic::ID ID = II.getIntrinsicID();
    if (ID != Intrinsic::lifetime_start &&
        ID != Intrinsic::lifetime_end)
      return;
    // Found lifetime intrinsic, add ASan instrumentation if necessary.
    ConstantInt *Size = dyn_cast<ConstantInt>(II.getArgOperand(0));
    // If the size argument is undefined, don't do anything.
    if (Size->isMinusOne()) return;
    // Check that the size doesn't saturate uint64_t and can
    // be stored in IntptrTy.
    const uint64_t SizeValue = Size->getValue().getLimitedValue();
    if (SizeValue == ~0ULL ||
        !ConstantInt::isValueValidForType(IntptrTy, SizeValue))
      return;
    // Find the alloca instruction that corresponds to the llvm.lifetime
    // argument.
    AllocaInst *AI = findAllocaForValue(II.getArgOperand(1));
    if (!AI) return;
    bool DoPoison = (ID == Intrinsic::lifetime_end);
    AllocaPoisonCall APC = {&II, SizeValue, DoPoison};
    AllocaPoisonCallVec.push_back(APC);
  }

  // ---------------------- Helpers.
  void initializeCallbacks(Module &M);

  // Check if we want (and can) handle this alloca.
  bool isInterestingAlloca(AllocaInst &AI) {
    return (!AI.isArrayAllocation() &&
            AI.isStaticAlloca() &&
            AI.getAllocatedType()->isSized());
  }

  size_t RedzoneSize() const {
    return RedzoneSizeForScale(Mapping.Scale);
  }
  uint64_t getAllocaSizeInBytes(AllocaInst *AI) {
    Type *Ty = AI->getAllocatedType();
    uint64_t SizeInBytes = ASan.TD->getTypeAllocSize(Ty);
    return SizeInBytes;
  }
  uint64_t getAlignedSize(uint64_t SizeInBytes) {
    size_t RZ = RedzoneSize();
    return ((SizeInBytes + RZ - 1) / RZ) * RZ;
  }
  uint64_t getAlignedAllocaSize(AllocaInst *AI) {
    uint64_t SizeInBytes = getAllocaSizeInBytes(AI);
    return getAlignedSize(SizeInBytes);
  }
  /// Finds the alloca where the value comes from.
  AllocaInst *findAllocaForValue(Value *V);
  void poisonRedZones(const ArrayRef<AllocaInst*> &AllocaVec, IRBuilder<> IRB,
                      Value *ShadowBase, bool DoPoison);
  void poisonAlloca(Value *V, uint64_t Size, IRBuilder<> IRB, bool DoPoison);
};

}  // namespace

char AddressSanitizer::ID = 0;
INITIALIZE_PASS(AddressSanitizer, "asan",
    "AddressSanitizer: detects use-after-free and out-of-bounds bugs.",
    false, false)
FunctionPass *llvm::createAddressSanitizerFunctionPass(
    bool CheckInitOrder, bool CheckUseAfterReturn, bool CheckLifetime,
    StringRef BlacklistFile, bool ZeroBaseShadow) {
  return new AddressSanitizer(CheckInitOrder, CheckUseAfterReturn,
                              CheckLifetime, BlacklistFile, ZeroBaseShadow);
}

char AddressSanitizerModule::ID = 0;
INITIALIZE_PASS(AddressSanitizerModule, "asan-module",
    "AddressSanitizer: detects use-after-free and out-of-bounds bugs."
    " ModulePass", false, false)
ModulePass *llvm::createAddressSanitizerModulePass(
    bool CheckInitOrder, StringRef BlacklistFile, bool ZeroBaseShadow) {
  return new AddressSanitizerModule(CheckInitOrder, BlacklistFile,
                                    ZeroBaseShadow);
}

static size_t TypeSizeToSizeIndex(uint32_t TypeSize) {
  size_t Res = countTrailingZeros(TypeSize / 8);
  assert(Res < kNumberOfAccessSizes);
  return Res;
}

// Create a constant for Str so that we can pass it to the run-time lib.
static GlobalVariable *createPrivateGlobalForString(Module &M, StringRef Str) {
  Constant *StrConst = ConstantDataArray::getString(M.getContext(), Str);
  GlobalVariable *GV = new GlobalVariable(M, StrConst->getType(), true,
                                          GlobalValue::PrivateLinkage, StrConst,
                                          kAsanGenPrefix);
  GV->setUnnamedAddr(true);  // Ok to merge these.
  GV->setAlignment(1);  // Strings may not be merged w/o setting align 1.
  return GV;
}

static bool GlobalWasGeneratedByAsan(GlobalVariable *G) {
  return G->getName().find(kAsanGenPrefix) == 0;
}

Value *AddressSanitizer::memToShadow(Value *Shadow, IRBuilder<> &IRB) {
  // Shadow >> scale
  Shadow = IRB.CreateLShr(Shadow, Mapping.Scale);
  if (Mapping.Offset == 0)
    return Shadow;
  // (Shadow >> scale) | offset
  if (Mapping.OrShadowOffset)
    return IRB.CreateOr(Shadow, ConstantInt::get(IntptrTy, Mapping.Offset));
  else
    return IRB.CreateAdd(Shadow, ConstantInt::get(IntptrTy, Mapping.Offset));
}

void AddressSanitizer::instrumentMemIntrinsicParam(
    Instruction *OrigIns,
    Value *Addr, Value *Size, Instruction *InsertBefore, bool IsWrite) {
  IRBuilder<> IRB(InsertBefore);
  if (Size->getType() != IntptrTy)
    Size = IRB.CreateIntCast(Size, IntptrTy, false);
  // Check the first byte.
  instrumentAddress(OrigIns, InsertBefore, Addr, 8, IsWrite, Size);
  // Check the last byte.
  IRB.SetInsertPoint(InsertBefore);
  Value *SizeMinusOne = IRB.CreateSub(Size, ConstantInt::get(IntptrTy, 1));
  Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
  Value *AddrLast = IRB.CreateAdd(AddrLong, SizeMinusOne);
  instrumentAddress(OrigIns, InsertBefore, AddrLast, 8, IsWrite, Size);
}

// Instrument memset/memmove/memcpy.
bool AddressSanitizer::instrumentMemIntrinsic(MemIntrinsic *MI) {
  Value *Dst = MI->getDest();
  MemTransferInst *MemTran = dyn_cast<MemTransferInst>(MI);
  Value *Src = MemTran ? MemTran->getSource() : 0;
  Value *Length = MI->getLength();

  Constant *ConstLength = dyn_cast<Constant>(Length);
  Instruction *InsertBefore = MI;
  if (ConstLength) {
    if (ConstLength->isNullValue()) return false;
  } else {
    // The size is not a constant so it could be zero -- check at run-time.
    IRBuilder<> IRB(InsertBefore);

    Value *Cmp = IRB.CreateICmpNE(Length,
                                  Constant::getNullValue(Length->getType()));
    InsertBefore = SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), false);
  }

  instrumentMemIntrinsicParam(MI, Dst, Length, InsertBefore, true);
  if (Src)
    instrumentMemIntrinsicParam(MI, Src, Length, InsertBefore, false);
  return true;
}

// If I is an interesting memory access, return the PointerOperand
// and set IsWrite. Otherwise return NULL.
static Value *isInterestingMemoryAccess(Instruction *I, bool *IsWrite) {
  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    if (!ClInstrumentReads) return NULL;
    *IsWrite = false;
    return LI->getPointerOperand();
  }
  if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    if (!ClInstrumentWrites) return NULL;
    *IsWrite = true;
    return SI->getPointerOperand();
  }
  if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I)) {
    if (!ClInstrumentAtomics) return NULL;
    *IsWrite = true;
    return RMW->getPointerOperand();
  }
  if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I)) {
    if (!ClInstrumentAtomics) return NULL;
    *IsWrite = true;
    return XCHG->getPointerOperand();
  }
  return NULL;
}

void AddressSanitizer::instrumentMop(Instruction *I) {
  bool IsWrite = false;
  Value *Addr = isInterestingMemoryAccess(I, &IsWrite);
  assert(Addr);
  if (ClOpt && ClOptGlobals) {
    if (GlobalVariable *G = dyn_cast<GlobalVariable>(Addr)) {
      // If initialization order checking is disabled, a simple access to a
      // dynamically initialized global is always valid.
      if (!CheckInitOrder)
        return;
      // If a global variable does not have dynamic initialization we don't
      // have to instrument it. However, if a global does not have an
      // initializer at all, we assume it has a dynamic initializer (in
      // another TU).
      if (G->hasInitializer() && !DynamicallyInitializedGlobals.Contains(G))
        return;
    }
  }

  Type *OrigPtrTy = Addr->getType();
  Type *OrigTy = cast<PointerType>(OrigPtrTy)->getElementType();

  assert(OrigTy->isSized());
  uint32_t TypeSize = TD->getTypeStoreSizeInBits(OrigTy);

  assert((TypeSize % 8) == 0);

  // Instrument a 1-, 2-, 4-, 8-, or 16-byte access with one check.
  if (TypeSize == 8 || TypeSize == 16 ||
      TypeSize == 32 || TypeSize == 64 || TypeSize == 128)
    return instrumentAddress(I, I, Addr, TypeSize, IsWrite, 0);
  // Instrument an unusual size (but still a multiple of 8).
  // We cannot do it with a single check, so we do a 1-byte check for the
  // first and the last bytes. We call __asan_report_*_n(addr, real_size) to
  // be able to report the actual access size.
  IRBuilder<> IRB(I);
  Value *LastByte = IRB.CreateIntToPtr(
      IRB.CreateAdd(IRB.CreatePointerCast(Addr, IntptrTy),
                    ConstantInt::get(IntptrTy, TypeSize / 8 - 1)),
      OrigPtrTy);
  Value *Size = ConstantInt::get(IntptrTy, TypeSize / 8);
  instrumentAddress(I, I, Addr, 8, IsWrite, Size);
  instrumentAddress(I, I, LastByte, 8, IsWrite, Size);
}

// Validate the result of Module::getOrInsertFunction called for an interface
// function of AddressSanitizer. If the instrumented module defines a function
// with the same name, their prototypes must match; otherwise
// getOrInsertFunction returns a bitcast.
static Function *checkInterfaceFunction(Constant *FuncOrBitcast) {
  if (isa<Function>(FuncOrBitcast)) return cast<Function>(FuncOrBitcast);
  FuncOrBitcast->dump();
  report_fatal_error("trying to redefine an AddressSanitizer "
                     "interface function");
}

Instruction *AddressSanitizer::generateCrashCode(
    Instruction *InsertBefore, Value *Addr,
    bool IsWrite, size_t AccessSizeIndex, Value *SizeArgument) {
  IRBuilder<> IRB(InsertBefore);
  CallInst *Call = SizeArgument
      ? IRB.CreateCall2(AsanErrorCallbackSized[IsWrite], Addr, SizeArgument)
      : IRB.CreateCall(AsanErrorCallback[IsWrite][AccessSizeIndex], Addr);

  // We don't do Call->setDoesNotReturn() because the BB already has
  // an UnreachableInst at the end.
  // This EmptyAsm is required to avoid callback merge.
  IRB.CreateCall(EmptyAsm);
  return Call;
}

Value *AddressSanitizer::createSlowPathCmp(IRBuilder<> &IRB, Value *AddrLong,
                                           Value *ShadowValue,
                                           uint32_t TypeSize) {
  size_t Granularity = 1 << Mapping.Scale;
  // Addr & (Granularity - 1)
  Value *LastAccessedByte = IRB.CreateAnd(
      AddrLong, ConstantInt::get(IntptrTy, Granularity - 1));
  // (Addr & (Granularity - 1)) + size - 1
  if (TypeSize / 8 > 1)
    LastAccessedByte = IRB.CreateAdd(
        LastAccessedByte, ConstantInt::get(IntptrTy, TypeSize / 8 - 1));
  // (uint8_t) ((Addr & (Granularity-1)) + size - 1)
  LastAccessedByte = IRB.CreateIntCast(
      LastAccessedByte, ShadowValue->getType(), false);
  // ((uint8_t) ((Addr & (Granularity-1)) + size - 1)) >= ShadowValue
  return IRB.CreateICmpSGE(LastAccessedByte, ShadowValue);
}

void AddressSanitizer::instrumentAddress(Instruction *OrigIns,
                                         Instruction *InsertBefore,
                                         Value *Addr, uint32_t TypeSize,
                                         bool IsWrite, Value *SizeArgument) {
  IRBuilder<> IRB(InsertBefore);
  Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);

  Type *ShadowTy = IntegerType::get(
      *C, std::max(8U, TypeSize >> Mapping.Scale));
  Type *ShadowPtrTy = PointerType::get(ShadowTy, 0);
  Value *ShadowPtr = memToShadow(AddrLong, IRB);
  Value *CmpVal = Constant::getNullValue(ShadowTy);
  Value *ShadowValue = IRB.CreateLoad(
      IRB.CreateIntToPtr(ShadowPtr, ShadowPtrTy));

  Value *Cmp = IRB.CreateICmpNE(ShadowValue, CmpVal);
  size_t AccessSizeIndex = TypeSizeToSizeIndex(TypeSize);
  size_t Granularity = 1 << Mapping.Scale;
  TerminatorInst *CrashTerm = 0;

  if (ClAlwaysSlowPath || (TypeSize < 8 * Granularity)) {
    TerminatorInst *CheckTerm =
        SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), false);
    assert(dyn_cast<BranchInst>(CheckTerm)->isUnconditional());
    BasicBlock *NextBB = CheckTerm->getSuccessor(0);
    IRB.SetInsertPoint(CheckTerm);
    Value *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue, TypeSize);
    BasicBlock *CrashBlock =
        BasicBlock::Create(*C, "", NextBB->getParent(), NextBB);
    CrashTerm = new UnreachableInst(*C, CrashBlock);
    BranchInst *NewTerm = BranchInst::Create(CrashBlock, NextBB, Cmp2);
    ReplaceInstWithInst(CheckTerm, NewTerm);
  } else {
    CrashTerm = SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), true);
  }

  Instruction *Crash = generateCrashCode(
      CrashTerm, AddrLong, IsWrite, AccessSizeIndex, SizeArgument);
  Crash->setDebugLoc(OrigIns->getDebugLoc());
}

void AddressSanitizerModule::createInitializerPoisonCalls(
    Module &M, GlobalValue *ModuleName) {
  // We do all of our poisoning and unpoisoning within _GLOBAL__I_a.
  Function *GlobalInit = M.getFunction("_GLOBAL__I_a");
  // If that function is not present, this TU contains no globals, or they
  // have all been optimized away.
  if (!GlobalInit)
    return;

  // Set up the arguments to our poison/unpoison functions.
  IRBuilder<> IRB(GlobalInit->begin()->getFirstInsertionPt());

  // Add a call to poison all external globals before the given function
  // starts.
  Value *ModuleNameAddr = ConstantExpr::getPointerCast(ModuleName, IntptrTy);
  IRB.CreateCall(AsanPoisonGlobals, ModuleNameAddr);

  // Add calls to unpoison all globals before each return instruction.
  for (Function::iterator I = GlobalInit->begin(), E = GlobalInit->end();
       I != E; ++I) {
    if (ReturnInst *RI = dyn_cast<ReturnInst>(I->getTerminator())) {
      CallInst::Create(AsanUnpoisonGlobals, "", RI);
    }
  }
}

bool AddressSanitizerModule::ShouldInstrumentGlobal(GlobalVariable *G) {
  Type *Ty = cast<PointerType>(G->getType())->getElementType();
  DEBUG(dbgs() << "GLOBAL: " << *G << "\n");

  if (BL->isIn(*G)) return false;
  if (!Ty->isSized()) return false;
  if (!G->hasInitializer()) return false;
  if (GlobalWasGeneratedByAsan(G)) return false;  // Our own global.
  // Touch only those globals that will not be defined in other modules.
  // Don't handle ODR type linkages since other modules may be built w/o asan.
  if (G->getLinkage() != GlobalVariable::ExternalLinkage &&
      G->getLinkage() != GlobalVariable::PrivateLinkage &&
      G->getLinkage() != GlobalVariable::InternalLinkage)
    return false;
  // Two problems with thread-locals:
  //   - The address of the main thread's copy can't be computed at link-time.
  //   - Need to poison all copies, not just the main thread's one.
  if (G->isThreadLocal())
    return false;
  // For now, just ignore this global if the alignment is large.
  if (G->getAlignment() > RedzoneSize()) return false;

  // Ignore all the globals with the names starting with "\01L_OBJC_".
  // Many of those are put into the .cstring section. The linker compresses
  // that section by removing the spare \0s after the string terminator, so
  // our redzones get broken.
  if ((G->getName().find("\01L_OBJC_") == 0) ||
      (G->getName().find("\01l_OBJC_") == 0)) {
    DEBUG(dbgs() << "Ignoring \\01L_OBJC_* global: " << *G);
    return false;
  }

  if (G->hasSection()) {
    StringRef Section(G->getSection());
    // Ignore the globals from the __OBJC section. The ObjC runtime assumes
    // those conform to /usr/lib/objc/runtime.h, so we can't add redzones to
    // them.
    if ((Section.find("__OBJC,") == 0) ||
        (Section.find("__DATA, __objc_") == 0)) {
      DEBUG(dbgs() << "Ignoring ObjC runtime global: " << *G);
      return false;
    }
    // See http://code.google.com/p/address-sanitizer/issues/detail?id=32
    // Constant CFString instances are compiled in the following way:
    //  -- the string buffer is emitted into
    //     __TEXT,__cstring,cstring_literals
    //  -- the constant NSConstantString structure referencing that buffer
    //     is placed into __DATA,__cfstring
    // Therefore there's no point in placing redzones into __DATA,__cfstring.
    // Moreover, it causes the linker to crash on OS X 10.7.
    if (Section.find("__DATA,__cfstring") == 0) {
      DEBUG(dbgs() << "Ignoring CFString: " << *G);
      return false;
    }
  }

  return true;
}

void AddressSanitizerModule::initializeCallbacks(Module &M) {
  IRBuilder<> IRB(*C);
  // Declare our poisoning and unpoisoning functions.
  AsanPoisonGlobals = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanPoisonGlobalsName, IRB.getVoidTy(), IntptrTy, NULL));
  AsanPoisonGlobals->setLinkage(Function::ExternalLinkage);
  AsanUnpoisonGlobals = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanUnpoisonGlobalsName, IRB.getVoidTy(), NULL));
  AsanUnpoisonGlobals->setLinkage(Function::ExternalLinkage);
  // Declare functions that register/unregister globals.
  AsanRegisterGlobals = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanRegisterGlobalsName, IRB.getVoidTy(),
      IntptrTy, IntptrTy, NULL));
  AsanRegisterGlobals->setLinkage(Function::ExternalLinkage);
  AsanUnregisterGlobals = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanUnregisterGlobalsName,
      IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));
  AsanUnregisterGlobals->setLinkage(Function::ExternalLinkage);
}

// This function replaces all global variables with new variables that have
// trailing redzones. It also creates a function that poisons
// redzones and inserts this function into llvm.global_ctors.
bool AddressSanitizerModule::runOnModule(Module &M) {
  if (!ClGlobals) return false;
  TD = getAnalysisIfAvailable<DataLayout>();
  if (!TD)
    return false;
  BL.reset(new BlackList(BlacklistFile));
  if (BL->isIn(M)) return false;
  C = &(M.getContext());
  int LongSize = TD->getPointerSizeInBits();
  IntptrTy = Type::getIntNTy(*C, LongSize);
  Mapping = getShadowMapping(M, LongSize, ZeroBaseShadow);
  initializeCallbacks(M);
  DynamicallyInitializedGlobals.Init(M);

  SmallVector<GlobalVariable *, 16> GlobalsToChange;

  for (Module::GlobalListType::iterator G = M.global_begin(),
       E = M.global_end(); G != E; ++G) {
    if (ShouldInstrumentGlobal(G))
      GlobalsToChange.push_back(G);
  }

  size_t n = GlobalsToChange.size();
  if (n == 0) return false;

  // A global is described by a structure:
  //   size_t beg;
  //   size_t size;
  //   size_t size_with_redzone;
  //   const char *name;
  //   const char *module_name;
  //   size_t has_dynamic_init;
  // We initialize an array of such structures and pass it to a run-time call.
  StructType *GlobalStructTy = StructType::get(IntptrTy, IntptrTy,
                                               IntptrTy, IntptrTy,
                                               IntptrTy, IntptrTy, NULL);
  SmallVector<Constant *, 16> Initializers(n), DynamicInit;

  Function *CtorFunc = M.getFunction(kAsanModuleCtorName);
  assert(CtorFunc);
  IRBuilder<> IRB(CtorFunc->getEntryBlock().getTerminator());

  bool HasDynamicallyInitializedGlobals = false;

  GlobalVariable *ModuleName = createPrivateGlobalForString(
      M, M.getModuleIdentifier());
  // We shouldn't merge same module names, as this string serves as a unique
  // module ID at run time.
  ModuleName->setUnnamedAddr(false);

  for (size_t i = 0; i < n; i++) {
    static const uint64_t kMaxGlobalRedzone = 1 << 18;
    GlobalVariable *G = GlobalsToChange[i];
    PointerType *PtrTy = cast<PointerType>(G->getType());
    Type *Ty = PtrTy->getElementType();
    uint64_t SizeInBytes = TD->getTypeAllocSize(Ty);
    uint64_t MinRZ = RedzoneSize();
    // Choose RZ such that MinRZ <= RZ <= kMaxGlobalRedzone,
    // trying to make RZ ~ 1/4 of SizeInBytes.
    uint64_t RZ = std::max(MinRZ,
                           std::min(kMaxGlobalRedzone,
                                    (SizeInBytes / MinRZ / 4) * MinRZ));
    uint64_t RightRedzoneSize = RZ;
    // Round up to MinRZ.
    if (SizeInBytes % MinRZ)
      RightRedzoneSize += MinRZ - (SizeInBytes % MinRZ);
    assert(((RightRedzoneSize + SizeInBytes) % MinRZ) == 0);
    Type *RightRedZoneTy = ArrayType::get(IRB.getInt8Ty(), RightRedzoneSize);
    // Determine whether this global should be poisoned in initialization.
    bool GlobalHasDynamicInitializer =
        DynamicallyInitializedGlobals.Contains(G);
    // Don't check initialization order if this global is blacklisted.
    GlobalHasDynamicInitializer &= !BL->isInInit(*G);

    StructType *NewTy = StructType::get(Ty, RightRedZoneTy, NULL);
    Constant *NewInitializer = ConstantStruct::get(
        NewTy, G->getInitializer(),
        Constant::getNullValue(RightRedZoneTy), NULL);

    GlobalVariable *Name = createPrivateGlobalForString(M, G->getName());

    // Create a new global variable with enough space for a redzone.
    GlobalVariable *NewGlobal = new GlobalVariable(
        M, NewTy, G->isConstant(), G->getLinkage(),
        NewInitializer, "", G, G->getThreadLocalMode());
    NewGlobal->copyAttributesFrom(G);
    NewGlobal->setAlignment(MinRZ);

    Value *Indices2[2];
    Indices2[0] = IRB.getInt32(0);
    Indices2[1] = IRB.getInt32(0);

    G->replaceAllUsesWith(
        ConstantExpr::getGetElementPtr(NewGlobal, Indices2, true));
    NewGlobal->takeName(G);
    G->eraseFromParent();

    Initializers[i] = ConstantStruct::get(
        GlobalStructTy,
        ConstantExpr::getPointerCast(NewGlobal, IntptrTy),
        ConstantInt::get(IntptrTy, SizeInBytes),
        ConstantInt::get(IntptrTy, SizeInBytes + RightRedzoneSize),
        ConstantExpr::getPointerCast(Name, IntptrTy),
        ConstantExpr::getPointerCast(ModuleName, IntptrTy),
        ConstantInt::get(IntptrTy, GlobalHasDynamicInitializer),
        NULL);

    // Remember whether this TU declared any dynamically initialized globals,
    // so we know whether to emit the init-order poisoning calls below.
    if (CheckInitOrder && GlobalHasDynamicInitializer)
      HasDynamicallyInitializedGlobals = true;

    DEBUG(dbgs() << "NEW GLOBAL: " << *NewGlobal << "\n");
  }

  ArrayType *ArrayOfGlobalStructTy = ArrayType::get(GlobalStructTy, n);
  GlobalVariable *AllGlobals = new GlobalVariable(
      M, ArrayOfGlobalStructTy, false, GlobalVariable::PrivateLinkage,
      ConstantArray::get(ArrayOfGlobalStructTy, Initializers), "");

  // Create calls for poisoning before initializers run and unpoisoning after.
  if (CheckInitOrder && HasDynamicallyInitializedGlobals)
    createInitializerPoisonCalls(M, ModuleName);
  IRB.CreateCall2(AsanRegisterGlobals,
                  IRB.CreatePointerCast(AllGlobals, IntptrTy),
                  ConstantInt::get(IntptrTy, n));

  // We also need to unregister globals at the end, e.g., when a shared library
  // gets closed.
  Function *AsanDtorFunction = Function::Create(
      FunctionType::get(Type::getVoidTy(*C), false),
      GlobalValue::InternalLinkage, kAsanModuleDtorName, &M);
  BasicBlock *AsanDtorBB = BasicBlock::Create(*C, "", AsanDtorFunction);
  IRBuilder<> IRB_Dtor(ReturnInst::Create(*C, AsanDtorBB));
  IRB_Dtor.CreateCall2(AsanUnregisterGlobals,
                       IRB.CreatePointerCast(AllGlobals, IntptrTy),
                       ConstantInt::get(IntptrTy, n));
  appendToGlobalDtors(M, AsanDtorFunction, kAsanCtorAndCtorPriority);

  DEBUG(dbgs() << M);
  return true;
}

void AddressSanitizer::initializeCallbacks(Module &M) {
  IRBuilder<> IRB(*C);
  // Create __asan_report* callbacks.
  for (size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
    for (size_t AccessSizeIndex = 0; AccessSizeIndex < kNumberOfAccessSizes;
         AccessSizeIndex++) {
      // IsWrite and TypeSize are encoded in the function name.
      std::string FunctionName = std::string(kAsanReportErrorTemplate) +
          (AccessIsWrite ? "store" : "load") + itostr(1 << AccessSizeIndex);
      // If we are merging crash callbacks, they have two parameters.
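      // For example, AccessIsWrite == 0 and AccessSizeIndex == 2 yield
      // "__asan_report_load4"; the full set generated here is
      // __asan_report_{load,store}{1,2,4,8,16}.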
      AsanErrorCallback[AccessIsWrite][AccessSizeIndex] =
          checkInterfaceFunction(M.getOrInsertFunction(
              FunctionName, IRB.getVoidTy(), IntptrTy, NULL));
    }
  }
  AsanErrorCallbackSized[0] = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanReportLoadN, IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));
  AsanErrorCallbackSized[1] = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanReportStoreN, IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));

  AsanHandleNoReturnFunc = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanHandleNoReturnName, IRB.getVoidTy(), NULL));
  // We insert an empty inline asm after __asan_report* to avoid callback
  // merge.
  EmptyAsm = InlineAsm::get(FunctionType::get(IRB.getVoidTy(), false),
                            StringRef(""), StringRef(""),
                            /*hasSideEffects=*/true);
}

void AddressSanitizer::emitShadowMapping(Module &M, IRBuilder<> &IRB) const {
  // Tell the run-time the current values of the mapping offset and scale.
  GlobalValue *asan_mapping_offset =
      new GlobalVariable(M, IntptrTy, true, GlobalValue::LinkOnceODRLinkage,
                         ConstantInt::get(IntptrTy, Mapping.Offset),
                         kAsanMappingOffsetName);
  // Read the global, otherwise it may be optimized away.
  IRB.CreateLoad(asan_mapping_offset, true);

  GlobalValue *asan_mapping_scale =
      new GlobalVariable(M, IntptrTy, true, GlobalValue::LinkOnceODRLinkage,
                         ConstantInt::get(IntptrTy, Mapping.Scale),
                         kAsanMappingScaleName);
  // Read the global, otherwise it may be optimized away.
  IRB.CreateLoad(asan_mapping_scale, true);
}

// virtual
bool AddressSanitizer::doInitialization(Module &M) {
  // Initialize the private fields. No one has accessed them before.
  TD = getAnalysisIfAvailable<DataLayout>();

  if (!TD)
    return false;
  BL.reset(new BlackList(BlacklistFile));
  DynamicallyInitializedGlobals.Init(M);

  C = &(M.getContext());
  LongSize = TD->getPointerSizeInBits();
  IntptrTy = Type::getIntNTy(*C, LongSize);

  AsanCtorFunction = Function::Create(
      FunctionType::get(Type::getVoidTy(*C), false),
      GlobalValue::InternalLinkage, kAsanModuleCtorName, &M);
  BasicBlock *AsanCtorBB = BasicBlock::Create(*C, "", AsanCtorFunction);
  // Call __asan_init in the module ctor.
  IRBuilder<> IRB(ReturnInst::Create(*C, AsanCtorBB));
  AsanInitFunction = checkInterfaceFunction(
      M.getOrInsertFunction(kAsanInitName, IRB.getVoidTy(), NULL));
  AsanInitFunction->setLinkage(Function::ExternalLinkage);
  IRB.CreateCall(AsanInitFunction);

  Mapping = getShadowMapping(M, LongSize, ZeroBaseShadow);
  emitShadowMapping(M, IRB);

  appendToGlobalCtors(M, AsanCtorFunction, kAsanCtorAndCtorPriority);
  return true;
}

bool AddressSanitizer::maybeInsertAsanInitAtFunctionEntry(Function &F) {
  // For each NSObject descendant having a +load method, this method is
  // invoked by the ObjC runtime before any of the static constructors is
  // called. Therefore we need to instrument such methods with a call to
  // __asan_init at the beginning in order to initialize our runtime before
  // any access to the shadow memory.
  // We cannot just ignore these methods, because they may call other
  // instrumented functions.
  if (F.getName().find(" load]") != std::string::npos) {
    IRBuilder<> IRB(F.begin()->begin());
    IRB.CreateCall(AsanInitFunction);
    return true;
  }
  return false;
}

bool AddressSanitizer::runOnFunction(Function &F) {
  if (BL->isIn(F)) return false;
  if (&F == AsanCtorFunction) return false;
  if (F.getLinkage() == GlobalValue::AvailableExternallyLinkage) return false;
  DEBUG(dbgs() << "ASAN instrumenting:\n" << F << "\n");
  initializeCallbacks(*F.getParent());

  // If needed, insert __asan_init before checking for SanitizeAddress attr.
  maybeInsertAsanInitAtFunctionEntry(F);

  if (!F.getAttributes().hasAttribute(AttributeSet::FunctionIndex,
                                      Attribute::SanitizeAddress))
    return false;

  if (!ClDebugFunc.empty() && ClDebugFunc != F.getName())
    return false;

  // We want to instrument every address only once per basic block (unless
  // there are calls between uses).
  SmallSet<Value*, 16> TempsToInstrument;
  SmallVector<Instruction*, 16> ToInstrument;
  SmallVector<Instruction*, 8> NoReturnCalls;
  bool IsWrite;

  // Fill the set of memory operations to instrument.
  for (Function::iterator FI = F.begin(), FE = F.end();
       FI != FE; ++FI) {
    TempsToInstrument.clear();
    int NumInsnsPerBB = 0;
    for (BasicBlock::iterator BI = FI->begin(), BE = FI->end();
         BI != BE; ++BI) {
      if (LooksLikeCodeInBug11395(BI)) return false;
      if (Value *Addr = isInterestingMemoryAccess(BI, &IsWrite)) {
        if (ClOpt && ClOptSameTemp) {
          if (!TempsToInstrument.insert(Addr))
            continue;  // We've seen this temp in the current BB.
        }
      } else if (isa<MemIntrinsic>(BI) && ClMemIntrin) {
        // ok, take it.
      } else {
        CallSite CS(BI);
        if (CS) {
          // A call inside BB.
          TempsToInstrument.clear();
          if (CS.doesNotReturn())
            NoReturnCalls.push_back(CS.getInstruction());
        }
        continue;
      }
      ToInstrument.push_back(BI);
      NumInsnsPerBB++;
      if (NumInsnsPerBB >= ClMaxInsnsToInstrumentPerBB)
        break;
    }
  }

  // Instrument.
  int NumInstrumented = 0;
  for (size_t i = 0, n = ToInstrument.size(); i != n; i++) {
    Instruction *Inst = ToInstrument[i];
    if (ClDebugMin < 0 || ClDebugMax < 0 ||
        (NumInstrumented >= ClDebugMin && NumInstrumented <= ClDebugMax)) {
      if (isInterestingMemoryAccess(Inst, &IsWrite))
        instrumentMop(Inst);
      else
        instrumentMemIntrinsic(cast<MemIntrinsic>(Inst));
    }
    NumInstrumented++;
  }

  FunctionStackPoisoner FSP(F, *this);
  bool ChangedStack = FSP.runOnFunction();

  // We must unpoison the stack before every NoReturn call (throw, _exit, etc).
  // See e.g.
  // http://code.google.com/p/address-sanitizer/issues/detail?id=37
  for (size_t i = 0, n = NoReturnCalls.size(); i != n; i++) {
    Instruction *CI = NoReturnCalls[i];
    IRBuilder<> IRB(CI);
    IRB.CreateCall(AsanHandleNoReturnFunc);
  }
  DEBUG(dbgs() << "ASAN done instrumenting:\n" << F << "\n");

  return NumInstrumented > 0 || ChangedStack || !NoReturnCalls.empty();
}

static uint64_t ValueForPoison(uint64_t PoisonByte, size_t ShadowRedzoneSize) {
  if (ShadowRedzoneSize == 1) return PoisonByte;
  if (ShadowRedzoneSize == 2) return (PoisonByte << 8) + PoisonByte;
  if (ShadowRedzoneSize == 4)
    return (PoisonByte << 24) + (PoisonByte << 16) +
           (PoisonByte << 8) + (PoisonByte);
  llvm_unreachable("ShadowRedzoneSize is either 1, 2 or 4");
}

static void PoisonShadowPartialRightRedzone(uint8_t *Shadow,
                                            size_t Size,
                                            size_t RZSize,
                                            size_t ShadowGranularity,
                                            uint8_t Magic) {
  for (size_t i = 0; i < RZSize;
       i += ShadowGranularity, Shadow++) {
    if (i + ShadowGranularity <= Size) {
      *Shadow = 0;  // fully addressable
    } else if (i >= Size) {
      *Shadow = Magic;  // unaddressable
    } else {
      *Shadow = Size - i;  // first Size-i bytes are addressable
    }
  }
}

// Workaround for bug 11395: we don't want to instrument the stack in
// functions with large assembly blobs (32-bit only), otherwise reg alloc may
// crash.
// FIXME: remove once bug 11395 is fixed.
bool AddressSanitizer::LooksLikeCodeInBug11395(Instruction *I) {
  if (LongSize != 32) return false;
  CallInst *CI = dyn_cast<CallInst>(I);
  if (!CI || !CI->isInlineAsm()) return false;
  if (CI->getNumArgOperands() <= 5) return false;
  // We have inline assembly with quite a few arguments.
  return true;
}

void FunctionStackPoisoner::initializeCallbacks(Module &M) {
  IRBuilder<> IRB(*C);
  AsanStackMallocFunc = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanStackMallocName, IntptrTy, IntptrTy, IntptrTy, NULL));
  AsanStackFreeFunc = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanStackFreeName, IRB.getVoidTy(),
      IntptrTy, IntptrTy, IntptrTy, NULL));
  AsanPoisonStackMemoryFunc = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanPoisonStackMemoryName, IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));
  AsanUnpoisonStackMemoryFunc = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanUnpoisonStackMemoryName, IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));
}

void FunctionStackPoisoner::poisonRedZones(
    const ArrayRef<AllocaInst*> &AllocaVec, IRBuilder<> IRB, Value *ShadowBase,
    bool DoPoison) {
  size_t ShadowRZSize = RedzoneSize() >> Mapping.Scale;
  assert(ShadowRZSize >= 1 && ShadowRZSize <= 4);
  Type *RZTy = Type::getIntNTy(*C, ShadowRZSize * 8);
  Type *RZPtrTy = PointerType::get(RZTy, 0);

  Value *PoisonLeft = ConstantInt::get(RZTy,
      ValueForPoison(DoPoison ? kAsanStackLeftRedzoneMagic : 0LL, ShadowRZSize));
  Value *PoisonMid = ConstantInt::get(RZTy,
      ValueForPoison(DoPoison ? kAsanStackMidRedzoneMagic : 0LL, ShadowRZSize));
  Value *PoisonRight = ConstantInt::get(RZTy,
      ValueForPoison(DoPoison ? kAsanStackRightRedzoneMagic : 0LL, ShadowRZSize));

  // Poison the first red zone.
  IRB.CreateStore(PoisonLeft, IRB.CreateIntToPtr(ShadowBase, RZPtrTy));

  // Poison all other red zones.
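  // A sketch of the frame layout being described here (exact offsets depend
  // on RedzoneSize() and the aligned alloca sizes):
  //   [Left RZ][Alloca1][Mid RZ][Alloca2] ... [AllocaN][Right RZ]
  // Pos tracks the byte offset within the frame; a partial right redzone
  // covers the unused tail of an alloca whose size is not a multiple of
  // RedzoneSize().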
  uint64_t Pos = RedzoneSize();
  for (size_t i = 0, n = AllocaVec.size(); i < n; i++) {
    AllocaInst *AI = AllocaVec[i];
    uint64_t SizeInBytes = getAllocaSizeInBytes(AI);
    uint64_t AlignedSize = getAlignedAllocaSize(AI);
    assert(AlignedSize - SizeInBytes < RedzoneSize());
    Value *Ptr = NULL;

    Pos += AlignedSize;

    assert(ShadowBase->getType() == IntptrTy);
    if (SizeInBytes < AlignedSize) {
      // Poison the partial redzone at right.
      Ptr = IRB.CreateAdd(
          ShadowBase, ConstantInt::get(IntptrTy,
                                       (Pos >> Mapping.Scale) - ShadowRZSize));
      size_t AddressableBytes = RedzoneSize() - (AlignedSize - SizeInBytes);
      uint32_t Poison = 0;
      if (DoPoison) {
        PoisonShadowPartialRightRedzone((uint8_t*)&Poison, AddressableBytes,
                                        RedzoneSize(),
                                        1ULL << Mapping.Scale,
                                        kAsanStackPartialRedzoneMagic);
        Poison =
            ASan.TD->isLittleEndian()
                ? support::endian::byte_swap<uint32_t, support::little>(Poison)
                : support::endian::byte_swap<uint32_t, support::big>(Poison);
      }
      Value *PartialPoison = ConstantInt::get(RZTy, Poison);
      IRB.CreateStore(PartialPoison, IRB.CreateIntToPtr(Ptr, RZPtrTy));
    }

    // Poison the full redzone at right.
    Ptr = IRB.CreateAdd(ShadowBase,
                        ConstantInt::get(IntptrTy, Pos >> Mapping.Scale));
    bool LastAlloca = (i == AllocaVec.size() - 1);
    Value *Poison = LastAlloca ? PoisonRight : PoisonMid;
    IRB.CreateStore(Poison, IRB.CreateIntToPtr(Ptr, RZPtrTy));

    Pos += RedzoneSize();
  }
}

void FunctionStackPoisoner::poisonStack() {
  uint64_t LocalStackSize = TotalStackSize +
                            (AllocaVec.size() + 1) * RedzoneSize();

  bool DoStackMalloc = ASan.CheckUseAfterReturn
      && LocalStackSize <= kMaxStackMallocSize;

  assert(AllocaVec.size() > 0);
  Instruction *InsBefore = AllocaVec[0];
  IRBuilder<> IRB(InsBefore);

  Type *ByteArrayTy = ArrayType::get(IRB.getInt8Ty(), LocalStackSize);
  AllocaInst *MyAlloca =
      new AllocaInst(ByteArrayTy, "MyAlloca", InsBefore);
  if (ClRealignStack && StackAlignment < RedzoneSize())
    StackAlignment = RedzoneSize();
  MyAlloca->setAlignment(StackAlignment);
  assert(MyAlloca->isStaticAlloca());
  Value *OrigStackBase = IRB.CreatePointerCast(MyAlloca, IntptrTy);
  Value *LocalStackBase = OrigStackBase;

  if (DoStackMalloc) {
    LocalStackBase = IRB.CreateCall2(AsanStackMallocFunc,
        ConstantInt::get(IntptrTy, LocalStackSize), OrigStackBase);
  }

  // This string will be parsed by the run-time (DescribeAddressIfStack).
  SmallString<2048> StackDescriptionStorage;
  raw_svector_ostream StackDescription(StackDescriptionStorage);
  StackDescription << AllocaVec.size() << " ";

  // Insert poison calls for lifetime intrinsics for allocas.
  bool HavePoisonedAllocas = false;
  for (size_t i = 0, n = AllocaPoisonCallVec.size(); i < n; i++) {
    const AllocaPoisonCall &APC = AllocaPoisonCallVec[i];
    IntrinsicInst *II = APC.InsBefore;
    AllocaInst *AI = findAllocaForValue(II->getArgOperand(1));
    assert(AI);
    IRBuilder<> IRB(II);
    poisonAlloca(AI, APC.Size, IRB, APC.DoPoison);
    HavePoisonedAllocas |= APC.DoPoison;
  }

  uint64_t Pos = RedzoneSize();
  // Replace Alloca instructions with base+offset.
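  // For example, with 32-byte redzones the first alloca is rebased to
  // LocalStackBase + 32, the second to LocalStackBase + 32 + AlignedSize(A1)
  // + 32, and so on (illustrative offsets; the actual step is
  // AlignedSize + RedzoneSize()).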
  for (size_t i = 0, n = AllocaVec.size(); i < n; i++) {
    AllocaInst *AI = AllocaVec[i];
    uint64_t SizeInBytes = getAllocaSizeInBytes(AI);
    StringRef Name = AI->getName();
    StackDescription << Pos << " " << SizeInBytes << " "
                     << Name.size() << " " << Name << " ";
    uint64_t AlignedSize = getAlignedAllocaSize(AI);
    assert((AlignedSize % RedzoneSize()) == 0);
    Value *NewAllocaPtr = IRB.CreateIntToPtr(
        IRB.CreateAdd(LocalStackBase, ConstantInt::get(IntptrTy, Pos)),
        AI->getType());
    replaceDbgDeclareForAlloca(AI, NewAllocaPtr, DIB);
    AI->replaceAllUsesWith(NewAllocaPtr);
    Pos += AlignedSize + RedzoneSize();
  }
  assert(Pos == LocalStackSize);

  // The left-most redzone has enough space for at least 4 pointers.
  // Write the Magic value to redzone[0].
  Value *BasePlus0 = IRB.CreateIntToPtr(LocalStackBase, IntptrPtrTy);
  IRB.CreateStore(ConstantInt::get(IntptrTy, kCurrentStackFrameMagic),
                  BasePlus0);
  // Write the frame description constant to redzone[1].
  Value *BasePlus1 = IRB.CreateIntToPtr(
      IRB.CreateAdd(LocalStackBase, ConstantInt::get(IntptrTy, ASan.LongSize/8)),
      IntptrPtrTy);
  GlobalVariable *StackDescriptionGlobal =
      createPrivateGlobalForString(*F.getParent(), StackDescription.str());
  Value *Description = IRB.CreatePointerCast(StackDescriptionGlobal,
                                             IntptrTy);
  IRB.CreateStore(Description, BasePlus1);
  // Write the PC to redzone[2].
  Value *BasePlus2 = IRB.CreateIntToPtr(
      IRB.CreateAdd(LocalStackBase, ConstantInt::get(IntptrTy,
                                                     2 * ASan.LongSize/8)),
      IntptrPtrTy);
  IRB.CreateStore(IRB.CreatePointerCast(&F, IntptrTy), BasePlus2);

  // Poison the stack redzones at the entry.
  Value *ShadowBase = ASan.memToShadow(LocalStackBase, IRB);
  poisonRedZones(AllocaVec, IRB, ShadowBase, true);

  // Unpoison the stack before all ret instructions.
  for (size_t i = 0, n = RetVec.size(); i < n; i++) {
    Instruction *Ret = RetVec[i];
    IRBuilder<> IRBRet(Ret);
    // Mark the current frame as retired.
    IRBRet.CreateStore(ConstantInt::get(IntptrTy, kRetiredStackFrameMagic),
                       BasePlus0);
    // Unpoison the stack.
    poisonRedZones(AllocaVec, IRBRet, ShadowBase, false);
    if (DoStackMalloc) {
      // In use-after-return mode, mark the whole stack frame unaddressable.
      IRBRet.CreateCall3(AsanStackFreeFunc, LocalStackBase,
                         ConstantInt::get(IntptrTy, LocalStackSize),
                         OrigStackBase);
    } else if (HavePoisonedAllocas) {
      // If we poisoned some allocas in llvm.lifetime analysis,
      // unpoison the whole stack frame now.
      assert(LocalStackBase == OrigStackBase);
      poisonAlloca(LocalStackBase, LocalStackSize, IRBRet, false);
    }
  }

  // We are done. Remove the old unused alloca instructions.
  for (size_t i = 0, n = AllocaVec.size(); i < n; i++)
    AllocaVec[i]->eraseFromParent();
}

void FunctionStackPoisoner::poisonAlloca(Value *V, uint64_t Size,
                                         IRBuilder<> IRB, bool DoPoison) {
  // For now just insert the call to the ASan runtime.
  Value *AddrArg = IRB.CreatePointerCast(V, IntptrTy);
  Value *SizeArg = ConstantInt::get(IntptrTy, Size);
  IRB.CreateCall2(DoPoison ? AsanPoisonStackMemoryFunc
                           : AsanUnpoisonStackMemoryFunc,
                  AddrArg, SizeArg);
}

// Handling llvm.lifetime intrinsics for a given %alloca:
// (1) collect all llvm.lifetime.xxx(%size, %value) describing the alloca.
// (2) if %size is constant, poison memory for llvm.lifetime.end (to detect
//     invalid accesses) and unpoison it for llvm.lifetime.start (the memory
//     could be poisoned by a previous llvm.lifetime.end instruction, as the
//     variable may go in and out of scope several times, e.g. in loops).
// (3) if we poisoned at least one %alloca in a function,
//     unpoison the whole stack frame at function exit.

AllocaInst *FunctionStackPoisoner::findAllocaForValue(Value *V) {
  if (AllocaInst *AI = dyn_cast<AllocaInst>(V))
    // We're interested only in allocas we can handle.
    return isInterestingAlloca(*AI) ? AI : 0;
  // See if we've already calculated (or started to calculate) the alloca for
  // a given value.
  AllocaForValueMapTy::iterator I = AllocaForValue.find(V);
  if (I != AllocaForValue.end())
    return I->second;
  // Store 0 while we're calculating the alloca for value V to avoid
  // infinite recursion if the value references itself.
  AllocaForValue[V] = 0;
  AllocaInst *Res = 0;
  if (CastInst *CI = dyn_cast<CastInst>(V))
    Res = findAllocaForValue(CI->getOperand(0));
  else if (PHINode *PN = dyn_cast<PHINode>(V)) {
    for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
      Value *IncValue = PN->getIncomingValue(i);
      // Allow self-referencing phi-nodes.
      if (IncValue == PN) continue;
      AllocaInst *IncValueAI = findAllocaForValue(IncValue);
      // AIs for incoming values should exist and should all be equal.
      if (IncValueAI == 0 || (Res != 0 && IncValueAI != Res))
        return 0;
      Res = IncValueAI;
    }
  }
  if (Res != 0)
    AllocaForValue[V] = Res;
  return Res;
}
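// Usage sketch (hypothetical driver code, not part of this revision): a
// front end would schedule both passes roughly like this, relying on the
// factory functions defined above; the DataLayout pass must be available,
// since both doInitialization and runOnModule bail out without it.
//
//   PassManager PM;
//   PM.add(new DataLayout(&M));                    // required by both passes
//   PM.add(createAddressSanitizerFunctionPass());  // per-function checks
//   PM.add(createAddressSanitizerModulePass());    // global redzones
//   PM.run(M);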