AddressSanitizer.cpp revision d6f62c8da5aa4f3388cec1542309ffa623cac601
//===-- AddressSanitizer.cpp - memory error detector ------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of AddressSanitizer, an address sanity checker.
// Details of the algorithm:
//  http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "asan"

#include "BlackList.h"
#include "llvm/Function.h"
#include "llvm/IRBuilder.h"
#include "llvm/InlineAsm.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/LLVMContext.h"
#include "llvm/Module.h"
#include "llvm/Type.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/OwningPtr.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/Triple.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/DataTypes.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Support/system_error.h"
#include "llvm/DataLayout.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Transforms/Instrumentation.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"

#include <string>
#include <algorithm>

using namespace llvm;

static const uint64_t kDefaultShadowScale = 3;
static const uint64_t kDefaultShadowOffset32 = 1ULL << 29;
static const uint64_t kDefaultShadowOffset64 = 1ULL << 44;
static const uint64_t kDefaultShadowOffsetAndroid = 0;

static const size_t kMaxStackMallocSize = 1 << 16;  // 64K
static const uintptr_t kCurrentStackFrameMagic = 0x41B58AB3;
static const uintptr_t kRetiredStackFrameMagic = 0x45E0360E;

static const char *kAsanModuleCtorName = "asan.module_ctor";
static const char *kAsanModuleDtorName = "asan.module_dtor";
static const int kAsanCtorAndCtorPriority = 1;
static const char *kAsanReportErrorTemplate = "__asan_report_";
static const char *kAsanRegisterGlobalsName = "__asan_register_globals";
static const char *kAsanUnregisterGlobalsName = "__asan_unregister_globals";
static const char *kAsanPoisonGlobalsName = "__asan_before_dynamic_init";
static const char *kAsanUnpoisonGlobalsName = "__asan_after_dynamic_init";
static const char *kAsanInitName = "__asan_init";
static const char *kAsanHandleNoReturnName = "__asan_handle_no_return";
static const char *kAsanMappingOffsetName = "__asan_mapping_offset";
static const char *kAsanMappingScaleName = "__asan_mapping_scale";
static const char *kAsanStackMallocName = "__asan_stack_malloc";
static const char *kAsanStackFreeName = "__asan_stack_free";
static const char *kAsanGenPrefix = "__asan_gen_";

static const int kAsanStackLeftRedzoneMagic = 0xf1;
static const int kAsanStackMidRedzoneMagic = 0xf2;
static const int kAsanStackRightRedzoneMagic = 0xf3;
static const int kAsanStackPartialRedzoneMagic = 0xf4;

// Access sizes are powers of two: 1, 2, 4, 8, 16.
static const size_t kNumberOfAccessSizes = 5;

// Command-line flags.

// This flag may need to be replaced with -f[no-]asan-reads.
static cl::opt<bool> ClInstrumentReads("asan-instrument-reads",
       cl::desc("instrument read instructions"), cl::Hidden, cl::init(true));
static cl::opt<bool> ClInstrumentWrites("asan-instrument-writes",
       cl::desc("instrument write instructions"), cl::Hidden, cl::init(true));
static cl::opt<bool> ClInstrumentAtomics("asan-instrument-atomics",
       cl::desc("instrument atomic instructions (rmw, cmpxchg)"),
       cl::Hidden, cl::init(true));
static cl::opt<bool> ClAlwaysSlowPath("asan-always-slow-path",
       cl::desc("use instrumentation with slow path for all accesses"),
       cl::Hidden, cl::init(false));
// This flag limits the number of instructions to be instrumented
// in any given BB. Normally, this should be set to unlimited (INT_MAX),
// but due to http://llvm.org/bugs/show_bug.cgi?id=12652 we temporarily
// set it to 10000.
static cl::opt<int> ClMaxInsnsToInstrumentPerBB("asan-max-ins-per-bb",
       cl::init(10000),
       cl::desc("maximal number of instructions to instrument in any given BB"),
       cl::Hidden);
// This flag may need to be replaced with -f[no]asan-stack.
static cl::opt<bool> ClStack("asan-stack",
       cl::desc("Handle stack memory"), cl::Hidden, cl::init(true));
// This flag may need to be replaced with -f[no]asan-use-after-return.
static cl::opt<bool> ClUseAfterReturn("asan-use-after-return",
       cl::desc("Check return-after-free"), cl::Hidden, cl::init(false));
// This flag may need to be replaced with -f[no]asan-globals.
static cl::opt<bool> ClGlobals("asan-globals",
       cl::desc("Handle global objects"), cl::Hidden, cl::init(true));
static cl::opt<bool> ClInitializers("asan-initialization-order",
       cl::desc("Handle C++ initializer order"), cl::Hidden, cl::init(false));
static cl::opt<bool> ClMemIntrin("asan-memintrin",
       cl::desc("Handle memset/memcpy/memmove"), cl::Hidden, cl::init(true));
// This flag may need to be replaced with -fasan-blacklist.
static cl::opt<std::string> ClBlackListFile("asan-blacklist",
       cl::desc("File containing the list of functions to ignore "
                "during instrumentation"), cl::Hidden);

// These flags allow changing the shadow mapping.
// The shadow mapping looks like
//    Shadow = (Mem >> scale) + (1 << offset_log)
static cl::opt<int> ClMappingScale("asan-mapping-scale",
       cl::desc("scale of asan shadow mapping"), cl::Hidden, cl::init(0));
static cl::opt<int> ClMappingOffsetLog("asan-mapping-offset-log",
       cl::desc("offset of asan shadow mapping"), cl::Hidden, cl::init(-1));

// Optimization flags. Not user visible, used mostly for testing
// and benchmarking the tool.
static cl::opt<bool> ClOpt("asan-opt",
       cl::desc("Optimize instrumentation"), cl::Hidden, cl::init(true));
static cl::opt<bool> ClOptSameTemp("asan-opt-same-temp",
       cl::desc("Instrument the same temp just once"), cl::Hidden,
       cl::init(true));
static cl::opt<bool> ClOptGlobals("asan-opt-globals",
       cl::desc("Don't instrument scalar globals"), cl::Hidden, cl::init(true));

static cl::opt<bool> ClCheckLifetime("asan-check-lifetime",
       cl::desc("Use llvm.lifetime intrinsics to insert extra checks"),
       cl::Hidden, cl::init(false));
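// Editor's note (not part of the original file): these cl::opt flags are
// ordinary LLVM command-line options, so under the usual assumptions they can
// be set when the passes are run directly through `opt` (pass names "asan" and
// "asan-module", registered below), or forwarded from a compiler driver via
// -mllvm. Note that runOnFunction() only instruments functions carrying the
// AddressSafety attribute, which a front end normally sets; the exact driver
// flag spelling depends on the clang version and is an assumption here:
//
//   opt -asan -asan-module -asan-initialization-order \
//       -asan-blacklist=bl.txt in.ll -S -o out.ll
//   clang -faddress-sanitizer -mllvm -asan-stack=0 test.c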
// Debug flags.
static cl::opt<int> ClDebug("asan-debug", cl::desc("debug"), cl::Hidden,
                            cl::init(0));
static cl::opt<int> ClDebugStack("asan-debug-stack", cl::desc("debug stack"),
                                 cl::Hidden, cl::init(0));
static cl::opt<std::string> ClDebugFunc("asan-debug-func",
                                        cl::Hidden, cl::desc("Debug func"));
static cl::opt<int> ClDebugMin("asan-debug-min", cl::desc("Debug min inst"),
                               cl::Hidden, cl::init(-1));
static cl::opt<int> ClDebugMax("asan-debug-max", cl::desc("Debug max inst"),
                               cl::Hidden, cl::init(-1));

namespace {
/// A set of dynamically initialized globals extracted from metadata.
class SetOfDynamicallyInitializedGlobals {
 public:
  void Init(Module& M) {
    // Clang generates metadata identifying all dynamically initialized globals.
    NamedMDNode *DynamicGlobals =
        M.getNamedMetadata("llvm.asan.dynamically_initialized_globals");
    if (!DynamicGlobals)
      return;
    for (int i = 0, n = DynamicGlobals->getNumOperands(); i < n; ++i) {
      MDNode *MDN = DynamicGlobals->getOperand(i);
      assert(MDN->getNumOperands() == 1);
      Value *VG = MDN->getOperand(0);
      // The optimizer may optimize away a global entirely, in which case we
      // cannot instrument access to it.
      if (!VG)
        continue;
      DynInitGlobals.insert(cast<GlobalVariable>(VG));
    }
  }
  bool Contains(GlobalVariable *G) { return DynInitGlobals.count(G) != 0; }
 private:
  SmallSet<GlobalValue*, 32> DynInitGlobals;
};

static int MappingScale() {
  return ClMappingScale ? ClMappingScale : kDefaultShadowScale;
}

static size_t RedzoneSize() {
  // Redzone used for stack and globals is at least 32 bytes.
  // For scales 6 and 7, the redzone has to be 64 and 128 bytes respectively.
  return std::max(32U, 1U << MappingScale());
}
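// Editor's illustration of the default shadow mapping (a sketch, kept under
// #if 0 so it does not affect the build): with kDefaultShadowScale == 3 and
// kDefaultShadowOffset64 == 1ULL << 44, every 8 bytes of application memory
// map to one shadow byte. The pass emits an OR in memToShadow(), which is
// equivalent to the ADD below because the offset is a single bit above the
// address range.
#if 0
static uint64_t ExampleMemToShadow64(uint64_t Addr) {
  return (Addr >> kDefaultShadowScale) + kDefaultShadowOffset64;
}
// E.g. Addr == 0x100000000 -> (0x100000000 >> 3) + (1ULL << 44)
//                          == 0x20000000 + 0x100000000000 == 0x100020000000.
#endif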
/// AddressSanitizer: instrument the code in module to find memory bugs.
struct AddressSanitizer : public FunctionPass {
  AddressSanitizer(bool CheckInitOrder = false,
                   bool CheckUseAfterReturn = false,
                   bool CheckLifetime = false)
      : FunctionPass(ID),
        CheckInitOrder(CheckInitOrder || ClInitializers),
        CheckUseAfterReturn(CheckUseAfterReturn || ClUseAfterReturn),
        CheckLifetime(CheckLifetime || ClCheckLifetime) {}
  virtual const char *getPassName() const {
    return "AddressSanitizerFunctionPass";
  }
  void instrumentMop(Instruction *I);
  void instrumentAddress(Instruction *OrigIns, IRBuilder<> &IRB,
                         Value *Addr, uint32_t TypeSize, bool IsWrite);
  Value *createSlowPathCmp(IRBuilder<> &IRB, Value *AddrLong,
                           Value *ShadowValue, uint32_t TypeSize);
  Instruction *generateCrashCode(Instruction *InsertBefore, Value *Addr,
                                 bool IsWrite, size_t AccessSizeIndex);
  bool instrumentMemIntrinsic(MemIntrinsic *MI);
  void instrumentMemIntrinsicParam(Instruction *OrigIns, Value *Addr,
                                   Value *Size,
                                   Instruction *InsertBefore, bool IsWrite);
  Value *memToShadow(Value *Shadow, IRBuilder<> &IRB);
  bool runOnFunction(Function &F);
  void createInitializerPoisonCalls(Module &M,
                                    Value *FirstAddr, Value *LastAddr);
  bool maybeInsertAsanInitAtFunctionEntry(Function &F);
  bool poisonStackInFunction(Function &F);
  virtual bool doInitialization(Module &M);
  static char ID;  // Pass identification, replacement for typeid

 private:
  void initializeCallbacks(Module &M);
  uint64_t getAllocaSizeInBytes(AllocaInst *AI) {
    Type *Ty = AI->getAllocatedType();
    uint64_t SizeInBytes = TD->getTypeAllocSize(Ty);
    return SizeInBytes;
  }
  uint64_t getAlignedSize(uint64_t SizeInBytes) {
    size_t RZ = RedzoneSize();
    return ((SizeInBytes + RZ - 1) / RZ) * RZ;
  }
  uint64_t getAlignedAllocaSize(AllocaInst *AI) {
    uint64_t SizeInBytes = getAllocaSizeInBytes(AI);
    return getAlignedSize(SizeInBytes);
  }

  bool ShouldInstrumentGlobal(GlobalVariable *G);
  void PoisonStack(const ArrayRef<AllocaInst*> &AllocaVec, IRBuilder<> IRB,
                   Value *ShadowBase, bool DoPoison);
  bool LooksLikeCodeInBug11395(Instruction *I);
  void FindDynamicInitializers(Module &M);

  bool CheckInitOrder;
  bool CheckUseAfterReturn;
  bool CheckLifetime;
  LLVMContext *C;
  DataLayout *TD;
  uint64_t MappingOffset;
  int LongSize;
  Type *IntptrTy;
  Type *IntptrPtrTy;
  Function *AsanCtorFunction;
  Function *AsanInitFunction;
  Function *AsanStackMallocFunc, *AsanStackFreeFunc;
  Function *AsanHandleNoReturnFunc;
  OwningPtr<BlackList> BL;
  // This array is indexed by AccessIsWrite and log2(AccessSize).
  Function *AsanErrorCallback[2][kNumberOfAccessSizes];
  InlineAsm *EmptyAsm;
  SetOfDynamicallyInitializedGlobals DynamicallyInitializedGlobals;
};

class AddressSanitizerModule : public ModulePass {
 public:
  AddressSanitizerModule(bool CheckInitOrder = false)
      : ModulePass(ID),
        CheckInitOrder(CheckInitOrder || ClInitializers) {}
  bool runOnModule(Module &M);
  static char ID;  // Pass identification, replacement for typeid
  virtual const char *getPassName() const {
    return "AddressSanitizerModule";
  }
 private:
  bool ShouldInstrumentGlobal(GlobalVariable *G);
  void createInitializerPoisonCalls(Module &M, Value *FirstAddr,
                                    Value *LastAddr);

  bool CheckInitOrder;
  OwningPtr<BlackList> BL;
  SetOfDynamicallyInitializedGlobals DynamicallyInitializedGlobals;
  Type *IntptrTy;
  LLVMContext *C;
  DataLayout *TD;
};

}  // namespace

char AddressSanitizer::ID = 0;
INITIALIZE_PASS(AddressSanitizer, "asan",
    "AddressSanitizer: detects use-after-free and out-of-bounds bugs.",
    false, false)
FunctionPass *llvm::createAddressSanitizerFunctionPass(
    bool CheckInitOrder, bool CheckUseAfterReturn, bool CheckLifetime) {
  return new AddressSanitizer(CheckInitOrder, CheckUseAfterReturn,
                              CheckLifetime);
}

char AddressSanitizerModule::ID = 0;
INITIALIZE_PASS(AddressSanitizerModule, "asan-module",
    "AddressSanitizer: detects use-after-free and out-of-bounds bugs."
    "ModulePass", false, false)
ModulePass *llvm::createAddressSanitizerModulePass(bool CheckInitOrder) {
  return new AddressSanitizerModule(CheckInitOrder);
}

static size_t TypeSizeToSizeIndex(uint32_t TypeSize) {
  size_t Res = CountTrailingZeros_32(TypeSize / 8);
  assert(Res < kNumberOfAccessSizes);
  return Res;
}

// Create a constant for Str so that we can pass it to the run-time lib.
static GlobalVariable *createPrivateGlobalForString(Module &M, StringRef Str) {
  Constant *StrConst = ConstantDataArray::getString(M.getContext(), Str);
  return new GlobalVariable(M, StrConst->getType(), true,
                            GlobalValue::PrivateLinkage, StrConst,
                            kAsanGenPrefix);
}

static bool GlobalWasGeneratedByAsan(GlobalVariable *G) {
  return G->getName().find(kAsanGenPrefix) == 0;
}

Value *AddressSanitizer::memToShadow(Value *Shadow, IRBuilder<> &IRB) {
  // Shadow >> scale
  Shadow = IRB.CreateLShr(Shadow, MappingScale());
  if (MappingOffset == 0)
    return Shadow;
  // (Shadow >> scale) | offset
  return IRB.CreateOr(Shadow, ConstantInt::get(IntptrTy,
                                               MappingOffset));
}
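// Editor's illustration (a sketch, not part of the pass): TypeSizeToSizeIndex
// maps the bit width of an access to the index used for AsanErrorCallback, and
// initializeCallbacks() below turns that index back into a run-time name such
// as "__asan_report_load4" or "__asan_report_store8".
#if 0
static void ExampleReportNames() {
  // TypeSize (bits) -> index -> callback suffix (access size in bytes)
  //        8        ->   0   -> __asan_report_{load,store}1
  //       16        ->   1   -> __asan_report_{load,store}2
  //       32        ->   2   -> __asan_report_{load,store}4
  //       64        ->   3   -> __asan_report_{load,store}8
  //      128        ->   4   -> __asan_report_{load,store}16
  assert(TypeSizeToSizeIndex(32) == 2 && kNumberOfAccessSizes == 5);
}
#endif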
void AddressSanitizer::instrumentMemIntrinsicParam(
    Instruction *OrigIns,
    Value *Addr, Value *Size, Instruction *InsertBefore, bool IsWrite) {
  // Check the first byte.
  {
    IRBuilder<> IRB(InsertBefore);
    instrumentAddress(OrigIns, IRB, Addr, 8, IsWrite);
  }
  // Check the last byte.
  {
    IRBuilder<> IRB(InsertBefore);
    Value *SizeMinusOne = IRB.CreateSub(
        Size, ConstantInt::get(Size->getType(), 1));
    SizeMinusOne = IRB.CreateIntCast(SizeMinusOne, IntptrTy, false);
    Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
    Value *AddrPlusSizeMinusOne = IRB.CreateAdd(AddrLong, SizeMinusOne);
    instrumentAddress(OrigIns, IRB, AddrPlusSizeMinusOne, 8, IsWrite);
  }
}

// Instrument memset/memmove/memcpy.
bool AddressSanitizer::instrumentMemIntrinsic(MemIntrinsic *MI) {
  Value *Dst = MI->getDest();
  MemTransferInst *MemTran = dyn_cast<MemTransferInst>(MI);
  Value *Src = MemTran ? MemTran->getSource() : 0;
  Value *Length = MI->getLength();

  Constant *ConstLength = dyn_cast<Constant>(Length);
  Instruction *InsertBefore = MI;
  if (ConstLength) {
    if (ConstLength->isNullValue()) return false;
  } else {
    // The size is not a constant so it could be zero -- check at run-time.
    IRBuilder<> IRB(InsertBefore);

    Value *Cmp = IRB.CreateICmpNE(Length,
                                  Constant::getNullValue(Length->getType()));
    InsertBefore = SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), false);
  }

  instrumentMemIntrinsicParam(MI, Dst, Length, InsertBefore, true);
  if (Src)
    instrumentMemIntrinsicParam(MI, Src, Length, InsertBefore, false);
  return true;
}

// If I is an interesting memory access, return the PointerOperand
// and set IsWrite. Otherwise return NULL.
static Value *isInterestingMemoryAccess(Instruction *I, bool *IsWrite) {
  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    if (!ClInstrumentReads) return NULL;
    *IsWrite = false;
    return LI->getPointerOperand();
  }
  if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    if (!ClInstrumentWrites) return NULL;
    *IsWrite = true;
    return SI->getPointerOperand();
  }
  if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I)) {
    if (!ClInstrumentAtomics) return NULL;
    *IsWrite = true;
    return RMW->getPointerOperand();
  }
  if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I)) {
    if (!ClInstrumentAtomics) return NULL;
    *IsWrite = true;
    return XCHG->getPointerOperand();
  }
  return NULL;
}

void AddressSanitizer::instrumentMop(Instruction *I) {
  bool IsWrite = false;
  Value *Addr = isInterestingMemoryAccess(I, &IsWrite);
  assert(Addr);
  if (ClOpt && ClOptGlobals) {
    if (GlobalVariable *G = dyn_cast<GlobalVariable>(Addr)) {
      // If initialization order checking is disabled, a simple access to a
      // dynamically initialized global is always valid.
      if (!CheckInitOrder)
        return;
      // If a global variable does not have dynamic initialization we don't
      // have to instrument it. However, if a global does not have an
      // initializer at all, we assume it has a dynamic initializer (in
      // another TU).
      if (G->hasInitializer() && !DynamicallyInitializedGlobals.Contains(G))
        return;
    }
  }

  Type *OrigPtrTy = Addr->getType();
  Type *OrigTy = cast<PointerType>(OrigPtrTy)->getElementType();

  assert(OrigTy->isSized());
  uint32_t TypeSize = TD->getTypeStoreSizeInBits(OrigTy);

  if (TypeSize != 8 && TypeSize != 16 &&
      TypeSize != 32 && TypeSize != 64 && TypeSize != 128) {
    // Ignore all unusual sizes.
    return;
  }

  IRBuilder<> IRB(I);
  instrumentAddress(I, IRB, Addr, TypeSize, IsWrite);
}
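// Editor's illustration (a sketch, not code the pass emits verbatim): for a
// memset/memcpy with a non-constant Length, the instrumentation above is
// roughly equivalent to the following run-time check on each pointer argument.
// ExampleCheckOneByte is a hypothetical stand-in for the one-byte shadow check
// that instrumentAddress() generates.
#if 0
static void ExampleCheckMemRange(uintptr_t Addr, uintptr_t Length,
                                 bool IsWrite) {
  if (Length == 0)
    return;                                        // guarded by the ICmpNE above
  ExampleCheckOneByte(Addr, IsWrite);              // "check the first byte"
  ExampleCheckOneByte(Addr + Length - 1, IsWrite); // "check the last byte"
}
#endif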
// Validate the result of Module::getOrInsertFunction called for an interface
// function of AddressSanitizer. If the instrumented module defines a function
// with the same name, their prototypes must match, otherwise
// getOrInsertFunction returns a bitcast.
static Function *checkInterfaceFunction(Constant *FuncOrBitcast) {
  if (isa<Function>(FuncOrBitcast)) return cast<Function>(FuncOrBitcast);
  FuncOrBitcast->dump();
  report_fatal_error("trying to redefine an AddressSanitizer "
                     "interface function");
}

Instruction *AddressSanitizer::generateCrashCode(
    Instruction *InsertBefore, Value *Addr,
    bool IsWrite, size_t AccessSizeIndex) {
  IRBuilder<> IRB(InsertBefore);
  CallInst *Call = IRB.CreateCall(AsanErrorCallback[IsWrite][AccessSizeIndex],
                                  Addr);
  // We don't do Call->setDoesNotReturn() because the BB already has
  // UnreachableInst at the end.
  // This EmptyAsm is required to avoid callback merge.
  IRB.CreateCall(EmptyAsm);
  return Call;
}

Value *AddressSanitizer::createSlowPathCmp(IRBuilder<> &IRB, Value *AddrLong,
                                           Value *ShadowValue,
                                           uint32_t TypeSize) {
  size_t Granularity = 1 << MappingScale();
  // Addr & (Granularity - 1)
  Value *LastAccessedByte = IRB.CreateAnd(
      AddrLong, ConstantInt::get(IntptrTy, Granularity - 1));
  // (Addr & (Granularity - 1)) + size - 1
  if (TypeSize / 8 > 1)
    LastAccessedByte = IRB.CreateAdd(
        LastAccessedByte, ConstantInt::get(IntptrTy, TypeSize / 8 - 1));
  // (uint8_t) ((Addr & (Granularity-1)) + size - 1)
  LastAccessedByte = IRB.CreateIntCast(
      LastAccessedByte, ShadowValue->getType(), false);
  // ((uint8_t) ((Addr & (Granularity-1)) + size - 1)) >= ShadowValue
  return IRB.CreateICmpSGE(LastAccessedByte, ShadowValue);
}

void AddressSanitizer::instrumentAddress(Instruction *OrigIns,
                                         IRBuilder<> &IRB, Value *Addr,
                                         uint32_t TypeSize, bool IsWrite) {
  Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);

  Type *ShadowTy = IntegerType::get(
      *C, std::max(8U, TypeSize >> MappingScale()));
  Type *ShadowPtrTy = PointerType::get(ShadowTy, 0);
  Value *ShadowPtr = memToShadow(AddrLong, IRB);
  Value *CmpVal = Constant::getNullValue(ShadowTy);
  Value *ShadowValue = IRB.CreateLoad(
      IRB.CreateIntToPtr(ShadowPtr, ShadowPtrTy));

  Value *Cmp = IRB.CreateICmpNE(ShadowValue, CmpVal);
  size_t AccessSizeIndex = TypeSizeToSizeIndex(TypeSize);
  size_t Granularity = 1 << MappingScale();
  TerminatorInst *CrashTerm = 0;

  if (ClAlwaysSlowPath || (TypeSize < 8 * Granularity)) {
    TerminatorInst *CheckTerm =
        SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), false);
    assert(dyn_cast<BranchInst>(CheckTerm)->isUnconditional());
    BasicBlock *NextBB = CheckTerm->getSuccessor(0);
    IRB.SetInsertPoint(CheckTerm);
    Value *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue, TypeSize);
    BasicBlock *CrashBlock =
        BasicBlock::Create(*C, "", NextBB->getParent(), NextBB);
    CrashTerm = new UnreachableInst(*C, CrashBlock);
    BranchInst *NewTerm = BranchInst::Create(CrashBlock, NextBB, Cmp2);
    ReplaceInstWithInst(CheckTerm, NewTerm);
  } else {
    CrashTerm = SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), true);
  }

  Instruction *Crash =
      generateCrashCode(CrashTerm, AddrLong, IsWrite, AccessSizeIndex);
  Crash->setDebugLoc(OrigIns->getDebugLoc());
}
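// Editor's illustration (a sketch, not the IR the pass produces): for an
// N-byte access at Addr with the default 8:1 mapping, instrumentAddress()
// above emits code whose effect is roughly the following. The names
// ExampleMemToShadow64 (sketch above) and __asan_report_error are illustrative
// stand-ins for the real shadow computation and __asan_report_{load,store}N.
#if 0
static void ExampleEmittedCheck(uintptr_t Addr, size_t N, bool IsWrite) {
  int8_t ShadowValue = *(int8_t *)ExampleMemToShadow64(Addr);
  if (ShadowValue != 0) {
    // Slow path (only emitted when N < granularity, or with
    // -asan-always-slow-path): shadow byte k means "the first k bytes of this
    // 8-byte granule are addressable".
    int8_t LastAccessedByte = (int8_t)((Addr & 7) + N - 1);
    if (LastAccessedByte >= ShadowValue)
      __asan_report_error(Addr, IsWrite, N);  // hypothetical stand-in
  }
}
#endif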
void AddressSanitizerModule::createInitializerPoisonCalls(
    Module &M, Value *FirstAddr, Value *LastAddr) {
  // We do all of our poisoning and unpoisoning within _GLOBAL__I_a.
  Function *GlobalInit = M.getFunction("_GLOBAL__I_a");
  // If that function is not present, this TU contains no globals, or they
  // have all been optimized away.
  if (!GlobalInit)
    return;

  // Set up the arguments to our poison/unpoison functions.
  IRBuilder<> IRB(GlobalInit->begin()->getFirstInsertionPt());

  // Declare our poisoning and unpoisoning functions.
  Function *AsanPoisonGlobals = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanPoisonGlobalsName, IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));
  AsanPoisonGlobals->setLinkage(Function::ExternalLinkage);
  Function *AsanUnpoisonGlobals = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanUnpoisonGlobalsName, IRB.getVoidTy(), NULL));
  AsanUnpoisonGlobals->setLinkage(Function::ExternalLinkage);

  // Add a call to poison all external globals before the given function starts.
  IRB.CreateCall2(AsanPoisonGlobals, FirstAddr, LastAddr);

  // Add calls to unpoison all globals before each return instruction.
  for (Function::iterator I = GlobalInit->begin(), E = GlobalInit->end();
       I != E; ++I) {
    if (ReturnInst *RI = dyn_cast<ReturnInst>(I->getTerminator())) {
      CallInst::Create(AsanUnpoisonGlobals, "", RI);
    }
  }
}
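// Editor's illustration (a conceptual sketch, not generated source): after the
// function above runs, the TU's C++ initializer behaves roughly like this,
// where First/Last are the addresses of the first and last dynamically
// initialized globals registered by runOnModule().
#if 0
void _GLOBAL__I_a() {
  __asan_before_dynamic_init(First, Last);  // poison dyn-init globals outside
                                            // the [First, Last] range
  // ... original dynamic initializers for this TU ...
  __asan_after_dynamic_init();              // unpoison before every return
}
#endif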
bool AddressSanitizerModule::ShouldInstrumentGlobal(GlobalVariable *G) {
  Type *Ty = cast<PointerType>(G->getType())->getElementType();
  DEBUG(dbgs() << "GLOBAL: " << *G << "\n");

  if (BL->isIn(*G)) return false;
  if (!Ty->isSized()) return false;
  if (!G->hasInitializer()) return false;
  if (GlobalWasGeneratedByAsan(G)) return false;  // Our own global.
  // Touch only those globals that will not be defined in other modules.
  // Don't handle ODR type linkages since other modules may be built w/o asan.
  if (G->getLinkage() != GlobalVariable::ExternalLinkage &&
      G->getLinkage() != GlobalVariable::PrivateLinkage &&
      G->getLinkage() != GlobalVariable::InternalLinkage)
    return false;
  // Two problems with thread-locals:
  //   - The address of the main thread's copy can't be computed at link-time.
  //   - Need to poison all copies, not just the main thread's one.
  if (G->isThreadLocal())
    return false;
  // For now, just ignore this global if the alignment is large.
  if (G->getAlignment() > RedzoneSize()) return false;

  // Ignore all the globals with the names starting with "\01L_OBJC_".
  // Many of those are put into the .cstring section. The linker compresses
  // that section by removing the spare \0s after the string terminator, so
  // our redzones get broken.
  if ((G->getName().find("\01L_OBJC_") == 0) ||
      (G->getName().find("\01l_OBJC_") == 0)) {
    DEBUG(dbgs() << "Ignoring \\01L_OBJC_* global: " << *G);
    return false;
  }

  if (G->hasSection()) {
    StringRef Section(G->getSection());
    // Ignore the globals from the __OBJC section. The ObjC runtime assumes
    // those conform to /usr/lib/objc/runtime.h, so we can't add redzones to
    // them.
    if ((Section.find("__OBJC,") == 0) ||
        (Section.find("__DATA, __objc_") == 0)) {
      DEBUG(dbgs() << "Ignoring ObjC runtime global: " << *G);
      return false;
    }
    // See http://code.google.com/p/address-sanitizer/issues/detail?id=32
    // Constant CFString instances are compiled in the following way:
    //  -- the string buffer is emitted into
    //     __TEXT,__cstring,cstring_literals
    //  -- the constant NSConstantString structure referencing that buffer
    //     is placed into __DATA,__cfstring
    // Therefore there's no point in placing redzones into __DATA,__cfstring.
    // Moreover, it causes the linker to crash on OS X 10.7.
    if (Section.find("__DATA,__cfstring") == 0) {
      DEBUG(dbgs() << "Ignoring CFString: " << *G);
      return false;
    }
  }

  return true;
}

// This function replaces all global variables with new variables that have
// trailing redzones. It also creates a function that poisons
// redzones and inserts this function into llvm.global_ctors.
bool AddressSanitizerModule::runOnModule(Module &M) {
  if (!ClGlobals) return false;
  TD = getAnalysisIfAvailable<DataLayout>();
  if (!TD)
    return false;
  BL.reset(new BlackList(ClBlackListFile));
  if (BL->isIn(M)) return false;
  DynamicallyInitializedGlobals.Init(M);
  C = &(M.getContext());
  IntptrTy = Type::getIntNTy(*C, TD->getPointerSizeInBits());

  SmallVector<GlobalVariable *, 16> GlobalsToChange;

  for (Module::GlobalListType::iterator G = M.global_begin(),
       E = M.global_end(); G != E; ++G) {
    if (ShouldInstrumentGlobal(G))
      GlobalsToChange.push_back(G);
  }

  size_t n = GlobalsToChange.size();
  if (n == 0) return false;

  // A global is described by a structure
  //   size_t beg;
  //   size_t size;
  //   size_t size_with_redzone;
  //   const char *name;
  //   size_t has_dynamic_init;
  // We initialize an array of such structures and pass it to a run-time call.
  StructType *GlobalStructTy = StructType::get(IntptrTy, IntptrTy,
                                               IntptrTy, IntptrTy,
                                               IntptrTy, NULL);
  SmallVector<Constant *, 16> Initializers(n), DynamicInit;


  Function *CtorFunc = M.getFunction(kAsanModuleCtorName);
  assert(CtorFunc);
  IRBuilder<> IRB(CtorFunc->getEntryBlock().getTerminator());

  // The addresses of the first and last dynamically initialized globals in
  // this TU. Used in initialization order checking.
  Value *FirstDynamic = 0, *LastDynamic = 0;

  for (size_t i = 0; i < n; i++) {
    GlobalVariable *G = GlobalsToChange[i];
    PointerType *PtrTy = cast<PointerType>(G->getType());
    Type *Ty = PtrTy->getElementType();
    uint64_t SizeInBytes = TD->getTypeAllocSize(Ty);
    size_t RZ = RedzoneSize();
    uint64_t RightRedzoneSize = RZ + (RZ - (SizeInBytes % RZ));
    Type *RightRedZoneTy = ArrayType::get(IRB.getInt8Ty(), RightRedzoneSize);
    // Determine whether this global should be poisoned in initialization.
    bool GlobalHasDynamicInitializer =
        DynamicallyInitializedGlobals.Contains(G);
    // Don't check initialization order if this global is blacklisted.
    GlobalHasDynamicInitializer &= !BL->isInInit(*G);

    StructType *NewTy = StructType::get(Ty, RightRedZoneTy, NULL);
    Constant *NewInitializer = ConstantStruct::get(
        NewTy, G->getInitializer(),
        Constant::getNullValue(RightRedZoneTy), NULL);

    SmallString<2048> DescriptionOfGlobal = G->getName();
    DescriptionOfGlobal += " (";
    DescriptionOfGlobal += M.getModuleIdentifier();
    DescriptionOfGlobal += ")";
    GlobalVariable *Name = createPrivateGlobalForString(M, DescriptionOfGlobal);

    // Create a new global variable with enough space for a redzone.
    GlobalVariable *NewGlobal = new GlobalVariable(
        M, NewTy, G->isConstant(), G->getLinkage(),
        NewInitializer, "", G, G->getThreadLocalMode());
    NewGlobal->copyAttributesFrom(G);
    NewGlobal->setAlignment(RZ);

    Value *Indices2[2];
    Indices2[0] = IRB.getInt32(0);
    Indices2[1] = IRB.getInt32(0);

    G->replaceAllUsesWith(
        ConstantExpr::getGetElementPtr(NewGlobal, Indices2, true));
    NewGlobal->takeName(G);
    G->eraseFromParent();

    Initializers[i] = ConstantStruct::get(
        GlobalStructTy,
        ConstantExpr::getPointerCast(NewGlobal, IntptrTy),
        ConstantInt::get(IntptrTy, SizeInBytes),
        ConstantInt::get(IntptrTy, SizeInBytes + RightRedzoneSize),
        ConstantExpr::getPointerCast(Name, IntptrTy),
        ConstantInt::get(IntptrTy, GlobalHasDynamicInitializer),
        NULL);

    // Populate the first and last globals declared in this TU.
    if (CheckInitOrder && GlobalHasDynamicInitializer) {
      LastDynamic = ConstantExpr::getPointerCast(NewGlobal, IntptrTy);
      if (FirstDynamic == 0)
        FirstDynamic = LastDynamic;
    }

    DEBUG(dbgs() << "NEW GLOBAL: " << *NewGlobal << "\n");
  }

  ArrayType *ArrayOfGlobalStructTy = ArrayType::get(GlobalStructTy, n);
  GlobalVariable *AllGlobals = new GlobalVariable(
      M, ArrayOfGlobalStructTy, false, GlobalVariable::PrivateLinkage,
      ConstantArray::get(ArrayOfGlobalStructTy, Initializers), "");

  // Create calls for poisoning before initializers run and unpoisoning after.
  if (CheckInitOrder && FirstDynamic && LastDynamic)
    createInitializerPoisonCalls(M, FirstDynamic, LastDynamic);

  Function *AsanRegisterGlobals = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanRegisterGlobalsName, IRB.getVoidTy(),
      IntptrTy, IntptrTy, NULL));
  AsanRegisterGlobals->setLinkage(Function::ExternalLinkage);

  IRB.CreateCall2(AsanRegisterGlobals,
                  IRB.CreatePointerCast(AllGlobals, IntptrTy),
                  ConstantInt::get(IntptrTy, n));

  // We also need to unregister globals at the end, e.g. when a shared library
  // gets closed.
  Function *AsanDtorFunction = Function::Create(
      FunctionType::get(Type::getVoidTy(*C), false),
      GlobalValue::InternalLinkage, kAsanModuleDtorName, &M);
  BasicBlock *AsanDtorBB = BasicBlock::Create(*C, "", AsanDtorFunction);
  IRBuilder<> IRB_Dtor(ReturnInst::Create(*C, AsanDtorBB));
  Function *AsanUnregisterGlobals =
      checkInterfaceFunction(M.getOrInsertFunction(
          kAsanUnregisterGlobalsName,
          IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));
  AsanUnregisterGlobals->setLinkage(Function::ExternalLinkage);

  IRB_Dtor.CreateCall2(AsanUnregisterGlobals,
                       IRB.CreatePointerCast(AllGlobals, IntptrTy),
                       ConstantInt::get(IntptrTy, n));
  appendToGlobalDtors(M, AsanDtorFunction, kAsanCtorAndCtorPriority);

  DEBUG(dbgs() << M);
  return true;
}
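// Editor's illustration of the layout runOnModule() produces (a sketch, not
// literal output): a global `int g[3];` with the default 32-byte redzone is
// replaced by a struct whose tail becomes the right redzone, and a descriptor
// entry is appended to the array passed to __asan_register_globals.
#if 0
struct ExampleInstrumentedGlobal {
  int Original[3];        // 12 bytes, still addressable
  char RightRedzone[52];  // RZ + (RZ - 12 % RZ) = 32 + 20, poisoned at startup
};
// Descriptor handed to the run-time (one per instrumented global), using a
// hypothetical module name:
//   { beg = &g, size = 12, size_with_redzone = 64,
//     name = "g (module.cc)", has_dynamic_init = 0 }
#endif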
void AddressSanitizer::initializeCallbacks(Module &M) {
  IRBuilder<> IRB(*C);
  // Create __asan_report* callbacks.
  for (size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
    for (size_t AccessSizeIndex = 0; AccessSizeIndex < kNumberOfAccessSizes;
         AccessSizeIndex++) {
      // IsWrite and TypeSize are encoded in the function name.
      std::string FunctionName = std::string(kAsanReportErrorTemplate) +
          (AccessIsWrite ? "store" : "load") + itostr(1 << AccessSizeIndex);
      // If we are merging crash callbacks, they have two parameters.
      AsanErrorCallback[AccessIsWrite][AccessSizeIndex] =
          checkInterfaceFunction(M.getOrInsertFunction(
              FunctionName, IRB.getVoidTy(), IntptrTy, NULL));
    }
  }

  AsanStackMallocFunc = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanStackMallocName, IntptrTy, IntptrTy, IntptrTy, NULL));
  AsanStackFreeFunc = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanStackFreeName, IRB.getVoidTy(),
      IntptrTy, IntptrTy, IntptrTy, NULL));
  AsanHandleNoReturnFunc = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanHandleNoReturnName, IRB.getVoidTy(), NULL));

  // We insert an empty inline asm after __asan_report* to avoid callback merge.
  EmptyAsm = InlineAsm::get(FunctionType::get(IRB.getVoidTy(), false),
                            StringRef(""), StringRef(""),
                            /*hasSideEffects=*/true);
}

// virtual
bool AddressSanitizer::doInitialization(Module &M) {
  // Initialize the private fields. No one has accessed them before.
  TD = getAnalysisIfAvailable<DataLayout>();

  if (!TD)
    return false;
  BL.reset(new BlackList(ClBlackListFile));
  DynamicallyInitializedGlobals.Init(M);

  C = &(M.getContext());
  LongSize = TD->getPointerSizeInBits();
  IntptrTy = Type::getIntNTy(*C, LongSize);
  IntptrPtrTy = PointerType::get(IntptrTy, 0);

  AsanCtorFunction = Function::Create(
      FunctionType::get(Type::getVoidTy(*C), false),
      GlobalValue::InternalLinkage, kAsanModuleCtorName, &M);
  BasicBlock *AsanCtorBB = BasicBlock::Create(*C, "", AsanCtorFunction);
  // call __asan_init in the module ctor.
  IRBuilder<> IRB(ReturnInst::Create(*C, AsanCtorBB));
  AsanInitFunction = checkInterfaceFunction(
      M.getOrInsertFunction(kAsanInitName, IRB.getVoidTy(), NULL));
  AsanInitFunction->setLinkage(Function::ExternalLinkage);
  IRB.CreateCall(AsanInitFunction);

  llvm::Triple targetTriple(M.getTargetTriple());
  bool isAndroid = targetTriple.getEnvironment() == llvm::Triple::Android;

  MappingOffset = isAndroid ? kDefaultShadowOffsetAndroid :
      (LongSize == 32 ? kDefaultShadowOffset32 : kDefaultShadowOffset64);
  if (ClMappingOffsetLog >= 0) {
    if (ClMappingOffsetLog == 0) {
      // special case
      MappingOffset = 0;
    } else {
      MappingOffset = 1ULL << ClMappingOffsetLog;
    }
  }


  if (ClMappingOffsetLog >= 0) {
    // Tell the run-time the current values of mapping offset and scale.
    GlobalValue *asan_mapping_offset =
        new GlobalVariable(M, IntptrTy, true, GlobalValue::LinkOnceODRLinkage,
                           ConstantInt::get(IntptrTy, MappingOffset),
                           kAsanMappingOffsetName);
    // Read the global, otherwise it may be optimized away.
    IRB.CreateLoad(asan_mapping_offset, true);
  }
  if (ClMappingScale) {
    GlobalValue *asan_mapping_scale =
        new GlobalVariable(M, IntptrTy, true, GlobalValue::LinkOnceODRLinkage,
                           ConstantInt::get(IntptrTy, MappingScale()),
                           kAsanMappingScaleName);
    // Read the global, otherwise it may be optimized away.
    IRB.CreateLoad(asan_mapping_scale, true);
  }

  appendToGlobalCtors(M, AsanCtorFunction, kAsanCtorAndCtorPriority);

  return true;
}

bool AddressSanitizer::maybeInsertAsanInitAtFunctionEntry(Function &F) {
  // For each NSObject descendant having a +load method, this method is invoked
  // by the ObjC runtime before any of the static constructors is called.
  // Therefore we need to instrument such methods with a call to __asan_init
  // at the beginning in order to initialize our runtime before any access to
  // the shadow memory.
  // We cannot just ignore these methods, because they may call other
  // instrumented functions.
  if (F.getName().find(" load]") != std::string::npos) {
    IRBuilder<> IRB(F.begin()->begin());
    IRB.CreateCall(AsanInitFunction);
    return true;
  }
  return false;
}

// Check both the call and the callee for doesNotReturn().
static bool isNoReturnCall(CallInst *CI) {
  if (CI->doesNotReturn()) return true;
  Function *F = CI->getCalledFunction();
  return (F && F->doesNotReturn());
}
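// Editor's illustration (conceptual, not verbatim IR): doInitialization()
// above leaves the module with an internal constructor, registered in
// llvm.global_ctors at priority 1, that simply calls the run-time initializer;
// runOnModule() later appends its global-registration call to the same block.
#if 0
// Rough C-level equivalent of the generated asan.module_ctor:
static void ExampleModuleCtor() {
  __asan_init();
  // __asan_register_globals(&descriptor_array[0], n);  // added by runOnModule
}
#endif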
bool AddressSanitizer::runOnFunction(Function &F) {
  if (BL->isIn(F)) return false;
  if (&F == AsanCtorFunction) return false;
  DEBUG(dbgs() << "ASAN instrumenting:\n" << F << "\n");
  initializeCallbacks(*F.getParent());

  // If needed, insert __asan_init before checking for AddressSafety attr.
  maybeInsertAsanInitAtFunctionEntry(F);

  if (!F.getFnAttributes().hasAttribute(Attributes::AddressSafety))
    return false;

  if (!ClDebugFunc.empty() && ClDebugFunc != F.getName())
    return false;

  // We want to instrument every address only once per basic block (unless there
  // are calls between uses).
  SmallSet<Value*, 16> TempsToInstrument;
  SmallVector<Instruction*, 16> ToInstrument;
  SmallVector<Instruction*, 8> NoReturnCalls;
  bool IsWrite;

  // Fill the set of memory operations to instrument.
  for (Function::iterator FI = F.begin(), FE = F.end();
       FI != FE; ++FI) {
    TempsToInstrument.clear();
    int NumInsnsPerBB = 0;
    for (BasicBlock::iterator BI = FI->begin(), BE = FI->end();
         BI != BE; ++BI) {
      if (LooksLikeCodeInBug11395(BI)) return false;
      if (Value *Addr = isInterestingMemoryAccess(BI, &IsWrite)) {
        if (ClOpt && ClOptSameTemp) {
          if (!TempsToInstrument.insert(Addr))
            continue;  // We've seen this temp in the current BB.
        }
      } else if (isa<MemIntrinsic>(BI) && ClMemIntrin) {
        // ok, take it.
      } else {
        if (CallInst *CI = dyn_cast<CallInst>(BI)) {
          // A call inside BB.
          TempsToInstrument.clear();
          if (isNoReturnCall(CI)) {
            NoReturnCalls.push_back(CI);
          }
        }
        continue;
      }
      ToInstrument.push_back(BI);
      NumInsnsPerBB++;
      if (NumInsnsPerBB >= ClMaxInsnsToInstrumentPerBB)
        break;
    }
  }

  // Instrument.
  int NumInstrumented = 0;
  for (size_t i = 0, n = ToInstrument.size(); i != n; i++) {
    Instruction *Inst = ToInstrument[i];
    if (ClDebugMin < 0 || ClDebugMax < 0 ||
        (NumInstrumented >= ClDebugMin && NumInstrumented <= ClDebugMax)) {
      if (isInterestingMemoryAccess(Inst, &IsWrite))
        instrumentMop(Inst);
      else
        instrumentMemIntrinsic(cast<MemIntrinsic>(Inst));
    }
    NumInstrumented++;
  }

  bool ChangedStack = poisonStackInFunction(F);

  // We must unpoison the stack before every NoReturn call (throw, _exit, etc).
  // See e.g. http://code.google.com/p/address-sanitizer/issues/detail?id=37
  for (size_t i = 0, n = NoReturnCalls.size(); i != n; i++) {
    Instruction *CI = NoReturnCalls[i];
    IRBuilder<> IRB(CI);
    IRB.CreateCall(AsanHandleNoReturnFunc);
  }
  DEBUG(dbgs() << "ASAN done instrumenting:\n" << F << "\n");

  return NumInstrumented > 0 || ChangedStack || !NoReturnCalls.empty();
}

static uint64_t ValueForPoison(uint64_t PoisonByte, size_t ShadowRedzoneSize) {
  if (ShadowRedzoneSize == 1) return PoisonByte;
  if (ShadowRedzoneSize == 2) return (PoisonByte << 8) + PoisonByte;
  if (ShadowRedzoneSize == 4)
    return (PoisonByte << 24) + (PoisonByte << 16) +
           (PoisonByte << 8) + (PoisonByte);
  llvm_unreachable("ShadowRedzoneSize is either 1, 2 or 4");
}

static void PoisonShadowPartialRightRedzone(uint8_t *Shadow,
                                            size_t Size,
                                            size_t RZSize,
                                            size_t ShadowGranularity,
                                            uint8_t Magic) {
  for (size_t i = 0; i < RZSize;
       i += ShadowGranularity, Shadow++) {
    if (i + ShadowGranularity <= Size) {
      *Shadow = 0;  // fully addressable
    } else if (i >= Size) {
      *Shadow = Magic;  // unaddressable
    } else {
      *Shadow = Size - i;  // first Size-i bytes are addressable
    }
  }
}
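// Editor's worked example for PoisonShadowPartialRightRedzone (a sketch, not a
// unit test in this tree): with an alloca of Size == 13 inside a 32-byte
// region and an 8-byte shadow granularity, the four shadow bytes come out as
// {0, 5, Magic, Magic}: the first granule is fully addressable, the second has
// only its first 5 bytes addressable, and the rest is redzone.
#if 0
static void ExamplePartialRedzone() {
  uint8_t Shadow[4] = {};
  PoisonShadowPartialRightRedzone(Shadow, /*Size=*/13, /*RZSize=*/32,
                                  /*ShadowGranularity=*/8,
                                  kAsanStackPartialRedzoneMagic);
  assert(Shadow[0] == 0 && Shadow[1] == 5 &&
         Shadow[2] == 0xf4 && Shadow[3] == 0xf4);
}
#endif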
void AddressSanitizer::PoisonStack(const ArrayRef<AllocaInst*> &AllocaVec,
                                   IRBuilder<> IRB,
                                   Value *ShadowBase, bool DoPoison) {
  size_t ShadowRZSize = RedzoneSize() >> MappingScale();
  assert(ShadowRZSize >= 1 && ShadowRZSize <= 4);
  Type *RZTy = Type::getIntNTy(*C, ShadowRZSize * 8);
  Type *RZPtrTy = PointerType::get(RZTy, 0);

  Value *PoisonLeft = ConstantInt::get(RZTy,
      ValueForPoison(DoPoison ? kAsanStackLeftRedzoneMagic : 0LL, ShadowRZSize));
  Value *PoisonMid = ConstantInt::get(RZTy,
      ValueForPoison(DoPoison ? kAsanStackMidRedzoneMagic : 0LL, ShadowRZSize));
  Value *PoisonRight = ConstantInt::get(RZTy,
      ValueForPoison(DoPoison ? kAsanStackRightRedzoneMagic : 0LL, ShadowRZSize));

  // poison the first red zone.
  IRB.CreateStore(PoisonLeft, IRB.CreateIntToPtr(ShadowBase, RZPtrTy));

  // poison all other red zones.
  uint64_t Pos = RedzoneSize();
  for (size_t i = 0, n = AllocaVec.size(); i < n; i++) {
    AllocaInst *AI = AllocaVec[i];
    uint64_t SizeInBytes = getAllocaSizeInBytes(AI);
    uint64_t AlignedSize = getAlignedAllocaSize(AI);
    assert(AlignedSize - SizeInBytes < RedzoneSize());
    Value *Ptr = NULL;

    Pos += AlignedSize;

    assert(ShadowBase->getType() == IntptrTy);
    if (SizeInBytes < AlignedSize) {
      // Poison the partial redzone at right
      Ptr = IRB.CreateAdd(
          ShadowBase, ConstantInt::get(IntptrTy,
                                       (Pos >> MappingScale()) - ShadowRZSize));
      size_t AddressableBytes = RedzoneSize() - (AlignedSize - SizeInBytes);
      uint32_t Poison = 0;
      if (DoPoison) {
        PoisonShadowPartialRightRedzone((uint8_t*)&Poison, AddressableBytes,
                                        RedzoneSize(),
                                        1ULL << MappingScale(),
                                        kAsanStackPartialRedzoneMagic);
      }
      Value *PartialPoison = ConstantInt::get(RZTy, Poison);
      IRB.CreateStore(PartialPoison, IRB.CreateIntToPtr(Ptr, RZPtrTy));
    }

    // Poison the full redzone at right.
    Ptr = IRB.CreateAdd(ShadowBase,
                        ConstantInt::get(IntptrTy, Pos >> MappingScale()));
    Value *Poison = i == AllocaVec.size() - 1 ? PoisonRight : PoisonMid;
    IRB.CreateStore(Poison, IRB.CreateIntToPtr(Ptr, RZPtrTy));

    Pos += RedzoneSize();
  }
}

// Workaround for bug 11395: we don't want to instrument stack in functions
// with large assembly blobs (32-bit only), otherwise reg alloc may crash.
// FIXME: remove once the bug 11395 is fixed.
bool AddressSanitizer::LooksLikeCodeInBug11395(Instruction *I) {
  if (LongSize != 32) return false;
  CallInst *CI = dyn_cast<CallInst>(I);
  if (!CI || !CI->isInlineAsm()) return false;
  if (CI->getNumArgOperands() <= 5) return false;
  // We have inline assembly with quite a few arguments.
  return true;
}
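// Editor's illustration of the frame layout that poisonStackInFunction() below
// builds (a sketch assuming the default 32-byte redzone and a 64-bit target;
// names like Buf1Slot are hypothetical): all static allocas are folded into
// one byte-array alloca, separated by poisoned redzones, and the left redzone
// additionally carries a frame magic and a description string that the
// run-time parses when reporting errors.
#if 0
struct ExampleAsanFrame {                    // one "MyAlloca" byte array
  uintptr_t Magic;                           // kCurrentStackFrameMagic while live,
                                             // kRetiredStackFrameMagic after return
  const char *FrameDescription;              // points at an __asan_gen_ string
  char LeftRedzoneRest[32 - 2 * sizeof(void *)];
  char Buf1Slot[32];                         // buf1: 4 bytes used, tail is a
                                             //   partial redzone (0xf4 shadow)
  char MidRedzone[32];                       // kAsanStackMidRedzoneMagic
  char Buf2Slot[32];                         // buf2: 8 bytes used
  char RightRedzone[32];                     // kAsanStackRightRedzoneMagic
};
// For a function foo with those two allocas, the description string would be
//   "foo 2 32 4 4 buf1 96 8 4 buf2 "
// i.e. <function> <#allocas> then <offset> <size> <name-len> <name> per alloca.
#endif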
// Find all static Alloca instructions and put
// poisoned red zones around all of them.
// Then unpoison everything back before the function returns.
//
// Stack poisoning does not play well with exception handling.
// When an exception is thrown, we essentially bypass the code
// that unpoisons the stack. This is why the run-time library has
// to intercept __cxa_throw (as well as longjmp, etc) and unpoison the entire
// stack in the interceptor. This however does not work inside the
// actual function which catches the exception. Most likely because the
// compiler hoists the load of the shadow value somewhere too high.
// This causes asan to report a non-existing bug on 453.povray.
// It sounds like an LLVM bug.
bool AddressSanitizer::poisonStackInFunction(Function &F) {
  if (!ClStack) return false;
  SmallVector<AllocaInst*, 16> AllocaVec;
  SmallVector<Instruction*, 8> RetVec;
  uint64_t TotalSize = 0;

  // Filter out Alloca instructions we want (and can) handle.
  // Collect Ret instructions.
  for (Function::iterator FI = F.begin(), FE = F.end();
       FI != FE; ++FI) {
    BasicBlock &BB = *FI;
    for (BasicBlock::iterator BI = BB.begin(), BE = BB.end();
         BI != BE; ++BI) {
      if (isa<ReturnInst>(BI)) {
        RetVec.push_back(BI);
        continue;
      }

      AllocaInst *AI = dyn_cast<AllocaInst>(BI);
      if (!AI) continue;
      if (AI->isArrayAllocation()) continue;
      if (!AI->isStaticAlloca()) continue;
      if (!AI->getAllocatedType()->isSized()) continue;
      if (AI->getAlignment() > RedzoneSize()) continue;
      AllocaVec.push_back(AI);
      uint64_t AlignedSize = getAlignedAllocaSize(AI);
      TotalSize += AlignedSize;
    }
  }

  if (AllocaVec.empty()) return false;

  uint64_t LocalStackSize = TotalSize + (AllocaVec.size() + 1) * RedzoneSize();

  bool DoStackMalloc = CheckUseAfterReturn
      && LocalStackSize <= kMaxStackMallocSize;

  Instruction *InsBefore = AllocaVec[0];
  IRBuilder<> IRB(InsBefore);


  Type *ByteArrayTy = ArrayType::get(IRB.getInt8Ty(), LocalStackSize);
  AllocaInst *MyAlloca =
      new AllocaInst(ByteArrayTy, "MyAlloca", InsBefore);
  MyAlloca->setAlignment(RedzoneSize());
  assert(MyAlloca->isStaticAlloca());
  Value *OrigStackBase = IRB.CreatePointerCast(MyAlloca, IntptrTy);
  Value *LocalStackBase = OrigStackBase;

  if (DoStackMalloc) {
    LocalStackBase = IRB.CreateCall2(AsanStackMallocFunc,
        ConstantInt::get(IntptrTy, LocalStackSize), OrigStackBase);
  }

  // This string will be parsed by the run-time (DescribeStackAddress).
  SmallString<2048> StackDescriptionStorage;
  raw_svector_ostream StackDescription(StackDescriptionStorage);
  StackDescription << F.getName() << " " << AllocaVec.size() << " ";

  uint64_t Pos = RedzoneSize();
  // Replace Alloca instructions with base+offset.
  for (size_t i = 0, n = AllocaVec.size(); i < n; i++) {
    AllocaInst *AI = AllocaVec[i];
    uint64_t SizeInBytes = getAllocaSizeInBytes(AI);
    StringRef Name = AI->getName();
    StackDescription << Pos << " " << SizeInBytes << " "
                     << Name.size() << " " << Name << " ";
    uint64_t AlignedSize = getAlignedAllocaSize(AI);
    assert((AlignedSize % RedzoneSize()) == 0);
    AI->replaceAllUsesWith(
        IRB.CreateIntToPtr(
            IRB.CreateAdd(LocalStackBase, ConstantInt::get(IntptrTy, Pos)),
            AI->getType()));
    Pos += AlignedSize + RedzoneSize();
  }
  assert(Pos == LocalStackSize);

  // Write the Magic value and the frame description constant to the redzone.
  Value *BasePlus0 = IRB.CreateIntToPtr(LocalStackBase, IntptrPtrTy);
  IRB.CreateStore(ConstantInt::get(IntptrTy, kCurrentStackFrameMagic),
                  BasePlus0);
  Value *BasePlus1 = IRB.CreateAdd(LocalStackBase,
                                   ConstantInt::get(IntptrTy, LongSize/8));
  BasePlus1 = IRB.CreateIntToPtr(BasePlus1, IntptrPtrTy);
  GlobalVariable *StackDescriptionGlobal =
      createPrivateGlobalForString(*F.getParent(), StackDescription.str());
  Value *Description = IRB.CreatePointerCast(StackDescriptionGlobal, IntptrTy);
  IRB.CreateStore(Description, BasePlus1);

  // Poison the stack redzones at the entry.
  Value *ShadowBase = memToShadow(LocalStackBase, IRB);
  PoisonStack(ArrayRef<AllocaInst*>(AllocaVec), IRB, ShadowBase, true);

  // Unpoison the stack before all ret instructions.
  for (size_t i = 0, n = RetVec.size(); i < n; i++) {
    Instruction *Ret = RetVec[i];
    IRBuilder<> IRBRet(Ret);

    // Mark the current frame as retired.
    IRBRet.CreateStore(ConstantInt::get(IntptrTy, kRetiredStackFrameMagic),
                       BasePlus0);
    // Unpoison the stack.
    PoisonStack(ArrayRef<AllocaInst*>(AllocaVec), IRBRet, ShadowBase, false);

    if (DoStackMalloc) {
      IRBRet.CreateCall3(AsanStackFreeFunc, LocalStackBase,
                         ConstantInt::get(IntptrTy, LocalStackSize),
                         OrigStackBase);
    }
  }

  // We are done. Remove the old unused alloca instructions.
  for (size_t i = 0, n = AllocaVec.size(); i < n; i++)
    AllocaVec[i]->eraseFromParent();

  if (ClDebugStack) {
    DEBUG(dbgs() << F);
  }

  return true;
}
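// Editor's usage note (a sketch, not part of this file): out of tree, the two
// passes defined above are normally added through the factory functions this
// file exports; whether the front end also sets the AddressSafety attribute on
// functions (which runOnFunction() requires) is up to that front end.
#if 0
#include "llvm/PassManager.h"
#include "llvm/Transforms/Instrumentation.h"

static void ExampleAddAsanPasses(llvm::PassManager &PM) {
  PM.add(llvm::createAddressSanitizerFunctionPass(/*CheckInitOrder=*/true,
                                                  /*CheckUseAfterReturn=*/false,
                                                  /*CheckLifetime=*/false));
  PM.add(llvm::createAddressSanitizerModulePass(/*CheckInitOrder=*/true));
}
#endif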