ArrayBoundCheckerV2.cpp revision 9b0970f2c7fdc070b18e113f0bbd96e7f77b4f54
//== ArrayBoundCheckerV2.cpp ------------------------------------*- C++ -*--==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines ArrayBoundCheckerV2, a path-sensitive checker that
// looks for out-of-bound array element accesses.
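//
// For example (illustrative only), the checker is meant to flag accesses
// such as:
//
//   int buf[4];
//   buf[4] = 1;   // the byte offset reaches the extent of 'buf'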
//
//===----------------------------------------------------------------------===//

#include "ClangSACheckers.h"
#include "clang/StaticAnalyzer/Core/Checker.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CheckerContext.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/AST/CharUnits.h"

using namespace clang;
using namespace ento;

namespace {
class ArrayBoundCheckerV2 :
    public Checker<check::Location> {
  mutable llvm::OwningPtr<BuiltinBug> BT;

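  // OOB_Precedes: the access lies before the start of the memory block.
  // OOB_Excedes:  the access lies at or beyond the end of the memory block.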
  enum OOB_Kind { OOB_Precedes, OOB_Excedes };

  void reportOOB(CheckerContext &C, const ProgramState *errorState,
                 OOB_Kind kind) const;

public:
  void checkLocation(SVal l, bool isLoad, const Stmt *S,
                     CheckerContext &C) const;
};

// FIXME: Eventually replace RegionRawOffset with this class.
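//
// A RegionRawOffsetV2 decomposes a memory access into a base region plus a
// raw byte offset into that region.  For example (illustrative only), for
//
//   int buf[10];
//   ... buf[i] ...
//
// the base region is the region for 'buf' and the byte offset is the
// symbolic value 'i * sizeof(int)'.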
class RegionRawOffsetV2 {
private:
  const SubRegion *baseRegion;
  SVal byteOffset;

  RegionRawOffsetV2()
    : baseRegion(0), byteOffset(UnknownVal()) {}

public:
  RegionRawOffsetV2(const SubRegion *base, SVal offset)
    : baseRegion(base), byteOffset(offset) {}

  NonLoc getByteOffset() const { return cast<NonLoc>(byteOffset); }
  const SubRegion *getRegion() const { return baseRegion; }

  static RegionRawOffsetV2 computeOffset(const ProgramState *state,
                                         SValBuilder &svalBuilder,
                                         SVal location);

  void dump() const;
  void dumpToStream(raw_ostream &os) const;
};
}

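// Return the lower bound (in bytes) of the valid extent of 'region'.  For
// most region kinds this is offset zero; for symbolic regions the lower
// bound is currently unknown (see the FIXME in the function body).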
static SVal computeExtentBegin(SValBuilder &svalBuilder,
                               const MemRegion *region) {
  while (true)
    switch (region->getKind()) {
      default:
        return svalBuilder.makeZeroArrayIndex();
      case MemRegion::SymbolicRegionKind:
        // FIXME: improve this later by tracking symbolic lower bounds
        // for symbolic regions.
        return UnknownVal();
      case MemRegion::ElementRegionKind:
        region = cast<SubRegion>(region)->getSuperRegion();
        continue;
    }
}

void ArrayBoundCheckerV2::checkLocation(SVal location, bool isLoad,
                                        const Stmt *LoadS,
                                        CheckerContext &checkerContext) const {

  // NOTE: Instead of using ProgramState::assumeInBound(), we are prototyping
  // some new logic here that reasons directly about memory region extents.
  // Once that logic is more mature, we can bring it back to assumeInBound()
  // for all clients to use.
  //
  // The algorithm we are using here for bounds checking is to see if the
  // memory access is within the extent of the base region.  Since we
  // have some flexibility in defining the base region, we can achieve
  // various levels of conservatism in our buffer overflow checking.
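  //
  // As a purely illustrative example, for
  //
  //   char buf[100];
  //   ... buf[idx] ...
  //
  // we compute the raw byte offset 'idx' from the base region for 'buf' and
  // then check (1) whether that offset can lie before the start of the
  // region and (2) whether it can reach or exceed the region's 100-byte
  // extent.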
  const ProgramState *state = checkerContext.getState();
  const ProgramState *originalState = state;

  SValBuilder &svalBuilder = checkerContext.getSValBuilder();
  const RegionRawOffsetV2 &rawOffset =
    RegionRawOffsetV2::computeOffset(state, svalBuilder, location);

  if (!rawOffset.getRegion())
    return;

  // CHECK LOWER BOUND: Is byteOffset < extent begin?  If so, we are doing a
  // load/store before the first valid offset in the memory region.

  SVal extentBegin = computeExtentBegin(svalBuilder, rawOffset.getRegion());

  if (isa<NonLoc>(extentBegin)) {
    SVal lowerBound
      = svalBuilder.evalBinOpNN(state, BO_LT, rawOffset.getByteOffset(),
                                cast<NonLoc>(extentBegin),
                                svalBuilder.getConditionType());

    NonLoc *lowerBoundToCheck = dyn_cast<NonLoc>(&lowerBound);
    if (!lowerBoundToCheck)
      return;

    const ProgramState *state_precedesLowerBound, *state_withinLowerBound;
    llvm::tie(state_precedesLowerBound, state_withinLowerBound) =
      state->assume(*lowerBoundToCheck);

    // Are we constrained enough to definitely precede the lower bound?
    if (state_precedesLowerBound && !state_withinLowerBound) {
      reportOOB(checkerContext, state_precedesLowerBound, OOB_Precedes);
      return;
    }

    // Otherwise, assume the constraint of the lower bound.
    assert(state_withinLowerBound);
    state = state_withinLowerBound;
  }

  do {
    // CHECK UPPER BOUND: Is byteOffset >= extent(baseRegion)?  If so,
    // we are doing a load/store after the last valid offset.
    DefinedOrUnknownSVal extentVal =
      rawOffset.getRegion()->getExtent(svalBuilder);
    if (!isa<NonLoc>(extentVal))
      break;

    SVal upperbound
      = svalBuilder.evalBinOpNN(state, BO_GE, rawOffset.getByteOffset(),
                                cast<NonLoc>(extentVal),
                                svalBuilder.getConditionType());

    NonLoc *upperboundToCheck = dyn_cast<NonLoc>(&upperbound);
    if (!upperboundToCheck)
      break;

    const ProgramState *state_exceedsUpperBound, *state_withinUpperBound;
    llvm::tie(state_exceedsUpperBound, state_withinUpperBound) =
      state->assume(*upperboundToCheck);

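    // A tainted byte offset means the index is derived from an untrusted
    // source (e.g., external input), so we warn even though the path
    // constraints alone cannot prove the access is out of bounds.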
    // If we are under-constrained and the index variables are tainted, report.
    // In either case, we cannot conclude anything further on this path.
    if (state_exceedsUpperBound && state_withinUpperBound) {
      if (state->isTainted(rawOffset.getByteOffset()))
        reportOOB(checkerContext, state_exceedsUpperBound, OOB_Excedes);
      return;
    }

    // If we are constrained enough to definitely exceed the upper bound,
    // report.
    if (state_exceedsUpperBound) {
      assert(!state_withinUpperBound);
      reportOOB(checkerContext, state_exceedsUpperBound, OOB_Excedes);
      return;
    }

    assert(state_withinUpperBound);
    state = state_withinUpperBound;
  }
  while (false);

  if (state != originalState)
    checkerContext.addTransition(state);
}

void ArrayBoundCheckerV2::reportOOB(CheckerContext &checkerContext,
                                    const ProgramState *errorState,
                                    OOB_Kind kind) const {

  ExplodedNode *errorNode = checkerContext.generateSink(errorState);
  if (!errorNode)
    return;

  if (!BT)
    BT.reset(new BuiltinBug("Out-of-bound access"));

  // FIXME: These diagnostics are preliminary.  We should get far better
  // diagnostics for explaining buffer overruns.

  llvm::SmallString<256> buf;
  llvm::raw_svector_ostream os(buf);
  os << "Out of bound memory access "
     << (kind == OOB_Precedes ? "(accessed memory precedes memory block)"
                              : "(access exceeds upper limit of memory block)");

  checkerContext.EmitReport(new BugReport(*BT, os.str(), errorNode));
}

void RegionRawOffsetV2::dump() const {
  dumpToStream(llvm::errs());
}

void RegionRawOffsetV2::dumpToStream(raw_ostream &os) const {
  os << "raw_offset_v2{" << getRegion() << ',' << getByteOffset() << '}';
}

// FIXME: Merge with the implementation of the same method in Store.cpp
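// A record type is incomplete when only a forward declaration (e.g.,
// 'struct S;') is visible; its size is unknown, so 'computeOffset' cannot
// scale an element index by it.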
static bool IsCompleteType(ASTContext &Ctx, QualType Ty) {
  if (const RecordType *RT = Ty->getAs<RecordType>()) {
    const RecordDecl *D = RT->getDecl();
    if (!D->getDefinition())
      return false;
  }

  return true;
}


// Compute a value to be used by 'computeOffset'.  If 'val' is undefined
// (the initial state of the offset accumulator), substitute '0'; otherwise
// return 'val' unchanged.
static inline SVal getValue(SVal val, SValBuilder &svalBuilder) {
  return isa<UndefinedVal>(val) ? svalBuilder.makeArrayIndex(0) : val;
}

// Scale a base value by a scaling factor, and return the scaled
// value as an SVal.  Used by 'computeOffset'.
static inline SVal scaleValue(const ProgramState *state,
                              NonLoc baseVal, CharUnits scaling,
                              SValBuilder &sb) {
  return sb.evalBinOpNN(state, BO_Mul, baseVal,
                        sb.makeArrayIndex(scaling.getQuantity()),
                        sb.getArrayIndexType());
}

// Add an SVal to another, treating unknown and undefined values as
// summing to UnknownVal.  Used by 'computeOffset'.
static SVal addValue(const ProgramState *state, SVal x, SVal y,
                     SValBuilder &svalBuilder) {
  // We treat UnknownVals and UndefinedVals the same here because we
  // only care about computing offsets.
  if (x.isUnknownOrUndef() || y.isUnknownOrUndef())
    return UnknownVal();

  return svalBuilder.evalBinOpNN(state, BO_Add,
                                 cast<NonLoc>(x), cast<NonLoc>(y),
                                 svalBuilder.getArrayIndexType());
}

/// Compute a raw byte offset from a base region.  Used for array bounds
/// checking.
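///
/// For example (illustrative only), for
///
///   int buf[10][10];
///   ... buf[x][y] ...
///
/// the nested ElementRegions are peeled off one at a time, accumulating
/// 'y * sizeof(int) + x * sizeof(int[10])' as the raw byte offset from the
/// base region for 'buf'.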
RegionRawOffsetV2 RegionRawOffsetV2::computeOffset(const ProgramState *state,
                                                   SValBuilder &svalBuilder,
                                                   SVal location)
{
  const MemRegion *region = location.getAsRegion();
  SVal offset = UndefinedVal();

  while (region) {
    switch (region->getKind()) {
      default: {
        if (const SubRegion *subReg = dyn_cast<SubRegion>(region)) {
          offset = getValue(offset, svalBuilder);
          if (!offset.isUnknownOrUndef())
            return RegionRawOffsetV2(subReg, offset);
        }
        return RegionRawOffsetV2();
      }
      case MemRegion::ElementRegionKind: {
        const ElementRegion *elemReg = cast<ElementRegion>(region);
        SVal index = elemReg->getIndex();
        if (!isa<NonLoc>(index))
          return RegionRawOffsetV2();
        QualType elemType = elemReg->getElementType();
        // If the element is an incomplete type, go no further.
        ASTContext &astContext = svalBuilder.getContext();
        if (!IsCompleteType(astContext, elemType))
          return RegionRawOffsetV2();

        // Update the offset.
        offset = addValue(state,
                          getValue(offset, svalBuilder),
                          scaleValue(state,
                                     cast<NonLoc>(index),
                                     astContext.getTypeSizeInChars(elemType),
                                     svalBuilder),
                          svalBuilder);

        if (offset.isUnknownOrUndef())
          return RegionRawOffsetV2();

        region = elemReg->getSuperRegion();
        continue;
      }
    }
  }
  return RegionRawOffsetV2();
}


void ento::registerArrayBoundCheckerV2(CheckerManager &mgr) {
  mgr.registerChecker<ArrayBoundCheckerV2>();
}