Coverage Report

Created: 2022-01-15 10:30

/Users/buildslave/jenkins/workspace/coverage/llvm-project/clang/lib/StaticAnalyzer/Checkers/ArrayBoundCheckerV2.cpp
Line
Count
Source (jump to first uncovered line)
1
//== ArrayBoundCheckerV2.cpp ------------------------------------*- C++ -*--==//
2
//
3
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4
// See https://llvm.org/LICENSE.txt for license information.
5
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6
//
7
//===----------------------------------------------------------------------===//
8
//
9
// This file defines ArrayBoundCheckerV2, which is a path-sensitive check
10
// which looks for an out-of-bound array element access.
11
//
12
//===----------------------------------------------------------------------===//
13
14
#include "Taint.h"
15
#include "clang/AST/CharUnits.h"
16
#include "clang/StaticAnalyzer/Checkers/BuiltinCheckerRegistration.h"
17
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
18
#include "clang/StaticAnalyzer/Core/Checker.h"
19
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
20
#include "clang/StaticAnalyzer/Core/PathSensitive/APSIntType.h"
21
#include "clang/StaticAnalyzer/Core/PathSensitive/CheckerContext.h"
22
#include "clang/StaticAnalyzer/Core/PathSensitive/DynamicExtent.h"
23
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
24
#include "llvm/ADT/SmallString.h"
25
#include "llvm/Support/raw_ostream.h"
26
27
using namespace clang;
28
using namespace ento;
29
using namespace taint;
30
31
namespace {
// Path-sensitive checker that flags loads/stores whose byte offset lies
// outside the extent of the accessed base memory region.
class ArrayBoundCheckerV2 :
    public Checker<check::Location> {
  // Lazily-created bug type shared by all reports from this checker.
  mutable std::unique_ptr<BuiltinBug> BT;

  // Which bound was violated (or suspected):
  //   OOB_Precedes - access before the first valid offset of the region,
  //   OOB_Excedes  - access at/after the end of the region,
  //   OOB_Tainted  - under-constrained tainted index that may exceed the bound.
  enum OOB_Kind { OOB_Precedes, OOB_Excedes, OOB_Tainted };

  // Emit a bug report on the error node derived from errorState.  The
  // optional Visitor (used for taint reports) annotates the diagnostic path.
  void reportOOB(CheckerContext &C, ProgramStateRef errorState, OOB_Kind kind,
                 std::unique_ptr<BugReporterVisitor> Visitor = nullptr) const;

public:
  // check::Location callback: invoked for every load/store of location 'l'.
  void checkLocation(SVal l, bool isLoad, const Stmt*S,
                     CheckerContext &C) const;
};
46
// FIXME: Eventually replace RegionRawOffset with this class.
// A (base region, symbolic byte offset) pair describing the raw location of a
// memory access relative to the start of its base region.
class RegionRawOffsetV2 {
private:
  const SubRegion *baseRegion; // null means "offset could not be computed"
  SVal byteOffset;             // byte distance from the start of baseRegion

  // Invalid-offset sentinel, produced only by computeOffset() failure paths.
  RegionRawOffsetV2()
    : baseRegion(nullptr), byteOffset(UnknownVal()) {}

public:
  RegionRawOffsetV2(const SubRegion* base, SVal offset)
    : baseRegion(base), byteOffset(offset) {}

  // Precondition: byteOffset is a NonLoc (guaranteed when baseRegion is
  // non-null, i.e. when computeOffset() succeeded).
  NonLoc getByteOffset() const { return byteOffset.castAs<NonLoc>(); }
  const SubRegion *getRegion() const { return baseRegion; }

  // Fold a (possibly nested) element-region access in 'location' down to a
  // single base region plus an accumulated, scaled byte offset.
  static RegionRawOffsetV2 computeOffset(ProgramStateRef state,
                                         SValBuilder &svalBuilder,
                                         SVal location);

  void dump() const;
  void dumpToStream(raw_ostream &os) const;
};
}
70
71
// Return the lowest valid byte offset within 'region': zero for regions in a
// known memory space.  For regions in the unknown memory space the true start
// cannot be determined, so Unknown is returned and the lower-bound check is
// skipped by the caller.
static SVal computeExtentBegin(SValBuilder &svalBuilder,
                               const MemRegion *region) {
  if (region->getMemorySpace()->getKind() ==
      MemRegion::UnknownSpaceRegionKind)
    return UnknownVal();
  return svalBuilder.makeZeroArrayIndex();
}
79
80
// TODO: once the constraint manager is smart enough to handle non simplified
// symbolic expressions remove this function. Note that this can not be used in
// the constraint manager as is, since this does not handle overflows. It is
// safe to assume, however, that memory offsets will not overflow.
//
// Rewrites the comparison "sym * C + D <op> extent" into the equivalent
// "sym <op> (extent - D) / C" by recursively peeling constant multiplications
// and additions off the offset and folding them into the concrete extent.
static std::pair<NonLoc, nonloc::ConcreteInt>
getSimplifiedOffsets(NonLoc offset, nonloc::ConcreteInt extent,
                     SValBuilder &svalBuilder) {
  Optional<nonloc::SymbolVal> SymVal = offset.getAs<nonloc::SymbolVal>();
  if (SymVal && SymVal->isExpression()) {
    if (const SymIntExpr *SIE = dyn_cast<SymIntExpr>(SymVal->getSymbol())) {
      // Convert the constant operand to the extent's width/signedness so the
      // arithmetic below happens in a single integer type.
      llvm::APSInt constant =
          APSIntType(extent.getValue()).convert(SIE->getRHS());
      switch (SIE->getOpcode()) {
      case BO_Mul:
        // The constant should never be 0 here, since it is the result of
        // scaling based on the size of a type which is never 0.
        // Only divide the extent when it divides evenly; otherwise the
        // rewrite would lose precision.
        if ((extent.getValue() % constant) != 0)
          return std::pair<NonLoc, nonloc::ConcreteInt>(offset, extent);
        else
          return getSimplifiedOffsets(
              nonloc::SymbolVal(SIE->getLHS()),
              svalBuilder.makeIntVal(extent.getValue() / constant),
              svalBuilder);
      case BO_Add:
        // "sym + C <op> extent"  ==  "sym <op> extent - C" (no overflow, per
        // the assumption documented above).
        return getSimplifiedOffsets(
            nonloc::SymbolVal(SIE->getLHS()),
            svalBuilder.makeIntVal(extent.getValue() - constant), svalBuilder);
      default:
        break;
      }
    }
  }

  // Offset is not an expression we can simplify: return the pair unchanged.
  return std::pair<NonLoc, nonloc::ConcreteInt>(offset, extent);
}
115
116
// Main entry point: on every load/store, compute the raw byte offset of the
// access relative to its base region and check it against both the region's
// lower bound (its beginning) and upper bound (its dynamic extent).
void ArrayBoundCheckerV2::checkLocation(SVal location, bool isLoad,
                                        const Stmt* LoadS,
                                        CheckerContext &checkerContext) const {

  // NOTE: Instead of using ProgramState::assumeInBound(), we are prototyping
  // some new logic here that reasons directly about memory region extents.
  // Once that logic is more mature, we can bring it back to assumeInBound()
  // for all clients to use.
  //
  // The algorithm we are using here for bounds checking is to see if the
  // memory access is within the extent of the base region.  Since we
  // have some flexibility in defining the base region, we can achieve
  // various levels of conservatism in our buffer overflow checking.
  ProgramStateRef state = checkerContext.getState();

  SValBuilder &svalBuilder = checkerContext.getSValBuilder();
  const RegionRawOffsetV2 &rawOffset =
    RegionRawOffsetV2::computeOffset(state, svalBuilder, location);

  // A null region means the offset could not be computed; bail out silently.
  if (!rawOffset.getRegion())
    return;

  NonLoc rawOffsetVal = rawOffset.getByteOffset();

  // CHECK LOWER BOUND: Is byteOffset < extent begin?
  //  If so, we are doing a load/store
  //  before the first valid offset in the memory region.

  SVal extentBegin = computeExtentBegin(svalBuilder, rawOffset.getRegion());

  // Unknown extent begin (unknown memory space) skips the lower-bound check.
  if (Optional<NonLoc> NV = extentBegin.getAs<NonLoc>()) {
    if (NV->getAs<nonloc::ConcreteInt>()) {
      // Simplify "sym*C + D < begin" so the constraint manager can reason
      // about it; both sides of the comparison are rewritten together.
      std::pair<NonLoc, nonloc::ConcreteInt> simplifiedOffsets =
          getSimplifiedOffsets(rawOffset.getByteOffset(),
                               NV->castAs<nonloc::ConcreteInt>(),
                               svalBuilder);
      rawOffsetVal = simplifiedOffsets.first;
      *NV = simplifiedOffsets.second;
    }

    SVal lowerBound = svalBuilder.evalBinOpNN(state, BO_LT, rawOffsetVal, *NV,
                                              svalBuilder.getConditionType());

    Optional<NonLoc> lowerBoundToCheck = lowerBound.getAs<NonLoc>();
    if (!lowerBoundToCheck)
      return;

    ProgramStateRef state_precedesLowerBound, state_withinLowerBound;
    std::tie(state_precedesLowerBound, state_withinLowerBound) =
      state->assume(*lowerBoundToCheck);

    // Are we constrained enough to definitely precede the lower bound?
    if (state_precedesLowerBound && !state_withinLowerBound) {
      reportOOB(checkerContext, state_precedesLowerBound, OOB_Precedes);
      return;
    }

    // Otherwise, assume the constraint of the lower bound.
    assert(state_withinLowerBound);
    state = state_withinLowerBound;
  }

  // do { ... } while (false) so early exits from the upper-bound check can
  // 'break' and still reach the addTransition() below.
  do {
    // CHECK UPPER BOUND: Is byteOffset >= size(baseRegion)?  If so,
    // we are doing a load/store after the last valid offset.
    const MemRegion *MR = rawOffset.getRegion();
    DefinedOrUnknownSVal Size = getDynamicExtent(state, MR, svalBuilder);
    if (!Size.getAs<NonLoc>())
      break;

    if (Size.getAs<nonloc::ConcreteInt>()) {
      // Same simplification as above, this time against the concrete size.
      std::pair<NonLoc, nonloc::ConcreteInt> simplifiedOffsets =
          getSimplifiedOffsets(rawOffset.getByteOffset(),
                               Size.castAs<nonloc::ConcreteInt>(), svalBuilder);
      rawOffsetVal = simplifiedOffsets.first;
      Size = simplifiedOffsets.second;
    }

    SVal upperbound = svalBuilder.evalBinOpNN(state, BO_GE, rawOffsetVal,
                                              Size.castAs<NonLoc>(),
                                              svalBuilder.getConditionType());

    Optional<NonLoc> upperboundToCheck = upperbound.getAs<NonLoc>();
    if (!upperboundToCheck)
      break;

    ProgramStateRef state_exceedsUpperBound, state_withinUpperBound;
    std::tie(state_exceedsUpperBound, state_withinUpperBound) =
      state->assume(*upperboundToCheck);

    // If we are under constrained and the index variables are tainted, report.
    if (state_exceedsUpperBound && state_withinUpperBound) {
      SVal ByteOffset = rawOffset.getByteOffset();
      if (isTainted(state, ByteOffset)) {
        reportOOB(checkerContext, state_exceedsUpperBound, OOB_Tainted,
                  std::make_unique<TaintBugVisitor>(ByteOffset));
        return;
      }
    } else if (state_exceedsUpperBound) {
      // If we are constrained enough to definitely exceed the upper bound,
      // report.
      assert(!state_withinUpperBound);
      reportOOB(checkerContext, state_exceedsUpperBound, OOB_Excedes);
      return;
    }

    // Otherwise, assume the constraint of the upper bound.
    assert(state_withinUpperBound);
    state = state_withinUpperBound;
  }
  while (false);

  // Propagate the (possibly narrowed) state to successors.
  checkerContext.addTransition(state);
}
229
230
void ArrayBoundCheckerV2::reportOOB(
231
    CheckerContext &checkerContext, ProgramStateRef errorState, OOB_Kind kind,
232
52
    std::unique_ptr<BugReporterVisitor> Visitor) const {
233
234
52
  ExplodedNode *errorNode = checkerContext.generateErrorNode(errorState);
235
52
  if (!errorNode)
236
0
    return;
237
238
52
  if (!BT)
239
7
    BT.reset(new BuiltinBug(this, "Out-of-bound access"));
240
241
  // FIXME: This diagnostics are preliminary.  We should get far better
242
  // diagnostics for explaining buffer overruns.
243
244
52
  SmallString<256> buf;
245
52
  llvm::raw_svector_ostream os(buf);
246
52
  os << "Out of bound memory access ";
247
52
  switch (kind) {
248
12
  case OOB_Precedes:
249
12
    os << "(accessed memory precedes memory block)";
250
12
    break;
251
15
  case OOB_Excedes:
252
15
    os << "(access exceeds upper limit of memory block)";
253
15
    break;
254
25
  case OOB_Tainted:
255
25
    os << "(index is tainted)";
256
25
    break;
257
52
  }
258
259
52
  auto BR = std::make_unique<PathSensitiveBugReport>(*BT, os.str(), errorNode);
260
52
  BR->addVisitor(std::move(Visitor));
261
52
  checkerContext.emitReport(std::move(BR));
262
52
}
263
264
#ifndef NDEBUG
// Debugger aid: print this offset to stderr.
LLVM_DUMP_METHOD void RegionRawOffsetV2::dump() const {
  dumpToStream(llvm::errs());
}

// Print as "raw_offset_v2{<region>,<byte offset>}".
void RegionRawOffsetV2::dumpToStream(raw_ostream &os) const {
  os << "raw_offset_v2{" << getRegion() << ',' << getByteOffset() << '}';
}
#endif
273
274
// Lazily computes a value to be used by 'computeOffset'.  If 'val'
275
// is unknown or undefined, we lazily substitute '0'.  Otherwise,
276
// return 'val'.
277
1.16k
static inline SVal getValue(SVal val, SValBuilder &svalBuilder) {
278
1.16k
  return val.getAs<UndefinedVal>() ? 
svalBuilder.makeArrayIndex(0)1.03k
:
val129
;
279
1.16k
}
280
281
// Scale a base value by a scaling factor, and return the scaled
282
// value as an SVal.  Used by 'computeOffset'.
283
static inline SVal scaleValue(ProgramStateRef state,
284
                              NonLoc baseVal, CharUnits scaling,
285
129
                              SValBuilder &sb) {
286
129
  return sb.evalBinOpNN(state, BO_Mul, baseVal,
287
129
                        sb.makeArrayIndex(scaling.getQuantity()),
288
129
                        sb.getArrayIndexType());
289
129
}
290
291
// Add an SVal to another, treating unknown and undefined values as
292
// summing to UnknownVal.  Used by 'computeOffset'.
293
static SVal addValue(ProgramStateRef state, SVal x, SVal y,
294
129
                     SValBuilder &svalBuilder) {
295
  // We treat UnknownVals and UndefinedVals the same here because we
296
  // only care about computing offsets.
297
129
  if (x.isUnknownOrUndef() || y.isUnknownOrUndef())
298
0
    return UnknownVal();
299
300
129
  return svalBuilder.evalBinOpNN(state, BO_Add, x.castAs<NonLoc>(),
301
129
                                 y.castAs<NonLoc>(),
302
129
                                 svalBuilder.getArrayIndexType());
303
129
}
304
305
/// Compute a raw byte offset from a base region.  Used for array bounds
/// checking.  Walks super-regions from the accessed location outward,
/// accumulating index*sizeof(element) for every ElementRegion layer, and
/// stops at the first non-element region, which becomes the base.
RegionRawOffsetV2 RegionRawOffsetV2::computeOffset(ProgramStateRef state,
                                                   SValBuilder &svalBuilder,
                                                   SVal location)
{
  const MemRegion *region = location.getAsRegion();
  // Undefined acts as the "no offset accumulated yet" sentinel; getValue()
  // turns it into a concrete zero on first use.
  SVal offset = UndefinedVal();

  while (region) {
    switch (region->getKind()) {
      // Any non-element region terminates the walk: it is the base region.
      default: {
        if (const SubRegion *subReg = dyn_cast<SubRegion>(region)) {
          offset = getValue(offset, svalBuilder);
          if (!offset.isUnknownOrUndef())
            return RegionRawOffsetV2(subReg, offset);
        }
        // Non-SubRegion base or unusable offset: give up.
        return RegionRawOffsetV2();
      }
      case MemRegion::ElementRegionKind: {
        const ElementRegion *elemReg = cast<ElementRegion>(region);
        SVal index = elemReg->getIndex();
        if (!index.getAs<NonLoc>())
          return RegionRawOffsetV2();
        QualType elemType = elemReg->getElementType();
        // If the element is an incomplete type, go no further.
        ASTContext &astContext = svalBuilder.getContext();
        if (elemType->isIncompleteType())
          return RegionRawOffsetV2();

        // Update the offset: offset += index * sizeof(elemType).
        offset = addValue(state,
                          getValue(offset, svalBuilder),
                          scaleValue(state,
                          index.castAs<NonLoc>(),
                          astContext.getTypeSizeInChars(elemType),
                          svalBuilder),
                          svalBuilder);

        if (offset.isUnknownOrUndef())
          return RegionRawOffsetV2();

        // Continue the walk one region outward.
        region = elemReg->getSuperRegion();
        continue;
      }
    }
  }
  // location had no region at all.
  return RegionRawOffsetV2();
}
354
355
9
// Registration hook invoked by the CheckerManager when the checker is enabled.
void ento::registerArrayBoundCheckerV2(CheckerManager &mgr) {
  mgr.registerChecker<ArrayBoundCheckerV2>();
}
358
359
18
// This checker has no language- or option-based prerequisites; it is always
// eligible for registration.
bool ento::shouldRegisterArrayBoundCheckerV2(const CheckerManager &mgr) {
  return true;
}