Coverage Report

Created: 2018-09-23 22:08

/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/tools/clang/lib/Rewrite/RewriteRope.cpp
Line
Count
Source (jump to first uncovered line)
1
//===- RewriteRope.cpp - Rope specialized for rewriter --------------------===//
2
//
3
//                     The LLVM Compiler Infrastructure
4
//
5
// This file is distributed under the University of Illinois Open Source
6
// License. See LICENSE.TXT for details.
7
//
8
//===----------------------------------------------------------------------===//
9
//
10
//  This file implements the RewriteRope class, which is a powerful string.
11
//
12
//===----------------------------------------------------------------------===//
13
14
#include "clang/Rewrite/Core/RewriteRope.h"
15
#include "clang/Basic/LLVM.h"
16
#include "llvm/Support/Casting.h"
17
#include <algorithm>
18
#include <cassert>
19
#include <cstring>
20
21
using namespace clang;
22
23
/// RewriteRope is a "strong" string class, designed to make insertions and
24
/// deletions in the middle of the string nearly constant time (really, they are
25
/// O(log N), but with a very low constant factor).
26
///
27
/// The implementation of this datastructure is a conceptual linear sequence of
28
/// RopePiece elements.  Each RopePiece represents a view on a separately
29
/// allocated and reference counted string.  This means that splitting a very
30
/// long string can be done in constant time by splitting a RopePiece that
31
/// references the whole string into two rope pieces that reference each half.
32
/// Once split, another string can be inserted in between the two halves by
33
/// inserting a RopePiece in between the two others.  All of this is very
34
/// inexpensive: it takes time proportional to the number of RopePieces, not the
35
/// length of the strings they represent.
36
///
37
/// While a linear sequence of RopePieces is the conceptual model, the actual
/// implementation captures them in an adapted B+ Tree.  Using a B+ tree (which
/// is a tree that keeps the values in the leaves and where each node
/// contains a reasonable number of pointers to children/values) allows us to
41
/// maintain efficient operation when the RewriteRope contains a *huge* number
42
/// of RopePieces.  The basic idea of the B+ Tree is that it allows us to find
43
/// the RopePiece corresponding to some offset very efficiently, and it
44
/// automatically balances itself on insertions of RopePieces (which can happen
45
/// for both insertions and erases of string ranges).
46
///
47
/// The one wrinkle on the theory is that we don't attempt to keep the tree
48
/// properly balanced when erases happen.  Erases of string data can both insert
49
/// new RopePieces (e.g. when the middle of some other rope piece is deleted,
50
/// which results in two rope pieces, which is just like an insert) or it can
51
/// reduce the number of RopePieces maintained by the B+Tree.  In the case when
52
/// the number of RopePieces is reduced, we don't attempt to maintain the
53
/// standard 'invariant' that each node in the tree contains at least
54
/// 'WidthFactor' children/values.  For our use cases, this doesn't seem to
55
/// matter.
56
///
57
/// The implementation below is primarily implemented in terms of three classes:
58
///   RopePieceBTreeNode - Common base class for:
59
///
60
///     RopePieceBTreeLeaf - Directly manages up to '2*WidthFactor' RopePiece
61
///          nodes.  This directly represents a chunk of the string with those
62
///          RopePieces concatenated.
63
///     RopePieceBTreeInterior - An interior node in the B+ Tree, which manages
64
///          up to '2*WidthFactor' other nodes in the tree.
65
66
namespace {

//===----------------------------------------------------------------------===//
// RopePieceBTreeNode Class
//===----------------------------------------------------------------------===//

  /// RopePieceBTreeNode - Common base class of RopePieceBTreeLeaf and
  /// RopePieceBTreeInterior.  This provides some 'virtual' dispatching methods
  /// and a flag that determines which subclass the instance is.  Also
  /// important, this node knows the full extent of the node, including any
  /// children that it has.  This allows efficient skipping over entire subtrees
  /// when looking for an offset in the BTree.
  class RopePieceBTreeNode {
  protected:
    /// WidthFactor - This controls the number of K/V slots held in the BTree:
    /// how wide it is.  Each level of the BTree is guaranteed to have at least
    /// 'WidthFactor' elements in it (either ropepieces or children), (except
    /// the root, which may have less) and may have at most 2*WidthFactor
    /// elements.
    enum { WidthFactor = 8 };

    /// Size - This is the number of bytes of file this node (including any
    /// potential children) covers.
    unsigned Size = 0;

    /// IsLeaf - True if this is an instance of RopePieceBTreeLeaf, false if it
    /// is an instance of RopePieceBTreeInterior.
    bool IsLeaf;

    // Only the two subclasses may construct/destroy; the destructor is
    // non-virtual on purpose -- deletion always goes through Destroy(), which
    // casts to the concrete type first.
    RopePieceBTreeNode(bool isLeaf) : IsLeaf(isLeaf) {}
    ~RopePieceBTreeNode() = default;

  public:
    bool isLeaf() const { return IsLeaf; }
    unsigned size() const { return Size; }

    /// Destroy - Manually dispatched destruction: deletes this node as its
    /// concrete subclass (see the non-virtual destructor note above).
    void Destroy();

    /// split - Split the range containing the specified offset so that we are
    /// guaranteed that there is a place to do an insertion at the specified
    /// offset.  The offset is relative, so "0" is the start of the node.
    ///
    /// If there is no space in this subtree for the extra piece, the extra tree
    /// node is returned and must be inserted into a parent.
    RopePieceBTreeNode *split(unsigned Offset);

    /// insert - Insert the specified ropepiece into this tree node at the
    /// specified offset.  The offset is relative, so "0" is the start of the
    /// node.
    ///
    /// If there is no space in this subtree for the extra piece, the extra tree
    /// node is returned and must be inserted into a parent.
    RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);

    /// erase - Remove NumBytes from this node at the specified offset.  We are
    /// guaranteed that there is a split at Offset.
    void erase(unsigned Offset, unsigned NumBytes);
  };
124
125
//===----------------------------------------------------------------------===//
// RopePieceBTreeLeaf Class
//===----------------------------------------------------------------------===//

  /// RopePieceBTreeLeaf - Directly manages up to '2*WidthFactor' RopePiece
  /// nodes.  This directly represents a chunk of the string with those
  /// RopePieces concatenated.  Since this is a B+Tree, all values (in this case
  /// instances of RopePiece) are stored in leaves like this.  To make iteration
  /// over the leaves efficient, they maintain a singly linked list through the
  /// NextLeaf field.  This allows the B+Tree forward iterator to be constant
  /// time for all increments.
  class RopePieceBTreeLeaf : public RopePieceBTreeNode {
    /// NumPieces - This holds the number of rope pieces currently active in the
    /// Pieces array.
    unsigned char NumPieces = 0;

    /// Pieces - This tracks the file chunks currently in this leaf.
    RopePiece Pieces[2*WidthFactor];

    /// NextLeaf - This is a pointer to the next leaf in the tree, allowing
    /// efficient in-order forward iteration of the tree without traversal.
    /// PrevLeaf points at whichever 'NextLeaf' field points at this node
    /// (i.e. the previous leaf's NextLeaf member), so unlinking works without
    /// a full back-pointer -- see insertAfterLeafInOrder().
    RopePieceBTreeLeaf **PrevLeaf = nullptr;
    RopePieceBTreeLeaf *NextLeaf = nullptr;

  public:
    RopePieceBTreeLeaf() : RopePieceBTreeNode(true) {}

    ~RopePieceBTreeLeaf() {
      // Unlink from the leaf list before dropping the pieces.
      if (PrevLeaf || NextLeaf)
        removeFromLeafInOrder();
      clear();
    }

    bool isFull() const { return NumPieces == 2*WidthFactor; }

    /// clear - Remove all rope pieces from this leaf.  Assigning a default
    /// RopePiece into each slot drops its reference to the underlying string
    /// data.
    void clear() {
      while (NumPieces)
        Pieces[--NumPieces] = RopePiece();
      Size = 0;
    }

    unsigned getNumPieces() const { return NumPieces; }

    const RopePiece &getPiece(unsigned i) const {
      assert(i < getNumPieces() && "Invalid piece ID");
      return Pieces[i];
    }

    const RopePieceBTreeLeaf *getNextLeafInOrder() const { return NextLeaf; }

    /// insertAfterLeafInOrder - Link this (currently unlinked) leaf into the
    /// leaf list immediately after 'Node'.
    void insertAfterLeafInOrder(RopePieceBTreeLeaf *Node) {
      assert(!PrevLeaf && !NextLeaf && "Already in ordering");

      NextLeaf = Node->NextLeaf;
      if (NextLeaf)
        NextLeaf->PrevLeaf = &NextLeaf;
      PrevLeaf = &Node->NextLeaf;
      Node->NextLeaf = this;
    }

    /// removeFromLeafInOrder - Unlink this leaf from the leaf list, patching
    /// the neighbors' pointers around it.
    void removeFromLeafInOrder() {
      if (PrevLeaf) {
        *PrevLeaf = NextLeaf;
        if (NextLeaf)
          NextLeaf->PrevLeaf = PrevLeaf;
      } else if (NextLeaf) {
        // This was the head of the list; the successor becomes the new head.
        NextLeaf->PrevLeaf = nullptr;
      }
    }

    /// FullRecomputeSizeLocally - This method recomputes the 'Size' field by
    /// summing the size of all RopePieces.
    void FullRecomputeSizeLocally() {
      Size = 0;
      for (unsigned i = 0, e = getNumPieces(); i != e; ++i)
        Size += getPiece(i).size();
    }

    /// split - Split the range containing the specified offset so that we are
    /// guaranteed that there is a place to do an insertion at the specified
    /// offset.  The offset is relative, so "0" is the start of the node.
    ///
    /// If there is no space in this subtree for the extra piece, the extra tree
    /// node is returned and must be inserted into a parent.
    RopePieceBTreeNode *split(unsigned Offset);

    /// insert - Insert the specified ropepiece into this tree node at the
    /// specified offset.  The offset is relative, so "0" is the start of the
    /// node.
    ///
    /// If there is no space in this subtree for the extra piece, the extra tree
    /// node is returned and must be inserted into a parent.
    RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);

    /// erase - Remove NumBytes from this node at the specified offset.  We are
    /// guaranteed that there is a split at Offset.
    void erase(unsigned Offset, unsigned NumBytes);

    static bool classof(const RopePieceBTreeNode *N) {
      return N->isLeaf();
    }
  };

} // namespace
230
231
/// split - Split the range containing the specified offset so that we are
/// guaranteed that there is a place to do an insertion at the specified
/// offset.  The offset is relative, so "0" is the start of the node.
///
/// If there is no space in this subtree for the extra piece, the extra tree
/// node is returned and must be inserted into a parent.
RopePieceBTreeNode *RopePieceBTreeLeaf::split(unsigned Offset) {
  // Find the insertion point.  We are guaranteed that there is a split at the
  // specified offset so find it.
  if (Offset == 0 || Offset == size()) {
    // Fastpath for a common case.  There is already a splitpoint at the end.
    return nullptr;
  }

  // Find the piece that this offset lands in.
  unsigned PieceOffs = 0;
  unsigned i = 0;
  while (Offset >= PieceOffs+Pieces[i].size()) {
    PieceOffs += Pieces[i].size();
    ++i;
  }

  // If there is already a split point at the specified offset, just return
  // success.
  if (PieceOffs == Offset)
    return nullptr;

  // Otherwise, we need to split piece 'i' at Offset-PieceOffs.  Convert Offset
  // to being Piece relative.
  unsigned IntraPieceOffset = Offset-PieceOffs;

  // We do this by shrinking the RopePiece and then doing an insert of the tail.
  // Both halves keep referencing the same underlying string buffer; only the
  // [StartOffs, EndOffs) windows change.
  RopePiece Tail(Pieces[i].StrData, Pieces[i].StartOffs+IntraPieceOffset,
                 Pieces[i].EndOffs);
  Size -= Pieces[i].size();
  Pieces[i].EndOffs = Pieces[i].StartOffs+IntraPieceOffset;
  Size += Pieces[i].size();

  // Inserting the tail may overflow this leaf, in which case insert() returns
  // the newly created sibling for the caller to link in.
  return insert(Offset, Tail);
}
271
272
/// insert - Insert the specified RopePiece into this tree node at the
/// specified offset.  The offset is relative, so "0" is the start of the node.
///
/// If there is no space in this subtree for the extra piece, the extra tree
/// node is returned and must be inserted into a parent.
RopePieceBTreeNode *RopePieceBTreeLeaf::insert(unsigned Offset,
                                               const RopePiece &R) {
  // If this node is not full, insert the piece.
  if (!isFull()) {
    // Find the insertion point.  We are guaranteed that there is a split at the
    // specified offset so find it.
    unsigned i = 0, e = getNumPieces();
    if (Offset == size()) {
      // Fastpath for a common case.
      i = e;
    } else {
      unsigned SlotOffs = 0;
      for (; Offset > SlotOffs; ++i)
        SlotOffs += getPiece(i).size();
      assert(SlotOffs == Offset && "Split didn't occur before insertion!");
    }

    // For an insertion into a non-full leaf node, just insert the value in
    // its sorted position.  This requires moving later values over.
    for (; i != e; --e)
      Pieces[e] = Pieces[e-1];
    Pieces[i] = R;
    ++NumPieces;
    Size += R.size();
    return nullptr;
  }

  // Otherwise, if this leaf is full, split it in two halves.  Since this
  // node is full, it contains 2*WidthFactor values.  We move the first
  // 'WidthFactor' values to the LHS child (which we leave in this node) and
  // move the last 'WidthFactor' values into the RHS child.

  // Create the new node.
  RopePieceBTreeLeaf *NewNode = new RopePieceBTreeLeaf();

  // Move over the last 'WidthFactor' values from here to NewNode.
  std::copy(&Pieces[WidthFactor], &Pieces[2*WidthFactor],
            &NewNode->Pieces[0]);
  // Replace old pieces with null RopePieces to drop refcounts.
  std::fill(&Pieces[WidthFactor], &Pieces[2*WidthFactor], RopePiece());

  // Decrease the number of values in the two nodes.
  NewNode->NumPieces = NumPieces = WidthFactor;

  // Recompute the two nodes' size.
  NewNode->FullRecomputeSizeLocally();
  FullRecomputeSizeLocally();

  // Update the list of leaves.
  NewNode->insertAfterLeafInOrder(this);

  // These insertions can't fail: both halves are now exactly half full.
  // Note: 'this->size()' here is the size AFTER the split, so it decides
  // which half receives the new piece.
  if (this->size() >= Offset)
    this->insert(Offset, R);
  else
    NewNode->insert(Offset - this->size(), R);
  return NewNode;
}
335
336
/// erase - Remove NumBytes from this node at the specified offset.  We are
/// guaranteed that there is a split at Offset.
void RopePieceBTreeLeaf::erase(unsigned Offset, unsigned NumBytes) {
  // Since we are guaranteed that there is a split at Offset, we start by
  // finding the Piece that starts there.
  unsigned PieceOffs = 0;
  unsigned i = 0;
  for (; Offset > PieceOffs; ++i)
    PieceOffs += getPiece(i).size();
  assert(PieceOffs == Offset && "Split didn't occur before erase!");

  unsigned StartPiece = i;

  // Figure out how many pieces completely cover 'NumBytes'.  We want to remove
  // all of them.
  for (; Offset+NumBytes > PieceOffs+getPiece(i).size(); ++i)
    PieceOffs += getPiece(i).size();

  // If we exactly include the last one, include it in the region to delete.
  if (Offset+NumBytes == PieceOffs+getPiece(i).size()) {
    PieceOffs += getPiece(i).size();
    ++i;
  }

  // If we completely cover some RopePieces, erase them now.
  if (i != StartPiece) {
    unsigned NumDeleted = i-StartPiece;
    // Shift the surviving tail pieces down over the deleted ones.
    for (; i != getNumPieces(); ++i)
      Pieces[i-NumDeleted] = Pieces[i];

    // Drop references to dead rope pieces.
    std::fill(&Pieces[getNumPieces()-NumDeleted], &Pieces[getNumPieces()],
              RopePiece());
    NumPieces -= NumDeleted;

    // Account for the bytes the whole-piece deletion removed; any remainder
    // still has to come out of the (new) piece at StartPiece below.
    unsigned CoverBytes = PieceOffs-Offset;
    NumBytes -= CoverBytes;
    Size -= CoverBytes;
  }

  // If we completely removed some stuff, we could be done.
  if (NumBytes == 0) return;

  // Okay, now might be erasing part of some Piece.  If this is the case, then
  // move the start point of the piece.
  assert(getPiece(StartPiece).size() > NumBytes);
  Pieces[StartPiece].StartOffs += NumBytes;

  // The size of this node just shrunk by NumBytes.
  Size -= NumBytes;
}
387
388
//===----------------------------------------------------------------------===//
// RopePieceBTreeInterior Class
//===----------------------------------------------------------------------===//

namespace {

  /// RopePieceBTreeInterior - This represents an interior node in the B+Tree,
  /// which holds up to 2*WidthFactor pointers to child nodes.
  class RopePieceBTreeInterior : public RopePieceBTreeNode {
    /// NumChildren - This holds the number of children currently active in the
    /// Children array.
    unsigned char NumChildren = 0;

    /// Children - Pointers to this node's child subtrees; only the first
    /// NumChildren slots are valid.
    RopePieceBTreeNode *Children[2*WidthFactor];

  public:
    RopePieceBTreeInterior() : RopePieceBTreeNode(false) {}

    /// Construct a new root from two subtrees, used when a split propagates
    /// all the way up and the tree grows a level.
    RopePieceBTreeInterior(RopePieceBTreeNode *LHS, RopePieceBTreeNode *RHS)
        : RopePieceBTreeNode(false) {
      Children[0] = LHS;
      Children[1] = RHS;
      NumChildren = 2;
      Size = LHS->size() + RHS->size();
    }

    ~RopePieceBTreeInterior() {
      // Recursively tear down the subtrees via the manual-dispatch deleter.
      for (unsigned i = 0, e = getNumChildren(); i != e; ++i)
        Children[i]->Destroy();
    }

    bool isFull() const { return NumChildren == 2*WidthFactor; }

    unsigned getNumChildren() const { return NumChildren; }

    const RopePieceBTreeNode *getChild(unsigned i) const {
      assert(i < NumChildren && "invalid child #");
      return Children[i];
    }

    RopePieceBTreeNode *getChild(unsigned i) {
      assert(i < NumChildren && "invalid child #");
      return Children[i];
    }

    /// FullRecomputeSizeLocally - Recompute the Size field of this node by
    /// summing up the sizes of the child nodes.
    void FullRecomputeSizeLocally() {
      Size = 0;
      for (unsigned i = 0, e = getNumChildren(); i != e; ++i)
        Size += getChild(i)->size();
    }

    /// split - Split the range containing the specified offset so that we are
    /// guaranteed that there is a place to do an insertion at the specified
    /// offset.  The offset is relative, so "0" is the start of the node.
    ///
    /// If there is no space in this subtree for the extra piece, the extra tree
    /// node is returned and must be inserted into a parent.
    RopePieceBTreeNode *split(unsigned Offset);

    /// insert - Insert the specified ropepiece into this tree node at the
    /// specified offset.  The offset is relative, so "0" is the start of the
    /// node.
    ///
    /// If there is no space in this subtree for the extra piece, the extra tree
    /// node is returned and must be inserted into a parent.
    RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);

    /// HandleChildPiece - A child propagated an insertion result up to us.
    /// Insert the new child, and/or propagate the result further up the tree.
    RopePieceBTreeNode *HandleChildPiece(unsigned i, RopePieceBTreeNode *RHS);

    /// erase - Remove NumBytes from this node at the specified offset.  We are
    /// guaranteed that there is a split at Offset.
    void erase(unsigned Offset, unsigned NumBytes);

    static bool classof(const RopePieceBTreeNode *N) {
      return !N->isLeaf();
    }
  };

} // namespace
471
472
/// split - Split the range containing the specified offset so that we are
/// guaranteed that there is a place to do an insertion at the specified
/// offset.  The offset is relative, so "0" is the start of the node.
///
/// If there is no space in this subtree for the extra piece, the extra tree
/// node is returned and must be inserted into a parent.
RopePieceBTreeNode *RopePieceBTreeInterior::split(unsigned Offset) {
  // Figure out which child to split.
  if (Offset == 0 || Offset == size())
    return nullptr; // If we have an exact offset, we're already split.

  // Walk to the child whose byte range contains Offset, tracking the number
  // of bytes in the children before it.
  unsigned ChildOffset = 0;
  unsigned i = 0;
  for (; Offset >= ChildOffset+getChild(i)->size(); ++i)
    ChildOffset += getChild(i)->size();

  // If already split there, we're done.
  if (ChildOffset == Offset)
    return nullptr;

  // Otherwise, recursively split the child.  If the child itself had to split
  // into two nodes, adopt the new right-hand node here (which may in turn
  // overflow this node and propagate further up).
  if (RopePieceBTreeNode *RHS = getChild(i)->split(Offset-ChildOffset))
    return HandleChildPiece(i, RHS);
  return nullptr; // Done!
}
497
498
/// insert - Insert the specified ropepiece into this tree node at the
/// specified offset.  The offset is relative, so "0" is the start of the
/// node.
///
/// If there is no space in this subtree for the extra piece, the extra tree
/// node is returned and must be inserted into a parent.
RopePieceBTreeNode *RopePieceBTreeInterior::insert(unsigned Offset,
                                                   const RopePiece &R) {
  // Find the insertion point.  We are guaranteed that there is a split at the
  // specified offset so find it.
  unsigned i = 0, e = getNumChildren();

  unsigned ChildOffs = 0;
  if (Offset == size()) {
    // Fastpath for a common case.  Insert at end of last child.
    i = e-1;
    ChildOffs = size()-getChild(i)->size();
  } else {
    for (; Offset > ChildOffs+getChild(i)->size(); ++i)
      ChildOffs += getChild(i)->size();
  }

  // This subtree grows by the size of the piece regardless of which child
  // ends up holding it.
  Size += R.size();

  // Insert at the end of this child.  If the child overflowed and split, the
  // returned node must be adopted here (possibly splitting this node too).
  if (RopePieceBTreeNode *RHS = getChild(i)->insert(Offset-ChildOffs, R))
    return HandleChildPiece(i, RHS);

  return nullptr;
}
528
529
/// HandleChildPiece - A child propagated an insertion result up to us.
/// Insert the new child, and/or propagate the result further up the tree.
RopePieceBTreeNode *
RopePieceBTreeInterior::HandleChildPiece(unsigned i, RopePieceBTreeNode *RHS) {
  // Otherwise the child propagated a subtree up to us as a new child.  See if
  // we have space for it here.
  if (!isFull()) {
    // Insert RHS after child 'i'.  Children holds raw pointers, so a memmove
    // is a safe way to shift the tail of the array.
    if (i + 1 != getNumChildren())
      memmove(&Children[i+2], &Children[i+1],
              (getNumChildren()-i-1)*sizeof(Children[0]));
    Children[i+1] = RHS;
    ++NumChildren;
    return nullptr;
  }

  // Okay, this node is full.  Split it in half, moving WidthFactor children to
  // a newly allocated interior node.

  // Create the new node.
  RopePieceBTreeInterior *NewNode = new RopePieceBTreeInterior();

  // Move over the last 'WidthFactor' values from here to NewNode.
  memcpy(&NewNode->Children[0], &Children[WidthFactor],
         WidthFactor*sizeof(Children[0]));

  // Decrease the number of values in the two nodes.
  NewNode->NumChildren = NumChildren = WidthFactor;

  // Finally, insert the two new children in the side that can (now) hold them.
  // These insertions can't fail.
  if (i < WidthFactor)
    this->HandleChildPiece(i, RHS);
  else
    NewNode->HandleChildPiece(i-WidthFactor, RHS);

  // Recompute the two nodes' size.
  NewNode->FullRecomputeSizeLocally();
  FullRecomputeSizeLocally();
  return NewNode;
}
570
571
/// erase - Remove NumBytes from this node at the specified offset.  We are
/// guaranteed that there is a split at Offset.
void RopePieceBTreeInterior::erase(unsigned Offset, unsigned NumBytes) {
  // This will shrink this node by NumBytes.
  Size -= NumBytes;

  // Find the first child that overlaps with Offset.  After this loop, Offset
  // is relative to child 'i'.
  unsigned i = 0;
  for (; Offset >= getChild(i)->size(); ++i)
    Offset -= getChild(i)->size();

  // Propagate the delete request into overlapping children, or completely
  // delete the children as appropriate.
  while (NumBytes) {
    RopePieceBTreeNode *CurChild = getChild(i);

    // If we are deleting something contained entirely in the child, pass on the
    // request.
    if (Offset+NumBytes < CurChild->size()) {
      CurChild->erase(Offset, NumBytes);
      return;
    }

    // If this deletion request starts somewhere in the middle of the child, it
    // must be deleting to the end of the child.
    if (Offset) {
      unsigned BytesFromChild = CurChild->size()-Offset;
      CurChild->erase(Offset, BytesFromChild);
      NumBytes -= BytesFromChild;
      // Start at the beginning of the next child.
      Offset = 0;
      ++i;
      continue;
    }

    // If the deletion request completely covers the child, delete it and move
    // the rest down.  Note: no rebalancing is attempted here; this node may
    // end up with fewer than WidthFactor children (see the file comment).
    NumBytes -= CurChild->size();
    CurChild->Destroy();
    --NumChildren;
    if (i != getNumChildren())
      memmove(&Children[i], &Children[i+1],
              (getNumChildren()-i)*sizeof(Children[0]));
  }
}
616
617
//===----------------------------------------------------------------------===//
618
// RopePieceBTreeNode Implementation
619
//===----------------------------------------------------------------------===//
620
621
54.9k
void RopePieceBTreeNode::Destroy() {
622
54.9k
  if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
623
52.6k
    delete Leaf;
624
2.34k
  else
625
2.34k
    delete cast<RopePieceBTreeInterior>(this);
626
54.9k
}
627
628
/// split - Split the range containing the specified offset so that we are
629
/// guaranteed that there is a place to do an insertion at the specified
630
/// offset.  The offset is relative, so "0" is the start of the node.
631
///
632
/// If there is no space in this subtree for the extra piece, the extra tree
633
/// node is returned and must be inserted into a parent.
634
362k
RopePieceBTreeNode *RopePieceBTreeNode::split(unsigned Offset) {
635
362k
  assert(Offset <= size() && "Invalid offset to split!");
636
362k
  if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
637
167k
    return Leaf->split(Offset);
638
194k
  return cast<RopePieceBTreeInterior>(this)->split(Offset);
639
194k
}
640
641
/// insert - Insert the specified ropepiece into this tree node at the
642
/// specified offset.  The offset is relative, so "0" is the start of the
643
/// node.
644
///
645
/// If there is no space in this subtree for the extra piece, the extra tree
646
/// node is returned and must be inserted into a parent.
647
RopePieceBTreeNode *RopePieceBTreeNode::insert(unsigned Offset,
648
316k
                                               const RopePiece &R) {
649
316k
  assert(Offset <= size() && "Invalid offset to insert!");
650
316k
  if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
651
129k
    return Leaf->insert(Offset, R);
652
186k
  return cast<RopePieceBTreeInterior>(this)->insert(Offset, R);
653
186k
}
654
655
/// erase - Remove NumBytes from this node at the specified offset.  We are
656
/// guaranteed that there is a split at Offset.
657
50.4k
void RopePieceBTreeNode::erase(unsigned Offset, unsigned NumBytes) {
658
50.4k
  assert(Offset+NumBytes <= size() && "Invalid offset to erase!");
659
50.4k
  if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
660
40.6k
    return Leaf->erase(Offset, NumBytes);
661
9.81k
  return cast<RopePieceBTreeInterior>(this)->erase(Offset, NumBytes);
662
9.81k
}
663
664
//===----------------------------------------------------------------------===//
665
// RopePieceBTreeIterator Implementation
666
//===----------------------------------------------------------------------===//
667
668
583k
/// getCN - Reinterpret the iterator's opaque CurNode pointer as the leaf it
/// actually points at.
static const RopePieceBTreeLeaf *getCN(const void *Ptr) {
  return static_cast<const RopePieceBTreeLeaf *>(Ptr);
}
671
672
// begin iterator.  'n' is an opaque pointer to the root RopePieceBTreeNode.
RopePieceBTreeIterator::RopePieceBTreeIterator(const void *n) {
  const auto *N = static_cast<const RopePieceBTreeNode *>(n);

  // Walk down the left side of the tree until we get to a leaf.
  while (const auto *IN = dyn_cast<RopePieceBTreeInterior>(N))
    N = IN->getChild(0);

  // We must have at least one leaf.
  CurNode = cast<RopePieceBTreeLeaf>(N);

  // If we found a leaf that happens to be empty, skip over it until we get
  // to something full.
  while (CurNode && getCN(CurNode)->getNumPieces() == 0)
    CurNode = getCN(CurNode)->getNextLeafInOrder();

  if (CurNode)
    CurPiece = &getCN(CurNode)->getPiece(0);
  else  // Empty tree, this is an end() iterator.
    CurPiece = nullptr;
  CurChar = 0;
}
694
695
248k
/// MoveToNextPiece - Advance the iterator to the next RopePiece, stepping
/// across leaf boundaries (and over empty leaves) via the leaf list.
void RopePieceBTreeIterator::MoveToNextPiece() {
  // If we aren't at the last piece in this leaf, just bump the pointer.
  if (CurPiece != &getCN(CurNode)->getPiece(getCN(CurNode)->getNumPieces()-1)) {
    CurChar = 0;
    ++CurPiece;
    return;
  }

  // Find the next non-empty leaf node.
  do
    CurNode = getCN(CurNode)->getNextLeafInOrder();
  while (CurNode && getCN(CurNode)->getNumPieces() == 0);

  if (CurNode)
    CurPiece = &getCN(CurNode)->getPiece(0);
  else // Hit end().
    CurPiece = nullptr;
  CurChar = 0;
}
713
714
//===----------------------------------------------------------------------===//
715
// RopePieceBTree Implementation
716
//===----------------------------------------------------------------------===//
717
718
392k
/// getRoot - View the tree's opaque root pointer as a btree node.
static RopePieceBTreeNode *getRoot(void *P) {
  auto *Node = static_cast<RopePieceBTreeNode *>(P);
  return Node;
}
721
722
12.6k
// A freshly constructed tree is a single empty leaf.
RopePieceBTree::RopePieceBTree() { Root = new RopePieceBTreeLeaf(); }
725
726
25.2k
RopePieceBTree::RopePieceBTree(const RopePieceBTree &RHS) {
  // Copying is only supported for empty trees; the copy gets its own fresh
  // empty leaf rather than sharing nodes with RHS.
  assert(RHS.empty() && "Can't copy non-empty tree yet");
  Root = new RopePieceBTreeLeaf();
}
730
731
37.8k
RopePieceBTree::~RopePieceBTree() {
  // Recursively tear down the whole node tree.
  RopePieceBTreeNode *TheRoot = getRoot(Root);
  TheRoot->Destroy();
}
734
735
6
unsigned RopePieceBTree::size() const {
736
6
  return getRoot(Root)->size();
737
6
}
738
739
12.6k
void RopePieceBTree::clear() {
740
12.6k
  if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(getRoot(Root)))
741
12.6k
    Leaf->clear();
742
0
  else {
743
0
    getRoot(Root)->Destroy();
744
0
    Root = new RopePieceBTreeLeaf();
745
0
  }
746
12.6k
}
747
748
129k
void RopePieceBTree::insert(unsigned Offset, const RopePiece &R) {
749
129k
  // #1. Split at Offset.
750
129k
  if (RopePieceBTreeNode *RHS = getRoot(Root)->split(Offset))
751
104
    Root = new RopePieceBTreeInterior(getRoot(Root), RHS);
752
129k
753
129k
  // #2. Do the insertion.
754
129k
  if (RopePieceBTreeNode *RHS = getRoot(Root)->insert(Offset, R))
755
847
    Root = new RopePieceBTreeInterior(getRoot(Root), RHS);
756
129k
}
757
758
40.6k
void RopePieceBTree::erase(unsigned Offset, unsigned NumBytes) {
759
40.6k
  // #1. Split at Offset.
760
40.6k
  if (RopePieceBTreeNode *RHS = getRoot(Root)->split(Offset))
761
156
    Root = new RopePieceBTreeInterior(getRoot(Root), RHS);
762
40.6k
763
40.6k
  // #2. Do the erasing.
764
40.6k
  getRoot(Root)->erase(Offset, NumBytes);
765
40.6k
}
766
767
//===----------------------------------------------------------------------===//
768
// RewriteRope Implementation
769
//===----------------------------------------------------------------------===//
770
771
/// MakeRopeString - This copies the specified byte range into some instance of
772
/// RopeRefCountString, and return a RopePiece that represents it.  This uses
773
/// the AllocBuffer object to aggregate requests for small strings into one
774
/// allocation instead of doing tons of tiny allocations.
775
129k
/// MakeRopeString - This copies the specified byte range into some instance of
/// RopeRefCountString, and return a RopePiece that represents it.  This uses
/// the AllocBuffer object to aggregate requests for small strings into one
/// allocation instead of doing tons of tiny allocations.
RopePiece RewriteRope::MakeRopeString(const char *Start, const char *End) {
  unsigned Len = End - Start;
  assert(Len && "Zero length RopePiece is invalid!");

  // Fast path: the bytes fit into the space left in the current shared chunk.
  if (AllocOffs + Len <= AllocChunkSize) {
    memcpy(AllocBuffer->Data + AllocOffs, Start, Len);
    AllocOffs += Len;
    return RopePiece(AllocBuffer, AllocOffs - Len, AllocOffs);
  }

  // Oversized request: it could never fit in a chunk, so give it a dedicated
  // allocation of exactly the right size.
  if (Len > AllocChunkSize) {
    unsigned Size = Len + sizeof(RopeRefCountString) - 1;
    auto *Str = reinterpret_cast<RopeRefCountString *>(new char[Size]);
    Str->RefCount = 0;
    memcpy(Str->Data, Start, Len);
    return RopePiece(Str, 0, Len);
  }

  // Small request that doesn't fit in the current chunk: start a fresh chunk,
  // copy the bytes into its front, and let later small requests share the
  // remaining space.
  unsigned AllocSize = offsetof(RopeRefCountString, Data) + AllocChunkSize;
  auto *Str = reinterpret_cast<RopeRefCountString *>(new char[AllocSize]);
  Str->RefCount = 0;
  memcpy(Str->Data, Start, Len);
  AllocBuffer = Str;
  AllocOffs = Len;

  return RopePiece(AllocBuffer, 0, Len);
}