Coverage Report

Created: 2017-10-03 07:32

/Users/buildslave/jenkins/sharedspace/clang-stage2-coverage-R@2/llvm/tools/clang/lib/Rewrite/RewriteRope.cpp
Line
Count
Source (jump to first uncovered line)
1
//===--- RewriteRope.cpp - Rope specialized for rewriter --------*- C++ -*-===//
2
//
3
//                     The LLVM Compiler Infrastructure
4
//
5
// This file is distributed under the University of Illinois Open Source
6
// License. See LICENSE.TXT for details.
7
//
8
//===----------------------------------------------------------------------===//
9
//
10
//  This file implements the RewriteRope class, which is a powerful string.
11
//
12
//===----------------------------------------------------------------------===//
13
14
#include "clang/Rewrite/Core/RewriteRope.h"
15
#include "clang/Basic/LLVM.h"
16
#include <algorithm>
17
using namespace clang;
18
19
/// RewriteRope is a "strong" string class, designed to make insertions and
20
/// deletions in the middle of the string nearly constant time (really, they are
21
/// O(log N), but with a very low constant factor).
22
///
23
/// The implementation of this datastructure is a conceptual linear sequence of
24
/// RopePiece elements.  Each RopePiece represents a view on a separately
25
/// allocated and reference counted string.  This means that splitting a very
26
/// long string can be done in constant time by splitting a RopePiece that
27
/// references the whole string into two rope pieces that reference each half.
28
/// Once split, another string can be inserted in between the two halves by
29
/// inserting a RopePiece in between the two others.  All of this is very
30
/// inexpensive: it takes time proportional to the number of RopePieces, not the
31
/// length of the strings they represent.
32
///
33
/// While a linear sequence of RopePieces is the conceptual model, the actual
34
/// implementation captures them in an adapted B+ Tree.  Using a B+ tree (which
35
/// is a tree that keeps the values in the leaves and where each node
36
/// contains a reasonable number of pointers to children/values) allows us to
37
/// maintain efficient operation when the RewriteRope contains a *huge* number
38
/// of RopePieces.  The basic idea of the B+ Tree is that it allows us to find
39
/// the RopePiece corresponding to some offset very efficiently, and it
40
/// automatically balances itself on insertions of RopePieces (which can happen
41
/// for both insertions and erases of string ranges).
42
///
43
/// The one wrinkle on the theory is that we don't attempt to keep the tree
44
/// properly balanced when erases happen.  Erases of string data can both insert
45
/// new RopePieces (e.g. when the middle of some other rope piece is deleted,
46
/// which results in two rope pieces, which is just like an insert) or it can
47
/// reduce the number of RopePieces maintained by the B+Tree.  In the case when
48
/// the number of RopePieces is reduced, we don't attempt to maintain the
49
/// standard 'invariant' that each node in the tree contains at least
50
/// 'WidthFactor' children/values.  For our use cases, this doesn't seem to
51
/// matter.
52
///
53
/// The implementation below is primarily implemented in terms of three classes:
54
///   RopePieceBTreeNode - Common base class for:
55
///
56
///     RopePieceBTreeLeaf - Directly manages up to '2*WidthFactor' RopePiece
57
///          nodes.  This directly represents a chunk of the string with those
58
///          RopePieces concatenated.
59
///     RopePieceBTreeInterior - An interior node in the B+ Tree, which manages
60
///          up to '2*WidthFactor' other nodes in the tree.
61
62
63
//===----------------------------------------------------------------------===//
64
// RopePieceBTreeNode Class
65
//===----------------------------------------------------------------------===//
66
67
namespace {
68
  /// RopePieceBTreeNode - Common base class of RopePieceBTreeLeaf and
69
  /// RopePieceBTreeInterior.  This provides some 'virtual' dispatching methods
70
  /// and a flag that determines which subclass the instance is.  Also
71
  /// important, this node knows the full extend of the node, including any
72
  /// children that it has.  This allows efficient skipping over entire subtrees
73
  /// when looking for an offset in the BTree.
74
  class RopePieceBTreeNode {
75
  protected:
76
    /// WidthFactor - This controls the number of K/V slots held in the BTree:
77
    /// how wide it is.  Each level of the BTree is guaranteed to have at least
78
    /// 'WidthFactor' elements in it (either ropepieces or children), (except
79
    /// the root, which may have less) and may have at most 2*WidthFactor
80
    /// elements.
81
    enum { WidthFactor = 8 };
82
83
    /// Size - This is the number of bytes of file this node (including any
84
    /// potential children) covers.
85
    unsigned Size;
86
87
    /// IsLeaf - True if this is an instance of RopePieceBTreeLeaf, false if it
88
    /// is an instance of RopePieceBTreeInterior.
89
    bool IsLeaf;
90
91
84.7k
    RopePieceBTreeNode(bool isLeaf) : Size(0), IsLeaf(isLeaf) {}
92
    ~RopePieceBTreeNode() = default;
93
94
  public:
95
4.59M
    bool isLeaf() const { return IsLeaf; }
96
58.6M
    unsigned size() const { return Size; }
97
98
    void Destroy();
99
100
    /// split - Split the range containing the specified offset so that we are
101
    /// guaranteed that there is a place to do an insertion at the specified
102
    /// offset.  The offset is relative, so "0" is the start of the node.
103
    ///
104
    /// If there is no space in this subtree for the extra piece, the extra tree
105
    /// node is returned and must be inserted into a parent.
106
    RopePieceBTreeNode *split(unsigned Offset);
107
108
    /// insert - Insert the specified ropepiece into this tree node at the
109
    /// specified offset.  The offset is relative, so "0" is the start of the
110
    /// node.
111
    ///
112
    /// If there is no space in this subtree for the extra piece, the extra tree
113
    /// node is returned and must be inserted into a parent.
114
    RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);
115
116
    /// erase - Remove NumBytes from this node at the specified offset.  We are
117
    /// guaranteed that there is a split at Offset.
118
    void erase(unsigned Offset, unsigned NumBytes);
119
120
  };
121
} // end anonymous namespace
122
123
//===----------------------------------------------------------------------===//
124
// RopePieceBTreeLeaf Class
125
//===----------------------------------------------------------------------===//
126
127
namespace {
128
  /// RopePieceBTreeLeaf - Directly manages up to '2*WidthFactor' RopePiece
129
  /// nodes.  This directly represents a chunk of the string with those
130
  /// RopePieces contatenated.  Since this is a B+Tree, all values (in this case
131
  /// instances of RopePiece) are stored in leaves like this.  To make iteration
132
  /// over the leaves efficient, they maintain a singly linked list through the
133
  /// NextLeaf field.  This allows the B+Tree forward iterator to be constant
134
  /// time for all increments.
135
  class RopePieceBTreeLeaf : public RopePieceBTreeNode {
136
    /// NumPieces - This holds the number of rope pieces currently active in the
137
    /// Pieces array.
138
    unsigned char NumPieces;
139
140
    /// Pieces - This tracks the file chunks currently in this leaf.
141
    ///
142
    RopePiece Pieces[2*WidthFactor];
143
144
    /// NextLeaf - This is a pointer to the next leaf in the tree, allowing
145
    /// efficient in-order forward iteration of the tree without traversal.
146
    RopePieceBTreeLeaf **PrevLeaf, *NextLeaf;
147
  public:
148
    RopePieceBTreeLeaf() : RopePieceBTreeNode(true), NumPieces(0),
149
78.3k
                           PrevLeaf(nullptr), NextLeaf(nullptr) {}
150
78.3k
    ~RopePieceBTreeLeaf() {
151
78.3k
      if (
PrevLeaf || 78.3k
NextLeaf78.3k
)
152
49.2k
        removeFromLeafInOrder();
153
78.3k
      clear();
154
78.3k
    }
155
156
622k
    bool isFull() const { return NumPieces == 2*WidthFactor; }
157
158
    /// clear - Remove all rope pieces from this leaf.
159
88.0k
    void clear() {
160
660k
      while (NumPieces)
161
572k
        Pieces[--NumPieces] = RopePiece();
162
88.0k
      Size = 0;
163
88.0k
    }
164
165
1.32M
    unsigned getNumPieces() const { return NumPieces; }
166
167
6.48M
    const RopePiece &getPiece(unsigned i) const {
168
6.48M
      assert(i < getNumPieces() && "Invalid piece ID");
169
6.48M
      return Pieces[i];
170
6.48M
    }
171
172
60.2k
    const RopePieceBTreeLeaf *getNextLeafInOrder() const { return NextLeaf; }
173
49.2k
    void insertAfterLeafInOrder(RopePieceBTreeLeaf *Node) {
174
49.2k
      assert(!PrevLeaf && !NextLeaf && "Already in ordering");
175
49.2k
176
49.2k
      NextLeaf = Node->NextLeaf;
177
49.2k
      if (NextLeaf)
178
9.86k
        NextLeaf->PrevLeaf = &NextLeaf;
179
49.2k
      PrevLeaf = &Node->NextLeaf;
180
49.2k
      Node->NextLeaf = this;
181
49.2k
    }
182
183
49.2k
    void removeFromLeafInOrder() {
184
49.2k
      if (
PrevLeaf49.2k
) {
185
1
        *PrevLeaf = NextLeaf;
186
1
        if (NextLeaf)
187
1
          NextLeaf->PrevLeaf = PrevLeaf;
188
49.2k
      } else 
if (49.2k
NextLeaf49.2k
) {
189
49.2k
        NextLeaf->PrevLeaf = nullptr;
190
49.2k
      }
191
49.2k
    }
192
193
    /// FullRecomputeSizeLocally - This method recomputes the 'Size' field by
194
    /// summing the size of all RopePieces.
195
98.4k
    void FullRecomputeSizeLocally() {
196
98.4k
      Size = 0;
197
886k
      for (unsigned i = 0, e = getNumPieces(); 
i != e886k
;
++i787k
)
198
787k
        Size += getPiece(i).size();
199
98.4k
    }
200
201
    /// split - Split the range containing the specified offset so that we are
202
    /// guaranteed that there is a place to do an insertion at the specified
203
    /// offset.  The offset is relative, so "0" is the start of the node.
204
    ///
205
    /// If there is no space in this subtree for the extra piece, the extra tree
206
    /// node is returned and must be inserted into a parent.
207
    RopePieceBTreeNode *split(unsigned Offset);
208
209
    /// insert - Insert the specified ropepiece into this tree node at the
210
    /// specified offset.  The offset is relative, so "0" is the start of the
211
    /// node.
212
    ///
213
    /// If there is no space in this subtree for the extra piece, the extra tree
214
    /// node is returned and must be inserted into a parent.
215
    RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);
216
217
218
    /// erase - Remove NumBytes from this node at the specified offset.  We are
219
    /// guaranteed that there is a split at Offset.
220
    void erase(unsigned Offset, unsigned NumBytes);
221
222
4.58M
    static inline bool classof(const RopePieceBTreeNode *N) {
223
4.58M
      return N->isLeaf();
224
4.58M
    }
225
  };
226
} // end anonymous namespace
227
228
/// split - Split the range containing the specified offset so that we are
229
/// guaranteed that there is a place to do an insertion at the specified
230
/// offset.  The offset is relative, so "0" is the start of the node.
231
///
232
/// If there is no space in this subtree for the extra piece, the extra tree
233
/// node is returned and must be inserted into a parent.
234
533k
RopePieceBTreeNode *RopePieceBTreeLeaf::split(unsigned Offset) {
235
533k
  // Find the insertion point.  We are guaranteed that there is a split at the
236
533k
  // specified offset so find it.
237
533k
  if (
Offset == 0 || 533k
Offset == size()523k
) {
238
10.4k
    // Fastpath for a common case.  There is already a splitpoint at the end.
239
10.4k
    return nullptr;
240
10.4k
  }
241
523k
242
523k
  // Find the piece that this offset lands in.
243
523k
  unsigned PieceOffs = 0;
244
523k
  unsigned i = 0;
245
5.08M
  while (
Offset >= PieceOffs+Pieces[i].size()5.08M
) {
246
4.56M
    PieceOffs += Pieces[i].size();
247
4.56M
    ++i;
248
4.56M
  }
249
523k
250
523k
  // If there is already a split point at the specified offset, just return
251
523k
  // success.
252
523k
  if (PieceOffs == Offset)
253
298k
    return nullptr;
254
224k
255
224k
  // Otherwise, we need to split piece 'i' at Offset-PieceOffs.  Convert Offset
256
224k
  // to being Piece relative.
257
224k
  unsigned IntraPieceOffset = Offset-PieceOffs;
258
224k
259
224k
  // We do this by shrinking the RopePiece and then doing an insert of the tail.
260
224k
  RopePiece Tail(Pieces[i].StrData, Pieces[i].StartOffs+IntraPieceOffset,
261
224k
                 Pieces[i].EndOffs);
262
224k
  Size -= Pieces[i].size();
263
224k
  Pieces[i].EndOffs = Pieces[i].StartOffs+IntraPieceOffset;
264
224k
  Size += Pieces[i].size();
265
224k
266
224k
  return insert(Offset, Tail);
267
224k
}
268
269
270
/// insert - Insert the specified RopePiece into this tree node at the
271
/// specified offset.  The offset is relative, so "0" is the start of the node.
272
///
273
/// If there is no space in this subtree for the extra piece, the extra tree
274
/// node is returned and must be inserted into a parent.
275
RopePieceBTreeNode *RopePieceBTreeLeaf::insert(unsigned Offset,
276
622k
                                               const RopePiece &R) {
277
622k
  // If this node is not full, insert the piece.
278
622k
  if (
!isFull()622k
) {
279
573k
    // Find the insertion point.  We are guaranteed that there is a split at the
280
573k
    // specified offset so find it.
281
573k
    unsigned i = 0, e = getNumPieces();
282
573k
    if (
Offset == size()573k
) {
283
177k
      // Fastpath for a common case.
284
177k
      i = e;
285
573k
    } else {
286
396k
      unsigned SlotOffs = 0;
287
3.17M
      for (; 
Offset > SlotOffs3.17M
;
++i2.77M
)
288
2.77M
        SlotOffs += getPiece(i).size();
289
396k
      assert(SlotOffs == Offset && "Split didn't occur before insertion!");
290
396k
    }
291
573k
292
573k
    // For an insertion into a non-full leaf node, just insert the value in
293
573k
    // its sorted position.  This requires moving later values over.
294
1.91M
    for (; 
i != e1.91M
;
--e1.33M
)
295
1.33M
      Pieces[e] = Pieces[e-1];
296
573k
    Pieces[i] = R;
297
573k
    ++NumPieces;
298
573k
    Size += R.size();
299
573k
    return nullptr;
300
573k
  }
301
49.2k
302
49.2k
  // Otherwise, if this is leaf is full, split it in two halves.  Since this
303
49.2k
  // node is full, it contains 2*WidthFactor values.  We move the first
304
49.2k
  // 'WidthFactor' values to the LHS child (which we leave in this node) and
305
49.2k
  // move the last 'WidthFactor' values into the RHS child.
306
49.2k
307
49.2k
  // Create the new node.
308
49.2k
  RopePieceBTreeLeaf *NewNode = new RopePieceBTreeLeaf();
309
49.2k
310
49.2k
  // Move over the last 'WidthFactor' values from here to NewNode.
311
49.2k
  std::copy(&Pieces[WidthFactor], &Pieces[2*WidthFactor],
312
49.2k
            &NewNode->Pieces[0]);
313
49.2k
  // Replace old pieces with null RopePieces to drop refcounts.
314
49.2k
  std::fill(&Pieces[WidthFactor], &Pieces[2*WidthFactor], RopePiece());
315
49.2k
316
49.2k
  // Decrease the number of values in the two nodes.
317
49.2k
  NewNode->NumPieces = NumPieces = WidthFactor;
318
49.2k
319
49.2k
  // Recompute the two nodes' size.
320
49.2k
  NewNode->FullRecomputeSizeLocally();
321
49.2k
  FullRecomputeSizeLocally();
322
49.2k
323
49.2k
  // Update the list of leaves.
324
49.2k
  NewNode->insertAfterLeafInOrder(this);
325
49.2k
326
49.2k
  // These insertions can't fail.
327
49.2k
  if (this->size() >= Offset)
328
1.43k
    this->insert(Offset, R);
329
49.2k
  else
330
47.7k
    NewNode->insert(Offset - this->size(), R);
331
622k
  return NewNode;
332
622k
}
333
334
/// erase - Remove NumBytes from this node at the specified offset.  We are
335
/// guaranteed that there is a split at Offset.
336
189k
void RopePieceBTreeLeaf::erase(unsigned Offset, unsigned NumBytes) {
337
189k
  // Since we are guaranteed that there is a split at Offset, we start by
338
189k
  // finding the Piece that starts there.
339
189k
  unsigned PieceOffs = 0;
340
189k
  unsigned i = 0;
341
2.07M
  for (; 
Offset > PieceOffs2.07M
;
++i1.88M
)
342
1.88M
    PieceOffs += getPiece(i).size();
343
189k
  assert(PieceOffs == Offset && "Split didn't occur before erase!");
344
189k
345
189k
  unsigned StartPiece = i;
346
189k
347
189k
  // Figure out how many pieces completely cover 'NumBytes'.  We want to remove
348
189k
  // all of them.
349
190k
  for (; 
Offset+NumBytes > PieceOffs+getPiece(i).size()190k
;
++i563
)
350
563
    PieceOffs += getPiece(i).size();
351
189k
352
189k
  // If we exactly include the last one, include it in the region to delete.
353
189k
  if (
Offset+NumBytes == PieceOffs+getPiece(i).size()189k
) {
354
506
    PieceOffs += getPiece(i).size();
355
506
    ++i;
356
506
  }
357
189k
358
189k
  // If we completely cover some RopePieces, erase them now.
359
189k
  if (
i != StartPiece189k
) {
360
742
    unsigned NumDeleted = i-StartPiece;
361
3.84k
    for (; 
i != getNumPieces()3.84k
;
++i3.10k
)
362
3.10k
      Pieces[i-NumDeleted] = Pieces[i];
363
742
364
742
    // Drop references to dead rope pieces.
365
742
    std::fill(&Pieces[getNumPieces()-NumDeleted], &Pieces[getNumPieces()],
366
742
              RopePiece());
367
742
    NumPieces -= NumDeleted;
368
742
369
742
    unsigned CoverBytes = PieceOffs-Offset;
370
742
    NumBytes -= CoverBytes;
371
742
    Size -= CoverBytes;
372
742
  }
373
189k
374
189k
  // If we completely removed some stuff, we could be done.
375
189k
  if (
NumBytes == 0189k
)
return506
;
376
189k
377
189k
  // Okay, now might be erasing part of some Piece.  If this is the case, then
378
189k
  // move the start point of the piece.
379
189k
  assert(getPiece(StartPiece).size() > NumBytes);
380
189k
  Pieces[StartPiece].StartOffs += NumBytes;
381
189k
382
189k
  // The size of this node just shrunk by NumBytes.
383
189k
  Size -= NumBytes;
384
189k
}
385
386
//===----------------------------------------------------------------------===//
387
// RopePieceBTreeInterior Class
388
//===----------------------------------------------------------------------===//
389
390
namespace {
391
  /// RopePieceBTreeInterior - This represents an interior node in the B+Tree,
392
  /// which holds up to 2*WidthFactor pointers to child nodes.
393
  class RopePieceBTreeInterior : public RopePieceBTreeNode {
394
    /// NumChildren - This holds the number of children currently active in the
395
    /// Children array.
396
    unsigned char NumChildren;
397
    RopePieceBTreeNode *Children[2*WidthFactor];
398
  public:
399
5.57k
    RopePieceBTreeInterior() : RopePieceBTreeNode(false), NumChildren(0) {}
400
401
    RopePieceBTreeInterior(RopePieceBTreeNode *LHS, RopePieceBTreeNode *RHS)
402
831
    : RopePieceBTreeNode(false) {
403
831
      Children[0] = LHS;
404
831
      Children[1] = RHS;
405
831
      NumChildren = 2;
406
831
      Size = LHS->size() + RHS->size();
407
831
    }
408
409
6.40k
    ~RopePieceBTreeInterior() {
410
62.0k
      for (unsigned i = 0, e = getNumChildren(); 
i != e62.0k
;
++i55.6k
)
411
55.6k
        Children[i]->Destroy();
412
6.40k
    }
413
414
59.5k
    bool isFull() const { return NumChildren == 2*WidthFactor; }
415
416
1.22M
    unsigned getNumChildren() const { return NumChildren; }
417
1.40k
    const RopePieceBTreeNode *getChild(unsigned i) const {
418
1.40k
      assert(i < NumChildren && "invalid child #");
419
1.40k
      return Children[i];
420
1.40k
    }
421
57.4M
    RopePieceBTreeNode *getChild(unsigned i) {
422
57.4M
      assert(i < NumChildren && "invalid child #");
423
57.4M
      return Children[i];
424
57.4M
    }
425
426
    /// FullRecomputeSizeLocally - Recompute the Size field of this node by
427
    /// summing up the sizes of the child nodes.
428
11.1k
    void FullRecomputeSizeLocally() {
429
11.1k
      Size = 0;
430
105k
      for (unsigned i = 0, e = getNumChildren(); 
i != e105k
;
++i94.7k
)
431
94.7k
        Size += getChild(i)->size();
432
11.1k
    }
433
434
435
    /// split - Split the range containing the specified offset so that we are
436
    /// guaranteed that there is a place to do an insertion at the specified
437
    /// offset.  The offset is relative, so "0" is the start of the node.
438
    ///
439
    /// If there is no space in this subtree for the extra piece, the extra tree
440
    /// node is returned and must be inserted into a parent.
441
    RopePieceBTreeNode *split(unsigned Offset);
442
443
444
    /// insert - Insert the specified ropepiece into this tree node at the
445
    /// specified offset.  The offset is relative, so "0" is the start of the
446
    /// node.
447
    ///
448
    /// If there is no space in this subtree for the extra piece, the extra tree
449
    /// node is returned and must be inserted into a parent.
450
    RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);
451
452
    /// HandleChildPiece - A child propagated an insertion result up to us.
453
    /// Insert the new child, and/or propagate the result further up the tree.
454
    RopePieceBTreeNode *HandleChildPiece(unsigned i, RopePieceBTreeNode *RHS);
455
456
    /// erase - Remove NumBytes from this node at the specified offset.  We are
457
    /// guaranteed that there is a split at Offset.
458
    void erase(unsigned Offset, unsigned NumBytes);
459
460
11.9k
    static inline bool classof(const RopePieceBTreeNode *N) {
461
11.9k
      return !N->isLeaf();
462
11.9k
    }
463
  };
464
} // end anonymous namespace
465
466
/// split - Split the range containing the specified offset so that we are
467
/// guaranteed that there is a place to do an insertion at the specified
468
/// offset.  The offset is relative, so "0" is the start of the node.
469
///
470
/// If there is no space in this subtree for the extra piece, the extra tree
471
/// node is returned and must be inserted into a parent.
472
1.70M
RopePieceBTreeNode *RopePieceBTreeInterior::split(unsigned Offset) {
473
1.70M
  // Figure out which child to split.
474
1.70M
  if (
Offset == 0 || 1.70M
Offset == size()1.70M
)
475
303
    return nullptr; // If we have an exact offset, we're already split.
476
1.70M
477
1.70M
  unsigned ChildOffset = 0;
478
1.70M
  unsigned i = 0;
479
14.3M
  for (; 
Offset >= ChildOffset+getChild(i)->size()14.3M
;
++i12.6M
)
480
12.6M
    ChildOffset += getChild(i)->size();
481
1.70M
482
1.70M
  // If already split there, we're done.
483
1.70M
  if (ChildOffset == Offset)
484
4.44k
    return nullptr;
485
1.70M
486
1.70M
  // Otherwise, recursively split the child.
487
1.70M
  
if (RopePieceBTreeNode *1.70M
RHS1.70M
= getChild(i)->split(Offset-ChildOffset))
488
22.9k
    return HandleChildPiece(i, RHS);
489
1.68M
  return nullptr; // Done!
490
1.68M
}
491
492
/// insert - Insert the specified ropepiece into this tree node at the
493
/// specified offset.  The offset is relative, so "0" is the start of the
494
/// node.
495
///
496
/// If there is no space in this subtree for the extra piece, the extra tree
497
/// node is returned and must be inserted into a parent.
498
RopePieceBTreeNode *RopePieceBTreeInterior::insert(unsigned Offset,
499
1.14M
                                                   const RopePiece &R) {
500
1.14M
  // Find the insertion point.  We are guaranteed that there is a split at the
501
1.14M
  // specified offset so find it.
502
1.14M
  unsigned i = 0, e = getNumChildren();
503
1.14M
504
1.14M
  unsigned ChildOffs = 0;
505
1.14M
  if (
Offset == size()1.14M
) {
506
786
    // Fastpath for a common case.  Insert at end of last child.
507
786
    i = e-1;
508
786
    ChildOffs = size()-getChild(i)->size();
509
1.14M
  } else {
510
8.49M
    for (; 
Offset > ChildOffs+getChild(i)->size()8.49M
;
++i7.35M
)
511
7.35M
      ChildOffs += getChild(i)->size();
512
1.14M
  }
513
1.14M
514
1.14M
  Size += R.size();
515
1.14M
516
1.14M
  // Insert at the end of this child.
517
1.14M
  if (RopePieceBTreeNode *RHS = getChild(i)->insert(Offset-ChildOffs, R))
518
31.0k
    return HandleChildPiece(i, RHS);
519
1.11M
520
1.11M
  return nullptr;
521
1.11M
}
522
523
/// HandleChildPiece - A child propagated an insertion result up to us.
524
/// Insert the new child, and/or propagate the result further up the tree.
525
RopePieceBTreeNode *
526
59.5k
RopePieceBTreeInterior::HandleChildPiece(unsigned i, RopePieceBTreeNode *RHS) {
527
59.5k
  // Otherwise the child propagated a subtree up to us as a new child.  See if
528
59.5k
  // we have space for it here.
529
59.5k
  if (
!isFull()59.5k
) {
530
53.9k
    // Insert RHS after child 'i'.
531
53.9k
    if (i + 1 != getNumChildren())
532
9.02k
      memmove(&Children[i+2], &Children[i+1],
533
9.02k
              (getNumChildren()-i-1)*sizeof(Children[0]));
534
53.9k
    Children[i+1] = RHS;
535
53.9k
    ++NumChildren;
536
53.9k
    return nullptr;
537
53.9k
  }
538
5.57k
539
5.57k
  // Okay, this node is full.  Split it in half, moving WidthFactor children to
540
5.57k
  // a newly allocated interior node.
541
5.57k
542
5.57k
  // Create the new node.
543
5.57k
  RopePieceBTreeInterior *NewNode = new RopePieceBTreeInterior();
544
5.57k
545
5.57k
  // Move over the last 'WidthFactor' values from here to NewNode.
546
5.57k
  memcpy(&NewNode->Children[0], &Children[WidthFactor],
547
5.57k
         WidthFactor*sizeof(Children[0]));
548
5.57k
549
5.57k
  // Decrease the number of values in the two nodes.
550
5.57k
  NewNode->NumChildren = NumChildren = WidthFactor;
551
5.57k
552
5.57k
  // Finally, insert the two new children in the side the can (now) hold them.
553
5.57k
  // These insertions can't fail.
554
5.57k
  if (i < WidthFactor)
555
6
    this->HandleChildPiece(i, RHS);
556
5.57k
  else
557
5.56k
    NewNode->HandleChildPiece(i-WidthFactor, RHS);
558
59.5k
559
59.5k
  // Recompute the two nodes' size.
560
59.5k
  NewNode->FullRecomputeSizeLocally();
561
59.5k
  FullRecomputeSizeLocally();
562
59.5k
  return NewNode;
563
59.5k
}
564
565
/// erase - Remove NumBytes from this node at the specified offset.  We are
566
/// guaranteed that there is a split at Offset.
567
562k
void RopePieceBTreeInterior::erase(unsigned Offset, unsigned NumBytes) {
568
562k
  // This will shrink this node by NumBytes.
569
562k
  Size -= NumBytes;
570
562k
571
562k
  // Find the first child that overlaps with Offset.
572
562k
  unsigned i = 0;
573
5.85M
  for (; 
Offset >= getChild(i)->size()5.85M
;
++i5.28M
)
574
5.28M
    Offset -= getChild(i)->size();
575
562k
576
562k
  // Propagate the delete request into overlapping children, or completely
577
562k
  // delete the children as appropriate.
578
562k
  while (
NumBytes562k
) {
579
562k
    RopePieceBTreeNode *CurChild = getChild(i);
580
562k
581
562k
    // If we are deleting something contained entirely in the child, pass on the
582
562k
    // request.
583
562k
    if (
Offset+NumBytes < CurChild->size()562k
) {
584
562k
      CurChild->erase(Offset, NumBytes);
585
562k
      return;
586
562k
    }
587
9
588
9
    // If this deletion request starts somewhere in the middle of the child, it
589
9
    // must be deleting to the end of the child.
590
9
    
if (9
Offset9
) {
591
8
      unsigned BytesFromChild = CurChild->size()-Offset;
592
8
      CurChild->erase(Offset, BytesFromChild);
593
8
      NumBytes -= BytesFromChild;
594
8
      // Start at the beginning of the next child.
595
8
      Offset = 0;
596
8
      ++i;
597
8
      continue;
598
8
    }
599
1
600
1
    // If the deletion request completely covers the child, delete it and move
601
1
    // the rest down.
602
1
    NumBytes -= CurChild->size();
603
1
    CurChild->Destroy();
604
1
    --NumChildren;
605
1
    if (i != getNumChildren())
606
1
      memmove(&Children[i], &Children[i+1],
607
1
              (getNumChildren()-i)*sizeof(Children[0]));
608
562k
  }
609
562k
}
610
611
//===----------------------------------------------------------------------===//
612
// RopePieceBTreeNode Implementation
613
//===----------------------------------------------------------------------===//
614
615
84.7k
void RopePieceBTreeNode::Destroy() {
616
84.7k
  if (RopePieceBTreeLeaf *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
617
78.3k
    delete Leaf;
618
84.7k
  else
619
6.40k
    delete cast<RopePieceBTreeInterior>(this);
620
84.7k
}
621
622
/// split - Split the range containing the specified offset so that we are
623
/// guaranteed that there is a place to do an insertion at the specified
624
/// offset.  The offset is relative, so "0" is the start of the node.
625
///
626
/// If there is no space in this subtree for the extra piece, the extra tree
627
/// node is returned and must be inserted into a parent.
628
2.24M
RopePieceBTreeNode *RopePieceBTreeNode::split(unsigned Offset) {
629
2.24M
  assert(Offset <= size() && "Invalid offset to split!");
630
2.24M
  if (RopePieceBTreeLeaf *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
631
533k
    return Leaf->split(Offset);
632
1.70M
  return cast<RopePieceBTreeInterior>(this)->split(Offset);
633
1.70M
}
634
635
/// insert - Insert the specified ropepiece into this tree node at the
636
/// specified offset.  The offset is relative, so "0" is the start of the
637
/// node.
638
///
639
/// If there is no space in this subtree for the extra piece, the extra tree
640
/// node is returned and must be inserted into a parent.
641
RopePieceBTreeNode *RopePieceBTreeNode::insert(unsigned Offset,
642
1.49M
                                               const RopePiece &R) {
643
1.49M
  assert(Offset <= size() && "Invalid offset to insert!");
644
1.49M
  if (RopePieceBTreeLeaf *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
645
348k
    return Leaf->insert(Offset, R);
646
1.14M
  return cast<RopePieceBTreeInterior>(this)->insert(Offset, R);
647
1.14M
}
648
649
/// erase - Remove NumBytes from this node at the specified offset.  We are
650
/// guaranteed that there is a split at Offset.
651
752k
void RopePieceBTreeNode::erase(unsigned Offset, unsigned NumBytes) {
652
752k
  assert(Offset+NumBytes <= size() && "Invalid offset to erase!");
653
752k
  if (RopePieceBTreeLeaf *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
654
189k
    return Leaf->erase(Offset, NumBytes);
655
562k
  return cast<RopePieceBTreeInterior>(this)->erase(Offset, NumBytes);
656
562k
}
657
658
659
//===----------------------------------------------------------------------===//
660
// RopePieceBTreeIterator Implementation
661
//===----------------------------------------------------------------------===//
662
663
1.36M
/// getCN - Reinterpret the iterator's opaque CurNode pointer as the leaf it
/// actually points at.
static const RopePieceBTreeLeaf *getCN(const void *P) {
  return static_cast<const RopePieceBTreeLeaf*>(P);
}
666
667
// begin iterator.
668
10.5k
// begin iterator: position at the first piece of the first non-empty leaf.
RopePieceBTreeIterator::RopePieceBTreeIterator(const void *n) {
  const RopePieceBTreeNode *N = static_cast<const RopePieceBTreeNode*>(n);

  // Walk down the left side of the tree until we get to a leaf.
  while (const RopePieceBTreeInterior *IN = dyn_cast<RopePieceBTreeInterior>(N))
    N = IN->getChild(0);

  // We must have at least one leaf.
  CurNode = cast<RopePieceBTreeLeaf>(N);

  // If we found a leaf that happens to be empty, skip over it until we get
  // to something full.
  while (CurNode && getCN(CurNode)->getNumPieces() == 0)
    CurNode = getCN(CurNode)->getNextLeafInOrder();

  if (CurNode)
    CurPiece = &getCN(CurNode)->getPiece(0);
  else // Empty tree, this is an end() iterator.
    CurPiece = nullptr;
  CurChar = 0;
}
689
690
589k
void RopePieceBTreeIterator::MoveToNextPiece() {
691
589k
  if (
CurPiece != &getCN(CurNode)->getPiece(getCN(CurNode)->getNumPieces()-1)589k
) {
692
528k
    CurChar = 0;
693
528k
    ++CurPiece;
694
528k
    return;
695
528k
  }
696
60.2k
697
60.2k
  // Find the next non-empty leaf node.
698
60.2k
  do
699
60.2k
    CurNode = getCN(CurNode)->getNextLeafInOrder();
700
60.2k
  while (
CurNode && 60.2k
getCN(CurNode)->getNumPieces() == 050.4k
);
701
60.2k
702
60.2k
  if (CurNode)
703
50.4k
    CurPiece = &getCN(CurNode)->getPiece(0);
704
60.2k
  else // Hit end().
705
9.76k
    CurPiece = nullptr;
706
589k
  CurChar = 0;
707
589k
}
708
709
//===----------------------------------------------------------------------===//
// RopePieceBTree Implementation
//===----------------------------------------------------------------------===//
1.11M
/// getRoot - Reinterpret the tree's opaque root pointer as a node pointer.
static RopePieceBTreeNode *getRoot(void *Ptr) {
  return static_cast<RopePieceBTreeNode *>(Ptr);
}
716
717
9.69k
/// An empty tree is represented by a single piece-less leaf as its root.
RopePieceBTree::RopePieceBTree() {
  Root = new RopePieceBTreeLeaf();
}
720
19.3k
/// Copy construction is only supported for empty trees; the "copy" is just a
/// fresh empty root.
RopePieceBTree::RopePieceBTree(const RopePieceBTree &RHS) {
  assert(RHS.empty() && "Can't copy non-empty tree yet");
  Root = new RopePieceBTreeLeaf();
}
724
29.0k
/// Recursively tear down the whole tree via the node's Destroy hook.
RopePieceBTree::~RopePieceBTree() {
  getRoot(Root)->Destroy();
}
727
728
6
unsigned RopePieceBTree::size() const {
729
6
  return getRoot(Root)->size();
730
6
}
731
732
9.69k
void RopePieceBTree::clear() {
733
9.69k
  if (RopePieceBTreeLeaf *Leaf = dyn_cast<RopePieceBTreeLeaf>(getRoot(Root)))
734
9.69k
    Leaf->clear();
735
0
  else {
736
0
    getRoot(Root)->Destroy();
737
0
    Root = new RopePieceBTreeLeaf();
738
0
  }
739
9.69k
}
740
741
348k
void RopePieceBTree::insert(unsigned Offset, const RopePiece &R) {
742
348k
  // #1. Split at Offset.
743
348k
  if (RopePieceBTreeNode *RHS = getRoot(Root)->split(Offset))
744
32
    Root = new RopePieceBTreeInterior(getRoot(Root), RHS);
745
348k
746
348k
  // #2. Do the insertion.
747
348k
  if (RopePieceBTreeNode *RHS = getRoot(Root)->insert(Offset, R))
748
708
    Root = new RopePieceBTreeInterior(getRoot(Root), RHS);
749
348k
}
750
751
189k
void RopePieceBTree::erase(unsigned Offset, unsigned NumBytes) {
752
189k
  // #1. Split at Offset.
753
189k
  if (RopePieceBTreeNode *RHS = getRoot(Root)->split(Offset))
754
91
    Root = new RopePieceBTreeInterior(getRoot(Root), RHS);
755
189k
756
189k
  // #2. Do the erasing.
757
189k
  getRoot(Root)->erase(Offset, NumBytes);
758
189k
}
759
760
//===----------------------------------------------------------------------===//
// RewriteRope Implementation
//===----------------------------------------------------------------------===//
764
/// MakeRopeString - This copies the specified byte range into some instance of
765
/// RopeRefCountString, and return a RopePiece that represents it.  This uses
766
/// the AllocBuffer object to aggregate requests for small strings into one
767
/// allocation instead of doing tons of tiny allocations.
768
348k
RopePiece RewriteRope::MakeRopeString(const char *Start, const char *End) {
769
348k
  unsigned Len = End-Start;
770
348k
  assert(Len && "Zero length RopePiece is invalid!");
771
348k
772
348k
  // If we have space for this string in the current alloc buffer, use it.
773
348k
  if (
AllocOffs+Len <= AllocChunkSize348k
) {
774
337k
    memcpy(AllocBuffer->Data+AllocOffs, Start, Len);
775
337k
    AllocOffs += Len;
776
337k
    return RopePiece(AllocBuffer, AllocOffs-Len, AllocOffs);
777
337k
  }
778
11.0k
779
11.0k
  // If we don't have enough room because this specific allocation is huge,
780
11.0k
  // just allocate a new rope piece for it alone.
781
11.0k
  
if (11.0k
Len > AllocChunkSize11.0k
) {
782
179
    unsigned Size = End-Start+sizeof(RopeRefCountString)-1;
783
179
    RopeRefCountString *Res =
784
179
      reinterpret_cast<RopeRefCountString *>(new char[Size]);
785
179
    Res->RefCount = 0;
786
179
    memcpy(Res->Data, Start, End-Start);
787
179
    return RopePiece(Res, 0, End-Start);
788
179
  }
789
10.8k
790
10.8k
  // Otherwise, this was a small request but we just don't have space for it
791
10.8k
  // Make a new chunk and share it with later allocations.
792
10.8k
793
10.8k
  unsigned AllocSize = offsetof(RopeRefCountString, Data) + AllocChunkSize;
794
10.8k
  RopeRefCountString *Res =
795
10.8k
      reinterpret_cast<RopeRefCountString *>(new char[AllocSize]);
796
10.8k
  Res->RefCount = 0;
797
10.8k
  memcpy(Res->Data, Start, Len);
798
10.8k
  AllocBuffer = Res;
799
10.8k
  AllocOffs = Len;
800
10.8k
801
10.8k
  return RopePiece(AllocBuffer, 0, Len);
802
10.8k
}
803
804