// Doxygen page header captured with this scrape (not part of the source):
//   clang 7.0.0svn — RewriteRope.cpp — "Go to the documentation of this file."
1 //===- RewriteRope.cpp - Rope specialized for rewriter --------------------===//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This file implements the RewriteRope class, which is a powerful string.
11 //
12 //===----------------------------------------------------------------------===//
13 
15 #include "clang/Basic/LLVM.h"
16 #include "llvm/Support/Casting.h"
17 #include <algorithm>
18 #include <cassert>
19 #include <cstring>
20 
21 using namespace clang;
22 
23 /// RewriteRope is a "strong" string class, designed to make insertions and
24 /// deletions in the middle of the string nearly constant time (really, they are
25 /// O(log N), but with a very low constant factor).
26 ///
27 /// The implementation of this datastructure is a conceptual linear sequence of
28 /// RopePiece elements. Each RopePiece represents a view on a separately
29 /// allocated and reference counted string. This means that splitting a very
30 /// long string can be done in constant time by splitting a RopePiece that
31 /// references the whole string into two rope pieces that reference each half.
32 /// Once split, another string can be inserted in between the two halves by
33 /// inserting a RopePiece in between the two others. All of this is very
34 /// inexpensive: it takes time proportional to the number of RopePieces, not the
35 /// length of the strings they represent.
36 ///
/// While a linear sequence of RopePieces is the conceptual model, the actual
/// implementation captures them in an adapted B+ Tree. Using a B+ tree (which
/// is a tree that keeps the values in the leaves and where each node
/// contains a reasonable number of pointers to children/values) allows us to
41 /// maintain efficient operation when the RewriteRope contains a *huge* number
42 /// of RopePieces. The basic idea of the B+ Tree is that it allows us to find
43 /// the RopePiece corresponding to some offset very efficiently, and it
44 /// automatically balances itself on insertions of RopePieces (which can happen
45 /// for both insertions and erases of string ranges).
46 ///
47 /// The one wrinkle on the theory is that we don't attempt to keep the tree
48 /// properly balanced when erases happen. Erases of string data can both insert
49 /// new RopePieces (e.g. when the middle of some other rope piece is deleted,
50 /// which results in two rope pieces, which is just like an insert) or it can
51 /// reduce the number of RopePieces maintained by the B+Tree. In the case when
52 /// the number of RopePieces is reduced, we don't attempt to maintain the
53 /// standard 'invariant' that each node in the tree contains at least
54 /// 'WidthFactor' children/values. For our use cases, this doesn't seem to
55 /// matter.
56 ///
57 /// The implementation below is primarily implemented in terms of three classes:
58 /// RopePieceBTreeNode - Common base class for:
59 ///
60 /// RopePieceBTreeLeaf - Directly manages up to '2*WidthFactor' RopePiece
61 /// nodes. This directly represents a chunk of the string with those
/// RopePieces concatenated.
63 /// RopePieceBTreeInterior - An interior node in the B+ Tree, which manages
64 /// up to '2*WidthFactor' other nodes in the tree.
65 
66 namespace {
67 
68 //===----------------------------------------------------------------------===//
69 // RopePieceBTreeNode Class
70 //===----------------------------------------------------------------------===//
71 
/// RopePieceBTreeNode - Common base class of RopePieceBTreeLeaf and
/// RopePieceBTreeInterior. This provides some 'virtual' dispatching methods
/// and a flag that determines which subclass the instance is. Also
/// important, this node knows the full extent of the node, including any
/// children that it has. This allows efficient skipping over entire subtrees
/// when looking for an offset in the BTree.
class RopePieceBTreeNode {
protected:
  /// WidthFactor - This controls the number of K/V slots held in the BTree:
  /// how wide it is. Each level of the BTree is guaranteed to have at least
  /// 'WidthFactor' elements in it (either ropepieces or children), (except
  /// the root, which may have less) and may have at most 2*WidthFactor
  /// elements.
  enum { WidthFactor = 8 };

  /// Size - This is the number of bytes of file this node (including any
  /// potential children) covers.
  unsigned Size = 0;

  /// IsLeaf - True if this is an instance of RopePieceBTreeLeaf, false if it
  /// is an instance of RopePieceBTreeInterior. Used for the hand-rolled
  /// LLVM-style RTTI dispatch (see the subclasses' classof methods).
  bool IsLeaf;

  RopePieceBTreeNode(bool isLeaf) : IsLeaf(isLeaf) {}

  // Non-virtual (and protected) destructor: nodes must be destroyed via
  // Destroy(), which dispatches to the concrete subclass.
  ~RopePieceBTreeNode() = default;

public:
  bool isLeaf() const { return IsLeaf; }
  unsigned size() const { return Size; }

  /// Destroy - Delete this node through the correct concrete type, since the
  /// destructor is intentionally non-virtual.
  void Destroy();

  /// split - Split the range containing the specified offset so that we are
  /// guaranteed that there is a place to do an insertion at the specified
  /// offset. The offset is relative, so "0" is the start of the node.
  ///
  /// If there is no space in this subtree for the extra piece, the extra tree
  /// node is returned and must be inserted into a parent.
  RopePieceBTreeNode *split(unsigned Offset);

  /// insert - Insert the specified ropepiece into this tree node at the
  /// specified offset. The offset is relative, so "0" is the start of the
  /// node.
  ///
  /// If there is no space in this subtree for the extra piece, the extra tree
  /// node is returned and must be inserted into a parent.
  RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);

  /// erase - Remove NumBytes from this node at the specified offset. We are
  /// guaranteed that there is a split at Offset.
  void erase(unsigned Offset, unsigned NumBytes);
};
124 
125 //===----------------------------------------------------------------------===//
126 // RopePieceBTreeLeaf Class
127 //===----------------------------------------------------------------------===//
128 
/// RopePieceBTreeLeaf - Directly manages up to '2*WidthFactor' RopePiece
/// nodes. This directly represents a chunk of the string with those
/// RopePieces concatenated. Since this is a B+Tree, all values (in this case
/// instances of RopePiece) are stored in leaves like this. To make iteration
/// over the leaves efficient, they maintain a singly linked list through the
/// NextLeaf field. This allows the B+Tree forward iterator to be constant
/// time for all increments.
class RopePieceBTreeLeaf : public RopePieceBTreeNode {
  /// NumPieces - This holds the number of rope pieces currently active in the
  /// Pieces array.
  unsigned char NumPieces = 0;

  /// Pieces - This tracks the file chunks currently in this leaf.
  RopePiece Pieces[2*WidthFactor];

  /// NextLeaf - This is a pointer to the next leaf in the tree, allowing
  /// efficient in-order forward iteration of the tree without traversal.
  /// PrevLeaf holds the address of the pointer that refers to this leaf
  /// (the previous leaf's NextLeaf field), not the previous leaf itself;
  /// this makes unlinking O(1) with only a single forward link per leaf.
  RopePieceBTreeLeaf **PrevLeaf = nullptr;
  RopePieceBTreeLeaf *NextLeaf = nullptr;

public:
  RopePieceBTreeLeaf() : RopePieceBTreeNode(true) {}

  ~RopePieceBTreeLeaf() {
    if (PrevLeaf || NextLeaf)
      removeFromLeafInOrder();
    clear();
  }

  bool isFull() const { return NumPieces == 2*WidthFactor; }

  /// clear - Remove all rope pieces from this leaf.
  void clear() {
    // Assigning a default RopePiece drops the reference to the piece's
    // backing string.
    while (NumPieces)
      Pieces[--NumPieces] = RopePiece();
    Size = 0;
  }

  unsigned getNumPieces() const { return NumPieces; }

  const RopePiece &getPiece(unsigned i) const {
    assert(i < getNumPieces() && "Invalid piece ID");
    return Pieces[i];
  }

  const RopePieceBTreeLeaf *getNextLeafInOrder() const { return NextLeaf; }

  /// insertAfterLeafInOrder - Link this leaf into the in-order leaf list
  /// immediately after 'Node'. This leaf must not already be linked.
  void insertAfterLeafInOrder(RopePieceBTreeLeaf *Node) {
    assert(!PrevLeaf && !NextLeaf && "Already in ordering");

    NextLeaf = Node->NextLeaf;
    if (NextLeaf)
      NextLeaf->PrevLeaf = &NextLeaf;
    PrevLeaf = &Node->NextLeaf;
    Node->NextLeaf = this;
  }

  /// removeFromLeafInOrder - Unlink this leaf from the in-order leaf list.
  void removeFromLeafInOrder() {
    if (PrevLeaf) {
      // Splice ourselves out by redirecting the predecessor's link.
      *PrevLeaf = NextLeaf;
      if (NextLeaf)
        NextLeaf->PrevLeaf = PrevLeaf;
    } else if (NextLeaf) {
      // We were the head of the list; the successor becomes the new head.
      NextLeaf->PrevLeaf = nullptr;
    }
  }

  /// FullRecomputeSizeLocally - This method recomputes the 'Size' field by
  /// summing the size of all RopePieces.
  void FullRecomputeSizeLocally() {
    Size = 0;
    for (unsigned i = 0, e = getNumPieces(); i != e; ++i)
      Size += getPiece(i).size();
  }

  /// split - Split the range containing the specified offset so that we are
  /// guaranteed that there is a place to do an insertion at the specified
  /// offset. The offset is relative, so "0" is the start of the node.
  ///
  /// If there is no space in this subtree for the extra piece, the extra tree
  /// node is returned and must be inserted into a parent.
  RopePieceBTreeNode *split(unsigned Offset);

  /// insert - Insert the specified ropepiece into this tree node at the
  /// specified offset. The offset is relative, so "0" is the start of the
  /// node.
  ///
  /// If there is no space in this subtree for the extra piece, the extra tree
  /// node is returned and must be inserted into a parent.
  RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);

  /// erase - Remove NumBytes from this node at the specified offset. We are
  /// guaranteed that there is a split at Offset.
  void erase(unsigned Offset, unsigned NumBytes);

  static bool classof(const RopePieceBTreeNode *N) {
    return N->isLeaf();
  }
};
228 
229 } // namespace
230 
/// split - Split the range containing the specified offset so that we are
/// guaranteed that there is a place to do an insertion at the specified
/// offset. The offset is relative, so "0" is the start of the node.
///
/// If there is no space in this subtree for the extra piece, the extra tree
/// node is returned and must be inserted into a parent.
RopePieceBTreeNode *RopePieceBTreeLeaf::split(unsigned Offset) {
  // Find the insertion point. We are guaranteed that there is a split at the
  // specified offset so find it.
  if (Offset == 0 || Offset == size()) {
    // Fastpath for a common case. There is already a splitpoint at the end.
    return nullptr;
  }

  // Find the piece that this offset lands in.
  unsigned PieceOffs = 0;
  unsigned i = 0;
  while (Offset >= PieceOffs+Pieces[i].size()) {
    PieceOffs += Pieces[i].size();
    ++i;
  }

  // If there is already a split point at the specified offset, just return
  // success.
  if (PieceOffs == Offset)
    return nullptr;

  // Otherwise, we need to split piece 'i' at Offset-PieceOffs. Convert Offset
  // to being Piece relative.
  unsigned IntraPieceOffset = Offset-PieceOffs;

  // We do this by shrinking the RopePiece and then doing an insert of the tail.
  // Both halves view the same refcounted string; only the offsets differ.
  RopePiece Tail(Pieces[i].StrData, Pieces[i].StartOffs+IntraPieceOffset,
                 Pieces[i].EndOffs);
  Size -= Pieces[i].size();
  Pieces[i].EndOffs = Pieces[i].StartOffs+IntraPieceOffset;
  Size += Pieces[i].size();

  // Inserting the tail may overflow this leaf; any newly created node is
  // propagated to the caller for linking into the parent.
  return insert(Offset, Tail);
}
271 
/// insert - Insert the specified RopePiece into this tree node at the
/// specified offset. The offset is relative, so "0" is the start of the node.
///
/// If there is no space in this subtree for the extra piece, the extra tree
/// node is returned and must be inserted into a parent.
RopePieceBTreeNode *RopePieceBTreeLeaf::insert(unsigned Offset,
                                               const RopePiece &R) {
  // If this node is not full, insert the piece.
  if (!isFull()) {
    // Find the insertion point. We are guaranteed that there is a split at the
    // specified offset so find it.
    unsigned i = 0, e = getNumPieces();
    if (Offset == size()) {
      // Fastpath for a common case.
      i = e;
    } else {
      unsigned SlotOffs = 0;
      for (; Offset > SlotOffs; ++i)
        SlotOffs += getPiece(i).size();
      assert(SlotOffs == Offset && "Split didn't occur before insertion!");
    }

    // For an insertion into a non-full leaf node, just insert the value in
    // its sorted position. This requires moving later values over.
    for (; i != e; --e)
      Pieces[e] = Pieces[e-1];
    Pieces[i] = R;
    ++NumPieces;
    Size += R.size();
    return nullptr;
  }

  // Otherwise, if this leaf is full, split it in two halves. Since this
  // node is full, it contains 2*WidthFactor values. We move the first
  // 'WidthFactor' values to the LHS child (which we leave in this node) and
  // move the last 'WidthFactor' values into the RHS child.

  // Create the new node.
  RopePieceBTreeLeaf *NewNode = new RopePieceBTreeLeaf();

  // Move over the last 'WidthFactor' values from here to NewNode.
  std::copy(&Pieces[WidthFactor], &Pieces[2*WidthFactor],
            &NewNode->Pieces[0]);
  // Replace old pieces with null RopePieces to drop refcounts.
  std::fill(&Pieces[WidthFactor], &Pieces[2*WidthFactor], RopePiece());

  // Decrease the number of values in the two nodes.
  NewNode->NumPieces = NumPieces = WidthFactor;

  // Recompute the two nodes' size.
  NewNode->FullRecomputeSizeLocally();
  FullRecomputeSizeLocally();

  // Update the list of leaves.
  NewNode->insertAfterLeafInOrder(this);

  // These insertions can't fail: each half now has room for at least one
  // more piece.
  if (this->size() >= Offset)
    this->insert(Offset, R);
  else
    NewNode->insert(Offset - this->size(), R);
  return NewNode;
}
335 
/// erase - Remove NumBytes from this node at the specified offset. We are
/// guaranteed that there is a split at Offset.
void RopePieceBTreeLeaf::erase(unsigned Offset, unsigned NumBytes) {
  // Since we are guaranteed that there is a split at Offset, we start by
  // finding the Piece that starts there.
  unsigned PieceOffs = 0;
  unsigned i = 0;
  for (; Offset > PieceOffs; ++i)
    PieceOffs += getPiece(i).size();
  assert(PieceOffs == Offset && "Split didn't occur before erase!");

  unsigned StartPiece = i;

  // Figure out how many pieces completely cover 'NumBytes'. We want to remove
  // all of them.
  for (; Offset+NumBytes > PieceOffs+getPiece(i).size(); ++i)
    PieceOffs += getPiece(i).size();

  // If we exactly include the last one, include it in the region to delete.
  if (Offset+NumBytes == PieceOffs+getPiece(i).size()) {
    PieceOffs += getPiece(i).size();
    ++i;
  }

  // If we completely cover some RopePieces, erase them now.
  if (i != StartPiece) {
    unsigned NumDeleted = i-StartPiece;
    // Shift the surviving pieces down over the deleted ones.
    for (; i != getNumPieces(); ++i)
      Pieces[i-NumDeleted] = Pieces[i];

    // Drop references to dead rope pieces.
    std::fill(&Pieces[getNumPieces()-NumDeleted], &Pieces[getNumPieces()],
              RopePiece());
    NumPieces -= NumDeleted;

    // Account for the bytes covered by the wholly-deleted pieces.
    unsigned CoverBytes = PieceOffs-Offset;
    NumBytes -= CoverBytes;
    Size -= CoverBytes;
  }

  // If we completely removed some stuff, we could be done.
  if (NumBytes == 0) return;

  // Okay, now might be erasing part of some Piece. If this is the case, then
  // move the start point of the piece. (After the shift above, StartPiece
  // indexes the first surviving piece, which the remaining bytes come from.)
  assert(getPiece(StartPiece).size() > NumBytes);
  Pieces[StartPiece].StartOffs += NumBytes;

  // The size of this node just shrunk by NumBytes.
  Size -= NumBytes;
}
387 
388 //===----------------------------------------------------------------------===//
389 // RopePieceBTreeInterior Class
390 //===----------------------------------------------------------------------===//
391 
392 namespace {
393 
/// RopePieceBTreeInterior - This represents an interior node in the B+Tree,
/// which holds up to 2*WidthFactor pointers to child nodes.
class RopePieceBTreeInterior : public RopePieceBTreeNode {
  /// NumChildren - This holds the number of children currently active in the
  /// Children array.
  unsigned char NumChildren = 0;

  /// Children - The (owned) child nodes of this interior node.
  RopePieceBTreeNode *Children[2*WidthFactor];

public:
  RopePieceBTreeInterior() : RopePieceBTreeNode(false) {}

  /// Construct an interior node from exactly two children. Used when a split
  /// propagates to the root and the tree must grow one level taller.
  RopePieceBTreeInterior(RopePieceBTreeNode *LHS, RopePieceBTreeNode *RHS)
      : RopePieceBTreeNode(false) {
    Children[0] = LHS;
    Children[1] = RHS;
    NumChildren = 2;
    Size = LHS->size() + RHS->size();
  }

  ~RopePieceBTreeInterior() {
    // Children are owned; Destroy() dispatches to the right concrete
    // destructor since ~RopePieceBTreeNode is non-virtual.
    for (unsigned i = 0, e = getNumChildren(); i != e; ++i)
      Children[i]->Destroy();
  }

  bool isFull() const { return NumChildren == 2*WidthFactor; }

  unsigned getNumChildren() const { return NumChildren; }

  const RopePieceBTreeNode *getChild(unsigned i) const {
    assert(i < NumChildren && "invalid child #");
    return Children[i];
  }

  RopePieceBTreeNode *getChild(unsigned i) {
    assert(i < NumChildren && "invalid child #");
    return Children[i];
  }

  /// FullRecomputeSizeLocally - Recompute the Size field of this node by
  /// summing up the sizes of the child nodes.
  void FullRecomputeSizeLocally() {
    Size = 0;
    for (unsigned i = 0, e = getNumChildren(); i != e; ++i)
      Size += getChild(i)->size();
  }

  /// split - Split the range containing the specified offset so that we are
  /// guaranteed that there is a place to do an insertion at the specified
  /// offset. The offset is relative, so "0" is the start of the node.
  ///
  /// If there is no space in this subtree for the extra piece, the extra tree
  /// node is returned and must be inserted into a parent.
  RopePieceBTreeNode *split(unsigned Offset);

  /// insert - Insert the specified ropepiece into this tree node at the
  /// specified offset. The offset is relative, so "0" is the start of the
  /// node.
  ///
  /// If there is no space in this subtree for the extra piece, the extra tree
  /// node is returned and must be inserted into a parent.
  RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);

  /// HandleChildPiece - A child propagated an insertion result up to us.
  /// Insert the new child, and/or propagate the result further up the tree.
  RopePieceBTreeNode *HandleChildPiece(unsigned i, RopePieceBTreeNode *RHS);

  /// erase - Remove NumBytes from this node at the specified offset. We are
  /// guaranteed that there is a split at Offset.
  void erase(unsigned Offset, unsigned NumBytes);

  static bool classof(const RopePieceBTreeNode *N) {
    return !N->isLeaf();
  }
};
469 
470 } // namespace
471 
472 /// split - Split the range containing the specified offset so that we are
473 /// guaranteed that there is a place to do an insertion at the specified
474 /// offset. The offset is relative, so "0" is the start of the node.
475 ///
476 /// If there is no space in this subtree for the extra piece, the extra tree
477 /// node is returned and must be inserted into a parent.
478 RopePieceBTreeNode *RopePieceBTreeInterior::split(unsigned Offset) {
479  // Figure out which child to split.
480  if (Offset == 0 || Offset == size())
481  return nullptr; // If we have an exact offset, we're already split.
482 
483  unsigned ChildOffset = 0;
484  unsigned i = 0;
485  for (; Offset >= ChildOffset+getChild(i)->size(); ++i)
486  ChildOffset += getChild(i)->size();
487 
488  // If already split there, we're done.
489  if (ChildOffset == Offset)
490  return nullptr;
491 
492  // Otherwise, recursively split the child.
493  if (RopePieceBTreeNode *RHS = getChild(i)->split(Offset-ChildOffset))
494  return HandleChildPiece(i, RHS);
495  return nullptr; // Done!
496 }
497 
/// insert - Insert the specified ropepiece into this tree node at the
/// specified offset. The offset is relative, so "0" is the start of the
/// node.
///
/// If there is no space in this subtree for the extra piece, the extra tree
/// node is returned and must be inserted into a parent.
RopePieceBTreeNode *RopePieceBTreeInterior::insert(unsigned Offset,
                                                   const RopePiece &R) {
  // Find the insertion point. We are guaranteed that there is a split at the
  // specified offset so find it.
  unsigned i = 0, e = getNumChildren();

  unsigned ChildOffs = 0;
  if (Offset == size()) {
    // Fastpath for a common case. Insert at end of last child.
    i = e-1;
    ChildOffs = size()-getChild(i)->size();
  } else {
    for (; Offset > ChildOffs+getChild(i)->size(); ++i)
      ChildOffs += getChild(i)->size();
  }

  // This node covers R's bytes no matter which child ends up holding them,
  // so account for them here before recursing.
  Size += R.size();

  // Insert at the end of this child.
  if (RopePieceBTreeNode *RHS = getChild(i)->insert(Offset-ChildOffs, R))
    return HandleChildPiece(i, RHS);

  return nullptr;
}
528 
/// HandleChildPiece - A child propagated an insertion result up to us.
/// Insert the new child, and/or propagate the result further up the tree.
RopePieceBTreeNode *
RopePieceBTreeInterior::HandleChildPiece(unsigned i, RopePieceBTreeNode *RHS) {
  // Otherwise the child propagated a subtree up to us as a new child. See if
  // we have space for it here.
  if (!isFull()) {
    // Insert RHS after child 'i'. Children holds raw pointers, so memmove is
    // a safe way to shift them over.
    if (i + 1 != getNumChildren())
      memmove(&Children[i+2], &Children[i+1],
              (getNumChildren()-i-1)*sizeof(Children[0]));
    Children[i+1] = RHS;
    ++NumChildren;
    return nullptr;
  }

  // Okay, this node is full. Split it in half, moving WidthFactor children to
  // a newly allocated interior node.

  // Create the new node.
  RopePieceBTreeInterior *NewNode = new RopePieceBTreeInterior();

  // Move over the last 'WidthFactor' values from here to NewNode.
  memcpy(&NewNode->Children[0], &Children[WidthFactor],
         WidthFactor*sizeof(Children[0]));

  // Decrease the number of values in the two nodes.
  NewNode->NumChildren = NumChildren = WidthFactor;

  // Finally, insert the two new children in the side that can (now) hold them.
  // These insertions can't fail.
  if (i < WidthFactor)
    this->HandleChildPiece(i, RHS);
  else
    NewNode->HandleChildPiece(i-WidthFactor, RHS);

  // Recompute the two nodes' size.
  NewNode->FullRecomputeSizeLocally();
  FullRecomputeSizeLocally();
  return NewNode;
}
570 
/// erase - Remove NumBytes from this node at the specified offset. We are
/// guaranteed that there is a split at Offset.
///
/// Note that erase does not rebalance the tree: a node may end up with fewer
/// than WidthFactor children (see the file header comment).
void RopePieceBTreeInterior::erase(unsigned Offset, unsigned NumBytes) {
  // This will shrink this node by NumBytes.
  Size -= NumBytes;

  // Find the first child that overlaps with Offset.
  unsigned i = 0;
  for (; Offset >= getChild(i)->size(); ++i)
    Offset -= getChild(i)->size();

  // Propagate the delete request into overlapping children, or completely
  // delete the children as appropriate.
  while (NumBytes) {
    RopePieceBTreeNode *CurChild = getChild(i);

    // If we are deleting something contained entirely in the child, pass on the
    // request.
    if (Offset+NumBytes < CurChild->size()) {
      CurChild->erase(Offset, NumBytes);
      return;
    }

    // If this deletion request starts somewhere in the middle of the child, it
    // must be deleting to the end of the child.
    if (Offset) {
      unsigned BytesFromChild = CurChild->size()-Offset;
      CurChild->erase(Offset, BytesFromChild);
      NumBytes -= BytesFromChild;
      // Start at the beginning of the next child.
      Offset = 0;
      ++i;
      continue;
    }

    // If the deletion request completely covers the child, delete it and move
    // the rest down. (NumChildren is decremented first so getNumChildren()
    // reflects the post-delete count used in the memmove bounds; child 'i'
    // is reused on the next iteration.)
    NumBytes -= CurChild->size();
    CurChild->Destroy();
    --NumChildren;
    if (i != getNumChildren())
      memmove(&Children[i], &Children[i+1],
              (getNumChildren()-i)*sizeof(Children[0]));
  }
}
616 
617 //===----------------------------------------------------------------------===//
618 // RopePieceBTreeNode Implementation
619 //===----------------------------------------------------------------------===//
620 
621 void RopePieceBTreeNode::Destroy() {
622  if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
623  delete Leaf;
624  else
625  delete cast<RopePieceBTreeInterior>(this);
626 }
627 
628 /// split - Split the range containing the specified offset so that we are
629 /// guaranteed that there is a place to do an insertion at the specified
630 /// offset. The offset is relative, so "0" is the start of the node.
631 ///
632 /// If there is no space in this subtree for the extra piece, the extra tree
633 /// node is returned and must be inserted into a parent.
634 RopePieceBTreeNode *RopePieceBTreeNode::split(unsigned Offset) {
635  assert(Offset <= size() && "Invalid offset to split!");
636  if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
637  return Leaf->split(Offset);
638  return cast<RopePieceBTreeInterior>(this)->split(Offset);
639 }
640 
641 /// insert - Insert the specified ropepiece into this tree node at the
642 /// specified offset. The offset is relative, so "0" is the start of the
643 /// node.
644 ///
645 /// If there is no space in this subtree for the extra piece, the extra tree
646 /// node is returned and must be inserted into a parent.
647 RopePieceBTreeNode *RopePieceBTreeNode::insert(unsigned Offset,
648  const RopePiece &R) {
649  assert(Offset <= size() && "Invalid offset to insert!");
650  if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
651  return Leaf->insert(Offset, R);
652  return cast<RopePieceBTreeInterior>(this)->insert(Offset, R);
653 }
654 
655 /// erase - Remove NumBytes from this node at the specified offset. We are
656 /// guaranteed that there is a split at Offset.
657 void RopePieceBTreeNode::erase(unsigned Offset, unsigned NumBytes) {
658  assert(Offset+NumBytes <= size() && "Invalid offset to erase!");
659  if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
660  return Leaf->erase(Offset, NumBytes);
661  return cast<RopePieceBTreeInterior>(this)->erase(Offset, NumBytes);
662 }
663 
664 //===----------------------------------------------------------------------===//
665 // RopePieceBTreeIterator Implementation
666 //===----------------------------------------------------------------------===//
667 
// getCN - Decode the iterator's opaque node pointer back into the leaf it
// denotes. The iterator stores the node as 'const void*', presumably to keep
// RopePieceBTreeLeaf out of the public header — confirm against RewriteRope.h.
static const RopePieceBTreeLeaf *getCN(const void *P) {
  return static_cast<const RopePieceBTreeLeaf*>(P);
}
671 
672 // begin iterator.
674  const auto *N = static_cast<const RopePieceBTreeNode *>(n);
675 
676  // Walk down the left side of the tree until we get to a leaf.
677  while (const auto *IN = dyn_cast<RopePieceBTreeInterior>(N))
678  N = IN->getChild(0);
679 
680  // We must have at least one leaf.
681  CurNode = cast<RopePieceBTreeLeaf>(N);
682 
683  // If we found a leaf that happens to be empty, skip over it until we get
684  // to something full.
685  while (CurNode && getCN(CurNode)->getNumPieces() == 0)
686  CurNode = getCN(CurNode)->getNextLeafInOrder();
687 
688  if (CurNode)
689  CurPiece = &getCN(CurNode)->getPiece(0);
690  else // Empty tree, this is an end() iterator.
691  CurPiece = nullptr;
692  CurChar = 0;
693 }
694 
696  if (CurPiece != &getCN(CurNode)->getPiece(getCN(CurNode)->getNumPieces()-1)) {
697  CurChar = 0;
698  ++CurPiece;
699  return;
700  }
701 
702  // Find the next non-empty leaf node.
703  do
704  CurNode = getCN(CurNode)->getNextLeafInOrder();
705  while (CurNode && getCN(CurNode)->getNumPieces() == 0);
706 
707  if (CurNode)
708  CurPiece = &getCN(CurNode)->getPiece(0);
709  else // Hit end().
710  CurPiece = nullptr;
711  CurChar = 0;
712 }
713 
714 //===----------------------------------------------------------------------===//
715 // RopePieceBTree Implementation
716 //===----------------------------------------------------------------------===//
717 
// getRoot - Decode RopePieceBTree's opaque 'Root' pointer back into a tree
// node. The root is stored as 'void*', presumably to keep the node types out
// of the public header — confirm against RewriteRope.h.
static RopePieceBTreeNode *getRoot(void *P) {
  return static_cast<RopePieceBTreeNode*>(P);
}
721 
723  Root = new RopePieceBTreeLeaf();
724 }
725 
727  assert(RHS.empty() && "Can't copy non-empty tree yet");
728  Root = new RopePieceBTreeLeaf();
729 }
730 
732  getRoot(Root)->Destroy();
733 }
734 
735 unsigned RopePieceBTree::size() const {
736  return getRoot(Root)->size();
737 }
738 
740  if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(getRoot(Root)))
741  Leaf->clear();
742  else {
743  getRoot(Root)->Destroy();
744  Root = new RopePieceBTreeLeaf();
745  }
746 }
747 
748 void RopePieceBTree::insert(unsigned Offset, const RopePiece &R) {
749  // #1. Split at Offset.
750  if (RopePieceBTreeNode *RHS = getRoot(Root)->split(Offset))
751  Root = new RopePieceBTreeInterior(getRoot(Root), RHS);
752 
753  // #2. Do the insertion.
754  if (RopePieceBTreeNode *RHS = getRoot(Root)->insert(Offset, R))
755  Root = new RopePieceBTreeInterior(getRoot(Root), RHS);
756 }
757 
758 void RopePieceBTree::erase(unsigned Offset, unsigned NumBytes) {
759  // #1. Split at Offset.
760  if (RopePieceBTreeNode *RHS = getRoot(Root)->split(Offset))
761  Root = new RopePieceBTreeInterior(getRoot(Root), RHS);
762 
763  // #2. Do the erasing.
764  getRoot(Root)->erase(Offset, NumBytes);
765 }
766 
767 //===----------------------------------------------------------------------===//
768 // RewriteRope Implementation
769 //===----------------------------------------------------------------------===//
770 
771 /// MakeRopeString - This copies the specified byte range into some instance of
772 /// RopeRefCountString, and return a RopePiece that represents it. This uses
773 /// the AllocBuffer object to aggregate requests for small strings into one
774 /// allocation instead of doing tons of tiny allocations.
775 RopePiece RewriteRope::MakeRopeString(const char *Start, const char *End) {
776  unsigned Len = End-Start;
777  assert(Len && "Zero length RopePiece is invalid!");
778 
779  // If we have space for this string in the current alloc buffer, use it.
780  if (AllocOffs+Len <= AllocChunkSize) {
781  memcpy(AllocBuffer->Data+AllocOffs, Start, Len);
782  AllocOffs += Len;
783  return RopePiece(AllocBuffer, AllocOffs-Len, AllocOffs);
784  }
785 
786  // If we don't have enough room because this specific allocation is huge,
787  // just allocate a new rope piece for it alone.
788  if (Len > AllocChunkSize) {
789  unsigned Size = End-Start+sizeof(RopeRefCountString)-1;
790  auto *Res = reinterpret_cast<RopeRefCountString *>(new char[Size]);
791  Res->RefCount = 0;
792  memcpy(Res->Data, Start, End-Start);
793  return RopePiece(Res, 0, End-Start);
794  }
795 
796  // Otherwise, this was a small request but we just don't have space for it
797  // Make a new chunk and share it with later allocations.
798 
799  unsigned AllocSize = offsetof(RopeRefCountString, Data) + AllocChunkSize;
800  auto *Res = reinterpret_cast<RopeRefCountString *>(new char[AllocSize]);
801  Res->RefCount = 0;
802  memcpy(Res->Data, Start, Len);
803  AllocBuffer = Res;
804  AllocOffs = Len;
805 
806  return RopePiece(AllocBuffer, 0, Len);
807 }
/* Doxygen cross-reference residue captured with this scrape; it is not part
   of RewriteRope.cpp itself:

   void insert(unsigned Offset, const RopePiece &R)
   StringRef P
   virtual void clear()
   RopeRefCountString - This struct is allocated with 'new char[]' from the heap, and represents a refer...
   Definition: RewriteRope.h:34
   RopePiece - This class represents a view into a RopeRefCountString object.
   Definition: RewriteRope.h:59
   uint32_t Offset
   Definition: CacheTokens.cpp:43
   Forward-declares and imports various common LLVM datatypes that clang wants to use unqualified...
   void erase(unsigned Offset, unsigned NumBytes)
   unsigned EndOffs
   Definition: RewriteRope.h:62
   unsigned empty() const
   Definition: RewriteRope.h:151
   SourceLocation End
   #define offsetof(t, d)
   Definition: stddef.h:120
   #define false
   Definition: stdbool.h:33
   unsigned size() const
   Definition: RewriteRope.h:76
   __DEVICE__ void *memcpy(void *__a, const void *__b, size_t __c)
   static RopePieceBTreeNode *getRoot(void *P)
   ast_type_traits::DynTypedNode Node
   Dataflow Directional Tag Classes.
   unsigned size() const
   static bool classof(const OMPClause *T)
   static const RopePieceBTreeLeaf *getCN(const void *P)
   unsigned StartOffs
   Definition: RewriteRope.h:61
   #define true
   Definition: stdbool.h:32
*/