Export of internal Abseil changes
-- 87cdfd6aa40941e116cd79ef70f9a7a8271db163 by Abseil Team <absl-team@google.com>:

Fix a typo in random.h API documentation.

PiperOrigin-RevId: 305176308

-- 8a38e1df49a18a954daca3ce617fd69045ff4c19 by Derek Mauro <dmauro@google.com>:

Import GitHub #647: Allow external add_subdirectory for using GoogleTest

PiperOrigin-RevId: 305156797

-- b1a2441536d4964fbe4e2329e74c322e6c41a4e6 by Gennadiy Rozental <rogeeff@google.com>:

temporary roll back.

PiperOrigin-RevId: 305149619

-- c78767577264348d2f881893f9407aadfe73ab75 by CJ Johnson <johnsoncj@google.com>:

Rollback update to linux_clang-latest container while investigating a compiler bug.

PiperOrigin-RevId: 304897689

-- 3c6fd38f53d2e982569fdba4043f75271c7b5de4 by Derek Mauro <dmauro@google.com>:

Update linux_clang-latest container to one based on Ubuntu 18.04, which has libstdc++-8.

PiperOrigin-RevId: 304885120

GitOrigin-RevId: 87cdfd6aa40941e116cd79ef70f9a7a8271db163
Change-Id: Iefa6efee93907ec0eecb8add804c5cc2f052b64d
parent c01b9916e7
commit 73ea9a9572

5 changed files with 173 additions and 351 deletions
		|  | @ -82,7 +82,8 @@ endif() | |||
| find_package(Threads REQUIRED) | ||||
| 
 | ||||
| option(ABSL_USE_EXTERNAL_GOOGLETEST | ||||
|   "If ON, abseil will assume that the targets for googletest are already provided by the including project folder. This makes sense when abseil is used with add_subproject." OFF) | ||||
|   "If ON, Abseil will assume that the targets for GoogleTest are already provided by the including project. This makes sense when Abseil is used with add_subproject." OFF) | ||||
| 
 | ||||
| 
 | ||||
| option(ABSL_USE_GOOGLETEST_HEAD | ||||
|   "If ON, abseil will download HEAD from googletest at config time." OFF) | ||||
|  |  | |||
|  | @ -109,7 +109,7 @@ ABSL_NAMESPACE_BEGIN | |||
| 
 | ||||
| // absl::BitGen::max()
 | ||||
| //
 | ||||
| // Returns the largest possible value from this bit generator., and
 | ||||
| // Returns the largest possible value from this bit generator.
 | ||||
| 
 | ||||
| // absl::BitGen::discard(num)
 | ||||
| //
 | ||||
|  |  | |||
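The hunk above fixes the `absl::BitGen::max()` comment in random.h. For context, here is a minimal usage sketch of that API; `absl::BitGen`, its `min()`/`max()` members, and `absl::Uniform` are the documented public interface, while the specific range and values below are made up for illustration.

```cpp
// Illustrative sketch (not part of this commit): absl::BitGen satisfies the
// uniform random bit generator concept, so min()/max() describe its output
// range and the generator can be passed to absl's distribution helpers.
#include <cstdint>
#include <iostream>

#include "absl/random/random.h"

int main() {
  absl::BitGen gen;

  // The full output range documented by min()/max().
  std::cout << "min=" << absl::BitGen::min()
            << " max=" << absl::BitGen::max() << "\n";

  // Typical use: draw a value in [0, 100) through a distribution helper.
  int value = absl::Uniform<int>(gen, 0, 100);
  std::cout << "value=" << value << "\n";
  return 0;
}
```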
|  | @ -31,6 +31,7 @@ | |||
| #include "absl/base/macros.h" | ||||
| #include "absl/base/port.h" | ||||
| #include "absl/container/fixed_array.h" | ||||
| #include "absl/container/inlined_vector.h" | ||||
| #include "absl/strings/escaping.h" | ||||
| #include "absl/strings/internal/cord_internal.h" | ||||
| #include "absl/strings/internal/resize_uninitialized.h" | ||||
|  | @ -132,14 +133,6 @@ inline const CordRepExternal* CordRep::external() const { | |||
|   return static_cast<const CordRepExternal*>(this); | ||||
| } | ||||
| 
 | ||||
| using CordTreeConstPath = CordTreePath<const CordRep*, MaxCordDepth()>; | ||||
| 
 | ||||
| // This type is used to store the list of pending nodes during re-balancing.
 | ||||
| // Its maximum size is 2 * MaxCordDepth() because the tree has a maximum
 | ||||
| // possible depth of MaxCordDepth() and every concat node along a tree path
 | ||||
| // could theoretically be split during rebalancing.
 | ||||
| using RebalancingStack = CordTreePath<CordRep*, 2 * MaxCordDepth()>; | ||||
| 
 | ||||
| }  // namespace cord_internal
 | ||||
| 
 | ||||
| static const size_t kFlatOverhead = offsetof(CordRep, data); | ||||
|  | @ -188,78 +181,64 @@ static constexpr size_t TagToLength(uint8_t tag) { | |||
| // Enforce that kMaxFlatSize maps to a well-known exact tag value.
 | ||||
| static_assert(TagToAllocatedSize(224) == kMaxFlatSize, "Bad tag logic"); | ||||
| 
 | ||||
| constexpr size_t Fibonacci(uint8_t n, const size_t a = 0, const size_t b = 1) { | ||||
|   return n == 0 | ||||
|              ? a | ||||
|              : n == 1 ? b | ||||
|                       : Fibonacci(n - 1, b, | ||||
|                                   (a > (size_t(-1) - b)) ? size_t(-1) : a + b); | ||||
| constexpr uint64_t Fibonacci(unsigned char n, uint64_t a = 0, uint64_t b = 1) { | ||||
|   return n == 0 ? a : Fibonacci(n - 1, b, a + b); | ||||
| } | ||||
| 
 | ||||
| static_assert(Fibonacci(63) == 6557470319842, | ||||
|               "Fibonacci values computed incorrectly"); | ||||
| 
 | ||||
| // Minimum length required for a given depth tree -- a tree is considered
 | ||||
| // balanced if
 | ||||
| //      length(t) >= kMinLength[depth(t)]
 | ||||
| // The node depth is allowed to become larger to reduce rebalancing
 | ||||
| // for larger strings (see ShouldRebalance).
 | ||||
| constexpr size_t kMinLength[] = { | ||||
|     Fibonacci(2),  Fibonacci(3),  Fibonacci(4),  Fibonacci(5),  Fibonacci(6), | ||||
|     Fibonacci(7),  Fibonacci(8),  Fibonacci(9),  Fibonacci(10), Fibonacci(11), | ||||
|     Fibonacci(12), Fibonacci(13), Fibonacci(14), Fibonacci(15), Fibonacci(16), | ||||
|     Fibonacci(17), Fibonacci(18), Fibonacci(19), Fibonacci(20), Fibonacci(21), | ||||
|     Fibonacci(22), Fibonacci(23), Fibonacci(24), Fibonacci(25), Fibonacci(26), | ||||
|     Fibonacci(27), Fibonacci(28), Fibonacci(29), Fibonacci(30), Fibonacci(31), | ||||
|     Fibonacci(32), Fibonacci(33), Fibonacci(34), Fibonacci(35), Fibonacci(36), | ||||
|     Fibonacci(37), Fibonacci(38), Fibonacci(39), Fibonacci(40), Fibonacci(41), | ||||
|     Fibonacci(42), Fibonacci(43), Fibonacci(44), Fibonacci(45), Fibonacci(46), | ||||
|     Fibonacci(47), Fibonacci(48), Fibonacci(49), Fibonacci(50), Fibonacci(51), | ||||
|     Fibonacci(52), Fibonacci(53), Fibonacci(54), Fibonacci(55), Fibonacci(56), | ||||
|     Fibonacci(57), Fibonacci(58), Fibonacci(59), Fibonacci(60), Fibonacci(61), | ||||
|     Fibonacci(62), Fibonacci(63), Fibonacci(64), Fibonacci(65), Fibonacci(66), | ||||
|     Fibonacci(67), Fibonacci(68), Fibonacci(69), Fibonacci(70), Fibonacci(71), | ||||
|     Fibonacci(72), Fibonacci(73), Fibonacci(74), Fibonacci(75), Fibonacci(76), | ||||
|     Fibonacci(77), Fibonacci(78), Fibonacci(79), Fibonacci(80), Fibonacci(81), | ||||
|     Fibonacci(82), Fibonacci(83), Fibonacci(84), Fibonacci(85), Fibonacci(86), | ||||
|     Fibonacci(87), Fibonacci(88), Fibonacci(89), Fibonacci(90), Fibonacci(91), | ||||
|     Fibonacci(92), Fibonacci(93), Fibonacci(94), Fibonacci(95)}; | ||||
| //      length(t) >= min_length[depth(t)]
 | ||||
| // The root node depth is allowed to become twice as large to reduce rebalancing
 | ||||
| // for larger strings (see IsRootBalanced).
 | ||||
| static constexpr uint64_t min_length[] = { | ||||
|     Fibonacci(2),          Fibonacci(3),  Fibonacci(4),  Fibonacci(5), | ||||
|     Fibonacci(6),          Fibonacci(7),  Fibonacci(8),  Fibonacci(9), | ||||
|     Fibonacci(10),         Fibonacci(11), Fibonacci(12), Fibonacci(13), | ||||
|     Fibonacci(14),         Fibonacci(15), Fibonacci(16), Fibonacci(17), | ||||
|     Fibonacci(18),         Fibonacci(19), Fibonacci(20), Fibonacci(21), | ||||
|     Fibonacci(22),         Fibonacci(23), Fibonacci(24), Fibonacci(25), | ||||
|     Fibonacci(26),         Fibonacci(27), Fibonacci(28), Fibonacci(29), | ||||
|     Fibonacci(30),         Fibonacci(31), Fibonacci(32), Fibonacci(33), | ||||
|     Fibonacci(34),         Fibonacci(35), Fibonacci(36), Fibonacci(37), | ||||
|     Fibonacci(38),         Fibonacci(39), Fibonacci(40), Fibonacci(41), | ||||
|     Fibonacci(42),         Fibonacci(43), Fibonacci(44), Fibonacci(45), | ||||
|     Fibonacci(46),         Fibonacci(47), | ||||
|     0xffffffffffffffffull,  // Avoid overflow
 | ||||
| }; | ||||
| 
 | ||||
| static_assert(sizeof(kMinLength) / sizeof(size_t) >= | ||||
|                   (cord_internal::MaxCordDepth() + 1), | ||||
|               "Not enough elements in kMinLength array to cover all the " | ||||
|               "supported Cord depth(s)"); | ||||
| static const int kMinLengthSize = ABSL_ARRAYSIZE(min_length); | ||||
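The comment above states the balance rule `length(t) >= min_length[depth(t)]`, where `min_length[d]` is `Fibonacci(d + 2)`. The following standalone sketch, which is not the Abseil source, shows the same idea of building the threshold table at compile time and testing a length against it.

```cpp
// Minimal standalone sketch (not the Abseil sources) of the Fibonacci-based
// minimum-length table described above: the minimum length for depth d is
// Fib(d + 2), so every balanced subtree of depth 0 holds at least one byte.
#include <cstdint>
#include <cstdio>

constexpr uint64_t Fib(unsigned n, uint64_t a = 0, uint64_t b = 1) {
  return n == 0 ? a : Fib(n - 1, b, a + b);
}

constexpr uint64_t kMinLen[] = {Fib(2), Fib(3), Fib(4), Fib(5),
                                Fib(6), Fib(7), Fib(8), Fib(9)};

static_assert(kMinLen[0] == 1 && kMinLen[7] == 34, "unexpected table values");

int main() {
  unsigned depth = 5;
  uint64_t length = 10;
  // A tree of this depth counts as balanced only if it holds at least
  // kMinLen[depth] bytes; here 10 < Fib(7) = 13, so it does not.
  std::printf("balanced: %s\n", length >= kMinLen[depth] ? "yes" : "no");
}
```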
| 
 | ||||
| inline bool ShouldRebalance(const CordRep* node) { | ||||
|   if (node->tag != CONCAT) return false; | ||||
| // The inlined size to use with absl::InlinedVector.
 | ||||
| //
 | ||||
| // Note: The InlinedVectors in this file (and in cord.h) do not need to use
 | ||||
| // the same value for their inlined size. The fact that they do is historical.
 | ||||
| // It may be desirable for each to use a different inlined size optimized for
 | ||||
| // that InlinedVector's usage.
 | ||||
| //
 | ||||
| // TODO(jgm): Benchmark to see if there's a more optimal value than 47 for
 | ||||
| // the inlined vector size (47 exists for backward compatibility).
 | ||||
| static const int kInlinedVectorSize = 47; | ||||
| 
 | ||||
|   size_t node_depth = node->concat()->depth(); | ||||
| 
 | ||||
|   if (node_depth <= 15) return false; | ||||
| 
 | ||||
|   // Rebalancing Cords is expensive, so we reduce how often rebalancing occurs
 | ||||
|   // by allowing shallow Cords to have twice the depth that the Fibonacci rule
 | ||||
|   // would otherwise imply. Deep Cords need to follow the rule more closely,
 | ||||
|   // however to ensure algorithm correctness. We implement this with linear
 | ||||
|   // interpolation. Cords of depth 16 are treated as though they have a depth
 | ||||
|   // of 16 * 1/2, and Cords of depth MaxCordDepth() interpolate to
 | ||||
|   // MaxCordDepth() * 1.
 | ||||
|   return node->length < | ||||
|          kMinLength[(node_depth * (cord_internal::MaxCordDepth() - 16)) / | ||||
|                     (2 * cord_internal::MaxCordDepth() - 16 - node_depth)]; | ||||
| } | ||||
| 
 | ||||
| // Unlike root balancing condition this one is part of the re-balancing
 | ||||
| // algorithm and has to be always matching against right depth for
 | ||||
| // algorithm to be correct.
 | ||||
| inline bool IsNodeBalanced(const CordRep* node) { | ||||
|   if (node->tag != CONCAT) return true; | ||||
| 
 | ||||
|   size_t node_depth = node->concat()->depth(); | ||||
| 
 | ||||
|   return node->length >= kMinLength[node_depth]; | ||||
| static inline bool IsRootBalanced(CordRep* node) { | ||||
|   if (node->tag != CONCAT) { | ||||
|     return true; | ||||
|   } else if (node->concat()->depth() <= 15) { | ||||
|     return true; | ||||
|   } else if (node->concat()->depth() > kMinLengthSize) { | ||||
|     return false; | ||||
|   } else { | ||||
|     // Allow depth to become twice as large as implied by fibonacci rule to
 | ||||
|     // reduce rebalancing for larger strings.
 | ||||
|     return (node->length >= min_length[node->concat()->depth() / 2]); | ||||
|   } | ||||
| } | ||||
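Both the removed `ShouldRebalance` and the restored `IsRootBalanced` above compare a node's length against the Fibonacci table, but the root check deliberately relaxes the rule. The sketch below restates that logic with placeholder names (`Node`, `MinLen`, `kTableSize` are not Abseil's): leaves and shallow trees pass outright, trees deeper than the table fail, and otherwise the root is allowed roughly twice the depth the strict rule implies by indexing the table at `depth / 2`.

```cpp
// Self-contained sketch of the relaxed root-balance check restored above;
// all names are placeholders, not Abseil's.
#include <cstddef>
#include <cstdint>
#include <cstdio>

constexpr uint64_t Fib(unsigned n, uint64_t a = 0, uint64_t b = 1) {
  return n == 0 ? a : Fib(n - 1, b, a + b);
}

// Stand-in for min_length[d] above: Fibonacci(d + 2).
constexpr uint64_t MinLen(int d) { return Fib(static_cast<unsigned>(d) + 2); }
constexpr int kTableSize = 46;  // depths the table is assumed to cover

struct Node {
  size_t length;   // total bytes below this node
  int depth;       // 0 for leaves
  bool is_concat;  // interior (concatenation) node?
};

bool IsRootBalancedSketch(const Node& root) {
  if (!root.is_concat) return true;           // leaves need no rebalancing
  if (root.depth <= 15) return true;          // shallow trees are left alone
  if (root.depth > kTableSize) return false;  // deeper than the table covers
  // The root may be about twice as deep as the strict Fibonacci rule allows,
  // implemented by indexing the table at half the actual depth.
  return root.length >= MinLen(root.depth / 2);
}

int main() {
  Node long_root{1 << 20, 20, true};   // 1 MiB at depth 20: balanced
  Node short_root{100, 20, true};      // 100 bytes at depth 20: needs rebalance
  std::printf("%d %d\n", IsRootBalancedSketch(long_root),
              IsRootBalancedSketch(short_root));  // prints: 1 0
}
```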
| 
 | ||||
| static CordRep* Rebalance(CordRep* node); | ||||
| static void DumpNode(const CordRep* rep, bool include_data, std::ostream* os); | ||||
| static bool VerifyNode(const CordRep* root, const CordRep* start_node, | ||||
| static void DumpNode(CordRep* rep, bool include_data, std::ostream* os); | ||||
| static bool VerifyNode(CordRep* root, CordRep* start_node, | ||||
|                        bool full_validation); | ||||
| 
 | ||||
| static inline CordRep* VerifyTree(CordRep* node) { | ||||
|  | @ -306,8 +285,7 @@ __attribute__((preserve_most)) | |||
| static void UnrefInternal(CordRep* rep) { | ||||
|   assert(rep != nullptr); | ||||
| 
 | ||||
|   cord_internal::RebalancingStack pending; | ||||
| 
 | ||||
|   absl::InlinedVector<CordRep*, kInlinedVectorSize> pending; | ||||
|   while (true) { | ||||
|     if (rep->tag == CONCAT) { | ||||
|       CordRepConcat* rep_concat = rep->concat(); | ||||
|  | @ -389,11 +367,6 @@ static void SetConcatChildren(CordRepConcat* concat, CordRep* left, | |||
| 
 | ||||
|   concat->length = left->length + right->length; | ||||
|   concat->set_depth(1 + std::max(Depth(left), Depth(right))); | ||||
| 
 | ||||
|   ABSL_INTERNAL_CHECK(concat->depth() <= cord_internal::MaxCordDepth(), | ||||
|                       "Cord depth exceeds max"); | ||||
|   ABSL_INTERNAL_CHECK(concat->length >= left->length, "Cord is too long"); | ||||
|   ABSL_INTERNAL_CHECK(concat->length >= right->length, "Cord is too long"); | ||||
| } | ||||
| 
 | ||||
| // Create a concatenation of the specified nodes.
 | ||||
|  | @ -419,7 +392,7 @@ static CordRep* RawConcat(CordRep* left, CordRep* right) { | |||
| 
 | ||||
| static CordRep* Concat(CordRep* left, CordRep* right) { | ||||
|   CordRep* rep = RawConcat(left, right); | ||||
|   if (rep != nullptr && ShouldRebalance(rep)) { | ||||
|   if (rep != nullptr && !IsRootBalanced(rep)) { | ||||
|     rep = Rebalance(rep); | ||||
|   } | ||||
|   return VerifyTree(rep); | ||||
|  | @ -714,14 +687,6 @@ void Cord::InlineRep::ClearSlow() { | |||
|   memset(data_, 0, sizeof(data_)); | ||||
| } | ||||
| 
 | ||||
| inline Cord::InternalChunkIterator Cord::internal_chunk_begin() const { | ||||
|   return InternalChunkIterator(this); | ||||
| } | ||||
| 
 | ||||
| inline Cord::InternalChunkRange Cord::InternalChunks() const { | ||||
|   return InternalChunkRange(this); | ||||
| } | ||||
| 
 | ||||
| // --------------------------------------------------------------------
 | ||||
| // Constructors and destructors
 | ||||
| 
 | ||||
|  | @ -918,7 +883,7 @@ void Cord::Prepend(absl::string_view src) { | |||
| static CordRep* RemovePrefixFrom(CordRep* node, size_t n) { | ||||
|   if (n >= node->length) return nullptr; | ||||
|   if (n == 0) return Ref(node); | ||||
|   cord_internal::CordTreeMutablePath rhs_stack; | ||||
|   absl::InlinedVector<CordRep*, kInlinedVectorSize> rhs_stack; | ||||
| 
 | ||||
|   while (node->tag == CONCAT) { | ||||
|     assert(n <= node->length); | ||||
|  | @ -959,7 +924,7 @@ static CordRep* RemovePrefixFrom(CordRep* node, size_t n) { | |||
| static CordRep* RemoveSuffixFrom(CordRep* node, size_t n) { | ||||
|   if (n >= node->length) return nullptr; | ||||
|   if (n == 0) return Ref(node); | ||||
|   absl::cord_internal::CordTreeMutablePath lhs_stack; | ||||
|   absl::InlinedVector<CordRep*, kInlinedVectorSize> lhs_stack; | ||||
|   bool inplace_ok = node->refcount.IsOne(); | ||||
| 
 | ||||
|   while (node->tag == CONCAT) { | ||||
|  | @ -1030,7 +995,6 @@ void Cord::RemoveSuffix(size_t n) { | |||
| 
 | ||||
| // Work item for NewSubRange().
 | ||||
| struct SubRange { | ||||
|   SubRange() = default; | ||||
|   SubRange(CordRep* a_node, size_t a_pos, size_t a_n) | ||||
|       : node(a_node), pos(a_pos), n(a_n) {} | ||||
|   CordRep* node;  // nullptr means concat last 2 results.
 | ||||
|  | @ -1039,11 +1003,8 @@ struct SubRange { | |||
| }; | ||||
| 
 | ||||
| static CordRep* NewSubRange(CordRep* node, size_t pos, size_t n) { | ||||
|   cord_internal::CordTreeMutablePath results; | ||||
|   // The algorithm below in worst case scenario adds up to 3 nodes to the `todo`
 | ||||
|   // list, but we also pop one out on every cycle. If original tree has depth d
 | ||||
|   // todo list can grew up to 2*d in size.
 | ||||
|   cord_internal::CordTreePath<SubRange, 2 * cord_internal::MaxCordDepth()> todo; | ||||
|   absl::InlinedVector<CordRep*, kInlinedVectorSize> results; | ||||
|   absl::InlinedVector<SubRange, kInlinedVectorSize> todo; | ||||
|   todo.push_back(SubRange(node, pos, n)); | ||||
|   do { | ||||
|     const SubRange& sr = todo.back(); | ||||
|  | @ -1080,7 +1041,7 @@ static CordRep* NewSubRange(CordRep* node, size_t pos, size_t n) { | |||
|     } | ||||
|   } while (!todo.empty()); | ||||
|   assert(results.size() == 1); | ||||
|   return results.back(); | ||||
|   return results[0]; | ||||
| } | ||||
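`NewSubRange` above drives the extraction with an explicit `todo` stack in which a null node is a marker meaning "concatenate the two most recent results". Below is a small self-contained sketch of that work-list pattern on a toy tree of strings; the `Tree` and `Flatten` names are invented for illustration and are not Abseil code.

```cpp
// Sketch of the work-list pattern used by NewSubRange above: an explicit todo
// stack where a null entry means "combine the last two results".
#include <cstdio>
#include <memory>
#include <string>
#include <utility>
#include <vector>

struct Tree {
  std::string leaf;                   // non-empty for leaves
  std::unique_ptr<Tree> left, right;  // both set for interior nodes
};

// Post-order flatten of a leaf/interior tree without recursion.
std::string Flatten(const Tree* root) {
  std::vector<std::string> results;
  std::vector<const Tree*> todo = {root};
  while (!todo.empty()) {
    const Tree* node = todo.back();
    todo.pop_back();
    if (node == nullptr) {
      // Sentinel: the two most recent results belong to the same parent.
      std::string right = std::move(results.back());
      results.pop_back();
      results.back() += right;
    } else if (node->left == nullptr) {
      results.push_back(node->leaf);  // leaf node
    } else {
      // Visit left first, then right, then emit the combine sentinel.
      todo.push_back(nullptr);
      todo.push_back(node->right.get());
      todo.push_back(node->left.get());
    }
  }
  return results.back();
}

int main() {
  Tree t;
  t.left = std::make_unique<Tree>();
  t.left->leaf = "Hello, ";
  t.right = std::make_unique<Tree>();
  t.right->leaf = "Cord!";
  std::printf("%s\n", Flatten(&t).c_str());  // prints: Hello, Cord!
}
```

As in the diff above, the worst case adds a bounded number of entries per visited node and removes one per iteration, so the stack depth stays proportional to the tree depth.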
| 
 | ||||
| Cord Cord::Subcord(size_t pos, size_t new_size) const { | ||||
|  | @ -1096,7 +1057,7 @@ Cord Cord::Subcord(size_t pos, size_t new_size) const { | |||
|   } else if (new_size == 0) { | ||||
|     // We want to return empty subcord, so nothing to do.
 | ||||
|   } else if (new_size <= InlineRep::kMaxInline) { | ||||
|     Cord::InternalChunkIterator it = internal_chunk_begin(); | ||||
|     Cord::ChunkIterator it = chunk_begin(); | ||||
|     it.AdvanceBytes(pos); | ||||
|     char* dest = sub_cord.contents_.data_; | ||||
|     size_t remaining_size = new_size; | ||||
|  | @ -1119,12 +1080,11 @@ Cord Cord::Subcord(size_t pos, size_t new_size) const { | |||
| 
 | ||||
| class CordForest { | ||||
|  public: | ||||
|   explicit CordForest(size_t length) : root_length_(length), trees_({}) {} | ||||
|   explicit CordForest(size_t length) | ||||
|       : root_length_(length), trees_(kMinLengthSize, nullptr) {} | ||||
| 
 | ||||
|   void Build(CordRep* cord_root) { | ||||
|     // We are adding up to two nodes to the `pending` list, but we also popping
 | ||||
|     // one, so the size of `pending` will never exceed `MaxCordDepth()`.
 | ||||
|     cord_internal::CordTreeMutablePath pending(cord_root); | ||||
|     std::vector<CordRep*> pending = {cord_root}; | ||||
| 
 | ||||
|     while (!pending.empty()) { | ||||
|       CordRep* node = pending.back(); | ||||
|  | @ -1136,20 +1096,21 @@ class CordForest { | |||
|       } | ||||
| 
 | ||||
|       CordRepConcat* concat_node = node->concat(); | ||||
|       if (IsNodeBalanced(concat_node)) { | ||||
|         AddNode(node); | ||||
|         continue; | ||||
|       } | ||||
|       pending.push_back(concat_node->right); | ||||
|       pending.push_back(concat_node->left); | ||||
|       if (concat_node->depth() >= kMinLengthSize || | ||||
|           concat_node->length < min_length[concat_node->depth()]) { | ||||
|         pending.push_back(concat_node->right); | ||||
|         pending.push_back(concat_node->left); | ||||
| 
 | ||||
|       if (concat_node->refcount.IsOne()) { | ||||
|         concat_node->left = concat_freelist_; | ||||
|         concat_freelist_ = concat_node; | ||||
|         if (concat_node->refcount.IsOne()) { | ||||
|           concat_node->left = concat_freelist_; | ||||
|           concat_freelist_ = concat_node; | ||||
|         } else { | ||||
|           Ref(concat_node->right); | ||||
|           Ref(concat_node->left); | ||||
|           Unref(concat_node); | ||||
|         } | ||||
|       } else { | ||||
|         Ref(concat_node->right); | ||||
|         Ref(concat_node->left); | ||||
|         Unref(concat_node); | ||||
|         AddNode(node); | ||||
|       } | ||||
|     } | ||||
|   } | ||||
|  | @ -1181,7 +1142,7 @@ class CordForest { | |||
| 
 | ||||
|     // Collect together everything with which we will merge with node
 | ||||
|     int i = 0; | ||||
|     for (; node->length >= kMinLength[i + 1]; ++i) { | ||||
|     for (; node->length > min_length[i + 1]; ++i) { | ||||
|       auto& tree_at_i = trees_[i]; | ||||
| 
 | ||||
|       if (tree_at_i == nullptr) continue; | ||||
|  | @ -1192,7 +1153,7 @@ class CordForest { | |||
|     sum = AppendNode(node, sum); | ||||
| 
 | ||||
|     // Insert sum into appropriate place in the forest
 | ||||
|     for (; sum->length >= kMinLength[i]; ++i) { | ||||
|     for (; sum->length >= min_length[i]; ++i) { | ||||
|       auto& tree_at_i = trees_[i]; | ||||
|       if (tree_at_i == nullptr) continue; | ||||
| 
 | ||||
|  | @ -1200,7 +1161,7 @@ class CordForest { | |||
|       tree_at_i = nullptr; | ||||
|     } | ||||
| 
 | ||||
|     // kMinLength[0] == 1, which means sum->length >= kMinLength[0]
 | ||||
|     // min_length[0] == 1, which means sum->length >= min_length[0]
 | ||||
|     assert(i > 0); | ||||
|     trees_[i - 1] = sum; | ||||
|   } | ||||
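`CordForest::AddNode` above implements the forest-based rebalancing from the rope paper cited in cord.h: balanced pieces are slotted into a forest indexed by Fibonacci length classes, folding smaller occupied slots into the new piece much like carrying in a positional number system. The sketch below imitates that step with strings standing in for subtrees; the `Forest` class, slot count, and example pieces are all invented for illustration.

```cpp
// Sketch of the forest-merge step in CordForest::AddNode above, with strings
// as stand-in subtrees and Fibonacci numbers as the size classes.
#include <cstdint>
#include <cstdio>
#include <initializer_list>
#include <string>
#include <vector>

constexpr uint64_t Fib(unsigned n, uint64_t a = 0, uint64_t b = 1) {
  return n == 0 ? a : Fib(n - 1, b, a + b);
}

class Forest {
 public:
  Forest() : slots_(32) {}  // generously sized for this demo

  // Add a (balanced) piece: fold in every smaller occupied slot, append the
  // piece, keep folding while the sum reaches the next class, then park the
  // result in the slot just below the first class it no longer reaches.
  void Add(std::string piece) {
    if (piece.empty()) return;  // the sketch assumes non-empty pieces
    size_t i = 0;
    std::string sum;
    // Thresholds follow min_length above: slot i corresponds to Fib(i + 2).
    for (; piece.size() > Fib(i + 3); ++i) {
      if (slots_[i].empty()) continue;
      sum = slots_[i] + sum;  // older pieces sit to the left
      slots_[i].clear();
    }
    sum += piece;
    for (; sum.size() >= Fib(i + 2); ++i) {
      if (slots_[i].empty()) continue;
      sum = slots_[i] + sum;
      slots_[i].clear();
    }
    slots_[i - 1] = sum;
  }

  // Concatenate the remaining slots, largest (oldest, leftmost) first.
  std::string Result() const {
    std::string out;
    for (const std::string& s : slots_) out = s + out;
    return out;
  }

 private:
  std::vector<std::string> slots_;
};

int main() {
  Forest forest;
  for (const char* piece : {"ab", "cde", "f", "ghij"}) forest.Add(piece);
  std::printf("%s\n", forest.Result().c_str());  // prints: abcdefghij
}
```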
|  | @ -1233,7 +1194,9 @@ class CordForest { | |||
|   } | ||||
| 
 | ||||
|   size_t root_length_; | ||||
|   std::array<cord_internal::CordRep*, cord_internal::MaxCordDepth()> trees_; | ||||
| 
 | ||||
|   // use an inlined vector instead of a flat array to get bounds checking
 | ||||
|   absl::InlinedVector<CordRep*, kInlinedVectorSize> trees_; | ||||
| 
 | ||||
|   // List of concat nodes we can re-use for Cord balancing.
 | ||||
|   CordRepConcat* concat_freelist_ = nullptr; | ||||
|  | @ -1334,7 +1297,7 @@ inline absl::string_view Cord::InlineRep::FindFlatStartPiece() const { | |||
| 
 | ||||
| inline int Cord::CompareSlowPath(absl::string_view rhs, size_t compared_size, | ||||
|                                  size_t size_to_compare) const { | ||||
|   auto advance = [](Cord::InternalChunkIterator* it, absl::string_view* chunk) { | ||||
|   auto advance = [](Cord::ChunkIterator* it, absl::string_view* chunk) { | ||||
|     if (!chunk->empty()) return true; | ||||
|     ++*it; | ||||
|     if (it->bytes_remaining_ == 0) return false; | ||||
|  | @ -1342,7 +1305,7 @@ inline int Cord::CompareSlowPath(absl::string_view rhs, size_t compared_size, | |||
|     return true; | ||||
|   }; | ||||
| 
 | ||||
|   Cord::InternalChunkIterator lhs_it = internal_chunk_begin(); | ||||
|   Cord::ChunkIterator lhs_it = chunk_begin(); | ||||
| 
 | ||||
|   // compared_size is inside first chunk.
 | ||||
|   absl::string_view lhs_chunk = | ||||
|  | @ -1364,7 +1327,7 @@ inline int Cord::CompareSlowPath(absl::string_view rhs, size_t compared_size, | |||
| 
 | ||||
| inline int Cord::CompareSlowPath(const Cord& rhs, size_t compared_size, | ||||
|                                  size_t size_to_compare) const { | ||||
|   auto advance = [](Cord::InternalChunkIterator* it, absl::string_view* chunk) { | ||||
|   auto advance = [](Cord::ChunkIterator* it, absl::string_view* chunk) { | ||||
|     if (!chunk->empty()) return true; | ||||
|     ++*it; | ||||
|     if (it->bytes_remaining_ == 0) return false; | ||||
|  | @ -1372,8 +1335,8 @@ inline int Cord::CompareSlowPath(const Cord& rhs, size_t compared_size, | |||
|     return true; | ||||
|   }; | ||||
| 
 | ||||
|   Cord::InternalChunkIterator lhs_it = internal_chunk_begin(); | ||||
|   Cord::InternalChunkIterator rhs_it = rhs.internal_chunk_begin(); | ||||
|   Cord::ChunkIterator lhs_it = chunk_begin(); | ||||
|   Cord::ChunkIterator rhs_it = rhs.chunk_begin(); | ||||
| 
 | ||||
|   // compared_size is inside both first chunks.
 | ||||
|   absl::string_view lhs_chunk = | ||||
|  | @ -1507,9 +1470,7 @@ void Cord::CopyToArraySlowPath(char* dst) const { | |||
|   } | ||||
| } | ||||
| 
 | ||||
| template <typename StorageType> | ||||
| Cord::GenericChunkIterator<StorageType>& | ||||
| Cord::GenericChunkIterator<StorageType>::operator++() { | ||||
| Cord::ChunkIterator& Cord::ChunkIterator::operator++() { | ||||
|   ABSL_HARDENING_ASSERT(bytes_remaining_ > 0 && | ||||
|                         "Attempted to iterate past `end()`"); | ||||
|   assert(bytes_remaining_ >= current_chunk_.size()); | ||||
|  | @ -1549,8 +1510,7 @@ Cord::GenericChunkIterator<StorageType>::operator++() { | |||
|   return *this; | ||||
| } | ||||
| 
 | ||||
| template <typename StorageType> | ||||
| Cord Cord::GenericChunkIterator<StorageType>::AdvanceAndReadBytes(size_t n) { | ||||
| Cord Cord::ChunkIterator::AdvanceAndReadBytes(size_t n) { | ||||
|   ABSL_HARDENING_ASSERT(bytes_remaining_ >= n && | ||||
|                         "Attempted to iterate past `end()`"); | ||||
|   Cord subcord; | ||||
|  | @ -1664,8 +1624,7 @@ Cord Cord::GenericChunkIterator<StorageType>::AdvanceAndReadBytes(size_t n) { | |||
|   return subcord; | ||||
| } | ||||
| 
 | ||||
| template <typename StorageType> | ||||
| void Cord::GenericChunkIterator<StorageType>::AdvanceBytesSlowPath(size_t n) { | ||||
| void Cord::ChunkIterator::AdvanceBytesSlowPath(size_t n) { | ||||
|   assert(bytes_remaining_ >= n && "Attempted to iterate past `end()`"); | ||||
|   assert(n >= current_chunk_.size());  // This should only be called when
 | ||||
|                                        // iterating to a new node.
 | ||||
|  | @ -1851,18 +1810,18 @@ absl::string_view Cord::FlattenSlowPath() { | |||
|   } | ||||
| } | ||||
| 
 | ||||
| static void DumpNode(const CordRep* rep, bool include_data, std::ostream* os) { | ||||
| static void DumpNode(CordRep* rep, bool include_data, std::ostream* os) { | ||||
|   const int kIndentStep = 1; | ||||
|   int indent = 0; | ||||
|   cord_internal::CordTreeConstPath stack; | ||||
|   cord_internal::CordTreePath<int, cord_internal::MaxCordDepth()> indents; | ||||
|   absl::InlinedVector<CordRep*, kInlinedVectorSize> stack; | ||||
|   absl::InlinedVector<int, kInlinedVectorSize> indents; | ||||
|   for (;;) { | ||||
|     *os << std::setw(3) << rep->refcount.Get(); | ||||
|     *os << " " << std::setw(7) << rep->length; | ||||
|     *os << " ["; | ||||
|     if (include_data) *os << static_cast<const void*>(rep); | ||||
|     if (include_data) *os << static_cast<void*>(rep); | ||||
|     *os << "]"; | ||||
|     *os << " " << (IsNodeBalanced(rep) ? 'b' : 'u'); | ||||
|     *os << " " << (IsRootBalanced(rep) ? 'b' : 'u'); | ||||
|     *os << " " << std::setw(indent) << ""; | ||||
|     if (rep->tag == CONCAT) { | ||||
|       *os << "CONCAT depth=" << Depth(rep) << "\n"; | ||||
|  | @ -1883,7 +1842,7 @@ static void DumpNode(const CordRep* rep, bool include_data, std::ostream* os) { | |||
|       } else { | ||||
|         *os << "FLAT cap=" << TagToLength(rep->tag) << " ["; | ||||
|         if (include_data) | ||||
|           *os << absl::CEscape(absl::string_view(rep->data, rep->length)); | ||||
|           *os << absl::CEscape(std::string(rep->data, rep->length)); | ||||
|         *os << "]\n"; | ||||
|       } | ||||
|       if (stack.empty()) break; | ||||
|  | @ -1896,19 +1855,19 @@ static void DumpNode(const CordRep* rep, bool include_data, std::ostream* os) { | |||
|   ABSL_INTERNAL_CHECK(indents.empty(), ""); | ||||
| } | ||||
| 
 | ||||
| static std::string ReportError(const CordRep* root, const CordRep* node) { | ||||
| static std::string ReportError(CordRep* root, CordRep* node) { | ||||
|   std::ostringstream buf; | ||||
|   buf << "Error at node " << node << " in:"; | ||||
|   DumpNode(root, true, &buf); | ||||
|   return buf.str(); | ||||
| } | ||||
| 
 | ||||
| static bool VerifyNode(const CordRep* root, const CordRep* start_node, | ||||
| static bool VerifyNode(CordRep* root, CordRep* start_node, | ||||
|                        bool full_validation) { | ||||
|   cord_internal::CordTreeConstPath worklist; | ||||
|   absl::InlinedVector<CordRep*, 2> worklist; | ||||
|   worklist.push_back(start_node); | ||||
|   do { | ||||
|     const CordRep* node = worklist.back(); | ||||
|     CordRep* node = worklist.back(); | ||||
|     worklist.pop_back(); | ||||
| 
 | ||||
|     ABSL_INTERNAL_CHECK(node != nullptr, ReportError(root, node)); | ||||
|  | @ -1958,7 +1917,7 @@ static bool VerifyNode(const CordRep* root, const CordRep* start_node, | |||
|   // Iterate over the tree. cur_node is never a leaf node and leaf nodes will
 | ||||
|   // never be appended to tree_stack. This reduces overhead from manipulating
 | ||||
|   // tree_stack.
 | ||||
|   cord_internal::CordTreeConstPath tree_stack; | ||||
|   absl::InlinedVector<const CordRep*, kInlinedVectorSize> tree_stack; | ||||
|   const CordRep* cur_node = rep; | ||||
|   while (true) { | ||||
|     const CordRep* next_node = nullptr; | ||||
|  | @ -2005,9 +1964,6 @@ std::ostream& operator<<(std::ostream& out, const Cord& cord) { | |||
|   return out; | ||||
| } | ||||
| 
 | ||||
| template class Cord::GenericChunkIterator<cord_internal::CordTreeMutablePath>; | ||||
| template class Cord::GenericChunkIterator<cord_internal::CordTreeDynamicPath>; | ||||
| 
 | ||||
| namespace strings_internal { | ||||
| size_t CordTestAccess::FlatOverhead() { return kFlatOverhead; } | ||||
| size_t CordTestAccess::MaxFlatLength() { return kMaxFlatLength; } | ||||
|  |  | |||
|  | @ -123,132 +123,12 @@ H HashFragmentedCord(H, const Cord&); | |||
| // Additionally, the API provides iterator utilities to iterate through Cord
 | ||||
| // data via chunks or character bytes.
 | ||||
| //
 | ||||
| 
 | ||||
| namespace cord_internal { | ||||
| 
 | ||||
| // It's expensive to keep a Cord's tree perfectly balanced, so instead we keep
 | ||||
| // trees approximately balanced.  A tree node N of depth D(N) that contains a
 | ||||
| // string of L(N) characters is considered balanced if L >= Fibonacci(D + 2).
 | ||||
| // The "+ 2" is used to ensure that every balanced leaf node contains at least
 | ||||
| //  one character. Here we presume that
 | ||||
| //   Fibonacci(0) = 0
 | ||||
| //   Fibonacci(1) = 1
 | ||||
| //   Fibonacci(2) = 1
 | ||||
| //   Fibonacci(3) = 2
 | ||||
| //   ...
 | ||||
| // The algorithm is based on paper by Hans Boehm et al:
 | ||||
| // https://www.cs.rit.edu/usr/local/pub/jeh/courses/QUARTERS/FP/Labs/CedarRope/rope-paper.pdf
 | ||||
| // In this paper authors shows that rebalancing based on cord forest of already
 | ||||
| // balanced subtrees can be proven to never produce tree of depth larger than
 | ||||
| // largest Fibonacci number representable in the same integral type as cord size
 | ||||
| // For 64 bit integers this is the 93rd Fibonacci number. For 32 bit integrals
 | ||||
| // this is 47th Fibonacci number.
 | ||||
| constexpr size_t MaxCordDepth() { return sizeof(size_t) == 8 ? 93 : 47; } | ||||
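The comment above ties the depth bound to the largest Fibonacci number representable in the cord's size type: index 93 for 64-bit, 47 for 32-bit. The quick check below, which is not part of the Abseil sources and assumes a compiler with the `unsigned __int128` extension (gcc/clang), confirms the 64-bit figure.

```cpp
// Verifies the bound quoted above: Fibonacci(93) is the last Fibonacci number
// that fits in an unsigned 64-bit integer. Not part of the Abseil sources;
// requires the unsigned __int128 extension to avoid overflow while counting.
#include <cstdint>
#include <cstdio>

int main() {
  unsigned __int128 a = 0, b = 1;  // Fib(0), Fib(1)
  int n = 1;                       // index of b
  while (b <= UINT64_MAX) {
    unsigned __int128 next = a + b;
    a = b;
    b = next;
    ++n;
  }
  // The loop stops at the first index whose value no longer fits in 64 bits,
  // so the previous index is the bound used by MaxCordDepth().
  std::printf("largest 64-bit Fibonacci index: %d\n", n - 1);  // prints 93
}
```

Swapping `UINT64_MAX` for `UINT32_MAX` yields 47, matching the 32-bit branch of `MaxCordDepth()`.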
| 
 | ||||
| // This class models fixed max size stack of CordRep pointers.
 | ||||
| // The elements are being pushed back and popped from the back.
 | ||||
| template <typename CordRepPtr, size_t N> | ||||
| class CordTreePath { | ||||
|  public: | ||||
|   CordTreePath() {} | ||||
|   explicit CordTreePath(CordRepPtr root) { push_back(root); } | ||||
| 
 | ||||
|   bool empty() const { return size_ == 0; } | ||||
|   size_t size() const { return size_; } | ||||
|   void clear() { size_ = 0; } | ||||
| 
 | ||||
|   CordRepPtr back() { return data_[size_ - 1]; } | ||||
| 
 | ||||
|   void pop_back() { | ||||
|     --size_; | ||||
|     assert(size_ < N); | ||||
|   } | ||||
|   void push_back(CordRepPtr elem) { data_[size_++] = elem; } | ||||
| 
 | ||||
|  private: | ||||
|   CordRepPtr data_[N]; | ||||
|   size_t size_ = 0; | ||||
| }; | ||||
| 
 | ||||
| // Fixed length container for mutable "path" in cord tree, which can hold any
 | ||||
| // possible valid path in cord tree.
 | ||||
| using CordTreeMutablePath = CordTreePath<CordRep*, MaxCordDepth()>; | ||||
| // Variable length container for mutable "path" in cord tree. It starts with
 | ||||
| // capacity for 15 elements and grow if necessary.
 | ||||
| using CordTreeDynamicPath = | ||||
|     absl::InlinedVector<absl::cord_internal::CordRep*, 15>; | ||||
| }  // namespace cord_internal
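`CordTreePath` above is a fixed-capacity stack sized so that any bounded tree walk fits without heap allocation. The sketch below, with invented `FixedPath` and `Node` types that are not Abseil's, shows the same idea in its typical role: descending the left spine of a concat-style tree while remembering the right children still to visit.

```cpp
// Sketch of a fixed-capacity path/stack like CordTreePath above, used to walk
// a binary (concat-style) tree without recursion. Illustrative names only.
#include <cassert>
#include <cstddef>
#include <cstdio>

template <typename T, size_t N>
class FixedPath {
 public:
  bool empty() const { return size_ == 0; }
  size_t size() const { return size_; }
  T back() const { return data_[size_ - 1]; }
  void pop_back() { assert(size_ > 0); --size_; }
  void push_back(T elem) { assert(size_ < N); data_[size_++] = elem; }

 private:
  T data_[N];
  size_t size_ = 0;
};

struct Node {
  int value;             // payload for leaves
  Node* left = nullptr;  // both set for interior nodes
  Node* right = nullptr;
};

// Sum the leaves left-to-right using a bounded stack of pending right
// children: at most one entry per level of the tree.
int SumLeaves(Node* root) {
  FixedPath<Node*, 64> pending;  // 64 stands in for the tree's depth bound
  int sum = 0;
  Node* node = root;
  while (true) {
    while (node->left != nullptr) {    // walk down the left spine,
      pending.push_back(node->right);  // remembering right children
      node = node->left;
    }
    sum += node->value;                // leaf reached
    if (pending.empty()) break;
    node = pending.back();
    pending.pop_back();
  }
  return sum;
}

int main() {
  Node leaves[4] = {{1}, {2}, {3}, {4}};
  Node inner1{0, &leaves[0], &leaves[1]};
  Node inner2{0, &leaves[2], &leaves[3]};
  Node root{0, &inner1, &inner2};
  std::printf("sum=%d\n", SumLeaves(&root));  // prints: sum=10
}
```

A bounded stack of pending right children is essentially what the chunk iterator's `stack_of_right_children_` member holds while it walks a tree.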
 | ||||
| 
 | ||||
| // A Cord is a sequence of characters.
 | ||||
| class Cord { | ||||
|  private: | ||||
|   template <typename T> | ||||
|   using EnableIfString = | ||||
|       absl::enable_if_t<std::is_same<T, std::string>::value, int>; | ||||
| 
 | ||||
|   //----------------------------------------------------------------------------
 | ||||
|   // Cord::GenericChunkIterator
 | ||||
|   //----------------------------------------------------------------------------
 | ||||
|   //
 | ||||
|   // A `Cord::GenericChunkIterator` provides an interface for the standard
 | ||||
|   // `Cord::ChunkIterator` as well as some private implementations.
 | ||||
|   template <typename StorageType> | ||||
|   class GenericChunkIterator { | ||||
|    public: | ||||
|     using iterator_category = std::input_iterator_tag; | ||||
|     using value_type = absl::string_view; | ||||
|     using difference_type = ptrdiff_t; | ||||
|     using pointer = const value_type*; | ||||
|     using reference = value_type; | ||||
| 
 | ||||
|     GenericChunkIterator() = default; | ||||
| 
 | ||||
|     GenericChunkIterator& operator++(); | ||||
|     GenericChunkIterator operator++(int); | ||||
|     bool operator==(const GenericChunkIterator& other) const; | ||||
|     bool operator!=(const GenericChunkIterator& other) const; | ||||
|     reference operator*() const; | ||||
|     pointer operator->() const; | ||||
| 
 | ||||
|     friend class Cord; | ||||
|     friend class CharIterator; | ||||
| 
 | ||||
|    private: | ||||
|     // Constructs a `begin()` iterator from `cord`.
 | ||||
|     explicit GenericChunkIterator(const Cord* cord); | ||||
| 
 | ||||
|     // Removes `n` bytes from `current_chunk_`. Expects `n` to be smaller than
 | ||||
|     // `current_chunk_.size()`.
 | ||||
|     void RemoveChunkPrefix(size_t n); | ||||
|     Cord AdvanceAndReadBytes(size_t n); | ||||
|     void AdvanceBytes(size_t n); | ||||
|     // Iterates `n` bytes, where `n` is expected to be greater than or equal to
 | ||||
|     // `current_chunk_.size()`.
 | ||||
|     void AdvanceBytesSlowPath(size_t n); | ||||
| 
 | ||||
|     // A view into bytes of the current `CordRep`. It may only be a view to a
 | ||||
|     // suffix of bytes if this is being used by `CharIterator`.
 | ||||
|     absl::string_view current_chunk_; | ||||
|     // The current leaf, or `nullptr` if the iterator points to short data.
 | ||||
|     // If the current chunk is a substring node, current_leaf_ points to the
 | ||||
|     // underlying flat or external node.
 | ||||
|     cord_internal::CordRep* current_leaf_ = nullptr; | ||||
|     // The number of bytes left in the `Cord` over which we are iterating.
 | ||||
|     size_t bytes_remaining_ = 0; | ||||
|     StorageType stack_of_right_children_; | ||||
|   }; | ||||
|   template <typename IteratorType> | ||||
|   class GenericChunkRange { | ||||
|    public: | ||||
|     explicit GenericChunkRange(const Cord* cord) : cord_(cord) {} | ||||
| 
 | ||||
|     IteratorType begin() const { return IteratorType(cord_); } | ||||
|     IteratorType end() const { return IteratorType(); } | ||||
| 
 | ||||
|    private: | ||||
|     const Cord* cord_; | ||||
|   }; | ||||
| 
 | ||||
|  public: | ||||
|   // Cord::Cord() Constructors
 | ||||
| 
 | ||||
|  | @ -464,8 +344,51 @@ class Cord { | |||
|   //   * The iterator keeps state that can grow for Cords that contain many
 | ||||
|   //     nodes and are imbalanced due to sharing. Prefer to pass this type by
 | ||||
|   //     const reference instead of by value.
 | ||||
|   using ChunkIterator = | ||||
|       GenericChunkIterator<cord_internal::CordTreeDynamicPath>; | ||||
|   class ChunkIterator { | ||||
|    public: | ||||
|     using iterator_category = std::input_iterator_tag; | ||||
|     using value_type = absl::string_view; | ||||
|     using difference_type = ptrdiff_t; | ||||
|     using pointer = const value_type*; | ||||
|     using reference = value_type; | ||||
| 
 | ||||
|     ChunkIterator() = default; | ||||
| 
 | ||||
|     ChunkIterator& operator++(); | ||||
|     ChunkIterator operator++(int); | ||||
|     bool operator==(const ChunkIterator& other) const; | ||||
|     bool operator!=(const ChunkIterator& other) const; | ||||
|     reference operator*() const; | ||||
|     pointer operator->() const; | ||||
| 
 | ||||
|     friend class Cord; | ||||
|     friend class CharIterator; | ||||
| 
 | ||||
|    private: | ||||
|     // Constructs a `begin()` iterator from `cord`.
 | ||||
|     explicit ChunkIterator(const Cord* cord); | ||||
| 
 | ||||
|     // Removes `n` bytes from `current_chunk_`. Expects `n` to be smaller than
 | ||||
|     // `current_chunk_.size()`.
 | ||||
|     void RemoveChunkPrefix(size_t n); | ||||
|     Cord AdvanceAndReadBytes(size_t n); | ||||
|     void AdvanceBytes(size_t n); | ||||
|     // Iterates `n` bytes, where `n` is expected to be greater than or equal to
 | ||||
|     // `current_chunk_.size()`.
 | ||||
|     void AdvanceBytesSlowPath(size_t n); | ||||
| 
 | ||||
|     // A view into bytes of the current `CordRep`. It may only be a view to a
 | ||||
|     // suffix of bytes if this is being used by `CharIterator`.
 | ||||
|     absl::string_view current_chunk_; | ||||
|     // The current leaf, or `nullptr` if the iterator points to short data.
 | ||||
|     // If the current chunk is a substring node, current_leaf_ points to the
 | ||||
|     // underlying flat or external node.
 | ||||
|     absl::cord_internal::CordRep* current_leaf_ = nullptr; | ||||
|     // The number of bytes left in the `Cord` over which we are iterating.
 | ||||
|     size_t bytes_remaining_ = 0; | ||||
|     absl::InlinedVector<absl::cord_internal::CordRep*, 4> | ||||
|         stack_of_right_children_; | ||||
|   }; | ||||
| 
 | ||||
|   // Cord::ChunkIterator::chunk_begin()
 | ||||
|   //
 | ||||
|  | @ -504,7 +427,16 @@ class Cord { | |||
|   //
 | ||||
|   // Implementation note: `ChunkRange` is simply a convenience wrapper over
 | ||||
|   // `Cord::chunk_begin()` and `Cord::chunk_end()`.
 | ||||
|   using ChunkRange = GenericChunkRange<ChunkIterator>; | ||||
|   class ChunkRange { | ||||
|    public: | ||||
|     explicit ChunkRange(const Cord* cord) : cord_(cord) {} | ||||
| 
 | ||||
|     ChunkIterator begin() const; | ||||
|     ChunkIterator end() const; | ||||
| 
 | ||||
|    private: | ||||
|     const Cord* cord_; | ||||
|   }; | ||||
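`ChunkRange` above is the wrapper returned by `Cord::Chunks()` (documented just below), so chunk iteration is normally written as a range-for loop. A short usage example follows; `absl::Cord`, `Append`, `Chunks()`, and `size()` are the public API shown in this header, while the strings and counts are made up.

```cpp
// Typical chunk iteration through the public API above: Cord::Chunks() yields
// absl::string_view chunks covering the Cord's bytes in order.
#include <cstddef>
#include <iostream>

#include "absl/strings/cord.h"
#include "absl/strings/string_view.h"

int main() {
  absl::Cord cord("Hello, ");
  cord.Append("Cord chunks!");

  size_t total = 0;
  for (absl::string_view chunk : cord.Chunks()) {
    total += chunk.size();
    std::cout << "chunk of " << chunk.size() << " bytes\n";
  }
  std::cout << "total=" << total << " size()=" << cord.size() << "\n";
  return 0;
}
```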
| 
 | ||||
|   // Cord::Chunks()
 | ||||
|   //
 | ||||
|  | @ -800,14 +732,6 @@ class Cord { | |||
|   static bool GetFlatAux(absl::cord_internal::CordRep* rep, | ||||
|                          absl::string_view* fragment); | ||||
| 
 | ||||
|   // Iterators for use inside Cord implementation
 | ||||
|   using InternalChunkIterator = | ||||
|       GenericChunkIterator<cord_internal::CordTreeMutablePath>; | ||||
|   using InternalChunkRange = GenericChunkRange<InternalChunkIterator>; | ||||
| 
 | ||||
|   InternalChunkIterator internal_chunk_begin() const; | ||||
|   InternalChunkRange InternalChunks() const; | ||||
| 
 | ||||
|   // Helper for ForEachChunk()
 | ||||
|   static void ForEachChunkAux( | ||||
|       absl::cord_internal::CordRep* rep, | ||||
|  | @ -842,11 +766,6 @@ class Cord { | |||
|   void AppendImpl(C&& src); | ||||
| }; | ||||
| 
 | ||||
| extern template class Cord::GenericChunkIterator< | ||||
|     cord_internal::CordTreeMutablePath>; | ||||
| extern template class Cord::GenericChunkIterator< | ||||
|     cord_internal::CordTreeDynamicPath>; | ||||
| 
 | ||||
| ABSL_NAMESPACE_END | ||||
| }  // namespace absl
 | ||||
| 
 | ||||
|  | @ -1186,9 +1105,7 @@ inline bool Cord::StartsWith(absl::string_view rhs) const { | |||
|   return EqualsImpl(rhs, rhs_size); | ||||
| } | ||||
| 
 | ||||
| template <typename StorageType> | ||||
| inline Cord::GenericChunkIterator<StorageType>::GenericChunkIterator( | ||||
|     const Cord* cord) | ||||
| inline Cord::ChunkIterator::ChunkIterator(const Cord* cord) | ||||
|     : bytes_remaining_(cord->size()) { | ||||
|   if (cord->empty()) return; | ||||
|   if (cord->contents_.is_tree()) { | ||||
|  | @ -1199,50 +1116,37 @@ inline Cord::GenericChunkIterator<StorageType>::GenericChunkIterator( | |||
|   } | ||||
| } | ||||
| 
 | ||||
| template <typename StorageType> | ||||
| inline Cord::GenericChunkIterator<StorageType> | ||||
| Cord::GenericChunkIterator<StorageType>::operator++(int) { | ||||
|   GenericChunkIterator tmp(*this); | ||||
| inline Cord::ChunkIterator Cord::ChunkIterator::operator++(int) { | ||||
|   ChunkIterator tmp(*this); | ||||
|   operator++(); | ||||
|   return tmp; | ||||
| } | ||||
| 
 | ||||
| template <typename StorageType> | ||||
| inline bool Cord::GenericChunkIterator<StorageType>::operator==( | ||||
|     const GenericChunkIterator<StorageType>& other) const { | ||||
| inline bool Cord::ChunkIterator::operator==(const ChunkIterator& other) const { | ||||
|   return bytes_remaining_ == other.bytes_remaining_; | ||||
| } | ||||
| 
 | ||||
| template <typename StorageType> | ||||
| inline bool Cord::GenericChunkIterator<StorageType>::operator!=( | ||||
|     const GenericChunkIterator<StorageType>& other) const { | ||||
| inline bool Cord::ChunkIterator::operator!=(const ChunkIterator& other) const { | ||||
|   return !(*this == other); | ||||
| } | ||||
| 
 | ||||
| template <typename StorageType> | ||||
| inline typename Cord::GenericChunkIterator<StorageType>::reference | ||||
| Cord::GenericChunkIterator<StorageType>::operator*() const { | ||||
| inline Cord::ChunkIterator::reference Cord::ChunkIterator::operator*() const { | ||||
|   ABSL_HARDENING_ASSERT(bytes_remaining_ != 0); | ||||
|   return current_chunk_; | ||||
| } | ||||
| 
 | ||||
| template <typename StorageType> | ||||
| inline typename Cord::GenericChunkIterator<StorageType>::pointer | ||||
| Cord::GenericChunkIterator<StorageType>::operator->() const { | ||||
| inline Cord::ChunkIterator::pointer Cord::ChunkIterator::operator->() const { | ||||
|   ABSL_HARDENING_ASSERT(bytes_remaining_ != 0); | ||||
|   return ¤t_chunk_; | ||||
| } | ||||
| 
 | ||||
| template <typename StorageType> | ||||
| inline void Cord::GenericChunkIterator<StorageType>::RemoveChunkPrefix( | ||||
|     size_t n) { | ||||
| inline void Cord::ChunkIterator::RemoveChunkPrefix(size_t n) { | ||||
|   assert(n < current_chunk_.size()); | ||||
|   current_chunk_.remove_prefix(n); | ||||
|   bytes_remaining_ -= n; | ||||
| } | ||||
| 
 | ||||
| template <typename StorageType> | ||||
| inline void Cord::GenericChunkIterator<StorageType>::AdvanceBytes(size_t n) { | ||||
| inline void Cord::ChunkIterator::AdvanceBytes(size_t n) { | ||||
|   if (ABSL_PREDICT_TRUE(n < current_chunk_.size())) { | ||||
|     RemoveChunkPrefix(n); | ||||
|   } else if (n != 0) { | ||||
|  | @ -1256,6 +1160,14 @@ inline Cord::ChunkIterator Cord::chunk_begin() const { | |||
| 
 | ||||
| inline Cord::ChunkIterator Cord::chunk_end() const { return ChunkIterator(); } | ||||
| 
 | ||||
| inline Cord::ChunkIterator Cord::ChunkRange::begin() const { | ||||
|   return cord_->chunk_begin(); | ||||
| } | ||||
| 
 | ||||
| inline Cord::ChunkIterator Cord::ChunkRange::end() const { | ||||
|   return cord_->chunk_end(); | ||||
| } | ||||
| 
 | ||||
| inline Cord::ChunkRange Cord::Chunks() const { return ChunkRange(this); } | ||||
| 
 | ||||
| inline Cord::CharIterator& Cord::CharIterator::operator++() { | ||||
|  |  | |||
|  | @ -1403,53 +1403,6 @@ TEST(CordChunkIterator, Operations) { | |||
|   VerifyChunkIterator(subcords, 128); | ||||
| } | ||||
| 
 | ||||
| TEST(CordChunkIterator, MaxLengthFullTree) { | ||||
|   // Start with a 1-byte cord, and then double its length in a loop.  We should
 | ||||
|   // be able to do this until the point where we would overflow size_t.
 | ||||
| 
 | ||||
|   absl::Cord cord; | ||||
|   size_t size = 1; | ||||
|   AddExternalMemory("x", &cord); | ||||
|   EXPECT_EQ(cord.size(), size); | ||||
| 
 | ||||
|   const int kCordLengthDoublingLimit = std::numeric_limits<size_t>::digits - 1; | ||||
|   for (int i = 0; i < kCordLengthDoublingLimit; ++i) { | ||||
|     cord.Prepend(absl::Cord(cord)); | ||||
|     size <<= 1; | ||||
| 
 | ||||
|     EXPECT_EQ(cord.size(), size); | ||||
| 
 | ||||
|     auto chunk_it = cord.chunk_begin(); | ||||
|     EXPECT_EQ(*chunk_it, "x"); | ||||
|   } | ||||
| 
 | ||||
|   EXPECT_DEATH_IF_SUPPORTED( | ||||
|       (cord.Prepend(absl::Cord(cord)), *cord.chunk_begin()), | ||||
|       "Cord is too long"); | ||||
| } | ||||
| 
 | ||||
| TEST(CordChunkIterator, MaxDepth) { | ||||
|   // By reusing nodes, it's possible in pathological cases to build a Cord that
 | ||||
|   // exceeds both the maximum permissible length and depth.  In this case, the
 | ||||
|   // violation of the maximum depth is reported.
 | ||||
|   absl::Cord left_child; | ||||
|   AddExternalMemory("x", &left_child); | ||||
|   absl::Cord root = left_child; | ||||
| 
 | ||||
|   for (int i = 0; i < absl::cord_internal::MaxCordDepth() - 2; ++i) { | ||||
|     size_t new_size = left_child.size() + root.size(); | ||||
|     root.Prepend(left_child); | ||||
|     EXPECT_EQ(root.size(), new_size); | ||||
| 
 | ||||
|     auto chunk_it = root.chunk_begin(); | ||||
|     EXPECT_EQ(*chunk_it, "x"); | ||||
| 
 | ||||
|     std::swap(left_child, root); | ||||
|   } | ||||
| 
 | ||||
|   EXPECT_DEATH_IF_SUPPORTED(root.Prepend(left_child), "Cord is too long"); | ||||
| } | ||||
| 
 | ||||
| TEST(CordCharIterator, Traits) { | ||||
|   static_assert(std::is_copy_constructible<absl::Cord::CharIterator>::value, | ||||
|                 ""); | ||||
|  |  | |||