Export of internal Abseil changes

--
a3e58c1870a9626039f4d178d2d599319bd9f8a8 by Matt Kulukundis <kfm@google.com>:

Allow MakeCordFromExternal to take a zero-argument releaser.
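For illustration, a minimal sketch of the new form, patterned after the ConstructFromExternal.NoArgLambda test added below; the data string and the `invoked` flag are just placeholders:

#include "absl/strings/cord.h"

void ZeroArgReleaserExample() {
  bool invoked = false;
  // The releaser may now omit the absl::string_view parameter entirely.
  absl::Cord cord = absl::MakeCordFromExternal(
      "externally owned bytes",           // data must outlive the Cord
      [&invoked]() { invoked = true; });  // zero-argument releaser
  // The releaser runs once the external data is no longer referenced.
}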

PiperOrigin-RevId: 298650274

--
01897c4a9bb99f3dc329a794019498ad345ddebd by Samuel Benzaquen <sbenza@google.com>:

Reduce library bloat for absl::Flag by moving the definitions of the base class's virtual functions to a .cc file.
This removes duplicate symbols from user translation units and has the side effect of moving the vtable definition as well (via the key-function rule).
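Roughly, this relies on the usual key-function behavior; a generic sketch under assumed names (FlagImplBase is illustrative, not the actual absl::Flag internals):

// flag_impl.h (illustrative)
class FlagImplBase {
 public:
  virtual ~FlagImplBase();               // declared only; defined in the .cc file
  virtual void Parse(const char* text) = 0;
};

// flag_impl.cc (illustrative)
// The first non-inline virtual member function is the key function: the vtable
// and associated symbols are emitted once in this translation unit instead of
// weakly in every translation unit that uses the flag.
FlagImplBase::~FlagImplBase() = default;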

PiperOrigin-RevId: 298617920

--
190f0d3782c63aed01046886d7fbc1be5bca2de9 by Derek Mauro <dmauro@google.com>:

Import GitHub #596: Unbreak stacktrace code for UWP apps

PiperOrigin-RevId: 298600834

--
cd5cf6f8c87b35b85a9584e94da2a99057345b73 by Gennadiy Rozental <rogeeff@google.com>:

Use a union of a heap-allocated pointer, a one-word atomic, and a two-word atomic to represent a flag's value.

Any type T that is trivially copyable with sizeof(T) <= 8 is stored in an atomic int64_t.
Any type T that is trivially copyable with 8 < sizeof(T) <= 16 is stored in an atomic AlignedTwoWords.

We also introduce a value storage type to distinguish these cases.
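A hedged sketch of the selection rule, with made-up names (the real implementation lives in absl/flags/internal and differs in detail):

#include <string>
#include <type_traits>

enum class FlagValueStorageKind {
  kHeapAllocated,   // fallback: pointer to a heap-allocated value
  kOneWordAtomic,   // trivially copyable, sizeof(T) <= 8: stored in an atomic int64_t
  kTwoWordsAtomic,  // trivially copyable, 8 < sizeof(T) <= 16: stored in an atomic
                    // two-word struct (AlignedTwoWords in the description above)
};

template <typename T>
constexpr FlagValueStorageKind StorageKind() {
  return std::is_trivially_copyable<T>::value && sizeof(T) <= 8
             ? FlagValueStorageKind::kOneWordAtomic
         : std::is_trivially_copyable<T>::value && sizeof(T) <= 16
             ? FlagValueStorageKind::kTwoWordsAtomic
             : FlagValueStorageKind::kHeapAllocated;
}

static_assert(StorageKind<bool>() == FlagValueStorageKind::kOneWordAtomic, "");
static_assert(StorageKind<std::string>() == FlagValueStorageKind::kHeapAllocated, "");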

PiperOrigin-RevId: 298497200

--
f8fe7bd53bfed601f002f521e34ab4bc083fc28b by Matthew Brown <matthewbr@google.com>:

Ensure a deep copy and proper equality on absl::Status::ErasePayload
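The guaranteed behavior, sketched against the public payload API (the type URL is an arbitrary example string):

#include <cassert>

#include "absl/status/status.h"
#include "absl/strings/cord.h"

void ErasePayloadExample() {
  absl::Status a = absl::InvalidArgumentError("bad input");
  a.SetPayload("type.googleapis.com/example.Detail", absl::Cord("details"));
  absl::Status b = a;  // copies may share representation internally
  // ErasePayload must deep-copy before mutating, so `a` keeps its payload...
  b.ErasePayload("type.googleapis.com/example.Detail");
  assert(a.GetPayload("type.googleapis.com/example.Detail").has_value());
  // ...and payloads participate in equality, so the two now compare unequal.
  assert(a != b);
}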

PiperOrigin-RevId: 298482742

--
a5c9ccddf4b04f444e3f7e27dbc14faf1fcb5373 by Gennadiy Rozental <rogeeff@google.com>:

Change the ChunkIterator implementation to use a fixed-capacity collection of CordRep*. We can now assume that depth never exceeds 91, which makes the comparison operator exception safe.

I've tested that with this CL we do not observe any overhead from chunk_end(); the compiler optimizes this iterator away completely.
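The gist, as a hedged sketch (the real type is cord_internal::CordTreePath, visible in the cord.h hunk further down): inline, fixed-capacity storage means copying or comparing iterators never allocates and therefore cannot throw.

#include <cassert>
#include <cstddef>

// Illustrative fixed-capacity stack; not the actual Abseil class.
template <typename T, size_t N>
class FixedCapacityStack {
 public:
  bool empty() const { return size_ == 0; }
  size_t size() const { return size_; }
  const T& back() const { return data_[size_ - 1]; }
  void push_back(const T& v) { assert(size_ < N); data_[size_++] = v; }
  void pop_back() { assert(size_ > 0); --size_; }

 private:
  T data_[N];        // inline storage only: no heap allocation, ever
  size_t size_ = 0;
};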

PiperOrigin-RevId: 298458472

--
327ea5e8910bc388b03389c730763f9823abfce5 by Abseil Team <absl-team@google.com>:

Minor cleanups in b-tree code:
- Rename some variables: fix mismatched parameter names between definitions and declarations, and move away from `x` as a default, meaningless variable name.
- Make init_leaf/init_internal non-static methods (they already take the node as the first parameter).
- In internal_emplace/try_shrink, update root/rightmost the same way as in insert_unique/insert_multi.
- Replace a TODO with a comment.

PiperOrigin-RevId: 298432836

--
8020ce9ec8558ee712d9733ae3d660ac1d3ffe1a by Abseil Team <absl-team@google.com>:

Guard against an unnecessary copy in case the buffer is empty. This is important in cases where the user is explicitly tuning their chunks to match PiecewiseChunkSize().
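A hypothetical sketch of the pattern (names are illustrative; the real code is the piecewise hashing combiner in absl/hash): when the caller's chunks line up exactly with the chunk size, the trailing `size` is zero after the loop and the final copy would be pointless.

#include <cstddef>
#include <cstring>

constexpr size_t kChunkSize = 1024;  // stand-in for PiecewiseChunkSize()

struct PiecewiseCombinerSketch {
  char buffer[kChunkSize];
  size_t position = 0;

  void CombineChunk(const char* chunk);  // consumes one full chunk (not shown)

  void AddBuffer(const char* data, size_t size) {
    // Flush full chunks directly.
    while (position + size >= kChunkSize) {
      size_t n = kChunkSize - position;
      std::memcpy(buffer + position, data, n);
      CombineChunk(buffer);
      data += n;
      size -= n;
      position = 0;
    }
    if (size > 0) {  // the added guard: skip the copy when nothing remains
      std::memcpy(buffer + position, data, size);
      position += size;
    }
  }
};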

PiperOrigin-RevId: 298366044

--
89324441d1c0c697c90ba7d8fc63639805fcaa9d by Abseil Team <absl-team@google.com>:

Internal change

PiperOrigin-RevId: 298219363
GitOrigin-RevId: a3e58c1870a9626039f4d178d2d599319bd9f8a8
Change-Id: I28dffc684b6fd0292b94807b88ec6664d5d0e183
Authored by Abseil Team on 2020-03-03 11:22:10 -08:00; committed by Andy Soffer
parent 06f0e767d1
commit b19ba96766
24 changed files with 842 additions and 501 deletions


@@ -30,7 +30,6 @@
#include "absl/base/internal/raw_logging.h"
#include "absl/base/port.h"
#include "absl/container/fixed_array.h"
#include "absl/container/inlined_vector.h"
#include "absl/strings/escaping.h"
#include "absl/strings/internal/cord_internal.h"
#include "absl/strings/internal/resize_uninitialized.h"
@@ -132,6 +131,14 @@ inline const CordRepExternal* CordRep::external() const {
return static_cast<const CordRepExternal*>(this);
}
using CordTreeConstPath = CordTreePath<const CordRep*, MaxCordDepth()>;
// This type is used to store the list of pending nodes during re-balancing.
// Its maximum size is 2 * MaxCordDepth() because the tree has a maximum
// possible depth of MaxCordDepth() and every concat node along a tree path
// could theoretically be split during rebalancing.
using RebalancingStack = CordTreePath<CordRep*, 2 * MaxCordDepth()>;
} // namespace cord_internal
static const size_t kFlatOverhead = offsetof(CordRep, data);
@@ -180,8 +187,8 @@ static constexpr size_t TagToLength(uint8_t tag) {
// Enforce that kMaxFlatSize maps to a well-known exact tag value.
static_assert(TagToAllocatedSize(224) == kMaxFlatSize, "Bad tag logic");
constexpr uint64_t Fibonacci(unsigned char n, uint64_t a = 0, uint64_t b = 1) {
return n == 0 ? a : Fibonacci(n - 1, b, a + b);
constexpr uint64_t Fibonacci(uint8_t n, uint64_t a = 0, uint64_t b = 1) {
return n == 0 ? a : n == 1 ? b : Fibonacci(n - 1, b, a + b);
}
static_assert(Fibonacci(63) == 6557470319842,
@@ -189,89 +196,68 @@ static_assert(Fibonacci(63) == 6557470319842,
// Minimum length required for a given depth tree -- a tree is considered
// balanced if
// length(t) >= min_length[depth(t)]
// The root node depth is allowed to become twice as large to reduce rebalancing
// for larger strings (see IsRootBalanced).
static constexpr uint64_t min_length[] = {
Fibonacci(2),
Fibonacci(3),
Fibonacci(4),
Fibonacci(5),
Fibonacci(6),
Fibonacci(7),
Fibonacci(8),
Fibonacci(9),
Fibonacci(10),
Fibonacci(11),
Fibonacci(12),
Fibonacci(13),
Fibonacci(14),
Fibonacci(15),
Fibonacci(16),
Fibonacci(17),
Fibonacci(18),
Fibonacci(19),
Fibonacci(20),
Fibonacci(21),
Fibonacci(22),
Fibonacci(23),
Fibonacci(24),
Fibonacci(25),
Fibonacci(26),
Fibonacci(27),
Fibonacci(28),
Fibonacci(29),
Fibonacci(30),
Fibonacci(31),
Fibonacci(32),
Fibonacci(33),
Fibonacci(34),
Fibonacci(35),
Fibonacci(36),
Fibonacci(37),
Fibonacci(38),
Fibonacci(39),
Fibonacci(40),
Fibonacci(41),
Fibonacci(42),
Fibonacci(43),
Fibonacci(44),
Fibonacci(45),
Fibonacci(46),
Fibonacci(47),
0xffffffffffffffffull, // Avoid overflow
};
// length(t) >= kMinLength[depth(t)]
// The node depth is allowed to become larger to reduce rebalancing
// for larger strings (see ShouldRebalance).
constexpr uint64_t kMinLength[] = {
Fibonacci(2), Fibonacci(3), Fibonacci(4), Fibonacci(5), Fibonacci(6),
Fibonacci(7), Fibonacci(8), Fibonacci(9), Fibonacci(10), Fibonacci(11),
Fibonacci(12), Fibonacci(13), Fibonacci(14), Fibonacci(15), Fibonacci(16),
Fibonacci(17), Fibonacci(18), Fibonacci(19), Fibonacci(20), Fibonacci(21),
Fibonacci(22), Fibonacci(23), Fibonacci(24), Fibonacci(25), Fibonacci(26),
Fibonacci(27), Fibonacci(28), Fibonacci(29), Fibonacci(30), Fibonacci(31),
Fibonacci(32), Fibonacci(33), Fibonacci(34), Fibonacci(35), Fibonacci(36),
Fibonacci(37), Fibonacci(38), Fibonacci(39), Fibonacci(40), Fibonacci(41),
Fibonacci(42), Fibonacci(43), Fibonacci(44), Fibonacci(45), Fibonacci(46),
Fibonacci(47), Fibonacci(48), Fibonacci(49), Fibonacci(50), Fibonacci(51),
Fibonacci(52), Fibonacci(53), Fibonacci(54), Fibonacci(55), Fibonacci(56),
Fibonacci(57), Fibonacci(58), Fibonacci(59), Fibonacci(60), Fibonacci(61),
Fibonacci(62), Fibonacci(63), Fibonacci(64), Fibonacci(65), Fibonacci(66),
Fibonacci(67), Fibonacci(68), Fibonacci(69), Fibonacci(70), Fibonacci(71),
Fibonacci(72), Fibonacci(73), Fibonacci(74), Fibonacci(75), Fibonacci(76),
Fibonacci(77), Fibonacci(78), Fibonacci(79), Fibonacci(80), Fibonacci(81),
Fibonacci(82), Fibonacci(83), Fibonacci(84), Fibonacci(85), Fibonacci(86),
Fibonacci(87), Fibonacci(88), Fibonacci(89), Fibonacci(90), Fibonacci(91),
Fibonacci(92), Fibonacci(93)};
static const int kMinLengthSize = ABSL_ARRAYSIZE(min_length);
static_assert(sizeof(kMinLength) / sizeof(uint64_t) ==
(cord_internal::MaxCordDepth() + 1),
"Not enough elements in kMinLength array to cover all the "
"supported Cord depth(s)");
// The inlined size to use with absl::InlinedVector.
//
// Note: The InlinedVectors in this file (and in cord.h) do not need to use
// the same value for their inlined size. The fact that they do is historical.
// It may be desirable for each to use a different inlined size optimized for
// that InlinedVector's usage.
//
// TODO(jgm): Benchmark to see if there's a more optimal value than 47 for
// the inlined vector size (47 exists for backward compatibility).
static const int kInlinedVectorSize = 47;
inline bool ShouldRebalance(const CordRep* node) {
if (node->tag != CONCAT) return false;
static inline bool IsRootBalanced(CordRep* node) {
if (node->tag != CONCAT) {
return true;
} else if (node->concat()->depth() <= 15) {
return true;
} else if (node->concat()->depth() > kMinLengthSize) {
return false;
} else {
// Allow depth to become twice as large as implied by fibonacci rule to
// reduce rebalancing for larger strings.
return (node->length >= min_length[node->concat()->depth() / 2]);
}
size_t node_depth = node->concat()->depth();
if (node_depth <= 15) return false;
// Rebalancing Cords is expensive, so we reduce how often rebalancing occurs
// by allowing shallow Cords to have twice the depth that the Fibonacci rule
// would otherwise imply. Deep Cords need to follow the rule more closely,
// however to ensure algorithm correctness. We implement this with linear
// interpolation. Cords of depth 16 are treated as though they have a depth
// of 16 * 1/2, and Cords of depth MaxCordDepth() interpolate to
// MaxCordDepth() * 1.
return node->length <
kMinLength[(node_depth * (cord_internal::MaxCordDepth() - 16)) /
(2 * cord_internal::MaxCordDepth() - 16 - node_depth)];
}
// Unlike the root balancing condition, this one is part of the re-balancing
// algorithm and must always be checked against the exact depth for the
// algorithm to be correct.
inline bool IsNodeBalanced(const CordRep* node) {
if (node->tag != CONCAT) return true;
size_t node_depth = node->concat()->depth();
return node->length >= kMinLength[node_depth];
}
static CordRep* Rebalance(CordRep* node);
static void DumpNode(CordRep* rep, bool include_data, std::ostream* os);
static bool VerifyNode(CordRep* root, CordRep* start_node,
static void DumpNode(const CordRep* rep, bool include_data, std::ostream* os);
static bool VerifyNode(const CordRep* root, const CordRep* start_node,
bool full_validation);
static inline CordRep* VerifyTree(CordRep* node) {
@@ -318,7 +304,8 @@ __attribute__((preserve_most))
static void UnrefInternal(CordRep* rep) {
assert(rep != nullptr);
absl::InlinedVector<CordRep*, kInlinedVectorSize> pending;
cord_internal::RebalancingStack pending;
while (true) {
if (rep->tag == CONCAT) {
CordRepConcat* rep_concat = rep->concat();
@@ -400,6 +387,11 @@ static void SetConcatChildren(CordRepConcat* concat, CordRep* left,
concat->length = left->length + right->length;
concat->set_depth(1 + std::max(Depth(left), Depth(right)));
ABSL_INTERNAL_CHECK(concat->depth() <= cord_internal::MaxCordDepth(),
"Cord depth exceeds max");
ABSL_INTERNAL_CHECK(concat->length >= left->length, "Cord is too long");
ABSL_INTERNAL_CHECK(concat->length >= right->length, "Cord is too long");
}
// Create a concatenation of the specified nodes.
@@ -425,7 +417,7 @@ static CordRep* RawConcat(CordRep* left, CordRep* right) {
static CordRep* Concat(CordRep* left, CordRep* right) {
CordRep* rep = RawConcat(left, right);
if (rep != nullptr && !IsRootBalanced(rep)) {
if (rep != nullptr && ShouldRebalance(rep)) {
rep = Rebalance(rep);
}
return VerifyTree(rep);
@@ -916,7 +908,7 @@ void Cord::Prepend(absl::string_view src) {
static CordRep* RemovePrefixFrom(CordRep* node, size_t n) {
if (n >= node->length) return nullptr;
if (n == 0) return Ref(node);
absl::InlinedVector<CordRep*, kInlinedVectorSize> rhs_stack;
cord_internal::CordTreeMutablePath rhs_stack;
while (node->tag == CONCAT) {
assert(n <= node->length);
@@ -957,7 +949,7 @@ static CordRep* RemovePrefixFrom(CordRep* node, size_t n) {
static CordRep* RemoveSuffixFrom(CordRep* node, size_t n) {
if (n >= node->length) return nullptr;
if (n == 0) return Ref(node);
absl::InlinedVector<CordRep*, kInlinedVectorSize> lhs_stack;
absl::cord_internal::CordTreeMutablePath lhs_stack;
bool inplace_ok = node->refcount.IsOne();
while (node->tag == CONCAT) {
@@ -1028,6 +1020,7 @@ void Cord::RemoveSuffix(size_t n) {
// Work item for NewSubRange().
struct SubRange {
SubRange() = default;
SubRange(CordRep* a_node, size_t a_pos, size_t a_n)
: node(a_node), pos(a_pos), n(a_n) {}
CordRep* node; // nullptr means concat last 2 results.
@@ -1036,8 +1029,11 @@ struct SubRange {
};
static CordRep* NewSubRange(CordRep* node, size_t pos, size_t n) {
absl::InlinedVector<CordRep*, kInlinedVectorSize> results;
absl::InlinedVector<SubRange, kInlinedVectorSize> todo;
cord_internal::CordTreeMutablePath results;
// In the worst case, the algorithm below adds up to 3 nodes to the `todo`
// list, but it also pops one on every cycle. If the original tree has depth d,
// the todo list can grow up to 2*d in size.
cord_internal::CordTreePath<SubRange, 2 * cord_internal::MaxCordDepth()> todo;
todo.push_back(SubRange(node, pos, n));
do {
const SubRange& sr = todo.back();
@@ -1074,7 +1070,7 @@ static CordRep* NewSubRange(CordRep* node, size_t pos, size_t n) {
}
} while (!todo.empty());
assert(results.size() == 1);
return results[0];
return results.back();
}
Cord Cord::Subcord(size_t pos, size_t new_size) const {
@@ -1113,11 +1109,12 @@ Cord Cord::Subcord(size_t pos, size_t new_size) const {
class CordForest {
public:
explicit CordForest(size_t length)
: root_length_(length), trees_(kMinLengthSize, nullptr) {}
explicit CordForest(size_t length) : root_length_(length), trees_({}) {}
void Build(CordRep* cord_root) {
std::vector<CordRep*> pending = {cord_root};
// We add up to two nodes to the `pending` list, but we also pop one on every
// cycle, so the size of `pending` will never exceed `MaxCordDepth()`.
cord_internal::CordTreeMutablePath pending(cord_root);
while (!pending.empty()) {
CordRep* node = pending.back();
@@ -1129,21 +1126,20 @@ class CordForest {
}
CordRepConcat* concat_node = node->concat();
if (concat_node->depth() >= kMinLengthSize ||
concat_node->length < min_length[concat_node->depth()]) {
pending.push_back(concat_node->right);
pending.push_back(concat_node->left);
if (concat_node->refcount.IsOne()) {
concat_node->left = concat_freelist_;
concat_freelist_ = concat_node;
} else {
Ref(concat_node->right);
Ref(concat_node->left);
Unref(concat_node);
}
} else {
if (IsNodeBalanced(concat_node)) {
AddNode(node);
continue;
}
pending.push_back(concat_node->right);
pending.push_back(concat_node->left);
if (concat_node->refcount.IsOne()) {
concat_node->left = concat_freelist_;
concat_freelist_ = concat_node;
} else {
Ref(concat_node->right);
Ref(concat_node->left);
Unref(concat_node);
}
}
}
@@ -1175,7 +1171,7 @@ class CordForest {
// Collect together everything with which we will merge node
int i = 0;
for (; node->length > min_length[i + 1]; ++i) {
for (; node->length > kMinLength[i + 1]; ++i) {
auto& tree_at_i = trees_[i];
if (tree_at_i == nullptr) continue;
@@ -1186,7 +1182,7 @@ class CordForest {
sum = AppendNode(node, sum);
// Insert sum into appropriate place in the forest
for (; sum->length >= min_length[i]; ++i) {
for (; sum->length >= kMinLength[i]; ++i) {
auto& tree_at_i = trees_[i];
if (tree_at_i == nullptr) continue;
@@ -1194,7 +1190,7 @@ class CordForest {
tree_at_i = nullptr;
}
// min_length[0] == 1, which means sum->length >= min_length[0]
// kMinLength[0] == 1, which means sum->length >= kMinLength[0]
assert(i > 0);
trees_[i - 1] = sum;
}
@@ -1227,9 +1223,7 @@ class CordForest {
}
size_t root_length_;
// use an inlined vector instead of a flat array to get bounds checking
absl::InlinedVector<CordRep*, kInlinedVectorSize> trees_;
std::array<cord_internal::CordRep*, cord_internal::MaxCordDepth()> trees_;
// List of concat nodes we can re-use for Cord balancing.
CordRepConcat* concat_freelist_ = nullptr;
@@ -1841,18 +1835,18 @@ absl::string_view Cord::FlattenSlowPath() {
}
}
static void DumpNode(CordRep* rep, bool include_data, std::ostream* os) {
static void DumpNode(const CordRep* rep, bool include_data, std::ostream* os) {
const int kIndentStep = 1;
int indent = 0;
absl::InlinedVector<CordRep*, kInlinedVectorSize> stack;
absl::InlinedVector<int, kInlinedVectorSize> indents;
cord_internal::CordTreeConstPath stack;
cord_internal::CordTreePath<int, cord_internal::MaxCordDepth()> indents;
for (;;) {
*os << std::setw(3) << rep->refcount.Get();
*os << " " << std::setw(7) << rep->length;
*os << " [";
if (include_data) *os << static_cast<void*>(rep);
if (include_data) *os << static_cast<const void*>(rep);
*os << "]";
*os << " " << (IsRootBalanced(rep) ? 'b' : 'u');
*os << " " << (IsNodeBalanced(rep) ? 'b' : 'u');
*os << " " << std::setw(indent) << "";
if (rep->tag == CONCAT) {
*os << "CONCAT depth=" << Depth(rep) << "\n";
@@ -1873,7 +1867,7 @@ static void DumpNode(CordRep* rep, bool include_data, std::ostream* os) {
} else {
*os << "FLAT cap=" << TagToLength(rep->tag) << " [";
if (include_data)
*os << absl::CEscape(std::string(rep->data, rep->length));
*os << absl::CEscape(absl::string_view(rep->data, rep->length));
*os << "]\n";
}
if (stack.empty()) break;
@@ -1886,19 +1880,19 @@ static void DumpNode(CordRep* rep, bool include_data, std::ostream* os) {
ABSL_INTERNAL_CHECK(indents.empty(), "");
}
static std::string ReportError(CordRep* root, CordRep* node) {
static std::string ReportError(const CordRep* root, const CordRep* node) {
std::ostringstream buf;
buf << "Error at node " << node << " in:";
DumpNode(root, true, &buf);
return buf.str();
}
static bool VerifyNode(CordRep* root, CordRep* start_node,
static bool VerifyNode(const CordRep* root, const CordRep* start_node,
bool full_validation) {
absl::InlinedVector<CordRep*, 2> worklist;
cord_internal::CordTreeConstPath worklist;
worklist.push_back(start_node);
do {
CordRep* node = worklist.back();
const CordRep* node = worklist.back();
worklist.pop_back();
ABSL_INTERNAL_CHECK(node != nullptr, ReportError(root, node));
@@ -1948,7 +1942,7 @@ static bool VerifyNode(CordRep* root, CordRep* start_node,
// Iterate over the tree. cur_node is never a leaf node and leaf nodes will
// never be appended to tree_stack. This reduces overhead from manipulating
// tree_stack.
absl::InlinedVector<const CordRep*, kInlinedVectorSize> tree_stack;
cord_internal::CordTreeConstPath tree_stack;
const CordRep* cur_node = rep;
while (true) {
const CordRep* next_node = nullptr;


@@ -41,13 +41,13 @@
#include <iostream>
#include <iterator>
#include <string>
#include <type_traits>
#include "absl/base/internal/endian.h"
#include "absl/base/internal/invoke.h"
#include "absl/base/internal/per_thread_tls.h"
#include "absl/base/macros.h"
#include "absl/base/port.h"
#include "absl/container/inlined_vector.h"
#include "absl/functional/function_ref.h"
#include "absl/meta/type_traits.h"
#include "absl/strings/internal/cord_internal.h"
@@ -66,6 +66,73 @@ template <typename H>
H HashFragmentedCord(H, const Cord&);
}
namespace cord_internal {
// It's expensive to keep a tree perfectly balanced, so instead we keep trees
// approximately balanced. A tree node N of depth D(N) that contains a string
// of L(N) characters is considered balanced if L >= Fibonacci(D + 2).
// The "+ 2" is used to ensure that every leaf node contains at least one
// character. Here we presume that
// Fibonacci(0) = 0
// Fibonacci(1) = 1
// Fibonacci(2) = 1
// Fibonacci(3) = 2
// ...
//
// Fibonacci numbers are convenient because it means when two balanced trees of
// the same depth are made the children of a new node, the resulting tree is
// guaranteed to also be balanced:
//
//
// L(left) >= Fibonacci(D(left) + 2)
// L(right) >= Fibonacci(D(right) + 2)
//
// L(left) + L(right) >= Fibonacci(D(left) + 2) + Fibonacci(D(right) + 2)
// L(left) + L(right) == L(new_tree)
//
// L(new_tree) >= 2 * Fibonacci(D(child) + 2)
// D(child) == D(new_tree) - 1
//
// L(new_tree) >= 2 * Fibonacci(D(new_tree) + 1)
// 2 * Fibonacci(N) >= Fibonacci(N + 1)
//
// L(new_tree) >= Fibonacci(D(new_tree) + 2)
//
//
// The 93rd Fibonacci number is the largest Fibonacci number that can be
// represented in 64 bits, so the size of a balanced Cord of depth 92 is too big
// for an unsigned 64 bit integer to hold. Therefore we can safely assume that
// the maximum depth of a Cord is 91.
constexpr size_t MaxCordDepth() { return 91; }
// This class models fixed max size stack of CordRep pointers.
// The elements are being pushed back and popped from the back.
template <typename CordRepPtr, size_t N>
class CordTreePath {
public:
CordTreePath() {}
explicit CordTreePath(CordRepPtr root) { push_back(root); }
bool empty() const { return size_ == 0; }
size_t size() const { return size_; }
void clear() { size_ = 0; }
CordRepPtr back() { return data_[size_ - 1]; }
void pop_back() {
--size_;
assert(size_ < N);
}
void push_back(CordRepPtr elem) { data_[size_++] = elem; }
private:
CordRepPtr data_[N];
size_t size_ = 0;
};
using CordTreeMutablePath = CordTreePath<CordRep*, MaxCordDepth()>;
} // namespace cord_internal
// A Cord is a sequence of characters.
class Cord {
private:
@@ -114,7 +181,8 @@ class Cord {
// finished with `data`. The data must remain live and unchanging until the
// releaser is called. The requirements for the releaser are that it:
// * is move constructible,
// * supports `void operator()(absl::string_view) const`,
// * supports `void operator()(absl::string_view) const` or
// `void operator()() const`,
// * does not have alignment requirement greater than what is guaranteed by
// ::operator new. This is dictated by alignof(std::max_align_t) before
// C++17 and __STDCPP_DEFAULT_NEW_ALIGNMENT__ if compiling with C++17 or
@@ -127,8 +195,8 @@ class Cord {
// FillBlock(block);
// return absl::MakeCordFromExternal(
// block->ToStringView(),
// [pool, block](absl::string_view /*ignored*/) {
// pool->FreeBlock(block);
// [pool, block](absl::string_view v) {
// pool->FreeBlock(block, v);
// });
// }
//
@@ -282,8 +350,7 @@ class Cord {
absl::cord_internal::CordRep* current_leaf_ = nullptr;
// The number of bytes left in the `Cord` over which we are iterating.
size_t bytes_remaining_ = 0;
absl::InlinedVector<absl::cord_internal::CordRep*, 4>
stack_of_right_children_;
absl::cord_internal::CordTreeMutablePath stack_of_right_children_;
};
// Returns an iterator to the first chunk of the `Cord`.
@@ -667,6 +734,21 @@ ExternalRepReleaserPair NewExternalWithUninitializedReleaser(
absl::string_view data, ExternalReleaserInvoker invoker,
size_t releaser_size);
struct Rank1 {};
struct Rank0 : Rank1 {};
template <typename Releaser, typename = ::absl::base_internal::InvokeT<
Releaser, absl::string_view>>
void InvokeReleaser(Rank0, Releaser&& releaser, absl::string_view data) {
::absl::base_internal::Invoke(std::forward<Releaser>(releaser), data);
}
template <typename Releaser,
typename = ::absl::base_internal::InvokeT<Releaser>>
void InvokeReleaser(Rank1, Releaser&& releaser, absl::string_view) {
::absl::base_internal::Invoke(std::forward<Releaser>(releaser));
}
// Creates a new `CordRep` that owns `data` and `releaser` and returns a pointer
// to it, or `nullptr` if `data` was empty.
template <typename Releaser>
@@ -684,14 +766,14 @@ CordRep* NewExternalRep(absl::string_view data, Releaser&& releaser) {
using ReleaserType = absl::decay_t<Releaser>;
if (data.empty()) {
// Never create empty external nodes.
::absl::base_internal::Invoke(
ReleaserType(std::forward<Releaser>(releaser)), data);
InvokeReleaser(Rank0{}, ReleaserType(std::forward<Releaser>(releaser)),
data);
return nullptr;
}
auto releaser_invoker = [](void* type_erased_releaser, absl::string_view d) {
auto* my_releaser = static_cast<ReleaserType*>(type_erased_releaser);
::absl::base_internal::Invoke(std::move(*my_releaser), d);
InvokeReleaser(Rank0{}, std::move(*my_releaser), d);
my_releaser->~ReleaserType();
return sizeof(Releaser);
};


@@ -1032,6 +1032,19 @@ TEST(ConstructFromExternal, MoveOnlyReleaser) {
EXPECT_TRUE(invoked);
}
TEST(ConstructFromExternal, NoArgLambda) {
bool invoked = false;
(void)absl::MakeCordFromExternal("dummy", [&invoked]() { invoked = true; });
EXPECT_TRUE(invoked);
}
TEST(ConstructFromExternal, StringViewArgLambda) {
bool invoked = false;
(void)absl::MakeCordFromExternal(
"dummy", [&invoked](absl::string_view) { invoked = true; });
EXPECT_TRUE(invoked);
}
TEST(ConstructFromExternal, NonTrivialReleaserDestructor) {
struct Releaser {
explicit Releaser(bool* destroyed) : destroyed(destroyed) {}
@@ -1346,6 +1359,49 @@ TEST(CordChunkIterator, Operations) {
VerifyChunkIterator(subcords, 128);
}
TEST(CordChunkIterator, MaxLengthFullTree) {
absl::Cord cord;
size_t size = 1;
AddExternalMemory("x", &cord);
EXPECT_EQ(cord.size(), size);
for (int i = 0; i < 63; ++i) {
cord.Prepend(absl::Cord(cord));
size <<= 1;
EXPECT_EQ(cord.size(), size);
auto chunk_it = cord.chunk_begin();
EXPECT_EQ(*chunk_it, "x");
}
EXPECT_DEATH_IF_SUPPORTED(
(cord.Prepend(absl::Cord(cord)), *cord.chunk_begin()),
"Cord is too long");
}
TEST(CordChunkIterator, MaxDepth) {
// By reusing nodes, it's possible in pathological cases to build a Cord that
// exceeds both the maximum permissible length and depth. In this case, the
// violation of the maximum depth is reported.
absl::Cord left_child;
AddExternalMemory("x", &left_child);
absl::Cord root = left_child;
for (int i = 0; i < 91; ++i) {
size_t new_size = left_child.size() + root.size();
root.Prepend(left_child);
EXPECT_EQ(root.size(), new_size);
auto chunk_it = root.chunk_begin();
EXPECT_EQ(*chunk_it, "x");
std::swap(left_child, root);
}
EXPECT_DEATH_IF_SUPPORTED(root.Prepend(left_child), "Cord depth exceeds max");
}
TEST(CordCharIterator, Traits) {
static_assert(std::is_copy_constructible<absl::Cord::CharIterator>::value,
"");