Export of internal Abseil changes.

--
c321829735accc2e6beb81e6a5a4421e5647b876 by CJ Johnson <johnsoncj@google.com>:

Updates the definition of InlinedVector::swap(InlinedVector&) to be exception-safe and adds exception-safety tests (the testing pattern is sketched below).

PiperOrigin-RevId: 255511536
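
For reference, the added tests are built on Abseil's exception-safety testing harness, which appears in the test file further down in this diff. The following is a rough sketch of that pattern, not the actual test code; ThrowerVec and SizeStaysWithinCapacity are illustrative stand-ins for the Thrower-based aliases and the InlinedVectorInvariants contract defined in the real test file.

#include "absl/base/internal/exception_safety_testing.h"
#include "absl/container/inlined_vector.h"
#include "gtest/gtest.h"

// Illustrative element type whose copy/move operations can be made to throw.
using Thrower = testing::ThrowingValue<>;
using ThrowerVec = absl::InlinedVector<Thrower, 4>;

// Illustrative contract: a property that must still hold if an exception
// escapes the operation under test.
testing::AssertionResult SizeStaysWithinCapacity(ThrowerVec* vec) {
  if (vec->size() <= vec->capacity()) return testing::AssertionSuccess();
  return testing::AssertionFailure() << "size exceeds capacity";
}

bool SwapIsExceptionSafe() {
  // The tester replays the lambda, forcing each potential throw site to throw
  // in turn, and checks every contract after each induced exception.
  auto tester = testing::MakeExceptionSafetyTester()
                    .WithInitialValue(ThrowerVec(3))
                    .WithContracts(SizeStaysWithinCapacity);
  return tester.Test([](ThrowerVec* vec) {
    ThrowerVec other_vec(5);
    vec->swap(other_vec);
  });
}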

--
0d86445891748efb09430eb9ede267b54185a246 by CJ Johnson <johnsoncj@google.com>:

Updates the definition of InlinedVector::erase(...) to be exception-safe and adds an exception safety test for it (a usage sketch follows below).

PiperOrigin-RevId: 255492671
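
As background, both erase overloads keep the usual container contract, restated in the header comments visible in the diff below: they return an iterator to the first element following what was removed. A small usage sketch (illustrative, not taken from this change):

#include "absl/container/inlined_vector.h"

void EraseExample() {
  absl::InlinedVector<int, 4> v = {1, 2, 3, 4, 5};

  // Single-element erase: returns an iterator to the element that now
  // occupies the erased position.
  auto it = v.erase(v.begin() + 1);  // v == {1, 3, 4, 5}, *it == 3

  // Range erase: returns an iterator to the first element following the
  // erased range (or end() if the range extended to the end).
  it = v.erase(it, it + 2);  // v == {1, 5}, *it == 5
}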

--
f07e8fa62dfe9eb0d025b27fca8c6db43c5a328f by CJ Johnson <johnsoncj@google.com>:

Updates the implementation of InlinedVector::emplace_back(...) to be exception-safe and adds exception-safety tests (a usage sketch follows below).

PiperOrigin-RevId: 255422837
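
For context, emplace_back constructs the new element in place from its arguments and returns a reference to it (see the header comment in the diff below). A small usage sketch (illustrative, not taken from this change):

#include <string>

#include "absl/container/inlined_vector.h"

void EmplaceBackExample() {
  absl::InlinedVector<std::string, 2> v;

  // Arguments are forwarded to the element constructor; the returned
  // reference designates the newly appended element.
  std::string& s = v.emplace_back(3, 'x');  // v == {"xxx"}
  s += "!";                                 // v.back() == "xxx!"
}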

--
4c3be92bfe4c1636a03cef8fd5aa802fed0d2c61 by Abseil Team <absl-team@google.com>:

Internal Change

PiperOrigin-RevId: 255422693

--
6df38ea42f00678c357a539016163f8ac4c084e6 by Gennadiy Rozental <rogeeff@google.com>:

Introduce public interfaces for setting and getting program usage messages (a usage sketch follows below).

PiperOrigin-RevId: 255291467
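
A minimal usage sketch, assuming the new interfaces are the absl::SetProgramUsageMessage() / absl::ProgramUsageMessage() pair declared in absl/flags/usage.h (the exact names and header are not shown in this export):

#include <iostream>

#include "absl/flags/parse.h"
#include "absl/flags/usage.h"

int main(int argc, char* argv[]) {
  // Set the usage message before parsing so that --help and usage errors
  // can display it.
  absl::SetProgramUsageMessage(
      "Sample usage:\n  mytool --input=<file> [--verbose]");
  absl::ParseCommandLine(argc, argv);

  // The matching getter returns whatever message was set above.
  std::cout << absl::ProgramUsageMessage() << "\n";
  return 0;
}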

--
8f21d594aed3971d37db70226847c693eb548edb by Laramie Leavitt <lar@google.com>:

Move absl/random's copies of ABSL_ATTRIBUTE_FORCE_INLINE and
ABSL_ATTRIBUTE_NEVER_INLINE into .cc files and rename them to
prevent conflicts (the pattern is sketched below).

https://github.com/abseil/abseil-cpp/issues/343

PiperOrigin-RevId: 255288599
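
The conflicts being avoided are macro redefinitions: an attribute macro like ABSL_ATTRIBUTE_FORCE_INLINE defined for header-wide use can collide with another definition of the same name in an including translation unit. A rough sketch of the pattern described above, using an illustrative replacement name (not the one actually chosen) kept local to a .cc file:

// example_random_internal.cc -- illustrative file, not part of this change.
//
// Define the attribute under a library-specific name, local to this
// translation unit, so it cannot clash with other FORCE_INLINE macros.
#if defined(ABSL_RANDOM_EXAMPLE_FORCE_INLINE)
#error "ABSL_RANDOM_EXAMPLE_FORCE_INLINE cannot be set directly."
#elif defined(_MSC_VER)
#define ABSL_RANDOM_EXAMPLE_FORCE_INLINE __forceinline
#elif defined(__GNUC__) || defined(__clang__)
#define ABSL_RANDOM_EXAMPLE_FORCE_INLINE inline __attribute__((always_inline))
#else
#define ABSL_RANDOM_EXAMPLE_FORCE_INLINE inline
#endif

ABSL_RANDOM_EXAMPLE_FORCE_INLINE unsigned MixBits(unsigned x) {
  return x * 2654435761u;  // Multiplicative hashing step, for illustration.
}

// Undefine before the end of the file so the name never leaks.
#undef ABSL_RANDOM_EXAMPLE_FORCE_INLINE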

--
6b7430ad0c8bd860fb9394894f5eeedd1acc9f77 by CJ Johnson <johnsoncj@google.com>:

Updates the ScopedAllocatorWorks test for InlinedVector so that it does not rely on the byte count allocated by the standard library.

In doing so, removes the LegacyNextCapacityFrom(...) implementation function from InlinedVector.

Also applies clang-format to the test file.

PiperOrigin-RevId: 255207606
GitOrigin-RevId: c321829735accc2e6beb81e6a5a4421e5647b876
Change-Id: I7438211c36c4549fca2e866658f8d579c65d7d52
Authored by Abseil Team on 2019-06-27 17:24:26 -07:00; committed by Shaindel Schwartz.
parent 72e09a54d9
commit c964fcffac
16 changed files with 518 additions and 367 deletions

File: absl/container/inlined_vector.h

@@ -640,28 +640,7 @@ class InlinedVector {
// returning a `reference` to the emplaced element.
template <typename... Args>
reference emplace_back(Args&&... args) {
size_type s = size();
if (ABSL_PREDICT_FALSE(s == capacity())) {
size_type new_capacity = 2 * capacity();
pointer new_data =
AllocatorTraits::allocate(*storage_.GetAllocPtr(), new_capacity);
reference new_element =
Construct(new_data + s, std::forward<Args>(args)...);
UninitializedCopy(std::make_move_iterator(data()),
std::make_move_iterator(data() + s), new_data);
ResetAllocation(new_data, new_capacity, s + 1);
return new_element;
} else {
pointer space;
if (storage_.GetIsAllocated()) {
storage_.SetAllocatedSize(s + 1);
space = storage_.GetAllocatedData();
} else {
storage_.SetInlinedSize(s + 1);
space = storage_.GetInlinedData();
}
return Construct(space + s, std::forward<Args>(args)...);
}
return storage_.EmplaceBack(std::forward<Args>(args)...);
}
// `InlinedVector::push_back()`
@@ -696,10 +675,7 @@ class InlinedVector {
assert(pos >= begin());
assert(pos < end());
iterator position = const_cast<iterator>(pos);
std::move(position + 1, end(), position);
pop_back();
return position;
return storage_.Erase(pos, pos + 1);
}
// Overload of `InlinedVector::erase()` for erasing all elements in the
@@ -707,28 +683,15 @@ class InlinedVector {
// to the first element following the range erased or the end iterator if `to`
// was the end iterator.
iterator erase(const_iterator from, const_iterator to) {
assert(begin() <= from);
assert(from >= begin());
assert(from <= to);
assert(to <= end());
iterator range_start = const_cast<iterator>(from);
iterator range_end = const_cast<iterator>(to);
size_type s = size();
ptrdiff_t erase_gap = std::distance(range_start, range_end);
if (erase_gap > 0) {
pointer space;
if (storage_.GetIsAllocated()) {
space = storage_.GetAllocatedData();
storage_.SetAllocatedSize(s - erase_gap);
} else {
space = storage_.GetInlinedData();
storage_.SetInlinedSize(s - erase_gap);
}
std::move(range_end, space + s, range_start);
Destroy(space + s - erase_gap, space + s);
if (ABSL_PREDICT_TRUE(from != to)) {
return storage_.Erase(from, to);
} else {
return const_cast<iterator>(from);
}
return range_start;
}
// `InlinedVector::clear()`
@@ -774,96 +737,9 @@ class InlinedVector {
//
// Swaps the contents of this inlined vector with the contents of `other`.
void swap(InlinedVector& other) {
using std::swap;
if (ABSL_PREDICT_FALSE(this == std::addressof(other))) {
return;
if (ABSL_PREDICT_TRUE(this != std::addressof(other))) {
storage_.Swap(std::addressof(other.storage_));
}
bool is_allocated = storage_.GetIsAllocated();
bool other_is_allocated = other.storage_.GetIsAllocated();
if (is_allocated && other_is_allocated) {
// Both out of line, so just swap the tag, allocation, and allocator.
storage_.SwapSizeAndIsAllocated(std::addressof(other.storage_));
storage_.SwapAllocatedSizeAndCapacity(std::addressof(other.storage_));
swap(*storage_.GetAllocPtr(), *other.storage_.GetAllocPtr());
return;
}
if (!is_allocated && !other_is_allocated) {
// Both inlined: swap up to smaller size, then move remaining elements.
InlinedVector* a = this;
InlinedVector* b = std::addressof(other);
if (size() < other.size()) {
swap(a, b);
}
const size_type a_size = a->size();
const size_type b_size = b->size();
assert(a_size >= b_size);
// `a` is larger. Swap the elements up to the smaller array size.
std::swap_ranges(a->storage_.GetInlinedData(),
a->storage_.GetInlinedData() + b_size,
b->storage_.GetInlinedData());
// Move the remaining elements:
// [`b_size`, `a_size`) from `a` -> [`b_size`, `a_size`) from `b`
b->UninitializedCopy(a->storage_.GetInlinedData() + b_size,
a->storage_.GetInlinedData() + a_size,
b->storage_.GetInlinedData() + b_size);
a->Destroy(a->storage_.GetInlinedData() + b_size,
a->storage_.GetInlinedData() + a_size);
storage_.SwapSizeAndIsAllocated(std::addressof(other.storage_));
swap(*storage_.GetAllocPtr(), *other.storage_.GetAllocPtr());
assert(b->size() == a_size);
assert(a->size() == b_size);
return;
}
// One is out of line, one is inline.
// We first move the elements from the inlined vector into the
// inlined space in the other vector. We then put the other vector's
// pointer/capacity into the originally inlined vector and swap
// the tags.
InlinedVector* a = this;
InlinedVector* b = std::addressof(other);
if (a->storage_.GetIsAllocated()) {
swap(a, b);
}
assert(!a->storage_.GetIsAllocated());
assert(b->storage_.GetIsAllocated());
const size_type a_size = a->size();
const size_type b_size = b->size();
// In an optimized build, `b_size` would be unused.
static_cast<void>(b_size);
// Made Local copies of `size()`, these can now be swapped
a->storage_.SwapSizeAndIsAllocated(std::addressof(b->storage_));
// Copy out before `b`'s union gets clobbered by `inline_space`
pointer b_data = b->storage_.GetAllocatedData();
size_type b_capacity = b->storage_.GetAllocatedCapacity();
b->UninitializedCopy(a->storage_.GetInlinedData(),
a->storage_.GetInlinedData() + a_size,
b->storage_.GetInlinedData());
a->Destroy(a->storage_.GetInlinedData(),
a->storage_.GetInlinedData() + a_size);
a->storage_.SetAllocatedData(b_data, b_capacity);
if (*a->storage_.GetAllocPtr() != *b->storage_.GetAllocPtr()) {
swap(*a->storage_.GetAllocPtr(), *b->storage_.GetAllocPtr());
}
assert(b->size() == a_size);
assert(a->size() == b_size);
}
private:

File: absl/container/inlined_vector_exception_safety_test.cc

@@ -279,12 +279,34 @@ TYPED_TEST(TwoSizeTest, Resize) {
}));
}
TYPED_TEST(OneSizeTest, EmplaceBack) {
using VecT = typename TypeParam::VecT;
constexpr static auto size = TypeParam::GetSizeAt(0);
VecT full_vec{size};
full_vec.resize(full_vec.capacity());
VecT nonfull_vec{size};
nonfull_vec.reserve(size + 1);
auto tester = testing::MakeExceptionSafetyTester().WithContracts(
InlinedVectorInvariants<VecT>);
EXPECT_TRUE(tester.WithInitialValue(nonfull_vec).Test([](VecT* vec) {
vec->emplace_back(); //
}));
EXPECT_TRUE(tester.WithInitialValue(full_vec).Test([](VecT* vec) {
vec->emplace_back(); //
}));
}
TYPED_TEST(OneSizeTest, PopBack) {
using VecT = typename TypeParam::VecT;
constexpr static auto size = TypeParam::GetSizeAt(0);
auto tester = testing::MakeExceptionSafetyTester()
.WithInitialValue(VecT(size))
.WithInitialValue(VecT{size})
.WithContracts(NoThrowGuarantee<VecT>);
EXPECT_TRUE(tester.Test([](VecT* vec) {
@@ -292,12 +314,47 @@ TYPED_TEST(OneSizeTest, PopBack) {
}));
}
TYPED_TEST(OneSizeTest, Erase) {
using VecT = typename TypeParam::VecT;
constexpr static auto size = TypeParam::GetSizeAt(0);
auto tester = testing::MakeExceptionSafetyTester()
.WithInitialValue(VecT{size})
.WithContracts(InlinedVectorInvariants<VecT>);
EXPECT_TRUE(tester.Test([](VecT* vec) {
auto it = vec->begin();
vec->erase(it);
}));
EXPECT_TRUE(tester.Test([](VecT* vec) {
auto it = vec->begin() + (vec->size() / 2);
vec->erase(it);
}));
EXPECT_TRUE(tester.Test([](VecT* vec) {
auto it = vec->begin() + (vec->size() - 1);
vec->erase(it);
}));
EXPECT_TRUE(tester.Test([](VecT* vec) {
auto it = vec->begin();
vec->erase(it, it + 1);
}));
EXPECT_TRUE(tester.Test([](VecT* vec) {
auto it = vec->begin() + (vec->size() / 2);
vec->erase(it, it + 1);
}));
EXPECT_TRUE(tester.Test([](VecT* vec) {
auto it = vec->begin() + (vec->size() - 1);
vec->erase(it, it + 1);
}));
}
TYPED_TEST(OneSizeTest, Clear) {
using VecT = typename TypeParam::VecT;
constexpr static auto size = TypeParam::GetSizeAt(0);
auto tester = testing::MakeExceptionSafetyTester()
.WithInitialValue(VecT(size))
.WithInitialValue(VecT{size})
.WithContracts(NoThrowGuarantee<VecT>);
EXPECT_TRUE(tester.Test([](VecT* vec) {
@@ -332,4 +389,25 @@ TYPED_TEST(OneSizeTest, ShrinkToFit) {
}));
}
TYPED_TEST(TwoSizeTest, Swap) {
using VecT = typename TypeParam::VecT;
constexpr static auto from_size = TypeParam::GetSizeAt(0);
constexpr static auto to_size = TypeParam::GetSizeAt(1);
auto tester = testing::MakeExceptionSafetyTester()
.WithInitialValue(VecT{from_size})
.WithContracts(InlinedVectorInvariants<VecT>);
EXPECT_TRUE(tester.Test([](VecT* vec) {
VecT other_vec{to_size};
vec->swap(other_vec);
}));
EXPECT_TRUE(tester.Test([](VecT* vec) {
using std::swap;
VecT other_vec{to_size};
swap(*vec, other_vec);
}));
}
} // namespace

File: absl/container/inlined_vector_test.cc

@@ -76,12 +76,9 @@ TYPED_TEST_SUITE_P(InstanceTest);
// destroyed in the erase(begin, end) test.
class RefCounted {
public:
RefCounted(int value, int* count) : value_(value), count_(count) {
Ref();
}
RefCounted(int value, int* count) : value_(value), count_(count) { Ref(); }
RefCounted(const RefCounted& v)
: value_(v.value_), count_(v.count_) {
RefCounted(const RefCounted& v) : value_(v.value_), count_(v.count_) {
Ref();
}
@@ -290,7 +287,7 @@ TEST(RefCountedVec, EraseBeginEnd) {
}
// Check that the elements at the end are preserved.
for (int i = erase_end; i< len; ++i) {
for (int i = erase_end; i < len; ++i) {
EXPECT_EQ(1, counts[i]);
}
}
@@ -552,10 +549,10 @@ TEST(IntVec, Resize) {
static const int kResizeElem = 1000000;
for (int k = 0; k < 10; k++) {
// Enlarging resize
v.resize(len+k, kResizeElem);
EXPECT_EQ(len+k, v.size());
EXPECT_LE(len+k, v.capacity());
for (int i = 0; i < len+k; i++) {
v.resize(len + k, kResizeElem);
EXPECT_EQ(len + k, v.size());
EXPECT_LE(len + k, v.capacity());
for (int i = 0; i < len + k; i++) {
if (i < len) {
EXPECT_EQ(i, v[i]);
} else {
@@ -866,7 +863,7 @@ TYPED_TEST_P(InstanceTest, Swap) {
auto min_len = std::min(l1, l2);
auto max_len = std::max(l1, l2);
for (int i = 0; i < l1; i++) a.push_back(Instance(i));
for (int i = 0; i < l2; i++) b.push_back(Instance(100+i));
for (int i = 0; i < l2; i++) b.push_back(Instance(100 + i));
EXPECT_EQ(tracker.instances(), l1 + l2);
tracker.ResetCopiesMovesSwaps();
{
@@ -934,7 +931,7 @@ TEST(IntVec, EqualAndNotEqual) {
EXPECT_FALSE(a == b);
EXPECT_TRUE(a != b);
b[i] = b[i] - 1; // Back to before
b[i] = b[i] - 1; // Back to before
EXPECT_TRUE(a == b);
EXPECT_FALSE(a != b);
}
@@ -1001,7 +998,7 @@ TYPED_TEST_P(InstanceTest, CountConstructorsDestructors) {
// reserve() must not increase the number of initialized objects
SCOPED_TRACE("reserve");
v.reserve(len+1000);
v.reserve(len + 1000);
EXPECT_EQ(tracker.instances(), len);
EXPECT_EQ(tracker.copies() + tracker.moves(), len);
@@ -1247,9 +1244,8 @@ void InstanceCountElemAssignWithAllocationTest() {
absl::InlinedVector<Instance, 2> v(original_contents.begin(),
original_contents.end());
v.assign(3, Instance(123));
EXPECT_THAT(v,
AllOf(SizeIs(3),
ElementsAre(ValueIs(123), ValueIs(123), ValueIs(123))));
EXPECT_THAT(v, AllOf(SizeIs(3), ElementsAre(ValueIs(123), ValueIs(123),
ValueIs(123))));
EXPECT_LE(v.size(), v.capacity());
}
}
@@ -1528,8 +1524,8 @@ TYPED_TEST_P(InstanceTest, InitializerListAssign) {
SCOPED_TRACE(original_size);
absl::InlinedVector<Instance, 2> v(original_size, Instance(12345));
v.assign({Instance(3), Instance(4), Instance(5)});
EXPECT_THAT(v, AllOf(SizeIs(3),
ElementsAre(ValueIs(3), ValueIs(4), ValueIs(5))));
EXPECT_THAT(
v, AllOf(SizeIs(3), ElementsAre(ValueIs(3), ValueIs(4), ValueIs(5))));
EXPECT_LE(3, v.capacity());
}
}
@@ -1554,7 +1550,7 @@ TEST(DynamicVec, DynamicVecCompiles) {
TEST(AllocatorSupportTest, Constructors) {
using MyAlloc = CountingAllocator<int>;
using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
const int ia[] = { 0, 1, 2, 3, 4, 5, 6, 7 };
const int ia[] = {0, 1, 2, 3, 4, 5, 6, 7};
int64_t allocated = 0;
MyAlloc alloc(&allocated);
{ AllocVec ABSL_ATTRIBUTE_UNUSED v; }
@@ -1570,7 +1566,7 @@ TEST(AllocatorSupportTest, Constructors) {
TEST(AllocatorSupportTest, CountAllocations) {
using MyAlloc = CountingAllocator<int>;
using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
const int ia[] = { 0, 1, 2, 3, 4, 5, 6, 7 };
const int ia[] = {0, 1, 2, 3, 4, 5, 6, 7};
int64_t allocated = 0;
MyAlloc alloc(&allocated);
{
@@ -1634,8 +1630,8 @@ TEST(AllocatorSupportTest, SwapBothAllocated) {
int64_t allocated1 = 0;
int64_t allocated2 = 0;
{
const int ia1[] = { 0, 1, 2, 3, 4, 5, 6, 7 };
const int ia2[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8 };
const int ia1[] = {0, 1, 2, 3, 4, 5, 6, 7};
const int ia2[] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
MyAlloc a1(&allocated1);
MyAlloc a2(&allocated2);
AllocVec v1(ia1, ia1 + ABSL_ARRAYSIZE(ia1), a1);
@@ -1659,8 +1655,8 @@ TEST(AllocatorSupportTest, SwapOneAllocated) {
int64_t allocated1 = 0;
int64_t allocated2 = 0;
{
const int ia1[] = { 0, 1, 2, 3, 4, 5, 6, 7 };
const int ia2[] = { 0, 1, 2, 3 };
const int ia1[] = {0, 1, 2, 3, 4, 5, 6, 7};
const int ia2[] = {0, 1, 2, 3};
MyAlloc a1(&allocated1);
MyAlloc a2(&allocated2);
AllocVec v1(ia1, ia1 + ABSL_ARRAYSIZE(ia1), a1);
@@ -1681,65 +1677,42 @@ TEST(AllocatorSupportTest, SwapOneAllocated) {
TEST(AllocatorSupportTest, ScopedAllocatorWorks) {
using StdVector = std::vector<int, CountingAllocator<int>>;
using MyAlloc =
std::scoped_allocator_adaptor<CountingAllocator<StdVector>>;
using AllocVec = absl::InlinedVector<StdVector, 4, MyAlloc>;
using Alloc = CountingAllocator<StdVector>;
using ScopedAlloc = std::scoped_allocator_adaptor<Alloc>;
using AllocVec = absl::InlinedVector<StdVector, 1, ScopedAlloc>;
// MSVC 2017's std::vector allocates different amounts of memory in debug
// versus opt mode.
int64_t test_allocated = 0;
StdVector v(CountingAllocator<int>{&test_allocated});
// The amount of memory allocated by a default constructed vector<int>
auto default_std_vec_allocated = test_allocated;
v.push_back(1);
// The amount of memory allocated by a copy-constructed vector<int> with one
// element.
int64_t one_element_std_vec_copy_allocated = test_allocated;
{
int64_t total_allocated_byte_count = 0;
int64_t allocated = 0;
AllocVec vec(MyAlloc{CountingAllocator<StdVector>{&allocated}});
EXPECT_EQ(allocated, 0);
AllocVec inlined_case(ScopedAlloc(Alloc(+&total_allocated_byte_count)));
inlined_case.emplace_back();
// This default constructs a vector<int>, but the allocator should pass itself
// into the vector<int>, so check allocation compared to that.
// The absl::InlinedVector does not allocate any memory.
// The vector<int> may allocate any memory.
auto expected = default_std_vec_allocated;
vec.resize(1);
EXPECT_EQ(allocated, expected);
int64_t absl_responsible_for_count = total_allocated_byte_count;
EXPECT_EQ(absl_responsible_for_count, 0);
// We make vector<int> allocate memory.
// It must go through the allocator even though we didn't construct the
// vector directly. This assumes that vec[0] doesn't need to grow its
// allocation.
expected += sizeof(int);
vec[0].push_back(1);
EXPECT_EQ(allocated, expected);
inlined_case[0].emplace_back();
EXPECT_GT(total_allocated_byte_count, absl_responsible_for_count);
// Another allocating vector.
expected += one_element_std_vec_copy_allocated;
vec.push_back(vec[0]);
EXPECT_EQ(allocated, expected);
inlined_case.clear();
EXPECT_EQ(total_allocated_byte_count, 0);
}
// Overflow the inlined memory.
// The absl::InlinedVector will now allocate.
expected += sizeof(StdVector) * 8 + default_std_vec_allocated * 3;
vec.resize(5);
EXPECT_EQ(allocated, expected);
{
int64_t total_allocated_byte_count = 0;
// Adding one more in external mode should also work.
expected += one_element_std_vec_copy_allocated;
vec.push_back(vec[0]);
EXPECT_EQ(allocated, expected);
AllocVec allocated_case(ScopedAlloc(Alloc(+&total_allocated_byte_count)));
allocated_case.emplace_back();
allocated_case.emplace_back();
// And extending these should still work. This assumes that vec[0] does not
// need to grow its allocation.
expected += sizeof(int);
vec[0].push_back(1);
EXPECT_EQ(allocated, expected);
int64_t absl_responsible_for_count = total_allocated_byte_count;
EXPECT_GT(absl_responsible_for_count, 0);
vec.clear();
EXPECT_EQ(allocated, 0);
allocated_case[1].emplace_back();
EXPECT_GT(total_allocated_byte_count, absl_responsible_for_count);
allocated_case.clear();
EXPECT_EQ(total_allocated_byte_count, 0);
}
}
TEST(AllocatorSupportTest, SizeAllocConstructor) {

File: absl/container/internal/inlined_vector.h

@@ -364,16 +364,6 @@ class Storage {
allocation_tx_ptr->GetCapacity() = 0;
}
void SwapSizeAndIsAllocated(Storage* other) {
using std::swap;
swap(GetSizeAndIsAllocated(), other->GetSizeAndIsAllocated());
}
void SwapAllocatedSizeAndCapacity(Storage* other) {
using std::swap;
swap(data_.allocated, other->data_.allocated);
}
void MemcpyFrom(const Storage& other_storage) {
assert(IsMemcpyOk::value || other_storage.GetIsAllocated());
@@ -390,10 +380,17 @@ class Storage {
template <typename ValueAdapter>
void Resize(ValueAdapter values, size_type new_size);
template <typename... Args>
reference EmplaceBack(Args&&... args);
iterator Erase(const_iterator from, const_iterator to);
void Reserve(size_type requested_capacity);
void ShrinkToFit();
void Swap(Storage* other_storage_ptr);
private:
size_type& GetSizeAndIsAllocated() { return metadata_.template get<1>(); }
@@ -401,14 +398,8 @@ class Storage {
return metadata_.template get<1>();
}
static size_type LegacyNextCapacityFrom(size_type current_capacity,
size_type requested_capacity) {
// TODO(johnsoncj): Get rid of this old behavior.
size_type new_capacity = current_capacity;
while (new_capacity < requested_capacity) {
new_capacity *= 2;
}
return new_capacity;
static size_type NextCapacityFrom(size_type current_capacity) {
return current_capacity * 2;
}
using Metadata =
@@ -521,8 +512,7 @@ auto Storage<T, N, A>::Resize(ValueAdapter values, size_type new_size) -> void {
absl::Span<value_type> destroy_loop;
if (new_size > storage_view.capacity) {
pointer new_data = allocation_tx.Allocate(
LegacyNextCapacityFrom(storage_view.capacity, new_size));
pointer new_data = allocation_tx.Allocate(new_size);
// Construct new objects in `new_data`
construct_loop = {new_data + storage_view.size,
@@ -562,6 +552,75 @@ auto Storage<T, N, A>::Resize(ValueAdapter values, size_type new_size) -> void {
SetSize(new_size);
}
template <typename T, size_t N, typename A>
template <typename... Args>
auto Storage<T, N, A>::EmplaceBack(Args&&... args) -> reference {
StorageView storage_view = MakeStorageView();
AllocationTransaction allocation_tx(GetAllocPtr());
IteratorValueAdapter<MoveIterator> move_values(
MoveIterator(storage_view.data));
pointer construct_data =
(storage_view.size == storage_view.capacity
? allocation_tx.Allocate(NextCapacityFrom(storage_view.capacity))
: storage_view.data);
pointer last_ptr = construct_data + storage_view.size;
AllocatorTraits::construct(*GetAllocPtr(), last_ptr,
std::forward<Args>(args)...);
if (allocation_tx.DidAllocate()) {
ABSL_INTERNAL_TRY {
inlined_vector_internal::ConstructElements(
GetAllocPtr(), allocation_tx.GetData(), &move_values,
storage_view.size);
}
ABSL_INTERNAL_CATCH_ANY {
AllocatorTraits::destroy(*GetAllocPtr(), last_ptr);
ABSL_INTERNAL_RETHROW;
}
inlined_vector_internal::DestroyElements(GetAllocPtr(), storage_view.data,
storage_view.size);
DeallocateIfAllocated();
AcquireAllocation(&allocation_tx);
SetIsAllocated();
}
AddSize(1);
return *last_ptr;
}
template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Erase(const_iterator from, const_iterator to)
-> iterator {
assert(from != to);
StorageView storage_view = MakeStorageView();
size_type erase_size = std::distance(from, to);
size_type erase_index =
std::distance(const_iterator(storage_view.data), from);
size_type erase_end_index = erase_index + erase_size;
IteratorValueAdapter<MoveIterator> move_values(
MoveIterator(storage_view.data + erase_end_index));
inlined_vector_internal::AssignElements(storage_view.data + erase_index,
&move_values,
storage_view.size - erase_end_index);
inlined_vector_internal::DestroyElements(
GetAllocPtr(), storage_view.data + (storage_view.size - erase_size),
erase_size);
SubtractSize(erase_size);
return iterator(storage_view.data + erase_index);
}
template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Reserve(size_type requested_capacity) -> void {
StorageView storage_view = MakeStorageView();
@@ -573,8 +632,7 @@ auto Storage<T, N, A>::Reserve(size_type requested_capacity) -> void {
IteratorValueAdapter<MoveIterator> move_values(
MoveIterator(storage_view.data));
pointer new_data = allocation_tx.Allocate(
LegacyNextCapacityFrom(storage_view.capacity, requested_capacity));
pointer new_data = allocation_tx.Allocate(requested_capacity);
inlined_vector_internal::ConstructElements(GetAllocPtr(), new_data,
&move_values, storage_view.size);
@@ -592,8 +650,8 @@ auto Storage<T, N, A>::ShrinkToFit() -> void {
// May only be called on allocated instances!
assert(GetIsAllocated());
StorageView storage_view = {GetAllocatedData(), GetSize(),
GetAllocatedCapacity()};
StorageView storage_view{GetAllocatedData(), GetSize(),
GetAllocatedCapacity()};
AllocationTransaction allocation_tx(GetAllocPtr());
@@ -634,6 +692,82 @@ auto Storage<T, N, A>::ShrinkToFit() -> void {
}
}
template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Swap(Storage* other_storage_ptr) -> void {
using std::swap;
assert(this != other_storage_ptr);
if (GetIsAllocated() && other_storage_ptr->GetIsAllocated()) {
// Both are allocated, thus we can swap the allocations at the top level.
swap(data_.allocated, other_storage_ptr->data_.allocated);
} else if (!GetIsAllocated() && !other_storage_ptr->GetIsAllocated()) {
// Both are inlined, thus element-wise swap up to smaller size, then move
// the remaining elements.
Storage* small_ptr = this;
Storage* large_ptr = other_storage_ptr;
if (small_ptr->GetSize() > large_ptr->GetSize()) swap(small_ptr, large_ptr);
for (size_type i = 0; i < small_ptr->GetSize(); ++i) {
swap(small_ptr->GetInlinedData()[i], large_ptr->GetInlinedData()[i]);
}
IteratorValueAdapter<MoveIterator> move_values(
MoveIterator(large_ptr->GetInlinedData() + small_ptr->GetSize()));
inlined_vector_internal::ConstructElements(
large_ptr->GetAllocPtr(),
small_ptr->GetInlinedData() + small_ptr->GetSize(), &move_values,
large_ptr->GetSize() - small_ptr->GetSize());
inlined_vector_internal::DestroyElements(
large_ptr->GetAllocPtr(),
large_ptr->GetInlinedData() + small_ptr->GetSize(),
large_ptr->GetSize() - small_ptr->GetSize());
} else {
// One is allocated and the other is inlined, thus we first move the
// elements from the inlined instance to the inlined space in the allocated
// instance and then we can finish by having the other vector take on the
// allocation.
Storage* allocated_ptr = this;
Storage* inlined_ptr = other_storage_ptr;
if (!allocated_ptr->GetIsAllocated()) swap(allocated_ptr, inlined_ptr);
StorageView allocated_storage_view{allocated_ptr->GetAllocatedData(),
allocated_ptr->GetSize(),
allocated_ptr->GetAllocatedCapacity()};
IteratorValueAdapter<MoveIterator> move_values(
MoveIterator(inlined_ptr->GetInlinedData()));
ABSL_INTERNAL_TRY {
inlined_vector_internal::ConstructElements(
inlined_ptr->GetAllocPtr(), allocated_ptr->GetInlinedData(),
&move_values, inlined_ptr->GetSize());
}
ABSL_INTERNAL_CATCH_ANY {
// Writing to inlined data will trample on the existing state, thus it
// needs to be restored when a construction fails.
allocated_ptr->SetAllocatedData(allocated_storage_view.data,
allocated_storage_view.capacity);
ABSL_INTERNAL_RETHROW;
}
inlined_vector_internal::DestroyElements(inlined_ptr->GetAllocPtr(),
inlined_ptr->GetInlinedData(),
inlined_ptr->GetSize());
inlined_ptr->SetAllocatedData(allocated_storage_view.data,
allocated_storage_view.capacity);
}
// All cases swap the size, `is_allocated` boolean and the allocator.
swap(GetSizeAndIsAllocated(), other_storage_ptr->GetSizeAndIsAllocated());
swap(*GetAllocPtr(), *other_storage_ptr->GetAllocPtr());
}
} // namespace inlined_vector_internal
} // namespace absl