Add 'third_party/abseil_cpp/' from commit '768eb2ca28'
git-subtree-dir: third_party/abseil_cpp git-subtree-mainline:ffb2ae54begit-subtree-split:768eb2ca28
This commit is contained in:
commit
fc8dc48020
1276 changed files with 208196 additions and 0 deletions
2629
third_party/abseil_cpp/absl/container/internal/btree.h
vendored
Normal file
2629
third_party/abseil_cpp/absl/container/internal/btree.h
vendored
Normal file
File diff suppressed because it is too large
Load diff
672
third_party/abseil_cpp/absl/container/internal/btree_container.h
vendored
Normal file
672
third_party/abseil_cpp/absl/container/internal/btree_container.h
vendored
Normal file
|
|
@ -0,0 +1,672 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_BTREE_CONTAINER_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_BTREE_CONTAINER_H_
|
||||
|
||||
#include <algorithm>
|
||||
#include <initializer_list>
|
||||
#include <iterator>
|
||||
#include <utility>
|
||||
|
||||
#include "absl/base/internal/throw_delegate.h"
|
||||
#include "absl/container/internal/btree.h" // IWYU pragma: export
|
||||
#include "absl/container/internal/common.h"
|
||||
#include "absl/meta/type_traits.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
// A common base class for btree_set, btree_map, btree_multiset, and
// btree_multimap. `Tree` is the underlying btree implementation type; all
// member types and operations below simply re-export or delegate to it.
template <typename Tree>
class btree_container {
  using params_type = typename Tree::params_type;

 protected:
  // Alias used for heterogeneous lookup functions.
  // `key_arg<K>` evaluates to `K` when the functors are transparent and to
  // `key_type` otherwise. It permits template argument deduction on `K` for the
  // transparent case.
  template <class K>
  using key_arg =
      typename KeyArg<IsTransparent<typename Tree::key_compare>::value>::
          template type<K, typename Tree::key_type>;

 public:
  using key_type = typename Tree::key_type;
  using value_type = typename Tree::value_type;
  using size_type = typename Tree::size_type;
  using difference_type = typename Tree::difference_type;
  using key_compare = typename Tree::key_compare;
  using value_compare = typename Tree::value_compare;
  using allocator_type = typename Tree::allocator_type;
  using reference = typename Tree::reference;
  using const_reference = typename Tree::const_reference;
  using pointer = typename Tree::pointer;
  using const_pointer = typename Tree::const_pointer;
  using iterator = typename Tree::iterator;
  using const_iterator = typename Tree::const_iterator;
  using reverse_iterator = typename Tree::reverse_iterator;
  using const_reverse_iterator = typename Tree::const_reverse_iterator;
  using node_type = typename Tree::node_handle_type;

  // Constructors/assignments.
  btree_container() : tree_(key_compare(), allocator_type()) {}
  explicit btree_container(const key_compare &comp,
                           const allocator_type &alloc = allocator_type())
      : tree_(comp, alloc) {}
  btree_container(const btree_container &other) = default;
  btree_container(btree_container &&other) noexcept = default;
  btree_container &operator=(const btree_container &other) = default;
  btree_container &operator=(btree_container &&other) noexcept(
      std::is_nothrow_move_assignable<Tree>::value) = default;

  // Iterator routines.
  iterator begin() { return tree_.begin(); }
  const_iterator begin() const { return tree_.begin(); }
  const_iterator cbegin() const { return tree_.begin(); }
  iterator end() { return tree_.end(); }
  const_iterator end() const { return tree_.end(); }
  const_iterator cend() const { return tree_.end(); }
  reverse_iterator rbegin() { return tree_.rbegin(); }
  const_reverse_iterator rbegin() const { return tree_.rbegin(); }
  const_reverse_iterator crbegin() const { return tree_.rbegin(); }
  reverse_iterator rend() { return tree_.rend(); }
  const_reverse_iterator rend() const { return tree_.rend(); }
  const_reverse_iterator crend() const { return tree_.rend(); }

  // Lookup routines. All of these support heterogeneous lookup via
  // `key_arg<K>` (see above).
  template <typename K = key_type>
  iterator find(const key_arg<K> &key) {
    return tree_.find(key);
  }
  template <typename K = key_type>
  const_iterator find(const key_arg<K> &key) const {
    return tree_.find(key);
  }
  template <typename K = key_type>
  bool contains(const key_arg<K> &key) const {
    return find(key) != end();
  }
  template <typename K = key_type>
  iterator lower_bound(const key_arg<K> &key) {
    return tree_.lower_bound(key);
  }
  template <typename K = key_type>
  const_iterator lower_bound(const key_arg<K> &key) const {
    return tree_.lower_bound(key);
  }
  template <typename K = key_type>
  iterator upper_bound(const key_arg<K> &key) {
    return tree_.upper_bound(key);
  }
  template <typename K = key_type>
  const_iterator upper_bound(const key_arg<K> &key) const {
    return tree_.upper_bound(key);
  }
  template <typename K = key_type>
  std::pair<iterator, iterator> equal_range(const key_arg<K> &key) {
    return tree_.equal_range(key);
  }
  template <typename K = key_type>
  std::pair<const_iterator, const_iterator> equal_range(
      const key_arg<K> &key) const {
    return tree_.equal_range(key);
  }

  // Deletion routines. Note that there is also a deletion routine that is
  // specific to btree_set_container/btree_multiset_container.

  // Erase the specified iterator from the btree. The iterator must be valid
  // (i.e. not equal to end()). Return an iterator pointing to the node after
  // the one that was erased (or end() if none exists).
  iterator erase(const_iterator iter) { return tree_.erase(iterator(iter)); }
  iterator erase(iterator iter) { return tree_.erase(iter); }
  iterator erase(const_iterator first, const_iterator last) {
    // erase_range returns a (count, iterator) pair; only the iterator is
    // needed here.
    return tree_.erase_range(iterator(first), iterator(last)).second;
  }

  // Extract routines.
  node_type extract(iterator position) {
    // Use Move instead of Transfer, because the rebalancing code expects to
    // have a valid object to scribble metadata bits on top of.
    auto node = CommonAccess::Move<node_type>(get_allocator(), position.slot());
    erase(position);
    return node;
  }
  node_type extract(const_iterator position) {
    return extract(iterator(position));
  }

 public:
  // Utility routines.
  void clear() { tree_.clear(); }
  void swap(btree_container &other) { tree_.swap(other.tree_); }
  // Internal consistency check; delegates to the tree's own verification.
  void verify() const { tree_.verify(); }

  // Size routines.
  size_type size() const { return tree_.size(); }
  size_type max_size() const { return tree_.max_size(); }
  bool empty() const { return tree_.empty(); }

  // Comparison operators: elementwise equality and lexicographic ordering.
  friend bool operator==(const btree_container &x, const btree_container &y) {
    if (x.size() != y.size()) return false;
    return std::equal(x.begin(), x.end(), y.begin());
  }

  friend bool operator!=(const btree_container &x, const btree_container &y) {
    return !(x == y);
  }

  friend bool operator<(const btree_container &x, const btree_container &y) {
    return std::lexicographical_compare(x.begin(), x.end(), y.begin(), y.end());
  }

  friend bool operator>(const btree_container &x, const btree_container &y) {
    return y < x;
  }

  friend bool operator<=(const btree_container &x, const btree_container &y) {
    return !(y < x);
  }

  friend bool operator>=(const btree_container &x, const btree_container &y) {
    return !(x < y);
  }

  // The allocator used by the btree.
  allocator_type get_allocator() const { return tree_.get_allocator(); }

  // The key comparator used by the btree.
  key_compare key_comp() const { return tree_.key_comp(); }
  value_compare value_comp() const { return tree_.value_comp(); }

  // Support absl::Hash. The size is mixed in last so that containers that are
  // prefixes of each other hash differently.
  template <typename State>
  friend State AbslHashValue(State h, const btree_container &b) {
    for (const auto &v : b) {
      h = State::combine(std::move(h), v);
    }
    return State::combine(std::move(h), b.size());
  }

 protected:
  // The underlying btree holding all elements.
  Tree tree_;
};
|
||||
|
||||
// A common base class for btree_set and btree_map. Provides the unique-key
// insertion/erase/extract/merge API on top of btree_container.
template <typename Tree>
class btree_set_container : public btree_container<Tree> {
  using super_type = btree_container<Tree>;
  using params_type = typename Tree::params_type;
  using init_type = typename params_type::init_type;
  using is_key_compare_to = typename params_type::is_key_compare_to;
  friend class BtreeNodePeer;

 protected:
  template <class K>
  using key_arg = typename super_type::template key_arg<K>;

 public:
  using key_type = typename Tree::key_type;
  using value_type = typename Tree::value_type;
  using size_type = typename Tree::size_type;
  using key_compare = typename Tree::key_compare;
  using allocator_type = typename Tree::allocator_type;
  using iterator = typename Tree::iterator;
  using const_iterator = typename Tree::const_iterator;
  using node_type = typename super_type::node_type;
  using insert_return_type = InsertReturnType<iterator, node_type>;

  // Inherit constructors.
  using super_type::super_type;
  btree_set_container() {}

  // Range constructor.
  template <class InputIterator>
  btree_set_container(InputIterator b, InputIterator e,
                      const key_compare &comp = key_compare(),
                      const allocator_type &alloc = allocator_type())
      : super_type(comp, alloc) {
    insert(b, e);
  }

  // Initializer list constructor.
  btree_set_container(std::initializer_list<init_type> init,
                      const key_compare &comp = key_compare(),
                      const allocator_type &alloc = allocator_type())
      : btree_set_container(init.begin(), init.end(), comp, alloc) {}

  // Lookup routines. For unique-key containers the count is always 0 or 1.
  template <typename K = key_type>
  size_type count(const key_arg<K> &key) const {
    return this->tree_.count_unique(key);
  }

  // Insertion routines.
  std::pair<iterator, bool> insert(const value_type &v) {
    return this->tree_.insert_unique(params_type::key(v), v);
  }
  std::pair<iterator, bool> insert(value_type &&v) {
    return this->tree_.insert_unique(params_type::key(v), std::move(v));
  }
  template <typename... Args>
  std::pair<iterator, bool> emplace(Args &&... args) {
    // Construct eagerly: the key must be available to search the tree before
    // deciding whether the element is actually inserted.
    init_type v(std::forward<Args>(args)...);
    return this->tree_.insert_unique(params_type::key(v), std::move(v));
  }
  iterator insert(const_iterator position, const value_type &v) {
    return this->tree_
        .insert_hint_unique(iterator(position), params_type::key(v), v)
        .first;
  }
  iterator insert(const_iterator position, value_type &&v) {
    return this->tree_
        .insert_hint_unique(iterator(position), params_type::key(v),
                            std::move(v))
        .first;
  }
  template <typename... Args>
  iterator emplace_hint(const_iterator position, Args &&... args) {
    init_type v(std::forward<Args>(args)...);
    return this->tree_
        .insert_hint_unique(iterator(position), params_type::key(v),
                            std::move(v))
        .first;
  }
  template <typename InputIterator>
  void insert(InputIterator b, InputIterator e) {
    this->tree_.insert_iterator_unique(b, e);
  }
  void insert(std::initializer_list<init_type> init) {
    this->tree_.insert_iterator_unique(init.begin(), init.end());
  }
  // Node-handle insertion (C++17 node_type API). On success the handle is
  // emptied; on failure the element stays in the returned handle.
  insert_return_type insert(node_type &&node) {
    if (!node) return {this->end(), false, node_type()};
    std::pair<iterator, bool> res =
        this->tree_.insert_unique(params_type::key(CommonAccess::GetSlot(node)),
                                  CommonAccess::GetSlot(node));
    if (res.second) {
      CommonAccess::Destroy(&node);
      return {res.first, true, node_type()};
    } else {
      return {res.first, false, std::move(node)};
    }
  }
  iterator insert(const_iterator hint, node_type &&node) {
    if (!node) return this->end();
    std::pair<iterator, bool> res = this->tree_.insert_hint_unique(
        iterator(hint), params_type::key(CommonAccess::GetSlot(node)),
        CommonAccess::GetSlot(node));
    if (res.second) CommonAccess::Destroy(&node);
    return res.first;
  }

  // Deletion routines. Returns the number of erased elements (0 or 1).
  template <typename K = key_type>
  size_type erase(const key_arg<K> &key) {
    return this->tree_.erase_unique(key);
  }
  using super_type::erase;

  // Node extraction routines.
  template <typename K = key_type>
  node_type extract(const key_arg<K> &key) {
    auto it = this->find(key);
    return it == this->end() ? node_type() : extract(it);
  }
  using super_type::extract;

  // Merge routines.
  // Moves elements from `src` into `this`. If the element already exists in
  // `this`, it is left unmodified in `src`.
  template <
      typename T,
      typename absl::enable_if_t<
          absl::conjunction<
              std::is_same<value_type, typename T::value_type>,
              std::is_same<allocator_type, typename T::allocator_type>,
              std::is_same<typename params_type::is_map_container,
                           typename T::params_type::is_map_container>>::value,
          int> = 0>
  void merge(btree_container<T> &src) {  // NOLINT
    for (auto src_it = src.begin(); src_it != src.end();) {
      if (insert(std::move(*src_it)).second) {
        src_it = src.erase(src_it);
      } else {
        ++src_it;
      }
    }
  }

  template <
      typename T,
      typename absl::enable_if_t<
          absl::conjunction<
              std::is_same<value_type, typename T::value_type>,
              std::is_same<allocator_type, typename T::allocator_type>,
              std::is_same<typename params_type::is_map_container,
                           typename T::params_type::is_map_container>>::value,
          int> = 0>
  void merge(btree_container<T> &&src) {
    merge(src);
  }
};
|
||||
|
||||
// Base class for btree_map. Adds the map-specific API (insert_or_assign,
// try_emplace, operator[], at) on top of btree_set_container.
template <typename Tree>
class btree_map_container : public btree_set_container<Tree> {
  using super_type = btree_set_container<Tree>;
  using params_type = typename Tree::params_type;

 private:
  template <class K>
  using key_arg = typename super_type::template key_arg<K>;

 public:
  using key_type = typename Tree::key_type;
  using mapped_type = typename params_type::mapped_type;
  using value_type = typename Tree::value_type;
  using key_compare = typename Tree::key_compare;
  using allocator_type = typename Tree::allocator_type;
  using iterator = typename Tree::iterator;
  using const_iterator = typename Tree::const_iterator;

  // Inherit constructors.
  using super_type::super_type;
  btree_map_container() {}

  // Insertion routines.
  // Note: the nullptr template arguments and extra `const M&` overloads allow
  // for supporting bitfield arguments.
  // Note: when we call `std::forward<M>(obj)` twice, it's safe because
  // insert_unique/insert_hint_unique are guaranteed to not consume `obj` when
  // `ret.second` is false.
  template <class M>
  std::pair<iterator, bool> insert_or_assign(const key_type &k, const M &obj) {
    const std::pair<iterator, bool> ret = this->tree_.insert_unique(k, k, obj);
    if (!ret.second) ret.first->second = obj;
    return ret;
  }
  template <class M, key_type * = nullptr>
  std::pair<iterator, bool> insert_or_assign(key_type &&k, const M &obj) {
    const std::pair<iterator, bool> ret =
        this->tree_.insert_unique(k, std::move(k), obj);
    if (!ret.second) ret.first->second = obj;
    return ret;
  }
  template <class M, M * = nullptr>
  std::pair<iterator, bool> insert_or_assign(const key_type &k, M &&obj) {
    const std::pair<iterator, bool> ret =
        this->tree_.insert_unique(k, k, std::forward<M>(obj));
    if (!ret.second) ret.first->second = std::forward<M>(obj);
    return ret;
  }
  template <class M, key_type * = nullptr, M * = nullptr>
  std::pair<iterator, bool> insert_or_assign(key_type &&k, M &&obj) {
    const std::pair<iterator, bool> ret =
        this->tree_.insert_unique(k, std::move(k), std::forward<M>(obj));
    if (!ret.second) ret.first->second = std::forward<M>(obj);
    return ret;
  }
  template <class M>
  iterator insert_or_assign(const_iterator position, const key_type &k,
                            const M &obj) {
    const std::pair<iterator, bool> ret =
        this->tree_.insert_hint_unique(iterator(position), k, k, obj);
    if (!ret.second) ret.first->second = obj;
    return ret.first;
  }
  template <class M, key_type * = nullptr>
  iterator insert_or_assign(const_iterator position, key_type &&k,
                            const M &obj) {
    const std::pair<iterator, bool> ret = this->tree_.insert_hint_unique(
        iterator(position), k, std::move(k), obj);
    if (!ret.second) ret.first->second = obj;
    return ret.first;
  }
  template <class M, M * = nullptr>
  iterator insert_or_assign(const_iterator position, const key_type &k,
                            M &&obj) {
    const std::pair<iterator, bool> ret = this->tree_.insert_hint_unique(
        iterator(position), k, k, std::forward<M>(obj));
    if (!ret.second) ret.first->second = std::forward<M>(obj);
    return ret.first;
  }
  template <class M, key_type * = nullptr, M * = nullptr>
  iterator insert_or_assign(const_iterator position, key_type &&k, M &&obj) {
    const std::pair<iterator, bool> ret = this->tree_.insert_hint_unique(
        iterator(position), k, std::move(k), std::forward<M>(obj));
    if (!ret.second) ret.first->second = std::forward<M>(obj);
    return ret.first;
  }
  template <typename... Args>
  std::pair<iterator, bool> try_emplace(const key_type &k, Args &&... args) {
    return this->tree_.insert_unique(
        k, std::piecewise_construct, std::forward_as_tuple(k),
        std::forward_as_tuple(std::forward<Args>(args)...));
  }
  template <typename... Args>
  std::pair<iterator, bool> try_emplace(key_type &&k, Args &&... args) {
    // Note: `key_ref` exists to avoid a ClangTidy warning about moving from `k`
    // and then using `k` unsequenced. This is safe because the move is into a
    // forwarding reference and insert_unique guarantees that `key` is never
    // referenced after consuming `args`.
    const key_type &key_ref = k;
    return this->tree_.insert_unique(
        key_ref, std::piecewise_construct, std::forward_as_tuple(std::move(k)),
        std::forward_as_tuple(std::forward<Args>(args)...));
  }
  template <typename... Args>
  iterator try_emplace(const_iterator hint, const key_type &k,
                       Args &&... args) {
    return this->tree_
        .insert_hint_unique(iterator(hint), k, std::piecewise_construct,
                            std::forward_as_tuple(k),
                            std::forward_as_tuple(std::forward<Args>(args)...))
        .first;
  }
  template <typename... Args>
  iterator try_emplace(const_iterator hint, key_type &&k, Args &&... args) {
    // Note: `key_ref` exists to avoid a ClangTidy warning about moving from `k`
    // and then using `k` unsequenced. This is safe because the move is into a
    // forwarding reference and insert_hint_unique guarantees that `key` is
    // never referenced after consuming `args`.
    const key_type &key_ref = k;
    return this->tree_
        .insert_hint_unique(iterator(hint), key_ref, std::piecewise_construct,
                            std::forward_as_tuple(std::move(k)),
                            std::forward_as_tuple(std::forward<Args>(args)...))
        .first;
  }
  // operator[] default-constructs the mapped value when the key is absent.
  mapped_type &operator[](const key_type &k) {
    return try_emplace(k).first->second;
  }
  mapped_type &operator[](key_type &&k) {
    return try_emplace(std::move(k)).first->second;
  }

  // Bounds-checked access; throws std::out_of_range when the key is absent.
  template <typename K = key_type>
  mapped_type &at(const key_arg<K> &key) {
    auto it = this->find(key);
    if (it == this->end())
      base_internal::ThrowStdOutOfRange("absl::btree_map::at");
    return it->second;
  }
  template <typename K = key_type>
  const mapped_type &at(const key_arg<K> &key) const {
    auto it = this->find(key);
    if (it == this->end())
      base_internal::ThrowStdOutOfRange("absl::btree_map::at");
    return it->second;
  }
};
|
||||
|
||||
// A common base class for btree_multiset and btree_multimap. Provides the
// duplicate-key insertion/erase/extract/merge API on top of btree_container.
template <typename Tree>
class btree_multiset_container : public btree_container<Tree> {
  using super_type = btree_container<Tree>;
  using params_type = typename Tree::params_type;
  using init_type = typename params_type::init_type;
  using is_key_compare_to = typename params_type::is_key_compare_to;

  template <class K>
  using key_arg = typename super_type::template key_arg<K>;

 public:
  using key_type = typename Tree::key_type;
  using value_type = typename Tree::value_type;
  using size_type = typename Tree::size_type;
  using key_compare = typename Tree::key_compare;
  using allocator_type = typename Tree::allocator_type;
  using iterator = typename Tree::iterator;
  using const_iterator = typename Tree::const_iterator;
  using node_type = typename super_type::node_type;

  // Inherit constructors.
  using super_type::super_type;
  btree_multiset_container() {}

  // Range constructor.
  template <class InputIterator>
  btree_multiset_container(InputIterator b, InputIterator e,
                           const key_compare &comp = key_compare(),
                           const allocator_type &alloc = allocator_type())
      : super_type(comp, alloc) {
    insert(b, e);
  }

  // Initializer list constructor.
  btree_multiset_container(std::initializer_list<init_type> init,
                           const key_compare &comp = key_compare(),
                           const allocator_type &alloc = allocator_type())
      : btree_multiset_container(init.begin(), init.end(), comp, alloc) {}

  // Lookup routines. Counts all elements equivalent to `key`.
  template <typename K = key_type>
  size_type count(const key_arg<K> &key) const {
    return this->tree_.count_multi(key);
  }

  // Insertion routines. Unlike the unique-key containers, insertion always
  // succeeds, so these return a plain iterator rather than a pair.
  iterator insert(const value_type &v) { return this->tree_.insert_multi(v); }
  iterator insert(value_type &&v) {
    return this->tree_.insert_multi(std::move(v));
  }
  iterator insert(const_iterator position, const value_type &v) {
    return this->tree_.insert_hint_multi(iterator(position), v);
  }
  iterator insert(const_iterator position, value_type &&v) {
    return this->tree_.insert_hint_multi(iterator(position), std::move(v));
  }
  template <typename InputIterator>
  void insert(InputIterator b, InputIterator e) {
    this->tree_.insert_iterator_multi(b, e);
  }
  void insert(std::initializer_list<init_type> init) {
    this->tree_.insert_iterator_multi(init.begin(), init.end());
  }
  template <typename... Args>
  iterator emplace(Args &&... args) {
    return this->tree_.insert_multi(init_type(std::forward<Args>(args)...));
  }
  template <typename... Args>
  iterator emplace_hint(const_iterator position, Args &&... args) {
    return this->tree_.insert_hint_multi(
        iterator(position), init_type(std::forward<Args>(args)...));
  }
  // Node-handle insertion (C++17 node_type API); always consumes the handle.
  iterator insert(node_type &&node) {
    if (!node) return this->end();
    iterator res =
        this->tree_.insert_multi(params_type::key(CommonAccess::GetSlot(node)),
                                 CommonAccess::GetSlot(node));
    CommonAccess::Destroy(&node);
    return res;
  }
  iterator insert(const_iterator hint, node_type &&node) {
    if (!node) return this->end();
    iterator res = this->tree_.insert_hint_multi(
        iterator(hint),
        std::move(params_type::element(CommonAccess::GetSlot(node))));
    CommonAccess::Destroy(&node);
    return res;
  }

  // Deletion routines. Returns the number of erased elements.
  template <typename K = key_type>
  size_type erase(const key_arg<K> &key) {
    return this->tree_.erase_multi(key);
  }
  using super_type::erase;

  // Node extraction routines. Extracts the first element equivalent to `key`,
  // if any.
  template <typename K = key_type>
  node_type extract(const key_arg<K> &key) {
    auto it = this->find(key);
    return it == this->end() ? node_type() : extract(it);
  }
  using super_type::extract;

  // Merge routines.
  // Moves all elements from `src` into `this`.
  template <
      typename T,
      typename absl::enable_if_t<
          absl::conjunction<
              std::is_same<value_type, typename T::value_type>,
              std::is_same<allocator_type, typename T::allocator_type>,
              std::is_same<typename params_type::is_map_container,
                           typename T::params_type::is_map_container>>::value,
          int> = 0>
  void merge(btree_container<T> &src) {  // NOLINT
    insert(std::make_move_iterator(src.begin()),
           std::make_move_iterator(src.end()));
    src.clear();
  }

  template <
      typename T,
      typename absl::enable_if_t<
          absl::conjunction<
              std::is_same<value_type, typename T::value_type>,
              std::is_same<allocator_type, typename T::allocator_type>,
              std::is_same<typename params_type::is_map_container,
                           typename T::params_type::is_map_container>>::value,
          int> = 0>
  void merge(btree_container<T> &&src) {
    merge(src);
  }
};
|
||||
|
||||
// A base class for btree_multimap. Only adds `mapped_type` on top of
// btree_multiset_container; all operations are inherited.
template <typename Tree>
class btree_multimap_container : public btree_multiset_container<Tree> {
  using super_type = btree_multiset_container<Tree>;
  using params_type = typename Tree::params_type;

 public:
  using mapped_type = typename params_type::mapped_type;

  // Inherit constructors.
  using super_type::super_type;
  btree_multimap_container() {}
};
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_BTREE_CONTAINER_H_
|
||||
203
third_party/abseil_cpp/absl/container/internal/common.h
vendored
Normal file
203
third_party/abseil_cpp/absl/container/internal/common.h
vendored
Normal file
|
|
@ -0,0 +1,203 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_CONTAINER_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_CONTAINER_H_
|
||||
|
||||
#include <cassert>
|
||||
#include <type_traits>
|
||||
|
||||
#include "absl/meta/type_traits.h"
|
||||
#include "absl/types/optional.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
// Trait that detects whether a comparator/hasher `T` opts into heterogeneous
// ("transparent") lookup by declaring a nested `is_transparent` type, as
// specified for std::less<> and friends.
template <class, class = void>
struct IsTransparent : std::false_type {};
template <class T>
struct IsTransparent<T, absl::void_t<typename T::is_transparent>>
    : std::true_type {};
|
||||
|
||||
// Selects the lookup argument type for heterogeneous-lookup APIs: when the
// container's functors are transparent, the caller-supplied type `K` is
// forwarded; otherwise lookups are forced to use `key_type`.
template <bool is_transparent>
struct KeyArg {
  // Transparent. Forward `K`.
  template <typename K, typename key_type>
  using type = K;
};

template <>
struct KeyArg<false> {
  // Not transparent. Always use `key_type`.
  template <typename K, typename key_type>
  using type = key_type;
};
|
||||
|
||||
// The node_handle concept from C++17.
// We specialize node_handle for sets and maps. node_handle_base holds the
// common API of both. The handle is move-only and owns both a copy of the
// allocator (in `alloc_`, whose engagement doubles as the empty/full flag)
// and in-place storage for one element slot (`slot_space_`).
template <typename PolicyTraits, typename Alloc>
class node_handle_base {
 protected:
  using slot_type = typename PolicyTraits::slot_type;

 public:
  using allocator_type = Alloc;

  constexpr node_handle_base() = default;
  node_handle_base(node_handle_base&& other) noexcept {
    *this = std::move(other);
  }
  ~node_handle_base() { destroy(); }
  node_handle_base& operator=(node_handle_base&& other) noexcept {
    // Any currently-held element is destroyed before taking over `other`'s.
    destroy();
    if (!other.empty()) {
      alloc_ = other.alloc_;
      PolicyTraits::transfer(alloc(), slot(), other.slot());
      other.reset();
    }
    return *this;
  }

  bool empty() const noexcept { return !alloc_; }
  explicit operator bool() const noexcept { return !empty(); }
  allocator_type get_allocator() const { return *alloc_; }

 protected:
  friend struct CommonAccess;

  // Constructs a handle by transferring (relocating) the element out of `s`.
  struct transfer_tag_t {};
  node_handle_base(transfer_tag_t, const allocator_type& a, slot_type* s)
      : alloc_(a) {
    PolicyTraits::transfer(alloc(), slot(), s);
  }

  // Constructs a handle by move-constructing the element from `s`, leaving
  // `s` valid (used when the source must remain usable; see extract()).
  struct move_tag_t {};
  node_handle_base(move_tag_t, const allocator_type& a, slot_type* s)
      : alloc_(a) {
    PolicyTraits::construct(alloc(), slot(), s);
  }

  // Destroys the held element (if any) and empties the handle.
  void destroy() {
    if (!empty()) {
      PolicyTraits::destroy(alloc(), slot());
      reset();
    }
  }

  // Marks the handle empty without destroying the slot contents; only valid
  // when the element has already been transferred or destroyed.
  void reset() {
    assert(alloc_.has_value());
    alloc_ = absl::nullopt;
  }

  slot_type* slot() const {
    assert(!empty());
    return reinterpret_cast<slot_type*>(std::addressof(slot_space_));
  }
  allocator_type* alloc() { return std::addressof(*alloc_); }

 private:
  // Engaged iff the handle holds an element.
  absl::optional<allocator_type> alloc_ = {};
  // Raw, suitably-aligned storage for the held element's slot.
  alignas(slot_type) mutable unsigned char slot_space_[sizeof(slot_type)] = {};
};
|
||||
|
||||
// For sets. Exposes the held element via value().
template <typename Policy, typename PolicyTraits, typename Alloc,
          typename = void>
class node_handle : public node_handle_base<PolicyTraits, Alloc> {
  using Base = node_handle_base<PolicyTraits, Alloc>;

 public:
  using value_type = typename PolicyTraits::value_type;

  constexpr node_handle() {}

  // Precondition: !empty().
  value_type& value() const { return PolicyTraits::element(this->slot()); }

 private:
  friend struct CommonAccess;

  using Base::Base;
};
|
||||
|
||||
// For maps.
// Specialization chosen when Policy declares a mapped_type: exposes the
// element as separate key()/mapped() accessors per the C++17 node-handle
// interface.
template <typename Policy, typename PolicyTraits, typename Alloc>
class node_handle<Policy, PolicyTraits, Alloc,
                  absl::void_t<typename Policy::mapped_type>>
    : public node_handle_base<PolicyTraits, Alloc> {
  using Base = node_handle_base<PolicyTraits, Alloc>;
  using slot_type = typename PolicyTraits::slot_type;

 public:
  using key_type = typename Policy::key_type;
  using mapped_type = typename Policy::mapped_type;

  // Constructs an empty handle.
  constexpr node_handle() {}

  // Returns the element's key. Return type is deduced from the policy so
  // that policies exposing a mutable key (for extract/re-insert) work.
  auto key() const -> decltype(PolicyTraits::key(std::declval<slot_type*>())) {
    return PolicyTraits::key(this->slot());
  }

  // Returns the element's mapped value. Precondition: !empty().
  mapped_type& mapped() const {
    return PolicyTraits::value(&PolicyTraits::element(this->slot()));
  }

 private:
  // Only containers (via CommonAccess) may create non-empty handles.
  friend struct CommonAccess;

  using Base::Base;
};
|
||||
|
||||
// Provide access to non-public node-handle functions.
// Containers are friended through this single struct rather than friending
// every container type from the handle classes.
struct CommonAccess {
  // Returns the handle's raw slot pointer.
  template <typename Node>
  static auto GetSlot(const Node& node) -> decltype(node.slot()) {
    return node.slot();
  }

  // Destroys the element held by `node`, leaving the handle empty.
  template <typename Node>
  static void Destroy(Node* node) {
    node->destroy();
  }

  // Marks `node` empty without destroying its element — for use after the
  // element has been transferred back into a container.
  template <typename Node>
  static void Reset(Node* node) {
    node->reset();
  }

  // Builds a handle that destructively transfers an element out of a
  // container slot (see node_handle_base's transfer_tag_t constructor).
  template <typename T, typename... Args>
  static T Transfer(Args&&... args) {
    return T(typename T::transfer_tag_t{}, std::forward<Args>(args)...);
  }

  // Builds a handle that move-constructs its element from a container slot,
  // leaving the slot's element moved-from but alive (move_tag_t ctor).
  template <typename T, typename... Args>
  static T Move(Args&&... args) {
    return T(typename T::move_tag_t{}, std::forward<Args>(args)...);
  }
};
|
||||
|
||||
// Implement the insert_return_type<> concept of C++17.
// Result of insert(node_type&&): on success `inserted` is true, `position`
// points at the inserted element and `node` is empty; on failure `inserted`
// is false, `position` points at the blocking element and `node` retains
// ownership of the unconsumed element.
template <class Iterator, class NodeType>
struct InsertReturnType {
  Iterator position;
  bool inserted;
  NodeType node;
};
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_CONTAINER_H_
|
||||
290
third_party/abseil_cpp/absl/container/internal/compressed_tuple.h
vendored
Normal file
290
third_party/abseil_cpp/absl/container/internal/compressed_tuple.h
vendored
Normal file
|
|
@ -0,0 +1,290 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
// Helper class to perform the Empty Base Optimization.
|
||||
// Ts can contain classes and non-classes, empty or not. For the ones that
|
||||
// are empty classes, we perform the optimization. If all types in Ts are empty
|
||||
// classes, then CompressedTuple<Ts...> is itself an empty class.
|
||||
//
|
||||
// To access the members, use member get<N>() function.
|
||||
//
|
||||
// Eg:
|
||||
// absl::container_internal::CompressedTuple<int, T1, T2, T3> value(7, t1, t2,
|
||||
// t3);
|
||||
// assert(value.get<0>() == 7);
|
||||
// T1& t1 = value.get<1>();
|
||||
// const T2& t2 = value.get<2>();
|
||||
// ...
|
||||
//
|
||||
// https://en.cppreference.com/w/cpp/language/ebo
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_COMPRESSED_TUPLE_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_COMPRESSED_TUPLE_H_
|
||||
|
||||
#include <initializer_list>
|
||||
#include <tuple>
|
||||
#include <type_traits>
|
||||
#include <utility>
|
||||
|
||||
#include "absl/utility/utility.h"
|
||||
|
||||
#if defined(_MSC_VER) && !defined(__NVCC__)
|
||||
// We need to mark these classes with this declspec to ensure that
|
||||
// CompressedTuple happens.
|
||||
#define ABSL_INTERNAL_COMPRESSED_TUPLE_DECLSPEC __declspec(empty_bases)
|
||||
#else
|
||||
#define ABSL_INTERNAL_COMPRESSED_TUPLE_DECLSPEC
|
||||
#endif
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
template <typename... Ts>
|
||||
class CompressedTuple;
|
||||
|
||||
namespace internal_compressed_tuple {
|
||||
|
||||
// Elem<CompressedTuple<Ts...>, I>::type is the I-th type in Ts; ElemT is
// the corresponding alias. Only the CompressedTuple specialization is ever
// instantiated.
template <typename D, size_t I>
struct Elem;
template <typename... B, size_t I>
struct Elem<CompressedTuple<B...>, I>
    : std::tuple_element<I, std::tuple<B...>> {};
template <typename D, size_t I>
using ElemT = typename Elem<D, I>::type;
|
||||
|
||||
// Use the __is_final intrinsic if available. Where it's not available, classes
// declared with the 'final' specifier cannot be used as CompressedTuple
// elements.
// TODO(sbenza): Replace this with std::is_final in C++14.
template <typename T>
constexpr bool IsFinal() {
#if defined(__clang__) || defined(__GNUC__)
  return __is_final(T);
#else
  // No intrinsic: assume non-final. A final T wrongly chosen as an EBO base
  // would fail to compile in Storage below.
  return false;
#endif
}
|
||||
|
||||
// We can't use EBCO on other CompressedTuples because that would mean that we
// derive from multiple Storage<> instantiations with the same I parameter,
// and potentially from multiple identical Storage<> instantiations. So anytime
// we use type inheritance rather than encapsulation, we mark
// CompressedTupleImpl, to make this easy to detect.
// Marker base class; checked by ShouldUseBase() below.
struct uses_inheritance {};
|
||||
|
||||
// True when T is eligible for the Empty Base Optimization: an empty,
// non-final class that is not itself an inheritance-using CompressedTuple
// (which could introduce ambiguous or duplicate Storage<> bases).
template <typename T>
constexpr bool ShouldUseBase() {
  return std::is_class<T>::value && std::is_empty<T>::value && !IsFinal<T>() &&
         !std::is_base_of<uses_inheritance, T>::value;
}
|
||||
|
||||
// The storage class provides two specializations:
//  - For empty classes, it stores T as a base class.
//  - For everything else, it stores T as a member.
// This primary template is the member case (UseBase == false).
template <typename T, size_t I,
#if defined(_MSC_VER)
          // MSVC needs the identity enable_if wrapper to accept
          // ShouldUseBase in a default template argument here.
          bool UseBase =
              ShouldUseBase<typename std::enable_if<true, T>::type>()>
#else
          bool UseBase = ShouldUseBase<T>()>
#endif
struct Storage {
  T value;
  constexpr Storage() = default;
  template <typename V>
  explicit constexpr Storage(absl::in_place_t, V&& v)
      : value(absl::forward<V>(v)) {}
  // The four ref-qualified get() overloads propagate the tuple's value
  // category (const-ness and lvalue/rvalue-ness) to the stored element.
  constexpr const T& get() const& { return value; }
  T& get() & { return value; }
  constexpr const T&& get() const&& { return absl::move(*this).value; }
  T&& get() && { return std::move(*this).value; }
};
|
||||
|
||||
// EBO case: T is an empty, non-final class, so it is inherited instead of
// stored, making this Storage itself empty. The I parameter keeps multiple
// Storage bases of the same T distinct.
template <typename T, size_t I>
struct ABSL_INTERNAL_COMPRESSED_TUPLE_DECLSPEC Storage<T, I, true> : T {
  constexpr Storage() = default;

  template <typename V>
  explicit constexpr Storage(absl::in_place_t, V&& v)
      : T(absl::forward<V>(v)) {}

  // As in the member case, get() propagates the tuple's value category.
  constexpr const T& get() const& { return *this; }
  T& get() & { return *this; }
  constexpr const T&& get() const&& { return absl::move(*this); }
  T&& get() && { return std::move(*this); }
};
|
||||
|
||||
// Implementation base of CompressedTuple; specialized below on whether any
// element uses inheritance-based (EBO) storage.
template <typename D, typename I, bool ShouldAnyUseBase>
struct ABSL_INTERNAL_COMPRESSED_TUPLE_DECLSPEC CompressedTupleImpl;
|
||||
|
||||
// Variant used when at least one element is stored as a base class: derives
// from every Storage<> and carries the uses_inheritance marker so that
// nested CompressedTuples do not attempt EBO on it.
template <typename... Ts, size_t... I, bool ShouldAnyUseBase>
struct ABSL_INTERNAL_COMPRESSED_TUPLE_DECLSPEC CompressedTupleImpl<
    CompressedTuple<Ts...>, absl::index_sequence<I...>, ShouldAnyUseBase>
    // We use the dummy identity function through std::integral_constant to
    // convince MSVC of accepting and expanding I in that context. Without it
    // you would get:
    //   error C3548: 'I': parameter pack cannot be used in this context
    : uses_inheritance,
      Storage<Ts, std::integral_constant<size_t, I>::value>... {
  constexpr CompressedTupleImpl() = default;
  // Forwards one argument to each element's Storage, positionally.
  template <typename... Vs>
  explicit constexpr CompressedTupleImpl(absl::in_place_t, Vs&&... args)
      : Storage<Ts, I>(absl::in_place, absl::forward<Vs>(args))... {}
  friend CompressedTuple<Ts...>;
};
|
||||
|
||||
// Variant used when no element qualifies for EBO: every Storage is forced
// to the member form (UseBase == false) and no uses_inheritance marker is
// needed.
template <typename... Ts, size_t... I>
struct ABSL_INTERNAL_COMPRESSED_TUPLE_DECLSPEC CompressedTupleImpl<
    CompressedTuple<Ts...>, absl::index_sequence<I...>, false>
    // We use the dummy identity function as above...
    : Storage<Ts, std::integral_constant<size_t, I>::value, false>... {
  constexpr CompressedTupleImpl() = default;
  // Forwards one argument to each element's Storage, positionally.
  template <typename... Vs>
  explicit constexpr CompressedTupleImpl(absl::in_place_t, Vs&&... args)
      : Storage<Ts, I, false>(absl::in_place, absl::forward<Vs>(args))... {}
  friend CompressedTuple<Ts...>;
};
|
||||
|
||||
// Compile-time OR via overload resolution, used only in unevaluated
// contexts (never defined): Or({...}) is std::false_type exactly when every
// list element is std::false_type; any std::true_type element forces the
// braced list to convert to initializer_list<bool>, selecting the second
// overload.
std::false_type Or(std::initializer_list<std::false_type>);
std::true_type Or(std::initializer_list<bool>);
|
||||
|
||||
// MSVC requires this to be done separately rather than within the declaration
// of CompressedTuple below.
// True when any of Ts qualifies for EBO storage; selects which
// CompressedTupleImpl specialization to derive from.
template <typename... Ts>
constexpr bool ShouldAnyUseBase() {
  // decltype(Or({...})) is std::true_type/std::false_type (see Or above);
  // value-initializing it yields the corresponding bool.
  return decltype(
      Or({std::integral_constant<bool, ShouldUseBase<Ts>()>()...})){};
}
|
||||
|
||||
// Per-element constructibility check for the forwarding constructor: for a
// reference element "construction" is reference binding, so require V to
// convert to T; otherwise require T constructible from V&&.
template <typename T, typename V>
using TupleElementMoveConstructible =
    typename std::conditional<std::is_reference<T>::value,
                              std::is_convertible<V, T>,
                              std::is_constructible<T, V&&>>::type;
|
||||
|
||||
// False unless the argument count matches the element count (primary
// template); the specialization then requires every element to be
// constructible/bindable from its corresponding argument.
template <bool SizeMatches, class T, class... Vs>
struct TupleMoveConstructible : std::false_type {};

template <class... Ts, class... Vs>
struct TupleMoveConstructible<true, CompressedTuple<Ts...>, Vs...>
    : std::integral_constant<
          bool, absl::conjunction<
                    TupleElementMoveConstructible<Ts, Vs&&>...>::value> {};
|
||||
|
||||
// Number of elements in a CompressedTuple, as an integral_constant
// (analogous to std::tuple_size).
template <typename T>
struct compressed_tuple_size;

template <typename... Es>
struct compressed_tuple_size<CompressedTuple<Es...>>
    : public std::integral_constant<std::size_t, sizeof...(Es)> {};
|
||||
|
||||
// SFINAE gate for CompressedTuple's forwarding constructor: true when the
// argument count equals the element count and every element can be
// constructed/bound from its argument.
template <class T, class... Vs>
struct TupleItemsMoveConstructible
    : std::integral_constant<
          bool, TupleMoveConstructible<compressed_tuple_size<T>::value ==
                                           sizeof...(Vs),
                                       T, Vs...>::value> {};
|
||||
|
||||
} // namespace internal_compressed_tuple
|
||||
|
||||
// Helper class to perform the Empty Base Class Optimization.
|
||||
// Ts can contain classes and non-classes, empty or not. For the ones that
|
||||
// are empty classes, we perform the CompressedTuple. If all types in Ts are
|
||||
// empty classes, then CompressedTuple<Ts...> is itself an empty class. (This
|
||||
// does not apply when one or more of those empty classes is itself an empty
|
||||
// CompressedTuple.)
|
||||
//
|
||||
// To access the members, use member .get<N>() function.
|
||||
//
|
||||
// Eg:
|
||||
// absl::container_internal::CompressedTuple<int, T1, T2, T3> value(7, t1, t2,
|
||||
// t3);
|
||||
// assert(value.get<0>() == 7);
|
||||
// T1& t1 = value.get<1>();
|
||||
// const T2& t2 = value.get<2>();
|
||||
// ...
|
||||
//
|
||||
// https://en.cppreference.com/w/cpp/language/ebo
|
||||
template <typename... Ts>
|
||||
class ABSL_INTERNAL_COMPRESSED_TUPLE_DECLSPEC CompressedTuple
|
||||
: private internal_compressed_tuple::CompressedTupleImpl<
|
||||
CompressedTuple<Ts...>, absl::index_sequence_for<Ts...>,
|
||||
internal_compressed_tuple::ShouldAnyUseBase<Ts...>()> {
|
||||
private:
|
||||
template <int I>
|
||||
using ElemT = internal_compressed_tuple::ElemT<CompressedTuple, I>;
|
||||
|
||||
template <int I>
|
||||
using StorageT = internal_compressed_tuple::Storage<ElemT<I>, I>;
|
||||
|
||||
public:
|
||||
// There seems to be a bug in MSVC dealing in which using '=default' here will
|
||||
// cause the compiler to ignore the body of other constructors. The work-
|
||||
// around is to explicitly implement the default constructor.
|
||||
#if defined(_MSC_VER)
|
||||
constexpr CompressedTuple() : CompressedTuple::CompressedTupleImpl() {}
|
||||
#else
|
||||
constexpr CompressedTuple() = default;
|
||||
#endif
|
||||
explicit constexpr CompressedTuple(const Ts&... base)
|
||||
: CompressedTuple::CompressedTupleImpl(absl::in_place, base...) {}
|
||||
|
||||
template <typename First, typename... Vs,
|
||||
absl::enable_if_t<
|
||||
absl::conjunction<
|
||||
// Ensure we are not hiding default copy/move constructors.
|
||||
absl::negation<std::is_same<void(CompressedTuple),
|
||||
void(absl::decay_t<First>)>>,
|
||||
internal_compressed_tuple::TupleItemsMoveConstructible<
|
||||
CompressedTuple<Ts...>, First, Vs...>>::value,
|
||||
bool> = true>
|
||||
explicit constexpr CompressedTuple(First&& first, Vs&&... base)
|
||||
: CompressedTuple::CompressedTupleImpl(absl::in_place,
|
||||
absl::forward<First>(first),
|
||||
absl::forward<Vs>(base)...) {}
|
||||
|
||||
template <int I>
|
||||
ElemT<I>& get() & {
|
||||
return internal_compressed_tuple::Storage<ElemT<I>, I>::get();
|
||||
}
|
||||
|
||||
template <int I>
|
||||
constexpr const ElemT<I>& get() const& {
|
||||
return StorageT<I>::get();
|
||||
}
|
||||
|
||||
template <int I>
|
||||
ElemT<I>&& get() && {
|
||||
return std::move(*this).StorageT<I>::get();
|
||||
}
|
||||
|
||||
template <int I>
|
||||
constexpr const ElemT<I>&& get() const&& {
|
||||
return absl::move(*this).StorageT<I>::get();
|
||||
}
|
||||
};
|
||||
|
||||
// Explicit specialization for a zero-element tuple
// (needed to avoid ambiguous overloads for the default constructor).
template <>
class ABSL_INTERNAL_COMPRESSED_TUPLE_DECLSPEC CompressedTuple<> {};
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#undef ABSL_INTERNAL_COMPRESSED_TUPLE_DECLSPEC
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_COMPRESSED_TUPLE_H_
|
||||
409
third_party/abseil_cpp/absl/container/internal/compressed_tuple_test.cc
vendored
Normal file
409
third_party/abseil_cpp/absl/container/internal/compressed_tuple_test.cc
vendored
Normal file
|
|
@ -0,0 +1,409 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "absl/container/internal/compressed_tuple.h"

#include <memory>
#include <set>
#include <string>
#include <type_traits>
#include <utility>

#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "absl/container/internal/test_instance_tracker.h"
#include "absl/memory/memory.h"
#include "absl/types/any.h"
#include "absl/types/optional.h"
#include "absl/utility/utility.h"
|
||||
|
||||
// These are declared at global scope purely so that error messages
// are smaller and easier to understand.
// Which ref-qualified overload of Empty::value() was invoked.
enum class CallType { kConstRef, kConstMove };
|
||||
|
||||
// Empty class whose value() reports the value category it was called on;
// the int parameter makes distinct empty types for EBO tests.
template <int>
struct Empty {
  constexpr CallType value() const& { return CallType::kConstRef; }
  constexpr CallType value() const&& { return CallType::kConstMove; }
};
|
||||
|
||||
// Non-empty wrapper: must be stored as a member, never as a base.
template <typename T>
struct NotEmpty {
  T value;
};
|
||||
|
||||
// Plain two-member struct used as a sizeof/layout reference.
template <typename T, typename U>
struct TwoValues {
  T value1;
  U value2;
};
|
||||
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
namespace {
|
||||
|
||||
using absl::test_internal::CopyableMovableInstance;
|
||||
using absl::test_internal::InstanceTracker;
|
||||
|
||||
// EBO check: empty elements add no size; otherwise layout matches the
// equivalent plain struct.
TEST(CompressedTupleTest, Sizeof) {
  EXPECT_EQ(sizeof(int), sizeof(CompressedTuple<int>));
  EXPECT_EQ(sizeof(int), sizeof(CompressedTuple<int, Empty<0>>));
  EXPECT_EQ(sizeof(int), sizeof(CompressedTuple<int, Empty<0>, Empty<1>>));
  EXPECT_EQ(sizeof(int),
            sizeof(CompressedTuple<int, Empty<0>, Empty<1>, Empty<2>>));

  EXPECT_EQ(sizeof(TwoValues<int, double>),
            sizeof(CompressedTuple<int, NotEmpty<double>>));
  EXPECT_EQ(sizeof(TwoValues<int, double>),
            sizeof(CompressedTuple<int, Empty<0>, NotEmpty<double>>));
  EXPECT_EQ(sizeof(TwoValues<int, double>),
            sizeof(CompressedTuple<int, Empty<0>, NotEmpty<double>, Empty<1>>));
}
|
||||
|
||||
// Constructing from a temporary costs at most one move, never a copy.
TEST(CompressedTupleTest, OneMoveOnRValueConstructionTemp) {
  InstanceTracker tracker;
  CompressedTuple<CopyableMovableInstance> x1(CopyableMovableInstance(1));
  EXPECT_EQ(tracker.instances(), 1);
  EXPECT_EQ(tracker.copies(), 0);
  EXPECT_LE(tracker.moves(), 1);
  EXPECT_EQ(x1.get<0>().value(), 1);
}
|
||||
|
||||
// Constructing from std::move(lvalue) costs at most one move, no copies.
TEST(CompressedTupleTest, OneMoveOnRValueConstructionMove) {
  InstanceTracker tracker;

  CopyableMovableInstance i1(1);
  CompressedTuple<CopyableMovableInstance> x1(std::move(i1));
  EXPECT_EQ(tracker.instances(), 2);
  EXPECT_EQ(tracker.copies(), 0);
  EXPECT_LE(tracker.moves(), 1);
  EXPECT_EQ(x1.get<0>().value(), 1);
}
|
||||
|
||||
// Mixed value/reference/empty elements: only the moved value element incurs
// a move; the reference binds, the empty element is free.
TEST(CompressedTupleTest, OneMoveOnRValueConstructionMixedTypes) {
  InstanceTracker tracker;
  CopyableMovableInstance i1(1);
  CopyableMovableInstance i2(2);
  Empty<0> empty;
  CompressedTuple<CopyableMovableInstance, CopyableMovableInstance&, Empty<0>>
      x1(std::move(i1), i2, empty);
  EXPECT_EQ(x1.get<0>().value(), 1);
  EXPECT_EQ(x1.get<1>().value(), 2);
  EXPECT_EQ(tracker.copies(), 0);
  EXPECT_EQ(tracker.moves(), 1);
}
|
||||
|
||||
// Factory taking a reference to a type that is incomplete at this point —
// verifies CompressedTuple's constructor constraints do not require
// complete types for reference elements.
struct IncompleteType;
CompressedTuple<CopyableMovableInstance, IncompleteType&, Empty<0>>
MakeWithIncomplete(CopyableMovableInstance i1,
                   IncompleteType& t,  // NOLINT
                   Empty<0> empty) {
  return CompressedTuple<CopyableMovableInstance, IncompleteType&, Empty<0>>{
      std::move(i1), t, empty};
}
|
||||
|
||||
// Completes the type forward-declared above, after MakeWithIncomplete.
struct IncompleteType {};
|
||||
// Round-trips through MakeWithIncomplete: two moves total (into the factory
// parameter and into the tuple), zero copies.
TEST(CompressedTupleTest, OneMoveOnRValueConstructionWithIncompleteType) {
  InstanceTracker tracker;
  CopyableMovableInstance i1(1);
  Empty<0> empty;
  struct DerivedType : IncompleteType {int value = 0;};
  DerivedType fd;
  fd.value = 7;

  CompressedTuple<CopyableMovableInstance, IncompleteType&, Empty<0>> x1 =
      MakeWithIncomplete(std::move(i1), fd, empty);

  EXPECT_EQ(x1.get<0>().value(), 1);
  EXPECT_EQ(static_cast<DerivedType&>(x1.get<1>()).value, 7);

  EXPECT_EQ(tracker.copies(), 0);
  EXPECT_EQ(tracker.moves(), 2);
}
|
||||
|
||||
// Passing `{}` forces the const Ts&... constructor (copies), because the
// forwarding constructor cannot deduce a braced-init-list.
TEST(CompressedTupleTest,
     OneMoveOnRValueConstructionMixedTypes_BraceInitPoisonPillExpected) {
  InstanceTracker tracker;
  CopyableMovableInstance i1(1);
  CopyableMovableInstance i2(2);
  CompressedTuple<CopyableMovableInstance, CopyableMovableInstance&, Empty<0>>
      x1(std::move(i1), i2, {});  // NOLINT
  EXPECT_EQ(x1.get<0>().value(), 1);
  EXPECT_EQ(x1.get<1>().value(), 2);
  EXPECT_EQ(tracker.instances(), 3);
  // We are forced into the `const Ts&...` constructor (invoking copies)
  // because we need it to deduce the type of `{}`.
  // std::tuple also has this behavior.
  // Note, this test is proof that this is expected behavior, but it is not
  // _desired_ behavior.
  EXPECT_EQ(tracker.copies(), 1);
  EXPECT_EQ(tracker.moves(), 0);
}
|
||||
|
||||
// Constructing from an lvalue (or const reference) costs exactly one copy.
TEST(CompressedTupleTest, OneCopyOnLValueConstruction) {
  InstanceTracker tracker;
  CopyableMovableInstance i1(1);

  CompressedTuple<CopyableMovableInstance> x1(i1);
  EXPECT_EQ(tracker.copies(), 1);
  EXPECT_EQ(tracker.moves(), 0);

  tracker.ResetCopiesMovesSwaps();

  CopyableMovableInstance i2(2);
  const CopyableMovableInstance& i2_ref = i2;
  CompressedTuple<CopyableMovableInstance> x2(i2_ref);
  EXPECT_EQ(tracker.copies(), 1);
  EXPECT_EQ(tracker.moves(), 0);
}
|
||||
|
||||
// get<I>() on an rvalue tuple yields an rvalue element: extraction moves.
TEST(CompressedTupleTest, OneMoveOnRValueAccess) {
  InstanceTracker tracker;
  CopyableMovableInstance i1(1);
  CompressedTuple<CopyableMovableInstance> x(std::move(i1));
  tracker.ResetCopiesMovesSwaps();

  CopyableMovableInstance i2 = std::move(x).get<0>();
  EXPECT_EQ(tracker.copies(), 0);
  EXPECT_EQ(tracker.moves(), 1);
}
|
||||
|
||||
// get<I>() on an lvalue tuple yields an lvalue element: extraction copies.
TEST(CompressedTupleTest, OneCopyOnLValueAccess) {
  InstanceTracker tracker;

  CompressedTuple<CopyableMovableInstance> x(CopyableMovableInstance(0));
  EXPECT_EQ(tracker.copies(), 0);
  EXPECT_EQ(tracker.moves(), 1);

  CopyableMovableInstance t = x.get<0>();
  EXPECT_EQ(tracker.copies(), 1);
  EXPECT_EQ(tracker.moves(), 1);
}
|
||||
|
||||
// Binding references to get<I>() results performs no copies or moves.
TEST(CompressedTupleTest, ZeroCopyOnRefAccess) {
  InstanceTracker tracker;

  CompressedTuple<CopyableMovableInstance> x(CopyableMovableInstance(0));
  EXPECT_EQ(tracker.copies(), 0);
  EXPECT_EQ(tracker.moves(), 1);

  CopyableMovableInstance& t1 = x.get<0>();
  const CopyableMovableInstance& t2 = x.get<0>();
  EXPECT_EQ(tracker.copies(), 0);
  EXPECT_EQ(tracker.moves(), 1);
  EXPECT_EQ(t1.value(), 0);
  EXPECT_EQ(t2.value(), 0);
}
|
||||
|
||||
// Basic get<I>() access with a mix of trivial, empty, and class elements.
TEST(CompressedTupleTest, Access) {
  struct S {
    std::string x;
  };
  CompressedTuple<int, Empty<0>, S> x(7, {}, S{"ABC"});
  EXPECT_EQ(sizeof(x), sizeof(TwoValues<int, S>));
  EXPECT_EQ(7, x.get<0>());
  EXPECT_EQ("ABC", x.get<2>().x);
}
|
||||
|
||||
// Non-class element types (int, pointer) are stored as members and work.
TEST(CompressedTupleTest, NonClasses) {
  CompressedTuple<int, const char*> x(7, "ABC");
  EXPECT_EQ(7, x.get<0>());
  EXPECT_STREQ("ABC", x.get<1>());
}
|
||||
|
||||
// Mixing class/non-class/empty elements compresses to the equivalent plain
// struct layout.
TEST(CompressedTupleTest, MixClassAndNonClass) {
  CompressedTuple<int, const char*, Empty<0>, NotEmpty<double>> x(7, "ABC", {},
                                                                  {1.25});
  struct Mock {
    int v;
    const char* p;
    double d;
  };
  EXPECT_EQ(sizeof(x), sizeof(Mock));
  EXPECT_EQ(7, x.get<0>());
  EXPECT_STREQ("ABC", x.get<1>());
  EXPECT_EQ(1.25, x.get<3>().value);
}
|
||||
|
||||
// Nested CompressedTuples: access works at every depth; empty elements keep
// distinct addresses (no EBO across nested tuples — see uses_inheritance),
// and fully-empty nestings remain empty types.
TEST(CompressedTupleTest, Nested) {
  CompressedTuple<int, CompressedTuple<int>,
                  CompressedTuple<int, CompressedTuple<int>>>
      x(1, CompressedTuple<int>(2),
        CompressedTuple<int, CompressedTuple<int>>(3, CompressedTuple<int>(4)));
  EXPECT_EQ(1, x.get<0>());
  EXPECT_EQ(2, x.get<1>().get<0>());
  EXPECT_EQ(3, x.get<2>().get<0>());
  EXPECT_EQ(4, x.get<2>().get<1>().get<0>());

  CompressedTuple<Empty<0>, Empty<0>,
                  CompressedTuple<Empty<0>, CompressedTuple<Empty<0>>>>
      y;
  std::set<Empty<0>*> empties{&y.get<0>(), &y.get<1>(), &y.get<2>().get<0>(),
                              &y.get<2>().get<1>().get<0>()};
#ifdef _MSC_VER
  // MSVC has a bug where many instances of the same base class are layed out in
  // the same address when using __declspec(empty_bases).
  // This will be fixed in a future version of MSVC.
  int expected = 1;
#else
  int expected = 4;
#endif
  EXPECT_EQ(expected, sizeof(y));
  EXPECT_EQ(expected, empties.size());
  EXPECT_EQ(sizeof(y), sizeof(Empty<0>) * empties.size());

  EXPECT_EQ(4 * sizeof(char),
            sizeof(CompressedTuple<CompressedTuple<char, char>,
                                   CompressedTuple<char, char>>));
  EXPECT_TRUE((std::is_empty<CompressedTuple<Empty<0>, Empty<1>>>::value));

  // Make sure everything still works when things are nested.
  struct CT_Empty : CompressedTuple<Empty<0>> {};
  CompressedTuple<Empty<0>, CT_Empty> nested_empty;
  auto contained = nested_empty.get<0>();
  auto nested = nested_empty.get<1>().get<0>();
  EXPECT_TRUE((std::is_same<decltype(contained), decltype(nested)>::value));
}
|
||||
|
||||
// Reference elements alias the original object; value elements are
// independent copies.
TEST(CompressedTupleTest, Reference) {
  int i = 7;
  std::string s = "Very long string that goes in the heap";
  CompressedTuple<int, int&, std::string, std::string&> x(i, i, s, s);

  // Sanity check. We should have not moved from `s`
  EXPECT_EQ(s, "Very long string that goes in the heap");

  EXPECT_EQ(x.get<0>(), x.get<1>());
  EXPECT_NE(&x.get<0>(), &x.get<1>());
  EXPECT_EQ(&x.get<1>(), &i);

  EXPECT_EQ(x.get<2>(), x.get<3>());
  EXPECT_NE(&x.get<2>(), &x.get<3>());
  EXPECT_EQ(&x.get<3>(), &s);
}
|
||||
|
||||
// The zero-element specialization exists and is an empty type.
TEST(CompressedTupleTest, NoElements) {
  CompressedTuple<> x;
  static_cast<void>(x);  // Silence -Wunused-variable.
  EXPECT_TRUE(std::is_empty<CompressedTuple<>>::value);
}
|
||||
|
||||
// Move-only element types (unique_ptr) can be stored, accessed, and
// extracted via rvalue get<I>().
TEST(CompressedTupleTest, MoveOnlyElements) {
  CompressedTuple<std::unique_ptr<std::string>> str_tup(
      absl::make_unique<std::string>("str"));

  CompressedTuple<CompressedTuple<std::unique_ptr<std::string>>,
                  std::unique_ptr<int>>
      x(std::move(str_tup), absl::make_unique<int>(5));

  EXPECT_EQ(*x.get<0>().get<0>(), "str");
  EXPECT_EQ(*x.get<1>(), 5);

  std::unique_ptr<std::string> x0 = std::move(x.get<0>()).get<0>();
  std::unique_ptr<int> x1 = std::move(x).get<1>();

  EXPECT_EQ(*x0, "str");
  EXPECT_EQ(*x1, 5);
}
|
||||
|
||||
// The implicit move constructor works for tuples of move-only elements.
TEST(CompressedTupleTest, MoveConstructionMoveOnlyElements) {
  CompressedTuple<std::unique_ptr<std::string>> base(
      absl::make_unique<std::string>("str"));
  EXPECT_EQ(*base.get<0>(), "str");

  CompressedTuple<std::unique_ptr<std::string>> copy(std::move(base));
  EXPECT_EQ(*copy.get<0>(), "str");
}
|
||||
|
||||
// absl::any works as element type, by value and by reference.
TEST(CompressedTupleTest, AnyElements) {
  any a(std::string("str"));
  CompressedTuple<any, any&> x(any(5), a);
  EXPECT_EQ(absl::any_cast<int>(x.get<0>()), 5);
  EXPECT_EQ(absl::any_cast<std::string>(x.get<1>()), "str");

  a = 0.5f;
  EXPECT_EQ(absl::any_cast<float>(x.get<1>()), 0.5);
}
|
||||
|
||||
// Construction and all four ref-qualified get<I>() overloads are usable in
// constant expressions, for trivial and non-trivial element types.
TEST(CompressedTupleTest, Constexpr) {
  struct NonTrivialStruct {
    constexpr NonTrivialStruct() = default;
    constexpr int value() const { return v; }
    int v = 5;
  };
  struct TrivialStruct {
    TrivialStruct() = default;
    constexpr int value() const { return v; }
    int v;
  };
  constexpr CompressedTuple<int, double, CompressedTuple<int>, Empty<0>> x(
      7, 1.25, CompressedTuple<int>(5), {});
  constexpr int x0 = x.get<0>();
  constexpr double x1 = x.get<1>();
  constexpr int x2 = x.get<2>().get<0>();
  constexpr CallType x3 = x.get<3>().value();

  EXPECT_EQ(x0, 7);
  EXPECT_EQ(x1, 1.25);
  EXPECT_EQ(x2, 5);
  EXPECT_EQ(x3, CallType::kConstRef);

#if !defined(__GNUC__) || defined(__clang__) || __GNUC__ > 4
  constexpr CompressedTuple<Empty<0>, TrivialStruct, int> trivial = {};
  constexpr CallType trivial0 = trivial.get<0>().value();
  constexpr int trivial1 = trivial.get<1>().value();
  constexpr int trivial2 = trivial.get<2>();

  EXPECT_EQ(trivial0, CallType::kConstRef);
  EXPECT_EQ(trivial1, 0);
  EXPECT_EQ(trivial2, 0);
#endif

  constexpr CompressedTuple<Empty<0>, NonTrivialStruct, absl::optional<int>>
      non_trivial = {};
  constexpr CallType non_trivial0 = non_trivial.get<0>().value();
  constexpr int non_trivial1 = non_trivial.get<1>().value();
  constexpr absl::optional<int> non_trivial2 = non_trivial.get<2>();

  EXPECT_EQ(non_trivial0, CallType::kConstRef);
  EXPECT_EQ(non_trivial1, 5);
  EXPECT_EQ(non_trivial2, absl::nullopt);

  static constexpr char data[] = "DEF";
  constexpr CompressedTuple<const char*> z(data);
  constexpr const char* z1 = z.get<0>();
  EXPECT_EQ(std::string(z1), std::string(data));

#if defined(__clang__)
  // An apparent bug in earlier versions of gcc claims these are ambiguous.
  constexpr int x2m = absl::move(x.get<2>()).get<0>();
  constexpr CallType x3m = absl::move(x).get<3>().value();
  EXPECT_EQ(x2m, 5);
  EXPECT_EQ(x3m, CallType::kConstMove);
#endif
}
|
||||
|
||||
// On compilers with the __is_final intrinsic, an empty final class is
// detected and stored as a member (it cannot be an EBO base).
#if defined(__clang__) || defined(__GNUC__)
TEST(CompressedTupleTest, EmptyFinalClass) {
  struct S final {
    int f() const { return 5; }
  };
  CompressedTuple<S> x;
  EXPECT_EQ(x.get<0>().f(), 5);
}
#endif
|
||||
|
||||
} // namespace
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
445
third_party/abseil_cpp/absl/container/internal/container_memory.h
vendored
Normal file
445
third_party/abseil_cpp/absl/container/internal/container_memory.h
vendored
Normal file
|
|
@ -0,0 +1,445 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_CONTAINER_MEMORY_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_CONTAINER_MEMORY_H_
|
||||
|
||||
#ifdef ADDRESS_SANITIZER
|
||||
#include <sanitizer/asan_interface.h>
|
||||
#endif
|
||||
|
||||
#ifdef MEMORY_SANITIZER
|
||||
#include <sanitizer/msan_interface.h>
|
||||
#endif
|
||||
|
||||
#include <cassert>
|
||||
#include <cstddef>
|
||||
#include <memory>
|
||||
#include <tuple>
|
||||
#include <type_traits>
|
||||
#include <utility>
|
||||
|
||||
#include "absl/memory/memory.h"
|
||||
#include "absl/meta/type_traits.h"
|
||||
#include "absl/utility/utility.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
// Empty type whose only purpose is carrying an alignment; used by
// Allocate/Deallocate below to rebind the allocator to aligned chunks.
template <size_t Alignment>
struct alignas(Alignment) AlignedType {};
|
||||
|
||||
// Allocates at least n bytes aligned to the specified alignment.
// Alignment must be a power of 2. It must be positive.
//
// Note that many allocators don't honor alignment requirements above certain
// threshold (usually either alignof(std::max_align_t) or alignof(void*)).
// Allocate() doesn't apply alignment corrections. If the underlying allocator
// returns insufficiently alignment pointer, that's what you are going to get.
template <size_t Alignment, class Alloc>
void* Allocate(Alloc* alloc, size_t n) {
  static_assert(Alignment > 0, "");
  assert(n && "n must be positive");
  // Rebind to an aligned chunk type and allocate ceil(n / sizeof(M)) chunks.
  using M = AlignedType<Alignment>;
  using A = typename absl::allocator_traits<Alloc>::template rebind_alloc<M>;
  using AT = typename absl::allocator_traits<Alloc>::template rebind_traits<M>;
  A mem_alloc(*alloc);
  void* p = AT::allocate(mem_alloc, (n + sizeof(M) - 1) / sizeof(M));
  assert(reinterpret_cast<uintptr_t>(p) % Alignment == 0 &&
         "allocator does not respect alignment");
  return p;
}
|
||||
|
||||
// The pointer must have been previously obtained by calling
|
||||
// Allocate<Alignment>(alloc, n).
|
||||
template <size_t Alignment, class Alloc>
|
||||
void Deallocate(Alloc* alloc, void* p, size_t n) {
|
||||
static_assert(Alignment > 0, "");
|
||||
assert(n && "n must be positive");
|
||||
using M = AlignedType<Alignment>;
|
||||
using A = typename absl::allocator_traits<Alloc>::template rebind_alloc<M>;
|
||||
using AT = typename absl::allocator_traits<Alloc>::template rebind_traits<M>;
|
||||
A mem_alloc(*alloc);
|
||||
AT::deallocate(mem_alloc, static_cast<M*>(p),
|
||||
(n + sizeof(M) - 1) / sizeof(M));
|
||||
}
|
||||
|
||||
namespace memory_internal {
|
||||
|
||||
// Constructs T into uninitialized storage pointed by `ptr` using the args
|
||||
// specified in the tuple.
|
||||
template <class Alloc, class T, class Tuple, size_t... I>
|
||||
void ConstructFromTupleImpl(Alloc* alloc, T* ptr, Tuple&& t,
|
||||
absl::index_sequence<I...>) {
|
||||
absl::allocator_traits<Alloc>::construct(
|
||||
*alloc, ptr, std::get<I>(std::forward<Tuple>(t))...);
|
||||
}
|
||||
|
||||
template <class T, class F>
|
||||
struct WithConstructedImplF {
|
||||
template <class... Args>
|
||||
decltype(std::declval<F>()(std::declval<T>())) operator()(
|
||||
Args&&... args) const {
|
||||
return std::forward<F>(f)(T(std::forward<Args>(args)...));
|
||||
}
|
||||
F&& f;
|
||||
};
|
||||
|
||||
template <class T, class Tuple, size_t... Is, class F>
|
||||
decltype(std::declval<F>()(std::declval<T>())) WithConstructedImpl(
|
||||
Tuple&& t, absl::index_sequence<Is...>, F&& f) {
|
||||
return WithConstructedImplF<T, F>{std::forward<F>(f)}(
|
||||
std::get<Is>(std::forward<Tuple>(t))...);
|
||||
}
|
||||
|
||||
template <class T, size_t... Is>
|
||||
auto TupleRefImpl(T&& t, absl::index_sequence<Is...>)
|
||||
-> decltype(std::forward_as_tuple(std::get<Is>(std::forward<T>(t))...)) {
|
||||
return std::forward_as_tuple(std::get<Is>(std::forward<T>(t))...);
|
||||
}
|
||||
|
||||
// Returns a tuple of references to the elements of the input tuple. T must be a
|
||||
// tuple.
|
||||
template <class T>
|
||||
auto TupleRef(T&& t) -> decltype(
|
||||
TupleRefImpl(std::forward<T>(t),
|
||||
absl::make_index_sequence<
|
||||
std::tuple_size<typename std::decay<T>::type>::value>())) {
|
||||
return TupleRefImpl(
|
||||
std::forward<T>(t),
|
||||
absl::make_index_sequence<
|
||||
std::tuple_size<typename std::decay<T>::type>::value>());
|
||||
}
|
||||
|
||||
template <class F, class K, class V>
|
||||
decltype(std::declval<F>()(std::declval<const K&>(), std::piecewise_construct,
|
||||
std::declval<std::tuple<K>>(), std::declval<V>()))
|
||||
DecomposePairImpl(F&& f, std::pair<std::tuple<K>, V> p) {
|
||||
const auto& key = std::get<0>(p.first);
|
||||
return std::forward<F>(f)(key, std::piecewise_construct, std::move(p.first),
|
||||
std::move(p.second));
|
||||
}
|
||||
|
||||
} // namespace memory_internal
|
||||
|
||||
// Constructs T into uninitialized storage pointed by `ptr` using the args
|
||||
// specified in the tuple.
|
||||
template <class Alloc, class T, class Tuple>
|
||||
void ConstructFromTuple(Alloc* alloc, T* ptr, Tuple&& t) {
|
||||
memory_internal::ConstructFromTupleImpl(
|
||||
alloc, ptr, std::forward<Tuple>(t),
|
||||
absl::make_index_sequence<
|
||||
std::tuple_size<typename std::decay<Tuple>::type>::value>());
|
||||
}
|
||||
|
||||
// Constructs T using the args specified in the tuple and calls F with the
|
||||
// constructed value.
|
||||
template <class T, class Tuple, class F>
|
||||
decltype(std::declval<F>()(std::declval<T>())) WithConstructed(
|
||||
Tuple&& t, F&& f) {
|
||||
return memory_internal::WithConstructedImpl<T>(
|
||||
std::forward<Tuple>(t),
|
||||
absl::make_index_sequence<
|
||||
std::tuple_size<typename std::decay<Tuple>::type>::value>(),
|
||||
std::forward<F>(f));
|
||||
}
|
||||
|
||||
// Given arguments of an std::pair's consructor, PairArgs() returns a pair of
|
||||
// tuples with references to the passed arguments. The tuples contain
|
||||
// constructor arguments for the first and the second elements of the pair.
|
||||
//
|
||||
// The following two snippets are equivalent.
|
||||
//
|
||||
// 1. std::pair<F, S> p(args...);
|
||||
//
|
||||
// 2. auto a = PairArgs(args...);
|
||||
// std::pair<F, S> p(std::piecewise_construct,
|
||||
// std::move(p.first), std::move(p.second));
|
||||
inline std::pair<std::tuple<>, std::tuple<>> PairArgs() { return {}; }
|
||||
template <class F, class S>
|
||||
std::pair<std::tuple<F&&>, std::tuple<S&&>> PairArgs(F&& f, S&& s) {
|
||||
return {std::piecewise_construct, std::forward_as_tuple(std::forward<F>(f)),
|
||||
std::forward_as_tuple(std::forward<S>(s))};
|
||||
}
|
||||
template <class F, class S>
|
||||
std::pair<std::tuple<const F&>, std::tuple<const S&>> PairArgs(
|
||||
const std::pair<F, S>& p) {
|
||||
return PairArgs(p.first, p.second);
|
||||
}
|
||||
template <class F, class S>
|
||||
std::pair<std::tuple<F&&>, std::tuple<S&&>> PairArgs(std::pair<F, S>&& p) {
|
||||
return PairArgs(std::forward<F>(p.first), std::forward<S>(p.second));
|
||||
}
|
||||
template <class F, class S>
|
||||
auto PairArgs(std::piecewise_construct_t, F&& f, S&& s)
|
||||
-> decltype(std::make_pair(memory_internal::TupleRef(std::forward<F>(f)),
|
||||
memory_internal::TupleRef(std::forward<S>(s)))) {
|
||||
return std::make_pair(memory_internal::TupleRef(std::forward<F>(f)),
|
||||
memory_internal::TupleRef(std::forward<S>(s)));
|
||||
}
|
||||
|
||||
// A helper function for implementing apply() in map policies.
|
||||
template <class F, class... Args>
|
||||
auto DecomposePair(F&& f, Args&&... args)
|
||||
-> decltype(memory_internal::DecomposePairImpl(
|
||||
std::forward<F>(f), PairArgs(std::forward<Args>(args)...))) {
|
||||
return memory_internal::DecomposePairImpl(
|
||||
std::forward<F>(f), PairArgs(std::forward<Args>(args)...));
|
||||
}
|
||||
|
||||
// A helper function for implementing apply() in set policies.
|
||||
template <class F, class Arg>
|
||||
decltype(std::declval<F>()(std::declval<const Arg&>(), std::declval<Arg>()))
|
||||
DecomposeValue(F&& f, Arg&& arg) {
|
||||
const auto& key = arg;
|
||||
return std::forward<F>(f)(key, std::forward<Arg>(arg));
|
||||
}
|
||||
|
||||
// Helper functions for asan and msan.
|
||||
inline void SanitizerPoisonMemoryRegion(const void* m, size_t s) {
|
||||
#ifdef ADDRESS_SANITIZER
|
||||
ASAN_POISON_MEMORY_REGION(m, s);
|
||||
#endif
|
||||
#ifdef MEMORY_SANITIZER
|
||||
__msan_poison(m, s);
|
||||
#endif
|
||||
(void)m;
|
||||
(void)s;
|
||||
}
|
||||
|
||||
inline void SanitizerUnpoisonMemoryRegion(const void* m, size_t s) {
|
||||
#ifdef ADDRESS_SANITIZER
|
||||
ASAN_UNPOISON_MEMORY_REGION(m, s);
|
||||
#endif
|
||||
#ifdef MEMORY_SANITIZER
|
||||
__msan_unpoison(m, s);
|
||||
#endif
|
||||
(void)m;
|
||||
(void)s;
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
inline void SanitizerPoisonObject(const T* object) {
|
||||
SanitizerPoisonMemoryRegion(object, sizeof(T));
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
inline void SanitizerUnpoisonObject(const T* object) {
|
||||
SanitizerUnpoisonMemoryRegion(object, sizeof(T));
|
||||
}
|
||||
|
||||
namespace memory_internal {
|
||||
|
||||
// If Pair is a standard-layout type, OffsetOf<Pair>::kFirst and
|
||||
// OffsetOf<Pair>::kSecond are equivalent to offsetof(Pair, first) and
|
||||
// offsetof(Pair, second) respectively. Otherwise they are -1.
|
||||
//
|
||||
// The purpose of OffsetOf is to avoid calling offsetof() on non-standard-layout
|
||||
// type, which is non-portable.
|
||||
template <class Pair, class = std::true_type>
|
||||
struct OffsetOf {
|
||||
static constexpr size_t kFirst = static_cast<size_t>(-1);
|
||||
static constexpr size_t kSecond = static_cast<size_t>(-1);
|
||||
};
|
||||
|
||||
template <class Pair>
|
||||
struct OffsetOf<Pair, typename std::is_standard_layout<Pair>::type> {
|
||||
static constexpr size_t kFirst = offsetof(Pair, first);
|
||||
static constexpr size_t kSecond = offsetof(Pair, second);
|
||||
};
|
||||
|
||||
template <class K, class V>
|
||||
struct IsLayoutCompatible {
|
||||
private:
|
||||
struct Pair {
|
||||
K first;
|
||||
V second;
|
||||
};
|
||||
|
||||
// Is P layout-compatible with Pair?
|
||||
template <class P>
|
||||
static constexpr bool LayoutCompatible() {
|
||||
return std::is_standard_layout<P>() && sizeof(P) == sizeof(Pair) &&
|
||||
alignof(P) == alignof(Pair) &&
|
||||
memory_internal::OffsetOf<P>::kFirst ==
|
||||
memory_internal::OffsetOf<Pair>::kFirst &&
|
||||
memory_internal::OffsetOf<P>::kSecond ==
|
||||
memory_internal::OffsetOf<Pair>::kSecond;
|
||||
}
|
||||
|
||||
public:
|
||||
// Whether pair<const K, V> and pair<K, V> are layout-compatible. If they are,
|
||||
// then it is safe to store them in a union and read from either.
|
||||
static constexpr bool value = std::is_standard_layout<K>() &&
|
||||
std::is_standard_layout<Pair>() &&
|
||||
memory_internal::OffsetOf<Pair>::kFirst == 0 &&
|
||||
LayoutCompatible<std::pair<K, V>>() &&
|
||||
LayoutCompatible<std::pair<const K, V>>();
|
||||
};
|
||||
|
||||
} // namespace memory_internal
|
||||
|
||||
// The internal storage type for key-value containers like flat_hash_map.
|
||||
//
|
||||
// It is convenient for the value_type of a flat_hash_map<K, V> to be
|
||||
// pair<const K, V>; the "const K" prevents accidental modification of the key
|
||||
// when dealing with the reference returned from find() and similar methods.
|
||||
// However, this creates other problems; we want to be able to emplace(K, V)
|
||||
// efficiently with move operations, and similarly be able to move a
|
||||
// pair<K, V> in insert().
|
||||
//
|
||||
// The solution is this union, which aliases the const and non-const versions
|
||||
// of the pair. This also allows flat_hash_map<const K, V> to work, even though
|
||||
// that has the same efficiency issues with move in emplace() and insert() -
|
||||
// but people do it anyway.
|
||||
//
|
||||
// If kMutableKeys is false, only the value member can be accessed.
|
||||
//
|
||||
// If kMutableKeys is true, key can be accessed through all slots while value
|
||||
// and mutable_value must be accessed only via INITIALIZED slots. Slots are
|
||||
// created and destroyed via mutable_value so that the key can be moved later.
|
||||
//
|
||||
// Accessing one of the union fields while the other is active is safe as
|
||||
// long as they are layout-compatible, which is guaranteed by the definition of
|
||||
// kMutableKeys. For C++11, the relevant section of the standard is
|
||||
// https://timsong-cpp.github.io/cppwp/n3337/class.mem#19 (9.2.19)
|
||||
template <class K, class V>
|
||||
union map_slot_type {
|
||||
map_slot_type() {}
|
||||
~map_slot_type() = delete;
|
||||
using value_type = std::pair<const K, V>;
|
||||
using mutable_value_type =
|
||||
std::pair<absl::remove_const_t<K>, absl::remove_const_t<V>>;
|
||||
|
||||
value_type value;
|
||||
mutable_value_type mutable_value;
|
||||
absl::remove_const_t<K> key;
|
||||
};
|
||||
|
||||
template <class K, class V>
|
||||
struct map_slot_policy {
|
||||
using slot_type = map_slot_type<K, V>;
|
||||
using value_type = std::pair<const K, V>;
|
||||
using mutable_value_type = std::pair<K, V>;
|
||||
|
||||
private:
|
||||
static void emplace(slot_type* slot) {
|
||||
// The construction of union doesn't do anything at runtime but it allows us
|
||||
// to access its members without violating aliasing rules.
|
||||
new (slot) slot_type;
|
||||
}
|
||||
// If pair<const K, V> and pair<K, V> are layout-compatible, we can accept one
|
||||
// or the other via slot_type. We are also free to access the key via
|
||||
// slot_type::key in this case.
|
||||
using kMutableKeys = memory_internal::IsLayoutCompatible<K, V>;
|
||||
|
||||
public:
|
||||
static value_type& element(slot_type* slot) { return slot->value; }
|
||||
static const value_type& element(const slot_type* slot) {
|
||||
return slot->value;
|
||||
}
|
||||
|
||||
static const K& key(const slot_type* slot) {
|
||||
return kMutableKeys::value ? slot->key : slot->value.first;
|
||||
}
|
||||
|
||||
template <class Allocator, class... Args>
|
||||
static void construct(Allocator* alloc, slot_type* slot, Args&&... args) {
|
||||
emplace(slot);
|
||||
if (kMutableKeys::value) {
|
||||
absl::allocator_traits<Allocator>::construct(*alloc, &slot->mutable_value,
|
||||
std::forward<Args>(args)...);
|
||||
} else {
|
||||
absl::allocator_traits<Allocator>::construct(*alloc, &slot->value,
|
||||
std::forward<Args>(args)...);
|
||||
}
|
||||
}
|
||||
|
||||
// Construct this slot by moving from another slot.
|
||||
template <class Allocator>
|
||||
static void construct(Allocator* alloc, slot_type* slot, slot_type* other) {
|
||||
emplace(slot);
|
||||
if (kMutableKeys::value) {
|
||||
absl::allocator_traits<Allocator>::construct(
|
||||
*alloc, &slot->mutable_value, std::move(other->mutable_value));
|
||||
} else {
|
||||
absl::allocator_traits<Allocator>::construct(*alloc, &slot->value,
|
||||
std::move(other->value));
|
||||
}
|
||||
}
|
||||
|
||||
template <class Allocator>
|
||||
static void destroy(Allocator* alloc, slot_type* slot) {
|
||||
if (kMutableKeys::value) {
|
||||
absl::allocator_traits<Allocator>::destroy(*alloc, &slot->mutable_value);
|
||||
} else {
|
||||
absl::allocator_traits<Allocator>::destroy(*alloc, &slot->value);
|
||||
}
|
||||
}
|
||||
|
||||
template <class Allocator>
|
||||
static void transfer(Allocator* alloc, slot_type* new_slot,
|
||||
slot_type* old_slot) {
|
||||
emplace(new_slot);
|
||||
if (kMutableKeys::value) {
|
||||
absl::allocator_traits<Allocator>::construct(
|
||||
*alloc, &new_slot->mutable_value, std::move(old_slot->mutable_value));
|
||||
} else {
|
||||
absl::allocator_traits<Allocator>::construct(*alloc, &new_slot->value,
|
||||
std::move(old_slot->value));
|
||||
}
|
||||
destroy(alloc, old_slot);
|
||||
}
|
||||
|
||||
template <class Allocator>
|
||||
static void swap(Allocator* alloc, slot_type* a, slot_type* b) {
|
||||
if (kMutableKeys::value) {
|
||||
using std::swap;
|
||||
swap(a->mutable_value, b->mutable_value);
|
||||
} else {
|
||||
value_type tmp = std::move(a->value);
|
||||
absl::allocator_traits<Allocator>::destroy(*alloc, &a->value);
|
||||
absl::allocator_traits<Allocator>::construct(*alloc, &a->value,
|
||||
std::move(b->value));
|
||||
absl::allocator_traits<Allocator>::destroy(*alloc, &b->value);
|
||||
absl::allocator_traits<Allocator>::construct(*alloc, &b->value,
|
||||
std::move(tmp));
|
||||
}
|
||||
}
|
||||
|
||||
template <class Allocator>
|
||||
static void move(Allocator* alloc, slot_type* src, slot_type* dest) {
|
||||
if (kMutableKeys::value) {
|
||||
dest->mutable_value = std::move(src->mutable_value);
|
||||
} else {
|
||||
absl::allocator_traits<Allocator>::destroy(*alloc, &dest->value);
|
||||
absl::allocator_traits<Allocator>::construct(*alloc, &dest->value,
|
||||
std::move(src->value));
|
||||
}
|
||||
}
|
||||
|
||||
template <class Allocator>
|
||||
static void move(Allocator* alloc, slot_type* first, slot_type* last,
|
||||
slot_type* result) {
|
||||
for (slot_type *src = first, *dest = result; src != last; ++src, ++dest)
|
||||
move(alloc, src, dest);
|
||||
}
|
||||
};
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_CONTAINER_MEMORY_H_
|
||||
256
third_party/abseil_cpp/absl/container/internal/container_memory_test.cc
vendored
Normal file
256
third_party/abseil_cpp/absl/container/internal/container_memory_test.cc
vendored
Normal file
|
|
@ -0,0 +1,256 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "absl/container/internal/container_memory.h"
|
||||
|
||||
#include <cstdint>
|
||||
#include <tuple>
|
||||
#include <typeindex>
|
||||
#include <typeinfo>
|
||||
#include <utility>
|
||||
|
||||
#include "gmock/gmock.h"
|
||||
#include "gtest/gtest.h"
|
||||
#include "absl/container/internal/test_instance_tracker.h"
|
||||
#include "absl/strings/string_view.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
namespace {
|
||||
|
||||
using ::absl::test_internal::CopyableMovableInstance;
|
||||
using ::absl::test_internal::InstanceTracker;
|
||||
using ::testing::_;
|
||||
using ::testing::ElementsAre;
|
||||
using ::testing::Gt;
|
||||
using ::testing::Pair;
|
||||
|
||||
TEST(Memory, AlignmentLargerThanBase) {
|
||||
std::allocator<int8_t> alloc;
|
||||
void* mem = Allocate<2>(&alloc, 3);
|
||||
EXPECT_EQ(0, reinterpret_cast<uintptr_t>(mem) % 2);
|
||||
memcpy(mem, "abc", 3);
|
||||
Deallocate<2>(&alloc, mem, 3);
|
||||
}
|
||||
|
||||
TEST(Memory, AlignmentSmallerThanBase) {
|
||||
std::allocator<int64_t> alloc;
|
||||
void* mem = Allocate<2>(&alloc, 3);
|
||||
EXPECT_EQ(0, reinterpret_cast<uintptr_t>(mem) % 2);
|
||||
memcpy(mem, "abc", 3);
|
||||
Deallocate<2>(&alloc, mem, 3);
|
||||
}
|
||||
|
||||
std::map<std::type_index, int>& AllocationMap() {
|
||||
static auto* map = new std::map<std::type_index, int>;
|
||||
return *map;
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
struct TypeCountingAllocator {
|
||||
TypeCountingAllocator() = default;
|
||||
template <typename U>
|
||||
TypeCountingAllocator(const TypeCountingAllocator<U>&) {} // NOLINT
|
||||
|
||||
using value_type = T;
|
||||
|
||||
T* allocate(size_t n, const void* = nullptr) {
|
||||
AllocationMap()[typeid(T)] += n;
|
||||
return std::allocator<T>().allocate(n);
|
||||
}
|
||||
void deallocate(T* p, std::size_t n) {
|
||||
AllocationMap()[typeid(T)] -= n;
|
||||
return std::allocator<T>().deallocate(p, n);
|
||||
}
|
||||
};
|
||||
|
||||
TEST(Memory, AllocateDeallocateMatchType) {
|
||||
TypeCountingAllocator<int> alloc;
|
||||
void* mem = Allocate<1>(&alloc, 1);
|
||||
// Verify that it was allocated
|
||||
EXPECT_THAT(AllocationMap(), ElementsAre(Pair(_, Gt(0))));
|
||||
Deallocate<1>(&alloc, mem, 1);
|
||||
// Verify that the deallocation matched.
|
||||
EXPECT_THAT(AllocationMap(), ElementsAre(Pair(_, 0)));
|
||||
}
|
||||
|
||||
class Fixture : public ::testing::Test {
|
||||
using Alloc = std::allocator<std::string>;
|
||||
|
||||
public:
|
||||
Fixture() { ptr_ = std::allocator_traits<Alloc>::allocate(*alloc(), 1); }
|
||||
~Fixture() override {
|
||||
std::allocator_traits<Alloc>::destroy(*alloc(), ptr_);
|
||||
std::allocator_traits<Alloc>::deallocate(*alloc(), ptr_, 1);
|
||||
}
|
||||
std::string* ptr() { return ptr_; }
|
||||
Alloc* alloc() { return &alloc_; }
|
||||
|
||||
private:
|
||||
Alloc alloc_;
|
||||
std::string* ptr_;
|
||||
};
|
||||
|
||||
TEST_F(Fixture, ConstructNoArgs) {
|
||||
ConstructFromTuple(alloc(), ptr(), std::forward_as_tuple());
|
||||
EXPECT_EQ(*ptr(), "");
|
||||
}
|
||||
|
||||
TEST_F(Fixture, ConstructOneArg) {
|
||||
ConstructFromTuple(alloc(), ptr(), std::forward_as_tuple("abcde"));
|
||||
EXPECT_EQ(*ptr(), "abcde");
|
||||
}
|
||||
|
||||
TEST_F(Fixture, ConstructTwoArg) {
|
||||
ConstructFromTuple(alloc(), ptr(), std::forward_as_tuple(5, 'a'));
|
||||
EXPECT_EQ(*ptr(), "aaaaa");
|
||||
}
|
||||
|
||||
TEST(PairArgs, NoArgs) {
|
||||
EXPECT_THAT(PairArgs(),
|
||||
Pair(std::forward_as_tuple(), std::forward_as_tuple()));
|
||||
}
|
||||
|
||||
TEST(PairArgs, TwoArgs) {
|
||||
EXPECT_EQ(
|
||||
std::make_pair(std::forward_as_tuple(1), std::forward_as_tuple('A')),
|
||||
PairArgs(1, 'A'));
|
||||
}
|
||||
|
||||
TEST(PairArgs, Pair) {
|
||||
EXPECT_EQ(
|
||||
std::make_pair(std::forward_as_tuple(1), std::forward_as_tuple('A')),
|
||||
PairArgs(std::make_pair(1, 'A')));
|
||||
}
|
||||
|
||||
TEST(PairArgs, Piecewise) {
|
||||
EXPECT_EQ(
|
||||
std::make_pair(std::forward_as_tuple(1), std::forward_as_tuple('A')),
|
||||
PairArgs(std::piecewise_construct, std::forward_as_tuple(1),
|
||||
std::forward_as_tuple('A')));
|
||||
}
|
||||
|
||||
TEST(WithConstructed, Simple) {
|
||||
EXPECT_EQ(1, WithConstructed<absl::string_view>(
|
||||
std::make_tuple(std::string("a")),
|
||||
[](absl::string_view str) { return str.size(); }));
|
||||
}
|
||||
|
||||
template <class F, class Arg>
|
||||
decltype(DecomposeValue(std::declval<F>(), std::declval<Arg>()))
|
||||
DecomposeValueImpl(int, F&& f, Arg&& arg) {
|
||||
return DecomposeValue(std::forward<F>(f), std::forward<Arg>(arg));
|
||||
}
|
||||
|
||||
template <class F, class Arg>
|
||||
const char* DecomposeValueImpl(char, F&& f, Arg&& arg) {
|
||||
return "not decomposable";
|
||||
}
|
||||
|
||||
template <class F, class Arg>
|
||||
decltype(DecomposeValueImpl(0, std::declval<F>(), std::declval<Arg>()))
|
||||
TryDecomposeValue(F&& f, Arg&& arg) {
|
||||
return DecomposeValueImpl(0, std::forward<F>(f), std::forward<Arg>(arg));
|
||||
}
|
||||
|
||||
TEST(DecomposeValue, Decomposable) {
|
||||
auto f = [](const int& x, int&& y) {
|
||||
EXPECT_EQ(&x, &y);
|
||||
EXPECT_EQ(42, x);
|
||||
return 'A';
|
||||
};
|
||||
EXPECT_EQ('A', TryDecomposeValue(f, 42));
|
||||
}
|
||||
|
||||
TEST(DecomposeValue, NotDecomposable) {
|
||||
auto f = [](void*) {
|
||||
ADD_FAILURE() << "Must not be called";
|
||||
return 'A';
|
||||
};
|
||||
EXPECT_STREQ("not decomposable", TryDecomposeValue(f, 42));
|
||||
}
|
||||
|
||||
template <class F, class... Args>
|
||||
decltype(DecomposePair(std::declval<F>(), std::declval<Args>()...))
|
||||
DecomposePairImpl(int, F&& f, Args&&... args) {
|
||||
return DecomposePair(std::forward<F>(f), std::forward<Args>(args)...);
|
||||
}
|
||||
|
||||
template <class F, class... Args>
|
||||
const char* DecomposePairImpl(char, F&& f, Args&&... args) {
|
||||
return "not decomposable";
|
||||
}
|
||||
|
||||
template <class F, class... Args>
|
||||
decltype(DecomposePairImpl(0, std::declval<F>(), std::declval<Args>()...))
|
||||
TryDecomposePair(F&& f, Args&&... args) {
|
||||
return DecomposePairImpl(0, std::forward<F>(f), std::forward<Args>(args)...);
|
||||
}
|
||||
|
||||
TEST(DecomposePair, Decomposable) {
|
||||
auto f = [](const int& x, std::piecewise_construct_t, std::tuple<int&&> k,
|
||||
std::tuple<double>&& v) {
|
||||
EXPECT_EQ(&x, &std::get<0>(k));
|
||||
EXPECT_EQ(42, x);
|
||||
EXPECT_EQ(0.5, std::get<0>(v));
|
||||
return 'A';
|
||||
};
|
||||
EXPECT_EQ('A', TryDecomposePair(f, 42, 0.5));
|
||||
EXPECT_EQ('A', TryDecomposePair(f, std::make_pair(42, 0.5)));
|
||||
EXPECT_EQ('A', TryDecomposePair(f, std::piecewise_construct,
|
||||
std::make_tuple(42), std::make_tuple(0.5)));
|
||||
}
|
||||
|
||||
TEST(DecomposePair, NotDecomposable) {
|
||||
auto f = [](...) {
|
||||
ADD_FAILURE() << "Must not be called";
|
||||
return 'A';
|
||||
};
|
||||
EXPECT_STREQ("not decomposable",
|
||||
TryDecomposePair(f));
|
||||
EXPECT_STREQ("not decomposable",
|
||||
TryDecomposePair(f, std::piecewise_construct, std::make_tuple(),
|
||||
std::make_tuple(0.5)));
|
||||
}
|
||||
|
||||
TEST(MapSlotPolicy, ConstKeyAndValue) {
|
||||
using slot_policy = map_slot_policy<const CopyableMovableInstance,
|
||||
const CopyableMovableInstance>;
|
||||
using slot_type = typename slot_policy::slot_type;
|
||||
|
||||
union Slots {
|
||||
Slots() {}
|
||||
~Slots() {}
|
||||
slot_type slots[100];
|
||||
} slots;
|
||||
|
||||
std::allocator<
|
||||
std::pair<const CopyableMovableInstance, const CopyableMovableInstance>>
|
||||
alloc;
|
||||
InstanceTracker tracker;
|
||||
slot_policy::construct(&alloc, &slots.slots[0], CopyableMovableInstance(1),
|
||||
CopyableMovableInstance(1));
|
||||
for (int i = 0; i < 99; ++i) {
|
||||
slot_policy::transfer(&alloc, &slots.slots[i + 1], &slots.slots[i]);
|
||||
}
|
||||
slot_policy::destroy(&alloc, &slots.slots[99]);
|
||||
|
||||
EXPECT_EQ(tracker.copies(), 0);
|
||||
}
|
||||
|
||||
} // namespace
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
114
third_party/abseil_cpp/absl/container/internal/counting_allocator.h
vendored
Normal file
114
third_party/abseil_cpp/absl/container/internal/counting_allocator.h
vendored
Normal file
|
|
@ -0,0 +1,114 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_COUNTING_ALLOCATOR_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_COUNTING_ALLOCATOR_H_
|
||||
|
||||
#include <cstdint>
|
||||
#include <memory>
|
||||
|
||||
#include "absl/base/config.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
// This is a stateful allocator, but the state lives outside of the
|
||||
// allocator (in whatever test is using the allocator). This is odd
|
||||
// but helps in tests where the allocator is propagated into nested
|
||||
// containers - that chain of allocators uses the same state and is
|
||||
// thus easier to query for aggregate allocation information.
|
||||
template <typename T>
|
||||
class CountingAllocator {
|
||||
public:
|
||||
using Allocator = std::allocator<T>;
|
||||
using AllocatorTraits = std::allocator_traits<Allocator>;
|
||||
using value_type = typename AllocatorTraits::value_type;
|
||||
using pointer = typename AllocatorTraits::pointer;
|
||||
using const_pointer = typename AllocatorTraits::const_pointer;
|
||||
using size_type = typename AllocatorTraits::size_type;
|
||||
using difference_type = typename AllocatorTraits::difference_type;
|
||||
|
||||
CountingAllocator() = default;
|
||||
explicit CountingAllocator(int64_t* bytes_used) : bytes_used_(bytes_used) {}
|
||||
CountingAllocator(int64_t* bytes_used, int64_t* instance_count)
|
||||
: bytes_used_(bytes_used), instance_count_(instance_count) {}
|
||||
|
||||
template <typename U>
|
||||
CountingAllocator(const CountingAllocator<U>& x)
|
||||
: bytes_used_(x.bytes_used_), instance_count_(x.instance_count_) {}
|
||||
|
||||
pointer allocate(
|
||||
size_type n,
|
||||
typename AllocatorTraits::const_void_pointer hint = nullptr) {
|
||||
Allocator allocator;
|
||||
pointer ptr = AllocatorTraits::allocate(allocator, n, hint);
|
||||
if (bytes_used_ != nullptr) {
|
||||
*bytes_used_ += n * sizeof(T);
|
||||
}
|
||||
return ptr;
|
||||
}
|
||||
|
||||
void deallocate(pointer p, size_type n) {
|
||||
Allocator allocator;
|
||||
AllocatorTraits::deallocate(allocator, p, n);
|
||||
if (bytes_used_ != nullptr) {
|
||||
*bytes_used_ -= n * sizeof(T);
|
||||
}
|
||||
}
|
||||
|
||||
template <typename U, typename... Args>
|
||||
void construct(U* p, Args&&... args) {
|
||||
Allocator allocator;
|
||||
AllocatorTraits::construct(allocator, p, std::forward<Args>(args)...);
|
||||
if (instance_count_ != nullptr) {
|
||||
*instance_count_ += 1;
|
||||
}
|
||||
}
|
||||
|
||||
template <typename U>
|
||||
void destroy(U* p) {
|
||||
Allocator allocator;
|
||||
AllocatorTraits::destroy(allocator, p);
|
||||
if (instance_count_ != nullptr) {
|
||||
*instance_count_ -= 1;
|
||||
}
|
||||
}
|
||||
|
||||
template <typename U>
|
||||
class rebind {
|
||||
public:
|
||||
using other = CountingAllocator<U>;
|
||||
};
|
||||
|
||||
friend bool operator==(const CountingAllocator& a,
|
||||
const CountingAllocator& b) {
|
||||
return a.bytes_used_ == b.bytes_used_ &&
|
||||
a.instance_count_ == b.instance_count_;
|
||||
}
|
||||
|
||||
friend bool operator!=(const CountingAllocator& a,
|
||||
const CountingAllocator& b) {
|
||||
return !(a == b);
|
||||
}
|
||||
|
||||
int64_t* bytes_used_ = nullptr;
|
||||
int64_t* instance_count_ = nullptr;
|
||||
};
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_COUNTING_ALLOCATOR_H_
|
||||
161
third_party/abseil_cpp/absl/container/internal/hash_function_defaults.h
vendored
Normal file
161
third_party/abseil_cpp/absl/container/internal/hash_function_defaults.h
vendored
Normal file
|
|
@ -0,0 +1,161 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
// Define the default Hash and Eq functions for SwissTable containers.
|
||||
//
|
||||
// std::hash<T> and std::equal_to<T> are not appropriate hash and equal
|
||||
// functions for SwissTable containers. There are two reasons for this.
|
||||
//
|
||||
// SwissTable containers are power of 2 sized containers:
|
||||
//
|
||||
// This means they use the lower bits of the hash value to find the slot for
|
||||
// each entry. The typical hash function for integral types is the identity.
|
||||
// This is a very weak hash function for SwissTable and any power of 2 sized
|
||||
// hashtable implementation which will lead to excessive collisions. For
|
||||
// SwissTable we use murmur3 style mixing to reduce collisions to a minimum.
|
||||
//
|
||||
// SwissTable containers support heterogeneous lookup:
|
||||
//
|
||||
// In order to make heterogeneous lookup work, hash and equal functions must be
|
||||
// polymorphic. At the same time they have to satisfy the same requirements the
|
||||
// C++ standard imposes on hash functions and equality operators. That is:
|
||||
//
|
||||
// if hash_default_eq<T>(a, b) returns true for any a and b of type T, then
|
||||
// hash_default_hash<T>(a) must equal hash_default_hash<T>(b)
|
||||
//
|
||||
// For SwissTable containers this requirement is relaxed to allow a and b of
|
||||
// any and possibly different types. Note that like the standard the hash and
|
||||
// equal functions are still bound to T. This is important because some type U
|
||||
// can be hashed by/tested for equality differently depending on T. A notable
|
||||
// example is `const char*`. `const char*` is treated as a c-style string when
|
||||
// the hash function is hash<std::string> but as a pointer when the hash
|
||||
// function is hash<void*>.
|
||||
//
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_HASH_FUNCTION_DEFAULTS_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_HASH_FUNCTION_DEFAULTS_H_
|
||||
|
||||
#include <stdint.h>
|
||||
#include <cstddef>
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <type_traits>
|
||||
|
||||
#include "absl/base/config.h"
|
||||
#include "absl/hash/hash.h"
|
||||
#include "absl/strings/cord.h"
|
||||
#include "absl/strings/string_view.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
// The hash of an object of type T is computed by using absl::Hash.
//
// Primary template: applies to any T without a more specific specialization
// below. Hashing delegates to absl::Hash<T>; equality is plain operator==
// via std::equal_to<T> (not transparent, so arguments convert to T first).
template <class T, class E = void>
struct HashEq {
  using Hash = absl::Hash<T>;
  using Eq = std::equal_to<T>;
};
|
||||
|
||||
// Transparent hash functor for string-like keys.
//
// `is_transparent` enables heterogeneous lookup: containers may hash a
// string_view or Cord argument directly, without first materializing the
// container's key type.
struct StringHash {
  using is_transparent = void;

  // Handles anything convertible to absl::string_view (std::string,
  // string literals, ...).
  size_t operator()(absl::string_view v) const {
    return absl::Hash<absl::string_view>{}(v);
  }
  // Cord needs its own overload: it may be non-contiguous and is not
  // convertible to string_view. absl::Hash produces the same value for a
  // Cord and a string_view with equal contents (asserted by the tests in
  // this change).
  size_t operator()(const absl::Cord& v) const {
    return absl::Hash<absl::Cord>{}(v);
  }
};
|
||||
|
||||
// Supports heterogeneous lookup for string-like elements.
struct StringHashEq {
  using Hash = StringHash;
  // Transparent equality over every pairing of string_view-convertible
  // types and absl::Cord (view/view, Cord/Cord, Cord/view, view/Cord).
  struct Eq {
    using is_transparent = void;
    bool operator()(absl::string_view lhs, absl::string_view rhs) const {
      return lhs == rhs;
    }
    bool operator()(const absl::Cord& lhs, const absl::Cord& rhs) const {
      return lhs == rhs;
    }
    bool operator()(const absl::Cord& lhs, absl::string_view rhs) const {
      return lhs == rhs;
    }
    bool operator()(absl::string_view lhs, const absl::Cord& rhs) const {
      return lhs == rhs;
    }
  };
};
|
||||
|
||||
// std::string, absl::string_view, and absl::Cord all share the same
// transparent string Hash/Eq, so any of them can be used to look up an
// element keyed by any of the others.
template <>
struct HashEq<std::string> : StringHashEq {};
template <>
struct HashEq<absl::string_view> : StringHashEq {};
template <>
struct HashEq<absl::Cord> : StringHashEq {};
|
||||
|
||||
// Supports heterogeneous lookup for pointers and smart pointers.
//
// Raw pointers, std::unique_ptr, and std::shared_ptr are all normalized to
// `const T*` (via ToPtr) before hashing or comparing, so any of them can be
// used to look up an element keyed by any of the others.
template <class T>
struct HashEq<T*> {
  struct Hash {
    using is_transparent = void;
    template <class U>
    size_t operator()(const U& ptr) const {
      // Hash the normalized raw pointer so all owner types agree.
      return absl::Hash<const T*>{}(HashEq::ToPtr(ptr));
    }
  };
  struct Eq {
    using is_transparent = void;
    template <class A, class B>
    bool operator()(const A& a, const B& b) const {
      return HashEq::ToPtr(a) == HashEq::ToPtr(b);
    }
  };

 private:
  // Overload set converting raw and smart pointers to `const T*`.
  static const T* ToPtr(const T* ptr) { return ptr; }
  template <class U, class D>
  static const T* ToPtr(const std::unique_ptr<U, D>& ptr) {
    return ptr.get();
  }
  template <class U>
  static const T* ToPtr(const std::shared_ptr<U>& ptr) {
    return ptr.get();
  }
};
|
||||
|
||||
// Smart pointers hash and compare exactly like the raw pointer they hold
// (see HashEq<T*> above).
template <class T, class D>
struct HashEq<std::unique_ptr<T, D>> : HashEq<T*> {};
template <class T>
struct HashEq<std::shared_ptr<T>> : HashEq<T*> {};
|
||||
|
||||
// This header's visibility is restricted. If you need to access the default
// hasher please use the container's ::hasher alias instead.
//
// Example: typename Hash = typename absl::flat_hash_map<K, V>::hasher
//
// Resolved through the HashEq<T> primary template / specializations above.
template <class T>
using hash_default_hash = typename container_internal::HashEq<T>::Hash;

// This header's visibility is restricted. If you need to access the default
// key equal please use the container's ::key_equal alias instead.
//
// Example: typename Eq = typename absl::flat_hash_map<K, V, Hash>::key_equal
template <class T>
using hash_default_eq = typename container_internal::HashEq<T>::Eq;
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_HASH_FUNCTION_DEFAULTS_H_
|
||||
383
third_party/abseil_cpp/absl/container/internal/hash_function_defaults_test.cc
vendored
Normal file
383
third_party/abseil_cpp/absl/container/internal/hash_function_defaults_test.cc
vendored
Normal file
|
|
@ -0,0 +1,383 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "absl/container/internal/hash_function_defaults.h"

#include <algorithm>
#include <functional>
#include <string>
#include <type_traits>
#include <utility>
#include <vector>

#include "gtest/gtest.h"
#include "absl/random/random.h"
#include "absl/strings/cord.h"
#include "absl/strings/cord_test_helpers.h"
#include "absl/strings/string_view.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
namespace {
|
||||
|
||||
using ::testing::Types;
|
||||
|
||||
// The default eq for int32_t is std::equal_to<int32_t> (not transparent),
// so every argument is implicitly converted to int32_t before comparing.
// Note double{1.1} therefore compares equal to 1 (truncating conversion).
TEST(Eq, Int32) {
  hash_default_eq<int32_t> eq;
  EXPECT_TRUE(eq(1, 1u));
  EXPECT_TRUE(eq(1, char{1}));
  EXPECT_TRUE(eq(1, true));
  EXPECT_TRUE(eq(1, double{1.1}));
  EXPECT_FALSE(eq(1, char{2}));
  EXPECT_FALSE(eq(1, 2u));
  EXPECT_FALSE(eq(1, false));
  EXPECT_FALSE(eq(1, 2.));
}

// Same conversion applies to the default hash: all values converting to the
// same int32_t must hash identically, and to different int32_t differently
// (the latter is a property of absl::Hash, not a strict guarantee).
TEST(Hash, Int32) {
  hash_default_hash<int32_t> hash;
  auto h = hash(1);
  EXPECT_EQ(h, hash(1u));
  EXPECT_EQ(h, hash(char{1}));
  EXPECT_EQ(h, hash(true));
  EXPECT_EQ(h, hash(double{1.1}));
  EXPECT_NE(h, hash(2u));
  EXPECT_NE(h, hash(char{2}));
  EXPECT_NE(h, hash(false));
  EXPECT_NE(h, hash(2.));
}
|
||||
|
||||
enum class MyEnum { A, B, C, D };
|
||||
|
||||
TEST(Eq, Enum) {
|
||||
hash_default_eq<MyEnum> eq;
|
||||
EXPECT_TRUE(eq(MyEnum::A, MyEnum::A));
|
||||
EXPECT_FALSE(eq(MyEnum::A, MyEnum::B));
|
||||
}
|
||||
|
||||
TEST(Hash, Enum) {
|
||||
hash_default_hash<MyEnum> hash;
|
||||
|
||||
for (MyEnum e : {MyEnum::A, MyEnum::B, MyEnum::C}) {
|
||||
auto h = hash(e);
|
||||
EXPECT_EQ(h, hash_default_hash<int>{}(static_cast<int>(e)));
|
||||
EXPECT_NE(h, hash(MyEnum::D));
|
||||
}
|
||||
}
|
||||
|
||||
using StringTypes = ::testing::Types<std::string, absl::string_view>;
|
||||
|
||||
template <class T>
|
||||
struct EqString : ::testing::Test {
|
||||
hash_default_eq<T> key_eq;
|
||||
};
|
||||
|
||||
TYPED_TEST_SUITE(EqString, StringTypes);
|
||||
|
||||
template <class T>
|
||||
struct HashString : ::testing::Test {
|
||||
hash_default_hash<T> hasher;
|
||||
};
|
||||
|
||||
TYPED_TEST_SUITE(HashString, StringTypes);
|
||||
|
||||
TYPED_TEST(EqString, Works) {
|
||||
auto eq = this->key_eq;
|
||||
EXPECT_TRUE(eq("a", "a"));
|
||||
EXPECT_TRUE(eq("a", absl::string_view("a")));
|
||||
EXPECT_TRUE(eq("a", std::string("a")));
|
||||
EXPECT_FALSE(eq("a", "b"));
|
||||
EXPECT_FALSE(eq("a", absl::string_view("b")));
|
||||
EXPECT_FALSE(eq("a", std::string("b")));
|
||||
}
|
||||
|
||||
TYPED_TEST(HashString, Works) {
|
||||
auto hash = this->hasher;
|
||||
auto h = hash("a");
|
||||
EXPECT_EQ(h, hash(absl::string_view("a")));
|
||||
EXPECT_EQ(h, hash(std::string("a")));
|
||||
EXPECT_NE(h, hash(absl::string_view("b")));
|
||||
EXPECT_NE(h, hash(std::string("b")));
|
||||
}
|
||||
|
||||
struct NoDeleter {
|
||||
template <class T>
|
||||
void operator()(const T* ptr) const {}
|
||||
};
|
||||
|
||||
using PointerTypes =
|
||||
::testing::Types<const int*, int*, std::unique_ptr<const int>,
|
||||
std::unique_ptr<const int, NoDeleter>,
|
||||
std::unique_ptr<int>, std::unique_ptr<int, NoDeleter>,
|
||||
std::shared_ptr<const int>, std::shared_ptr<int>>;
|
||||
|
||||
template <class T>
|
||||
struct EqPointer : ::testing::Test {
|
||||
hash_default_eq<T> key_eq;
|
||||
};
|
||||
|
||||
TYPED_TEST_SUITE(EqPointer, PointerTypes);
|
||||
|
||||
template <class T>
|
||||
struct HashPointer : ::testing::Test {
|
||||
hash_default_hash<T> hasher;
|
||||
};
|
||||
|
||||
TYPED_TEST_SUITE(HashPointer, PointerTypes);
|
||||
|
||||
TYPED_TEST(EqPointer, Works) {
|
||||
int dummy;
|
||||
auto eq = this->key_eq;
|
||||
auto sptr = std::make_shared<int>();
|
||||
std::shared_ptr<const int> csptr = sptr;
|
||||
int* ptr = sptr.get();
|
||||
const int* cptr = ptr;
|
||||
std::unique_ptr<int, NoDeleter> uptr(ptr);
|
||||
std::unique_ptr<const int, NoDeleter> cuptr(ptr);
|
||||
|
||||
EXPECT_TRUE(eq(ptr, cptr));
|
||||
EXPECT_TRUE(eq(ptr, sptr));
|
||||
EXPECT_TRUE(eq(ptr, uptr));
|
||||
EXPECT_TRUE(eq(ptr, csptr));
|
||||
EXPECT_TRUE(eq(ptr, cuptr));
|
||||
EXPECT_FALSE(eq(&dummy, cptr));
|
||||
EXPECT_FALSE(eq(&dummy, sptr));
|
||||
EXPECT_FALSE(eq(&dummy, uptr));
|
||||
EXPECT_FALSE(eq(&dummy, csptr));
|
||||
EXPECT_FALSE(eq(&dummy, cuptr));
|
||||
}
|
||||
|
||||
TEST(Hash, DerivedAndBase) {
|
||||
struct Base {};
|
||||
struct Derived : Base {};
|
||||
|
||||
hash_default_hash<Base*> hasher;
|
||||
|
||||
Base base;
|
||||
Derived derived;
|
||||
EXPECT_NE(hasher(&base), hasher(&derived));
|
||||
EXPECT_EQ(hasher(static_cast<Base*>(&derived)), hasher(&derived));
|
||||
|
||||
auto dp = std::make_shared<Derived>();
|
||||
EXPECT_EQ(hasher(static_cast<Base*>(dp.get())), hasher(dp));
|
||||
}
|
||||
|
||||
TEST(Hash, FunctionPointer) {
|
||||
using Func = int (*)();
|
||||
hash_default_hash<Func> hasher;
|
||||
hash_default_eq<Func> eq;
|
||||
|
||||
Func p1 = [] { return 1; }, p2 = [] { return 2; };
|
||||
EXPECT_EQ(hasher(p1), hasher(p1));
|
||||
EXPECT_TRUE(eq(p1, p1));
|
||||
|
||||
EXPECT_NE(hasher(p1), hasher(p2));
|
||||
EXPECT_FALSE(eq(p1, p2));
|
||||
}
|
||||
|
||||
TYPED_TEST(HashPointer, Works) {
|
||||
int dummy;
|
||||
auto hash = this->hasher;
|
||||
auto sptr = std::make_shared<int>();
|
||||
std::shared_ptr<const int> csptr = sptr;
|
||||
int* ptr = sptr.get();
|
||||
const int* cptr = ptr;
|
||||
std::unique_ptr<int, NoDeleter> uptr(ptr);
|
||||
std::unique_ptr<const int, NoDeleter> cuptr(ptr);
|
||||
|
||||
EXPECT_EQ(hash(ptr), hash(cptr));
|
||||
EXPECT_EQ(hash(ptr), hash(sptr));
|
||||
EXPECT_EQ(hash(ptr), hash(uptr));
|
||||
EXPECT_EQ(hash(ptr), hash(csptr));
|
||||
EXPECT_EQ(hash(ptr), hash(cuptr));
|
||||
EXPECT_NE(hash(&dummy), hash(cptr));
|
||||
EXPECT_NE(hash(&dummy), hash(sptr));
|
||||
EXPECT_NE(hash(&dummy), hash(uptr));
|
||||
EXPECT_NE(hash(&dummy), hash(csptr));
|
||||
EXPECT_NE(hash(&dummy), hash(cuptr));
|
||||
}
|
||||
|
||||
TEST(EqCord, Works) {
|
||||
hash_default_eq<absl::Cord> eq;
|
||||
const absl::string_view a_string_view = "a";
|
||||
const absl::Cord a_cord(a_string_view);
|
||||
const absl::string_view b_string_view = "b";
|
||||
const absl::Cord b_cord(b_string_view);
|
||||
|
||||
EXPECT_TRUE(eq(a_cord, a_cord));
|
||||
EXPECT_TRUE(eq(a_cord, a_string_view));
|
||||
EXPECT_TRUE(eq(a_string_view, a_cord));
|
||||
EXPECT_FALSE(eq(a_cord, b_cord));
|
||||
EXPECT_FALSE(eq(a_cord, b_string_view));
|
||||
EXPECT_FALSE(eq(b_string_view, a_cord));
|
||||
}
|
||||
|
||||
TEST(HashCord, Works) {
|
||||
hash_default_hash<absl::Cord> hash;
|
||||
const absl::string_view a_string_view = "a";
|
||||
const absl::Cord a_cord(a_string_view);
|
||||
const absl::string_view b_string_view = "b";
|
||||
const absl::Cord b_cord(b_string_view);
|
||||
|
||||
EXPECT_EQ(hash(a_cord), hash(a_cord));
|
||||
EXPECT_EQ(hash(b_cord), hash(b_cord));
|
||||
EXPECT_EQ(hash(a_string_view), hash(a_cord));
|
||||
EXPECT_EQ(hash(b_string_view), hash(b_cord));
|
||||
EXPECT_EQ(hash(absl::Cord("")), hash(""));
|
||||
EXPECT_EQ(hash(absl::Cord()), hash(absl::string_view()));
|
||||
|
||||
EXPECT_NE(hash(a_cord), hash(b_cord));
|
||||
EXPECT_NE(hash(a_cord), hash(b_string_view));
|
||||
EXPECT_NE(hash(a_string_view), hash(b_cord));
|
||||
EXPECT_NE(hash(a_string_view), hash(b_string_view));
|
||||
}
|
||||
|
||||
void NoOpReleaser(absl::string_view data, void* arg) {}
|
||||
|
||||
TEST(HashCord, FragmentedCordWorks) {
|
||||
hash_default_hash<absl::Cord> hash;
|
||||
absl::Cord c = absl::MakeFragmentedCord({"a", "b", "c"});
|
||||
EXPECT_FALSE(c.TryFlat().has_value());
|
||||
EXPECT_EQ(hash(c), hash("abc"));
|
||||
}
|
||||
|
||||
TEST(HashCord, FragmentedLongCordWorks) {
|
||||
hash_default_hash<absl::Cord> hash;
|
||||
// Crete some large strings which do not fit on the stack.
|
||||
std::string a(65536, 'a');
|
||||
std::string b(65536, 'b');
|
||||
absl::Cord c = absl::MakeFragmentedCord({a, b});
|
||||
EXPECT_FALSE(c.TryFlat().has_value());
|
||||
EXPECT_EQ(hash(c), hash(a + b));
|
||||
}
|
||||
|
||||
// Property test: for randomly fragmented Cords of random sizes, hashing the
// Cord must agree with hashing its flattened std::string contents.
TEST(HashCord, RandomCord) {
  hash_default_hash<absl::Cord> hash;
  auto bitgen = absl::BitGen();
  for (int i = 0; i < 1000; ++i) {
    // Fix: loop index was `size_t` compared against this signed int,
    // a signed/unsigned mismatch; both are int now.
    const int number_of_segments = absl::Uniform(bitgen, 0, 10);
    std::vector<std::string> pieces;
    for (int s = 0; s < number_of_segments; ++s) {
      std::string str;
      str.resize(absl::Uniform(bitgen, 0, 4096));
      // MSVC needed the explicit return type in the lambda.
      std::generate(str.begin(), str.end(), [&]() -> char {
        return static_cast<char>(absl::Uniform<unsigned char>(bitgen));
      });
      // Move instead of copying the (up to 4 KiB) segment.
      pieces.push_back(std::move(str));
    }
    absl::Cord c = absl::MakeFragmentedCord(pieces);
    EXPECT_EQ(hash(c), hash(std::string(c)));
  }
}
|
||||
|
||||
// Cartesian product of (std::string, absl::string_view)
|
||||
// with (std::string, absl::string_view, const char*, absl::Cord).
|
||||
using StringTypesCartesianProduct = Types<
|
||||
// clang-format off
|
||||
std::pair<absl::Cord, std::string>,
|
||||
std::pair<absl::Cord, absl::string_view>,
|
||||
std::pair<absl::Cord, absl::Cord>,
|
||||
std::pair<absl::Cord, const char*>,
|
||||
|
||||
std::pair<std::string, absl::Cord>,
|
||||
std::pair<absl::string_view, absl::Cord>,
|
||||
|
||||
std::pair<absl::string_view, std::string>,
|
||||
std::pair<absl::string_view, absl::string_view>,
|
||||
std::pair<absl::string_view, const char*>>;
|
||||
// clang-format on
|
||||
|
||||
constexpr char kFirstString[] = "abc123";
|
||||
constexpr char kSecondString[] = "ijk456";
|
||||
|
||||
template <typename T>
|
||||
struct StringLikeTest : public ::testing::Test {
|
||||
typename T::first_type a1{kFirstString};
|
||||
typename T::second_type b1{kFirstString};
|
||||
typename T::first_type a2{kSecondString};
|
||||
typename T::second_type b2{kSecondString};
|
||||
hash_default_eq<typename T::first_type> eq;
|
||||
hash_default_hash<typename T::first_type> hash;
|
||||
};
|
||||
|
||||
// Fix: the suite was declared with the deprecated parameterized macro
// TYPED_TEST_CASE_P (whose tests require REGISTER_/INSTANTIATE_ macros)
// while being instantiated with the non-parameterized TYPED_TEST_SUITE.
// Use the consistent non-parameterized TYPED_TEST_SUITE/TYPED_TEST pairing.
TYPED_TEST_SUITE(StringLikeTest, StringTypesCartesianProduct);

// a1/b1 hold the same string in (possibly) different representations;
// a2/b2 hold a second, distinct string.
TYPED_TEST(StringLikeTest, Eq) {
  EXPECT_TRUE(this->eq(this->a1, this->b1));
  EXPECT_TRUE(this->eq(this->b1, this->a1));
}

TYPED_TEST(StringLikeTest, NotEq) {
  EXPECT_FALSE(this->eq(this->a1, this->b2));
  EXPECT_FALSE(this->eq(this->b2, this->a1));
}

TYPED_TEST(StringLikeTest, HashEq) {
  EXPECT_EQ(this->hash(this->a1), this->hash(this->b1));
  EXPECT_EQ(this->hash(this->a2), this->hash(this->b2));
  // It would be a poor hash function which collides on these strings.
  EXPECT_NE(this->hash(this->a1), this->hash(this->b2));
}
|
||||
|
||||
} // namespace
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
enum Hash : size_t {
|
||||
kStd = 0x2, // std::hash
|
||||
#ifdef _MSC_VER
|
||||
kExtension = kStd, // In MSVC, std::hash == ::hash
|
||||
#else // _MSC_VER
|
||||
kExtension = 0x4, // ::hash (GCC extension)
|
||||
#endif // _MSC_VER
|
||||
};
|
||||
|
||||
// H is a bitmask of Hash enumerations.
|
||||
// Hashable<H> is hashable via all means specified in H.
|
||||
template <int H>
|
||||
struct Hashable {
|
||||
static constexpr bool HashableBy(Hash h) { return h & H; }
|
||||
};
|
||||
|
||||
namespace std {
|
||||
template <int H>
|
||||
struct hash<Hashable<H>> {
|
||||
template <class E = Hashable<H>,
|
||||
class = typename std::enable_if<E::HashableBy(kStd)>::type>
|
||||
size_t operator()(E) const {
|
||||
return kStd;
|
||||
}
|
||||
};
|
||||
} // namespace std
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
namespace {
|
||||
|
||||
template <class T>
|
||||
size_t Hash(const T& v) {
|
||||
return hash_default_hash<T>()(v);
|
||||
}
|
||||
|
||||
TEST(Delegate, HashDispatch) {
|
||||
EXPECT_EQ(Hash(kStd), Hash(Hashable<kStd>()));
|
||||
}
|
||||
|
||||
} // namespace
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
74
third_party/abseil_cpp/absl/container/internal/hash_generator_testing.cc
vendored
Normal file
74
third_party/abseil_cpp/absl/container/internal/hash_generator_testing.cc
vendored
Normal file
|
|
@ -0,0 +1,74 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "absl/container/internal/hash_generator_testing.h"
|
||||
|
||||
#include <deque>
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
namespace hash_internal {
|
||||
namespace {
|
||||
|
||||
// Seed-sequence adapter that draws every seed word directly from
// std::random_device, so each process gets an unpredictable RNG stream.
// Models just enough of the SeedSequence requirements (result_type +
// generate) to seed std::mt19937_64.
class RandomDeviceSeedSeq {
 public:
  using result_type = typename std::random_device::result_type;

  // Fills [start, end) with fresh entropy, one word per slot.
  template <class Iterator>
  void generate(Iterator start, Iterator end) {
    while (start != end) {
      *start = gen_();
      ++start;
    }
  }

 private:
  std::random_device gen_;  // entropy source; each call may perform I/O
};
|
||||
|
||||
} // namespace
|
||||
|
||||
// Returns the process-wide RNG shared by all Generators. Seeded once, on
// first call, from std::random_device; intentionally leaked to sidestep
// static destruction order.
// NOTE(review): callers advance the shared engine without locking —
// presumably assumes single-threaded test use; confirm before using from
// multiple threads.
std::mt19937_64* GetSharedRng() {
  RandomDeviceSeedSeq seed_seq;
  static auto* rng = new std::mt19937_64(seed_seq);
  return rng;
}
|
||||
|
||||
// Returns a fresh 32-character random string of printable ASCII
// (0x20..0x7E inclusive), drawn from the shared RNG.
std::string Generator<std::string>::operator()() const {
  // NOLINTNEXTLINE(runtime/int)
  std::uniform_int_distribution<short> chars(0x20, 0x7E);
  std::string res;
  res.resize(32);
  std::generate(res.begin(), res.end(),
                [&]() { return chars(*GetSharedRng()); });
  return res;
}
|
||||
|
||||
// Returns a random 32-character printable-ASCII string_view. The backing
// string lives in a leaked std::deque arena so the view stays valid for the
// remainder of the process; a deque is used because growing it never
// relocates existing elements (unlike a vector), so earlier views stay valid.
absl::string_view Generator<absl::string_view>::operator()() const {
  static auto* arena = new std::deque<std::string>();
  // NOLINTNEXTLINE(runtime/int)
  std::uniform_int_distribution<short> chars(0x20, 0x7E);
  arena->emplace_back();
  auto& res = arena->back();
  res.resize(32);
  std::generate(res.begin(), res.end(),
                [&]() { return chars(*GetSharedRng()); });
  return res;
}
|
||||
|
||||
} // namespace hash_internal
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
161
third_party/abseil_cpp/absl/container/internal/hash_generator_testing.h
vendored
Normal file
161
third_party/abseil_cpp/absl/container/internal/hash_generator_testing.h
vendored
Normal file
|
|
@ -0,0 +1,161 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
// Generates random values for testing. Specialized only for the few types we
|
||||
// care about.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_HASH_GENERATOR_TESTING_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_HASH_GENERATOR_TESTING_H_
|
||||
|
||||
#include <stdint.h>
|
||||
|
||||
#include <algorithm>
|
||||
#include <iosfwd>
|
||||
#include <random>
|
||||
#include <tuple>
|
||||
#include <type_traits>
|
||||
#include <utility>
|
||||
|
||||
#include "absl/container/internal/hash_policy_testing.h"
|
||||
#include "absl/memory/memory.h"
|
||||
#include "absl/meta/type_traits.h"
|
||||
#include "absl/strings/string_view.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
namespace hash_internal {
|
||||
namespace generator_internal {
|
||||
|
||||
template <class Container, class = void>
|
||||
struct IsMap : std::false_type {};
|
||||
|
||||
template <class Map>
|
||||
struct IsMap<Map, absl::void_t<typename Map::mapped_type>> : std::true_type {};
|
||||
|
||||
} // namespace generator_internal
|
||||
|
||||
std::mt19937_64* GetSharedRng();
|
||||
|
||||
enum Enum {
|
||||
kEnumEmpty,
|
||||
kEnumDeleted,
|
||||
};
|
||||
|
||||
enum class EnumClass : uint64_t {
|
||||
kEmpty,
|
||||
kDeleted,
|
||||
};
|
||||
|
||||
inline std::ostream& operator<<(std::ostream& o, const EnumClass& ec) {
|
||||
return o << static_cast<uint64_t>(ec);
|
||||
}
|
||||
|
||||
template <class T, class E = void>
|
||||
struct Generator;
|
||||
|
||||
template <class T>
|
||||
struct Generator<T, typename std::enable_if<std::is_integral<T>::value>::type> {
|
||||
T operator()() const {
|
||||
std::uniform_int_distribution<T> dist;
|
||||
return dist(*GetSharedRng());
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct Generator<Enum> {
|
||||
Enum operator()() const {
|
||||
std::uniform_int_distribution<typename std::underlying_type<Enum>::type>
|
||||
dist;
|
||||
while (true) {
|
||||
auto variate = dist(*GetSharedRng());
|
||||
if (variate != kEnumEmpty && variate != kEnumDeleted)
|
||||
return static_cast<Enum>(variate);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct Generator<EnumClass> {
|
||||
EnumClass operator()() const {
|
||||
std::uniform_int_distribution<
|
||||
typename std::underlying_type<EnumClass>::type>
|
||||
dist;
|
||||
while (true) {
|
||||
EnumClass variate = static_cast<EnumClass>(dist(*GetSharedRng()));
|
||||
if (variate != EnumClass::kEmpty && variate != EnumClass::kDeleted)
|
||||
return static_cast<EnumClass>(variate);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct Generator<std::string> {
|
||||
std::string operator()() const;
|
||||
};
|
||||
|
||||
template <>
|
||||
struct Generator<absl::string_view> {
|
||||
absl::string_view operator()() const;
|
||||
};
|
||||
|
||||
template <>
|
||||
struct Generator<NonStandardLayout> {
|
||||
NonStandardLayout operator()() const {
|
||||
return NonStandardLayout(Generator<std::string>()());
|
||||
}
|
||||
};
|
||||
|
||||
template <class K, class V>
|
||||
struct Generator<std::pair<K, V>> {
|
||||
std::pair<K, V> operator()() const {
|
||||
return std::pair<K, V>(Generator<typename std::decay<K>::type>()(),
|
||||
Generator<typename std::decay<V>::type>()());
|
||||
}
|
||||
};
|
||||
|
||||
template <class... Ts>
|
||||
struct Generator<std::tuple<Ts...>> {
|
||||
std::tuple<Ts...> operator()() const {
|
||||
return std::tuple<Ts...>(Generator<typename std::decay<Ts>::type>()()...);
|
||||
}
|
||||
};
|
||||
|
||||
template <class T>
|
||||
struct Generator<std::unique_ptr<T>> {
|
||||
std::unique_ptr<T> operator()() const {
|
||||
return absl::make_unique<T>(Generator<T>()());
|
||||
}
|
||||
};
|
||||
|
||||
template <class U>
|
||||
struct Generator<U, absl::void_t<decltype(std::declval<U&>().key()),
|
||||
decltype(std::declval<U&>().value())>>
|
||||
: Generator<std::pair<
|
||||
typename std::decay<decltype(std::declval<U&>().key())>::type,
|
||||
typename std::decay<decltype(std::declval<U&>().value())>::type>> {};
|
||||
|
||||
template <class Container>
|
||||
using GeneratedType = decltype(
|
||||
std::declval<const Generator<
|
||||
typename std::conditional<generator_internal::IsMap<Container>::value,
|
||||
typename Container::value_type,
|
||||
typename Container::key_type>::type>&>()());
|
||||
|
||||
} // namespace hash_internal
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_HASH_GENERATOR_TESTING_H_
|
||||
184
third_party/abseil_cpp/absl/container/internal/hash_policy_testing.h
vendored
Normal file
184
third_party/abseil_cpp/absl/container/internal/hash_policy_testing.h
vendored
Normal file
|
|
@ -0,0 +1,184 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
// Utilities to help tests verify that hash tables properly handle stateful
|
||||
// allocators and hash functions.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_HASH_POLICY_TESTING_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_HASH_POLICY_TESTING_H_
|
||||
|
||||
#include <cstdlib>
|
||||
#include <limits>
|
||||
#include <memory>
|
||||
#include <ostream>
|
||||
#include <type_traits>
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/hash/hash.h"
|
||||
#include "absl/strings/string_view.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
namespace hash_testing_internal {
|
||||
|
||||
// CRTP mixin that gives every constructed object a unique, copyable id.
// Tests use it to verify containers copy/move their hasher/eq functors
// rather than recreating them: copies share the source's id, moves transfer
// it and leave the moved-from object holding the reserved id 0.
template <class Derived>
struct WithId {
  WithId() : id_(next_id<Derived>()) {}
  WithId(const WithId& that) : id_(that.id_) {}
  WithId(WithId&& that) : id_(that.id_) { that.id_ = 0; }
  WithId& operator=(const WithId& that) {
    id_ = that.id_;
    return *this;
  }
  WithId& operator=(WithId&& that) {
    id_ = that.id_;
    that.id_ = 0;
    return *this;
  }

  size_t id() const { return id_; }

  // Equality is identity-based: two objects compare equal iff they stem from
  // the same original construction (directly or via copy/move).
  friend bool operator==(const WithId& a, const WithId& b) {
    return a.id_ == b.id_;
  }
  friend bool operator!=(const WithId& a, const WithId& b) { return !(a == b); }

 protected:
  explicit WithId(size_t id) : id_(id) {}

 private:
  size_t id_;

  // Per-Derived monotonically increasing counter.
  // NOTE(review): not synchronized — assumes single-threaded test setup.
  template <class T>
  static size_t next_id() {
    // 0 is reserved for moved from state.
    static size_t gId = 1;
    return gId++;
  }
};
|
||||
|
||||
} // namespace hash_testing_internal
|
||||
|
||||
// A hashable, comparable type that is deliberately NOT standard-layout (it
// has a vtable), used to check containers make no layout assumptions.
struct NonStandardLayout {
  NonStandardLayout() = default;
  explicit NonStandardLayout(std::string s) : value(std::move(s)) {}
  virtual ~NonStandardLayout() = default;

  // Equality and inequality compare only the wrapped string.
  friend bool operator==(const NonStandardLayout& a,
                         const NonStandardLayout& b) {
    return a.value == b.value;
  }
  friend bool operator!=(const NonStandardLayout& a,
                         const NonStandardLayout& b) {
    return !(a.value == b.value);
  }

  // Hashes exactly like the wrapped string.
  template <typename H>
  friend H AbslHashValue(H h, const NonStandardLayout& v) {
    return H::combine(std::move(h), v.value);
  }

  std::string value;
};
|
||||
|
||||
// Hash functor carrying a per-instance id (see WithId) so tests can verify
// that containers propagate the hasher on copy/move. Hashing itself simply
// delegates to absl::Hash.
struct StatefulTestingHash
    : absl::container_internal::hash_testing_internal::WithId<
          StatefulTestingHash> {
  template <class T>
  size_t operator()(const T& t) const {
    return absl::Hash<T>{}(t);
  }
};

// Equality functor carrying a per-instance id; compares with operator== and
// is heterogeneous (T and U may be different types).
struct StatefulTestingEqual
    : absl::container_internal::hash_testing_internal::WithId<
          StatefulTestingEqual> {
  template <class T, class U>
  bool operator()(const T& t, const U& u) const {
    return t == u;
  }
};
|
||||
|
||||
// It is expected that Alloc() == Alloc() for all allocators so we cannot use
// WithId base. We need to explicitly assign ids. Two Alloc instances compare
// equal iff they carry the same id.
template <class T = int>
struct Alloc : std::allocator<T> {
  using propagate_on_container_swap = std::true_type;

  // Using old paradigm for this to ensure compatibility.
  explicit Alloc(size_t id = 0) : id_(id) {}

  Alloc(const Alloc&) = default;
  Alloc& operator=(const Alloc&) = default;

  // Rebinding conversion keeps the source allocator's id.
  template <class U>
  Alloc(const Alloc<U>& other) : std::allocator<T>(other), id_(other.id()) {}

  template <class U>
  struct rebind {
    using other = Alloc<U>;
  };

  // Identity used by tests to distinguish allocator instances.
  size_t id() const { return id_; }

  friend bool operator==(const Alloc& lhs, const Alloc& rhs) {
    return lhs.id_ == rhs.id_;
  }
  friend bool operator!=(const Alloc& lhs, const Alloc& rhs) {
    return !(lhs == rhs);
  }

 private:
  size_t id_ = (std::numeric_limits<size_t>::max)();
};
|
||||
|
||||
// Copies the entries of a map-like container into a vector of
// (key, mapped_value) pairs, preserving iteration order.
template <class Map>
auto items(const Map& m) -> std::vector<
    std::pair<typename Map::key_type, typename Map::mapped_type>> {
  using std::get;
  std::vector<std::pair<typename Map::key_type, typename Map::mapped_type>>
      out;
  out.reserve(m.size());
  for (auto it = m.begin(); it != m.end(); ++it) {
    out.emplace_back(get<0>(*it), get<1>(*it));
  }
  return out;
}
|
||||
|
||||
// Copies the keys of a set-like container into a vector, preserving
// iteration order.
template <class Set>
auto keys(const Set& s)
    -> std::vector<typename std::decay<typename Set::key_type>::type> {
  using Key = typename std::decay<typename Set::key_type>::type;
  return std::vector<Key>(s.begin(), s.end());
}
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
// ABSL_UNORDERED_SUPPORTS_ALLOC_CTORS is false for glibcxx versions
|
||||
// where the unordered containers are missing certain constructors that
|
||||
// take allocator arguments. This test is defined ad-hoc for the platforms
|
||||
// we care about (notably Crosstool 17) because libstdcxx's useless
|
||||
// versioning scheme precludes a more principled solution.
|
||||
// From GCC-4.9 Changelog: (src: https://gcc.gnu.org/gcc-4.9/changes.html)
|
||||
// "the unordered associative containers in <unordered_map> and <unordered_set>
|
||||
// meet the allocator-aware container requirements;"
|
||||
#if (defined(__GLIBCXX__) && __GLIBCXX__ <= 20140425 ) || \
|
||||
( __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 9 ))
|
||||
#define ABSL_UNORDERED_SUPPORTS_ALLOC_CTORS 0
|
||||
#else
|
||||
#define ABSL_UNORDERED_SUPPORTS_ALLOC_CTORS 1
|
||||
#endif
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_HASH_POLICY_TESTING_H_
|
||||
45
third_party/abseil_cpp/absl/container/internal/hash_policy_testing_test.cc
vendored
Normal file
45
third_party/abseil_cpp/absl/container/internal/hash_policy_testing_test.cc
vendored
Normal file
|
|
@ -0,0 +1,45 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "absl/container/internal/hash_policy_testing.h"
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
namespace {
|
||||
|
||||
// Verifies the id-tracking behavior of StatefulTestingHash: each default
// construction yields the next id in sequence, copies keep the source's id,
// and moves transfer the id (leaving the moved-from object with id 0).
TEST(_, Hash) {
  StatefulTestingHash h1;
  EXPECT_EQ(1, h1.id());
  StatefulTestingHash h2;
  EXPECT_EQ(2, h2.id());
  // Copy preserves the id of the source.
  StatefulTestingHash h1c(h1);
  EXPECT_EQ(1, h1c.id());
  // Move transfers the id; the moved-from hash reads as id 0.
  StatefulTestingHash h2m(std::move(h2));
  EXPECT_EQ(2, h2m.id());
  EXPECT_EQ(0, h2.id());
  StatefulTestingHash h3;
  EXPECT_EQ(3, h3.id());
  // Assignment from a fresh temporary takes the temporary's (new) id.
  h3 = StatefulTestingHash();
  EXPECT_EQ(4, h3.id());
  // Move-assignment transfers the id as well.
  h3 = std::move(h1);
  EXPECT_EQ(1, h3.id());
}
|
||||
|
||||
} // namespace
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
191
third_party/abseil_cpp/absl/container/internal/hash_policy_traits.h
vendored
Normal file
191
third_party/abseil_cpp/absl/container/internal/hash_policy_traits.h
vendored
Normal file
|
|
@ -0,0 +1,191 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_HASH_POLICY_TRAITS_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_HASH_POLICY_TRAITS_H_
|
||||
|
||||
#include <cstddef>
|
||||
#include <memory>
|
||||
#include <type_traits>
|
||||
#include <utility>
|
||||
|
||||
#include "absl/meta/type_traits.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
// Defines how slots are initialized/destroyed/moved.
|
||||
// Adapter over a user-supplied hash-table `Policy` that fills in defaults for
// the policy's optional operations (e.g. `transfer`). All members are static;
// this traits class is never instantiated.
template <class Policy, class = void>
struct hash_policy_traits {
 private:
  // Functor handed to Policy::apply() in key() below to extract the key from
  // an element.
  struct ReturnKey {
    // We return `Key` here.
    // When Key=T&, we forward the lvalue reference.
    // When Key=T, we return by value to avoid a dangling reference.
    // eg, for string_hash_map.
    template <class Key, class... Args>
    Key operator()(Key&& k, const Args&...) const {
      return std::forward<Key>(k);
    }
  };

  // Detects whether the policy declares a `constant_iterators` member type.
  // Primary template: no such member, defaults to false.
  template <class P = Policy, class = void>
  struct ConstantIteratorsImpl : std::false_type {};

  // Specialization chosen (via void_t) when P::constant_iterators exists;
  // inherits the policy's answer.
  template <class P>
  struct ConstantIteratorsImpl<P, absl::void_t<typename P::constant_iterators>>
      : P::constant_iterators {};

 public:
  // The actual object stored in the hash table.
  using slot_type = typename Policy::slot_type;

  // The type of the keys stored in the hashtable.
  using key_type = typename Policy::key_type;

  // The argument type for insertions into the hashtable. This is different
  // from value_type for increased performance. See initializer_list constructor
  // and insert() member functions for more details.
  using init_type = typename Policy::init_type;

  using reference = decltype(Policy::element(std::declval<slot_type*>()));
  using pointer = typename std::remove_reference<reference>::type*;
  using value_type = typename std::remove_reference<reference>::type;

  // Policies can set this variable to tell raw_hash_set that all iterators
  // should be constant, even `iterator`. This is useful for set-like
  // containers.
  // Defaults to false if not provided by the policy.
  using constant_iterators = ConstantIteratorsImpl<>;

  // PRECONDITION: `slot` is UNINITIALIZED
  // POSTCONDITION: `slot` is INITIALIZED
  template <class Alloc, class... Args>
  static void construct(Alloc* alloc, slot_type* slot, Args&&... args) {
    Policy::construct(alloc, slot, std::forward<Args>(args)...);
  }

  // PRECONDITION: `slot` is INITIALIZED
  // POSTCONDITION: `slot` is UNINITIALIZED
  template <class Alloc>
  static void destroy(Alloc* alloc, slot_type* slot) {
    Policy::destroy(alloc, slot);
  }

  // Transfers the `old_slot` to `new_slot`. Any memory allocated by the
  // allocator inside `old_slot` to `new_slot` can be transferred.
  //
  // OPTIONAL: defaults to:
  //
  //     clone(new_slot, std::move(*old_slot));
  //     destroy(old_slot);
  //
  // PRECONDITION: `new_slot` is UNINITIALIZED and `old_slot` is INITIALIZED
  // POSTCONDITION: `new_slot` is INITIALIZED and `old_slot` is
  //                UNINITIALIZED
  template <class Alloc>
  static void transfer(Alloc* alloc, slot_type* new_slot, slot_type* old_slot) {
    // The `0` selects the Policy::transfer overload when it exists (int is a
    // better match than char); otherwise overload resolution falls back to
    // the move-construct + destroy implementation below.
    transfer_impl(alloc, new_slot, old_slot, 0);
  }

  // PRECONDITION: `slot` is INITIALIZED
  // POSTCONDITION: `slot` is INITIALIZED
  template <class P = Policy>
  static auto element(slot_type* slot) -> decltype(P::element(slot)) {
    return P::element(slot);
  }

  // Returns the amount of memory owned by `slot`, exclusive of `sizeof(*slot)`.
  //
  // If `slot` is nullptr, returns the constant amount of memory owned by any
  // full slot or -1 if slots own variable amounts of memory.
  //
  // PRECONDITION: `slot` is INITIALIZED or nullptr
  template <class P = Policy>
  static size_t space_used(const slot_type* slot) {
    return P::space_used(slot);
  }

  // Provides generalized access to the key for elements, both for elements in
  // the table and for elements that have not yet been inserted (or even
  // constructed). We would like an API that allows us to say: `key(args...)`
  // but we cannot do that for all cases, so we use this more general API that
  // can be used for many things, including the following:
  //
  //   - Given an element in a table, get its key.
  //   - Given an element initializer, get its key.
  //   - Given `emplace()` arguments, get the element key.
  //
  // Implementations of this must adhere to a very strict technical
  // specification around aliasing and consuming arguments:
  //
  // Let `value_type` be the result type of `element()` without ref- and
  // cv-qualifiers. The first argument is a functor, the rest are constructor
  // arguments for `value_type`. Returns `std::forward<F>(f)(k, xs...)`, where
  // `k` is the element key, and `xs...` are the new constructor arguments for
  // `value_type`. It's allowed for `k` to alias `xs...`, and for both to alias
  // `ts...`. The key won't be touched once `xs...` are used to construct an
  // element; `ts...` won't be touched at all, which allows `apply()` to consume
  // any rvalues among them.
  //
  // If `value_type` is constructible from `Ts&&...`, `Policy::apply()` must not
  // trigger a hard compile error unless it originates from `f`. In other words,
  // `Policy::apply()` must be SFINAE-friendly. If `value_type` is not
  // constructible from `Ts&&...`, either SFINAE or a hard compile error is OK.
  //
  // If `Ts...` is `[cv] value_type[&]` or `[cv] init_type[&]`,
  // `Policy::apply()` must work. A compile error is not allowed, SFINAE or not.
  template <class F, class... Ts, class P = Policy>
  static auto apply(F&& f, Ts&&... ts)
      -> decltype(P::apply(std::forward<F>(f), std::forward<Ts>(ts)...)) {
    return P::apply(std::forward<F>(f), std::forward<Ts>(ts)...);
  }

  // Returns the "key" portion of the slot.
  // Used for node handle manipulation.
  template <class P = Policy>
  static auto key(slot_type* slot)
      -> decltype(P::apply(ReturnKey(), element(slot))) {
    return P::apply(ReturnKey(), element(slot));
  }

  // Returns the "value" (as opposed to the "key") portion of the element. Used
  // by maps to implement `operator[]`, `at()` and `insert_or_assign()`.
  template <class T, class P = Policy>
  static auto value(T* elem) -> decltype(P::value(elem)) {
    return P::value(elem);
  }

 private:
  // Use auto -> decltype as an enabler.
  // Preferred overload (int tag): valid only when Policy provides transfer().
  template <class Alloc, class P = Policy>
  static auto transfer_impl(Alloc* alloc, slot_type* new_slot,
                            slot_type* old_slot, int)
      -> decltype((void)P::transfer(alloc, new_slot, old_slot)) {
    P::transfer(alloc, new_slot, old_slot);
  }
  // Fallback (char tag): move-construct into the new slot, then destroy the
  // old one.
  template <class Alloc>
  static void transfer_impl(Alloc* alloc, slot_type* new_slot,
                            slot_type* old_slot, char) {
    construct(alloc, new_slot, std::move(element(old_slot)));
    destroy(alloc, old_slot);
  }
};
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_HASH_POLICY_TRAITS_H_
|
||||
144
third_party/abseil_cpp/absl/container/internal/hash_policy_traits_test.cc
vendored
Normal file
144
third_party/abseil_cpp/absl/container/internal/hash_policy_traits_test.cc
vendored
Normal file
|
|
@ -0,0 +1,144 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "absl/container/internal/hash_policy_traits.h"
|
||||
|
||||
#include <functional>
|
||||
#include <memory>
|
||||
#include <new>
|
||||
|
||||
#include "gmock/gmock.h"
|
||||
#include "gtest/gtest.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
namespace {
|
||||
|
||||
using ::testing::MockFunction;
|
||||
using ::testing::Return;
|
||||
using ::testing::ReturnRef;
|
||||
|
||||
using Alloc = std::allocator<int>;
|
||||
using Slot = int;
|
||||
|
||||
// Minimal hash-table policy whose required operations forward to
// test-controllable std::function hooks (installed by the fixture below), so
// the tests can observe exactly how hash_policy_traits invokes the policy.
struct PolicyWithoutOptionalOps {
  using slot_type = Slot;
  using key_type = Slot;
  using init_type = Slot;

  // Hooks for the required policy operations.
  static std::function<void(void*, Slot*, Slot)> construct;
  static std::function<void(void*, Slot*)> destroy;

  static std::function<Slot&(Slot*)> element;
  // `apply` must be a real function (not a std::function) so that
  // hash_policy_traits can SFINAE on it; it forwards to the hook.
  static int apply(int v) { return apply_impl(v); }
  static std::function<int(int)> apply_impl;
  static std::function<Slot&(Slot*)> value;
};

// Out-of-line definitions of the static hook objects.
std::function<void(void*, Slot*, Slot)> PolicyWithoutOptionalOps::construct;
std::function<void(void*, Slot*)> PolicyWithoutOptionalOps::destroy;

std::function<Slot&(Slot*)> PolicyWithoutOptionalOps::element;
std::function<int(int)> PolicyWithoutOptionalOps::apply_impl;
std::function<Slot&(Slot*)> PolicyWithoutOptionalOps::value;

// Same policy, but additionally provides the optional `transfer` operation,
// which hash_policy_traits should prefer over its move+destroy fallback.
struct PolicyWithOptionalOps : PolicyWithoutOptionalOps {
  static std::function<void(void*, Slot*, Slot*)> transfer;
};

std::function<void(void*, Slot*, Slot*)> PolicyWithOptionalOps::transfer;
|
||||
|
||||
// Fixture that wires every policy hook to a gMock MockFunction, so each test
// can set expectations on how hash_policy_traits calls into the policy.
struct Test : ::testing::Test {
  Test() {
    PolicyWithoutOptionalOps::construct = [&](void* a1, Slot* a2, Slot a3) {
      construct.Call(a1, a2, std::move(a3));
    };
    PolicyWithoutOptionalOps::destroy = [&](void* a1, Slot* a2) {
      destroy.Call(a1, a2);
    };

    PolicyWithoutOptionalOps::element = [&](Slot* a1) -> Slot& {
      return element.Call(a1);
    };
    PolicyWithoutOptionalOps::apply_impl = [&](int a1) -> int {
      return apply.Call(a1);
    };
    PolicyWithoutOptionalOps::value = [&](Slot* a1) -> Slot& {
      return value.Call(a1);
    };

    PolicyWithOptionalOps::transfer = [&](void* a1, Slot* a2, Slot* a3) {
      return transfer.Call(a1, a2, a3);
    };
  }

  // Allocator and slot passed to the traits calls in the tests.
  std::allocator<int> alloc;
  int a = 53;

  // Mocks backing each policy hook installed above.
  MockFunction<void(void*, Slot*, Slot)> construct;
  MockFunction<void(void*, Slot*)> destroy;

  MockFunction<Slot&(Slot*)> element;
  MockFunction<int(int)> apply;
  MockFunction<Slot&(Slot*)> value;

  MockFunction<void(void*, Slot*, Slot*)> transfer;
};
|
||||
|
||||
// construct() must forward the allocator, slot, and arguments to the policy.
TEST_F(Test, construct) {
  EXPECT_CALL(construct, Call(&alloc, &a, 53));
  hash_policy_traits<PolicyWithoutOptionalOps>::construct(&alloc, &a, 53);
}

// destroy() must forward the allocator and slot to the policy.
TEST_F(Test, destroy) {
  EXPECT_CALL(destroy, Call(&alloc, &a));
  hash_policy_traits<PolicyWithoutOptionalOps>::destroy(&alloc, &a);
}

// element() must return exactly the reference produced by the policy.
TEST_F(Test, element) {
  int b = 0;
  EXPECT_CALL(element, Call(&a)).WillOnce(ReturnRef(b));
  EXPECT_EQ(&b, &hash_policy_traits<PolicyWithoutOptionalOps>::element(&a));
}

// apply() must forward the argument to and return the result of the policy.
TEST_F(Test, apply) {
  EXPECT_CALL(apply, Call(42)).WillOnce(Return(1337));
  EXPECT_EQ(1337, (hash_policy_traits<PolicyWithoutOptionalOps>::apply(42)));
}

// value() must return exactly the reference produced by the policy.
TEST_F(Test, value) {
  int b = 0;
  EXPECT_CALL(value, Call(&a)).WillOnce(ReturnRef(b));
  EXPECT_EQ(&b, &hash_policy_traits<PolicyWithoutOptionalOps>::value(&a));
}

// Without a policy-provided transfer, the traits fall back to
// move-construct into the new slot followed by destroy of the old slot.
TEST_F(Test, without_transfer) {
  int b = 42;
  EXPECT_CALL(element, Call(&b)).WillOnce(::testing::ReturnRef(b));
  EXPECT_CALL(construct, Call(&alloc, &a, b));
  EXPECT_CALL(destroy, Call(&alloc, &b));
  hash_policy_traits<PolicyWithoutOptionalOps>::transfer(&alloc, &a, &b);
}

// With a policy-provided transfer, the traits must call it directly and not
// use the construct/destroy fallback.
TEST_F(Test, with_transfer) {
  int b = 42;
  EXPECT_CALL(transfer, Call(&alloc, &a, &b));
  hash_policy_traits<PolicyWithOptionalOps>::transfer(&alloc, &a, &b);
}
|
||||
|
||||
} // namespace
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
110
third_party/abseil_cpp/absl/container/internal/hashtable_debug.h
vendored
Normal file
110
third_party/abseil_cpp/absl/container/internal/hashtable_debug.h
vendored
Normal file
|
|
@ -0,0 +1,110 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
// This library provides APIs to debug the probing behavior of hash tables.
|
||||
//
|
||||
// In general, the probing behavior is a black box for users and only the
|
||||
// side effects can be measured in the form of performance differences.
|
||||
// These APIs give a glimpse on the actual behavior of the probing algorithms in
|
||||
// these hashtables given a specified hash function and a set of elements.
|
||||
//
|
||||
// The probe count distribution can be used to assess the quality of the hash
|
||||
// function for that particular hash table. Note that a hash function that
|
||||
// performs well in one hash table implementation does not necessarily performs
|
||||
// well in a different one.
|
||||
//
|
||||
// This library supports std::unordered_{set,map}, dense_hash_{set,map} and
|
||||
// absl::{flat,node,string}_hash_{set,map}.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_HASHTABLE_DEBUG_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_HASHTABLE_DEBUG_H_
|
||||
|
||||
#include <cstddef>
|
||||
#include <algorithm>
|
||||
#include <type_traits>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/container/internal/hashtable_debug_hooks.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
// Returns the number of probes required to lookup `key`. Returns 0 for a
|
||||
// search with no collisions. Higher values mean more hash collisions occurred;
|
||||
// however, the exact meaning of this number varies according to the container
|
||||
// type.
|
||||
// Returns the number of probes required to lookup `key`. Returns 0 for a
// search with no collisions. Higher values mean more hash collisions occurred;
// however, the exact meaning of this number varies according to the container
// type.
template <typename C>
size_t GetHashtableDebugNumProbes(
    const C& c, const typename C::key_type& key) {
  // Dispatch to the (possibly specialized) per-container debug hooks.
  return absl::container_internal::hashtable_debug_internal::
      HashtableDebugAccess<C>::GetNumProbes(c, key);
}

// Gets a histogram of the number of probes for each elements in the container.
// The sum of all the values in the vector is equal to container.size().
// v[i] is the number of elements that required exactly i probes to find.
template <typename C>
std::vector<size_t> GetHashtableDebugNumProbesHistogram(const C& container) {
  std::vector<size_t> v;
  for (auto it = container.begin(); it != container.end(); ++it) {
    size_t num_probes = GetHashtableDebugNumProbes(
        container,
        absl::container_internal::hashtable_debug_internal::GetKey<C>(*it, 0));
    // Grow the histogram on demand so the largest probe count fits.
    v.resize((std::max)(v.size(), num_probes + 1));
    v[num_probes]++;
  }
  return v;
}

// Aggregate statistics over the probe-count histogram of a container.
struct HashtableDebugProbeSummary {
  size_t total_elements;
  size_t total_num_probes;
  double mean;  // total_num_probes / total_elements
};
|
||||
|
||||
// Gets a summary of the probe count distribution for the elements in the
|
||||
// container.
|
||||
template <typename C>
|
||||
HashtableDebugProbeSummary GetHashtableDebugProbeSummary(const C& container) {
|
||||
auto probes = GetHashtableDebugNumProbesHistogram(container);
|
||||
HashtableDebugProbeSummary summary = {};
|
||||
for (size_t i = 0; i < probes.size(); ++i) {
|
||||
summary.total_elements += probes[i];
|
||||
summary.total_num_probes += probes[i] * i;
|
||||
}
|
||||
summary.mean = 1.0 * summary.total_num_probes / summary.total_elements;
|
||||
return summary;
|
||||
}
|
||||
|
||||
// Returns the number of bytes requested from the allocator by the container
|
||||
// and not freed.
|
||||
// Returns the number of bytes requested from the allocator by the container
// and not freed.
template <typename C>
size_t AllocatedByteSize(const C& c) {
  // Dispatch to the (possibly specialized) per-container debug hooks.
  return absl::container_internal::hashtable_debug_internal::
      HashtableDebugAccess<C>::AllocatedByteSize(c);
}

// Returns a tight lower bound for AllocatedByteSize(c) where `c` is of type `C`
// and `c.size()` is equal to `num_elements`.
template <typename C>
size_t LowerBoundAllocatedByteSize(size_t num_elements) {
  return absl::container_internal::hashtable_debug_internal::
      HashtableDebugAccess<C>::LowerBoundAllocatedByteSize(num_elements);
}
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_HASHTABLE_DEBUG_H_
|
||||
85
third_party/abseil_cpp/absl/container/internal/hashtable_debug_hooks.h
vendored
Normal file
85
third_party/abseil_cpp/absl/container/internal/hashtable_debug_hooks.h
vendored
Normal file
|
|
@ -0,0 +1,85 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
// Provides the internal API for hashtable_debug.h.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_HASHTABLE_DEBUG_HOOKS_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_HASHTABLE_DEBUG_HOOKS_H_
|
||||
|
||||
#include <cstddef>
|
||||
|
||||
#include <algorithm>
|
||||
#include <type_traits>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/base/config.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
namespace hashtable_debug_internal {
|
||||
|
||||
// If it is a map, call get<0>().
// This overload participates only when T has a `mapped_type` (i.e. is a map);
// the unqualified get (with `using std::get`) also supports tuple-like
// value types via ADL. The int/char second parameters rank the overloads:
// callers pass 0, which prefers this `int` overload when it is viable.
using std::get;
template <typename T, typename = typename T::mapped_type>
auto GetKey(const typename T::value_type& pair, int) -> decltype(get<0>(pair)) {
  return get<0>(pair);
}
|
||||
|
||||
// If it is not a map, return the value directly.
|
||||
// Non-map overload: the element itself is the key, so hand it back unchanged.
// The `char` tag makes this the fallback when the map overload is not viable.
template <typename T>
const typename T::key_type& GetKey(const typename T::key_type& k, char) {
  return k;
}
|
||||
|
||||
// Containers should specialize this to provide debug information for that
|
||||
// container.
|
||||
// Containers should specialize this to provide debug information for that
// container.
template <class Container, typename Enabler = void>
struct HashtableDebugAccess {
  // Returns the number of probes required to find `key` in `c`. The "number of
  // probes" is a concept that can vary by container. Implementations should
  // return 0 when `key` was found in the minimum number of operations and
  // should increment the result for each non-trivial operation required to find
  // `key`.
  //
  // The default implementation uses the bucket api from the standard and thus
  // works for `std::unordered_*` containers.
  static size_t GetNumProbes(const Container& c,
                             const typename Container::key_type& key) {
    // An empty table (no buckets) requires no probes; `{}` value-initializes
    // the size_t result to 0.
    if (!c.bucket_count()) return {};
    size_t num_probes = 0;
    size_t bucket = c.bucket(key);
    // Walk the key's bucket, counting each element inspected before `key` is
    // found (or the bucket is exhausted).
    for (auto it = c.begin(bucket), e = c.end(bucket);; ++it, ++num_probes) {
      if (it == e) return num_probes;
      if (c.key_eq()(key, GetKey<Container>(*it, 0))) return num_probes;
    }
  }

  // Returns the number of bytes requested from the allocator by the container
  // and not freed.
  //
  // static size_t AllocatedByteSize(const Container& c);

  // Returns a tight lower bound for AllocatedByteSize(c) where `c` is of type
  // `Container` and `c.size()` is equal to `num_elements`.
  //
  // static size_t LowerBoundAllocatedByteSize(size_t num_elements);
};
|
||||
|
||||
} // namespace hashtable_debug_internal
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_HASHTABLE_DEBUG_HOOKS_H_
|
||||
269
third_party/abseil_cpp/absl/container/internal/hashtablez_sampler.cc
vendored
Normal file
269
third_party/abseil_cpp/absl/container/internal/hashtablez_sampler.cc
vendored
Normal file
|
|
@ -0,0 +1,269 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "absl/container/internal/hashtablez_sampler.h"
|
||||
|
||||
#include <atomic>
|
||||
#include <cassert>
|
||||
#include <cmath>
|
||||
#include <functional>
|
||||
#include <limits>
|
||||
|
||||
#include "absl/base/attributes.h"
|
||||
#include "absl/base/internal/exponential_biased.h"
|
||||
#include "absl/container/internal/have_sse.h"
|
||||
#include "absl/debugging/stacktrace.h"
|
||||
#include "absl/memory/memory.h"
|
||||
#include "absl/synchronization/mutex.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
// Out-of-line definition of the constexpr class constant (needed when the
// member is ODR-used under pre-C++17 rules).
constexpr int HashtablezInfo::kMaxStackDepth;

namespace {
// Whether hashtablez sampling is enabled at all; off by default until
// SetHashtablezEnabled(true) is called.
ABSL_CONST_INIT std::atomic<bool> g_hashtablez_enabled{
    false
};
// Parameter handed to ExponentialBiased::GetStride in SampleSlow to pick the
// interval between samples.
ABSL_CONST_INIT std::atomic<int32_t> g_hashtablez_sample_parameter{1 << 10};
// Upper bound on the number of concurrently registered samples; Register()
// drops new samples beyond this.
ABSL_CONST_INIT std::atomic<int32_t> g_hashtablez_max_samples{1 << 20};

#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
// Per-thread generator state used to draw each thread's next sampling stride.
ABSL_PER_THREAD_TLS_KEYWORD absl::base_internal::ExponentialBiased
    g_exponential_biased_generator;
#endif

}  // namespace

#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
// Per-thread countdown of events remaining before the next sample is taken.
ABSL_PER_THREAD_TLS_KEYWORD int64_t global_next_sample = 0;
#endif  // defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
|
||||
|
||||
// Returns the process-wide sampler. The instance is heap-allocated and never
// deleted, so it remains valid during program shutdown.
HashtablezSampler& HashtablezSampler::Global() {
  static auto* sampler = new HashtablezSampler();
  return *sampler;
}

// Installs `f` as the callback run (by PushDead) when a sample is retired,
// and returns the previously installed callback.
HashtablezSampler::DisposeCallback HashtablezSampler::SetDisposeCallback(
    DisposeCallback f) {
  return dispose_.exchange(f, std::memory_order_relaxed);
}

// A freshly constructed info starts out reset and ready for sampling.
HashtablezInfo::HashtablezInfo() { PrepareForSampling(); }
HashtablezInfo::~HashtablezInfo() = default;
|
||||
|
||||
// Resets all recorded statistics so this info object can represent a newly
// sampled hashtable. Called from the constructor and (under the sample's
// init_mu) when a dead sample is recycled by PopDead().
void HashtablezInfo::PrepareForSampling() {
  capacity.store(0, std::memory_order_relaxed);
  size.store(0, std::memory_order_relaxed);
  num_erases.store(0, std::memory_order_relaxed);
  max_probe_length.store(0, std::memory_order_relaxed);
  total_probe_length.store(0, std::memory_order_relaxed);
  hashes_bitwise_or.store(0, std::memory_order_relaxed);
  // Bitwise-AND accumulator starts at all-ones so the first hash fully
  // determines it.
  hashes_bitwise_and.store(~size_t{}, std::memory_order_relaxed);

  create_time = absl::Now();
  // The inliner makes hardcoded skip_count difficult (especially when combined
  // with LTO).  We use the ability to exclude stacks by regex when encoding
  // instead.
  depth = absl::GetStackTrace(stack, HashtablezInfo::kMaxStackDepth,
                              /* skip_count= */ 0);
  // dead == nullptr marks the sample as live; Iterate() only visits live
  // samples.
  dead = nullptr;
}
|
||||
|
||||
// The graveyard (free list of retired samples) is a circular list; it starts
// empty, which is represented by the sentinel pointing at itself.
HashtablezSampler::HashtablezSampler()
    : dropped_samples_(0), size_estimate_(0), all_(nullptr), dispose_(nullptr) {
  absl::MutexLock l(&graveyard_.init_mu);
  graveyard_.dead = &graveyard_;
}

// Frees every sample ever allocated by walking the `all_` list. Samples are
// only ever prepended to `all_` (see PushNew), never removed from it, so this
// releases everything.
HashtablezSampler::~HashtablezSampler() {
  HashtablezInfo* s = all_.load(std::memory_order_acquire);
  while (s != nullptr) {
    HashtablezInfo* next = s->next;
    delete s;
    s = next;
  }
}
|
||||
|
||||
// Lock-free prepend of `sample` onto the `all_` singly-linked list via a
// compare-exchange loop. The release ordering publishes the sample's fields
// to readers that acquire `all_`.
void HashtablezSampler::PushNew(HashtablezInfo* sample) {
  sample->next = all_.load(std::memory_order_relaxed);
  while (!all_.compare_exchange_weak(sample->next, sample,
                                     std::memory_order_release,
                                     std::memory_order_relaxed)) {
  }
}

// Retires `sample`: runs the dispose callback (if any), then splices the
// sample onto the graveyard free list for later reuse. Setting `dead` to a
// non-null value also hides the sample from Iterate().
void HashtablezSampler::PushDead(HashtablezInfo* sample) {
  if (auto* dispose = dispose_.load(std::memory_order_relaxed)) {
    dispose(*sample);
  }

  absl::MutexLock graveyard_lock(&graveyard_.init_mu);
  absl::MutexLock sample_lock(&sample->init_mu);
  sample->dead = graveyard_.dead;
  graveyard_.dead = sample;
}

// Pops one retired sample off the graveyard free list, resetting its stats,
// or returns nullptr if the graveyard is empty.
HashtablezInfo* HashtablezSampler::PopDead() {
  absl::MutexLock graveyard_lock(&graveyard_.init_mu);

  // The list is circular, so eventually it collapses down to
  //   graveyard_.dead == &graveyard_
  // when it is empty.
  HashtablezInfo* sample = graveyard_.dead;
  if (sample == &graveyard_) return nullptr;

  absl::MutexLock sample_lock(&sample->init_mu);
  graveyard_.dead = sample->dead;
  // Reset stats (and mark the sample live again) while holding its lock.
  sample->PrepareForSampling();
  return sample;
}
|
||||
|
||||
// Obtains a sample slot for a newly sampled hashtable, or nullptr if the
// sampler is already at its configured maximum number of samples.
HashtablezInfo* HashtablezSampler::Register() {
  int64_t size = size_estimate_.fetch_add(1, std::memory_order_relaxed);
  if (size > g_hashtablez_max_samples.load(std::memory_order_relaxed)) {
    // Over the limit: undo the optimistic increment and count the drop.
    size_estimate_.fetch_sub(1, std::memory_order_relaxed);
    dropped_samples_.fetch_add(1, std::memory_order_relaxed);
    return nullptr;
  }

  // Prefer recycling a retired sample from the graveyard before allocating.
  HashtablezInfo* sample = PopDead();
  if (sample == nullptr) {
    // Resurrection failed.  Hire a new warlock.
    sample = new HashtablezInfo();
    PushNew(sample);
  }

  return sample;
}
|
||||
|
||||
// Retires `sample` (moving it to the graveyard) and shrinks the live-sample
// count estimate.
void HashtablezSampler::Unregister(HashtablezInfo* sample) {
  PushDead(sample);
  size_estimate_.fetch_sub(1, std::memory_order_relaxed);
}

// Invokes `f` on every live sample (those with dead == nullptr), holding each
// sample's init_mu around the call. Returns the number of samples dropped so
// far because the max-samples limit was hit.
int64_t HashtablezSampler::Iterate(
    const std::function<void(const HashtablezInfo& stack)>& f) {
  HashtablezInfo* s = all_.load(std::memory_order_acquire);
  while (s != nullptr) {
    absl::MutexLock l(&s->init_mu);
    if (s->dead == nullptr) {
      f(*s);
    }
    s = s->next;
  }

  return dropped_samples_.load(std::memory_order_relaxed);
}
|
||||
|
||||
// Returns true when every hashtable must be sampled, as decided once (lazily)
// by the weak/override hook AbslContainerInternalSampleEverything(). The
// answer is cached in a process-wide atomic; the fast path is a single
// relaxed load and branch.
static bool ShouldForceSampling() {
  enum ForceState {
    kDontForce,
    kForce,
    kUninitialized
  };
  ABSL_CONST_INIT static std::atomic<ForceState> global_state{
      kUninitialized};
  ForceState state = global_state.load(std::memory_order_relaxed);
  if (ABSL_PREDICT_TRUE(state == kDontForce)) return false;

  if (state == kUninitialized) {
    // First caller(s) resolve the hook; racing threads may each call it, but
    // they store the same answer.
    state = AbslContainerInternalSampleEverything() ? kForce : kDontForce;
    global_state.store(state, std::memory_order_relaxed);
  }
  return state == kForce;
}
|
||||
|
||||
// Slow path of the sampling decision, reached when the per-thread countdown
// (*next_sample) expires. Returns a registered sample to attach to the new
// hashtable, or nullptr if this table should not be sampled. Always resets
// *next_sample to the number of events until the next slow-path visit.
HashtablezInfo* SampleSlow(int64_t* next_sample) {
  if (ABSL_PREDICT_FALSE(ShouldForceSampling())) {
    // Force mode: sample every table and come back here every time.
    *next_sample = 1;
    return HashtablezSampler::Global().Register();
  }

#if !defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
  // Sampling is compiled out: push the next visit as far away as possible.
  *next_sample = std::numeric_limits<int64_t>::max();
  return nullptr;
#else
  bool first = *next_sample < 0;
  *next_sample = g_exponential_biased_generator.GetStride(
      g_hashtablez_sample_parameter.load(std::memory_order_relaxed));
  // Small values of interval are equivalent to just sampling next time.
  ABSL_ASSERT(*next_sample >= 1);

  // g_hashtablez_enabled can be dynamically flipped, we need to set a threshold
  // low enough that we will start sampling in a reasonable time, so we just use
  // the default sampling rate.
  if (!g_hashtablez_enabled.load(std::memory_order_relaxed)) return nullptr;

  // We will only be negative on our first count, so we should just retry in
  // that case.
  if (first) {
    if (ABSL_PREDICT_TRUE(--*next_sample > 0)) return nullptr;
    return SampleSlow(next_sample);
  }

  return HashtablezSampler::Global().Register();
#endif
}
|
||||
|
||||
// Returns a previously-sampled info to the global sampler (it is moved to the
// graveyard for reuse, not freed).
void UnsampleSlow(HashtablezInfo* info) {
  HashtablezSampler::Global().Unregister(info);
}
|
||||
|
||||
// Records one insertion into the sampled table: folds `hash` into the AND/OR
// fingerprints and accounts the probe length (in group units).
void RecordInsertSlow(HashtablezInfo* info, size_t hash,
                      size_t distance_from_desired) {
  // SwissTables probe in groups (16 slots with SSE2, 8 otherwise), so convert
  // the raw offset-from-desired-slot into a count of group probes.
#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2
  const size_t group_probes = distance_from_desired / 16;
#else
  const size_t group_probes = distance_from_desired / 8;
#endif

  info->hashes_bitwise_and.fetch_and(hash, std::memory_order_relaxed);
  info->hashes_bitwise_or.fetch_or(hash, std::memory_order_relaxed);
  // std::atomic has no fetch_max; a racy load/max/store is acceptable for
  // statistics gathering.
  const size_t prev_max =
      info->max_probe_length.load(std::memory_order_relaxed);
  info->max_probe_length.store(std::max(prev_max, group_probes),
                               std::memory_order_relaxed);
  info->total_probe_length.fetch_add(group_probes, std::memory_order_relaxed);
  info->size.fetch_add(1, std::memory_order_relaxed);
}
|
||||
|
||||
// Enables or disables hashtable sampling process-wide. Readers poll
// g_hashtablez_enabled (see SampleSlow), so the change applies to subsequent
// sampling decisions.
void SetHashtablezEnabled(bool enabled) {
  g_hashtablez_enabled.store(enabled, std::memory_order_release);
}
|
||||
|
||||
void SetHashtablezSampleParameter(int32_t rate) {
|
||||
if (rate > 0) {
|
||||
g_hashtablez_sample_parameter.store(rate, std::memory_order_release);
|
||||
} else {
|
||||
ABSL_RAW_LOG(ERROR, "Invalid hashtablez sample rate: %lld",
|
||||
static_cast<long long>(rate)); // NOLINT(runtime/int)
|
||||
}
|
||||
}
|
||||
|
||||
void SetHashtablezMaxSamples(int32_t max) {
|
||||
if (max > 0) {
|
||||
g_hashtablez_max_samples.store(max, std::memory_order_release);
|
||||
} else {
|
||||
ABSL_RAW_LOG(ERROR, "Invalid hashtablez max samples: %lld",
|
||||
static_cast<long long>(max)); // NOLINT(runtime/int)
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
292
third_party/abseil_cpp/absl/container/internal/hashtablez_sampler.h
vendored
Normal file
292
third_party/abseil_cpp/absl/container/internal/hashtablez_sampler.h
vendored
Normal file
|
|
@ -0,0 +1,292 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
// -----------------------------------------------------------------------------
|
||||
// File: hashtablez_sampler.h
|
||||
// -----------------------------------------------------------------------------
|
||||
//
|
||||
// This header file defines the API for a low level library to sample hashtables
|
||||
// and collect runtime statistics about them.
|
||||
//
|
||||
// `HashtablezSampler` controls the lifecycle of `HashtablezInfo` objects which
|
||||
// store information about a single sample.
|
||||
//
|
||||
// `Record*` methods store information into samples.
|
||||
// `Sample()` and `Unsample()` make use of a single global sampler with
|
||||
// properties controlled by the flags hashtablez_enabled,
|
||||
// hashtablez_sample_rate, and hashtablez_max_samples.
|
||||
//
|
||||
// WARNING
|
||||
//
|
||||
// Using this sampling API may cause sampled Swiss tables to use the global
|
||||
// allocator (operator `new`) in addition to any custom allocator. If you
|
||||
// are using a table in an unusual circumstance where allocation or calling a
|
||||
// linux syscall is unacceptable, this could interfere.
|
||||
//
|
||||
// This utility is internal-only. Use at your own risk.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_HASHTABLEZ_SAMPLER_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_HASHTABLEZ_SAMPLER_H_
|
||||
|
||||
#include <atomic>
|
||||
#include <functional>
|
||||
#include <memory>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/base/internal/per_thread_tls.h"
|
||||
#include "absl/base/optimization.h"
|
||||
#include "absl/container/internal/have_sse.h"
|
||||
#include "absl/synchronization/mutex.h"
|
||||
#include "absl/utility/utility.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
// Stores information about a sampled hashtable. All mutations to this *must*
|
||||
// be made through `Record*` functions below. All reads from this *must* only
|
||||
// occur in the callback to `HashtablezSampler::Iterate`.
|
||||
struct HashtablezInfo {
  // Constructs the object but does not fill in any fields.
  HashtablezInfo();
  ~HashtablezInfo();
  // Not copyable: the sampler links infos into intrusive lists by address.
  HashtablezInfo(const HashtablezInfo&) = delete;
  HashtablezInfo& operator=(const HashtablezInfo&) = delete;

  // Puts the object into a clean state, fills in the logically `const` members,
  // blocking for any readers that are currently sampling the object.
  void PrepareForSampling() ABSL_EXCLUSIVE_LOCKS_REQUIRED(init_mu);

  // These fields are mutated by the various Record* APIs and need to be
  // thread-safe.
  std::atomic<size_t> capacity;            // current slot capacity
  std::atomic<size_t> size;                // current number of elements
  std::atomic<size_t> num_erases;          // erases since the last rehash/clear
  std::atomic<size_t> max_probe_length;    // longest probe seen, in groups
  std::atomic<size_t> total_probe_length;  // sum of probe lengths, in groups
  std::atomic<size_t> hashes_bitwise_or;   // OR of all inserted hashes
  std::atomic<size_t> hashes_bitwise_and;  // AND of all inserted hashes

  // `HashtablezSampler` maintains intrusive linked lists for all samples. See
  // comments on `HashtablezSampler::all_` for details on these. `init_mu`
  // guards the ability to restore the sample to a pristine state. This
  // prevents races with sampling and resurrecting an object.
  absl::Mutex init_mu;
  HashtablezInfo* next;
  HashtablezInfo* dead ABSL_GUARDED_BY(init_mu);

  // All of the fields below are set by `PrepareForSampling`, they must not be
  // mutated in `Record*` functions. They are logically `const` in that sense.
  // These are guarded by init_mu, but that is not externalized to clients, who
  // can only read them during `HashtablezSampler::Iterate` which will hold the
  // lock.
  static constexpr int kMaxStackDepth = 64;
  absl::Time create_time;        // when this sample was (re)initialized
  int32_t depth;                 // number of valid frames in `stack`
  void* stack[kMaxStackDepth];   // creation stack trace
};
|
||||
|
||||
// Records a rehash of the sampled table: replaces the probe-length total
// (scaled to group units, matching RecordInsertSlow) and clears the erase
// counter.
inline void RecordRehashSlow(HashtablezInfo* info, size_t total_probe_length) {
#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2
  const size_t group_probes = total_probe_length / 16;
#else
  const size_t group_probes = total_probe_length / 8;
#endif
  info->total_probe_length.store(group_probes, std::memory_order_relaxed);
  info->num_erases.store(0, std::memory_order_relaxed);
}
|
||||
|
||||
// Records a change in the sampled table's element count and slot capacity.
inline void RecordStorageChangedSlow(HashtablezInfo* info, size_t size,
                                     size_t capacity) {
  info->size.store(size, std::memory_order_relaxed);
  info->capacity.store(capacity, std::memory_order_relaxed);
  // An empty table means the container was cleared; treat that as a rehash so
  // the probe-length and erase counters start fresh as well.
  const bool cleared = size == 0;
  if (cleared) {
    RecordRehashSlow(info, /*total_probe_length=*/0);
  }
}
|
||||
|
||||
void RecordInsertSlow(HashtablezInfo* info, size_t hash,
|
||||
size_t distance_from_desired);
|
||||
|
||||
// Records one erase: one fewer live element, one more erase. The erase count
// is reset by RecordRehashSlow, so it reflects churn since the last rehash.
inline void RecordEraseSlow(HashtablezInfo* info) {
  info->size.fetch_sub(1, std::memory_order_relaxed);
  info->num_erases.fetch_add(1, std::memory_order_relaxed);
}
|
||||
|
||||
HashtablezInfo* SampleSlow(int64_t* next_sample);
|
||||
void UnsampleSlow(HashtablezInfo* info);
|
||||
|
||||
// Move-only RAII handle owning at most one registered HashtablezInfo. A
// default-constructed handle is "unsampled" and all Record* calls on it are
// cheap no-ops; a sampled handle unregisters its info on destruction.
class HashtablezInfoHandle {
 public:
  explicit HashtablezInfoHandle() : info_(nullptr) {}
  explicit HashtablezInfoHandle(HashtablezInfo* info) : info_(info) {}
  ~HashtablezInfoHandle() {
    if (ABSL_PREDICT_TRUE(info_ == nullptr)) return;
    UnsampleSlow(info_);
  }

  // Copying is disallowed: exactly one handle owns the registration.
  HashtablezInfoHandle(const HashtablezInfoHandle&) = delete;
  HashtablezInfoHandle& operator=(const HashtablezInfoHandle&) = delete;

  HashtablezInfoHandle(HashtablezInfoHandle&& o) noexcept
      : info_(absl::exchange(o.info_, nullptr)) {}
  HashtablezInfoHandle& operator=(HashtablezInfoHandle&& o) noexcept {
    // Release any currently-owned sample before stealing `o`'s.
    if (ABSL_PREDICT_FALSE(info_ != nullptr)) {
      UnsampleSlow(info_);
    }
    info_ = absl::exchange(o.info_, nullptr);
    return *this;
  }

  // Each Record* wrapper tests for the common unsampled case first and only
  // then forwards to the out-of-line slow path.
  inline void RecordStorageChanged(size_t size, size_t capacity) {
    if (ABSL_PREDICT_TRUE(info_ == nullptr)) return;
    RecordStorageChangedSlow(info_, size, capacity);
  }

  inline void RecordRehash(size_t total_probe_length) {
    if (ABSL_PREDICT_TRUE(info_ == nullptr)) return;
    RecordRehashSlow(info_, total_probe_length);
  }

  inline void RecordInsert(size_t hash, size_t distance_from_desired) {
    if (ABSL_PREDICT_TRUE(info_ == nullptr)) return;
    RecordInsertSlow(info_, hash, distance_from_desired);
  }

  inline void RecordErase() {
    if (ABSL_PREDICT_TRUE(info_ == nullptr)) return;
    RecordEraseSlow(info_);
  }

  friend inline void swap(HashtablezInfoHandle& lhs,
                          HashtablezInfoHandle& rhs) {
    std::swap(lhs.info_, rhs.info_);
  }

 private:
  friend class HashtablezInfoHandlePeer;  // test-only access to info_
  HashtablezInfo* info_;  // nullptr when this handle is unsampled
};
|
||||
|
||||
#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
|
||||
#error ABSL_INTERNAL_HASHTABLEZ_SAMPLE cannot be directly set
|
||||
#endif // defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
|
||||
|
||||
#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
|
||||
extern ABSL_PER_THREAD_TLS_KEYWORD int64_t global_next_sample;
|
||||
#endif // ABSL_PER_THREAD_TLS
|
||||
|
||||
// Returns an RAII sampling handle that manages registration and unregistation
|
||||
// with the global sampler.
|
||||
inline HashtablezInfoHandle Sample() {
#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
  // Fast path: decrement the thread-local countdown; only when it reaches
  // zero do we take the slow path that may actually register a sample.
  if (ABSL_PREDICT_TRUE(--global_next_sample > 0)) {
    return HashtablezInfoHandle(nullptr);
  }
  return HashtablezInfoHandle(SampleSlow(&global_next_sample));
#else
  // Sampling support not compiled in: always return an inert handle.
  return HashtablezInfoHandle(nullptr);
#endif  // ABSL_INTERNAL_HASHTABLEZ_SAMPLE
}
|
||||
|
||||
// Holds samples and their associated stack traces with a soft limit of
|
||||
// `SetHashtablezMaxSamples()`.
|
||||
//
|
||||
// Thread safe.
|
||||
class HashtablezSampler {
 public:
  // Returns a global Sampler.
  static HashtablezSampler& Global();

  HashtablezSampler();
  ~HashtablezSampler();

  // Registers for sampling. Returns an opaque registration info.
  HashtablezInfo* Register();

  // Unregisters the sample.
  void Unregister(HashtablezInfo* sample);

  // The dispose callback will be called on all samples the moment they are
  // being unregistered. Only affects samples that are unregistered after the
  // callback has been set.
  // Returns the previous callback.
  using DisposeCallback = void (*)(const HashtablezInfo&);
  DisposeCallback SetDisposeCallback(DisposeCallback f);

  // Iterates over all the registered `StackInfo`s. Returning the number of
  // samples that have been dropped.
  int64_t Iterate(const std::function<void(const HashtablezInfo& stack)>& f);

 private:
  void PushNew(HashtablezInfo* sample);
  void PushDead(HashtablezInfo* sample);
  HashtablezInfo* PopDead();

  // Number of samples that have been dropped (reported by Iterate);
  // presumably incremented when the soft max is exceeded — see Register().
  std::atomic<size_t> dropped_samples_;
  // Approximate count of currently-live samples (relaxed bookkeeping).
  std::atomic<size_t> size_estimate_;

  // Intrusive lock free linked lists for tracking samples.
  //
  // `all_` records all samples (they are never removed from this list) and is
  // terminated with a `nullptr`.
  //
  // `graveyard_.dead` is a circular linked list. When it is empty,
  // `graveyard_.dead == &graveyard_`. The list is circular so that
  // every item on it (even the last) has a non-null dead pointer. This allows
  // `Iterate` to determine if a given sample is live or dead using only
  // information on the sample itself.
  //
  // For example, nodes [A, B, C, D, E] with [A, C, E] alive and [B, D] dead
  // looks like this (G is the Graveyard):
  //
  //           +---+    +---+    +---+    +---+    +---+
  //    all -->| A |--->| B |--->| C |--->| D |--->| E |
  //           |   |    |   |    |   |    |   |    |   |
  //   +---+   |   | +->|   |-+  |   | +->|   |-+  |   |
  //   | G |   +---+ |  +---+ |  +---+ |  +---+ |  +---+
  //   |   |         |        |        |        |
  //   |   | --------+        +--------+        |
  //   +---+                                    |
  //     ^                                      |
  //     +--------------------------------------+
  //
  std::atomic<HashtablezInfo*> all_;
  HashtablezInfo graveyard_;

  std::atomic<DisposeCallback> dispose_;
};
|
||||
|
||||
// Enables or disables sampling for Swiss tables.
|
||||
void SetHashtablezEnabled(bool enabled);
|
||||
|
||||
// Sets the rate at which Swiss tables will be sampled.
|
||||
void SetHashtablezSampleParameter(int32_t rate);
|
||||
|
||||
// Sets a soft max for the number of samples that will be kept.
|
||||
void SetHashtablezMaxSamples(int32_t max);
|
||||
|
||||
// Configuration override.
|
||||
// This allows process-wide sampling without depending on order of
|
||||
// initialization of static storage duration objects.
|
||||
// The definition of this constant is weak, which allows us to inject a
|
||||
// different value for it at link time.
|
||||
extern "C" bool AbslContainerInternalSampleEverything();
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_HASHTABLEZ_SAMPLER_H_
|
||||
30
third_party/abseil_cpp/absl/container/internal/hashtablez_sampler_force_weak_definition.cc
vendored
Normal file
30
third_party/abseil_cpp/absl/container/internal/hashtablez_sampler_force_weak_definition.cc
vendored
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "absl/container/internal/hashtablez_sampler.h"
|
||||
|
||||
#include "absl/base/attributes.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
// See hashtablez_sampler.h for details.
|
||||
// Weak default: sampling is not forced. A build may override this symbol at
// link time to force every hashtable to be sampled (see hashtablez_sampler.h).
extern "C" ABSL_ATTRIBUTE_WEAK bool AbslContainerInternalSampleEverything() {
  return false;
}
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
359
third_party/abseil_cpp/absl/container/internal/hashtablez_sampler_test.cc
vendored
Normal file
359
third_party/abseil_cpp/absl/container/internal/hashtablez_sampler_test.cc
vendored
Normal file
|
|
@ -0,0 +1,359 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "absl/container/internal/hashtablez_sampler.h"
|
||||
|
||||
#include <atomic>
|
||||
#include <limits>
|
||||
#include <random>
|
||||
|
||||
#include "gmock/gmock.h"
|
||||
#include "gtest/gtest.h"
|
||||
#include "absl/base/attributes.h"
|
||||
#include "absl/container/internal/have_sse.h"
|
||||
#include "absl/synchronization/blocking_counter.h"
|
||||
#include "absl/synchronization/internal/thread_pool.h"
|
||||
#include "absl/synchronization/mutex.h"
|
||||
#include "absl/synchronization/notification.h"
|
||||
#include "absl/time/clock.h"
|
||||
#include "absl/time/time.h"
|
||||
|
||||
#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2
|
||||
constexpr int kProbeLength = 16;
|
||||
#else
|
||||
constexpr int kProbeLength = 8;
|
||||
#endif
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
// Test-only backdoor: befriended by HashtablezInfoHandle so tests can inspect
// the otherwise-private info_ pointer.
class HashtablezInfoHandlePeer {
 public:
  // True iff `h` currently owns a registered sample.
  static bool IsSampled(const HashtablezInfoHandle& h) {
    return h.info_ != nullptr;
  }

  static HashtablezInfo* GetInfo(HashtablezInfoHandle* h) { return h->info_; }
};
|
||||
|
||||
namespace {
|
||||
using ::absl::synchronization_internal::ThreadPool;
|
||||
using ::testing::IsEmpty;
|
||||
using ::testing::UnorderedElementsAre;
|
||||
|
||||
std::vector<size_t> GetSizes(HashtablezSampler* s) {
|
||||
std::vector<size_t> res;
|
||||
s->Iterate([&](const HashtablezInfo& info) {
|
||||
res.push_back(info.size.load(std::memory_order_acquire));
|
||||
});
|
||||
return res;
|
||||
}
|
||||
|
||||
// Registers a new sample and stamps its `size` field so tests can identify it.
HashtablezInfo* Register(HashtablezSampler* s, size_t size) {
  HashtablezInfo* const info = s->Register();
  assert(info != nullptr);
  info->size.store(size);
  return info;
}
|
||||
|
||||
// PrepareForSampling must both initialize a fresh info and fully reset a
// previously-used one: counters zeroed, the AND-fingerprint saturated to all
// ones, and create_time refreshed.
TEST(HashtablezInfoTest, PrepareForSampling) {
  absl::Time test_start = absl::Now();
  HashtablezInfo info;
  absl::MutexLock l(&info.init_mu);
  info.PrepareForSampling();

  EXPECT_EQ(info.capacity.load(), 0);
  EXPECT_EQ(info.size.load(), 0);
  EXPECT_EQ(info.num_erases.load(), 0);
  EXPECT_EQ(info.max_probe_length.load(), 0);
  EXPECT_EQ(info.total_probe_length.load(), 0);
  EXPECT_EQ(info.hashes_bitwise_or.load(), 0);
  EXPECT_EQ(info.hashes_bitwise_and.load(), ~size_t{});
  EXPECT_GE(info.create_time, test_start);

  // Dirty every field, then verify a second PrepareForSampling resets them.
  info.capacity.store(1, std::memory_order_relaxed);
  info.size.store(1, std::memory_order_relaxed);
  info.num_erases.store(1, std::memory_order_relaxed);
  info.max_probe_length.store(1, std::memory_order_relaxed);
  info.total_probe_length.store(1, std::memory_order_relaxed);
  info.hashes_bitwise_or.store(1, std::memory_order_relaxed);
  info.hashes_bitwise_and.store(1, std::memory_order_relaxed);
  info.create_time = test_start - absl::Hours(20);

  info.PrepareForSampling();
  EXPECT_EQ(info.capacity.load(), 0);
  EXPECT_EQ(info.size.load(), 0);
  EXPECT_EQ(info.num_erases.load(), 0);
  EXPECT_EQ(info.max_probe_length.load(), 0);
  EXPECT_EQ(info.total_probe_length.load(), 0);
  EXPECT_EQ(info.hashes_bitwise_or.load(), 0);
  EXPECT_EQ(info.hashes_bitwise_and.load(), ~size_t{});
  EXPECT_GE(info.create_time, test_start);
}
|
||||
|
||||
// RecordStorageChangedSlow should publish both size and capacity verbatim.
TEST(HashtablezInfoTest, RecordStorageChanged) {
  HashtablezInfo info;
  absl::MutexLock l(&info.init_mu);
  info.PrepareForSampling();
  RecordStorageChangedSlow(&info, 17, 47);
  EXPECT_EQ(info.size.load(), 17);
  EXPECT_EQ(info.capacity.load(), 47);
  RecordStorageChangedSlow(&info, 20, 20);
  EXPECT_EQ(info.size.load(), 20);
  EXPECT_EQ(info.capacity.load(), 20);
}
|
||||
|
||||
// Inserts should track the maximum probe length (in group units — hence the
// kProbeLength multiplier) and fold each hash into the AND/OR fingerprints.
TEST(HashtablezInfoTest, RecordInsert) {
  HashtablezInfo info;
  absl::MutexLock l(&info.init_mu);
  info.PrepareForSampling();
  EXPECT_EQ(info.max_probe_length.load(), 0);
  RecordInsertSlow(&info, 0x0000FF00, 6 * kProbeLength);
  EXPECT_EQ(info.max_probe_length.load(), 6);
  EXPECT_EQ(info.hashes_bitwise_and.load(), 0x0000FF00);
  EXPECT_EQ(info.hashes_bitwise_or.load(), 0x0000FF00);
  // Shorter probe: the recorded maximum must not decrease.
  RecordInsertSlow(&info, 0x000FF000, 4 * kProbeLength);
  EXPECT_EQ(info.max_probe_length.load(), 6);
  EXPECT_EQ(info.hashes_bitwise_and.load(), 0x0000F000);
  EXPECT_EQ(info.hashes_bitwise_or.load(), 0x000FF F00);
  RecordInsertSlow(&info, 0x00FF0000, 12 * kProbeLength);
  EXPECT_EQ(info.max_probe_length.load(), 12);
  EXPECT_EQ(info.hashes_bitwise_and.load(), 0x00000000);
  EXPECT_EQ(info.hashes_bitwise_or.load(), 0x00FFFF00);
}
|
||||
|
||||
// An erase decrements size and bumps num_erases.
TEST(HashtablezInfoTest, RecordErase) {
  HashtablezInfo info;
  absl::MutexLock l(&info.init_mu);
  info.PrepareForSampling();
  EXPECT_EQ(info.num_erases.load(), 0);
  EXPECT_EQ(info.size.load(), 0);
  RecordInsertSlow(&info, 0x0000FF00, 6 * kProbeLength);
  EXPECT_EQ(info.size.load(), 1);
  RecordEraseSlow(&info);
  EXPECT_EQ(info.size.load(), 0);
  EXPECT_EQ(info.num_erases.load(), 1);
}
|
||||
|
||||
// A rehash replaces total_probe_length and clears num_erases while leaving
// size untouched.
TEST(HashtablezInfoTest, RecordRehash) {
  HashtablezInfo info;
  absl::MutexLock l(&info.init_mu);
  info.PrepareForSampling();
  RecordInsertSlow(&info, 0x1, 0);
  RecordInsertSlow(&info, 0x2, kProbeLength);
  RecordInsertSlow(&info, 0x4, kProbeLength);
  RecordInsertSlow(&info, 0x8, 2 * kProbeLength);
  EXPECT_EQ(info.size.load(), 4);
  EXPECT_EQ(info.total_probe_length.load(), 4);

  RecordEraseSlow(&info);
  RecordEraseSlow(&info);
  EXPECT_EQ(info.size.load(), 2);
  EXPECT_EQ(info.total_probe_length.load(), 4);
  EXPECT_EQ(info.num_erases.load(), 2);

  RecordRehashSlow(&info, 3 * kProbeLength);
  EXPECT_EQ(info.size.load(), 2);
  EXPECT_EQ(info.total_probe_length.load(), 3);
  EXPECT_EQ(info.num_erases.load(), 0);
}
|
||||
|
||||
// These tests exercise the sampling fast path and only make sense when
// per-thread sampling support is compiled in.
//
// FIX: the guard previously tested ABSL_HASHTABLEZ_SAMPLE, a macro that is
// never defined anywhere in this library (the header defines and checks
// ABSL_INTERNAL_HASHTABLEZ_SAMPLE), so these three tests were silently
// compiled out on every platform.
#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
// With a small sampling stride, SampleSlow must always produce a sample and a
// positive next-sample countdown.
TEST(HashtablezSamplerTest, SmallSampleParameter) {
  SetHashtablezEnabled(true);
  SetHashtablezSampleParameter(100);

  for (int i = 0; i < 1000; ++i) {
    int64_t next_sample = 0;
    HashtablezInfo* sample = SampleSlow(&next_sample);
    EXPECT_GT(next_sample, 0);
    EXPECT_NE(sample, nullptr);
    UnsampleSlow(sample);
  }
}

// Even with the largest possible stride the slow path still returns a sample
// for the table that triggered it.
TEST(HashtablezSamplerTest, LargeSampleParameter) {
  SetHashtablezEnabled(true);
  SetHashtablezSampleParameter(std::numeric_limits<int32_t>::max());

  for (int i = 0; i < 1000; ++i) {
    int64_t next_sample = 0;
    HashtablezInfo* sample = SampleSlow(&next_sample);
    EXPECT_GT(next_sample, 0);
    EXPECT_NE(sample, nullptr);
    UnsampleSlow(sample);
  }
}

// Statistical check: with a stride of 100 the observed sampling rate should
// converge near 1%.
TEST(HashtablezSamplerTest, Sample) {
  SetHashtablezEnabled(true);
  SetHashtablezSampleParameter(100);
  int64_t num_sampled = 0;
  int64_t total = 0;
  double sample_rate = 0.0;
  for (int i = 0; i < 1000000; ++i) {
    HashtablezInfoHandle h = Sample();
    ++total;
    if (HashtablezInfoHandlePeer::IsSampled(h)) {
      ++num_sampled;
    }
    sample_rate = static_cast<double>(num_sampled) / total;
    if (0.005 < sample_rate && sample_rate < 0.015) break;
  }
  EXPECT_NEAR(sample_rate, 0.01, 0.005);
}
#endif  // defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
|
||||
|
||||
// Reassigning a handle must unregister its sample so it no longer appears
// live during Iterate.
TEST(HashtablezSamplerTest, Handle) {
  auto& sampler = HashtablezSampler::Global();
  HashtablezInfoHandle h(sampler.Register());
  auto* info = HashtablezInfoHandlePeer::GetInfo(&h);
  // Tag the sample so we can recognize it during iteration.
  info->hashes_bitwise_and.store(0x12345678, std::memory_order_relaxed);

  bool found = false;
  sampler.Iterate([&](const HashtablezInfo& h) {
    if (&h == info) {
      EXPECT_EQ(h.hashes_bitwise_and.load(), 0x12345678);
      found = true;
    }
  });
  EXPECT_TRUE(found);

  // Dropping the handle unregisters the sample.
  h = HashtablezInfoHandle();
  found = false;
  sampler.Iterate([&](const HashtablezInfo& h) {
    if (&h == info) {
      // this will only happen if some other thread has resurrected the info
      // the old handle was using.
      if (h.hashes_bitwise_and.load() == 0x12345678) {
        found = true;
      }
    }
  });
  EXPECT_FALSE(found);
}
|
||||
|
||||
// Each Register() adds a live sample visible to Iterate, and mutations to a
// registered info are observed in place.
TEST(HashtablezSamplerTest, Registration) {
  HashtablezSampler sampler;
  auto* info1 = Register(&sampler, 1);
  EXPECT_THAT(GetSizes(&sampler), UnorderedElementsAre(1));

  auto* info2 = Register(&sampler, 2);
  EXPECT_THAT(GetSizes(&sampler), UnorderedElementsAre(1, 2));
  info1->size.store(3);
  EXPECT_THAT(GetSizes(&sampler), UnorderedElementsAre(3, 2));

  sampler.Unregister(info1);
  sampler.Unregister(info2);
}
|
||||
|
||||
// Unregistered samples disappear from Iterate; later registrations may reuse
// graveyard slots without resurrecting the old values.
TEST(HashtablezSamplerTest, Unregistration) {
  HashtablezSampler sampler;
  std::vector<HashtablezInfo*> infos;
  for (size_t i = 0; i < 3; ++i) {
    infos.push_back(Register(&sampler, i));
  }
  EXPECT_THAT(GetSizes(&sampler), UnorderedElementsAre(0, 1, 2));

  sampler.Unregister(infos[1]);
  EXPECT_THAT(GetSizes(&sampler), UnorderedElementsAre(0, 2));

  infos.push_back(Register(&sampler, 3));
  infos.push_back(Register(&sampler, 4));
  EXPECT_THAT(GetSizes(&sampler), UnorderedElementsAre(0, 2, 3, 4));
  sampler.Unregister(infos[3]);
  EXPECT_THAT(GetSizes(&sampler), UnorderedElementsAre(0, 2, 4));

  sampler.Unregister(infos[0]);
  sampler.Unregister(infos[2]);
  sampler.Unregister(infos[4]);
  EXPECT_THAT(GetSizes(&sampler), IsEmpty());
}
|
||||
|
||||
// Stress test: concurrent Register/Unregister/Iterate from 10 threads for a
// few seconds, primarily to give TSan a chance to catch data races.
TEST(HashtablezSamplerTest, MultiThreaded) {
  HashtablezSampler sampler;
  Notification stop;
  ThreadPool pool(10);

  for (int i = 0; i < 10; ++i) {
    pool.Schedule([&sampler, &stop]() {
      std::random_device rd;
      std::mt19937 gen(rd());

      std::vector<HashtablezInfo*> infoz;
      while (!stop.HasBeenNotified()) {
        // Keep at least one registered sample per thread.
        if (infoz.empty()) {
          infoz.push_back(sampler.Register());
        }
        // Randomly register, unregister, or iterate.
        switch (std::uniform_int_distribution<>(0, 2)(gen)) {
          case 0: {
            infoz.push_back(sampler.Register());
            break;
          }
          case 1: {
            // Unregister a random sample (swap-with-back removal).
            size_t p =
                std::uniform_int_distribution<>(0, infoz.size() - 1)(gen);
            HashtablezInfo* info = infoz[p];
            infoz[p] = infoz.back();
            infoz.pop_back();
            sampler.Unregister(info);
            break;
          }
          case 2: {
            absl::Duration oldest = absl::ZeroDuration();
            sampler.Iterate([&](const HashtablezInfo& info) {
              oldest = std::max(oldest, absl::Now() - info.create_time);
            });
            ASSERT_GE(oldest, absl::ZeroDuration());
            break;
          }
        }
      }
    });
  }
  // The threads will hammer away.  Give it a little bit of time for tsan to
  // spot errors.
  absl::SleepFor(absl::Seconds(3));
  stop.Notify();
}
|
||||
|
||||
// The dispose callback fires (with the doomed info) for every Unregister that
// happens while it is installed, and stops firing after being unset.
TEST(HashtablezSamplerTest, Callback) {
  HashtablezSampler sampler;

  auto* info1 = Register(&sampler, 1);
  auto* info2 = Register(&sampler, 2);

  // static so the capture-less lambda below can observe it.
  static const HashtablezInfo* expected;

  auto callback = [](const HashtablezInfo& info) {
    // We can't use `info` outside of this callback because the object will be
    // disposed as soon as we return from here.
    EXPECT_EQ(&info, expected);
  };

  // Set the callback.
  EXPECT_EQ(sampler.SetDisposeCallback(callback), nullptr);
  expected = info1;
  sampler.Unregister(info1);

  // Unset the callback.
  EXPECT_EQ(callback, sampler.SetDisposeCallback(nullptr));
  expected = nullptr;  // no more calls.
  sampler.Unregister(info2);
}
|
||||
|
||||
} // namespace
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
50
third_party/abseil_cpp/absl/container/internal/have_sse.h
vendored
Normal file
50
third_party/abseil_cpp/absl/container/internal/have_sse.h
vendored
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
// Shared config probing for SSE instructions used in Swiss tables.
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_HAVE_SSE_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_HAVE_SSE_H_
|
||||
|
||||
#ifndef ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2
|
||||
#if defined(__SSE2__) || \
|
||||
(defined(_MSC_VER) && \
|
||||
(defined(_M_X64) || (defined(_M_IX86) && _M_IX86_FP >= 2)))
|
||||
#define ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2 1
|
||||
#else
|
||||
#define ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2 0
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#ifndef ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSSE3
|
||||
#ifdef __SSSE3__
|
||||
#define ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSSE3 1
|
||||
#else
|
||||
#define ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSSE3 0
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSSE3 && \
|
||||
!ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2
|
||||
#error "Bad configuration!"
|
||||
#endif
|
||||
|
||||
#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2
|
||||
#include <emmintrin.h>
|
||||
#endif
|
||||
|
||||
#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSSE3
|
||||
#include <tmmintrin.h>
|
||||
#endif
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_HAVE_SSE_H_
|
||||
892
third_party/abseil_cpp/absl/container/internal/inlined_vector.h
vendored
Normal file
892
third_party/abseil_cpp/absl/container/internal/inlined_vector.h
vendored
Normal file
|
|
@ -0,0 +1,892 @@
|
|||
// Copyright 2019 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_INTERNAL_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_INTERNAL_H_
|
||||
|
||||
#include <algorithm>
|
||||
#include <cstddef>
|
||||
#include <cstring>
|
||||
#include <iterator>
|
||||
#include <limits>
|
||||
#include <memory>
|
||||
#include <utility>
|
||||
|
||||
#include "absl/base/macros.h"
|
||||
#include "absl/container/internal/compressed_tuple.h"
|
||||
#include "absl/memory/memory.h"
|
||||
#include "absl/meta/type_traits.h"
|
||||
#include "absl/types/span.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace inlined_vector_internal {
|
||||
|
||||
template <typename Iterator>
|
||||
using IsAtLeastForwardIterator = std::is_convertible<
|
||||
typename std::iterator_traits<Iterator>::iterator_category,
|
||||
std::forward_iterator_tag>;
|
||||
|
||||
template <typename AllocatorType,
|
||||
typename ValueType =
|
||||
typename absl::allocator_traits<AllocatorType>::value_type>
|
||||
using IsMemcpyOk =
|
||||
absl::conjunction<std::is_same<AllocatorType, std::allocator<ValueType>>,
|
||||
absl::is_trivially_copy_constructible<ValueType>,
|
||||
absl::is_trivially_copy_assignable<ValueType>,
|
||||
absl::is_trivially_destructible<ValueType>>;
|
||||
|
||||
template <typename AllocatorType, typename Pointer, typename SizeType>
|
||||
void DestroyElements(AllocatorType* alloc_ptr, Pointer destroy_first,
|
||||
SizeType destroy_size) {
|
||||
using AllocatorTraits = absl::allocator_traits<AllocatorType>;
|
||||
|
||||
if (destroy_first != nullptr) {
|
||||
for (auto i = destroy_size; i != 0;) {
|
||||
--i;
|
||||
AllocatorTraits::destroy(*alloc_ptr, destroy_first + i);
|
||||
}
|
||||
|
||||
#if !defined(NDEBUG)
|
||||
{
|
||||
using ValueType = typename AllocatorTraits::value_type;
|
||||
|
||||
// Overwrite unused memory with `0xab` so we can catch uninitialized
|
||||
// usage.
|
||||
//
|
||||
// Cast to `void*` to tell the compiler that we don't care that we might
|
||||
// be scribbling on a vtable pointer.
|
||||
void* memory_ptr = destroy_first;
|
||||
auto memory_size = destroy_size * sizeof(ValueType);
|
||||
std::memset(memory_ptr, 0xab, memory_size);
|
||||
}
|
||||
#endif // !defined(NDEBUG)
|
||||
}
|
||||
}
|
||||
|
||||
template <typename AllocatorType, typename Pointer, typename ValueAdapter,
|
||||
typename SizeType>
|
||||
void ConstructElements(AllocatorType* alloc_ptr, Pointer construct_first,
|
||||
ValueAdapter* values_ptr, SizeType construct_size) {
|
||||
for (SizeType i = 0; i < construct_size; ++i) {
|
||||
ABSL_INTERNAL_TRY {
|
||||
values_ptr->ConstructNext(alloc_ptr, construct_first + i);
|
||||
}
|
||||
ABSL_INTERNAL_CATCH_ANY {
|
||||
inlined_vector_internal::DestroyElements(alloc_ptr, construct_first, i);
|
||||
ABSL_INTERNAL_RETHROW;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
template <typename Pointer, typename ValueAdapter, typename SizeType>
|
||||
void AssignElements(Pointer assign_first, ValueAdapter* values_ptr,
|
||||
SizeType assign_size) {
|
||||
for (SizeType i = 0; i < assign_size; ++i) {
|
||||
values_ptr->AssignNext(assign_first + i);
|
||||
}
|
||||
}
|
||||
|
||||
template <typename AllocatorType>
|
||||
struct StorageView {
|
||||
using AllocatorTraits = absl::allocator_traits<AllocatorType>;
|
||||
using Pointer = typename AllocatorTraits::pointer;
|
||||
using SizeType = typename AllocatorTraits::size_type;
|
||||
|
||||
Pointer data;
|
||||
SizeType size;
|
||||
SizeType capacity;
|
||||
};
|
||||
|
||||
template <typename AllocatorType, typename Iterator>
|
||||
class IteratorValueAdapter {
|
||||
using AllocatorTraits = absl::allocator_traits<AllocatorType>;
|
||||
using Pointer = typename AllocatorTraits::pointer;
|
||||
|
||||
public:
|
||||
explicit IteratorValueAdapter(const Iterator& it) : it_(it) {}
|
||||
|
||||
void ConstructNext(AllocatorType* alloc_ptr, Pointer construct_at) {
|
||||
AllocatorTraits::construct(*alloc_ptr, construct_at, *it_);
|
||||
++it_;
|
||||
}
|
||||
|
||||
void AssignNext(Pointer assign_at) {
|
||||
*assign_at = *it_;
|
||||
++it_;
|
||||
}
|
||||
|
||||
private:
|
||||
Iterator it_;
|
||||
};
|
||||
|
||||
template <typename AllocatorType>
|
||||
class CopyValueAdapter {
|
||||
using AllocatorTraits = absl::allocator_traits<AllocatorType>;
|
||||
using ValueType = typename AllocatorTraits::value_type;
|
||||
using Pointer = typename AllocatorTraits::pointer;
|
||||
using ConstPointer = typename AllocatorTraits::const_pointer;
|
||||
|
||||
public:
|
||||
explicit CopyValueAdapter(const ValueType& v) : ptr_(std::addressof(v)) {}
|
||||
|
||||
void ConstructNext(AllocatorType* alloc_ptr, Pointer construct_at) {
|
||||
AllocatorTraits::construct(*alloc_ptr, construct_at, *ptr_);
|
||||
}
|
||||
|
||||
void AssignNext(Pointer assign_at) { *assign_at = *ptr_; }
|
||||
|
||||
private:
|
||||
ConstPointer ptr_;
|
||||
};
|
||||
|
||||
template <typename AllocatorType>
|
||||
class DefaultValueAdapter {
|
||||
using AllocatorTraits = absl::allocator_traits<AllocatorType>;
|
||||
using ValueType = typename AllocatorTraits::value_type;
|
||||
using Pointer = typename AllocatorTraits::pointer;
|
||||
|
||||
public:
|
||||
explicit DefaultValueAdapter() {}
|
||||
|
||||
void ConstructNext(AllocatorType* alloc_ptr, Pointer construct_at) {
|
||||
AllocatorTraits::construct(*alloc_ptr, construct_at);
|
||||
}
|
||||
|
||||
void AssignNext(Pointer assign_at) { *assign_at = ValueType(); }
|
||||
};
|
||||
|
||||
template <typename AllocatorType>
|
||||
class AllocationTransaction {
|
||||
using AllocatorTraits = absl::allocator_traits<AllocatorType>;
|
||||
using Pointer = typename AllocatorTraits::pointer;
|
||||
using SizeType = typename AllocatorTraits::size_type;
|
||||
|
||||
public:
|
||||
explicit AllocationTransaction(AllocatorType* alloc_ptr)
|
||||
: alloc_data_(*alloc_ptr, nullptr) {}
|
||||
|
||||
~AllocationTransaction() {
|
||||
if (DidAllocate()) {
|
||||
AllocatorTraits::deallocate(GetAllocator(), GetData(), GetCapacity());
|
||||
}
|
||||
}
|
||||
|
||||
AllocationTransaction(const AllocationTransaction&) = delete;
|
||||
void operator=(const AllocationTransaction&) = delete;
|
||||
|
||||
AllocatorType& GetAllocator() { return alloc_data_.template get<0>(); }
|
||||
Pointer& GetData() { return alloc_data_.template get<1>(); }
|
||||
SizeType& GetCapacity() { return capacity_; }
|
||||
|
||||
bool DidAllocate() { return GetData() != nullptr; }
|
||||
Pointer Allocate(SizeType capacity) {
|
||||
GetData() = AllocatorTraits::allocate(GetAllocator(), capacity);
|
||||
GetCapacity() = capacity;
|
||||
return GetData();
|
||||
}
|
||||
|
||||
void Reset() {
|
||||
GetData() = nullptr;
|
||||
GetCapacity() = 0;
|
||||
}
|
||||
|
||||
private:
|
||||
container_internal::CompressedTuple<AllocatorType, Pointer> alloc_data_;
|
||||
SizeType capacity_ = 0;
|
||||
};
|
||||
|
||||
template <typename AllocatorType>
|
||||
class ConstructionTransaction {
|
||||
using AllocatorTraits = absl::allocator_traits<AllocatorType>;
|
||||
using Pointer = typename AllocatorTraits::pointer;
|
||||
using SizeType = typename AllocatorTraits::size_type;
|
||||
|
||||
public:
|
||||
explicit ConstructionTransaction(AllocatorType* alloc_ptr)
|
||||
: alloc_data_(*alloc_ptr, nullptr) {}
|
||||
|
||||
~ConstructionTransaction() {
|
||||
if (DidConstruct()) {
|
||||
inlined_vector_internal::DestroyElements(std::addressof(GetAllocator()),
|
||||
GetData(), GetSize());
|
||||
}
|
||||
}
|
||||
|
||||
ConstructionTransaction(const ConstructionTransaction&) = delete;
|
||||
void operator=(const ConstructionTransaction&) = delete;
|
||||
|
||||
AllocatorType& GetAllocator() { return alloc_data_.template get<0>(); }
|
||||
Pointer& GetData() { return alloc_data_.template get<1>(); }
|
||||
SizeType& GetSize() { return size_; }
|
||||
|
||||
bool DidConstruct() { return GetData() != nullptr; }
|
||||
template <typename ValueAdapter>
|
||||
void Construct(Pointer data, ValueAdapter* values_ptr, SizeType size) {
|
||||
inlined_vector_internal::ConstructElements(std::addressof(GetAllocator()),
|
||||
data, values_ptr, size);
|
||||
GetData() = data;
|
||||
GetSize() = size;
|
||||
}
|
||||
void Commit() {
|
||||
GetData() = nullptr;
|
||||
GetSize() = 0;
|
||||
}
|
||||
|
||||
private:
|
||||
container_internal::CompressedTuple<AllocatorType, Pointer> alloc_data_;
|
||||
SizeType size_ = 0;
|
||||
};
|
||||
|
||||
template <typename T, size_t N, typename A>
|
||||
class Storage {
|
||||
public:
|
||||
using AllocatorTraits = absl::allocator_traits<A>;
|
||||
using allocator_type = typename AllocatorTraits::allocator_type;
|
||||
using value_type = typename AllocatorTraits::value_type;
|
||||
using pointer = typename AllocatorTraits::pointer;
|
||||
using const_pointer = typename AllocatorTraits::const_pointer;
|
||||
using size_type = typename AllocatorTraits::size_type;
|
||||
using difference_type = typename AllocatorTraits::difference_type;
|
||||
|
||||
using reference = value_type&;
|
||||
using const_reference = const value_type&;
|
||||
using RValueReference = value_type&&;
|
||||
using iterator = pointer;
|
||||
using const_iterator = const_pointer;
|
||||
using reverse_iterator = std::reverse_iterator<iterator>;
|
||||
using const_reverse_iterator = std::reverse_iterator<const_iterator>;
|
||||
using MoveIterator = std::move_iterator<iterator>;
|
||||
using IsMemcpyOk = inlined_vector_internal::IsMemcpyOk<allocator_type>;
|
||||
|
||||
using StorageView = inlined_vector_internal::StorageView<allocator_type>;
|
||||
|
||||
template <typename Iterator>
|
||||
using IteratorValueAdapter =
|
||||
inlined_vector_internal::IteratorValueAdapter<allocator_type, Iterator>;
|
||||
using CopyValueAdapter =
|
||||
inlined_vector_internal::CopyValueAdapter<allocator_type>;
|
||||
using DefaultValueAdapter =
|
||||
inlined_vector_internal::DefaultValueAdapter<allocator_type>;
|
||||
|
||||
using AllocationTransaction =
|
||||
inlined_vector_internal::AllocationTransaction<allocator_type>;
|
||||
using ConstructionTransaction =
|
||||
inlined_vector_internal::ConstructionTransaction<allocator_type>;
|
||||
|
||||
static size_type NextCapacity(size_type current_capacity) {
|
||||
return current_capacity * 2;
|
||||
}
|
||||
|
||||
static size_type ComputeCapacity(size_type current_capacity,
|
||||
size_type requested_capacity) {
|
||||
return (std::max)(NextCapacity(current_capacity), requested_capacity);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Storage Constructors and Destructor
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
Storage() : metadata_() {}
|
||||
|
||||
explicit Storage(const allocator_type& alloc) : metadata_(alloc, {}) {}
|
||||
|
||||
~Storage() {
|
||||
pointer data = GetIsAllocated() ? GetAllocatedData() : GetInlinedData();
|
||||
inlined_vector_internal::DestroyElements(GetAllocPtr(), data, GetSize());
|
||||
DeallocateIfAllocated();
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Storage Member Accessors
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
size_type& GetSizeAndIsAllocated() { return metadata_.template get<1>(); }
|
||||
|
||||
const size_type& GetSizeAndIsAllocated() const {
|
||||
return metadata_.template get<1>();
|
||||
}
|
||||
|
||||
size_type GetSize() const { return GetSizeAndIsAllocated() >> 1; }
|
||||
|
||||
bool GetIsAllocated() const { return GetSizeAndIsAllocated() & 1; }
|
||||
|
||||
pointer GetAllocatedData() { return data_.allocated.allocated_data; }
|
||||
|
||||
const_pointer GetAllocatedData() const {
|
||||
return data_.allocated.allocated_data;
|
||||
}
|
||||
|
||||
pointer GetInlinedData() {
|
||||
return reinterpret_cast<pointer>(
|
||||
std::addressof(data_.inlined.inlined_data[0]));
|
||||
}
|
||||
|
||||
const_pointer GetInlinedData() const {
|
||||
return reinterpret_cast<const_pointer>(
|
||||
std::addressof(data_.inlined.inlined_data[0]));
|
||||
}
|
||||
|
||||
size_type GetAllocatedCapacity() const {
|
||||
return data_.allocated.allocated_capacity;
|
||||
}
|
||||
|
||||
size_type GetInlinedCapacity() const { return static_cast<size_type>(N); }
|
||||
|
||||
StorageView MakeStorageView() {
|
||||
return GetIsAllocated()
|
||||
? StorageView{GetAllocatedData(), GetSize(),
|
||||
GetAllocatedCapacity()}
|
||||
: StorageView{GetInlinedData(), GetSize(), GetInlinedCapacity()};
|
||||
}
|
||||
|
||||
allocator_type* GetAllocPtr() {
|
||||
return std::addressof(metadata_.template get<0>());
|
||||
}
|
||||
|
||||
const allocator_type* GetAllocPtr() const {
|
||||
return std::addressof(metadata_.template get<0>());
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Storage Member Mutators
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
template <typename ValueAdapter>
|
||||
void Initialize(ValueAdapter values, size_type new_size);
|
||||
|
||||
template <typename ValueAdapter>
|
||||
void Assign(ValueAdapter values, size_type new_size);
|
||||
|
||||
template <typename ValueAdapter>
|
||||
void Resize(ValueAdapter values, size_type new_size);
|
||||
|
||||
template <typename ValueAdapter>
|
||||
iterator Insert(const_iterator pos, ValueAdapter values,
|
||||
size_type insert_count);
|
||||
|
||||
template <typename... Args>
|
||||
reference EmplaceBack(Args&&... args);
|
||||
|
||||
iterator Erase(const_iterator from, const_iterator to);
|
||||
|
||||
void Reserve(size_type requested_capacity);
|
||||
|
||||
void ShrinkToFit();
|
||||
|
||||
void Swap(Storage* other_storage_ptr);
|
||||
|
||||
void SetIsAllocated() {
|
||||
GetSizeAndIsAllocated() |= static_cast<size_type>(1);
|
||||
}
|
||||
|
||||
void UnsetIsAllocated() {
|
||||
GetSizeAndIsAllocated() &= ((std::numeric_limits<size_type>::max)() - 1);
|
||||
}
|
||||
|
||||
void SetSize(size_type size) {
|
||||
GetSizeAndIsAllocated() =
|
||||
(size << 1) | static_cast<size_type>(GetIsAllocated());
|
||||
}
|
||||
|
||||
void SetAllocatedSize(size_type size) {
|
||||
GetSizeAndIsAllocated() = (size << 1) | static_cast<size_type>(1);
|
||||
}
|
||||
|
||||
void SetInlinedSize(size_type size) {
|
||||
GetSizeAndIsAllocated() = size << static_cast<size_type>(1);
|
||||
}
|
||||
|
||||
void AddSize(size_type count) {
|
||||
GetSizeAndIsAllocated() += count << static_cast<size_type>(1);
|
||||
}
|
||||
|
||||
void SubtractSize(size_type count) {
|
||||
assert(count <= GetSize());
|
||||
|
||||
GetSizeAndIsAllocated() -= count << static_cast<size_type>(1);
|
||||
}
|
||||
|
||||
void SetAllocatedData(pointer data, size_type capacity) {
|
||||
data_.allocated.allocated_data = data;
|
||||
data_.allocated.allocated_capacity = capacity;
|
||||
}
|
||||
|
||||
void AcquireAllocatedData(AllocationTransaction* allocation_tx_ptr) {
|
||||
SetAllocatedData(allocation_tx_ptr->GetData(),
|
||||
allocation_tx_ptr->GetCapacity());
|
||||
|
||||
allocation_tx_ptr->Reset();
|
||||
}
|
||||
|
||||
void MemcpyFrom(const Storage& other_storage) {
|
||||
assert(IsMemcpyOk::value || other_storage.GetIsAllocated());
|
||||
|
||||
GetSizeAndIsAllocated() = other_storage.GetSizeAndIsAllocated();
|
||||
data_ = other_storage.data_;
|
||||
}
|
||||
|
||||
void DeallocateIfAllocated() {
|
||||
if (GetIsAllocated()) {
|
||||
AllocatorTraits::deallocate(*GetAllocPtr(), GetAllocatedData(),
|
||||
GetAllocatedCapacity());
|
||||
}
|
||||
}
|
||||
|
||||
private:
|
||||
using Metadata =
|
||||
container_internal::CompressedTuple<allocator_type, size_type>;
|
||||
|
||||
struct Allocated {
|
||||
pointer allocated_data;
|
||||
size_type allocated_capacity;
|
||||
};
|
||||
|
||||
struct Inlined {
|
||||
alignas(value_type) char inlined_data[sizeof(value_type[N])];
|
||||
};
|
||||
|
||||
union Data {
|
||||
Allocated allocated;
|
||||
Inlined inlined;
|
||||
};
|
||||
|
||||
Metadata metadata_;
|
||||
Data data_;
|
||||
};
|
||||
|
||||
template <typename T, size_t N, typename A>
|
||||
template <typename ValueAdapter>
|
||||
auto Storage<T, N, A>::Initialize(ValueAdapter values, size_type new_size)
|
||||
-> void {
|
||||
// Only callable from constructors!
|
||||
assert(!GetIsAllocated());
|
||||
assert(GetSize() == 0);
|
||||
|
||||
pointer construct_data;
|
||||
if (new_size > GetInlinedCapacity()) {
|
||||
// Because this is only called from the `InlinedVector` constructors, it's
|
||||
// safe to take on the allocation with size `0`. If `ConstructElements(...)`
|
||||
// throws, deallocation will be automatically handled by `~Storage()`.
|
||||
size_type new_capacity = ComputeCapacity(GetInlinedCapacity(), new_size);
|
||||
construct_data = AllocatorTraits::allocate(*GetAllocPtr(), new_capacity);
|
||||
SetAllocatedData(construct_data, new_capacity);
|
||||
SetIsAllocated();
|
||||
} else {
|
||||
construct_data = GetInlinedData();
|
||||
}
|
||||
|
||||
inlined_vector_internal::ConstructElements(GetAllocPtr(), construct_data,
|
||||
&values, new_size);
|
||||
|
||||
// Since the initial size was guaranteed to be `0` and the allocated bit is
|
||||
// already correct for either case, *adding* `new_size` gives us the correct
|
||||
// result faster than setting it directly.
|
||||
AddSize(new_size);
|
||||
}
|
||||
|
||||
template <typename T, size_t N, typename A>
|
||||
template <typename ValueAdapter>
|
||||
auto Storage<T, N, A>::Assign(ValueAdapter values, size_type new_size) -> void {
|
||||
StorageView storage_view = MakeStorageView();
|
||||
|
||||
AllocationTransaction allocation_tx(GetAllocPtr());
|
||||
|
||||
absl::Span<value_type> assign_loop;
|
||||
absl::Span<value_type> construct_loop;
|
||||
absl::Span<value_type> destroy_loop;
|
||||
|
||||
if (new_size > storage_view.capacity) {
|
||||
size_type new_capacity = ComputeCapacity(storage_view.capacity, new_size);
|
||||
construct_loop = {allocation_tx.Allocate(new_capacity), new_size};
|
||||
destroy_loop = {storage_view.data, storage_view.size};
|
||||
} else if (new_size > storage_view.size) {
|
||||
assign_loop = {storage_view.data, storage_view.size};
|
||||
construct_loop = {storage_view.data + storage_view.size,
|
||||
new_size - storage_view.size};
|
||||
} else {
|
||||
assign_loop = {storage_view.data, new_size};
|
||||
destroy_loop = {storage_view.data + new_size, storage_view.size - new_size};
|
||||
}
|
||||
|
||||
inlined_vector_internal::AssignElements(assign_loop.data(), &values,
|
||||
assign_loop.size());
|
||||
|
||||
inlined_vector_internal::ConstructElements(
|
||||
GetAllocPtr(), construct_loop.data(), &values, construct_loop.size());
|
||||
|
||||
inlined_vector_internal::DestroyElements(GetAllocPtr(), destroy_loop.data(),
|
||||
destroy_loop.size());
|
||||
|
||||
if (allocation_tx.DidAllocate()) {
|
||||
DeallocateIfAllocated();
|
||||
AcquireAllocatedData(&allocation_tx);
|
||||
SetIsAllocated();
|
||||
}
|
||||
|
||||
SetSize(new_size);
|
||||
}
|
||||
|
||||
template <typename T, size_t N, typename A>
|
||||
template <typename ValueAdapter>
|
||||
auto Storage<T, N, A>::Resize(ValueAdapter values, size_type new_size) -> void {
|
||||
StorageView storage_view = MakeStorageView();
|
||||
|
||||
IteratorValueAdapter<MoveIterator> move_values(
|
||||
MoveIterator(storage_view.data));
|
||||
|
||||
AllocationTransaction allocation_tx(GetAllocPtr());
|
||||
ConstructionTransaction construction_tx(GetAllocPtr());
|
||||
|
||||
absl::Span<value_type> construct_loop;
|
||||
absl::Span<value_type> move_construct_loop;
|
||||
absl::Span<value_type> destroy_loop;
|
||||
|
||||
if (new_size > storage_view.capacity) {
|
||||
size_type new_capacity = ComputeCapacity(storage_view.capacity, new_size);
|
||||
pointer new_data = allocation_tx.Allocate(new_capacity);
|
||||
construct_loop = {new_data + storage_view.size,
|
||||
new_size - storage_view.size};
|
||||
move_construct_loop = {new_data, storage_view.size};
|
||||
destroy_loop = {storage_view.data, storage_view.size};
|
||||
} else if (new_size > storage_view.size) {
|
||||
construct_loop = {storage_view.data + storage_view.size,
|
||||
new_size - storage_view.size};
|
||||
} else {
|
||||
destroy_loop = {storage_view.data + new_size, storage_view.size - new_size};
|
||||
}
|
||||
|
||||
construction_tx.Construct(construct_loop.data(), &values,
|
||||
construct_loop.size());
|
||||
|
||||
inlined_vector_internal::ConstructElements(
|
||||
GetAllocPtr(), move_construct_loop.data(), &move_values,
|
||||
move_construct_loop.size());
|
||||
|
||||
inlined_vector_internal::DestroyElements(GetAllocPtr(), destroy_loop.data(),
|
||||
destroy_loop.size());
|
||||
|
||||
construction_tx.Commit();
|
||||
if (allocation_tx.DidAllocate()) {
|
||||
DeallocateIfAllocated();
|
||||
AcquireAllocatedData(&allocation_tx);
|
||||
SetIsAllocated();
|
||||
}
|
||||
|
||||
SetSize(new_size);
|
||||
}
|
||||
|
||||
template <typename T, size_t N, typename A>
|
||||
template <typename ValueAdapter>
|
||||
auto Storage<T, N, A>::Insert(const_iterator pos, ValueAdapter values,
|
||||
size_type insert_count) -> iterator {
|
||||
StorageView storage_view = MakeStorageView();
|
||||
|
||||
size_type insert_index =
|
||||
std::distance(const_iterator(storage_view.data), pos);
|
||||
size_type insert_end_index = insert_index + insert_count;
|
||||
size_type new_size = storage_view.size + insert_count;
|
||||
|
||||
if (new_size > storage_view.capacity) {
|
||||
AllocationTransaction allocation_tx(GetAllocPtr());
|
||||
ConstructionTransaction construction_tx(GetAllocPtr());
|
||||
ConstructionTransaction move_construciton_tx(GetAllocPtr());
|
||||
|
||||
IteratorValueAdapter<MoveIterator> move_values(
|
||||
MoveIterator(storage_view.data));
|
||||
|
||||
size_type new_capacity = ComputeCapacity(storage_view.capacity, new_size);
|
||||
pointer new_data = allocation_tx.Allocate(new_capacity);
|
||||
|
||||
construction_tx.Construct(new_data + insert_index, &values, insert_count);
|
||||
|
||||
move_construciton_tx.Construct(new_data, &move_values, insert_index);
|
||||
|
||||
inlined_vector_internal::ConstructElements(
|
||||
GetAllocPtr(), new_data + insert_end_index, &move_values,
|
||||
storage_view.size - insert_index);
|
||||
|
||||
inlined_vector_internal::DestroyElements(GetAllocPtr(), storage_view.data,
|
||||
storage_view.size);
|
||||
|
||||
construction_tx.Commit();
|
||||
move_construciton_tx.Commit();
|
||||
DeallocateIfAllocated();
|
||||
AcquireAllocatedData(&allocation_tx);
|
||||
|
||||
SetAllocatedSize(new_size);
|
||||
return iterator(new_data + insert_index);
|
||||
} else {
|
||||
size_type move_construction_destination_index =
|
||||
(std::max)(insert_end_index, storage_view.size);
|
||||
|
||||
ConstructionTransaction move_construction_tx(GetAllocPtr());
|
||||
|
||||
IteratorValueAdapter<MoveIterator> move_construction_values(
|
||||
MoveIterator(storage_view.data +
|
||||
(move_construction_destination_index - insert_count)));
|
||||
absl::Span<value_type> move_construction = {
|
||||
storage_view.data + move_construction_destination_index,
|
||||
new_size - move_construction_destination_index};
|
||||
|
||||
pointer move_assignment_values = storage_view.data + insert_index;
|
||||
absl::Span<value_type> move_assignment = {
|
||||
storage_view.data + insert_end_index,
|
||||
move_construction_destination_index - insert_end_index};
|
||||
|
||||
absl::Span<value_type> insert_assignment = {move_assignment_values,
|
||||
move_construction.size()};
|
||||
|
||||
absl::Span<value_type> insert_construction = {
|
||||
insert_assignment.data() + insert_assignment.size(),
|
||||
insert_count - insert_assignment.size()};
|
||||
|
||||
move_construction_tx.Construct(move_construction.data(),
|
||||
&move_construction_values,
|
||||
move_construction.size());
|
||||
|
||||
for (pointer destination = move_assignment.data() + move_assignment.size(),
|
||||
last_destination = move_assignment.data(),
|
||||
source = move_assignment_values + move_assignment.size();
|
||||
;) {
|
||||
--destination;
|
||||
--source;
|
||||
if (destination < last_destination) break;
|
||||
*destination = std::move(*source);
|
||||
}
|
||||
|
||||
inlined_vector_internal::AssignElements(insert_assignment.data(), &values,
|
||||
insert_assignment.size());
|
||||
|
||||
inlined_vector_internal::ConstructElements(
|
||||
GetAllocPtr(), insert_construction.data(), &values,
|
||||
insert_construction.size());
|
||||
|
||||
move_construction_tx.Commit();
|
||||
|
||||
AddSize(insert_count);
|
||||
return iterator(storage_view.data + insert_index);
|
||||
}
|
||||
}
|
||||
|
||||
template <typename T, size_t N, typename A>
|
||||
template <typename... Args>
|
||||
auto Storage<T, N, A>::EmplaceBack(Args&&... args) -> reference {
|
||||
StorageView storage_view = MakeStorageView();
|
||||
|
||||
AllocationTransaction allocation_tx(GetAllocPtr());
|
||||
|
||||
IteratorValueAdapter<MoveIterator> move_values(
|
||||
MoveIterator(storage_view.data));
|
||||
|
||||
pointer construct_data;
|
||||
if (storage_view.size == storage_view.capacity) {
|
||||
size_type new_capacity = NextCapacity(storage_view.capacity);
|
||||
construct_data = allocation_tx.Allocate(new_capacity);
|
||||
} else {
|
||||
construct_data = storage_view.data;
|
||||
}
|
||||
|
||||
pointer last_ptr = construct_data + storage_view.size;
|
||||
|
||||
AllocatorTraits::construct(*GetAllocPtr(), last_ptr,
|
||||
std::forward<Args>(args)...);
|
||||
|
||||
if (allocation_tx.DidAllocate()) {
|
||||
ABSL_INTERNAL_TRY {
|
||||
inlined_vector_internal::ConstructElements(
|
||||
GetAllocPtr(), allocation_tx.GetData(), &move_values,
|
||||
storage_view.size);
|
||||
}
|
||||
ABSL_INTERNAL_CATCH_ANY {
|
||||
AllocatorTraits::destroy(*GetAllocPtr(), last_ptr);
|
||||
ABSL_INTERNAL_RETHROW;
|
||||
}
|
||||
|
||||
inlined_vector_internal::DestroyElements(GetAllocPtr(), storage_view.data,
|
||||
storage_view.size);
|
||||
|
||||
DeallocateIfAllocated();
|
||||
AcquireAllocatedData(&allocation_tx);
|
||||
SetIsAllocated();
|
||||
}
|
||||
|
||||
AddSize(1);
|
||||
return *last_ptr;
|
||||
}
|
||||
|
||||
template <typename T, size_t N, typename A>
|
||||
auto Storage<T, N, A>::Erase(const_iterator from, const_iterator to)
|
||||
-> iterator {
|
||||
StorageView storage_view = MakeStorageView();
|
||||
|
||||
size_type erase_size = std::distance(from, to);
|
||||
size_type erase_index =
|
||||
std::distance(const_iterator(storage_view.data), from);
|
||||
size_type erase_end_index = erase_index + erase_size;
|
||||
|
||||
IteratorValueAdapter<MoveIterator> move_values(
|
||||
MoveIterator(storage_view.data + erase_end_index));
|
||||
|
||||
inlined_vector_internal::AssignElements(storage_view.data + erase_index,
|
||||
&move_values,
|
||||
storage_view.size - erase_end_index);
|
||||
|
||||
inlined_vector_internal::DestroyElements(
|
||||
GetAllocPtr(), storage_view.data + (storage_view.size - erase_size),
|
||||
erase_size);
|
||||
|
||||
SubtractSize(erase_size);
|
||||
return iterator(storage_view.data + erase_index);
|
||||
}
|
||||
|
||||
template <typename T, size_t N, typename A>
|
||||
auto Storage<T, N, A>::Reserve(size_type requested_capacity) -> void {
|
||||
StorageView storage_view = MakeStorageView();
|
||||
|
||||
if (ABSL_PREDICT_FALSE(requested_capacity <= storage_view.capacity)) return;
|
||||
|
||||
AllocationTransaction allocation_tx(GetAllocPtr());
|
||||
|
||||
IteratorValueAdapter<MoveIterator> move_values(
|
||||
MoveIterator(storage_view.data));
|
||||
|
||||
size_type new_capacity =
|
||||
ComputeCapacity(storage_view.capacity, requested_capacity);
|
||||
pointer new_data = allocation_tx.Allocate(new_capacity);
|
||||
|
||||
inlined_vector_internal::ConstructElements(GetAllocPtr(), new_data,
|
||||
&move_values, storage_view.size);
|
||||
|
||||
inlined_vector_internal::DestroyElements(GetAllocPtr(), storage_view.data,
|
||||
storage_view.size);
|
||||
|
||||
DeallocateIfAllocated();
|
||||
AcquireAllocatedData(&allocation_tx);
|
||||
SetIsAllocated();
|
||||
}
|
||||
|
||||
template <typename T, size_t N, typename A>
|
||||
auto Storage<T, N, A>::ShrinkToFit() -> void {
|
||||
// May only be called on allocated instances!
|
||||
assert(GetIsAllocated());
|
||||
|
||||
StorageView storage_view{GetAllocatedData(), GetSize(),
|
||||
GetAllocatedCapacity()};
|
||||
|
||||
if (ABSL_PREDICT_FALSE(storage_view.size == storage_view.capacity)) return;
|
||||
|
||||
AllocationTransaction allocation_tx(GetAllocPtr());
|
||||
|
||||
IteratorValueAdapter<MoveIterator> move_values(
|
||||
MoveIterator(storage_view.data));
|
||||
|
||||
pointer construct_data;
|
||||
if (storage_view.size > GetInlinedCapacity()) {
|
||||
size_type new_capacity = storage_view.size;
|
||||
construct_data = allocation_tx.Allocate(new_capacity);
|
||||
} else {
|
||||
construct_data = GetInlinedData();
|
||||
}
|
||||
|
||||
ABSL_INTERNAL_TRY {
|
||||
inlined_vector_internal::ConstructElements(GetAllocPtr(), construct_data,
|
||||
&move_values, storage_view.size);
|
||||
}
|
||||
ABSL_INTERNAL_CATCH_ANY {
|
||||
SetAllocatedData(storage_view.data, storage_view.capacity);
|
||||
ABSL_INTERNAL_RETHROW;
|
||||
}
|
||||
|
||||
inlined_vector_internal::DestroyElements(GetAllocPtr(), storage_view.data,
|
||||
storage_view.size);
|
||||
|
||||
AllocatorTraits::deallocate(*GetAllocPtr(), storage_view.data,
|
||||
storage_view.capacity);
|
||||
|
||||
if (allocation_tx.DidAllocate()) {
|
||||
AcquireAllocatedData(&allocation_tx);
|
||||
} else {
|
||||
UnsetIsAllocated();
|
||||
}
|
||||
}
|
||||
|
||||
template <typename T, size_t N, typename A>
|
||||
auto Storage<T, N, A>::Swap(Storage* other_storage_ptr) -> void {
|
||||
using std::swap;
|
||||
assert(this != other_storage_ptr);
|
||||
|
||||
if (GetIsAllocated() && other_storage_ptr->GetIsAllocated()) {
|
||||
swap(data_.allocated, other_storage_ptr->data_.allocated);
|
||||
} else if (!GetIsAllocated() && !other_storage_ptr->GetIsAllocated()) {
|
||||
Storage* small_ptr = this;
|
||||
Storage* large_ptr = other_storage_ptr;
|
||||
if (small_ptr->GetSize() > large_ptr->GetSize()) swap(small_ptr, large_ptr);
|
||||
|
||||
for (size_type i = 0; i < small_ptr->GetSize(); ++i) {
|
||||
swap(small_ptr->GetInlinedData()[i], large_ptr->GetInlinedData()[i]);
|
||||
}
|
||||
|
||||
IteratorValueAdapter<MoveIterator> move_values(
|
||||
MoveIterator(large_ptr->GetInlinedData() + small_ptr->GetSize()));
|
||||
|
||||
inlined_vector_internal::ConstructElements(
|
||||
large_ptr->GetAllocPtr(),
|
||||
small_ptr->GetInlinedData() + small_ptr->GetSize(), &move_values,
|
||||
large_ptr->GetSize() - small_ptr->GetSize());
|
||||
|
||||
inlined_vector_internal::DestroyElements(
|
||||
large_ptr->GetAllocPtr(),
|
||||
large_ptr->GetInlinedData() + small_ptr->GetSize(),
|
||||
large_ptr->GetSize() - small_ptr->GetSize());
|
||||
} else {
|
||||
Storage* allocated_ptr = this;
|
||||
Storage* inlined_ptr = other_storage_ptr;
|
||||
if (!allocated_ptr->GetIsAllocated()) swap(allocated_ptr, inlined_ptr);
|
||||
|
||||
StorageView allocated_storage_view{allocated_ptr->GetAllocatedData(),
|
||||
allocated_ptr->GetSize(),
|
||||
allocated_ptr->GetAllocatedCapacity()};
|
||||
|
||||
IteratorValueAdapter<MoveIterator> move_values(
|
||||
MoveIterator(inlined_ptr->GetInlinedData()));
|
||||
|
||||
ABSL_INTERNAL_TRY {
|
||||
inlined_vector_internal::ConstructElements(
|
||||
inlined_ptr->GetAllocPtr(), allocated_ptr->GetInlinedData(),
|
||||
&move_values, inlined_ptr->GetSize());
|
||||
}
|
||||
ABSL_INTERNAL_CATCH_ANY {
|
||||
allocated_ptr->SetAllocatedData(allocated_storage_view.data,
|
||||
allocated_storage_view.capacity);
|
||||
ABSL_INTERNAL_RETHROW;
|
||||
}
|
||||
|
||||
inlined_vector_internal::DestroyElements(inlined_ptr->GetAllocPtr(),
|
||||
inlined_ptr->GetInlinedData(),
|
||||
inlined_ptr->GetSize());
|
||||
|
||||
inlined_ptr->SetAllocatedData(allocated_storage_view.data,
|
||||
allocated_storage_view.capacity);
|
||||
}
|
||||
|
||||
swap(GetSizeAndIsAllocated(), other_storage_ptr->GetSizeAndIsAllocated());
|
||||
swap(*GetAllocPtr(), *other_storage_ptr->GetAllocPtr());
|
||||
}
|
||||
|
||||
} // namespace inlined_vector_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_INTERNAL_H_
|
||||
741
third_party/abseil_cpp/absl/container/internal/layout.h
vendored
Normal file
741
third_party/abseil_cpp/absl/container/internal/layout.h
vendored
Normal file
|
|
@ -0,0 +1,741 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
// MOTIVATION AND TUTORIAL
|
||||
//
|
||||
// If you want to put in a single heap allocation N doubles followed by M ints,
|
||||
// it's easy if N and M are known at compile time.
|
||||
//
|
||||
// struct S {
|
||||
// double a[N];
|
||||
// int b[M];
|
||||
// };
|
||||
//
|
||||
// S* p = new S;
|
||||
//
|
||||
// But what if N and M are known only in run time? Class template Layout to the
|
||||
// rescue! It's a portable generalization of the technique known as struct hack.
|
||||
//
|
||||
// // This object will tell us everything we need to know about the memory
|
||||
// // layout of double[N] followed by int[M]. It's structurally identical to
|
||||
// // size_t[2] that stores N and M. It's very cheap to create.
|
||||
// const Layout<double, int> layout(N, M);
|
||||
//
|
||||
// // Allocate enough memory for both arrays. `AllocSize()` tells us how much
|
||||
// // memory is needed. We are free to use any allocation function we want as
|
||||
// // long as it returns aligned memory.
|
||||
// std::unique_ptr<unsigned char[]> p(new unsigned char[layout.AllocSize()]);
|
||||
//
|
||||
// // Obtain the pointer to the array of doubles.
|
||||
// // Equivalent to `reinterpret_cast<double*>(p.get())`.
|
||||
// //
|
||||
// // We could have written layout.Pointer<0>(p) instead. If all the types are
|
||||
// // unique you can use either form, but if some types are repeated you must
|
||||
// // use the index form.
|
||||
// double* a = layout.Pointer<double>(p.get());
|
||||
//
|
||||
// // Obtain the pointer to the array of ints.
|
||||
// // Equivalent to `reinterpret_cast<int*>(p.get() + N * 8)`.
|
||||
// int* b = layout.Pointer<int>(p);
|
||||
//
|
||||
// If we are unable to specify sizes of all fields, we can pass as many sizes as
|
||||
// we can to `Partial()`. In return, it'll allow us to access the fields whose
|
||||
// locations and sizes can be computed from the provided information.
|
||||
// `Partial()` comes in handy when the array sizes are embedded into the
|
||||
// allocation.
|
||||
//
|
||||
// // size_t[1] containing N, size_t[1] containing M, double[N], int[M].
|
||||
// using L = Layout<size_t, size_t, double, int>;
|
||||
//
|
||||
// unsigned char* Allocate(size_t n, size_t m) {
|
||||
// const L layout(1, 1, n, m);
|
||||
// unsigned char* p = new unsigned char[layout.AllocSize()];
|
||||
// *layout.Pointer<0>(p) = n;
|
||||
// *layout.Pointer<1>(p) = m;
|
||||
// return p;
|
||||
// }
|
||||
//
|
||||
// void Use(unsigned char* p) {
|
||||
// // First, extract N and M.
|
||||
// // Specify that the first array has only one element. Using `prefix` we
|
||||
// // can access the first two arrays but not more.
|
||||
// constexpr auto prefix = L::Partial(1);
|
||||
// size_t n = *prefix.Pointer<0>(p);
|
||||
// size_t m = *prefix.Pointer<1>(p);
|
||||
//
|
||||
// // Now we can get pointers to the payload.
|
||||
// const L layout(1, 1, n, m);
|
||||
// double* a = layout.Pointer<double>(p);
|
||||
// int* b = layout.Pointer<int>(p);
|
||||
// }
|
||||
//
|
||||
// The layout we used above combines fixed-size with dynamically-sized fields.
|
||||
// This is quite common. Layout is optimized for this use case and generates
|
||||
// optimal code. All computations that can be performed at compile time are
|
||||
// indeed performed at compile time.
|
||||
//
|
||||
// Efficiency tip: The order of fields matters. In `Layout<T1, ..., TN>` try to
|
||||
// ensure that `alignof(T1) >= ... >= alignof(TN)`. This way you'll have no
|
||||
// padding in between arrays.
|
||||
//
|
||||
// You can manually override the alignment of an array by wrapping the type in
|
||||
// `Aligned<T, N>`. `Layout<..., Aligned<T, N>, ...>` has exactly the same API
|
||||
// and behavior as `Layout<..., T, ...>` except that the first element of the
|
||||
// array of `T` is aligned to `N` (the rest of the elements follow without
|
||||
// padding). `N` cannot be less than `alignof(T)`.
|
||||
//
|
||||
// `AllocSize()` and `Pointer()` are the most basic methods for dealing with
|
||||
// memory layouts. Check out the reference or code below to discover more.
|
||||
//
|
||||
// EXAMPLE
|
||||
//
|
||||
// // Immutable move-only string with sizeof equal to sizeof(void*). The
|
||||
// // string size and the characters are kept in the same heap allocation.
|
||||
// class CompactString {
|
||||
// public:
|
||||
// CompactString(const char* s = "") {
|
||||
// const size_t size = strlen(s);
|
||||
// // size_t[1] followed by char[size + 1].
|
||||
// const L layout(1, size + 1);
|
||||
// p_.reset(new unsigned char[layout.AllocSize()]);
|
||||
// // If running under ASAN, mark the padding bytes, if any, to catch
|
||||
// // memory errors.
|
||||
// layout.PoisonPadding(p_.get());
|
||||
// // Store the size in the allocation.
|
||||
// *layout.Pointer<size_t>(p_.get()) = size;
|
||||
// // Store the characters in the allocation.
|
||||
// memcpy(layout.Pointer<char>(p_.get()), s, size + 1);
|
||||
// }
|
||||
//
|
||||
// size_t size() const {
|
||||
// // Equivalent to reinterpret_cast<size_t&>(*p).
|
||||
// return *L::Partial().Pointer<size_t>(p_.get());
|
||||
// }
|
||||
//
|
||||
// const char* c_str() const {
|
||||
// // Equivalent to reinterpret_cast<char*>(p.get() + sizeof(size_t)).
|
||||
// // The argument in Partial(1) specifies that we have size_t[1] in front
|
||||
// // of the characters.
|
||||
// return L::Partial(1).Pointer<char>(p_.get());
|
||||
// }
|
||||
//
|
||||
// private:
|
||||
// // Our heap allocation contains a size_t followed by an array of chars.
|
||||
// using L = Layout<size_t, char>;
|
||||
// std::unique_ptr<unsigned char[]> p_;
|
||||
// };
|
||||
//
|
||||
// int main() {
|
||||
// CompactString s = "hello";
|
||||
// assert(s.size() == 5);
|
||||
// assert(strcmp(s.c_str(), "hello") == 0);
|
||||
// }
|
||||
//
|
||||
// DOCUMENTATION
|
||||
//
|
||||
// The interface exported by this file consists of:
|
||||
// - class `Layout<>` and its public members.
|
||||
// - The public members of class `internal_layout::LayoutImpl<>`. That class
|
||||
// isn't intended to be used directly, and its name and template parameter
|
||||
// list are internal implementation details, but the class itself provides
|
||||
// most of the functionality in this file. See comments on its members for
|
||||
// detailed documentation.
|
||||
//
|
||||
// `Layout<T1,... Tn>::Partial(count1,..., countm)` (where `m` <= `n`) returns a
|
||||
// `LayoutImpl<>` object. `Layout<T1,..., Tn> layout(count1,..., countn)`
|
||||
// creates a `Layout` object, which exposes the same functionality by inheriting
|
||||
// from `LayoutImpl<>`.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_LAYOUT_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_LAYOUT_H_
|
||||
|
||||
#include <assert.h>
|
||||
#include <stddef.h>
|
||||
#include <stdint.h>
|
||||
#include <ostream>
|
||||
#include <string>
|
||||
#include <tuple>
|
||||
#include <type_traits>
|
||||
#include <typeinfo>
|
||||
#include <utility>
|
||||
|
||||
#ifdef ADDRESS_SANITIZER
|
||||
#include <sanitizer/asan_interface.h>
|
||||
#endif
|
||||
|
||||
#include "absl/meta/type_traits.h"
|
||||
#include "absl/strings/str_cat.h"
|
||||
#include "absl/types/span.h"
|
||||
#include "absl/utility/utility.h"
|
||||
|
||||
#if defined(__GXX_RTTI)
|
||||
#define ABSL_INTERNAL_HAS_CXA_DEMANGLE
|
||||
#endif
|
||||
|
||||
#ifdef ABSL_INTERNAL_HAS_CXA_DEMANGLE
|
||||
#include <cxxabi.h>
|
||||
#endif
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
// A type wrapper that instructs `Layout` to use the specific alignment for the
|
||||
// array. `Layout<..., Aligned<T, N>, ...>` has exactly the same API
|
||||
// and behavior as `Layout<..., T, ...>` except that the first element of the
|
||||
// array of `T` is aligned to `N` (the rest of the elements follow without
|
||||
// padding).
|
||||
//
|
||||
// Requires: `N >= alignof(T)` and `N` is a power of 2.
|
||||
template <class T, size_t N>
|
||||
struct Aligned;
|
||||
|
||||
namespace internal_layout {
|
||||
|
||||
template <class T>
|
||||
struct NotAligned {};
|
||||
|
||||
template <class T, size_t N>
|
||||
struct NotAligned<const Aligned<T, N>> {
|
||||
static_assert(sizeof(T) == 0, "Aligned<T, N> cannot be const-qualified");
|
||||
};
|
||||
|
||||
template <size_t>
|
||||
using IntToSize = size_t;
|
||||
|
||||
template <class>
|
||||
using TypeToSize = size_t;
|
||||
|
||||
template <class T>
|
||||
struct Type : NotAligned<T> {
|
||||
using type = T;
|
||||
};
|
||||
|
||||
template <class T, size_t N>
|
||||
struct Type<Aligned<T, N>> {
|
||||
using type = T;
|
||||
};
|
||||
|
||||
template <class T>
|
||||
struct SizeOf : NotAligned<T>, std::integral_constant<size_t, sizeof(T)> {};
|
||||
|
||||
template <class T, size_t N>
|
||||
struct SizeOf<Aligned<T, N>> : std::integral_constant<size_t, sizeof(T)> {};
|
||||
|
||||
// Note: workaround for https://gcc.gnu.org/PR88115
|
||||
template <class T>
|
||||
struct AlignOf : NotAligned<T> {
|
||||
static constexpr size_t value = alignof(T);
|
||||
};
|
||||
|
||||
template <class T, size_t N>
|
||||
struct AlignOf<Aligned<T, N>> {
|
||||
static_assert(N % alignof(T) == 0,
|
||||
"Custom alignment can't be lower than the type's alignment");
|
||||
static constexpr size_t value = N;
|
||||
};
|
||||
|
||||
// Does `Ts...` contain `T`?
|
||||
template <class T, class... Ts>
|
||||
using Contains = absl::disjunction<std::is_same<T, Ts>...>;
|
||||
|
||||
template <class From, class To>
|
||||
using CopyConst =
|
||||
typename std::conditional<std::is_const<From>::value, const To, To>::type;
|
||||
|
||||
// Note: We're not qualifying this with absl:: because it doesn't compile under
|
||||
// MSVC.
|
||||
template <class T>
|
||||
using SliceType = Span<T>;
|
||||
|
||||
// This namespace contains no types. It prevents functions defined in it from
|
||||
// being found by ADL.
|
||||
namespace adl_barrier {
|
||||
|
||||
template <class Needle, class... Ts>
|
||||
constexpr size_t Find(Needle, Needle, Ts...) {
|
||||
static_assert(!Contains<Needle, Ts...>(), "Duplicate element type");
|
||||
return 0;
|
||||
}
|
||||
|
||||
template <class Needle, class T, class... Ts>
|
||||
constexpr size_t Find(Needle, T, Ts...) {
|
||||
return adl_barrier::Find(Needle(), Ts()...) + 1;
|
||||
}
|
||||
|
||||
constexpr bool IsPow2(size_t n) { return !(n & (n - 1)); }
|
||||
|
||||
// Returns `q * m` for the smallest `q` such that `q * m >= n`.
|
||||
// Requires: `m` is a power of two. It's enforced by IsLegalElementType below.
|
||||
constexpr size_t Align(size_t n, size_t m) { return (n + m - 1) & ~(m - 1); }
|
||||
|
||||
constexpr size_t Min(size_t a, size_t b) { return b < a ? b : a; }
|
||||
|
||||
constexpr size_t Max(size_t a) { return a; }
|
||||
|
||||
template <class... Ts>
|
||||
constexpr size_t Max(size_t a, size_t b, Ts... rest) {
|
||||
return adl_barrier::Max(b < a ? a : b, rest...);
|
||||
}
|
||||
|
||||
template <class T>
|
||||
std::string TypeName() {
|
||||
std::string out;
|
||||
int status = 0;
|
||||
char* demangled = nullptr;
|
||||
#ifdef ABSL_INTERNAL_HAS_CXA_DEMANGLE
|
||||
demangled = abi::__cxa_demangle(typeid(T).name(), nullptr, nullptr, &status);
|
||||
#endif
|
||||
if (status == 0 && demangled != nullptr) { // Demangling succeeded.
|
||||
absl::StrAppend(&out, "<", demangled, ">");
|
||||
free(demangled);
|
||||
} else {
|
||||
#if defined(__GXX_RTTI) || defined(_CPPRTTI)
|
||||
absl::StrAppend(&out, "<", typeid(T).name(), ">");
|
||||
#endif
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
} // namespace adl_barrier
|
||||
|
||||
template <bool C>
|
||||
using EnableIf = typename std::enable_if<C, int>::type;
|
||||
|
||||
// Can `T` be a template argument of `Layout`?
|
||||
template <class T>
|
||||
using IsLegalElementType = std::integral_constant<
|
||||
bool, !std::is_reference<T>::value && !std::is_volatile<T>::value &&
|
||||
!std::is_reference<typename Type<T>::type>::value &&
|
||||
!std::is_volatile<typename Type<T>::type>::value &&
|
||||
adl_barrier::IsPow2(AlignOf<T>::value)>;
|
||||
|
||||
template <class Elements, class SizeSeq, class OffsetSeq>
|
||||
class LayoutImpl;
|
||||
|
||||
// Public base class of `Layout` and the result type of `Layout::Partial()`.
|
||||
//
|
||||
// `Elements...` contains all template arguments of `Layout` that created this
|
||||
// instance.
|
||||
//
|
||||
// `SizeSeq...` is `[0, NumSizes)` where `NumSizes` is the number of arguments
|
||||
// passed to `Layout::Partial()` or `Layout::Layout()`.
|
||||
//
|
||||
// `OffsetSeq...` is `[0, NumOffsets)` where `NumOffsets` is
|
||||
// `Min(sizeof...(Elements), NumSizes + 1)` (the number of arrays for which we
|
||||
// can compute offsets).
|
||||
template <class... Elements, size_t... SizeSeq, size_t... OffsetSeq>
|
||||
class LayoutImpl<std::tuple<Elements...>, absl::index_sequence<SizeSeq...>,
|
||||
absl::index_sequence<OffsetSeq...>> {
|
||||
private:
|
||||
static_assert(sizeof...(Elements) > 0, "At least one field is required");
|
||||
static_assert(absl::conjunction<IsLegalElementType<Elements>...>::value,
|
||||
"Invalid element type (see IsLegalElementType)");
|
||||
|
||||
enum {
|
||||
NumTypes = sizeof...(Elements),
|
||||
NumSizes = sizeof...(SizeSeq),
|
||||
NumOffsets = sizeof...(OffsetSeq),
|
||||
};
|
||||
|
||||
// These are guaranteed by `Layout`.
|
||||
static_assert(NumOffsets == adl_barrier::Min(NumTypes, NumSizes + 1),
|
||||
"Internal error");
|
||||
static_assert(NumTypes > 0, "Internal error");
|
||||
|
||||
// Returns the index of `T` in `Elements...`. Results in a compilation error
|
||||
// if `Elements...` doesn't contain exactly one instance of `T`.
|
||||
template <class T>
|
||||
static constexpr size_t ElementIndex() {
|
||||
static_assert(Contains<Type<T>, Type<typename Type<Elements>::type>...>(),
|
||||
"Type not found");
|
||||
return adl_barrier::Find(Type<T>(),
|
||||
Type<typename Type<Elements>::type>()...);
|
||||
}
|
||||
|
||||
template <size_t N>
|
||||
using ElementAlignment =
|
||||
AlignOf<typename std::tuple_element<N, std::tuple<Elements...>>::type>;
|
||||
|
||||
public:
|
||||
// Element types of all arrays packed in a tuple.
|
||||
using ElementTypes = std::tuple<typename Type<Elements>::type...>;
|
||||
|
||||
// Element type of the Nth array.
|
||||
template <size_t N>
|
||||
using ElementType = typename std::tuple_element<N, ElementTypes>::type;
|
||||
|
||||
constexpr explicit LayoutImpl(IntToSize<SizeSeq>... sizes)
|
||||
: size_{sizes...} {}
|
||||
|
||||
// Alignment of the layout, equal to the strictest alignment of all elements.
|
||||
// All pointers passed to the methods of layout must be aligned to this value.
|
||||
static constexpr size_t Alignment() {
|
||||
return adl_barrier::Max(AlignOf<Elements>::value...);
|
||||
}
|
||||
|
||||
// Offset in bytes of the Nth array.
|
||||
//
|
||||
// // int[3], 4 bytes of padding, double[4].
|
||||
// Layout<int, double> x(3, 4);
|
||||
// assert(x.Offset<0>() == 0); // The ints starts from 0.
|
||||
// assert(x.Offset<1>() == 16); // The doubles starts from 16.
|
||||
//
|
||||
// Requires: `N <= NumSizes && N < sizeof...(Ts)`.
|
||||
template <size_t N, EnableIf<N == 0> = 0>
|
||||
constexpr size_t Offset() const {
|
||||
return 0;
|
||||
}
|
||||
|
||||
template <size_t N, EnableIf<N != 0> = 0>
|
||||
constexpr size_t Offset() const {
|
||||
static_assert(N < NumOffsets, "Index out of bounds");
|
||||
return adl_barrier::Align(
|
||||
Offset<N - 1>() + SizeOf<ElementType<N - 1>>() * size_[N - 1],
|
||||
ElementAlignment<N>::value);
|
||||
}
|
||||
|
||||
// Offset in bytes of the array with the specified element type. There must
|
||||
// be exactly one such array and its zero-based index must be at most
|
||||
// `NumSizes`.
|
||||
//
|
||||
// // int[3], 4 bytes of padding, double[4].
|
||||
// Layout<int, double> x(3, 4);
|
||||
// assert(x.Offset<int>() == 0); // The ints starts from 0.
|
||||
// assert(x.Offset<double>() == 16); // The doubles starts from 16.
|
||||
template <class T>
|
||||
constexpr size_t Offset() const {
|
||||
return Offset<ElementIndex<T>()>();
|
||||
}
|
||||
|
||||
// Offsets in bytes of all arrays for which the offsets are known.
|
||||
constexpr std::array<size_t, NumOffsets> Offsets() const {
|
||||
return {{Offset<OffsetSeq>()...}};
|
||||
}
|
||||
|
||||
// The number of elements in the Nth array. This is the Nth argument of
|
||||
// `Layout::Partial()` or `Layout::Layout()` (zero-based).
|
||||
//
|
||||
// // int[3], 4 bytes of padding, double[4].
|
||||
// Layout<int, double> x(3, 4);
|
||||
// assert(x.Size<0>() == 3);
|
||||
// assert(x.Size<1>() == 4);
|
||||
//
|
||||
// Requires: `N < NumSizes`.
|
||||
template <size_t N>
|
||||
constexpr size_t Size() const {
|
||||
static_assert(N < NumSizes, "Index out of bounds");
|
||||
return size_[N];
|
||||
}
|
||||
|
||||
// The number of elements in the array with the specified element type.
|
||||
// There must be exactly one such array and its zero-based index must be
|
||||
// at most `NumSizes`.
|
||||
//
|
||||
// // int[3], 4 bytes of padding, double[4].
|
||||
// Layout<int, double> x(3, 4);
|
||||
// assert(x.Size<int>() == 3);
|
||||
// assert(x.Size<double>() == 4);
|
||||
template <class T>
|
||||
constexpr size_t Size() const {
|
||||
return Size<ElementIndex<T>()>();
|
||||
}
|
||||
|
||||
// The number of elements of all arrays for which they are known.
|
||||
constexpr std::array<size_t, NumSizes> Sizes() const {
|
||||
return {{Size<SizeSeq>()...}};
|
||||
}
|
||||
|
||||
// Pointer to the beginning of the Nth array.
|
||||
//
|
||||
// `Char` must be `[const] [signed|unsigned] char`.
|
||||
//
|
||||
// // int[3], 4 bytes of padding, double[4].
|
||||
// Layout<int, double> x(3, 4);
|
||||
// unsigned char* p = new unsigned char[x.AllocSize()];
|
||||
// int* ints = x.Pointer<0>(p);
|
||||
// double* doubles = x.Pointer<1>(p);
|
||||
//
|
||||
// Requires: `N <= NumSizes && N < sizeof...(Ts)`.
|
||||
// Requires: `p` is aligned to `Alignment()`.
|
||||
template <size_t N, class Char>
|
||||
CopyConst<Char, ElementType<N>>* Pointer(Char* p) const {
|
||||
using C = typename std::remove_const<Char>::type;
|
||||
static_assert(
|
||||
std::is_same<C, char>() || std::is_same<C, unsigned char>() ||
|
||||
std::is_same<C, signed char>(),
|
||||
"The argument must be a pointer to [const] [signed|unsigned] char");
|
||||
constexpr size_t alignment = Alignment();
|
||||
(void)alignment;
|
||||
assert(reinterpret_cast<uintptr_t>(p) % alignment == 0);
|
||||
return reinterpret_cast<CopyConst<Char, ElementType<N>>*>(p + Offset<N>());
|
||||
}
|
||||
|
||||
// Pointer to the beginning of the array with the specified element type.
|
||||
// There must be exactly one such array and its zero-based index must be at
|
||||
// most `NumSizes`.
|
||||
//
|
||||
// `Char` must be `[const] [signed|unsigned] char`.
|
||||
//
|
||||
// // int[3], 4 bytes of padding, double[4].
|
||||
// Layout<int, double> x(3, 4);
|
||||
// unsigned char* p = new unsigned char[x.AllocSize()];
|
||||
// int* ints = x.Pointer<int>(p);
|
||||
// double* doubles = x.Pointer<double>(p);
|
||||
//
|
||||
// Requires: `p` is aligned to `Alignment()`.
|
||||
template <class T, class Char>
|
||||
CopyConst<Char, T>* Pointer(Char* p) const {
|
||||
return Pointer<ElementIndex<T>()>(p);
|
||||
}
|
||||
|
||||
// Pointers to all arrays for which pointers are known.
|
||||
//
|
||||
// `Char` must be `[const] [signed|unsigned] char`.
|
||||
//
|
||||
// // int[3], 4 bytes of padding, double[4].
|
||||
// Layout<int, double> x(3, 4);
|
||||
// unsigned char* p = new unsigned char[x.AllocSize()];
|
||||
//
|
||||
// int* ints;
|
||||
// double* doubles;
|
||||
// std::tie(ints, doubles) = x.Pointers(p);
|
||||
//
|
||||
// Requires: `p` is aligned to `Alignment()`.
|
||||
//
|
||||
// Note: We're not using ElementType alias here because it does not compile
|
||||
// under MSVC.
|
||||
template <class Char>
|
||||
std::tuple<CopyConst<
|
||||
Char, typename std::tuple_element<OffsetSeq, ElementTypes>::type>*...>
|
||||
Pointers(Char* p) const {
|
||||
return std::tuple<CopyConst<Char, ElementType<OffsetSeq>>*...>(
|
||||
Pointer<OffsetSeq>(p)...);
|
||||
}
|
||||
|
||||
// The Nth array.
|
||||
//
|
||||
// `Char` must be `[const] [signed|unsigned] char`.
|
||||
//
|
||||
// // int[3], 4 bytes of padding, double[4].
|
||||
// Layout<int, double> x(3, 4);
|
||||
// unsigned char* p = new unsigned char[x.AllocSize()];
|
||||
// Span<int> ints = x.Slice<0>(p);
|
||||
// Span<double> doubles = x.Slice<1>(p);
|
||||
//
|
||||
// Requires: `N < NumSizes`.
|
||||
// Requires: `p` is aligned to `Alignment()`.
|
||||
template <size_t N, class Char>
|
||||
SliceType<CopyConst<Char, ElementType<N>>> Slice(Char* p) const {
|
||||
return SliceType<CopyConst<Char, ElementType<N>>>(Pointer<N>(p), Size<N>());
|
||||
}
|
||||
|
||||
// The array with the specified element type. There must be exactly one
|
||||
// such array and its zero-based index must be less than `NumSizes`.
|
||||
//
|
||||
// `Char` must be `[const] [signed|unsigned] char`.
|
||||
//
|
||||
// // int[3], 4 bytes of padding, double[4].
|
||||
// Layout<int, double> x(3, 4);
|
||||
// unsigned char* p = new unsigned char[x.AllocSize()];
|
||||
// Span<int> ints = x.Slice<int>(p);
|
||||
// Span<double> doubles = x.Slice<double>(p);
|
||||
//
|
||||
// Requires: `p` is aligned to `Alignment()`.
|
||||
template <class T, class Char>
|
||||
SliceType<CopyConst<Char, T>> Slice(Char* p) const {
|
||||
return Slice<ElementIndex<T>()>(p);
|
||||
}
|
||||
|
||||
// All arrays with known sizes.
|
||||
//
|
||||
// `Char` must be `[const] [signed|unsigned] char`.
|
||||
//
|
||||
// // int[3], 4 bytes of padding, double[4].
|
||||
// Layout<int, double> x(3, 4);
|
||||
// unsigned char* p = new unsigned char[x.AllocSize()];
|
||||
//
|
||||
// Span<int> ints;
|
||||
// Span<double> doubles;
|
||||
// std::tie(ints, doubles) = x.Slices(p);
|
||||
//
|
||||
// Requires: `p` is aligned to `Alignment()`.
|
||||
//
|
||||
// Note: We're not using ElementType alias here because it does not compile
|
||||
// under MSVC.
|
||||
template <class Char>
|
||||
std::tuple<SliceType<CopyConst<
|
||||
Char, typename std::tuple_element<SizeSeq, ElementTypes>::type>>...>
|
||||
Slices(Char* p) const {
|
||||
// Workaround for https://gcc.gnu.org/bugzilla/show_bug.cgi?id=63875 (fixed
|
||||
// in 6.1).
|
||||
(void)p;
|
||||
return std::tuple<SliceType<CopyConst<Char, ElementType<SizeSeq>>>...>(
|
||||
Slice<SizeSeq>(p)...);
|
||||
}
|
||||
|
||||
// The size of the allocation that fits all arrays.
|
||||
//
|
||||
// // int[3], 4 bytes of padding, double[4].
|
||||
// Layout<int, double> x(3, 4);
|
||||
// unsigned char* p = new unsigned char[x.AllocSize()]; // 48 bytes
|
||||
//
|
||||
// Requires: `NumSizes == sizeof...(Ts)`.
|
||||
constexpr size_t AllocSize() const {
|
||||
static_assert(NumTypes == NumSizes, "You must specify sizes of all fields");
|
||||
return Offset<NumTypes - 1>() +
|
||||
SizeOf<ElementType<NumTypes - 1>>() * size_[NumTypes - 1];
|
||||
}
|
||||
|
||||
// If built with --config=asan, poisons padding bytes (if any) in the
|
||||
// allocation. The pointer must point to a memory block at least
|
||||
// `AllocSize()` bytes in length.
|
||||
//
|
||||
// `Char` must be `[const] [signed|unsigned] char`.
|
||||
//
|
||||
// Requires: `p` is aligned to `Alignment()`.
|
||||
template <class Char, size_t N = NumOffsets - 1, EnableIf<N == 0> = 0>
|
||||
void PoisonPadding(const Char* p) const {
|
||||
Pointer<0>(p); // verify the requirements on `Char` and `p`
|
||||
}
|
||||
|
||||
template <class Char, size_t N = NumOffsets - 1, EnableIf<N != 0> = 0>
|
||||
void PoisonPadding(const Char* p) const {
|
||||
static_assert(N < NumOffsets, "Index out of bounds");
|
||||
(void)p;
|
||||
#ifdef ADDRESS_SANITIZER
|
||||
PoisonPadding<Char, N - 1>(p);
|
||||
// The `if` is an optimization. It doesn't affect the observable behaviour.
|
||||
if (ElementAlignment<N - 1>::value % ElementAlignment<N>::value) {
|
||||
size_t start =
|
||||
Offset<N - 1>() + SizeOf<ElementType<N - 1>>() * size_[N - 1];
|
||||
ASAN_POISON_MEMORY_REGION(p + start, Offset<N>() - start);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
// Human-readable description of the memory layout. Useful for debugging.
|
||||
// Slow.
|
||||
//
|
||||
// // char[5], 3 bytes of padding, int[3], 4 bytes of padding, followed
|
||||
// // by an unknown number of doubles.
|
||||
// auto x = Layout<char, int, double>::Partial(5, 3);
|
||||
// assert(x.DebugString() ==
|
||||
// "@0<char>(1)[5]; @8<int>(4)[3]; @24<double>(8)");
|
||||
//
|
||||
// Each field is in the following format: @offset<type>(sizeof)[size] (<type>
|
||||
// may be missing depending on the target platform). For example,
|
||||
// @8<int>(4)[3] means that at offset 8 we have an array of ints, where each
|
||||
// int is 4 bytes, and we have 3 of those ints. The size of the last field may
|
||||
// be missing (as in the example above). Only fields with known offsets are
|
||||
// described. Type names may differ across platforms: one compiler might
|
||||
// produce "unsigned*" where another produces "unsigned int *".
|
||||
std::string DebugString() const {
|
||||
const auto offsets = Offsets();
|
||||
const size_t sizes[] = {SizeOf<ElementType<OffsetSeq>>()...};
|
||||
const std::string types[] = {
|
||||
adl_barrier::TypeName<ElementType<OffsetSeq>>()...};
|
||||
std::string res = absl::StrCat("@0", types[0], "(", sizes[0], ")");
|
||||
for (size_t i = 0; i != NumOffsets - 1; ++i) {
|
||||
absl::StrAppend(&res, "[", size_[i], "]; @", offsets[i + 1], types[i + 1],
|
||||
"(", sizes[i + 1], ")");
|
||||
}
|
||||
// NumSizes is a constant that may be zero. Some compilers cannot see that
|
||||
// inside the if statement "size_[NumSizes - 1]" must be valid.
|
||||
int last = static_cast<int>(NumSizes) - 1;
|
||||
if (NumTypes == NumSizes && last >= 0) {
|
||||
absl::StrAppend(&res, "[", size_[last], "]");
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
private:
|
||||
// Arguments of `Layout::Partial()` or `Layout::Layout()`.
|
||||
size_t size_[NumSizes > 0 ? NumSizes : 1];
|
||||
};
|
||||
|
||||
template <size_t NumSizes, class... Ts>
|
||||
using LayoutType = LayoutImpl<
|
||||
std::tuple<Ts...>, absl::make_index_sequence<NumSizes>,
|
||||
absl::make_index_sequence<adl_barrier::Min(sizeof...(Ts), NumSizes + 1)>>;
|
||||
|
||||
} // namespace internal_layout
|
||||
|
||||
// Descriptor of arrays of various types and sizes laid out in memory one after
|
||||
// another. See the top of the file for documentation.
|
||||
//
|
||||
// Check out the public API of internal_layout::LayoutImpl above. The type is
|
||||
// internal to the library but its methods are public, and they are inherited
|
||||
// by `Layout`.
|
||||
template <class... Ts>
|
||||
class Layout : public internal_layout::LayoutType<sizeof...(Ts), Ts...> {
|
||||
public:
|
||||
static_assert(sizeof...(Ts) > 0, "At least one field is required");
|
||||
static_assert(
|
||||
absl::conjunction<internal_layout::IsLegalElementType<Ts>...>::value,
|
||||
"Invalid element type (see IsLegalElementType)");
|
||||
|
||||
// The result type of `Partial()` with `NumSizes` arguments.
|
||||
template <size_t NumSizes>
|
||||
using PartialType = internal_layout::LayoutType<NumSizes, Ts...>;
|
||||
|
||||
// `Layout` knows the element types of the arrays we want to lay out in
|
||||
// memory but not the number of elements in each array.
|
||||
// `Partial(size1, ..., sizeN)` allows us to specify the latter. The
|
||||
// resulting immutable object can be used to obtain pointers to the
|
||||
// individual arrays.
|
||||
//
|
||||
// It's allowed to pass fewer array sizes than the number of arrays. E.g.,
|
||||
// if all you need is to the offset of the second array, you only need to
|
||||
// pass one argument -- the number of elements in the first array.
|
||||
//
|
||||
// // int[3] followed by 4 bytes of padding and an unknown number of
|
||||
// // doubles.
|
||||
// auto x = Layout<int, double>::Partial(3);
|
||||
// // doubles start at byte 16.
|
||||
// assert(x.Offset<1>() == 16);
|
||||
//
|
||||
// If you know the number of elements in all arrays, you can still call
|
||||
// `Partial()` but it's more convenient to use the constructor of `Layout`.
|
||||
//
|
||||
// Layout<int, double> x(3, 5);
|
||||
//
|
||||
// Note: The sizes of the arrays must be specified in number of elements,
|
||||
// not in bytes.
|
||||
//
|
||||
// Requires: `sizeof...(Sizes) <= sizeof...(Ts)`.
|
||||
// Requires: all arguments are convertible to `size_t`.
|
||||
template <class... Sizes>
|
||||
static constexpr PartialType<sizeof...(Sizes)> Partial(Sizes&&... sizes) {
|
||||
static_assert(sizeof...(Sizes) <= sizeof...(Ts), "");
|
||||
return PartialType<sizeof...(Sizes)>(absl::forward<Sizes>(sizes)...);
|
||||
}
|
||||
|
||||
// Creates a layout with the sizes of all arrays specified. If you know
|
||||
// only the sizes of the first N arrays (where N can be zero), you can use
|
||||
// `Partial()` defined above. The constructor is essentially equivalent to
|
||||
// calling `Partial()` and passing in all array sizes; the constructor is
|
||||
// provided as a convenient abbreviation.
|
||||
//
|
||||
// Note: The sizes of the arrays must be specified in number of elements,
|
||||
// not in bytes.
|
||||
constexpr explicit Layout(internal_layout::TypeToSize<Ts>... sizes)
|
||||
: internal_layout::LayoutType<sizeof...(Ts), Ts...>(sizes...) {}
|
||||
};
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_LAYOUT_H_
|
||||
1567
third_party/abseil_cpp/absl/container/internal/layout_test.cc
vendored
Normal file
1567
third_party/abseil_cpp/absl/container/internal/layout_test.cc
vendored
Normal file
File diff suppressed because it is too large
Load diff
92
third_party/abseil_cpp/absl/container/internal/node_hash_policy.h
vendored
Normal file
92
third_party/abseil_cpp/absl/container/internal/node_hash_policy.h
vendored
Normal file
|
|
@ -0,0 +1,92 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
// Adapts a policy for nodes.
|
||||
//
|
||||
// The node policy should model:
|
||||
//
|
||||
// struct Policy {
|
||||
// // Returns a new node allocated and constructed using the allocator, using
|
||||
// // the specified arguments.
|
||||
// template <class Alloc, class... Args>
|
||||
// value_type* new_element(Alloc* alloc, Args&&... args) const;
|
||||
//
|
||||
// // Destroys and deallocates node using the allocator.
|
||||
// template <class Alloc>
|
||||
// void delete_element(Alloc* alloc, value_type* node) const;
|
||||
// };
|
||||
//
|
||||
// It may also optionally define `value()` and `apply()`. For documentation on
|
||||
// these, see hash_policy_traits.h.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_NODE_HASH_POLICY_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_NODE_HASH_POLICY_H_
|
||||
|
||||
#include <cassert>
|
||||
#include <cstddef>
|
||||
#include <memory>
|
||||
#include <type_traits>
|
||||
#include <utility>
|
||||
|
||||
#include "absl/base/config.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
// Policy adapter that stores each element out-of-line: a hashtable slot is a
// pointer to a separately allocated element. Because transfer() only copies
// the pointer, elements never move, giving node containers pointer stability.
// Allocation and destruction are delegated to the derived `Policy`
// (CRTP-style), which must provide new_element()/delete_element() as
// described in the file comment above.
template <class Reference, class Policy>
struct node_hash_policy {
  static_assert(std::is_lvalue_reference<Reference>::value, "");

  // A slot is a pointer to the cv-unqualified element type.
  using slot_type = typename std::remove_cv<
      typename std::remove_reference<Reference>::type>::type*;

  // Allocates and constructs a new element via Policy and stores its address
  // in *slot.
  template <class Alloc, class... Args>
  static void construct(Alloc* alloc, slot_type* slot, Args&&... args) {
    *slot = Policy::new_element(alloc, std::forward<Args>(args)...);
  }

  // Destroys and deallocates the element pointed to by *slot via Policy.
  template <class Alloc>
  static void destroy(Alloc* alloc, slot_type* slot) {
    Policy::delete_element(alloc, *slot);
  }

  // Transferring a node is just copying the pointer; the element itself is
  // untouched (no allocator needed, hence the unnamed parameter).
  template <class Alloc>
  static void transfer(Alloc*, slot_type* new_slot, slot_type* old_slot) {
    *new_slot = *old_slot;
  }

  // Memory attributed to one slot. Passing nullptr asks Policy for the
  // per-slot overhead when no element is present.
  static size_t space_used(const slot_type* slot) {
    if (slot == nullptr) return Policy::element_space_used(nullptr);
    return Policy::element_space_used(*slot);
  }

  // Double dereference: slot -> element pointer -> element.
  static Reference element(slot_type* slot) { return **slot; }

  // Optional pass-throughs to Policy::value()/Policy::apply(); participate in
  // overload resolution only if Policy defines them (SFINAE on the trailing
  // return type). See hash_policy_traits.h for their contracts.
  template <class T, class P = Policy>
  static auto value(T* elem) -> decltype(P::value(elem)) {
    return P::value(elem);
  }

  template <class... Ts, class P = Policy>
  static auto apply(Ts&&... ts) -> decltype(P::apply(std::forward<Ts>(ts)...)) {
    return P::apply(std::forward<Ts>(ts)...);
  }
};
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_NODE_HASH_POLICY_H_
|
||||
69
third_party/abseil_cpp/absl/container/internal/node_hash_policy_test.cc
vendored
Normal file
69
third_party/abseil_cpp/absl/container/internal/node_hash_policy_test.cc
vendored
Normal file
|
|
@ -0,0 +1,69 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "absl/container/internal/node_hash_policy.h"
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "gmock/gmock.h"
|
||||
#include "gtest/gtest.h"
|
||||
#include "absl/container/internal/hash_policy_traits.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
namespace {
|
||||
|
||||
using ::testing::Pointee;
|
||||
|
||||
// Minimal node policy for the tests below: elements are heap-allocated ints.
// new_element()/delete_element() ignore the allocator and use plain
// new/delete, which is sufficient for exercising node_hash_policy's
// forwarding behavior.
struct Policy : node_hash_policy<int&, Policy> {
  using key_type = int;
  using init_type = int;

  template <class Alloc>
  static int* new_element(Alloc* alloc, int value) {
    return new int(value);
  }

  template <class Alloc>
  static void delete_element(Alloc* alloc, int* elem) {
    delete elem;
  }
};
|
||||
|
||||
// The traits wrapper under test: hash_policy_traits layered over the node
// policy above.
using NodePolicy = hash_policy_traits<Policy>;

// Fixture providing an allocator and a slot `a` that initially points at the
// stack int `n`.
struct NodeTest : ::testing::Test {
  std::allocator<int> alloc;
  int n = 53;
  int* a = &n;
};
|
||||
|
||||
// construct() should heap-allocate an int holding 42 and store its address in
// the slot; destroy() releases it again so the test does not leak.
TEST_F(NodeTest, ConstructDestroy) {
  NodePolicy::construct(&alloc, &a, 42);
  EXPECT_THAT(a, Pointee(42));
  NodePolicy::destroy(&alloc, &a);
}

// transfer() moves node ownership by copying the stored pointer verbatim:
// afterwards `a` aliases `s` exactly.
TEST_F(NodeTest, transfer) {
  int s = 42;
  int* b = &s;
  NodePolicy::transfer(&alloc, &a, &b);
  EXPECT_EQ(&s, a);
}
|
||||
|
||||
} // namespace
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
197
third_party/abseil_cpp/absl/container/internal/raw_hash_map.h
vendored
Normal file
197
third_party/abseil_cpp/absl/container/internal/raw_hash_map.h
vendored
Normal file
|
|
@ -0,0 +1,197 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_RAW_HASH_MAP_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_RAW_HASH_MAP_H_
|
||||
|
||||
#include <tuple>
|
||||
#include <type_traits>
|
||||
#include <utility>
|
||||
|
||||
#include "absl/base/internal/throw_delegate.h"
|
||||
#include "absl/container/internal/container_memory.h"
|
||||
#include "absl/container/internal/raw_hash_set.h" // IWYU pragma: export
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
// raw_hash_set augmented with the map-specific API: at(), operator[],
// try_emplace(), and insert_or_assign(). Slots store the map's value pairs;
// Policy::value() extracts a reference to the mapped value from a slot.
template <class Policy, class Hash, class Eq, class Alloc>
class raw_hash_map : public raw_hash_set<Policy, Hash, Eq, Alloc> {
  // P is Policy. It's passed as a template argument to support maps that have
  // incomplete types as values, as in unordered_map<K, IncompleteType>.
  // MappedReference<> may be a non-reference type.
  template <class P>
  using MappedReference = decltype(P::value(
      std::addressof(std::declval<typename raw_hash_map::reference>())));

  // MappedConstReference<> may be a non-reference type.
  template <class P>
  using MappedConstReference = decltype(P::value(
      std::addressof(std::declval<typename raw_hash_map::const_reference>())));

  // Heterogeneous (transparent) lookup is enabled only when BOTH the hasher
  // and the equality functor are transparent.
  using KeyArgImpl =
      KeyArg<IsTransparent<Eq>::value && IsTransparent<Hash>::value>;

 public:
  using key_type = typename Policy::key_type;
  using mapped_type = typename Policy::mapped_type;
  // The accepted key argument type: K itself under transparent lookup,
  // otherwise coerced to key_type.
  template <class K>
  using key_arg = typename KeyArgImpl::template type<K, key_type>;

  static_assert(!std::is_reference<key_type>::value, "");
  // TODO(alkis): remove this assertion and verify that reference mapped_type is
  // supported.
  static_assert(!std::is_reference<mapped_type>::value, "");

  using iterator = typename raw_hash_map::raw_hash_set::iterator;
  using const_iterator = typename raw_hash_map::raw_hash_set::const_iterator;

  raw_hash_map() {}
  using raw_hash_map::raw_hash_set::raw_hash_set;

  // The last two template parameters ensure that both arguments are rvalues
  // (lvalue arguments are handled by the overloads below). This is necessary
  // for supporting bitfield arguments.
  //
  // union { int n : 1; };
  // flat_hash_map<int, int> m;
  // m.insert_or_assign(n, n);
  template <class K = key_type, class V = mapped_type, K* = nullptr,
            V* = nullptr>
  std::pair<iterator, bool> insert_or_assign(key_arg<K>&& k, V&& v) {
    return insert_or_assign_impl(std::forward<K>(k), std::forward<V>(v));
  }

  template <class K = key_type, class V = mapped_type, K* = nullptr>
  std::pair<iterator, bool> insert_or_assign(key_arg<K>&& k, const V& v) {
    return insert_or_assign_impl(std::forward<K>(k), v);
  }

  template <class K = key_type, class V = mapped_type, V* = nullptr>
  std::pair<iterator, bool> insert_or_assign(const key_arg<K>& k, V&& v) {
    return insert_or_assign_impl(k, std::forward<V>(v));
  }

  template <class K = key_type, class V = mapped_type>
  std::pair<iterator, bool> insert_or_assign(const key_arg<K>& k, const V& v) {
    return insert_or_assign_impl(k, v);
  }

  // Hinted overloads: the const_iterator hint is accepted for API parity with
  // std::unordered_map but otherwise ignored.
  template <class K = key_type, class V = mapped_type, K* = nullptr,
            V* = nullptr>
  iterator insert_or_assign(const_iterator, key_arg<K>&& k, V&& v) {
    return insert_or_assign(std::forward<K>(k), std::forward<V>(v)).first;
  }

  template <class K = key_type, class V = mapped_type, K* = nullptr>
  iterator insert_or_assign(const_iterator, key_arg<K>&& k, const V& v) {
    return insert_or_assign(std::forward<K>(k), v).first;
  }

  template <class K = key_type, class V = mapped_type, V* = nullptr>
  iterator insert_or_assign(const_iterator, const key_arg<K>& k, V&& v) {
    return insert_or_assign(k, std::forward<V>(v)).first;
  }

  template <class K = key_type, class V = mapped_type>
  iterator insert_or_assign(const_iterator, const key_arg<K>& k, const V& v) {
    return insert_or_assign(k, v).first;
  }

  // All `try_emplace()` overloads make the same guarantees regarding rvalue
  // arguments as `std::unordered_map::try_emplace()`, namely that these
  // functions will not move from rvalue arguments if insertions do not happen.
  template <class K = key_type, class... Args,
            typename std::enable_if<
                !std::is_convertible<K, const_iterator>::value, int>::type = 0,
            K* = nullptr>
  std::pair<iterator, bool> try_emplace(key_arg<K>&& k, Args&&... args) {
    return try_emplace_impl(std::forward<K>(k), std::forward<Args>(args)...);
  }

  template <class K = key_type, class... Args,
            typename std::enable_if<
                !std::is_convertible<K, const_iterator>::value, int>::type = 0>
  std::pair<iterator, bool> try_emplace(const key_arg<K>& k, Args&&... args) {
    return try_emplace_impl(k, std::forward<Args>(args)...);
  }

  template <class K = key_type, class... Args, K* = nullptr>
  iterator try_emplace(const_iterator, key_arg<K>&& k, Args&&... args) {
    return try_emplace(std::forward<K>(k), std::forward<Args>(args)...).first;
  }

  template <class K = key_type, class... Args>
  iterator try_emplace(const_iterator, const key_arg<K>& k, Args&&... args) {
    return try_emplace(k, std::forward<Args>(args)...).first;
  }

  // Checked lookup: throws std::out_of_range (via the Abseil throw delegate,
  // which aborts when exceptions are disabled) if the key is absent.
  template <class K = key_type, class P = Policy>
  MappedReference<P> at(const key_arg<K>& key) {
    auto it = this->find(key);
    if (it == this->end()) {
      base_internal::ThrowStdOutOfRange(
          "absl::container_internal::raw_hash_map<>::at");
    }
    return Policy::value(&*it);
  }

  template <class K = key_type, class P = Policy>
  MappedConstReference<P> at(const key_arg<K>& key) const {
    auto it = this->find(key);
    if (it == this->end()) {
      base_internal::ThrowStdOutOfRange(
          "absl::container_internal::raw_hash_map<>::at");
    }
    return Policy::value(&*it);
  }

  // operator[]: default-constructs the mapped value (via try_emplace with no
  // value arguments) when the key is absent.
  template <class K = key_type, class P = Policy, K* = nullptr>
  MappedReference<P> operator[](key_arg<K>&& key) {
    return Policy::value(&*try_emplace(std::forward<K>(key)).first);
  }

  template <class K = key_type, class P = Policy>
  MappedReference<P> operator[](const key_arg<K>& key) {
    return Policy::value(&*try_emplace(key).first);
  }

 private:
  // Single-lookup insert-or-assign: find_or_prepare_insert() returns the slot
  // index and whether a new slot was claimed; assign in place on collision.
  template <class K, class V>
  std::pair<iterator, bool> insert_or_assign_impl(K&& k, V&& v) {
    auto res = this->find_or_prepare_insert(k);
    if (res.second)
      this->emplace_at(res.first, std::forward<K>(k), std::forward<V>(v));
    else
      Policy::value(&*this->iterator_at(res.first)) = std::forward<V>(v);
    return {this->iterator_at(res.first), res.second};
  }

  // Single-lookup try_emplace: constructs the pair piecewise only if the key
  // was absent, so args are not moved-from on a failed insertion.
  template <class K = key_type, class... Args>
  std::pair<iterator, bool> try_emplace_impl(K&& k, Args&&... args) {
    auto res = this->find_or_prepare_insert(k);
    if (res.second)
      this->emplace_at(res.first, std::piecewise_construct,
                       std::forward_as_tuple(std::forward<K>(k)),
                       std::forward_as_tuple(std::forward<Args>(args)...));
    return {this->iterator_at(res.first), res.second};
  }
};
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_RAW_HASH_MAP_H_
|
||||
48
third_party/abseil_cpp/absl/container/internal/raw_hash_set.cc
vendored
Normal file
48
third_party/abseil_cpp/absl/container/internal/raw_hash_set.cc
vendored
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "absl/container/internal/raw_hash_set.h"
|
||||
|
||||
#include <atomic>
|
||||
#include <cstddef>
|
||||
|
||||
#include "absl/base/config.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
// Out-of-line definition for Group::kWidth: before C++17, an odr-used static
// constexpr data member still requires a namespace-scope definition.
constexpr size_t Group::kWidth;
|
||||
|
||||
// Returns "random" seed.
//
// The seed is a monotonically increasing per-thread (or process-wide atomic)
// counter XORed with the counter's own address, so different
// threads/processes observe different sequences at near-zero cost. It is not
// cryptographically random.
inline size_t RandomSeed() {
  // ABSL_HAVE_THREAD_LOCAL is a feature-presence macro (defined or not
  // defined), so probe it with #ifdef rather than #if: #ifdef also works if
  // the macro is ever defined with an empty value, and matches how Abseil
  // feature macros are tested elsewhere.
#ifdef ABSL_HAVE_THREAD_LOCAL
  static thread_local size_t counter = 0;
  size_t value = ++counter;
#else   // ABSL_HAVE_THREAD_LOCAL
  static std::atomic<size_t> counter(0);
  size_t value = counter.fetch_add(1, std::memory_order_relaxed);
#endif  // ABSL_HAVE_THREAD_LOCAL
  // Mix in the counter's address to de-correlate seeds across
  // threads/processes/loads.
  return value ^ static_cast<size_t>(reinterpret_cast<uintptr_t>(&counter));
}
|
||||
|
||||
// Pseudo-randomly returns true roughly 6/13 of the time, derived from the
// hash's H1 bits mixed with a fresh seed. NOTE(review): presumably the caller
// uses this to randomize the direction of insertion within a probe group —
// confirm against the call site in raw_hash_set.h.
bool ShouldInsertBackwards(size_t hash, ctrl_t* ctrl) {
  // To avoid problems with weak hashes and single bit tests, we use % 13.
  // TODO(kfm,sbenza): revisit after we do unconditional mixing
  return (H1(hash, ctrl) ^ RandomSeed()) % 13 > 6;
}
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
1885
third_party/abseil_cpp/absl/container/internal/raw_hash_set.h
vendored
Normal file
1885
third_party/abseil_cpp/absl/container/internal/raw_hash_set.h
vendored
Normal file
File diff suppressed because it is too large
Load diff
430
third_party/abseil_cpp/absl/container/internal/raw_hash_set_allocator_test.cc
vendored
Normal file
430
third_party/abseil_cpp/absl/container/internal/raw_hash_set_allocator_test.cc
vendored
Normal file
|
|
@ -0,0 +1,430 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include <limits>
|
||||
#include <scoped_allocator>
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
#include "absl/container/internal/raw_hash_set.h"
|
||||
#include "absl/container/internal/tracked.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
namespace {
|
||||
|
||||
// Bit flags selecting which propagate_on_container_* traits CheckedAlloc
// (below) reports as true_type.
enum AllocSpec {
  kPropagateOnCopy = 1,
  kPropagateOnMove = 2,
  kPropagateOnSwap = 4,
};

// Bookkeeping shared (via shared_ptr) by all copies of a CheckedAlloc: the
// total number of allocations performed and the set of currently outstanding
// pointers this allocator family owns.
struct AllocState {
  size_t num_allocs = 0;
  std::set<void*> owned;
};
|
||||
|
||||
// A std::allocator wrapper that (a) reports the propagation traits selected
// by Spec, (b) records every allocation/deallocation in a shared AllocState,
// and (c) carries an id_ used for equality so tests can observe which
// allocator a container ended up with. Mismatched alloc/dealloc pairs are
// reported as test failures via ADD_FAILURE().
template <class T,
          int Spec = kPropagateOnCopy | kPropagateOnMove | kPropagateOnSwap>
class CheckedAlloc {
 public:
  template <class, int>
  friend class CheckedAlloc;

  using value_type = T;

  CheckedAlloc() {}
  explicit CheckedAlloc(size_t id) : id_(id) {}
  CheckedAlloc(const CheckedAlloc&) = default;
  CheckedAlloc& operator=(const CheckedAlloc&) = default;

  // Rebinding copy: shares both the id and the tracking state.
  template <class U>
  CheckedAlloc(const CheckedAlloc<U, Spec>& that)
      : id_(that.id_), state_(that.state_) {}

  template <class U>
  struct rebind {
    using other = CheckedAlloc<U, Spec>;
  };

  // Propagation traits derived from the Spec bit mask.
  using propagate_on_container_copy_assignment =
      std::integral_constant<bool, (Spec & kPropagateOnCopy) != 0>;

  using propagate_on_container_move_assignment =
      std::integral_constant<bool, (Spec & kPropagateOnMove) != 0>;

  using propagate_on_container_swap =
      std::integral_constant<bool, (Spec & kPropagateOnSwap) != 0>;

  // Copy-construction of a container either reuses this allocator (when
  // propagating on copy) or hands out a fresh default-constructed one.
  CheckedAlloc select_on_container_copy_construction() const {
    if (Spec & kPropagateOnCopy) return *this;
    return {};
  }

  T* allocate(size_t n) {
    T* ptr = std::allocator<T>().allocate(n);
    track_alloc(ptr);
    return ptr;
  }
  void deallocate(T* ptr, size_t n) {
    memset(ptr, 0, n * sizeof(T));  // The freed memory must be unpoisoned.
    track_dealloc(ptr);
    return std::allocator<T>().deallocate(ptr, n);
  }

  // Equality is by id only; two allocators with the same id are considered
  // interchangeable regardless of their tracking state.
  friend bool operator==(const CheckedAlloc& a, const CheckedAlloc& b) {
    return a.id_ == b.id_;
  }
  friend bool operator!=(const CheckedAlloc& a, const CheckedAlloc& b) {
    return !(a == b);
  }

  size_t num_allocs() const { return state_->num_allocs; }

  void swap(CheckedAlloc& that) {
    using std::swap;
    swap(id_, that.id_);
    swap(state_, that.state_);
  }

  friend void swap(CheckedAlloc& a, CheckedAlloc& b) { a.swap(b); }

  friend std::ostream& operator<<(std::ostream& o, const CheckedAlloc& a) {
    return o << "alloc(" << a.id_ << ")";
  }

 private:
  // Records a new allocation; flags double-tracking of the same pointer.
  void track_alloc(void* ptr) {
    AllocState* state = state_.get();
    ++state->num_allocs;
    if (!state->owned.insert(ptr).second)
      ADD_FAILURE() << *this << " got previously allocated memory: " << ptr;
  }
  // Records a deallocation; flags pointers this allocator never handed out.
  void track_dealloc(void* ptr) {
    if (state_->owned.erase(ptr) != 1)
      ADD_FAILURE() << *this
                    << " deleting memory owned by another allocator: " << ptr;
  }

  size_t id_ = std::numeric_limits<size_t>::max();

  std::shared_ptr<AllocState> state_ = std::make_shared<AllocState>();
};
|
||||
|
||||
// Hash functor mapping every 32-bit value to itself. Using the identity as
// the "hash" lets the tests below control slot placement deterministically.
struct Identity {
  int32_t operator()(const int32_t value) const { return value; }
};
|
||||
|
||||
// Hashtable policy for the allocator tests: slots hold Tracked<int32_t> so
// the tests can count copies and moves; the key is the tracked int32_t value.
struct Policy {
  using slot_type = Tracked<int32_t>;
  using init_type = Tracked<int32_t>;
  using key_type = int32_t;

  // Constructs the slot through allocator_traits, so the container's
  // allocator is the one observed performing the construction.
  template <class allocator_type, class... Args>
  static void construct(allocator_type* alloc, slot_type* slot,
                        Args&&... args) {
    std::allocator_traits<allocator_type>::construct(
        *alloc, slot, std::forward<Args>(args)...);
  }

  template <class allocator_type>
  static void destroy(allocator_type* alloc, slot_type* slot) {
    std::allocator_traits<allocator_type>::destroy(*alloc, slot);
  }

  // Transfer = move-construct into the new slot, then destroy the old one.
  // This is what increments num_moves() when the table rehashes.
  template <class allocator_type>
  static void transfer(allocator_type* alloc, slot_type* new_slot,
                       slot_type* old_slot) {
    construct(alloc, new_slot, std::move(*old_slot));
    destroy(alloc, old_slot);
  }

  // apply() overloads hand the key (and the value in its appropriate value
  // category) to the hashtable's callback, for raw ints and for
  // lvalue/rvalue slots.
  template <class F>
  static auto apply(F&& f, int32_t v) -> decltype(std::forward<F>(f)(v, v)) {
    return std::forward<F>(f)(v, v);
  }

  template <class F>
  static auto apply(F&& f, const slot_type& v)
      -> decltype(std::forward<F>(f)(v.val(), v)) {
    return std::forward<F>(f)(v.val(), v);
  }

  template <class F>
  static auto apply(F&& f, slot_type&& v)
      -> decltype(std::forward<F>(f)(v.val(), std::move(v))) {
    return std::forward<F>(f)(v.val(), std::move(v));
  }

  static slot_type& element(slot_type* slot) { return *slot; }
};
|
||||
|
||||
// Fixture parameterized on the allocator propagation Spec. Provides a table
// t1 built with allocator a1, plus an unrelated allocator a2, so each test
// can check which allocator (and how many allocations) a container operation
// ends up with.
template <int Spec>
struct PropagateTest : public ::testing::Test {
  using Alloc = CheckedAlloc<Tracked<int32_t>, Spec>;

  using Table = raw_hash_set<Policy, Identity, std::equal_to<int32_t>, Alloc>;

  PropagateTest() {
    // Sanity-check the fixture wiring before any test body runs.
    EXPECT_EQ(a1, t1.get_allocator());
    EXPECT_NE(a2, t1.get_allocator());
  }

  Alloc a1 = Alloc(1);
  Table t1 = Table(0, a1);
  Alloc a2 = Alloc(2);
};

// Spec variants: full propagation, and with copy- or move-propagation
// disabled, respectively.
using PropagateOnAll =
    PropagateTest<kPropagateOnCopy | kPropagateOnMove | kPropagateOnSwap>;
using NoPropagateOnCopy = PropagateTest<kPropagateOnMove | kPropagateOnSwap>;
using NoPropagateOnMove = PropagateTest<kPropagateOnCopy | kPropagateOnSwap>;
|
||||
|
||||
// --- Basic insert/rehash accounting -------------------------------------

TEST_F(PropagateOnAll, Empty) { EXPECT_EQ(0, a1.num_allocs()); }

TEST_F(PropagateOnAll, InsertAllocates) {
  auto it = t1.insert(0).first;
  EXPECT_EQ(1, a1.num_allocs());
  EXPECT_EQ(0, it->num_moves());
  EXPECT_EQ(0, it->num_copies());
}

// A duplicate insert must neither allocate nor touch the stored element.
TEST_F(PropagateOnAll, InsertDecomposes) {
  auto it = t1.insert(0).first;
  EXPECT_EQ(1, a1.num_allocs());
  EXPECT_EQ(0, it->num_moves());
  EXPECT_EQ(0, it->num_copies());

  EXPECT_FALSE(t1.insert(0).second);
  EXPECT_EQ(1, a1.num_allocs());
  EXPECT_EQ(0, it->num_moves());
  EXPECT_EQ(0, it->num_copies());
}

// Rehashing allocates a new backing array and moves (never copies) elements.
TEST_F(PropagateOnAll, RehashMoves) {
  auto it = t1.insert(0).first;
  EXPECT_EQ(0, it->num_moves());
  t1.rehash(2 * t1.capacity());
  EXPECT_EQ(2, a1.num_allocs());
  it = t1.find(0);
  EXPECT_EQ(1, it->num_moves());
  EXPECT_EQ(0, it->num_copies());
}

// --- Copy construction ---------------------------------------------------
// With propagation, the copy reuses a1; without, it gets a fresh allocator.

TEST_F(PropagateOnAll, CopyConstructor) {
  auto it = t1.insert(0).first;
  Table u(t1);
  EXPECT_EQ(2, a1.num_allocs());
  EXPECT_EQ(0, it->num_moves());
  EXPECT_EQ(1, it->num_copies());
}

TEST_F(NoPropagateOnCopy, CopyConstructor) {
  auto it = t1.insert(0).first;
  Table u(t1);
  EXPECT_EQ(1, a1.num_allocs());
  EXPECT_EQ(1, u.get_allocator().num_allocs());
  EXPECT_EQ(0, it->num_moves());
  EXPECT_EQ(1, it->num_copies());
}

// An explicitly supplied allocator overrides propagation behavior.
TEST_F(PropagateOnAll, CopyConstructorWithSameAlloc) {
  auto it = t1.insert(0).first;
  Table u(t1, a1);
  EXPECT_EQ(2, a1.num_allocs());
  EXPECT_EQ(0, it->num_moves());
  EXPECT_EQ(1, it->num_copies());
}

TEST_F(NoPropagateOnCopy, CopyConstructorWithSameAlloc) {
  auto it = t1.insert(0).first;
  Table u(t1, a1);
  EXPECT_EQ(2, a1.num_allocs());
  EXPECT_EQ(0, it->num_moves());
  EXPECT_EQ(1, it->num_copies());
}

TEST_F(PropagateOnAll, CopyConstructorWithDifferentAlloc) {
  auto it = t1.insert(0).first;
  Table u(t1, a2);
  EXPECT_EQ(a2, u.get_allocator());
  EXPECT_EQ(1, a1.num_allocs());
  EXPECT_EQ(1, a2.num_allocs());
  EXPECT_EQ(0, it->num_moves());
  EXPECT_EQ(1, it->num_copies());
}

TEST_F(NoPropagateOnCopy, CopyConstructorWithDifferentAlloc) {
  auto it = t1.insert(0).first;
  Table u(t1, a2);
  EXPECT_EQ(a2, u.get_allocator());
  EXPECT_EQ(1, a1.num_allocs());
  EXPECT_EQ(1, a2.num_allocs());
  EXPECT_EQ(0, it->num_moves());
  EXPECT_EQ(1, it->num_copies());
}

// --- Move construction ---------------------------------------------------
// Plain move construction always steals the storage (no new allocations,
// no element moves), regardless of propagation.

TEST_F(PropagateOnAll, MoveConstructor) {
  auto it = t1.insert(0).first;
  Table u(std::move(t1));
  EXPECT_EQ(1, a1.num_allocs());
  EXPECT_EQ(0, it->num_moves());
  EXPECT_EQ(0, it->num_copies());
}

TEST_F(NoPropagateOnMove, MoveConstructor) {
  auto it = t1.insert(0).first;
  Table u(std::move(t1));
  EXPECT_EQ(1, a1.num_allocs());
  EXPECT_EQ(0, it->num_moves());
  EXPECT_EQ(0, it->num_copies());
}

TEST_F(PropagateOnAll, MoveConstructorWithSameAlloc) {
  auto it = t1.insert(0).first;
  Table u(std::move(t1), a1);
  EXPECT_EQ(1, a1.num_allocs());
  EXPECT_EQ(0, it->num_moves());
  EXPECT_EQ(0, it->num_copies());
}

TEST_F(NoPropagateOnMove, MoveConstructorWithSameAlloc) {
  auto it = t1.insert(0).first;
  Table u(std::move(t1), a1);
  EXPECT_EQ(1, a1.num_allocs());
  EXPECT_EQ(0, it->num_moves());
  EXPECT_EQ(0, it->num_copies());
}

// Moving into a different allocator cannot steal storage: elements must be
// individually moved into storage from a2.
TEST_F(PropagateOnAll, MoveConstructorWithDifferentAlloc) {
  auto it = t1.insert(0).first;
  Table u(std::move(t1), a2);
  it = u.find(0);
  EXPECT_EQ(a2, u.get_allocator());
  EXPECT_EQ(1, a1.num_allocs());
  EXPECT_EQ(1, a2.num_allocs());
  EXPECT_EQ(1, it->num_moves());
  EXPECT_EQ(0, it->num_copies());
}

TEST_F(NoPropagateOnMove, MoveConstructorWithDifferentAlloc) {
  auto it = t1.insert(0).first;
  Table u(std::move(t1), a2);
  it = u.find(0);
  EXPECT_EQ(a2, u.get_allocator());
  EXPECT_EQ(1, a1.num_allocs());
  EXPECT_EQ(1, a2.num_allocs());
  EXPECT_EQ(1, it->num_moves());
  EXPECT_EQ(0, it->num_copies());
}

// --- Copy assignment -----------------------------------------------------
// Propagation determines whether the target adopts the source's allocator.

TEST_F(PropagateOnAll, CopyAssignmentWithSameAlloc) {
  auto it = t1.insert(0).first;
  Table u(0, a1);
  u = t1;
  EXPECT_EQ(2, a1.num_allocs());
  EXPECT_EQ(0, it->num_moves());
  EXPECT_EQ(1, it->num_copies());
}

TEST_F(NoPropagateOnCopy, CopyAssignmentWithSameAlloc) {
  auto it = t1.insert(0).first;
  Table u(0, a1);
  u = t1;
  EXPECT_EQ(2, a1.num_allocs());
  EXPECT_EQ(0, it->num_moves());
  EXPECT_EQ(1, it->num_copies());
}

TEST_F(PropagateOnAll, CopyAssignmentWithDifferentAlloc) {
  auto it = t1.insert(0).first;
  Table u(0, a2);
  u = t1;
  EXPECT_EQ(a1, u.get_allocator());
  EXPECT_EQ(2, a1.num_allocs());
  EXPECT_EQ(0, a2.num_allocs());
  EXPECT_EQ(0, it->num_moves());
  EXPECT_EQ(1, it->num_copies());
}

TEST_F(NoPropagateOnCopy, CopyAssignmentWithDifferentAlloc) {
  auto it = t1.insert(0).first;
  Table u(0, a2);
  u = t1;
  EXPECT_EQ(a2, u.get_allocator());
  EXPECT_EQ(1, a1.num_allocs());
  EXPECT_EQ(1, a2.num_allocs());
  EXPECT_EQ(0, it->num_moves());
  EXPECT_EQ(1, it->num_copies());
}

// --- Move assignment -----------------------------------------------------

TEST_F(PropagateOnAll, MoveAssignmentWithSameAlloc) {
  auto it = t1.insert(0).first;
  Table u(0, a1);
  u = std::move(t1);
  EXPECT_EQ(a1, u.get_allocator());
  EXPECT_EQ(1, a1.num_allocs());
  EXPECT_EQ(0, it->num_moves());
  EXPECT_EQ(0, it->num_copies());
}

TEST_F(NoPropagateOnMove, MoveAssignmentWithSameAlloc) {
  auto it = t1.insert(0).first;
  Table u(0, a1);
  u = std::move(t1);
  EXPECT_EQ(a1, u.get_allocator());
  EXPECT_EQ(1, a1.num_allocs());
  EXPECT_EQ(0, it->num_moves());
  EXPECT_EQ(0, it->num_copies());
}

TEST_F(PropagateOnAll, MoveAssignmentWithDifferentAlloc) {
  auto it = t1.insert(0).first;
  Table u(0, a2);
  u = std::move(t1);
  EXPECT_EQ(a1, u.get_allocator());
  EXPECT_EQ(1, a1.num_allocs());
  EXPECT_EQ(0, a2.num_allocs());
  EXPECT_EQ(0, it->num_moves());
  EXPECT_EQ(0, it->num_copies());
}

// Without move propagation and with unequal allocators, elements must be
// moved one by one into u's storage.
TEST_F(NoPropagateOnMove, MoveAssignmentWithDifferentAlloc) {
  auto it = t1.insert(0).first;
  Table u(0, a2);
  u = std::move(t1);
  it = u.find(0);
  EXPECT_EQ(a2, u.get_allocator());
  EXPECT_EQ(1, a1.num_allocs());
  EXPECT_EQ(1, a2.num_allocs());
  EXPECT_EQ(1, it->num_moves());
  EXPECT_EQ(0, it->num_copies());
}

// --- Swap ---------------------------------------------------------------
// Swapping with propagation exchanges the allocators along with the storage.

TEST_F(PropagateOnAll, Swap) {
  auto it = t1.insert(0).first;
  Table u(0, a2);
  u.swap(t1);
  EXPECT_EQ(a1, u.get_allocator());
  EXPECT_EQ(a2, t1.get_allocator());
  EXPECT_EQ(1, a1.num_allocs());
  EXPECT_EQ(0, a2.num_allocs());
  EXPECT_EQ(0, it->num_moves());
  EXPECT_EQ(0, it->num_copies());
}
|
||||
|
||||
} // namespace
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
1871
third_party/abseil_cpp/absl/container/internal/raw_hash_set_test.cc
vendored
Normal file
1871
third_party/abseil_cpp/absl/container/internal/raw_hash_set_test.cc
vendored
Normal file
File diff suppressed because it is too large
Load diff
29
third_party/abseil_cpp/absl/container/internal/test_instance_tracker.cc
vendored
Normal file
29
third_party/abseil_cpp/absl/container/internal/test_instance_tracker.cc
vendored
Normal file
|
|
@ -0,0 +1,29 @@
|
|||
// Copyright 2017 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "absl/container/internal/test_instance_tracker.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace test_internal {
|
||||
// Out-of-line definitions for BaseCountedInstance's static counters
// (declared in test_instance_tracker.h). All start at zero; tests reset and
// sample them through InstanceTracker.
int BaseCountedInstance::num_instances_ = 0;
int BaseCountedInstance::num_live_instances_ = 0;
int BaseCountedInstance::num_moves_ = 0;
int BaseCountedInstance::num_copies_ = 0;
int BaseCountedInstance::num_swaps_ = 0;
int BaseCountedInstance::num_comparisons_ = 0;
|
||||
|
||||
} // namespace test_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
274
third_party/abseil_cpp/absl/container/internal/test_instance_tracker.h
vendored
Normal file
274
third_party/abseil_cpp/absl/container/internal/test_instance_tracker.h
vendored
Normal file
|
|
@ -0,0 +1,274 @@
|
|||
// Copyright 2017 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_TEST_INSTANCE_TRACKER_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_TEST_INSTANCE_TRACKER_H_
|
||||
|
||||
#include <cstdlib>
|
||||
#include <ostream>
|
||||
|
||||
#include "absl/types/compare.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace test_internal {
|
||||
|
||||
// A type that counts number of occurrences of the type, the live occurrences of
|
||||
// the type, as well as the number of copies, moves, swaps, and comparisons that
|
||||
// have occurred on the type. This is used as a base class for the copyable,
|
||||
// copyable+movable, and movable types below that are used in actual tests. Use
|
||||
// InstanceTracker in tests to track the number of instances.
|
||||
class BaseCountedInstance {
|
||||
public:
|
||||
explicit BaseCountedInstance(int x) : value_(x) {
|
||||
++num_instances_;
|
||||
++num_live_instances_;
|
||||
}
|
||||
BaseCountedInstance(const BaseCountedInstance& x)
|
||||
: value_(x.value_), is_live_(x.is_live_) {
|
||||
++num_instances_;
|
||||
if (is_live_) ++num_live_instances_;
|
||||
++num_copies_;
|
||||
}
|
||||
BaseCountedInstance(BaseCountedInstance&& x)
|
||||
: value_(x.value_), is_live_(x.is_live_) {
|
||||
x.is_live_ = false;
|
||||
++num_instances_;
|
||||
++num_moves_;
|
||||
}
|
||||
~BaseCountedInstance() {
|
||||
--num_instances_;
|
||||
if (is_live_) --num_live_instances_;
|
||||
}
|
||||
|
||||
BaseCountedInstance& operator=(const BaseCountedInstance& x) {
|
||||
value_ = x.value_;
|
||||
if (is_live_) --num_live_instances_;
|
||||
is_live_ = x.is_live_;
|
||||
if (is_live_) ++num_live_instances_;
|
||||
++num_copies_;
|
||||
return *this;
|
||||
}
|
||||
BaseCountedInstance& operator=(BaseCountedInstance&& x) {
|
||||
value_ = x.value_;
|
||||
if (is_live_) --num_live_instances_;
|
||||
is_live_ = x.is_live_;
|
||||
x.is_live_ = false;
|
||||
++num_moves_;
|
||||
return *this;
|
||||
}
|
||||
|
||||
bool operator==(const BaseCountedInstance& x) const {
|
||||
++num_comparisons_;
|
||||
return value_ == x.value_;
|
||||
}
|
||||
|
||||
bool operator!=(const BaseCountedInstance& x) const {
|
||||
++num_comparisons_;
|
||||
return value_ != x.value_;
|
||||
}
|
||||
|
||||
bool operator<(const BaseCountedInstance& x) const {
|
||||
++num_comparisons_;
|
||||
return value_ < x.value_;
|
||||
}
|
||||
|
||||
bool operator>(const BaseCountedInstance& x) const {
|
||||
++num_comparisons_;
|
||||
return value_ > x.value_;
|
||||
}
|
||||
|
||||
bool operator<=(const BaseCountedInstance& x) const {
|
||||
++num_comparisons_;
|
||||
return value_ <= x.value_;
|
||||
}
|
||||
|
||||
bool operator>=(const BaseCountedInstance& x) const {
|
||||
++num_comparisons_;
|
||||
return value_ >= x.value_;
|
||||
}
|
||||
|
||||
absl::weak_ordering compare(const BaseCountedInstance& x) const {
|
||||
++num_comparisons_;
|
||||
return value_ < x.value_
|
||||
? absl::weak_ordering::less
|
||||
: value_ == x.value_ ? absl::weak_ordering::equivalent
|
||||
: absl::weak_ordering::greater;
|
||||
}
|
||||
|
||||
int value() const {
|
||||
if (!is_live_) std::abort();
|
||||
return value_;
|
||||
}
|
||||
|
||||
friend std::ostream& operator<<(std::ostream& o,
|
||||
const BaseCountedInstance& v) {
|
||||
return o << "[value:" << v.value() << "]";
|
||||
}
|
||||
|
||||
// Implementation of efficient swap() that counts swaps.
|
||||
static void SwapImpl(
|
||||
BaseCountedInstance& lhs, // NOLINT(runtime/references)
|
||||
BaseCountedInstance& rhs) { // NOLINT(runtime/references)
|
||||
using std::swap;
|
||||
swap(lhs.value_, rhs.value_);
|
||||
swap(lhs.is_live_, rhs.is_live_);
|
||||
++BaseCountedInstance::num_swaps_;
|
||||
}
|
||||
|
||||
private:
|
||||
friend class InstanceTracker;
|
||||
|
||||
int value_;
|
||||
|
||||
// Indicates if the value is live, ie it hasn't been moved away from.
|
||||
bool is_live_ = true;
|
||||
|
||||
// Number of instances.
|
||||
static int num_instances_;
|
||||
|
||||
// Number of live instances (those that have not been moved away from.)
|
||||
static int num_live_instances_;
|
||||
|
||||
// Number of times that BaseCountedInstance objects were moved.
|
||||
static int num_moves_;
|
||||
|
||||
// Number of times that BaseCountedInstance objects were copied.
|
||||
static int num_copies_;
|
||||
|
||||
// Number of times that BaseCountedInstance objects were swapped.
|
||||
static int num_swaps_;
|
||||
|
||||
// Number of times that BaseCountedInstance objects were compared.
|
||||
static int num_comparisons_;
|
||||
};
|
||||
|
||||
// Helper to track the BaseCountedInstance instance counters. Expects that the
|
||||
// number of instances and live_instances are the same when it is constructed
|
||||
// and when it is destructed.
|
||||
class InstanceTracker {
|
||||
public:
|
||||
InstanceTracker()
|
||||
: start_instances_(BaseCountedInstance::num_instances_),
|
||||
start_live_instances_(BaseCountedInstance::num_live_instances_) {
|
||||
ResetCopiesMovesSwaps();
|
||||
}
|
||||
~InstanceTracker() {
|
||||
if (instances() != 0) std::abort();
|
||||
if (live_instances() != 0) std::abort();
|
||||
}
|
||||
|
||||
// Returns the number of BaseCountedInstance instances both containing valid
|
||||
// values and those moved away from compared to when the InstanceTracker was
|
||||
// constructed
|
||||
int instances() const {
|
||||
return BaseCountedInstance::num_instances_ - start_instances_;
|
||||
}
|
||||
|
||||
// Returns the number of live BaseCountedInstance instances compared to when
|
||||
// the InstanceTracker was constructed
|
||||
int live_instances() const {
|
||||
return BaseCountedInstance::num_live_instances_ - start_live_instances_;
|
||||
}
|
||||
|
||||
// Returns the number of moves on BaseCountedInstance objects since
|
||||
// construction or since the last call to ResetCopiesMovesSwaps().
|
||||
int moves() const { return BaseCountedInstance::num_moves_ - start_moves_; }
|
||||
|
||||
// Returns the number of copies on BaseCountedInstance objects since
|
||||
// construction or the last call to ResetCopiesMovesSwaps().
|
||||
int copies() const {
|
||||
return BaseCountedInstance::num_copies_ - start_copies_;
|
||||
}
|
||||
|
||||
// Returns the number of swaps on BaseCountedInstance objects since
|
||||
// construction or the last call to ResetCopiesMovesSwaps().
|
||||
int swaps() const { return BaseCountedInstance::num_swaps_ - start_swaps_; }
|
||||
|
||||
// Returns the number of comparisons on BaseCountedInstance objects since
|
||||
// construction or the last call to ResetCopiesMovesSwaps().
|
||||
int comparisons() const {
|
||||
return BaseCountedInstance::num_comparisons_ - start_comparisons_;
|
||||
}
|
||||
|
||||
// Resets the base values for moves, copies, comparisons, and swaps to the
|
||||
// current values, so that subsequent Get*() calls for moves, copies,
|
||||
// comparisons, and swaps will compare to the situation at the point of this
|
||||
// call.
|
||||
void ResetCopiesMovesSwaps() {
|
||||
start_moves_ = BaseCountedInstance::num_moves_;
|
||||
start_copies_ = BaseCountedInstance::num_copies_;
|
||||
start_swaps_ = BaseCountedInstance::num_swaps_;
|
||||
start_comparisons_ = BaseCountedInstance::num_comparisons_;
|
||||
}
|
||||
|
||||
private:
|
||||
int start_instances_;
|
||||
int start_live_instances_;
|
||||
int start_moves_;
|
||||
int start_copies_;
|
||||
int start_swaps_;
|
||||
int start_comparisons_;
|
||||
};
|
||||
|
||||
// Copyable, not movable.
|
||||
class CopyableOnlyInstance : public BaseCountedInstance {
|
||||
public:
|
||||
explicit CopyableOnlyInstance(int x) : BaseCountedInstance(x) {}
|
||||
CopyableOnlyInstance(const CopyableOnlyInstance& rhs) = default;
|
||||
CopyableOnlyInstance& operator=(const CopyableOnlyInstance& rhs) = default;
|
||||
|
||||
friend void swap(CopyableOnlyInstance& lhs, CopyableOnlyInstance& rhs) {
|
||||
BaseCountedInstance::SwapImpl(lhs, rhs);
|
||||
}
|
||||
|
||||
static bool supports_move() { return false; }
|
||||
};
|
||||
|
||||
// Copyable and movable.
|
||||
class CopyableMovableInstance : public BaseCountedInstance {
|
||||
public:
|
||||
explicit CopyableMovableInstance(int x) : BaseCountedInstance(x) {}
|
||||
CopyableMovableInstance(const CopyableMovableInstance& rhs) = default;
|
||||
CopyableMovableInstance(CopyableMovableInstance&& rhs) = default;
|
||||
CopyableMovableInstance& operator=(const CopyableMovableInstance& rhs) =
|
||||
default;
|
||||
CopyableMovableInstance& operator=(CopyableMovableInstance&& rhs) = default;
|
||||
|
||||
friend void swap(CopyableMovableInstance& lhs, CopyableMovableInstance& rhs) {
|
||||
BaseCountedInstance::SwapImpl(lhs, rhs);
|
||||
}
|
||||
|
||||
static bool supports_move() { return true; }
|
||||
};
|
||||
|
||||
// Only movable, not default-constructible.
|
||||
class MovableOnlyInstance : public BaseCountedInstance {
|
||||
public:
|
||||
explicit MovableOnlyInstance(int x) : BaseCountedInstance(x) {}
|
||||
MovableOnlyInstance(MovableOnlyInstance&& other) = default;
|
||||
MovableOnlyInstance& operator=(MovableOnlyInstance&& other) = default;
|
||||
|
||||
friend void swap(MovableOnlyInstance& lhs, MovableOnlyInstance& rhs) {
|
||||
BaseCountedInstance::SwapImpl(lhs, rhs);
|
||||
}
|
||||
|
||||
static bool supports_move() { return true; }
|
||||
};
|
||||
|
||||
} // namespace test_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_TEST_INSTANCE_TRACKER_H_
|
||||
184
third_party/abseil_cpp/absl/container/internal/test_instance_tracker_test.cc
vendored
Normal file
184
third_party/abseil_cpp/absl/container/internal/test_instance_tracker_test.cc
vendored
Normal file
|
|
@ -0,0 +1,184 @@
|
|||
// Copyright 2017 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "absl/container/internal/test_instance_tracker.h"
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
|
||||
namespace {
|
||||
|
||||
using absl::test_internal::CopyableMovableInstance;
|
||||
using absl::test_internal::CopyableOnlyInstance;
|
||||
using absl::test_internal::InstanceTracker;
|
||||
using absl::test_internal::MovableOnlyInstance;
|
||||
|
||||
TEST(TestInstanceTracker, CopyableMovable) {
|
||||
InstanceTracker tracker;
|
||||
CopyableMovableInstance src(1);
|
||||
EXPECT_EQ(1, src.value()) << src;
|
||||
CopyableMovableInstance copy(src);
|
||||
CopyableMovableInstance move(std::move(src));
|
||||
EXPECT_EQ(1, tracker.copies());
|
||||
EXPECT_EQ(1, tracker.moves());
|
||||
EXPECT_EQ(0, tracker.swaps());
|
||||
EXPECT_EQ(3, tracker.instances());
|
||||
EXPECT_EQ(2, tracker.live_instances());
|
||||
tracker.ResetCopiesMovesSwaps();
|
||||
|
||||
CopyableMovableInstance copy_assign(1);
|
||||
copy_assign = copy;
|
||||
CopyableMovableInstance move_assign(1);
|
||||
move_assign = std::move(move);
|
||||
EXPECT_EQ(1, tracker.copies());
|
||||
EXPECT_EQ(1, tracker.moves());
|
||||
EXPECT_EQ(0, tracker.swaps());
|
||||
EXPECT_EQ(5, tracker.instances());
|
||||
EXPECT_EQ(3, tracker.live_instances());
|
||||
tracker.ResetCopiesMovesSwaps();
|
||||
|
||||
{
|
||||
using std::swap;
|
||||
swap(move_assign, copy);
|
||||
swap(copy, move_assign);
|
||||
EXPECT_EQ(2, tracker.swaps());
|
||||
EXPECT_EQ(0, tracker.copies());
|
||||
EXPECT_EQ(0, tracker.moves());
|
||||
EXPECT_EQ(5, tracker.instances());
|
||||
EXPECT_EQ(3, tracker.live_instances());
|
||||
}
|
||||
}
|
||||
|
||||
TEST(TestInstanceTracker, CopyableOnly) {
|
||||
InstanceTracker tracker;
|
||||
CopyableOnlyInstance src(1);
|
||||
EXPECT_EQ(1, src.value()) << src;
|
||||
CopyableOnlyInstance copy(src);
|
||||
CopyableOnlyInstance copy2(std::move(src)); // NOLINT
|
||||
EXPECT_EQ(2, tracker.copies());
|
||||
EXPECT_EQ(0, tracker.moves());
|
||||
EXPECT_EQ(3, tracker.instances());
|
||||
EXPECT_EQ(3, tracker.live_instances());
|
||||
tracker.ResetCopiesMovesSwaps();
|
||||
|
||||
CopyableOnlyInstance copy_assign(1);
|
||||
copy_assign = copy;
|
||||
CopyableOnlyInstance copy_assign2(1);
|
||||
copy_assign2 = std::move(copy2); // NOLINT
|
||||
EXPECT_EQ(2, tracker.copies());
|
||||
EXPECT_EQ(0, tracker.moves());
|
||||
EXPECT_EQ(5, tracker.instances());
|
||||
EXPECT_EQ(5, tracker.live_instances());
|
||||
tracker.ResetCopiesMovesSwaps();
|
||||
|
||||
{
|
||||
using std::swap;
|
||||
swap(src, copy);
|
||||
swap(copy, src);
|
||||
EXPECT_EQ(2, tracker.swaps());
|
||||
EXPECT_EQ(0, tracker.copies());
|
||||
EXPECT_EQ(0, tracker.moves());
|
||||
EXPECT_EQ(5, tracker.instances());
|
||||
EXPECT_EQ(5, tracker.live_instances());
|
||||
}
|
||||
}
|
||||
|
||||
TEST(TestInstanceTracker, MovableOnly) {
|
||||
InstanceTracker tracker;
|
||||
MovableOnlyInstance src(1);
|
||||
EXPECT_EQ(1, src.value()) << src;
|
||||
MovableOnlyInstance move(std::move(src));
|
||||
MovableOnlyInstance move_assign(2);
|
||||
move_assign = std::move(move);
|
||||
EXPECT_EQ(3, tracker.instances());
|
||||
EXPECT_EQ(1, tracker.live_instances());
|
||||
EXPECT_EQ(2, tracker.moves());
|
||||
EXPECT_EQ(0, tracker.copies());
|
||||
tracker.ResetCopiesMovesSwaps();
|
||||
|
||||
{
|
||||
using std::swap;
|
||||
MovableOnlyInstance other(2);
|
||||
swap(move_assign, other);
|
||||
swap(other, move_assign);
|
||||
EXPECT_EQ(2, tracker.swaps());
|
||||
EXPECT_EQ(0, tracker.copies());
|
||||
EXPECT_EQ(0, tracker.moves());
|
||||
EXPECT_EQ(4, tracker.instances());
|
||||
EXPECT_EQ(2, tracker.live_instances());
|
||||
}
|
||||
}
|
||||
|
||||
TEST(TestInstanceTracker, ExistingInstances) {
|
||||
CopyableMovableInstance uncounted_instance(1);
|
||||
CopyableMovableInstance uncounted_live_instance(
|
||||
std::move(uncounted_instance));
|
||||
InstanceTracker tracker;
|
||||
EXPECT_EQ(0, tracker.instances());
|
||||
EXPECT_EQ(0, tracker.live_instances());
|
||||
EXPECT_EQ(0, tracker.copies());
|
||||
{
|
||||
CopyableMovableInstance instance1(1);
|
||||
EXPECT_EQ(1, tracker.instances());
|
||||
EXPECT_EQ(1, tracker.live_instances());
|
||||
EXPECT_EQ(0, tracker.copies());
|
||||
EXPECT_EQ(0, tracker.moves());
|
||||
{
|
||||
InstanceTracker tracker2;
|
||||
CopyableMovableInstance instance2(instance1);
|
||||
CopyableMovableInstance instance3(std::move(instance2));
|
||||
EXPECT_EQ(3, tracker.instances());
|
||||
EXPECT_EQ(2, tracker.live_instances());
|
||||
EXPECT_EQ(1, tracker.copies());
|
||||
EXPECT_EQ(1, tracker.moves());
|
||||
EXPECT_EQ(2, tracker2.instances());
|
||||
EXPECT_EQ(1, tracker2.live_instances());
|
||||
EXPECT_EQ(1, tracker2.copies());
|
||||
EXPECT_EQ(1, tracker2.moves());
|
||||
}
|
||||
EXPECT_EQ(1, tracker.instances());
|
||||
EXPECT_EQ(1, tracker.live_instances());
|
||||
EXPECT_EQ(1, tracker.copies());
|
||||
EXPECT_EQ(1, tracker.moves());
|
||||
}
|
||||
EXPECT_EQ(0, tracker.instances());
|
||||
EXPECT_EQ(0, tracker.live_instances());
|
||||
EXPECT_EQ(1, tracker.copies());
|
||||
EXPECT_EQ(1, tracker.moves());
|
||||
}
|
||||
|
||||
TEST(TestInstanceTracker, Comparisons) {
|
||||
InstanceTracker tracker;
|
||||
MovableOnlyInstance one(1), two(2);
|
||||
|
||||
EXPECT_EQ(0, tracker.comparisons());
|
||||
EXPECT_FALSE(one == two);
|
||||
EXPECT_EQ(1, tracker.comparisons());
|
||||
EXPECT_TRUE(one != two);
|
||||
EXPECT_EQ(2, tracker.comparisons());
|
||||
EXPECT_TRUE(one < two);
|
||||
EXPECT_EQ(3, tracker.comparisons());
|
||||
EXPECT_FALSE(one > two);
|
||||
EXPECT_EQ(4, tracker.comparisons());
|
||||
EXPECT_TRUE(one <= two);
|
||||
EXPECT_EQ(5, tracker.comparisons());
|
||||
EXPECT_FALSE(one >= two);
|
||||
EXPECT_EQ(6, tracker.comparisons());
|
||||
EXPECT_TRUE(one.compare(two) < 0); // NOLINT
|
||||
EXPECT_EQ(7, tracker.comparisons());
|
||||
|
||||
tracker.ResetCopiesMovesSwaps();
|
||||
EXPECT_EQ(0, tracker.comparisons());
|
||||
}
|
||||
|
||||
} // namespace
|
||||
83
third_party/abseil_cpp/absl/container/internal/tracked.h
vendored
Normal file
83
third_party/abseil_cpp/absl/container/internal/tracked.h
vendored
Normal file
|
|
@ -0,0 +1,83 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_TRACKED_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_TRACKED_H_
|
||||
|
||||
#include <stddef.h>
|
||||
|
||||
#include <memory>
|
||||
#include <utility>
|
||||
|
||||
#include "absl/base/config.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
// A class that tracks its copies and moves so that it can be queried in tests.
|
||||
template <class T>
|
||||
class Tracked {
|
||||
public:
|
||||
Tracked() {}
|
||||
// NOLINTNEXTLINE(runtime/explicit)
|
||||
Tracked(const T& val) : val_(val) {}
|
||||
Tracked(const Tracked& that)
|
||||
: val_(that.val_),
|
||||
num_moves_(that.num_moves_),
|
||||
num_copies_(that.num_copies_) {
|
||||
++(*num_copies_);
|
||||
}
|
||||
Tracked(Tracked&& that)
|
||||
: val_(std::move(that.val_)),
|
||||
num_moves_(std::move(that.num_moves_)),
|
||||
num_copies_(std::move(that.num_copies_)) {
|
||||
++(*num_moves_);
|
||||
}
|
||||
Tracked& operator=(const Tracked& that) {
|
||||
val_ = that.val_;
|
||||
num_moves_ = that.num_moves_;
|
||||
num_copies_ = that.num_copies_;
|
||||
++(*num_copies_);
|
||||
}
|
||||
Tracked& operator=(Tracked&& that) {
|
||||
val_ = std::move(that.val_);
|
||||
num_moves_ = std::move(that.num_moves_);
|
||||
num_copies_ = std::move(that.num_copies_);
|
||||
++(*num_moves_);
|
||||
}
|
||||
|
||||
const T& val() const { return val_; }
|
||||
|
||||
friend bool operator==(const Tracked& a, const Tracked& b) {
|
||||
return a.val_ == b.val_;
|
||||
}
|
||||
friend bool operator!=(const Tracked& a, const Tracked& b) {
|
||||
return !(a == b);
|
||||
}
|
||||
|
||||
size_t num_copies() { return *num_copies_; }
|
||||
size_t num_moves() { return *num_moves_; }
|
||||
|
||||
private:
|
||||
T val_;
|
||||
std::shared_ptr<size_t> num_moves_ = std::make_shared<size_t>(0);
|
||||
std::shared_ptr<size_t> num_copies_ = std::make_shared<size_t>(0);
|
||||
};
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_TRACKED_H_
|
||||
489
third_party/abseil_cpp/absl/container/internal/unordered_map_constructor_test.h
vendored
Normal file
489
third_party/abseil_cpp/absl/container/internal/unordered_map_constructor_test.h
vendored
Normal file
|
|
@ -0,0 +1,489 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_UNORDERED_MAP_CONSTRUCTOR_TEST_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_UNORDERED_MAP_CONSTRUCTOR_TEST_H_
|
||||
|
||||
#include <algorithm>
|
||||
#include <vector>
|
||||
|
||||
#include "gmock/gmock.h"
|
||||
#include "gtest/gtest.h"
|
||||
#include "absl/container/internal/hash_generator_testing.h"
|
||||
#include "absl/container/internal/hash_policy_testing.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
template <class UnordMap>
|
||||
class ConstructorTest : public ::testing::Test {};
|
||||
|
||||
TYPED_TEST_SUITE_P(ConstructorTest);
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, NoArgs) {
|
||||
TypeParam m;
|
||||
EXPECT_TRUE(m.empty());
|
||||
EXPECT_THAT(m, ::testing::UnorderedElementsAre());
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, BucketCount) {
|
||||
TypeParam m(123);
|
||||
EXPECT_TRUE(m.empty());
|
||||
EXPECT_THAT(m, ::testing::UnorderedElementsAre());
|
||||
EXPECT_GE(m.bucket_count(), 123);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, BucketCountHash) {
|
||||
using H = typename TypeParam::hasher;
|
||||
H hasher;
|
||||
TypeParam m(123, hasher);
|
||||
EXPECT_EQ(m.hash_function(), hasher);
|
||||
EXPECT_TRUE(m.empty());
|
||||
EXPECT_THAT(m, ::testing::UnorderedElementsAre());
|
||||
EXPECT_GE(m.bucket_count(), 123);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, BucketCountHashEqual) {
|
||||
using H = typename TypeParam::hasher;
|
||||
using E = typename TypeParam::key_equal;
|
||||
H hasher;
|
||||
E equal;
|
||||
TypeParam m(123, hasher, equal);
|
||||
EXPECT_EQ(m.hash_function(), hasher);
|
||||
EXPECT_EQ(m.key_eq(), equal);
|
||||
EXPECT_TRUE(m.empty());
|
||||
EXPECT_THAT(m, ::testing::UnorderedElementsAre());
|
||||
EXPECT_GE(m.bucket_count(), 123);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, BucketCountHashEqualAlloc) {
|
||||
using H = typename TypeParam::hasher;
|
||||
using E = typename TypeParam::key_equal;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
H hasher;
|
||||
E equal;
|
||||
A alloc(0);
|
||||
TypeParam m(123, hasher, equal, alloc);
|
||||
EXPECT_EQ(m.hash_function(), hasher);
|
||||
EXPECT_EQ(m.key_eq(), equal);
|
||||
EXPECT_EQ(m.get_allocator(), alloc);
|
||||
EXPECT_TRUE(m.empty());
|
||||
EXPECT_THAT(m, ::testing::UnorderedElementsAre());
|
||||
EXPECT_GE(m.bucket_count(), 123);
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
struct is_std_unordered_map : std::false_type {};
|
||||
|
||||
template <typename... T>
|
||||
struct is_std_unordered_map<std::unordered_map<T...>> : std::true_type {};
|
||||
|
||||
#if defined(UNORDERED_MAP_CXX14) || defined(UNORDERED_MAP_CXX17)
|
||||
using has_cxx14_std_apis = std::true_type;
|
||||
#else
|
||||
using has_cxx14_std_apis = std::false_type;
|
||||
#endif
|
||||
|
||||
template <typename T>
|
||||
using expect_cxx14_apis =
|
||||
absl::disjunction<absl::negation<is_std_unordered_map<T>>,
|
||||
has_cxx14_std_apis>;
|
||||
|
||||
template <typename TypeParam>
|
||||
void BucketCountAllocTest(std::false_type) {}
|
||||
|
||||
template <typename TypeParam>
|
||||
void BucketCountAllocTest(std::true_type) {
|
||||
using A = typename TypeParam::allocator_type;
|
||||
A alloc(0);
|
||||
TypeParam m(123, alloc);
|
||||
EXPECT_EQ(m.get_allocator(), alloc);
|
||||
EXPECT_TRUE(m.empty());
|
||||
EXPECT_THAT(m, ::testing::UnorderedElementsAre());
|
||||
EXPECT_GE(m.bucket_count(), 123);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, BucketCountAlloc) {
|
||||
BucketCountAllocTest<TypeParam>(expect_cxx14_apis<TypeParam>());
|
||||
}
|
||||
|
||||
template <typename TypeParam>
|
||||
void BucketCountHashAllocTest(std::false_type) {}
|
||||
|
||||
template <typename TypeParam>
|
||||
void BucketCountHashAllocTest(std::true_type) {
|
||||
using H = typename TypeParam::hasher;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
H hasher;
|
||||
A alloc(0);
|
||||
TypeParam m(123, hasher, alloc);
|
||||
EXPECT_EQ(m.hash_function(), hasher);
|
||||
EXPECT_EQ(m.get_allocator(), alloc);
|
||||
EXPECT_TRUE(m.empty());
|
||||
EXPECT_THAT(m, ::testing::UnorderedElementsAre());
|
||||
EXPECT_GE(m.bucket_count(), 123);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, BucketCountHashAlloc) {
|
||||
BucketCountHashAllocTest<TypeParam>(expect_cxx14_apis<TypeParam>());
|
||||
}
|
||||
|
||||
#if ABSL_UNORDERED_SUPPORTS_ALLOC_CTORS
|
||||
using has_alloc_std_constructors = std::true_type;
|
||||
#else
|
||||
using has_alloc_std_constructors = std::false_type;
|
||||
#endif
|
||||
|
||||
template <typename T>
|
||||
using expect_alloc_constructors =
|
||||
absl::disjunction<absl::negation<is_std_unordered_map<T>>,
|
||||
has_alloc_std_constructors>;
|
||||
|
||||
template <typename TypeParam>
|
||||
void AllocTest(std::false_type) {}
|
||||
|
||||
template <typename TypeParam>
|
||||
void AllocTest(std::true_type) {
|
||||
using A = typename TypeParam::allocator_type;
|
||||
A alloc(0);
|
||||
TypeParam m(alloc);
|
||||
EXPECT_EQ(m.get_allocator(), alloc);
|
||||
EXPECT_TRUE(m.empty());
|
||||
EXPECT_THAT(m, ::testing::UnorderedElementsAre());
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, Alloc) {
|
||||
AllocTest<TypeParam>(expect_alloc_constructors<TypeParam>());
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, InputIteratorBucketHashEqualAlloc) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
using H = typename TypeParam::hasher;
|
||||
using E = typename TypeParam::key_equal;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
H hasher;
|
||||
E equal;
|
||||
A alloc(0);
|
||||
std::vector<T> values;
|
||||
std::generate_n(std::back_inserter(values), 10,
|
||||
hash_internal::Generator<T>());
|
||||
TypeParam m(values.begin(), values.end(), 123, hasher, equal, alloc);
|
||||
EXPECT_EQ(m.hash_function(), hasher);
|
||||
EXPECT_EQ(m.key_eq(), equal);
|
||||
EXPECT_EQ(m.get_allocator(), alloc);
|
||||
EXPECT_THAT(items(m), ::testing::UnorderedElementsAreArray(values));
|
||||
EXPECT_GE(m.bucket_count(), 123);
|
||||
}
|
||||
|
||||
template <typename TypeParam>
|
||||
void InputIteratorBucketAllocTest(std::false_type) {}
|
||||
|
||||
template <typename TypeParam>
|
||||
void InputIteratorBucketAllocTest(std::true_type) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
A alloc(0);
|
||||
std::vector<T> values;
|
||||
std::generate_n(std::back_inserter(values), 10,
|
||||
hash_internal::Generator<T>());
|
||||
TypeParam m(values.begin(), values.end(), 123, alloc);
|
||||
EXPECT_EQ(m.get_allocator(), alloc);
|
||||
EXPECT_THAT(items(m), ::testing::UnorderedElementsAreArray(values));
|
||||
EXPECT_GE(m.bucket_count(), 123);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, InputIteratorBucketAlloc) {
|
||||
InputIteratorBucketAllocTest<TypeParam>(expect_cxx14_apis<TypeParam>());
|
||||
}
|
||||
|
||||
template <typename TypeParam>
|
||||
void InputIteratorBucketHashAllocTest(std::false_type) {}
|
||||
|
||||
template <typename TypeParam>
|
||||
void InputIteratorBucketHashAllocTest(std::true_type) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
using H = typename TypeParam::hasher;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
H hasher;
|
||||
A alloc(0);
|
||||
std::vector<T> values;
|
||||
std::generate_n(std::back_inserter(values), 10,
|
||||
hash_internal::Generator<T>());
|
||||
TypeParam m(values.begin(), values.end(), 123, hasher, alloc);
|
||||
EXPECT_EQ(m.hash_function(), hasher);
|
||||
EXPECT_EQ(m.get_allocator(), alloc);
|
||||
EXPECT_THAT(items(m), ::testing::UnorderedElementsAreArray(values));
|
||||
EXPECT_GE(m.bucket_count(), 123);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, InputIteratorBucketHashAlloc) {
|
||||
InputIteratorBucketHashAllocTest<TypeParam>(expect_cxx14_apis<TypeParam>());
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, CopyConstructor) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
using H = typename TypeParam::hasher;
|
||||
using E = typename TypeParam::key_equal;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
H hasher;
|
||||
E equal;
|
||||
A alloc(0);
|
||||
TypeParam m(123, hasher, equal, alloc);
|
||||
for (size_t i = 0; i != 10; ++i) m.insert(hash_internal::Generator<T>()());
|
||||
TypeParam n(m);
|
||||
EXPECT_EQ(m.hash_function(), n.hash_function());
|
||||
EXPECT_EQ(m.key_eq(), n.key_eq());
|
||||
EXPECT_EQ(m.get_allocator(), n.get_allocator());
|
||||
EXPECT_EQ(m, n);
|
||||
}
|
||||
|
||||
template <typename TypeParam>
|
||||
void CopyConstructorAllocTest(std::false_type) {}
|
||||
|
||||
template <typename TypeParam>
|
||||
void CopyConstructorAllocTest(std::true_type) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
using H = typename TypeParam::hasher;
|
||||
using E = typename TypeParam::key_equal;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
H hasher;
|
||||
E equal;
|
||||
A alloc(0);
|
||||
TypeParam m(123, hasher, equal, alloc);
|
||||
for (size_t i = 0; i != 10; ++i) m.insert(hash_internal::Generator<T>()());
|
||||
TypeParam n(m, A(11));
|
||||
EXPECT_EQ(m.hash_function(), n.hash_function());
|
||||
EXPECT_EQ(m.key_eq(), n.key_eq());
|
||||
EXPECT_NE(m.get_allocator(), n.get_allocator());
|
||||
EXPECT_EQ(m, n);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, CopyConstructorAlloc) {
|
||||
CopyConstructorAllocTest<TypeParam>(expect_alloc_constructors<TypeParam>());
|
||||
}
|
||||
|
||||
// TODO(alkis): Test non-propagating allocators on copy constructors.
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, MoveConstructor) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
using H = typename TypeParam::hasher;
|
||||
using E = typename TypeParam::key_equal;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
H hasher;
|
||||
E equal;
|
||||
A alloc(0);
|
||||
TypeParam m(123, hasher, equal, alloc);
|
||||
for (size_t i = 0; i != 10; ++i) m.insert(hash_internal::Generator<T>()());
|
||||
TypeParam t(m);
|
||||
TypeParam n(std::move(t));
|
||||
EXPECT_EQ(m.hash_function(), n.hash_function());
|
||||
EXPECT_EQ(m.key_eq(), n.key_eq());
|
||||
EXPECT_EQ(m.get_allocator(), n.get_allocator());
|
||||
EXPECT_EQ(m, n);
|
||||
}
|
||||
|
||||
// No-op overload selected when TypeParam does not provide allocator-extended
// constructors.
template <typename TypeParam>
void MoveConstructorAllocTest(std::false_type) {}

// Exercises the allocator-extended move constructor: elements and functors
// move, but the target must adopt the explicitly supplied allocator.
template <typename TypeParam>
void MoveConstructorAllocTest(std::true_type) {
  using T = hash_internal::GeneratedType<TypeParam>;
  using H = typename TypeParam::hasher;
  using E = typename TypeParam::key_equal;
  using A = typename TypeParam::allocator_type;
  H hasher;
  E equal;
  A alloc(0);
  TypeParam m(123, hasher, equal, alloc);
  for (size_t i = 0; i != 10; ++i) m.insert(hash_internal::Generator<T>()());
  TypeParam t(m);  // keep m intact; move from the copy
  TypeParam n(std::move(t), A(1));
  EXPECT_EQ(m.hash_function(), n.hash_function());
  EXPECT_EQ(m.key_eq(), n.key_eq());
  EXPECT_NE(m.get_allocator(), n.get_allocator());
  EXPECT_EQ(m, n);
}

// Tag-dispatches to one of the overloads above based on whether TypeParam is
// expected to support allocator-extended construction.
TYPED_TEST_P(ConstructorTest, MoveConstructorAlloc) {
  MoveConstructorAllocTest<TypeParam>(expect_alloc_constructors<TypeParam>());
}

// TODO(alkis): Test non-propagating allocators on move constructors.
|
||||
|
||||
// TODO(alkis): Test non-propagating allocators on move constructors.
|
||||
|
||||
// Constructs from an initializer_list with explicit bucket count, hasher,
// key-equal and allocator, and verifies each piece was adopted.
TYPED_TEST_P(ConstructorTest, InitializerListBucketHashEqualAlloc) {
  using T = hash_internal::GeneratedType<TypeParam>;
  hash_internal::Generator<T> gen;
  std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
  using H = typename TypeParam::hasher;
  using E = typename TypeParam::key_equal;
  using A = typename TypeParam::allocator_type;
  H hasher;
  E equal;
  A alloc(0);
  TypeParam m(values, 123, hasher, equal, alloc);
  EXPECT_EQ(m.hash_function(), hasher);
  EXPECT_EQ(m.key_eq(), equal);
  EXPECT_EQ(m.get_allocator(), alloc);
  EXPECT_THAT(items(m), ::testing::UnorderedElementsAreArray(values));
  // The requested bucket count is a lower bound, not an exact value.
  EXPECT_GE(m.bucket_count(), 123);
}
|
||||
|
||||
// No-op overload for containers without the C++14 (list, bucket_count, alloc)
// constructor overload.
template <typename TypeParam>
void InitializerListBucketAllocTest(std::false_type) {}

// Exercises the (initializer_list, bucket_count, allocator) constructor.
template <typename TypeParam>
void InitializerListBucketAllocTest(std::true_type) {
  using T = hash_internal::GeneratedType<TypeParam>;
  using A = typename TypeParam::allocator_type;
  hash_internal::Generator<T> gen;
  std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
  A alloc(0);
  TypeParam m(values, 123, alloc);
  EXPECT_EQ(m.get_allocator(), alloc);
  EXPECT_THAT(items(m), ::testing::UnorderedElementsAreArray(values));
  EXPECT_GE(m.bucket_count(), 123);
}

// Dispatches on whether TypeParam is expected to expose the C++14 overloads.
TYPED_TEST_P(ConstructorTest, InitializerListBucketAlloc) {
  InitializerListBucketAllocTest<TypeParam>(expect_cxx14_apis<TypeParam>());
}
|
||||
|
||||
// No-op overload for containers without the C++14
// (list, bucket_count, hasher, alloc) constructor overload.
template <typename TypeParam>
void InitializerListBucketHashAllocTest(std::false_type) {}

// Exercises the (initializer_list, bucket_count, hasher, allocator)
// constructor.
template <typename TypeParam>
void InitializerListBucketHashAllocTest(std::true_type) {
  using T = hash_internal::GeneratedType<TypeParam>;
  using H = typename TypeParam::hasher;
  using A = typename TypeParam::allocator_type;
  H hasher;
  A alloc(0);
  hash_internal::Generator<T> gen;
  std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
  TypeParam m(values, 123, hasher, alloc);
  EXPECT_EQ(m.hash_function(), hasher);
  EXPECT_EQ(m.get_allocator(), alloc);
  EXPECT_THAT(items(m), ::testing::UnorderedElementsAreArray(values));
  EXPECT_GE(m.bucket_count(), 123);
}

// Dispatches on whether TypeParam is expected to expose the C++14 overloads.
TYPED_TEST_P(ConstructorTest, InitializerListBucketHashAlloc) {
  InitializerListBucketHashAllocTest<TypeParam>(expect_cxx14_apis<TypeParam>());
}
|
||||
|
||||
// Copy assignment: target must end up equal to the source, including the
// hash and equality functors.
TYPED_TEST_P(ConstructorTest, Assignment) {
  using T = hash_internal::GeneratedType<TypeParam>;
  using H = typename TypeParam::hasher;
  using E = typename TypeParam::key_equal;
  using A = typename TypeParam::allocator_type;
  H hasher;
  E equal;
  A alloc(0);
  hash_internal::Generator<T> gen;
  TypeParam m({gen(), gen(), gen()}, 123, hasher, equal, alloc);
  TypeParam n;
  n = m;
  EXPECT_EQ(m.hash_function(), n.hash_function());
  EXPECT_EQ(m.key_eq(), n.key_eq());
  EXPECT_EQ(m, n);
}
|
||||
|
||||
// TODO(alkis): Test [non-]propagating allocators on move/copy assignments
|
||||
// (it depends on traits).
|
||||
|
||||
// Move assignment: target must end up equal to the original source state,
// checked against an untouched copy (m) of the moved-from container.
TYPED_TEST_P(ConstructorTest, MoveAssignment) {
  using T = hash_internal::GeneratedType<TypeParam>;
  using H = typename TypeParam::hasher;
  using E = typename TypeParam::key_equal;
  using A = typename TypeParam::allocator_type;
  H hasher;
  E equal;
  A alloc(0);
  hash_internal::Generator<T> gen;
  TypeParam m({gen(), gen(), gen()}, 123, hasher, equal, alloc);
  TypeParam t(m);  // keep m intact; move from the copy
  TypeParam n;
  n = std::move(t);
  EXPECT_EQ(m.hash_function(), n.hash_function());
  EXPECT_EQ(m.key_eq(), n.key_eq());
  EXPECT_EQ(m, n);
}
|
||||
|
||||
// Assigning an initializer_list to a default-constructed container must
// replace its contents with exactly those elements.
TYPED_TEST_P(ConstructorTest, AssignmentFromInitializerList) {
  using T = hash_internal::GeneratedType<TypeParam>;
  hash_internal::Generator<T> gen;
  std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
  TypeParam m;
  m = values;
  EXPECT_THAT(items(m), ::testing::UnorderedElementsAreArray(values));
}
|
||||
|
||||
// Copy assignment into a non-empty container must discard its prior
// contents, not merge them.
TYPED_TEST_P(ConstructorTest, AssignmentOverwritesExisting) {
  using T = hash_internal::GeneratedType<TypeParam>;
  hash_internal::Generator<T> gen;
  TypeParam m({gen(), gen(), gen()});
  TypeParam n({gen()});
  n = m;
  EXPECT_EQ(m, n);
}
|
||||
|
||||
// Move assignment into a non-empty container must discard its prior
// contents; m is an untouched copy of the moved-from source for comparison.
TYPED_TEST_P(ConstructorTest, MoveAssignmentOverwritesExisting) {
  using T = hash_internal::GeneratedType<TypeParam>;
  hash_internal::Generator<T> gen;
  TypeParam m({gen(), gen(), gen()});
  TypeParam t(m);
  TypeParam n({gen()});
  n = std::move(t);
  EXPECT_EQ(m, n);
}
|
||||
|
||||
// Initializer_list assignment must fully replace any existing contents.
TYPED_TEST_P(ConstructorTest, AssignmentFromInitializerListOverwritesExisting) {
  using T = hash_internal::GeneratedType<TypeParam>;
  hash_internal::Generator<T> gen;
  std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
  TypeParam m;
  m = values;
  EXPECT_THAT(items(m), ::testing::UnorderedElementsAreArray(values));
}
|
||||
|
||||
// Self copy-assignment must leave the container unchanged.
TYPED_TEST_P(ConstructorTest, AssignmentOnSelf) {
  using T = hash_internal::GeneratedType<TypeParam>;
  hash_internal::Generator<T> gen;
  std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
  TypeParam m(values);
  m = *&m;  // Avoid -Wself-assign
  EXPECT_THAT(items(m), ::testing::UnorderedElementsAreArray(values));
}
|
||||
|
||||
// We cannot test self move as standard states that it leaves standard
|
||||
// containers in unspecified state (and in practice in causes memory-leak
|
||||
// according to heap-checker!).
|
||||
|
||||
// Registers every ConstructorTest case above so instantiating TUs
// (e.g. unordered_map_test.cc) can run them for concrete map types. Each
// name listed here must match a TYPED_TEST_P definition exactly.
REGISTER_TYPED_TEST_CASE_P(
    ConstructorTest, NoArgs, BucketCount, BucketCountHash, BucketCountHashEqual,
    BucketCountHashEqualAlloc, BucketCountAlloc, BucketCountHashAlloc, Alloc,
    InputIteratorBucketHashEqualAlloc, InputIteratorBucketAlloc,
    InputIteratorBucketHashAlloc, CopyConstructor, CopyConstructorAlloc,
    MoveConstructor, MoveConstructorAlloc, InitializerListBucketHashEqualAlloc,
    InitializerListBucketAlloc, InitializerListBucketHashAlloc, Assignment,
    MoveAssignment, AssignmentFromInitializerList, AssignmentOverwritesExisting,
    MoveAssignmentOverwritesExisting,
    AssignmentFromInitializerListOverwritesExisting, AssignmentOnSelf);
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_UNORDERED_MAP_CONSTRUCTOR_TEST_H_
|
||||
117
third_party/abseil_cpp/absl/container/internal/unordered_map_lookup_test.h
vendored
Normal file
117
third_party/abseil_cpp/absl/container/internal/unordered_map_lookup_test.h
vendored
Normal file
|
|
@ -0,0 +1,117 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_UNORDERED_MAP_LOOKUP_TEST_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_UNORDERED_MAP_LOOKUP_TEST_H_
|
||||
|
||||
#include "gmock/gmock.h"
|
||||
#include "gtest/gtest.h"
|
||||
#include "absl/container/internal/hash_generator_testing.h"
|
||||
#include "absl/container/internal/hash_policy_testing.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
// Typed-test fixture for lookup operations on unordered-map-like containers;
// UnordMap is the concrete container type supplied at instantiation time.
template <class UnordMap>
class LookupTest : public ::testing::Test {};

TYPED_TEST_SUITE_P(LookupTest);
|
||||
|
||||
// at() must return the mapped value for every key that was inserted.
TYPED_TEST_P(LookupTest, At) {
  using T = hash_internal::GeneratedType<TypeParam>;
  std::vector<T> values;
  std::generate_n(std::back_inserter(values), 10,
                  hash_internal::Generator<T>());
  TypeParam m(values.begin(), values.end());
  for (const auto& p : values) {
    const auto& val = m.at(p.first);
    EXPECT_EQ(p.second, val) << ::testing::PrintToString(p.first);
  }
}
|
||||
|
||||
// operator[] must default-construct the mapped value on first access to a
// missing key, and return a mutable reference that persists assignments.
TYPED_TEST_P(LookupTest, OperatorBracket) {
  using T = hash_internal::GeneratedType<TypeParam>;
  using V = typename TypeParam::mapped_type;
  std::vector<T> values;
  std::generate_n(std::back_inserter(values), 10,
                  hash_internal::Generator<T>());
  TypeParam m;
  for (const auto& p : values) {
    auto& val = m[p.first];
    EXPECT_EQ(V(), val) << ::testing::PrintToString(p.first);  // fresh insert
    val = p.second;
  }
  for (const auto& p : values)
    EXPECT_EQ(p.second, m[p.first]) << ::testing::PrintToString(p.first);
}
|
||||
|
||||
// count() must be 0 for absent keys and 1 after insertion (unique keys).
TYPED_TEST_P(LookupTest, Count) {
  using T = hash_internal::GeneratedType<TypeParam>;
  std::vector<T> values;
  std::generate_n(std::back_inserter(values), 10,
                  hash_internal::Generator<T>());
  TypeParam m;
  for (const auto& p : values)
    EXPECT_EQ(0, m.count(p.first)) << ::testing::PrintToString(p.first);
  m.insert(values.begin(), values.end());
  for (const auto& p : values)
    EXPECT_EQ(1, m.count(p.first)) << ::testing::PrintToString(p.first);
}
|
||||
|
||||
// find() must return end() for absent keys and an iterator to the matching
// element after insertion.
TYPED_TEST_P(LookupTest, Find) {
  using std::get;
  using T = hash_internal::GeneratedType<TypeParam>;
  std::vector<T> values;
  std::generate_n(std::back_inserter(values), 10,
                  hash_internal::Generator<T>());
  TypeParam m;
  for (const auto& p : values)
    EXPECT_TRUE(m.end() == m.find(p.first))
        << ::testing::PrintToString(p.first);
  m.insert(values.begin(), values.end());
  for (const auto& p : values) {
    auto it = m.find(p.first);
    EXPECT_TRUE(m.end() != it) << ::testing::PrintToString(p.first);
    EXPECT_EQ(p.second, get<1>(*it)) << ::testing::PrintToString(p.first);
  }
}
|
||||
|
||||
// equal_range() must yield an empty range for absent keys and a single-element
// range (unique keys) holding the mapped value after insertion.
TYPED_TEST_P(LookupTest, EqualRange) {
  using std::get;
  using T = hash_internal::GeneratedType<TypeParam>;
  std::vector<T> values;
  std::generate_n(std::back_inserter(values), 10,
                  hash_internal::Generator<T>());
  TypeParam m;
  for (const auto& p : values) {
    auto r = m.equal_range(p.first);
    ASSERT_EQ(0, std::distance(r.first, r.second));
  }
  m.insert(values.begin(), values.end());
  for (const auto& p : values) {
    auto r = m.equal_range(p.first);
    ASSERT_EQ(1, std::distance(r.first, r.second));
    EXPECT_EQ(p.second, get<1>(*r.first)) << ::testing::PrintToString(p.first);
  }
}
|
||||
|
||||
// Registers all LookupTest cases for instantiation against concrete types.
REGISTER_TYPED_TEST_CASE_P(LookupTest, At, OperatorBracket, Count, Find,
                           EqualRange);
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_UNORDERED_MAP_LOOKUP_TEST_H_
|
||||
87
third_party/abseil_cpp/absl/container/internal/unordered_map_members_test.h
vendored
Normal file
87
third_party/abseil_cpp/absl/container/internal/unordered_map_members_test.h
vendored
Normal file
|
|
@ -0,0 +1,87 @@
|
|||
// Copyright 2019 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_UNORDERED_MAP_MEMBERS_TEST_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_UNORDERED_MAP_MEMBERS_TEST_H_
|
||||
|
||||
#include <type_traits>
|
||||
#include "gmock/gmock.h"
|
||||
#include "gtest/gtest.h"
|
||||
#include "absl/meta/type_traits.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
// Typed-test fixture checking the member typedefs and basic members of
// unordered-map-like containers.
template <class UnordMap>
class MembersTest : public ::testing::Test {};

TYPED_TEST_SUITE_P(MembersTest);

// Helper that forces instantiation of a type without using it.
template <typename T>
void UseType() {}
|
||||
|
||||
// Verifies the standard-mandated member typedefs: value_type shape,
// signedness/integrality of size_type and difference_type, callability of
// hasher and key_equal, and the allocator-derived pointer/reference aliases.
TYPED_TEST_P(MembersTest, Typedefs) {
  // value_type must be pair<const key_type, mapped_type>.
  EXPECT_TRUE((std::is_same<std::pair<const typename TypeParam::key_type,
                                      typename TypeParam::mapped_type>,
                            typename TypeParam::value_type>()));
  // size_type: unsigned integral.
  EXPECT_TRUE((absl::conjunction<
               absl::negation<std::is_signed<typename TypeParam::size_type>>,
               std::is_integral<typename TypeParam::size_type>>()));
  // difference_type: signed integral.
  EXPECT_TRUE((absl::conjunction<
               std::is_signed<typename TypeParam::difference_type>,
               std::is_integral<typename TypeParam::difference_type>>()));
  // hasher(key) must be convertible to size_t.
  EXPECT_TRUE((std::is_convertible<
               decltype(std::declval<const typename TypeParam::hasher&>()(
                   std::declval<const typename TypeParam::key_type&>())),
               size_t>()));
  // key_equal(key, key) must be convertible to bool.
  EXPECT_TRUE((std::is_convertible<
               decltype(std::declval<const typename TypeParam::key_equal&>()(
                   std::declval<const typename TypeParam::key_type&>(),
                   std::declval<const typename TypeParam::key_type&>())),
               bool>()));
  // The allocator must allocate value_type.
  EXPECT_TRUE((std::is_same<typename TypeParam::allocator_type::value_type,
                            typename TypeParam::value_type>()));
  EXPECT_TRUE((std::is_same<typename TypeParam::value_type&,
                            typename TypeParam::reference>()));
  EXPECT_TRUE((std::is_same<const typename TypeParam::value_type&,
                            typename TypeParam::const_reference>()));
  // pointer/const_pointer come from allocator_traits.
  EXPECT_TRUE((std::is_same<typename std::allocator_traits<
                                typename TypeParam::allocator_type>::pointer,
                            typename TypeParam::pointer>()));
  EXPECT_TRUE(
      (std::is_same<typename std::allocator_traits<
                        typename TypeParam::allocator_type>::const_pointer,
                    typename TypeParam::const_pointer>()));
}
|
||||
|
||||
// max_size() must report a positive capacity even for an empty container.
TYPED_TEST_P(MembersTest, SimpleFunctions) {
  EXPECT_GT(TypeParam().max_size(), 0);
}
|
||||
|
||||
// begin/cbegin and end/cend must agree, and a one-element container must
// have a non-empty iterator range.
TYPED_TEST_P(MembersTest, BeginEnd) {
  TypeParam t = {typename TypeParam::value_type{}};
  EXPECT_EQ(t.begin(), t.cbegin());
  EXPECT_EQ(t.end(), t.cend());
  EXPECT_NE(t.begin(), t.end());
  EXPECT_NE(t.cbegin(), t.cend());
}
|
||||
|
||||
// Registers all MembersTest cases for instantiation against concrete types.
REGISTER_TYPED_TEST_SUITE_P(MembersTest, Typedefs, SimpleFunctions, BeginEnd);
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_UNORDERED_MAP_MEMBERS_TEST_H_
|
||||
316
third_party/abseil_cpp/absl/container/internal/unordered_map_modifiers_test.h
vendored
Normal file
316
third_party/abseil_cpp/absl/container/internal/unordered_map_modifiers_test.h
vendored
Normal file
|
|
@ -0,0 +1,316 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_UNORDERED_MAP_MODIFIERS_TEST_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_UNORDERED_MAP_MODIFIERS_TEST_H_
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "gmock/gmock.h"
|
||||
#include "gtest/gtest.h"
|
||||
#include "absl/container/internal/hash_generator_testing.h"
|
||||
#include "absl/container/internal/hash_policy_testing.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
// Typed-test fixture for mutating operations (insert/emplace/erase/swap) on
// unordered-map-like containers.
template <class UnordMap>
class ModifiersTest : public ::testing::Test {};

TYPED_TEST_SUITE_P(ModifiersTest);
|
||||
|
||||
// clear() must remove every element and leave the container empty.
TYPED_TEST_P(ModifiersTest, Clear) {
  using T = hash_internal::GeneratedType<TypeParam>;
  std::vector<T> values;
  std::generate_n(std::back_inserter(values), 10,
                  hash_internal::Generator<T>());
  TypeParam m(values.begin(), values.end());
  ASSERT_THAT(items(m), ::testing::UnorderedElementsAreArray(values));
  m.clear();
  EXPECT_THAT(items(m), ::testing::UnorderedElementsAre());
  EXPECT_TRUE(m.empty());
}
|
||||
|
||||
// insert(value): first insert succeeds; a second insert with the same key but
// a different mapped value must fail and leave the original element intact.
TYPED_TEST_P(ModifiersTest, Insert) {
  using T = hash_internal::GeneratedType<TypeParam>;
  using V = typename TypeParam::mapped_type;
  T val = hash_internal::Generator<T>()();
  TypeParam m;
  auto p = m.insert(val);
  EXPECT_TRUE(p.second);
  EXPECT_EQ(val, *p.first);
  T val2 = {val.first, hash_internal::Generator<V>()()};
  p = m.insert(val2);
  EXPECT_FALSE(p.second);
  EXPECT_EQ(val, *p.first);  // original value preserved
}
|
||||
|
||||
// Hinted insert: same semantics as insert(value) but returning an iterator;
// a duplicate key must not overwrite the existing element.
TYPED_TEST_P(ModifiersTest, InsertHint) {
  using T = hash_internal::GeneratedType<TypeParam>;
  using V = typename TypeParam::mapped_type;
  T val = hash_internal::Generator<T>()();
  TypeParam m;
  auto it = m.insert(m.end(), val);
  EXPECT_TRUE(it != m.end());
  EXPECT_EQ(val, *it);
  T val2 = {val.first, hash_internal::Generator<V>()()};
  it = m.insert(it, val2);
  EXPECT_TRUE(it != m.end());
  EXPECT_EQ(val, *it);  // original value preserved
}
|
||||
|
||||
// Range insert must add every element of the iterator range.
TYPED_TEST_P(ModifiersTest, InsertRange) {
  using T = hash_internal::GeneratedType<TypeParam>;
  std::vector<T> values;
  std::generate_n(std::back_inserter(values), 10,
                  hash_internal::Generator<T>());
  TypeParam m;
  m.insert(values.begin(), values.end());
  ASSERT_THAT(items(m), ::testing::UnorderedElementsAreArray(values));
}
|
||||
|
||||
// insert_or_assign: inserts when the key is new, otherwise overwrites the
// mapped value. Guarded because the API is C++17-only.
TYPED_TEST_P(ModifiersTest, InsertOrAssign) {
#ifdef UNORDERED_MAP_CXX17
  using std::get;
  using K = typename TypeParam::key_type;
  using V = typename TypeParam::mapped_type;
  K k = hash_internal::Generator<K>()();
  V val = hash_internal::Generator<V>()();
  TypeParam m;
  auto p = m.insert_or_assign(k, val);
  EXPECT_TRUE(p.second);
  EXPECT_EQ(k, get<0>(*p.first));
  EXPECT_EQ(val, get<1>(*p.first));
  V val2 = hash_internal::Generator<V>()();
  p = m.insert_or_assign(k, val2);
  EXPECT_FALSE(p.second);           // key already present
  EXPECT_EQ(k, get<0>(*p.first));
  EXPECT_EQ(val2, get<1>(*p.first));  // value overwritten
#endif
}
|
||||
|
||||
// Hinted insert_or_assign: same overwrite-on-duplicate semantics via the
// iterator-returning overload. Guarded because the API is C++17-only.
TYPED_TEST_P(ModifiersTest, InsertOrAssignHint) {
#ifdef UNORDERED_MAP_CXX17
  using std::get;
  using K = typename TypeParam::key_type;
  using V = typename TypeParam::mapped_type;
  K k = hash_internal::Generator<K>()();
  V val = hash_internal::Generator<V>()();
  TypeParam m;
  auto it = m.insert_or_assign(m.end(), k, val);
  EXPECT_TRUE(it != m.end());
  EXPECT_EQ(k, get<0>(*it));
  EXPECT_EQ(val, get<1>(*it));
  V val2 = hash_internal::Generator<V>()();
  it = m.insert_or_assign(it, k, val2);
  EXPECT_EQ(k, get<0>(*it));
  EXPECT_EQ(val2, get<1>(*it));  // value overwritten
#endif
}
|
||||
|
||||
// emplace: constructs in place; a duplicate key must fail and preserve the
// existing element.
TYPED_TEST_P(ModifiersTest, Emplace) {
  using T = hash_internal::GeneratedType<TypeParam>;
  using V = typename TypeParam::mapped_type;
  T val = hash_internal::Generator<T>()();
  TypeParam m;
  // TODO(alkis): We need a way to run emplace in a more meaningful way. Perhaps
  // with test traits/policy.
  auto p = m.emplace(val);
  EXPECT_TRUE(p.second);
  EXPECT_EQ(val, *p.first);
  T val2 = {val.first, hash_internal::Generator<V>()()};
  p = m.emplace(val2);
  EXPECT_FALSE(p.second);
  EXPECT_EQ(val, *p.first);  // original value preserved
}
|
||||
|
||||
// emplace_hint: iterator-returning emplace; duplicate keys must not
// overwrite the existing element.
TYPED_TEST_P(ModifiersTest, EmplaceHint) {
  using T = hash_internal::GeneratedType<TypeParam>;
  using V = typename TypeParam::mapped_type;
  T val = hash_internal::Generator<T>()();
  TypeParam m;
  // TODO(alkis): We need a way to run emplace in a more meaningful way. Perhaps
  // with test traits/policy.
  auto it = m.emplace_hint(m.end(), val);
  EXPECT_EQ(val, *it);
  T val2 = {val.first, hash_internal::Generator<V>()()};
  it = m.emplace_hint(it, val2);
  EXPECT_EQ(val, *it);  // original value preserved
}
|
||||
|
||||
// try_emplace: constructs the mapped value only when the key is absent;
// a duplicate key must fail and preserve the existing element. C++17-only.
TYPED_TEST_P(ModifiersTest, TryEmplace) {
#ifdef UNORDERED_MAP_CXX17
  using T = hash_internal::GeneratedType<TypeParam>;
  using V = typename TypeParam::mapped_type;
  T val = hash_internal::Generator<T>()();
  TypeParam m;
  // TODO(alkis): We need a way to run emplace in a more meaningful way. Perhaps
  // with test traits/policy.
  auto p = m.try_emplace(val.first, val.second);
  EXPECT_TRUE(p.second);
  EXPECT_EQ(val, *p.first);
  T val2 = {val.first, hash_internal::Generator<V>()()};
  p = m.try_emplace(val2.first, val2.second);
  EXPECT_FALSE(p.second);
  EXPECT_EQ(val, *p.first);  // original value preserved
#endif
}
|
||||
|
||||
// Hinted try_emplace: iterator-returning variant of the above. C++17-only.
TYPED_TEST_P(ModifiersTest, TryEmplaceHint) {
#ifdef UNORDERED_MAP_CXX17
  using T = hash_internal::GeneratedType<TypeParam>;
  using V = typename TypeParam::mapped_type;
  T val = hash_internal::Generator<T>()();
  TypeParam m;
  // TODO(alkis): We need a way to run emplace in a more meaningful way. Perhaps
  // with test traits/policy.
  auto it = m.try_emplace(m.end(), val.first, val.second);
  EXPECT_EQ(val, *it);
  T val2 = {val.first, hash_internal::Generator<V>()()};
  it = m.try_emplace(it, val2.first, val2.second);
  EXPECT_EQ(val, *it);  // original value preserved
#endif
}
|
||||
|
||||
// SFINAE helper: V only when V is not void (used to detect whether erase()
// returns an iterator).
template <class V>
using IfNotVoid = typename std::enable_if<!std::is_void<V>::value, V>::type;

// In openmap we chose not to return the iterator from erase because that's
// more expensive. As such we adapt erase to return an iterator here.
struct EraseFirst {
  // Preferred overload: erase() itself returns the next iterator.
  template <class Map>
  auto operator()(Map* m, int) const
      -> IfNotVoid<decltype(m->erase(m->begin()))> {
    return m->erase(m->begin());
  }
  // Fallback for containers whose erase() returns void: advance the iterator
  // before erasing so we can hand back the successor ourselves.
  template <class Map>
  typename Map::iterator operator()(Map* m, ...) const {
    auto it = m->begin();
    m->erase(it++);
    return it;
  }
};
|
||||
|
||||
// Erasing one element (via EraseFirst, which normalizes the erase() return
// type) must remove exactly that element and return a valid successor.
TYPED_TEST_P(ModifiersTest, Erase) {
  using T = hash_internal::GeneratedType<TypeParam>;
  using std::get;
  std::vector<T> values;
  std::generate_n(std::back_inserter(values), 10,
                  hash_internal::Generator<T>());
  TypeParam m(values.begin(), values.end());
  ASSERT_THAT(items(m), ::testing::UnorderedElementsAreArray(values));
  auto& first = *m.begin();
  // values2 = values minus the element about to be erased.
  std::vector<T> values2;
  for (const auto& val : values)
    if (get<0>(val) != get<0>(first)) values2.push_back(val);
  auto it = EraseFirst()(&m, 0);
  ASSERT_TRUE(it != m.end());
  EXPECT_EQ(1, std::count(values2.begin(), values2.end(), *it));
  EXPECT_THAT(items(m), ::testing::UnorderedElementsAreArray(values2.begin(),
                                                             values2.end()));
}
|
||||
|
||||
// Erasing [begin, end) must empty the container and return end().
TYPED_TEST_P(ModifiersTest, EraseRange) {
  using T = hash_internal::GeneratedType<TypeParam>;
  std::vector<T> values;
  std::generate_n(std::back_inserter(values), 10,
                  hash_internal::Generator<T>());
  TypeParam m(values.begin(), values.end());
  ASSERT_THAT(items(m), ::testing::UnorderedElementsAreArray(values));
  auto it = m.erase(m.begin(), m.end());
  EXPECT_THAT(items(m), ::testing::UnorderedElementsAre());
  EXPECT_TRUE(it == m.end());
}
|
||||
|
||||
// erase(key) must remove exactly the one matching element (unique keys) and
// report the number erased.
TYPED_TEST_P(ModifiersTest, EraseKey) {
  using T = hash_internal::GeneratedType<TypeParam>;
  std::vector<T> values;
  std::generate_n(std::back_inserter(values), 10,
                  hash_internal::Generator<T>());
  TypeParam m(values.begin(), values.end());
  ASSERT_THAT(items(m), ::testing::UnorderedElementsAreArray(values));
  EXPECT_EQ(1, m.erase(values[0].first));
  EXPECT_EQ(0, std::count(m.begin(), m.end(), values[0]));
  EXPECT_THAT(items(m), ::testing::UnorderedElementsAreArray(values.begin() + 1,
                                                             values.end()));
}
|
||||
|
||||
// swap() must exchange the full contents of the two containers.
TYPED_TEST_P(ModifiersTest, Swap) {
  using T = hash_internal::GeneratedType<TypeParam>;
  std::vector<T> v1;
  std::vector<T> v2;
  std::generate_n(std::back_inserter(v1), 5, hash_internal::Generator<T>());
  std::generate_n(std::back_inserter(v2), 5, hash_internal::Generator<T>());
  TypeParam m1(v1.begin(), v1.end());
  TypeParam m2(v2.begin(), v2.end());
  EXPECT_THAT(items(m1), ::testing::UnorderedElementsAreArray(v1));
  EXPECT_THAT(items(m2), ::testing::UnorderedElementsAreArray(v2));
  m1.swap(m2);
  EXPECT_THAT(items(m1), ::testing::UnorderedElementsAreArray(v2));
  EXPECT_THAT(items(m2), ::testing::UnorderedElementsAreArray(v1));
}
|
||||
|
||||
// TODO(alkis): Write tests for extract.
|
||||
// TODO(alkis): Write tests for merge.
|
||||
|
||||
// Registers all ModifiersTest cases for instantiation against concrete types.
REGISTER_TYPED_TEST_CASE_P(ModifiersTest, Clear, Insert, InsertHint,
                           InsertRange, InsertOrAssign, InsertOrAssignHint,
                           Emplace, EmplaceHint, TryEmplace, TryEmplaceHint,
                           Erase, EraseRange, EraseKey, Swap);
|
||||
|
||||
// Trait detecting std::unique_ptr, used to enforce the fixture's value-type
// requirement at compile time.
template <typename Type>
struct is_unique_ptr : std::false_type {};

template <typename Type>
struct is_unique_ptr<std::unique_ptr<Type>> : std::true_type {};

// Fixture for move-only mapped types; only maps with std::unique_ptr values
// may be instantiated with it.
template <class UnordMap>
class UniquePtrModifiersTest : public ::testing::Test {
 protected:
  UniquePtrModifiersTest() {
    static_assert(is_unique_ptr<typename UnordMap::mapped_type>::value,
                  "UniquePtrModifiersTyest may only be called with a "
                  "std::unique_ptr value type.");
  }
};

TYPED_TEST_SUITE_P(UniquePtrModifiersTest);
|
||||
|
||||
// Test that we do not move from rvalue arguments if an insertion does not
|
||||
// happen.
|
||||
// Test that we do not move from rvalue arguments if an insertion does not
// happen.
TYPED_TEST_P(UniquePtrModifiersTest, TryEmplace) {
#ifdef UNORDERED_MAP_CXX17
  using T = hash_internal::GeneratedType<TypeParam>;
  using V = typename TypeParam::mapped_type;
  T val = hash_internal::Generator<T>()();
  TypeParam m;
  auto p = m.try_emplace(val.first, std::move(val.second));
  EXPECT_TRUE(p.second);
  // A moved from std::unique_ptr is guaranteed to be nullptr.
  EXPECT_EQ(val.second, nullptr);
  T val2 = {val.first, hash_internal::Generator<V>()()};
  p = m.try_emplace(val2.first, std::move(val2.second));
  EXPECT_FALSE(p.second);
  // The failed insertion must NOT have consumed the rvalue argument.
  EXPECT_NE(val2.second, nullptr);
#endif
}
|
||||
|
||||
// Registers the single move-only-value modifier test for instantiation.
REGISTER_TYPED_TEST_SUITE_P(UniquePtrModifiersTest, TryEmplace);
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_UNORDERED_MAP_MODIFIERS_TEST_H_
|
||||
50
third_party/abseil_cpp/absl/container/internal/unordered_map_test.cc
vendored
Normal file
50
third_party/abseil_cpp/absl/container/internal/unordered_map_test.cc
vendored
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include <memory>
|
||||
#include <unordered_map>
|
||||
|
||||
#include "absl/container/internal/unordered_map_constructor_test.h"
|
||||
#include "absl/container/internal/unordered_map_lookup_test.h"
|
||||
#include "absl/container/internal/unordered_map_members_test.h"
|
||||
#include "absl/container/internal/unordered_map_modifiers_test.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
namespace {
|
||||
|
||||
// Concrete std::unordered_map instantiations (stateful test hash/equal and a
// tracking allocator) used to run the shared typed-test suites against the
// standard container as a conformance baseline.
using MapTypes = ::testing::Types<
    std::unordered_map<int, int, StatefulTestingHash, StatefulTestingEqual,
                       Alloc<std::pair<const int, int>>>,
    std::unordered_map<std::string, std::string, StatefulTestingHash,
                       StatefulTestingEqual,
                       Alloc<std::pair<const std::string, std::string>>>>;

INSTANTIATE_TYPED_TEST_SUITE_P(UnorderedMap, ConstructorTest, MapTypes);
INSTANTIATE_TYPED_TEST_SUITE_P(UnorderedMap, LookupTest, MapTypes);
INSTANTIATE_TYPED_TEST_SUITE_P(UnorderedMap, MembersTest, MapTypes);
INSTANTIATE_TYPED_TEST_SUITE_P(UnorderedMap, ModifiersTest, MapTypes);

// Separate type list for the move-only mapped-type suite.
using UniquePtrMapTypes = ::testing::Types<std::unordered_map<
    int, std::unique_ptr<int>, StatefulTestingHash, StatefulTestingEqual,
    Alloc<std::pair<const int, std::unique_ptr<int>>>>>;

INSTANTIATE_TYPED_TEST_SUITE_P(UnorderedMap, UniquePtrModifiersTest,
                               UniquePtrMapTypes);
|
||||
|
||||
} // namespace
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
496
third_party/abseil_cpp/absl/container/internal/unordered_set_constructor_test.h
vendored
Normal file
496
third_party/abseil_cpp/absl/container/internal/unordered_set_constructor_test.h
vendored
Normal file
|
|
@ -0,0 +1,496 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_UNORDERED_SET_CONSTRUCTOR_TEST_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_UNORDERED_SET_CONSTRUCTOR_TEST_H_
|
||||
|
||||
#include <algorithm>
|
||||
#include <unordered_set>
|
||||
#include <vector>
|
||||
|
||||
#include "gmock/gmock.h"
|
||||
#include "gtest/gtest.h"
|
||||
#include "absl/container/internal/hash_generator_testing.h"
|
||||
#include "absl/container/internal/hash_policy_testing.h"
|
||||
#include "absl/meta/type_traits.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
template <class UnordMap>
|
||||
class ConstructorTest : public ::testing::Test {};
|
||||
|
||||
TYPED_TEST_SUITE_P(ConstructorTest);
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, NoArgs) {
|
||||
TypeParam m;
|
||||
EXPECT_TRUE(m.empty());
|
||||
EXPECT_THAT(keys(m), ::testing::UnorderedElementsAre());
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, BucketCount) {
|
||||
TypeParam m(123);
|
||||
EXPECT_TRUE(m.empty());
|
||||
EXPECT_THAT(keys(m), ::testing::UnorderedElementsAre());
|
||||
EXPECT_GE(m.bucket_count(), 123);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, BucketCountHash) {
|
||||
using H = typename TypeParam::hasher;
|
||||
H hasher;
|
||||
TypeParam m(123, hasher);
|
||||
EXPECT_EQ(m.hash_function(), hasher);
|
||||
EXPECT_TRUE(m.empty());
|
||||
EXPECT_THAT(keys(m), ::testing::UnorderedElementsAre());
|
||||
EXPECT_GE(m.bucket_count(), 123);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, BucketCountHashEqual) {
|
||||
using H = typename TypeParam::hasher;
|
||||
using E = typename TypeParam::key_equal;
|
||||
H hasher;
|
||||
E equal;
|
||||
TypeParam m(123, hasher, equal);
|
||||
EXPECT_EQ(m.hash_function(), hasher);
|
||||
EXPECT_EQ(m.key_eq(), equal);
|
||||
EXPECT_TRUE(m.empty());
|
||||
EXPECT_THAT(keys(m), ::testing::UnorderedElementsAre());
|
||||
EXPECT_GE(m.bucket_count(), 123);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, BucketCountHashEqualAlloc) {
|
||||
using H = typename TypeParam::hasher;
|
||||
using E = typename TypeParam::key_equal;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
H hasher;
|
||||
E equal;
|
||||
A alloc(0);
|
||||
TypeParam m(123, hasher, equal, alloc);
|
||||
EXPECT_EQ(m.hash_function(), hasher);
|
||||
EXPECT_EQ(m.key_eq(), equal);
|
||||
EXPECT_EQ(m.get_allocator(), alloc);
|
||||
EXPECT_TRUE(m.empty());
|
||||
EXPECT_THAT(keys(m), ::testing::UnorderedElementsAre());
|
||||
EXPECT_GE(m.bucket_count(), 123);
|
||||
|
||||
const auto& cm = m;
|
||||
EXPECT_EQ(cm.hash_function(), hasher);
|
||||
EXPECT_EQ(cm.key_eq(), equal);
|
||||
EXPECT_EQ(cm.get_allocator(), alloc);
|
||||
EXPECT_TRUE(cm.empty());
|
||||
EXPECT_THAT(keys(cm), ::testing::UnorderedElementsAre());
|
||||
EXPECT_GE(cm.bucket_count(), 123);
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
struct is_std_unordered_set : std::false_type {};
|
||||
|
||||
template <typename... T>
|
||||
struct is_std_unordered_set<std::unordered_set<T...>> : std::true_type {};
|
||||
|
||||
#if defined(UNORDERED_SET_CXX14) || defined(UNORDERED_SET_CXX17)
|
||||
using has_cxx14_std_apis = std::true_type;
|
||||
#else
|
||||
using has_cxx14_std_apis = std::false_type;
|
||||
#endif
|
||||
|
||||
template <typename T>
|
||||
using expect_cxx14_apis =
|
||||
absl::disjunction<absl::negation<is_std_unordered_set<T>>,
|
||||
has_cxx14_std_apis>;
|
||||
|
||||
template <typename TypeParam>
|
||||
void BucketCountAllocTest(std::false_type) {}
|
||||
|
||||
template <typename TypeParam>
|
||||
void BucketCountAllocTest(std::true_type) {
|
||||
using A = typename TypeParam::allocator_type;
|
||||
A alloc(0);
|
||||
TypeParam m(123, alloc);
|
||||
EXPECT_EQ(m.get_allocator(), alloc);
|
||||
EXPECT_TRUE(m.empty());
|
||||
EXPECT_THAT(keys(m), ::testing::UnorderedElementsAre());
|
||||
EXPECT_GE(m.bucket_count(), 123);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, BucketCountAlloc) {
|
||||
BucketCountAllocTest<TypeParam>(expect_cxx14_apis<TypeParam>());
|
||||
}
|
||||
|
||||
template <typename TypeParam>
|
||||
void BucketCountHashAllocTest(std::false_type) {}
|
||||
|
||||
template <typename TypeParam>
|
||||
void BucketCountHashAllocTest(std::true_type) {
|
||||
using H = typename TypeParam::hasher;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
H hasher;
|
||||
A alloc(0);
|
||||
TypeParam m(123, hasher, alloc);
|
||||
EXPECT_EQ(m.hash_function(), hasher);
|
||||
EXPECT_EQ(m.get_allocator(), alloc);
|
||||
EXPECT_TRUE(m.empty());
|
||||
EXPECT_THAT(keys(m), ::testing::UnorderedElementsAre());
|
||||
EXPECT_GE(m.bucket_count(), 123);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, BucketCountHashAlloc) {
|
||||
BucketCountHashAllocTest<TypeParam>(expect_cxx14_apis<TypeParam>());
|
||||
}
|
||||
|
||||
#if ABSL_UNORDERED_SUPPORTS_ALLOC_CTORS
|
||||
using has_alloc_std_constructors = std::true_type;
|
||||
#else
|
||||
using has_alloc_std_constructors = std::false_type;
|
||||
#endif
|
||||
|
||||
template <typename T>
|
||||
using expect_alloc_constructors =
|
||||
absl::disjunction<absl::negation<is_std_unordered_set<T>>,
|
||||
has_alloc_std_constructors>;
|
||||
|
||||
template <typename TypeParam>
|
||||
void AllocTest(std::false_type) {}
|
||||
|
||||
template <typename TypeParam>
|
||||
void AllocTest(std::true_type) {
|
||||
using A = typename TypeParam::allocator_type;
|
||||
A alloc(0);
|
||||
TypeParam m(alloc);
|
||||
EXPECT_EQ(m.get_allocator(), alloc);
|
||||
EXPECT_TRUE(m.empty());
|
||||
EXPECT_THAT(keys(m), ::testing::UnorderedElementsAre());
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, Alloc) {
|
||||
AllocTest<TypeParam>(expect_alloc_constructors<TypeParam>());
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, InputIteratorBucketHashEqualAlloc) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
using H = typename TypeParam::hasher;
|
||||
using E = typename TypeParam::key_equal;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
H hasher;
|
||||
E equal;
|
||||
A alloc(0);
|
||||
std::vector<T> values;
|
||||
for (size_t i = 0; i != 10; ++i)
|
||||
values.push_back(hash_internal::Generator<T>()());
|
||||
TypeParam m(values.begin(), values.end(), 123, hasher, equal, alloc);
|
||||
EXPECT_EQ(m.hash_function(), hasher);
|
||||
EXPECT_EQ(m.key_eq(), equal);
|
||||
EXPECT_EQ(m.get_allocator(), alloc);
|
||||
EXPECT_THAT(keys(m), ::testing::UnorderedElementsAreArray(values));
|
||||
EXPECT_GE(m.bucket_count(), 123);
|
||||
}
|
||||
|
||||
template <typename TypeParam>
|
||||
void InputIteratorBucketAllocTest(std::false_type) {}
|
||||
|
||||
template <typename TypeParam>
|
||||
void InputIteratorBucketAllocTest(std::true_type) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
A alloc(0);
|
||||
std::vector<T> values;
|
||||
for (size_t i = 0; i != 10; ++i)
|
||||
values.push_back(hash_internal::Generator<T>()());
|
||||
TypeParam m(values.begin(), values.end(), 123, alloc);
|
||||
EXPECT_EQ(m.get_allocator(), alloc);
|
||||
EXPECT_THAT(keys(m), ::testing::UnorderedElementsAreArray(values));
|
||||
EXPECT_GE(m.bucket_count(), 123);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, InputIteratorBucketAlloc) {
|
||||
InputIteratorBucketAllocTest<TypeParam>(expect_cxx14_apis<TypeParam>());
|
||||
}
|
||||
|
||||
template <typename TypeParam>
|
||||
void InputIteratorBucketHashAllocTest(std::false_type) {}
|
||||
|
||||
template <typename TypeParam>
|
||||
void InputIteratorBucketHashAllocTest(std::true_type) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
using H = typename TypeParam::hasher;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
H hasher;
|
||||
A alloc(0);
|
||||
std::vector<T> values;
|
||||
for (size_t i = 0; i != 10; ++i)
|
||||
values.push_back(hash_internal::Generator<T>()());
|
||||
TypeParam m(values.begin(), values.end(), 123, hasher, alloc);
|
||||
EXPECT_EQ(m.hash_function(), hasher);
|
||||
EXPECT_EQ(m.get_allocator(), alloc);
|
||||
EXPECT_THAT(keys(m), ::testing::UnorderedElementsAreArray(values));
|
||||
EXPECT_GE(m.bucket_count(), 123);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, InputIteratorBucketHashAlloc) {
|
||||
InputIteratorBucketHashAllocTest<TypeParam>(expect_cxx14_apis<TypeParam>());
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, CopyConstructor) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
using H = typename TypeParam::hasher;
|
||||
using E = typename TypeParam::key_equal;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
H hasher;
|
||||
E equal;
|
||||
A alloc(0);
|
||||
TypeParam m(123, hasher, equal, alloc);
|
||||
for (size_t i = 0; i != 10; ++i) m.insert(hash_internal::Generator<T>()());
|
||||
TypeParam n(m);
|
||||
EXPECT_EQ(m.hash_function(), n.hash_function());
|
||||
EXPECT_EQ(m.key_eq(), n.key_eq());
|
||||
EXPECT_EQ(m.get_allocator(), n.get_allocator());
|
||||
EXPECT_EQ(m, n);
|
||||
EXPECT_NE(TypeParam(0, hasher, equal, alloc), n);
|
||||
}
|
||||
|
||||
template <typename TypeParam>
|
||||
void CopyConstructorAllocTest(std::false_type) {}
|
||||
|
||||
template <typename TypeParam>
|
||||
void CopyConstructorAllocTest(std::true_type) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
using H = typename TypeParam::hasher;
|
||||
using E = typename TypeParam::key_equal;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
H hasher;
|
||||
E equal;
|
||||
A alloc(0);
|
||||
TypeParam m(123, hasher, equal, alloc);
|
||||
for (size_t i = 0; i != 10; ++i) m.insert(hash_internal::Generator<T>()());
|
||||
TypeParam n(m, A(11));
|
||||
EXPECT_EQ(m.hash_function(), n.hash_function());
|
||||
EXPECT_EQ(m.key_eq(), n.key_eq());
|
||||
EXPECT_NE(m.get_allocator(), n.get_allocator());
|
||||
EXPECT_EQ(m, n);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, CopyConstructorAlloc) {
|
||||
CopyConstructorAllocTest<TypeParam>(expect_alloc_constructors<TypeParam>());
|
||||
}
|
||||
|
||||
// TODO(alkis): Test non-propagating allocators on copy constructors.
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, MoveConstructor) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
using H = typename TypeParam::hasher;
|
||||
using E = typename TypeParam::key_equal;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
H hasher;
|
||||
E equal;
|
||||
A alloc(0);
|
||||
TypeParam m(123, hasher, equal, alloc);
|
||||
for (size_t i = 0; i != 10; ++i) m.insert(hash_internal::Generator<T>()());
|
||||
TypeParam t(m);
|
||||
TypeParam n(std::move(t));
|
||||
EXPECT_EQ(m.hash_function(), n.hash_function());
|
||||
EXPECT_EQ(m.key_eq(), n.key_eq());
|
||||
EXPECT_EQ(m.get_allocator(), n.get_allocator());
|
||||
EXPECT_EQ(m, n);
|
||||
}
|
||||
|
||||
template <typename TypeParam>
|
||||
void MoveConstructorAllocTest(std::false_type) {}
|
||||
|
||||
template <typename TypeParam>
|
||||
void MoveConstructorAllocTest(std::true_type) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
using H = typename TypeParam::hasher;
|
||||
using E = typename TypeParam::key_equal;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
H hasher;
|
||||
E equal;
|
||||
A alloc(0);
|
||||
TypeParam m(123, hasher, equal, alloc);
|
||||
for (size_t i = 0; i != 10; ++i) m.insert(hash_internal::Generator<T>()());
|
||||
TypeParam t(m);
|
||||
TypeParam n(std::move(t), A(1));
|
||||
EXPECT_EQ(m.hash_function(), n.hash_function());
|
||||
EXPECT_EQ(m.key_eq(), n.key_eq());
|
||||
EXPECT_NE(m.get_allocator(), n.get_allocator());
|
||||
EXPECT_EQ(m, n);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, MoveConstructorAlloc) {
|
||||
MoveConstructorAllocTest<TypeParam>(expect_alloc_constructors<TypeParam>());
|
||||
}
|
||||
|
||||
// TODO(alkis): Test non-propagating allocators on move constructors.
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, InitializerListBucketHashEqualAlloc) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
hash_internal::Generator<T> gen;
|
||||
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
|
||||
using H = typename TypeParam::hasher;
|
||||
using E = typename TypeParam::key_equal;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
H hasher;
|
||||
E equal;
|
||||
A alloc(0);
|
||||
TypeParam m(values, 123, hasher, equal, alloc);
|
||||
EXPECT_EQ(m.hash_function(), hasher);
|
||||
EXPECT_EQ(m.key_eq(), equal);
|
||||
EXPECT_EQ(m.get_allocator(), alloc);
|
||||
EXPECT_THAT(keys(m), ::testing::UnorderedElementsAreArray(values));
|
||||
EXPECT_GE(m.bucket_count(), 123);
|
||||
}
|
||||
|
||||
template <typename TypeParam>
|
||||
void InitializerListBucketAllocTest(std::false_type) {}
|
||||
|
||||
template <typename TypeParam>
|
||||
void InitializerListBucketAllocTest(std::true_type) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
hash_internal::Generator<T> gen;
|
||||
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
|
||||
A alloc(0);
|
||||
TypeParam m(values, 123, alloc);
|
||||
EXPECT_EQ(m.get_allocator(), alloc);
|
||||
EXPECT_THAT(keys(m), ::testing::UnorderedElementsAreArray(values));
|
||||
EXPECT_GE(m.bucket_count(), 123);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, InitializerListBucketAlloc) {
|
||||
InitializerListBucketAllocTest<TypeParam>(expect_cxx14_apis<TypeParam>());
|
||||
}
|
||||
|
||||
template <typename TypeParam>
|
||||
void InitializerListBucketHashAllocTest(std::false_type) {}
|
||||
|
||||
template <typename TypeParam>
|
||||
void InitializerListBucketHashAllocTest(std::true_type) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
using H = typename TypeParam::hasher;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
H hasher;
|
||||
A alloc(0);
|
||||
hash_internal::Generator<T> gen;
|
||||
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
|
||||
TypeParam m(values, 123, hasher, alloc);
|
||||
EXPECT_EQ(m.hash_function(), hasher);
|
||||
EXPECT_EQ(m.get_allocator(), alloc);
|
||||
EXPECT_THAT(keys(m), ::testing::UnorderedElementsAreArray(values));
|
||||
EXPECT_GE(m.bucket_count(), 123);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, InitializerListBucketHashAlloc) {
|
||||
InitializerListBucketHashAllocTest<TypeParam>(expect_cxx14_apis<TypeParam>());
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, CopyAssignment) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
using H = typename TypeParam::hasher;
|
||||
using E = typename TypeParam::key_equal;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
H hasher;
|
||||
E equal;
|
||||
A alloc(0);
|
||||
hash_internal::Generator<T> gen;
|
||||
TypeParam m({gen(), gen(), gen()}, 123, hasher, equal, alloc);
|
||||
TypeParam n;
|
||||
n = m;
|
||||
EXPECT_EQ(m.hash_function(), n.hash_function());
|
||||
EXPECT_EQ(m.key_eq(), n.key_eq());
|
||||
EXPECT_EQ(m, n);
|
||||
}
|
||||
|
||||
// TODO(alkis): Test [non-]propagating allocators on move/copy assignments
|
||||
// (it depends on traits).
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, MoveAssignment) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
using H = typename TypeParam::hasher;
|
||||
using E = typename TypeParam::key_equal;
|
||||
using A = typename TypeParam::allocator_type;
|
||||
H hasher;
|
||||
E equal;
|
||||
A alloc(0);
|
||||
hash_internal::Generator<T> gen;
|
||||
TypeParam m({gen(), gen(), gen()}, 123, hasher, equal, alloc);
|
||||
TypeParam t(m);
|
||||
TypeParam n;
|
||||
n = std::move(t);
|
||||
EXPECT_EQ(m.hash_function(), n.hash_function());
|
||||
EXPECT_EQ(m.key_eq(), n.key_eq());
|
||||
EXPECT_EQ(m, n);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, AssignmentFromInitializerList) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
hash_internal::Generator<T> gen;
|
||||
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
|
||||
TypeParam m;
|
||||
m = values;
|
||||
EXPECT_THAT(keys(m), ::testing::UnorderedElementsAreArray(values));
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, AssignmentOverwritesExisting) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
hash_internal::Generator<T> gen;
|
||||
TypeParam m({gen(), gen(), gen()});
|
||||
TypeParam n({gen()});
|
||||
n = m;
|
||||
EXPECT_EQ(m, n);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, MoveAssignmentOverwritesExisting) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
hash_internal::Generator<T> gen;
|
||||
TypeParam m({gen(), gen(), gen()});
|
||||
TypeParam t(m);
|
||||
TypeParam n({gen()});
|
||||
n = std::move(t);
|
||||
EXPECT_EQ(m, n);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, AssignmentFromInitializerListOverwritesExisting) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
hash_internal::Generator<T> gen;
|
||||
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
|
||||
TypeParam m;
|
||||
m = values;
|
||||
EXPECT_THAT(keys(m), ::testing::UnorderedElementsAreArray(values));
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ConstructorTest, AssignmentOnSelf) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
hash_internal::Generator<T> gen;
|
||||
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
|
||||
TypeParam m(values);
|
||||
m = *&m; // Avoid -Wself-assign.
|
||||
EXPECT_THAT(keys(m), ::testing::UnorderedElementsAreArray(values));
|
||||
}
|
||||
|
||||
REGISTER_TYPED_TEST_CASE_P(
|
||||
ConstructorTest, NoArgs, BucketCount, BucketCountHash, BucketCountHashEqual,
|
||||
BucketCountHashEqualAlloc, BucketCountAlloc, BucketCountHashAlloc, Alloc,
|
||||
InputIteratorBucketHashEqualAlloc, InputIteratorBucketAlloc,
|
||||
InputIteratorBucketHashAlloc, CopyConstructor, CopyConstructorAlloc,
|
||||
MoveConstructor, MoveConstructorAlloc, InitializerListBucketHashEqualAlloc,
|
||||
InitializerListBucketAlloc, InitializerListBucketHashAlloc, CopyAssignment,
|
||||
MoveAssignment, AssignmentFromInitializerList, AssignmentOverwritesExisting,
|
||||
MoveAssignmentOverwritesExisting,
|
||||
AssignmentFromInitializerListOverwritesExisting, AssignmentOnSelf);
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_UNORDERED_SET_CONSTRUCTOR_TEST_H_
|
||||
91
third_party/abseil_cpp/absl/container/internal/unordered_set_lookup_test.h
vendored
Normal file
91
third_party/abseil_cpp/absl/container/internal/unordered_set_lookup_test.h
vendored
Normal file
|
|
@ -0,0 +1,91 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_UNORDERED_SET_LOOKUP_TEST_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_UNORDERED_SET_LOOKUP_TEST_H_
|
||||
|
||||
#include "gmock/gmock.h"
|
||||
#include "gtest/gtest.h"
|
||||
#include "absl/container/internal/hash_generator_testing.h"
|
||||
#include "absl/container/internal/hash_policy_testing.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
template <class UnordSet>
|
||||
class LookupTest : public ::testing::Test {};
|
||||
|
||||
TYPED_TEST_SUITE_P(LookupTest);
|
||||
|
||||
TYPED_TEST_P(LookupTest, Count) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
std::vector<T> values;
|
||||
std::generate_n(std::back_inserter(values), 10,
|
||||
hash_internal::Generator<T>());
|
||||
TypeParam m;
|
||||
for (const auto& v : values)
|
||||
EXPECT_EQ(0, m.count(v)) << ::testing::PrintToString(v);
|
||||
m.insert(values.begin(), values.end());
|
||||
for (const auto& v : values)
|
||||
EXPECT_EQ(1, m.count(v)) << ::testing::PrintToString(v);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(LookupTest, Find) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
std::vector<T> values;
|
||||
std::generate_n(std::back_inserter(values), 10,
|
||||
hash_internal::Generator<T>());
|
||||
TypeParam m;
|
||||
for (const auto& v : values)
|
||||
EXPECT_TRUE(m.end() == m.find(v)) << ::testing::PrintToString(v);
|
||||
m.insert(values.begin(), values.end());
|
||||
for (const auto& v : values) {
|
||||
typename TypeParam::iterator it = m.find(v);
|
||||
static_assert(std::is_same<const typename TypeParam::value_type&,
|
||||
decltype(*it)>::value,
|
||||
"");
|
||||
static_assert(std::is_same<const typename TypeParam::value_type*,
|
||||
decltype(it.operator->())>::value,
|
||||
"");
|
||||
EXPECT_TRUE(m.end() != it) << ::testing::PrintToString(v);
|
||||
EXPECT_EQ(v, *it) << ::testing::PrintToString(v);
|
||||
}
|
||||
}
|
||||
|
||||
TYPED_TEST_P(LookupTest, EqualRange) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
std::vector<T> values;
|
||||
std::generate_n(std::back_inserter(values), 10,
|
||||
hash_internal::Generator<T>());
|
||||
TypeParam m;
|
||||
for (const auto& v : values) {
|
||||
auto r = m.equal_range(v);
|
||||
ASSERT_EQ(0, std::distance(r.first, r.second));
|
||||
}
|
||||
m.insert(values.begin(), values.end());
|
||||
for (const auto& v : values) {
|
||||
auto r = m.equal_range(v);
|
||||
ASSERT_EQ(1, std::distance(r.first, r.second));
|
||||
EXPECT_EQ(v, *r.first);
|
||||
}
|
||||
}
|
||||
|
||||
REGISTER_TYPED_TEST_CASE_P(LookupTest, Count, Find, EqualRange);
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_UNORDERED_SET_LOOKUP_TEST_H_
|
||||
86
third_party/abseil_cpp/absl/container/internal/unordered_set_members_test.h
vendored
Normal file
86
third_party/abseil_cpp/absl/container/internal/unordered_set_members_test.h
vendored
Normal file
|
|
@ -0,0 +1,86 @@
|
|||
// Copyright 2019 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_UNORDERED_SET_MEMBERS_TEST_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_UNORDERED_SET_MEMBERS_TEST_H_
|
||||
|
||||
#include <type_traits>
|
||||
#include "gmock/gmock.h"
|
||||
#include "gtest/gtest.h"
|
||||
#include "absl/meta/type_traits.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
template <class UnordSet>
|
||||
class MembersTest : public ::testing::Test {};
|
||||
|
||||
TYPED_TEST_SUITE_P(MembersTest);
|
||||
|
||||
template <typename T>
|
||||
void UseType() {}
|
||||
|
||||
TYPED_TEST_P(MembersTest, Typedefs) {
|
||||
EXPECT_TRUE((std::is_same<typename TypeParam::key_type,
|
||||
typename TypeParam::value_type>()));
|
||||
EXPECT_TRUE((absl::conjunction<
|
||||
absl::negation<std::is_signed<typename TypeParam::size_type>>,
|
||||
std::is_integral<typename TypeParam::size_type>>()));
|
||||
EXPECT_TRUE((absl::conjunction<
|
||||
std::is_signed<typename TypeParam::difference_type>,
|
||||
std::is_integral<typename TypeParam::difference_type>>()));
|
||||
EXPECT_TRUE((std::is_convertible<
|
||||
decltype(std::declval<const typename TypeParam::hasher&>()(
|
||||
std::declval<const typename TypeParam::key_type&>())),
|
||||
size_t>()));
|
||||
EXPECT_TRUE((std::is_convertible<
|
||||
decltype(std::declval<const typename TypeParam::key_equal&>()(
|
||||
std::declval<const typename TypeParam::key_type&>(),
|
||||
std::declval<const typename TypeParam::key_type&>())),
|
||||
bool>()));
|
||||
EXPECT_TRUE((std::is_same<typename TypeParam::allocator_type::value_type,
|
||||
typename TypeParam::value_type>()));
|
||||
EXPECT_TRUE((std::is_same<typename TypeParam::value_type&,
|
||||
typename TypeParam::reference>()));
|
||||
EXPECT_TRUE((std::is_same<const typename TypeParam::value_type&,
|
||||
typename TypeParam::const_reference>()));
|
||||
EXPECT_TRUE((std::is_same<typename std::allocator_traits<
|
||||
typename TypeParam::allocator_type>::pointer,
|
||||
typename TypeParam::pointer>()));
|
||||
EXPECT_TRUE(
|
||||
(std::is_same<typename std::allocator_traits<
|
||||
typename TypeParam::allocator_type>::const_pointer,
|
||||
typename TypeParam::const_pointer>()));
|
||||
}
|
||||
|
||||
TYPED_TEST_P(MembersTest, SimpleFunctions) {
|
||||
EXPECT_GT(TypeParam().max_size(), 0);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(MembersTest, BeginEnd) {
|
||||
TypeParam t = {typename TypeParam::value_type{}};
|
||||
EXPECT_EQ(t.begin(), t.cbegin());
|
||||
EXPECT_EQ(t.end(), t.cend());
|
||||
EXPECT_NE(t.begin(), t.end());
|
||||
EXPECT_NE(t.cbegin(), t.cend());
|
||||
}
|
||||
|
||||
REGISTER_TYPED_TEST_SUITE_P(MembersTest, Typedefs, SimpleFunctions, BeginEnd);
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_UNORDERED_SET_MEMBERS_TEST_H_
|
||||
190
third_party/abseil_cpp/absl/container/internal/unordered_set_modifiers_test.h
vendored
Normal file
190
third_party/abseil_cpp/absl/container/internal/unordered_set_modifiers_test.h
vendored
Normal file
|
|
@ -0,0 +1,190 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_UNORDERED_SET_MODIFIERS_TEST_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_UNORDERED_SET_MODIFIERS_TEST_H_
|
||||
|
||||
#include "gmock/gmock.h"
|
||||
#include "gtest/gtest.h"
|
||||
#include "absl/container/internal/hash_generator_testing.h"
|
||||
#include "absl/container/internal/hash_policy_testing.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
template <class UnordSet>
|
||||
class ModifiersTest : public ::testing::Test {};
|
||||
|
||||
TYPED_TEST_SUITE_P(ModifiersTest);
|
||||
|
||||
TYPED_TEST_P(ModifiersTest, Clear) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
std::vector<T> values;
|
||||
std::generate_n(std::back_inserter(values), 10,
|
||||
hash_internal::Generator<T>());
|
||||
TypeParam m(values.begin(), values.end());
|
||||
ASSERT_THAT(keys(m), ::testing::UnorderedElementsAreArray(values));
|
||||
m.clear();
|
||||
EXPECT_THAT(keys(m), ::testing::UnorderedElementsAre());
|
||||
EXPECT_TRUE(m.empty());
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ModifiersTest, Insert) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
T val = hash_internal::Generator<T>()();
|
||||
TypeParam m;
|
||||
auto p = m.insert(val);
|
||||
EXPECT_TRUE(p.second);
|
||||
EXPECT_EQ(val, *p.first);
|
||||
p = m.insert(val);
|
||||
EXPECT_FALSE(p.second);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ModifiersTest, InsertHint) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
T val = hash_internal::Generator<T>()();
|
||||
TypeParam m;
|
||||
auto it = m.insert(m.end(), val);
|
||||
EXPECT_TRUE(it != m.end());
|
||||
EXPECT_EQ(val, *it);
|
||||
it = m.insert(it, val);
|
||||
EXPECT_TRUE(it != m.end());
|
||||
EXPECT_EQ(val, *it);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ModifiersTest, InsertRange) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
std::vector<T> values;
|
||||
std::generate_n(std::back_inserter(values), 10,
|
||||
hash_internal::Generator<T>());
|
||||
TypeParam m;
|
||||
m.insert(values.begin(), values.end());
|
||||
ASSERT_THAT(keys(m), ::testing::UnorderedElementsAreArray(values));
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ModifiersTest, Emplace) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
T val = hash_internal::Generator<T>()();
|
||||
TypeParam m;
|
||||
// TODO(alkis): We need a way to run emplace in a more meaningful way. Perhaps
|
||||
// with test traits/policy.
|
||||
auto p = m.emplace(val);
|
||||
EXPECT_TRUE(p.second);
|
||||
EXPECT_EQ(val, *p.first);
|
||||
p = m.emplace(val);
|
||||
EXPECT_FALSE(p.second);
|
||||
EXPECT_EQ(val, *p.first);
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ModifiersTest, EmplaceHint) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
T val = hash_internal::Generator<T>()();
|
||||
TypeParam m;
|
||||
// TODO(alkis): We need a way to run emplace in a more meaningful way. Perhaps
|
||||
// with test traits/policy.
|
||||
auto it = m.emplace_hint(m.end(), val);
|
||||
EXPECT_EQ(val, *it);
|
||||
it = m.emplace_hint(it, val);
|
||||
EXPECT_EQ(val, *it);
|
||||
}
|
||||
|
||||
template <class V>
|
||||
using IfNotVoid = typename std::enable_if<!std::is_void<V>::value, V>::type;
|
||||
|
||||
// In openmap we chose not to return the iterator from erase because that's
|
||||
// more expensive. As such we adapt erase to return an iterator here.
|
||||
struct EraseFirst {
|
||||
template <class Map>
|
||||
auto operator()(Map* m, int) const
|
||||
-> IfNotVoid<decltype(m->erase(m->begin()))> {
|
||||
return m->erase(m->begin());
|
||||
}
|
||||
template <class Map>
|
||||
typename Map::iterator operator()(Map* m, ...) const {
|
||||
auto it = m->begin();
|
||||
m->erase(it++);
|
||||
return it;
|
||||
}
|
||||
};
|
||||
|
||||
TYPED_TEST_P(ModifiersTest, Erase) {
|
||||
using T = hash_internal::GeneratedType<TypeParam>;
|
||||
std::vector<T> values;
|
||||
std::generate_n(std::back_inserter(values), 10,
|
||||
hash_internal::Generator<T>());
|
||||
TypeParam m(values.begin(), values.end());
|
||||
ASSERT_THAT(keys(m), ::testing::UnorderedElementsAreArray(values));
|
||||
std::vector<T> values2;
|
||||
for (const auto& val : values)
|
||||
if (val != *m.begin()) values2.push_back(val);
|
||||
auto it = EraseFirst()(&m, 0);
|
||||
ASSERT_TRUE(it != m.end());
|
||||
EXPECT_EQ(1, std::count(values2.begin(), values2.end(), *it));
|
||||
EXPECT_THAT(keys(m), ::testing::UnorderedElementsAreArray(values2.begin(),
|
||||
values2.end()));
|
||||
}
|
||||
|
||||
TYPED_TEST_P(ModifiersTest, EraseRange) {
  using T = hash_internal::GeneratedType<TypeParam>;
  // Populate the container with ten generated values.
  std::vector<T> inputs;
  std::generate_n(std::back_inserter(inputs), 10,
                  hash_internal::Generator<T>());
  TypeParam m(inputs.begin(), inputs.end());
  ASSERT_THAT(keys(m), ::testing::UnorderedElementsAreArray(inputs));
  // Erasing the full range must empty the container and return end().
  auto it = m.erase(m.begin(), m.end());
  EXPECT_TRUE(it == m.end());
  EXPECT_THAT(keys(m), ::testing::UnorderedElementsAre());
}
|
||||
|
||||
TYPED_TEST_P(ModifiersTest, EraseKey) {
  using T = hash_internal::GeneratedType<TypeParam>;
  // Populate the container with ten generated values.
  std::vector<T> samples;
  std::generate_n(std::back_inserter(samples), 10,
                  hash_internal::Generator<T>());
  TypeParam m(samples.begin(), samples.end());
  ASSERT_THAT(keys(m), ::testing::UnorderedElementsAreArray(samples));
  // Erasing by key reports exactly one removal and leaves no copy behind.
  EXPECT_EQ(1, m.erase(samples[0]));
  EXPECT_EQ(0, std::count(m.begin(), m.end(), samples[0]));
  EXPECT_THAT(keys(m), ::testing::UnorderedElementsAreArray(samples.begin() + 1,
                                                            samples.end()));
}
|
||||
|
||||
TYPED_TEST_P(ModifiersTest, Swap) {
  using T = hash_internal::GeneratedType<TypeParam>;
  // Two disjoint sets of generated values, one per container.
  std::vector<T> lhs_values;
  std::vector<T> rhs_values;
  std::generate_n(std::back_inserter(lhs_values), 5,
                  hash_internal::Generator<T>());
  std::generate_n(std::back_inserter(rhs_values), 5,
                  hash_internal::Generator<T>());
  TypeParam m1(lhs_values.begin(), lhs_values.end());
  TypeParam m2(rhs_values.begin(), rhs_values.end());
  EXPECT_THAT(keys(m1), ::testing::UnorderedElementsAreArray(lhs_values));
  EXPECT_THAT(keys(m2), ::testing::UnorderedElementsAreArray(rhs_values));
  // After swapping, each container must hold the other's elements.
  m1.swap(m2);
  EXPECT_THAT(keys(m1), ::testing::UnorderedElementsAreArray(rhs_values));
  EXPECT_THAT(keys(m2), ::testing::UnorderedElementsAreArray(lhs_values));
}
|
||||
|
||||
// TODO(alkis): Write tests for extract.
|
||||
// TODO(alkis): Write tests for merge.
|
||||
|
||||
REGISTER_TYPED_TEST_CASE_P(ModifiersTest, Clear, Insert, InsertHint,
|
||||
InsertRange, Emplace, EmplaceHint, Erase, EraseRange,
|
||||
EraseKey, Swap);
|
||||
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_UNORDERED_SET_MODIFIERS_TEST_H_
|
||||
41
third_party/abseil_cpp/absl/container/internal/unordered_set_test.cc
vendored
Normal file
41
third_party/abseil_cpp/absl/container/internal/unordered_set_test.cc
vendored
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include <unordered_set>
|
||||
|
||||
#include "absl/container/internal/unordered_set_constructor_test.h"
|
||||
#include "absl/container/internal/unordered_set_lookup_test.h"
|
||||
#include "absl/container/internal/unordered_set_members_test.h"
|
||||
#include "absl/container/internal/unordered_set_modifiers_test.h"
|
||||
|
||||
namespace absl {
ABSL_NAMESPACE_BEGIN
namespace container_internal {
namespace {

// Element types under test: std::unordered_set instantiated with the
// StatefulTestingHash/StatefulTestingEqual functors and the Alloc allocator
// from the test support headers, for both int and std::string keys.
using SetTypes = ::testing::Types<
    std::unordered_set<int, StatefulTestingHash, StatefulTestingEqual,
                       Alloc<int>>,
    std::unordered_set<std::string, StatefulTestingHash, StatefulTestingEqual,
                       Alloc<std::string>>>;

// Run each shared typed test suite against every type in SetTypes, verifying
// that std::unordered_set itself satisfies the contracts these suites check.
INSTANTIATE_TYPED_TEST_SUITE_P(UnorderedSet, ConstructorTest, SetTypes);
INSTANTIATE_TYPED_TEST_SUITE_P(UnorderedSet, LookupTest, SetTypes);
INSTANTIATE_TYPED_TEST_SUITE_P(UnorderedSet, MembersTest, SetTypes);
INSTANTIATE_TYPED_TEST_SUITE_P(UnorderedSet, ModifiersTest, SetTypes);

}  // namespace
}  // namespace container_internal
ABSL_NAMESPACE_END
}  // namespace absl
|
||||
Loading…
Add table
Add a link
Reference in a new issue