Export of internal Abseil changes
-- 074a799119ac881b8b8ce59ef7a3166d1aa025ac by Tom Manshreck <shreck@google.com>:

nit: Add return info for StrCat

PiperOrigin-RevId: 278647298

-- d58a2a39ab6f50266cc695506ba2e86bdb45d795 by Mark Barolak <mbar@google.com>:

Stop suppressing no-nested-anon-types warnings because there aren't actually any warnings to suppress.

PiperOrigin-RevId: 278440548

-- 445051bd280b9a6f608a8c80b3d7cafcc1377a03 by Abseil Team <absl-team@google.com>:

ResetThreadIdentity does not need to clear identity->waiter_state.

ResetThreadIdentity is only called by NewThreadIdentity, and NewThreadIdentity is only called by CreateThreadIdentity. CreateThreadIdentity calls PerThreadSem::Init, which initializes identity->waiter_state, immediately after calling NewThreadIdentity. Therefore ResetThreadIdentity does not need to clear identity->waiter_state.

PiperOrigin-RevId: 278429844

-- c2079b664d92be40d5e365abcca4e9b3505a75a6 by Abseil Team <absl-team@google.com>:

Delete the f->header.magic check in LowLevelAlloc::Free().

The f->header.magic check in LowLevelAlloc::Free() is redundant, because AddToFreeList() will immediately perform the same check.

Also fix a typo in the comment that documents the lock requirements for Next(). The comment should say "L >= arena->mu", which is equivalent to EXCLUSIVE_LOCKS_REQUIRED(arena->mu).

NOTE: LowLevelAlloc::Free() performs the f->header.magic check without holding the arena lock. This may have caused the TSAN data race warning reported in bug 143697235.

PiperOrigin-RevId: 278414140

-- 5534f35ce677165700117d868f51607ed1f0d73b by Greg Falcon <gfalcon@google.com>:

Add an internal (unsupported) PiecewiseCombiner class to allow hashing buffers piecewise.

PiperOrigin-RevId: 278388902

GitOrigin-RevId: 074a799119ac881b8b8ce59ef7a3166d1aa025ac
Change-Id: I61734850cbbb01c7585e8c736a5bb56e416512a8
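The last change above is the substantive one in this commit: the new absl::hash_internal::PiecewiseCombiner lets an AbslHashValue overload feed a logical buffer to the hash state in chunks while producing the same result as passing the concatenated bytes to combine_contiguous in a single call. The sketch below is not part of the commit; it mirrors the call pattern exercised by the new test in the diff, assuming a hypothetical Streamed type invented here for illustration. The combiner lives in an internal namespace and is explicitly unsupported.

#include <utility>

#include "absl/hash/hash.h"
#include "absl/strings/string_view.h"

// Hypothetical type whose hash is defined over two separately stored pieces.
struct Streamed {
  absl::string_view part1;
  absl::string_view part2;

  template <typename H>
  friend H AbslHashValue(H h, const Streamed& s) {
    // Feed both pieces through one combiner; the finalized state is intended
    // to match hashing the concatenated bytes contiguously.
    absl::hash_internal::PiecewiseCombiner combiner;
    h = combiner.add_buffer(std::move(h), s.part1.data(), s.part1.size());
    h = combiner.add_buffer(std::move(h), s.part2.data(), s.part2.size());
    return combiner.finalize(std::move(h));
  }
};

Per the CombinePiecewiseBuffer test below, absl::Hash<Streamed>()(Streamed{"foo", "bar"}) would match a hasher that passes the full "foobar" buffer to combine_contiguous once, but not one that hashes "foo" and "bar" as two separate contiguous buffers.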
This commit is contained in:
parent 20de2db748
commit e96ae2203b
11 changed files with 278 additions and 15 deletions
@@ -274,8 +274,8 @@ TEST(HashValueTest, Strings) {
   const std::string small = "foo";
   const std::string dup = "foofoo";
-  const std::string large = "large";
-  const std::string huge = std::string(5000, 'a');
+  const std::string large = std::string(2048, 'x');  // multiple of chunk size
+  const std::string huge = std::string(5000, 'a');   // not a multiple
 
   EXPECT_TRUE(absl::VerifyTypeImplementsAbslHashCorrectly(std::make_tuple(
       std::string(), absl::string_view(),

@@ -378,6 +378,116 @@ struct Private {
   }
 };
 
+// Test helper for combine_piecewise_buffer. It holds a string_view to the
+// buffer-to-be-hashed. Its AbslHashValue specialization will split up its
+// contents at the character offsets requested.
+class PiecewiseHashTester {
+ public:
+  // Create a hash view of a buffer to be hashed contiguously.
+  explicit PiecewiseHashTester(absl::string_view buf)
+      : buf_(buf), piecewise_(false), split_locations_() {}
+
+  // Create a hash view of a buffer to be hashed piecewise, with breaks at the
+  // given locations.
+  PiecewiseHashTester(absl::string_view buf, std::set<size_t> split_locations)
+      : buf_(buf),
+        piecewise_(true),
+        split_locations_(std::move(split_locations)) {}
+
+  template <typename H>
+  friend H AbslHashValue(H h, const PiecewiseHashTester& p) {
+    if (!p.piecewise_) {
+      return H::combine_contiguous(std::move(h), p.buf_.data(), p.buf_.size());
+    }
+    absl::hash_internal::PiecewiseCombiner combiner;
+    if (p.split_locations_.empty()) {
+      h = combiner.add_buffer(std::move(h), p.buf_.data(), p.buf_.size());
+      return combiner.finalize(std::move(h));
+    }
+    size_t begin = 0;
+    for (size_t next : p.split_locations_) {
+      absl::string_view chunk = p.buf_.substr(begin, next - begin);
+      h = combiner.add_buffer(std::move(h), chunk.data(), chunk.size());
+      begin = next;
+    }
+    absl::string_view last_chunk = p.buf_.substr(begin);
+    if (!last_chunk.empty()) {
+      h = combiner.add_buffer(std::move(h), last_chunk.data(),
+                              last_chunk.size());
+    }
+    return combiner.finalize(std::move(h));
+  }
+
+ private:
+  absl::string_view buf_;
+  bool piecewise_;
+  std::set<size_t> split_locations_;
+};
+
+// Dummy object that hashes as two distinct contiguous buffers, "foo" followed
+// by "bar"
+struct DummyFooBar {
+  template <typename H>
+  friend H AbslHashValue(H h, const DummyFooBar&) {
+    const char* foo = "foo";
+    const char* bar = "bar";
+    h = H::combine_contiguous(std::move(h), foo, 3);
+    h = H::combine_contiguous(std::move(h), bar, 3);
+    return std::move(h);
+  }
+};
+
+TEST(HashValueTest, CombinePiecewiseBuffer) {
+  absl::Hash<PiecewiseHashTester> hash;
+
+  // Check that hashing an empty buffer through the piecewise API works.
+  EXPECT_EQ(hash(PiecewiseHashTester("")), hash(PiecewiseHashTester("", {})));
+
+  // Similarly, small buffers should give consistent results
+  EXPECT_EQ(hash(PiecewiseHashTester("foobar")),
+            hash(PiecewiseHashTester("foobar", {})));
+  EXPECT_EQ(hash(PiecewiseHashTester("foobar")),
+            hash(PiecewiseHashTester("foobar", {3})));
+
+  // But hashing "foobar" in pieces gives a different answer than hashing "foo"
+  // contiguously, then "bar" contiguously.
+  EXPECT_NE(hash(PiecewiseHashTester("foobar", {3})),
+            absl::Hash<DummyFooBar>()(DummyFooBar{}));
+
+  // Test hashing a large buffer incrementally, broken up in several different
+  // ways. Arrange for breaks on and near the stride boundaries to look for
+  // off-by-one errors in the implementation.
+  //
+  // This test is run on a buffer that is a multiple of the stride size, and one
+  // that isn't.
+  for (size_t big_buffer_size : {1024 * 2 + 512, 1024 * 3}) {
+    SCOPED_TRACE(big_buffer_size);
+    std::string big_buffer;
+    for (int i = 0; i < big_buffer_size; ++i) {
+      // Arbitrary std::string
+      big_buffer.push_back(32 + (i * (i / 3)) % 64);
+    }
+    auto big_buffer_hash = hash(PiecewiseHashTester(big_buffer));
+
+    const int possible_breaks = 9;
+    size_t breaks[possible_breaks] = {1,    512,  1023, 1024, 1025,
+                                      1536, 2047, 2048, 2049};
+    for (unsigned test_mask = 0; test_mask < (1u << possible_breaks);
+         ++test_mask) {
+      SCOPED_TRACE(test_mask);
+      std::set<size_t> break_locations;
+      for (int j = 0; j < possible_breaks; ++j) {
+        if (test_mask & (1u << j)) {
+          break_locations.insert(breaks[j]);
+        }
+      }
+      EXPECT_EQ(
+          hash(PiecewiseHashTester(big_buffer, std::move(break_locations))),
+          big_buffer_hash);
+    }
+  }
+}
+
 TEST(HashValueTest, PrivateSanity) {
   // Sanity check that Private is working as the tests below expect it to work.
   EXPECT_TRUE(is_hashable<Private>::value);