Squashed 'third_party/abseil/' changes from ddf8e52a2..384af0e91
384af0e91 Export of internal Abseil changes
e7ca23aca Export of internal Abseil changes
4611a601a Export of internal Abseil changes
8a9ef3c5d Export of internal Abseil changes
9f8b87b71 Add missing word 'library' in the 'status' description (#868)
e2b1bab19 Export of internal Abseil changes
1bae23e32 Export of internal Abseil changes
6df644c56 Include the status library into the main README. (#863)
68f1ad932 Export of internal Abseil changes
52acfe6fc Export of internal Abseil changes
1918ad2ae Export of internal Abseil changes
938fd0f4e Export of internal Abseil changes
fbdff6f3a Export of internal Abseil changes
acf3390ca Export of internal Abseil changes
592924480 Export of internal Abseil changes
e80c0b353 Export of internal Abseil changes
5d8fc9192 Export of internal Abseil changes
e19260fd7 Export of internal Abseil changes
4fd9a1ec5 Export of internal Abseil changes
4ae673067 Export of internal Abseil changes
1b465af3b Export of internal Abseil changes
6b03bf543 fix build dll (#797)
0bbebc85c Export of internal Abseil changes
0453e1653 Export of internal Abseil changes
a4798817e Export of internal Abseil changes
e96d49687 Export of internal Abseil changes
731852f10 Fix stacktrace on aarch64 architecture. Fixes #805 (#827)
2e5f2bcfd moved deleted functions to public for better compiler errors. (#828)
e9e9b9fc7 Export of internal Abseil changes
b8e890f95 Export of internal Abseil changes
c9894d1dc Export of internal Abseil changes
e9b9e38f6 Export of internal Abseil changes
962b06754 Export of internal Abseil changes
5bf048b84 Export of internal Abseil changes
1e3d25b26 Export of internal Abseil changes
eb317a701 Export of internal Abseil changes
4b915e709 Export of internal Abseil changes
8f1c34a77 Export of internal Abseil changes
60d00a582 Export of internal Abseil changes
f3f785ab5 Export of internal Abseil changes
4b2fbb4ad Export of internal Abseil changes
e493d6acb fix compile fails with asan and -Wredundant-decls (#801)
e63a5a610 Export of internal Abseil changes
c678d6c6b Export of internal Abseil changes
4b4f9aae7 Export of internal Abseil changes
887d0eee6 Export of internal Abseil changes
b978fc02f Export of internal Abseil changes
093cc2760 Export of internal Abseil changes
40fdb59d3 btree: fix sign-compare warnings (#800)
1fd58b69c Export of internal Abseil changes
d1de75bf5 Export of internal Abseil changes
cad3f30b4 Export of internal Abseil changes
9927a0989 Export of internal Abseil changes
7680a5f8e Added missing asserts for seq.index() < capacity_ and unified their usage based on has_element(). (#781)
d3614de61 Export of internal Abseil changes
20feb1cdb Export of internal Abseil changes
c1ae0a497 Export of internal Abseil changes
6af91b351 Export of internal Abseil changes
f2c9c663d Export of internal Abseil changes
3c8b5d758 Export of internal Abseil changes
7ba8cdb56 Export of internal Abseil changes
930fbec75 Export of internal Abseil changes
0e9921b75 Export of internal Abseil changes
a4cbb5f69 Export of internal Abseil changes
4d2ff381a Export of internal Abseil changes
c03c18e7f Export of internal Abseil changes
b321ad86c Export of internal Abseil changes
fbf0fdab6 Export of internal Abseil changes
dc969f34a Export of internal Abseil changes
d0c433455 Export of internal Abseil changes
c6b3f2cf5 Export of internal Abseil changes
1beb3191c Export of internal Abseil changes
1b7e751e5 Export of internal Abseil changes
ce4bc9277 Export of internal Abseil changes
f72cc3516 Export of internal Abseil changes
f66bc7492 Export of internal Abseil changes
1995c6a3c Export of internal Abseil changes
184cf2524 Export of internal Abseil changes
82302f1e0 Export of internal Abseil changes
dea76486c Export of internal Abseil changes
d39fe6cd6 Export of internal Abseil changes
2c8a5b0d8 Export of internal Abseil changes
41a6263fd Export of internal Abseil changes
3c2bed2e7 Export of internal Abseil changes
ea8a689cf fix build on P9 (#739)
672d9e0ae Export of internal Abseil changes
f624790b7 Export of internal Abseil changes
55c04eb92 Export of internal Abseil changes
302b250e1 Disable pthread for standalone wasm build support (#721)
61d8bc057 Merge branch 'master' of https://github.com/abseil/abseil-cpp into master
4b5b25a28 cmake: remove unneeded enable_testing() cmd (#736)
63f2c695e cmake: flag conformance_testing as TESTONLY (#737)
d5269a8b6 Export of internal Abseil changes
23f1f9cf6 Typo in comment (#733)
259301a52 Fix CMake path for INSTALL_INCLUDEDIR (#723)
dc464c1dc Allow overriding Abseil IDE folder (#724)
bf655de09 Export of internal Abseil changes
38db52adb Export of internal Abseil changes
81f34df83 Export of internal Abseil changes
b86fff162 Export of internal Abseil changes
10cb35e45 Export of internal Abseil changes
4ccc0fce0 Export of internal Abseil changes
4a851046a Export of internal Abseil changes
ccdbb5941 Export of internal Abseil changes
01f5f81f9 Export of internal Abseil changes
2c92bdc7c Export of internal Abseil changes
e7ebf9803 Export of internal Abseil changes
2eba343b5 Export of internal Abseil changes
a8b03d90e Export of internal Abseil changes
1d31b5c36 Export of internal Abseil changes
da3a87690 Export of internal Abseil changes
8faf20461 Exclude empty directories (#697)
2069dc796 Export of internal Abseil changes
4832bf6bf Added a BUILD file in root to expose license. (#695)
af8f994af Export of internal Abseil changes
33caf1097 Export of internal Abseil changes
cf1a02e2d Export of internal Abseil changes
768eb2ca2 Export of internal Abseil changes
3f347c462 Fix build on riscv32 (#675)
62cf6a704 Export of internal Abseil changes
d118d4bb1 Export of internal Abseil changes
f2bc9d11e Fix public target name of the random library (#684)
0fecf0e63 Export of internal Abseil changes
cbfd0f0fe Export of internal Abseil changes
c45d1c09d Export of internal Abseil changes
a35ef8a62 Export of internal Abseil changes
bd317cae3 Export of internal Abseil changes
b11574465 fix MSVC warning 4245: conversion signed => unsigned during initialization (#678)
d85783fd0 Export of internal Abseil changes
a1d668990 Export of internal Abseil changes
ca9856cab Export of internal Abseil changes
6e18c7115 Export of internal Abseil changes
3f48ce1c4 init (#673)
cde2e2410 Export of internal Abseil changes
68494aae9 Fix CMake Threads dependency issue
902909a43 Export of internal Abseil changes
cb52b05ea Export of internal Abseil changes
1a02b7a20 Use "-lrt" instead of the resolved find_library result when linking librt (#665)
df60c82df Export of internal Abseil changes
b35973e3e Export of internal Abseil changes
db5773a72 Export of internal Abseil changes
71079e42c Export of internal Abseil changes
2946ac0de Use base_internal::AtomicHook instead of std::atomic (#661)
567bee2f7 Fix ABSL_RANDOM_RANDEN_COPTS setting on FreeBSD (#664)
bf6166a63 Export of internal Abseil changes
111260963 Export of internal Abseil changes
73ea9a957 Export of internal Abseil changes
c01b9916e Add option to use an externally provided GoogleTest target (for usage of abseil as add_subdirectory target) (#647)
d43b7997c Export of internal Abseil changes
62f05b1f5 Export of internal Abseil changes
fba8a316c Export of internal Abseil changes
79e0dc115 Export of internal Abseil changes
132d791b4 bazel: Add missing load statements for cc_binary (#645)
518f17501 Export of internal Abseil changes
092ed9793 Export of internal Abseil changes
2d2a8aea2 Export of internal Abseil changes
7853a7586 Export of internal Abseil changes
c6954897f Export of internal Abseil changes
b92f35f65 Fix CompressedTuple move constructor on MSVC (#637)
a877af1f2 Export of internal Abseil changes
d936052d3 Export of internal Abseil changes
238b9a59c Skip the .exe suffix in the helpshort filter on Windows (#629)
417ea99cb UWP doesn't allow reading regkeys (#594)
40a0e58eb Export of internal Abseil changes
cf3a1998e Export of internal Abseil changes
b19ba9676 Export of internal Abseil changes
06f0e767d BuildBreak: UWP apps can't call GetModuleHandle (#596)
bcefbdcdf Export of internal Abseil changes
0033c9ea9 Fix build on FreeBSD/powerpc (#616)
0d5ce2797 Export of internal Abseil changes
b69c7d880 Export of internal Abseil changes
2a5633fc0 Merge "Export of internal Abseil changes"
f9b3d6e49 Add RISCV support to GetProgramCounter() (#621)
914ff4451 Export of internal Abseil changes
0232c87f2 Add missing ABSL_HAVE_VDSO_SUPPORT conditional (#622)
3c8141051 Export of internal Abseil changes
c44657f55 Export of internal Abseil changes
98eb410c9 Export of internal Abseil changes
bf78e9773 Export of internal Abseil changes
d95d15671 Export of internal Abseil changes
24713a703 Export of internal Abseil changes
72382c21f Export of internal Abseil changes
08a7e7bf9 Export of internal Abseil changes
36bcd9599 Fix pointer format specifier in documentation (#614)
0f86336b6 Export of internal Abseil changes
c512f118d Export of internal Abseil changes
37dd2562e Export of internal Abseil changes
444277026 fix: Add support for more ARM processors detection (#608)
159bf2bf6 Export of internal Abseil changes
a2e6adecc Use https links. (#586)
564001ae5 Export of internal Abseil changes
b3aaac8a3 Export of internal Abseil changes
63ee2f887 Export of internal Abseil changes
a048203a8 Export of internal Abseil changes
1de016636 Export of internal Abseil changes
ad904b6cd Export of internal Abseil changes
7bd1935dc cmake: Fix x86_64 check on Windows for random copts (#518)
292351391 Export of internal Abseil changes
bf86cfe16 Export of internal Abseil changes
12bc53e03 Export of internal Abseil changes
1e39f8626 Export of internal Abseil changes
77f87009a Export of internal Abseil changes
d659fe54b Export of internal Abseil changes
a4b757b5d Export of internal Abseil changes
0514227d2 Export of internal Abseil changes
7f4fe64af Export of internal Abseil changes
16d9fd58a Export of internal Abseil changes
bcaae6009 Export of internal Abseil changes
8ba96a824 Export of internal Abseil changes
2103fd9ac Export of internal Abseil changes
3df7b52a6 Export of internal Abseil changes
fa8c75182 Export of internal Abseil changes
85092b4b6 Fix Conan builds (#400)
e96ae2203 Export of internal Abseil changes
20de2db74 Export of internal Abseil changes
846e5dbed Export of internal Abseil changes
83880e3d8 Merge branch 'master' of https://github.com/abseil/abseil-cpp
8207907f4 Export of internal Abseil changes
39d68a422 docs: fix typo (#397)
078b89b3c Export of internal Abseil changes
19b021cb3 Export of internal Abseil changes
ecc0033b5 Always enable proper symbolize implementation on Windows (#257)
2796d500a Export of internal Abseil changes
e4c8d0eb8 Export of internal Abseil changes
a15364ce4 Export of internal Abseil changes
ab3552a18 Export of internal Abseil changes
e9f9000c7 Fix ABSL_WAITER_MODE detection for mingw (#342)
abea769b5 Fix ABSL_HAVE_ALARM check on mingw (#341)
25597bdfc Export of internal Abseil changes
aad33fefa Export of internal Abseil changes
8fe7214fe Export of internal Abseil changes
debac94cf Export of internal Abseil changes
882b3501a Fix spelling errors (#384)
502efe6d7 Export of internal Abseil changes
ccdd1d57b Export of internal Abseil changes
Change-Id: I59864a0053f6e03d88cc9d5e2a92757039a05484
git-subtree-dir: third_party/abseil
git-subtree-split: 384af0e9141283172e2bff3210dae79fb7130d9c
diff --git a/absl/container/internal/raw_hash_set.h b/absl/container/internal/raw_hash_set.h
index 2e6f4dd..74b2ef4 100644
--- a/absl/container/internal/raw_hash_set.h
+++ b/absl/container/internal/raw_hash_set.h
@@ -102,8 +102,8 @@
#include <type_traits>
#include <utility>
-#include "absl/base/internal/bits.h"
#include "absl/base/internal/endian.h"
+#include "absl/base/optimization.h"
#include "absl/base/port.h"
#include "absl/container/internal/common.h"
#include "absl/container/internal/compressed_tuple.h"
@@ -115,11 +115,23 @@
#include "absl/container/internal/layout.h"
#include "absl/memory/memory.h"
#include "absl/meta/type_traits.h"
+#include "absl/numeric/bits.h"
#include "absl/utility/utility.h"
namespace absl {
+ABSL_NAMESPACE_BEGIN
namespace container_internal {
+template <typename AllocType>
+void SwapAlloc(AllocType& lhs, AllocType& rhs,
+ std::true_type /* propagate_on_container_swap */) {
+ using std::swap;
+ swap(lhs, rhs);
+}
+template <typename AllocType>
+void SwapAlloc(AllocType& /*lhs*/, AllocType& /*rhs*/,
+ std::false_type /* propagate_on_container_swap */) {}
+
template <size_t Width>
class probe_seq {
public:
@@ -167,24 +179,19 @@
// TODO(alkis): Switch to std::is_nothrow_swappable when gcc/clang supports it.
template <class T>
-constexpr bool IsNoThrowSwappable() {
+constexpr bool IsNoThrowSwappable(std::true_type = {} /* is_swappable */) {
using std::swap;
return noexcept(swap(std::declval<T&>(), std::declval<T&>()));
}
-
-template <typename T>
-int TrailingZeros(T x) {
- return sizeof(T) == 8 ? base_internal::CountTrailingZerosNonZero64(
- static_cast<uint64_t>(x))
- : base_internal::CountTrailingZerosNonZero32(
- static_cast<uint32_t>(x));
+template <class T>
+constexpr bool IsNoThrowSwappable(std::false_type /* is_swappable */) {
+ return false;
}
template <typename T>
-int LeadingZeros(T x) {
- return sizeof(T) == 8
- ? base_internal::CountLeadingZeros64(static_cast<uint64_t>(x))
- : base_internal::CountLeadingZeros32(static_cast<uint32_t>(x));
+uint32_t TrailingZeros(T x) {
+ ABSL_INTERNAL_ASSUME(x != 0);
+ return countr_zero(x);
}
// An abstraction over a bitmask. It provides an easy way to iterate through the
@@ -214,26 +221,24 @@
}
explicit operator bool() const { return mask_ != 0; }
int operator*() const { return LowestBitSet(); }
- int LowestBitSet() const {
+ uint32_t LowestBitSet() const {
return container_internal::TrailingZeros(mask_) >> Shift;
}
- int HighestBitSet() const {
- return (sizeof(T) * CHAR_BIT - container_internal::LeadingZeros(mask_) -
- 1) >>
- Shift;
+ uint32_t HighestBitSet() const {
+ return static_cast<uint32_t>((bit_width(mask_) - 1) >> Shift);
}
BitMask begin() const { return *this; }
BitMask end() const { return BitMask(0); }
- int TrailingZeros() const {
+ uint32_t TrailingZeros() const {
return container_internal::TrailingZeros(mask_) >> Shift;
}
- int LeadingZeros() const {
+ uint32_t LeadingZeros() const {
constexpr int total_significant_bits = SignificantBits << Shift;
constexpr int extra_bits = sizeof(T) * 8 - total_significant_bits;
- return container_internal::LeadingZeros(mask_ << extra_bits) >> Shift;
+ return countl_zero(mask_ << extra_bits) >> Shift;
}
private:
@@ -311,7 +316,7 @@
inline bool IsDeleted(ctrl_t c) { return c == kDeleted; }
inline bool IsEmptyOrDeleted(ctrl_t c) { return c < kSentinel; }
-#if SWISSTABLE_HAVE_SSE2
+#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2
// https://github.com/abseil/abseil-cpp/issues/209
// https://gcc.gnu.org/bugzilla/show_bug.cgi?id=87853
@@ -345,7 +350,7 @@
// Returns a bitmask representing the positions of empty slots.
BitMask<uint32_t, kWidth> MatchEmpty() const {
-#if SWISSTABLE_HAVE_SSSE3
+#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSSE3
// This only works because kEmpty is -128.
return BitMask<uint32_t, kWidth>(
_mm_movemask_epi8(_mm_sign_epi8(ctrl, ctrl)));
@@ -364,14 +369,14 @@
// Returns the number of trailing empty or deleted elements in the group.
uint32_t CountLeadingEmptyOrDeleted() const {
auto special = _mm_set1_epi8(kSentinel);
- return TrailingZeros(
- _mm_movemask_epi8(_mm_cmpgt_epi8_fixed(special, ctrl)) + 1);
+ return TrailingZeros(static_cast<uint32_t>(
+ _mm_movemask_epi8(_mm_cmpgt_epi8_fixed(special, ctrl)) + 1));
}
void ConvertSpecialToEmptyAndFullToDeleted(ctrl_t* dst) const {
auto msbs = _mm_set1_epi8(static_cast<char>(-128));
auto x126 = _mm_set1_epi8(126);
-#if SWISSTABLE_HAVE_SSSE3
+#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSSE3
auto res = _mm_or_si128(_mm_shuffle_epi8(x126, ctrl), msbs);
#else
auto zero = _mm_setzero_si128();
@@ -383,7 +388,7 @@
__m128i ctrl;
};
-#endif // SWISSTABLE_HAVE_SSE2
+#endif // ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2
struct GroupPortableImpl {
static constexpr size_t kWidth = 8;
@@ -437,7 +442,7 @@
uint64_t ctrl;
};
-#if SWISSTABLE_HAVE_SSE2
+#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2
using Group = GroupSse2Impl;
#else
using Group = GroupPortableImpl;
@@ -456,21 +461,11 @@
// DELETED -> EMPTY
// EMPTY -> EMPTY
// FULL -> DELETED
-inline void ConvertDeletedToEmptyAndFullToDeleted(
- ctrl_t* ctrl, size_t capacity) {
- assert(ctrl[capacity] == kSentinel);
- assert(IsValidCapacity(capacity));
- for (ctrl_t* pos = ctrl; pos != ctrl + capacity + 1; pos += Group::kWidth) {
- Group{pos}.ConvertSpecialToEmptyAndFullToDeleted(pos);
- }
- // Copy the cloned ctrl bytes.
- std::memcpy(ctrl + capacity + 1, ctrl, Group::kWidth);
- ctrl[capacity] = kSentinel;
-}
+void ConvertDeletedToEmptyAndFullToDeleted(ctrl_t* ctrl, size_t capacity);
// Rounds up the capacity to the next power of 2 minus 1, with a minimum of 1.
inline size_t NormalizeCapacity(size_t n) {
- return n ? ~size_t{} >> LeadingZeros(n) : 1;
+ return n ? ~size_t{} >> countl_zero(n) : 1;
}
// We use 7/8th as maximum load factor.
@@ -495,6 +490,76 @@
return growth + static_cast<size_t>((static_cast<int64_t>(growth) - 1) / 7);
}
+inline void AssertIsFull(ctrl_t* ctrl) {
+ ABSL_HARDENING_ASSERT((ctrl != nullptr && IsFull(*ctrl)) &&
+ "Invalid operation on iterator. The element might have "
+ "been erased, or the table might have rehashed.");
+}
+
+inline void AssertIsValid(ctrl_t* ctrl) {
+ ABSL_HARDENING_ASSERT((ctrl == nullptr || IsFull(*ctrl)) &&
+ "Invalid operation on iterator. The element might have "
+ "been erased, or the table might have rehashed.");
+}
+
+struct FindInfo {
+ size_t offset;
+ size_t probe_length;
+};
+
+// The representation of the object has two modes:
+// - small: For capacities < kWidth-1
+// - large: For the rest.
+//
+// Differences:
+// - In small mode we are able to use the whole capacity. The extra control
+// bytes give us at least one "empty" control byte to stop the iteration.
+// This is important to make 1 a valid capacity.
+//
+// - In small mode only the first `capacity()` control bytes after the
+// sentinel are valid. The rest contain dummy kEmpty values that do not
+// represent a real slot. This is important to take into account on
+// find_first_non_full(), where we never try ShouldInsertBackwards() for
+// small tables.
+inline bool is_small(size_t capacity) { return capacity < Group::kWidth - 1; }
+
+inline probe_seq<Group::kWidth> probe(ctrl_t* ctrl, size_t hash,
+ size_t capacity) {
+ return probe_seq<Group::kWidth>(H1(hash, ctrl), capacity);
+}
+
+// Probes the raw_hash_set with the probe sequence for hash and returns the
+// pointer to the first empty or deleted slot.
+// NOTE: this function must work with tables having both kEmpty and kDeleted
+// in one group. Such tables appear during drop_deletes_without_resize.
+//
+// This function is very useful when insertions happen and:
+// - the input is already a set
+// - there are enough slots
+// - the element with the hash is not in the table
+inline FindInfo find_first_non_full(ctrl_t* ctrl, size_t hash,
+ size_t capacity) {
+ auto seq = probe(ctrl, hash, capacity);
+ while (true) {
+ Group g{ctrl + seq.offset()};
+ auto mask = g.MatchEmptyOrDeleted();
+ if (mask) {
+#if !defined(NDEBUG)
+ // We want to add entropy even when ASLR is not enabled.
+ // In debug build we will randomly insert in either the front or back of
+ // the group.
+ // TODO(kfm,sbenza): revisit after we do unconditional mixing
+ if (!is_small(capacity) && ShouldInsertBackwards(hash, ctrl)) {
+ return {seq.offset(mask.HighestBitSet()), seq.index()};
+ }
+#endif
+ return {seq.offset(mask.LowestBitSet()), seq.index()};
+ }
+ seq.next();
+ assert(seq.index() < capacity && "full table!");
+ }
+}
+
// Policy: a policy defines how to perform different operations on
// the slots of the hashtable (see hash_policy_traits.h for the full interface
// of policy).
@@ -509,7 +574,8 @@
// if they are equal, false if they are not. If two keys compare equal, then
// their hash values as defined by Hash MUST be equal.
//
-// Allocator: an Allocator [https://devdocs.io/cpp/concept/allocator] with which
+// Allocator: an Allocator
+// [https://en.cppreference.com/w/cpp/named_req/Allocator] with which
// the storage of the hashtable will be allocated and the elements will be
// constructed and destroyed.
template <class Policy, class Hash, class Eq, class Alloc>
@@ -614,13 +680,17 @@
iterator() {}
// PRECONDITION: not an end() iterator.
- reference operator*() const { return PolicyTraits::element(slot_); }
+ reference operator*() const {
+ AssertIsFull(ctrl_);
+ return PolicyTraits::element(slot_);
+ }
// PRECONDITION: not an end() iterator.
pointer operator->() const { return &operator*(); }
// PRECONDITION: not an end() iterator.
iterator& operator++() {
+ AssertIsFull(ctrl_);
++ctrl_;
++slot_;
skip_empty_or_deleted();
@@ -634,6 +704,8 @@
}
friend bool operator==(const iterator& a, const iterator& b) {
+ AssertIsValid(a.ctrl_);
+ AssertIsValid(b.ctrl_);
return a.ctrl_ == b.ctrl_;
}
friend bool operator!=(const iterator& a, const iterator& b) {
@@ -641,23 +713,23 @@
}
private:
- iterator(ctrl_t* ctrl) : ctrl_(ctrl) {} // for end()
- iterator(ctrl_t* ctrl, slot_type* slot) : ctrl_(ctrl), slot_(slot) {}
+ iterator(ctrl_t* ctrl, slot_type* slot) : ctrl_(ctrl), slot_(slot) {
+ // This assumption helps the compiler know that any non-end iterator is
+ // not equal to any end iterator.
+ ABSL_INTERNAL_ASSUME(ctrl != nullptr);
+ }
void skip_empty_or_deleted() {
while (IsEmptyOrDeleted(*ctrl_)) {
- // ctrl is not necessarily aligned to Group::kWidth. It is also likely
- // to read past the space for ctrl bytes and into slots. This is ok
- // because ctrl has sizeof() == 1 and slot has sizeof() >= 1 so there
- // is no way to read outside the combined slot array.
uint32_t shift = Group{ctrl_}.CountLeadingEmptyOrDeleted();
ctrl_ += shift;
slot_ += shift;
}
+ if (ABSL_PREDICT_FALSE(*ctrl_ == kSentinel)) ctrl_ = nullptr;
}
ctrl_t* ctrl_ = nullptr;
- // To avoid uninitialized member warnigs, put slot_ in an anonymous union.
+ // To avoid uninitialized member warnings, put slot_ in an anonymous union.
// The member is not initialized on singleton and end iterators.
union {
slot_type* slot_;
@@ -715,7 +787,6 @@
: ctrl_(EmptyGroup()), settings_(0, hash, eq, alloc) {
if (bucket_count) {
capacity_ = NormalizeCapacity(bucket_count);
- reset_growth_left();
initialize_slots();
}
}
@@ -821,7 +892,7 @@
// than a full `insert`.
for (const auto& v : that) {
const size_t hash = PolicyTraits::apply(HashElement{hash_ref()}, v);
- auto target = find_first_non_full(hash);
+ auto target = find_first_non_full(ctrl_, hash, capacity_);
set_ctrl(target.offset, H2(hash));
emplace_at(target.offset, v);
infoz_.RecordInsert(hash, target.probe_length);
@@ -895,12 +966,12 @@
it.skip_empty_or_deleted();
return it;
}
- iterator end() { return {ctrl_ + capacity_}; }
+ iterator end() { return {}; }
const_iterator begin() const {
return const_cast<raw_hash_set*>(this)->begin();
}
- const_iterator end() const { return const_cast<raw_hash_set*>(this)->end(); }
+ const_iterator end() const { return {}; }
const_iterator cbegin() const { return begin(); }
const_iterator cend() const { return end(); }
@@ -938,8 +1009,11 @@
//
// flat_hash_map<std::string, int> m;
// m.insert(std::make_pair("abc", 42));
+ // TODO(cheshire): A type alias T2 is introduced as a workaround for the nvcc
+ // bug.
template <class T, RequiresInsertable<T> = 0,
- typename std::enable_if<IsDecomposable<T>::value, int>::type = 0,
+ class T2 = T,
+ typename std::enable_if<IsDecomposable<T2>::value, int>::type = 0,
T* = nullptr>
std::pair<iterator, bool> insert(T&& value) {
return emplace(std::forward<T>(value));
@@ -975,8 +1049,10 @@
return emplace(std::move(value));
}
- template <class T, RequiresInsertable<T> = 0,
- typename std::enable_if<IsDecomposable<T>::value, int>::type = 0,
+ // TODO(cheshire): A type alias T2 is introduced as a workaround for the nvcc
+ // bug.
+ template <class T, RequiresInsertable<T> = 0, class T2 = T,
+ typename std::enable_if<IsDecomposable<T2>::value, int>::type = 0,
T* = nullptr>
iterator insert(const_iterator, T&& value) {
return insert(std::forward<T>(value)).first;
@@ -998,7 +1074,7 @@
template <class InputIt>
void insert(InputIt first, InputIt last) {
- for (; first != last; ++first) insert(*first);
+ for (; first != last; ++first) emplace(*first);
}
template <class T, RequiresNotInit<T> = 0, RequiresInsertable<const T&> = 0>
@@ -1025,7 +1101,9 @@
}
iterator insert(const_iterator, node_type&& node) {
- return insert(std::move(node)).first;
+ auto res = insert(std::move(node));
+ node = std::move(res.node);
+ return res.position;
}
// This overload kicks in if we can deduce the key from args. This enables us
@@ -1050,8 +1128,7 @@
template <class... Args, typename std::enable_if<
!IsDecomposable<Args...>::value, int>::type = 0>
std::pair<iterator, bool> emplace(Args&&... args) {
- typename std::aligned_storage<sizeof(slot_type), alignof(slot_type)>::type
- raw;
+ alignas(slot_type) unsigned char raw[sizeof(slot_type)];
slot_type* slot = reinterpret_cast<slot_type*>(&raw);
PolicyTraits::construct(&alloc_ref(), slot, std::forward<Args>(args)...);
@@ -1067,10 +1144,15 @@
// Extension API: support for lazy emplace.
//
// Looks up key in the table. If found, returns the iterator to the element.
- // Otherwise calls f with one argument of type raw_hash_set::constructor. f
- // MUST call raw_hash_set::constructor with arguments as if a
- // raw_hash_set::value_type is constructed, otherwise the behavior is
- // undefined.
+ // Otherwise calls `f` with one argument of type `raw_hash_set::constructor`.
+ //
+ // `f` must abide by several restrictions:
+ // - it MUST call `raw_hash_set::constructor` with arguments as if a
+ // `raw_hash_set::value_type` is constructed,
+ // - it MUST NOT access the container before the call to
+ // `raw_hash_set::constructor`, and
+ // - it MUST NOT erase the lazily emplaced element.
+ // Doing any of these is undefined behavior.
//
// For example:
//
@@ -1150,7 +1232,7 @@
// This overload is necessary because otherwise erase<K>(const K&) would be
// a better match if non-const iterator is passed as an argument.
void erase(iterator it) {
- assert(it != end());
+ AssertIsFull(it.ctrl_);
PolicyTraits::destroy(&alloc_ref(), it.slot_);
erase_meta_only(it);
}
@@ -1167,12 +1249,14 @@
template <typename H, typename E>
void merge(raw_hash_set<Policy, H, E, Alloc>& src) { // NOLINT
assert(this != &src);
- for (auto it = src.begin(), e = src.end(); it != e; ++it) {
+ for (auto it = src.begin(), e = src.end(); it != e;) {
+ auto next = std::next(it);
if (PolicyTraits::apply(InsertSlot<false>{*this, std::move(*it.slot_)},
PolicyTraits::element(it.slot_))
.second) {
src.erase_meta_only(it);
}
+ it = next;
}
}
@@ -1182,6 +1266,7 @@
}
node_type extract(const_iterator position) {
+ AssertIsFull(position.inner_.ctrl_);
auto node =
CommonAccess::Transfer<node_type>(alloc_ref(), position.inner_.slot_);
erase_meta_only(position);
@@ -1198,8 +1283,8 @@
void swap(raw_hash_set& that) noexcept(
IsNoThrowSwappable<hasher>() && IsNoThrowSwappable<key_equal>() &&
- (!AllocTraits::propagate_on_container_swap::value ||
- IsNoThrowSwappable<allocator_type>())) {
+ IsNoThrowSwappable<allocator_type>(
+ typename AllocTraits::propagate_on_container_swap{})) {
using std::swap;
swap(ctrl_, that.ctrl_);
swap(slots_, that.slots_);
@@ -1209,12 +1294,8 @@
swap(hash_ref(), that.hash_ref());
swap(eq_ref(), that.eq_ref());
swap(infoz_, that.infoz_);
- if (AllocTraits::propagate_on_container_swap::value) {
- swap(alloc_ref(), that.alloc_ref());
- } else {
- // If the allocators do not compare equal it is officially undefined
- // behavior. We choose to do nothing.
- }
+ SwapAlloc(alloc_ref(), that.alloc_ref(),
+ typename AllocTraits::propagate_on_container_swap{});
}
void rehash(size_t n) {
@@ -1233,7 +1314,12 @@
}
}
- void reserve(size_t n) { rehash(GrowthToLowerboundCapacity(n)); }
+ void reserve(size_t n) {
+ size_t m = GrowthToLowerboundCapacity(n);
+ if (m > capacity_) {
+ resize(NormalizeCapacity(m));
+ }
+ }
// Extension API: support for heterogeneous keys.
//
@@ -1258,7 +1344,7 @@
void prefetch(const key_arg<K>& key) const {
(void)key;
#if defined(__GNUC__)
- auto seq = probe(hash_ref()(key));
+ auto seq = probe(ctrl_, hash_ref()(key), capacity_);
__builtin_prefetch(static_cast<const void*>(ctrl_ + seq.offset()));
__builtin_prefetch(static_cast<const void*>(slots_ + seq.offset()));
#endif // __GNUC__
@@ -1273,7 +1359,7 @@
// called heterogeneous key support.
template <class K = key_type>
iterator find(const key_arg<K>& key, size_t hash) {
- auto seq = probe(hash);
+ auto seq = probe(ctrl_, hash, capacity_);
while (true) {
Group g{ctrl_ + seq.offset()};
for (int i : g.Match(H2(hash))) {
@@ -1284,6 +1370,7 @@
}
if (ABSL_PREDICT_TRUE(g.MatchEmpty())) return end();
seq.next();
+ assert(seq.index() < capacity_ && "full table!");
}
}
template <class K = key_type>
@@ -1494,7 +1581,7 @@
if (IsFull(old_ctrl[i])) {
size_t hash = PolicyTraits::apply(HashElement{hash_ref()},
PolicyTraits::element(old_slots + i));
- auto target = find_first_non_full(hash);
+ auto target = find_first_non_full(ctrl_, hash, capacity_);
size_t new_i = target.offset;
total_probe_length += target.probe_length;
set_ctrl(new_i, H2(hash));
@@ -1513,7 +1600,7 @@
void drop_deletes_without_resize() ABSL_ATTRIBUTE_NOINLINE {
assert(IsValidCapacity(capacity_));
- assert(!is_small());
+ assert(!is_small(capacity_));
// Algorithm:
// - mark all DELETED slots as EMPTY
// - mark all FULL slots as DELETED
@@ -1531,15 +1618,14 @@
// mark target as FULL
// repeat procedure for current slot with moved from element (target)
ConvertDeletedToEmptyAndFullToDeleted(ctrl_, capacity_);
- typename std::aligned_storage<sizeof(slot_type), alignof(slot_type)>::type
- raw;
+ alignas(slot_type) unsigned char raw[sizeof(slot_type)];
size_t total_probe_length = 0;
slot_type* slot = reinterpret_cast<slot_type*>(&raw);
for (size_t i = 0; i != capacity_; ++i) {
if (!IsDeleted(ctrl_[i])) continue;
size_t hash = PolicyTraits::apply(HashElement{hash_ref()},
PolicyTraits::element(slots_ + i));
- auto target = find_first_non_full(hash);
+ auto target = find_first_non_full(ctrl_, hash, capacity_);
size_t new_i = target.offset;
total_probe_length += target.probe_length;
@@ -1547,7 +1633,8 @@
// If they do, we don't need to move the object as it falls already in the
// best probe we can.
const auto probe_index = [&](size_t pos) {
- return ((pos - probe(hash).offset()) & capacity_) / Group::kWidth;
+ return ((pos - probe(ctrl_, hash, capacity_).offset()) & capacity_) /
+ Group::kWidth;
};
// Element doesn't move.
@@ -1591,7 +1678,7 @@
bool has_element(const value_type& elem) const {
size_t hash = PolicyTraits::apply(HashElement{hash_ref()}, elem);
- auto seq = probe(hash);
+ auto seq = probe(ctrl_, hash, capacity_);
while (true) {
Group g{ctrl_ + seq.offset()};
for (int i : g.Match(H2(hash))) {
@@ -1606,41 +1693,6 @@
return false;
}
- // Probes the raw_hash_set with the probe sequence for hash and returns the
- // pointer to the first empty or deleted slot.
- // NOTE: this function must work with tables having both kEmpty and kDelete
- // in one group. Such tables appears during drop_deletes_without_resize.
- //
- // This function is very useful when insertions happen and:
- // - the input is already a set
- // - there are enough slots
- // - the element with the hash is not in the table
- struct FindInfo {
- size_t offset;
- size_t probe_length;
- };
- FindInfo find_first_non_full(size_t hash) {
- auto seq = probe(hash);
- while (true) {
- Group g{ctrl_ + seq.offset()};
- auto mask = g.MatchEmptyOrDeleted();
- if (mask) {
-#if !defined(NDEBUG)
- // We want to add entropy even when ASLR is not enabled.
- // In debug build we will randomly insert in either the front or back of
- // the group.
- // TODO(kfm,sbenza): revisit after we do unconditional mixing
- if (!is_small() && ShouldInsertBackwards(hash, ctrl_)) {
- return {seq.offset(mask.HighestBitSet()), seq.index()};
- }
-#endif
- return {seq.offset(mask.LowestBitSet()), seq.index()};
- }
- assert(seq.index() < capacity_ && "full table!");
- seq.next();
- }
- }
-
// TODO(alkis): Optimize this assuming *this and that don't overlap.
raw_hash_set& move_assign(raw_hash_set&& that, std::true_type) {
raw_hash_set tmp(std::move(that));
@@ -1657,7 +1709,7 @@
template <class K>
std::pair<size_t, bool> find_or_prepare_insert(const K& key) {
auto hash = hash_ref()(key);
- auto seq = probe(hash);
+ auto seq = probe(ctrl_, hash, capacity_);
while (true) {
Group g{ctrl_ + seq.offset()};
for (int i : g.Match(H2(hash))) {
@@ -1668,16 +1720,17 @@
}
if (ABSL_PREDICT_TRUE(g.MatchEmpty())) break;
seq.next();
+ assert(seq.index() < capacity_ && "full table!");
}
return {prepare_insert(hash), true};
}
size_t prepare_insert(size_t hash) ABSL_ATTRIBUTE_NOINLINE {
- auto target = find_first_non_full(hash);
+ auto target = find_first_non_full(ctrl_, hash, capacity_);
if (ABSL_PREDICT_FALSE(growth_left() == 0 &&
!IsDeleted(ctrl_[target.offset]))) {
rehash_and_grow_if_necessary();
- target = find_first_non_full(hash);
+ target = find_first_non_full(ctrl_, hash, capacity_);
}
++size_;
growth_left() -= IsEmpty(ctrl_[target.offset]);
@@ -1710,10 +1763,6 @@
private:
friend struct RawHashSetTestOnlyAccess;
- probe_seq<Group::kWidth> probe(size_t hash) const {
- return probe_seq<Group::kWidth>(H1(hash, ctrl_), capacity_);
- }
-
// Reset all ctrl bytes back to kEmpty, except the sentinel.
void reset_ctrl() {
std::memset(ctrl_, kEmpty, capacity_ + Group::kWidth);
@@ -1743,22 +1792,6 @@
size_t& growth_left() { return settings_.template get<0>(); }
- // The representation of the object has two modes:
- // - small: For capacities < kWidth-1
- // - large: For the rest.
- //
- // Differences:
- // - In small mode we are able to use the whole capacity. The extra control
- // bytes give us at least one "empty" control byte to stop the iteration.
- // This is important to make 1 a valid capacity.
- //
- // - In small mode only the first `capacity()` control bytes after the
- // sentinel are valid. The rest contain dummy kEmpty values that do not
- // represent a real slot. This is important to take into account on
- // find_first_non_full(), where we never try ShouldInsertBackwards() for
- // small tables.
- bool is_small() const { return capacity_ < Group::kWidth - 1; }
-
hasher& hash_ref() { return settings_.template get<1>(); }
const hasher& hash_ref() const { return settings_.template get<1>(); }
key_equal& eq_ref() { return settings_.template get<2>(); }
@@ -1781,6 +1814,17 @@
settings_{0, hasher{}, key_equal{}, allocator_type{}};
};
+// Erases all elements that satisfy the predicate `pred` from the container `c`.
+template <typename P, typename H, typename E, typename A, typename Predicate>
+void EraseIf(Predicate pred, raw_hash_set<P, H, E, A>* c) {
+ for (auto it = c->begin(), last = c->end(); it != last;) {
+ auto copy_it = it++;
+ if (pred(*copy_it)) {
+ c->erase(copy_it);
+ }
+ }
+}
+
namespace hashtable_debug_internal {
template <typename Set>
struct HashtableDebugAccess<Set, absl::void_t<typename Set::raw_hash_set>> {
@@ -1791,7 +1835,7 @@
const typename Set::key_type& key) {
size_t num_probes = 0;
size_t hash = set.hash_ref()(key);
- auto seq = set.probe(hash);
+ auto seq = probe(set.ctrl_, hash, set.capacity_);
while (true) {
container_internal::Group g{set.ctrl_ + seq.offset()};
for (int i : g.Match(container_internal::H2(hash))) {
@@ -1842,6 +1886,7 @@
} // namespace hashtable_debug_internal
} // namespace container_internal
+ABSL_NAMESPACE_END
} // namespace absl
#endif // ABSL_CONTAINER_INTERNAL_RAW_HASH_SET_H_
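
As a usage sketch of the erase-while-iterating pattern that the newly added container_internal::EraseIf helper encodes: the loop below mirrors it against absl::flat_hash_set (which is built on raw_hash_set). The set contents and predicate are illustrative only, not taken from the patch.

#include <iostream>

#include "absl/container/flat_hash_set.h"

int main() {
  absl::flat_hash_set<int> s = {1, 2, 3, 4, 5, 6};
  // Copy the iterator, then advance it before erasing. erase(iterator)
  // invalidates only the erased position, so the remaining iteration stays
  // valid; this is the same pattern EraseIf uses internally.
  for (auto it = s.begin(), last = s.end(); it != last;) {
    auto copy_it = it++;
    if (*copy_it % 2 == 0) {
      s.erase(copy_it);
    }
  }
  for (int v : s) std::cout << v << " ";  // Remaining (odd) elements.
  std::cout << "\n";
  return 0;
}

With the hardened iterator checks added above (AssertIsFull/AssertIsValid), dereferencing or advancing copy_it after the erase would trip ABSL_HARDENING_ASSERT in hardened or debug builds rather than silently reading a dead slot.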