// Copyright 2018 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "absl/container/flat_hash_map.h"

#include <memory>

#include "absl/base/internal/raw_logging.h"
#include "absl/container/internal/hash_generator_testing.h"
#include "absl/container/internal/unordered_map_constructor_test.h"
#include "absl/container/internal/unordered_map_lookup_test.h"
#include "absl/container/internal/unordered_map_members_test.h"
#include "absl/container/internal/unordered_map_modifiers_test.h"
#include "absl/types/any.h"

namespace absl {
ABSL_NAMESPACE_BEGIN
namespace container_internal {
namespace {
using ::absl::container_internal::hash_internal::Enum;
using ::absl::container_internal::hash_internal::EnumClass;
using ::testing::_;
using ::testing::IsEmpty;
using ::testing::Pair;
using ::testing::UnorderedElementsAre;

// Check that absl::flat_hash_map works in a global constructor.
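// ABSL_RAW_CHECK is used rather than gtest assertions because this code runs
// before main(), i.e. before googletest has been initialized.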
struct BeforeMain {
  BeforeMain() {
    absl::flat_hash_map<int, int> x;
    x.insert({1, 1});
    ABSL_RAW_CHECK(x.find(0) == x.end(), "x should not contain 0");
    auto it = x.find(1);
    ABSL_RAW_CHECK(it != x.end(), "x should contain 1");
    ABSL_RAW_CHECK(it->second, "1 should map to 1");
  }
};
const BeforeMain before_main;

template <class K, class V>
using Map = flat_hash_map<K, V, StatefulTestingHash, StatefulTestingEqual,
                          Alloc<std::pair<const K, V>>>;

static_assert(!std::is_standard_layout<NonStandardLayout>(), "");

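// NonStandardLayout is used below both as a key and as a mapped type, so the
// typed test suites also cover non-standard-layout element types.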
using MapTypes =
    ::testing::Types<Map<int, int>, Map<std::string, int>,
                     Map<Enum, std::string>, Map<EnumClass, int>,
                     Map<int, NonStandardLayout>, Map<NonStandardLayout, int>>;

INSTANTIATE_TYPED_TEST_SUITE_P(FlatHashMap, ConstructorTest, MapTypes);
INSTANTIATE_TYPED_TEST_SUITE_P(FlatHashMap, LookupTest, MapTypes);
INSTANTIATE_TYPED_TEST_SUITE_P(FlatHashMap, MembersTest, MapTypes);
INSTANTIATE_TYPED_TEST_SUITE_P(FlatHashMap, ModifiersTest, MapTypes);

using UniquePtrMapTypes = ::testing::Types<Map<int, std::unique_ptr<int>>>;

INSTANTIATE_TYPED_TEST_SUITE_P(FlatHashMap, UniquePtrModifiersTest,
                               UniquePtrMapTypes);

TEST(FlatHashMap, StandardLayout) {
  struct Int {
    explicit Int(size_t value) : value(value) {}
    Int() : value(0) { ADD_FAILURE(); }
    Int(const Int& other) : value(other.value) { ADD_FAILURE(); }
    Int(Int&&) = default;
    bool operator==(const Int& other) const { return value == other.value; }
    size_t value;
  };
  static_assert(std::is_standard_layout<Int>(), "");

  struct Hash {
    size_t operator()(const Int& obj) const { return obj.value; }
  };

  // Verify that neither the key nor the value get default-constructed or
  // copy-constructed.
  {
    flat_hash_map<Int, Int, Hash> m;
    m.try_emplace(Int(1), Int(2));
    m.try_emplace(Int(3), Int(4));
    m.erase(Int(1));
    m.rehash(2 * m.bucket_count());
  }
  {
    flat_hash_map<Int, Int, Hash> m;
    m.try_emplace(Int(1), Int(2));
    m.try_emplace(Int(3), Int(4));
    m.erase(Int(1));
    m.clear();
  }
}

// gcc becomes unhappy if this is inside the method, so pull it out here.
struct Ballast {};

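// Iterates over tables as they grow, primarily so that MSan can catch any
// reads of uninitialized memory during iteration.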
TEST(FlatHashMap, IteratesMsan) {
  // Because SwissTable randomizes on pointer addresses, we keep old tables
  // around to ensure we don't reuse old memory.
  std::vector<absl::flat_hash_map<int, Ballast>> garbage;
  for (int i = 0; i < 100; ++i) {
    absl::flat_hash_map<int, Ballast> t;
    for (int j = 0; j < 100; ++j) {
      t[j];
      for (const auto& p : t) EXPECT_THAT(p, Pair(_, _));
    }
    garbage.push_back(std::move(t));
  }
}

// Demonstration of the "Lazy Key" pattern. This uses heterogeneous insert to
// avoid creating expensive key elements when the item is already present in
// the map.
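// The pattern works because both Hash and Eq below are marked transparent
// (via `is_transparent`) and provide overloads for LazyInt, so lookups hash
// and compare the LazyInt directly; the conversion to size_t only happens
// when an element actually has to be inserted.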
struct LazyInt {
  explicit LazyInt(size_t value, int* tracker)
      : value(value), tracker(tracker) {}

  explicit operator size_t() const {
    ++*tracker;
    return value;
  }

  size_t value;
  int* tracker;
};

struct Hash {
  using is_transparent = void;
  int* tracker;
  size_t operator()(size_t obj) const {
    ++*tracker;
    return obj;
  }
  size_t operator()(const LazyInt& obj) const {
    ++*tracker;
    return obj.value;
  }
};

struct Eq {
  using is_transparent = void;
  bool operator()(size_t lhs, size_t rhs) const {
    return lhs == rhs;
  }
  bool operator()(size_t lhs, const LazyInt& rhs) const {
    return lhs == rhs.value;
  }
};

TEST(FlatHashMap, LazyKeyPattern) {
  // Hash-call counts are only guaranteed in opt mode; in debug builds,
  // internal assertions can cause extra calls to hash, so those checks are
  // guarded by NDEBUG.
  int conversions = 0;
  int hashes = 0;
  flat_hash_map<size_t, size_t, Hash, Eq> m(0, Hash{&hashes});
  m.reserve(3);

  m[LazyInt(1, &conversions)] = 1;
  EXPECT_THAT(m, UnorderedElementsAre(Pair(1, 1)));
  EXPECT_EQ(conversions, 1);
#ifdef NDEBUG
  EXPECT_EQ(hashes, 1);
#endif

  m[LazyInt(1, &conversions)] = 2;
  EXPECT_THAT(m, UnorderedElementsAre(Pair(1, 2)));
  EXPECT_EQ(conversions, 1);
#ifdef NDEBUG
  EXPECT_EQ(hashes, 2);
#endif

  m.try_emplace(LazyInt(2, &conversions), 3);
  EXPECT_THAT(m, UnorderedElementsAre(Pair(1, 2), Pair(2, 3)));
  EXPECT_EQ(conversions, 2);
#ifdef NDEBUG
  EXPECT_EQ(hashes, 3);
#endif

  m.try_emplace(LazyInt(2, &conversions), 4);
  EXPECT_THAT(m, UnorderedElementsAre(Pair(1, 2), Pair(2, 3)));
  EXPECT_EQ(conversions, 2);
#ifdef NDEBUG
  EXPECT_EQ(hashes, 4);
#endif
}
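
// For reference, the same pattern applies to keys that are genuinely
// expensive to construct (e.g. large strings): give the map a transparent
// Hash/Eq pair that understands a cheap "lazy" handle and pass that handle
// to operator[] or try_emplace. A commented sketch, with illustrative names
// only (LazyStr, TransparentHash, and TransparentEq are not defined in this
// file):
//
//   flat_hash_map<std::string, int, TransparentHash, TransparentEq> counts;
//   // LazyStr hashes and compares like a string_view; it is converted into
//   // a std::string key only if the element has to be inserted.
//   counts[LazyStr(view)] += 1;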
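// A bit-field lvalue cannot bind to a non-const reference, so the calls below
// check that the map's lookup and insertion APIs still accept one; the test
// only needs to compile and run without crashing.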
TEST(FlatHashMap, BitfieldArgument) {
  union {
    int n : 1;
  };
  n = 0;
  flat_hash_map<int, int> m;
  m.erase(n);
  m.count(n);
  m.prefetch(n);
  m.find(n);
  m.contains(n);
  m.equal_range(n);
  m.insert_or_assign(n, n);
  m.insert_or_assign(m.end(), n, n);
  m.try_emplace(n);
  m.try_emplace(m.end(), n);
  m.at(n);
  m[n];
}

TEST(FlatHashMap, MergeExtractInsert) {
  // We can't test mutable keys or non-copyable keys with flat_hash_map.
  // Test that the nodes have the proper API.
  absl::flat_hash_map<int, int> m = {{1, 7}, {2, 9}};
  auto node = m.extract(1);
  EXPECT_TRUE(node);
  EXPECT_EQ(node.key(), 1);
  EXPECT_EQ(node.mapped(), 7);
  EXPECT_THAT(m, UnorderedElementsAre(Pair(2, 9)));

  node.mapped() = 17;
  m.insert(std::move(node));
  EXPECT_THAT(m, UnorderedElementsAre(Pair(1, 17), Pair(2, 9)));
}

bool FirstIsEven(std::pair<const int, int> p) { return p.first % 2 == 0; }

TEST(FlatHashMap, EraseIf) {
  // Erase all elements.
  {
    flat_hash_map<int, int> s = {{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}};
    erase_if(s, [](std::pair<const int, int>) { return true; });
    EXPECT_THAT(s, IsEmpty());
  }
  // Erase no elements.
  {
    flat_hash_map<int, int> s = {{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}};
    erase_if(s, [](std::pair<const int, int>) { return false; });
    EXPECT_THAT(s, UnorderedElementsAre(Pair(1, 1), Pair(2, 2), Pair(3, 3),
                                        Pair(4, 4), Pair(5, 5)));
  }
  // Erase specific elements.
  {
    flat_hash_map<int, int> s = {{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}};
    erase_if(s,
             [](std::pair<const int, int> kvp) { return kvp.first % 2 == 1; });
    EXPECT_THAT(s, UnorderedElementsAre(Pair(2, 2), Pair(4, 4)));
  }
  // Predicate is function reference.
  {
    flat_hash_map<int, int> s = {{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}};
    erase_if(s, FirstIsEven);
    EXPECT_THAT(s, UnorderedElementsAre(Pair(1, 1), Pair(3, 3), Pair(5, 5)));
  }
  // Predicate is function pointer.
  {
    flat_hash_map<int, int> s = {{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}};
    erase_if(s, &FirstIsEven);
    EXPECT_THAT(s, UnorderedElementsAre(Pair(1, 1), Pair(3, 3), Pair(5, 5)));
  }
}

// This test requires std::launder for mutable key access in node handles.
#if defined(__cpp_lib_launder) && __cpp_lib_launder >= 201606
TEST(FlatHashMap, NodeHandleMutableKeyAccess) {
  flat_hash_map<std::string, std::string> map;

  map["key1"] = "mapped";

  auto nh = map.extract(map.begin());
  nh.key().resize(3);
  map.insert(std::move(nh));

  EXPECT_THAT(map, testing::ElementsAre(Pair("key", "mapped")));
}
#endif

}  // namespace
}  // namespace container_internal
ABSL_NAMESPACE_END
}  // namespace absl