Import hashbrown-0.9.1 am: cdc803b2d8

Original change: https://android-review.googlesource.com/c/platform/external/rust/crates/hashbrown/+/1493419

Change-Id: Iab874ec814955d6ddca8572af67579bbeb3c14b7
diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json
new file mode 100644
index 0000000..845b5e7
--- /dev/null
+++ b/.cargo_vcs_info.json
@@ -0,0 +1,5 @@
+{
+  "git": {
+    "sha1": "34c11891e13fa3c0d08b0540e869aace9d347c26"
+  }
+}
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..6936990
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+/target
+**/*.rs.bk
+Cargo.lock
diff --git a/Android.bp b/Android.bp
new file mode 100644
index 0000000..8f2981a
--- /dev/null
+++ b/Android.bp
@@ -0,0 +1,20 @@
+// This file is generated by cargo2android.py --device --run --dependencies.
+
+rust_library {
+    name: "libhashbrown",
+    host_supported: true,
+    crate_name: "hashbrown",
+    srcs: ["src/lib.rs"],
+    edition: "2018",
+    features: [
+        "ahash",
+        "default",
+        "inline-more",
+    ],
+    rustlibs: [
+        "libahash",
+    ],
+}
+
+// dependent_library ["feature_list"]
+//   ahash-0.4.6
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..b6eb671
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,294 @@
+# Change Log
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](http://keepachangelog.com/)
+and this project adheres to [Semantic Versioning](http://semver.org/).
+
+## [Unreleased]
+
+## [v0.9.1] - 2020-09-28
+
+### Added
+- Added safe methods to `RawTable` (#202):
+  - `get`: combines `find` and `as_ref`
+  - `get_mut`: combines `find` and `as_mut`
+  - `insert_entry`: combines `insert` and `as_mut`
+  - `remove_entry`: combines `find` and `remove`
+  - `erase_entry`: combines `find` and `erase`
+
+### Changed
+- Removed `from_key_hashed_nocheck`'s `Q: Hash`. (#200)
+- Made `RawTable::drain` safe. (#201)
+
+## [v0.9.0] - 2020-09-03
+
+### Fixed
+- `drain_filter` now removes and yields items that do match the predicate,
+  rather than items that don't.  This is a **breaking change** to match the
+  behavior of the `drain_filter` methods in `std`. (#187)
+
+### Added
+- Added `replace_entry_with` to `OccupiedEntry`, and `and_replace_entry_with` to `Entry`. (#190)
+- Implemented `FusedIterator` and `size_hint` for `DrainFilter`. (#188)
+
+### Changed
+- The minimum Rust version has been bumped to 1.36 (due to `crossbeam` dependency). (#193)
+- Updated `ahash` dependency to 0.4. (#198)
+- `HashMap::with_hasher` and `HashSet::with_hasher` are now `const fn`. (#195)
+- Removed `T: Hash + Eq` and `S: BuildHasher` bounds on `HashSet::new`,
+  `with_capacity`, `with_hasher`, and `with_capacity_and_hasher`.  (#185)
+
+## [v0.8.2] - 2020-08-08
+
+### Changed
+- Avoid closures to improve compile times. (#183)
+- Do not iterate to drop if empty. (#182)
+
+## [v0.8.1] - 2020-07-16
+
+### Added
+- Added `erase` and `remove` to `RawTable`. (#171)
+- Added `try_with_capacity` to `RawTable`. (#174)
+- Added methods that allow re-using a `RawIter` for `RawDrain`,
+  `RawIntoIter`, and `RawParIter`. (#175)
+- Added `reflect_remove` and `reflect_insert` to `RawIter`. (#175)
+- Added a `drain_filter` function to `HashSet`. (#179)
+
+### Changed
+- Deprecated `RawTable::erase_no_drop` in favor of `erase` and `remove`. (#176)
+- `insert_no_grow` is now exposed under the `"raw"` feature. (#180)
+
+## [v0.8.0] - 2020-06-18
+
+### Fixed
+- Marked `RawTable::par_iter` as `unsafe`. (#157)
+
+### Changed
+- Reduced the size of `HashMap`. (#159)
+- No longer create tables with a capacity of 1 element. (#162)
+- Removed `K: Eq + Hash` bounds on `retain`. (#163)
+- Pulled in `HashMap` changes from rust-lang/rust (#164):
+  - `extend_one` support on nightly.
+  - `CollectionAllocErr` renamed to `TryReserveError`.
+  - Added `HashSet::get_or_insert_owned`.
+  - `Default` for `HashSet` no longer requires `T: Eq + Hash` and `S: BuildHasher`.
+
+## [v0.7.2] - 2020-04-27
+
+### Added
+- Added `or_insert_with_key` to `Entry`. (#152)
+
+### Fixed
+- Partially reverted `Clone` optimization which was unsound. (#154)
+
+### Changed
+- Disabled use of `const-random` by default, which prevented reproducible builds. (#155)
+- Optimized `repeat` function. (#150)
+- Use `NonNull` for buckets, which improves codegen for iterators. (#148)
+
+## [v0.7.1] - 2020-03-16
+
+### Added
+- Added `HashMap::get_key_value_mut`. (#145)
+
+### Changed
+- Optimized `Clone` implementation. (#146)
+
+## [v0.7.0] - 2020-01-31
+
+### Added
+- Added a `drain_filter` function to `HashMap`. (#135)
+
+### Changed
+- Updated `ahash` dependency to 0.3. (#141)
+- Optimized set union and intersection. (#130)
+- `raw_entry` can now be used without requiring `S: BuildHasher`. (#123)
+- `RawTable::bucket_index` can now be used under the `raw` feature. (#128)
+
+## [v0.6.3] - 2019-10-31
+
+### Added
+- Added an `ahash-compile-time-rng` feature (enabled by default) which allows disabling the
+  `compile-time-rng` feature in `ahash` to work around a Cargo bug. (#125)
+
+## [v0.6.2] - 2019-10-23
+
+### Added
+- Added an `inline-more` feature (enabled by default) which allows choosing a tradeoff between 
+  runtime performance and compilation time. (#119)
+
+## [v0.6.1] - 2019-10-04
+
+### Added
+- Added `Entry::insert` and `RawEntryMut::insert`. (#118)
+
+### Changed
+- `Group::static_empty` was changed from a `const` to a `static` (#116).
+
+## [v0.6.0] - 2019-08-13
+
+### Fixed
+- Fixed AHash accidentally depending on `std`. (#110)
+
+### Changed
+- The minimum Rust version has been bumped to 1.32 (due to `rand` dependency).
+
+## ~~[v0.5.1] - 2019-08-04~~
+
+This release was _yanked_ due to a breaking change for users of `no-default-features`.
+
+### Added
+- The experimental and unsafe `RawTable` API is available under the "raw" feature. (#108)
+- Added entry-like methods for `HashSet`. (#98)
+
+### Changed
+- Changed the default hasher from FxHash to AHash. (#97)
+- `hashbrown` is now fully `no_std` on recent Rust versions (1.36+). (#96)
+
+### Fixed
+- We now avoid growing the table during insertions when it wasn't necessary. (#106)
+- `RawOccupiedEntryMut` now properly implements `Send` and `Sync`. (#100)
+- Relaxed `lazy_static` version. (#92)
+
+## [v0.5.0] - 2019-06-12
+
+### Fixed
+- Resize with a more conservative amount of space after deletions. (#86)
+
+### Changed
+- Exposed the Layout of the failed allocation in CollectionAllocErr::AllocErr. (#89)
+
+## [v0.4.0] - 2019-05-30
+
+### Fixed
+- Fixed `Send` trait bounds on `IterMut` not matching the libstd one. (#82)
+
+## [v0.3.1] - 2019-05-30
+
+### Fixed
+- Fixed incorrect use of slice in unsafe code. (#80)
+
+## [v0.3.0] - 2019-04-23
+
+### Changed
+- Changed shrink_to to not panic if min_capacity < capacity. (#67)
+
+### Fixed
+- Worked around emscripten bug emscripten-core/emscripten-fastcomp#258. (#66)
+
+## [v0.2.2] - 2019-04-16
+
+### Fixed
+- Inlined non-nightly lowest_set_bit_nonzero. (#64)
+- Fixed build on latest nightly. (#65)
+
+## [v0.2.1] - 2019-04-14
+
+### Changed
+- Use for_each in map Extend and FromIterator. (#58)
+- Improved worst-case performance of HashSet.is_subset. (#61)
+
+### Fixed
+- Removed incorrect debug_assert. (#60)
+
+## [v0.2.0] - 2019-03-31
+
+### Changed
+- The code has been updated to Rust 2018 edition. This means that the minimum
+  Rust version has been bumped to 1.31 (2018 edition).
+
+### Added
+- Added `insert_with_hasher` to the raw_entry API to allow `K: !(Hash + Eq)`. (#54)
+- Added support for using hashbrown as the hash table implementation in libstd. (#46)
+
+### Fixed
+- Fixed cargo build with minimal-versions. (#45)
+- Fixed `#[may_dangle]` attributes to match the libstd `HashMap`. (#46)
+- ZST keys and values are now handled properly. (#46)
+
+## [v0.1.8] - 2019-01-14
+
+### Added
+- Rayon parallel iterator support (#37)
+- `raw_entry` support (#31)
+- `#[may_dangle]` on nightly (#31)
+- `try_reserve` support (#31)
+
+### Fixed
+- Fixed variance on `IterMut`. (#31)
+
+## [v0.1.7] - 2018-12-05
+
+### Fixed
+- Fixed non-SSE version of convert_special_to_empty_and_full_to_deleted. (#32)
+- Fixed overflow in rehash_in_place. (#33)
+
+## [v0.1.6] - 2018-11-17
+
+### Fixed
+- Fixed compile error on nightly. (#29)
+
+## [v0.1.5] - 2018-11-08
+
+### Fixed
+- Fixed subtraction overflow in generic::Group::match_byte. (#28)
+
+## [v0.1.4] - 2018-11-04
+
+### Fixed
+- Fixed a bug in the `erase_no_drop` implementation. (#26)
+
+## [v0.1.3] - 2018-11-01
+
+### Added
+- Serde support. (#14)
+
+### Fixed
+- Make the compiler inline functions more aggressively. (#20)
+
+## [v0.1.2] - 2018-10-31
+
+### Fixed
+- `clear` segfaults when called on an empty table. (#13)
+
+## [v0.1.1] - 2018-10-30
+
+### Fixed
+- `erase_no_drop` optimization not triggering in the SSE2 implementation. (#3)
+- Missing `Send` and `Sync` for hash map and iterator types. (#7)
+- Bug when inserting into a table smaller than the group width. (#5)
+
+## v0.1.0 - 2018-10-29
+
+- Initial release
+
+[Unreleased]: https://github.com/rust-lang/hashbrown/compare/v0.9.1...HEAD
+[v0.9.1]: https://github.com/rust-lang/hashbrown/compare/v0.9.0...v0.9.1
+[v0.9.0]: https://github.com/rust-lang/hashbrown/compare/v0.8.2...v0.9.0
+[v0.8.2]: https://github.com/rust-lang/hashbrown/compare/v0.8.1...v0.8.2
+[v0.8.1]: https://github.com/rust-lang/hashbrown/compare/v0.8.0...v0.8.1
+[v0.8.0]: https://github.com/rust-lang/hashbrown/compare/v0.7.2...v0.8.0
+[v0.7.2]: https://github.com/rust-lang/hashbrown/compare/v0.7.1...v0.7.2
+[v0.7.1]: https://github.com/rust-lang/hashbrown/compare/v0.7.0...v0.7.1
+[v0.7.0]: https://github.com/rust-lang/hashbrown/compare/v0.6.3...v0.7.0
+[v0.6.3]: https://github.com/rust-lang/hashbrown/compare/v0.6.2...v0.6.3
+[v0.6.2]: https://github.com/rust-lang/hashbrown/compare/v0.6.1...v0.6.2
+[v0.6.1]: https://github.com/rust-lang/hashbrown/compare/v0.6.0...v0.6.1
+[v0.6.0]: https://github.com/rust-lang/hashbrown/compare/v0.5.1...v0.6.0
+[v0.5.1]: https://github.com/rust-lang/hashbrown/compare/v0.5.0...v0.5.1
+[v0.5.0]: https://github.com/rust-lang/hashbrown/compare/v0.4.0...v0.5.0
+[v0.4.0]: https://github.com/rust-lang/hashbrown/compare/v0.3.1...v0.4.0
+[v0.3.1]: https://github.com/rust-lang/hashbrown/compare/v0.3.0...v0.3.1
+[v0.3.0]: https://github.com/rust-lang/hashbrown/compare/v0.2.2...v0.3.0
+[v0.2.2]: https://github.com/rust-lang/hashbrown/compare/v0.2.1...v0.2.2
+[v0.2.1]: https://github.com/rust-lang/hashbrown/compare/v0.2.0...v0.2.1
+[v0.2.0]: https://github.com/rust-lang/hashbrown/compare/v0.1.8...v0.2.0
+[v0.1.8]: https://github.com/rust-lang/hashbrown/compare/v0.1.7...v0.1.8
+[v0.1.7]: https://github.com/rust-lang/hashbrown/compare/v0.1.6...v0.1.7
+[v0.1.6]: https://github.com/rust-lang/hashbrown/compare/v0.1.5...v0.1.6
+[v0.1.5]: https://github.com/rust-lang/hashbrown/compare/v0.1.4...v0.1.5
+[v0.1.4]: https://github.com/rust-lang/hashbrown/compare/v0.1.3...v0.1.4
+[v0.1.3]: https://github.com/rust-lang/hashbrown/compare/v0.1.2...v0.1.3
+[v0.1.2]: https://github.com/rust-lang/hashbrown/compare/v0.1.1...v0.1.2
+[v0.1.1]: https://github.com/rust-lang/hashbrown/compare/v0.1.0...v0.1.1
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644
index 0000000..7be0341
--- /dev/null
+++ b/Cargo.toml
@@ -0,0 +1,80 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+edition = "2018"
+name = "hashbrown"
+version = "0.9.1"
+authors = ["Amanieu d'Antras <amanieu@gmail.com>"]
+exclude = [".travis.yml", "bors.toml", "/ci/*"]
+description = "A Rust port of Google's SwissTable hash map"
+readme = "README.md"
+keywords = ["hash", "no_std", "hashmap", "swisstable"]
+categories = ["data-structures", "no-std"]
+license = "Apache-2.0/MIT"
+repository = "https://github.com/rust-lang/hashbrown"
+[package.metadata.docs.rs]
+features = ["nightly", "rayon", "serde", "raw"]
+[dependencies.ahash]
+version = "0.4.4"
+optional = true
+default-features = false
+
+[dependencies.alloc]
+version = "1.0.0"
+optional = true
+package = "rustc-std-workspace-alloc"
+
+[dependencies.compiler_builtins]
+version = "0.1.2"
+optional = true
+
+[dependencies.core]
+version = "1.0.0"
+optional = true
+package = "rustc-std-workspace-core"
+
+[dependencies.rayon]
+version = "1.0"
+optional = true
+
+[dependencies.serde]
+version = "1.0.25"
+optional = true
+default-features = false
+[dev-dependencies.doc-comment]
+version = "0.3.1"
+
+[dev-dependencies.lazy_static]
+version = "1.2"
+
+[dev-dependencies.rand]
+version = "0.7.3"
+features = ["small_rng"]
+
+[dev-dependencies.rayon]
+version = "1.0"
+
+[dev-dependencies.rustc-hash]
+version = "=1.0"
+
+[dev-dependencies.serde_test]
+version = "1.0"
+
+[features]
+ahash-compile-time-rng = ["ahash/compile-time-rng"]
+default = ["ahash", "inline-more"]
+inline-more = []
+nightly = []
+raw = []
+rustc-dep-of-std = ["nightly", "core", "compiler_builtins", "alloc", "rustc-internal-api"]
+rustc-internal-api = []
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
new file mode 100644
index 0000000..21bd5c2
--- /dev/null
+++ b/Cargo.toml.orig
@@ -0,0 +1,56 @@
+[package]
+name = "hashbrown"
+version = "0.9.1"
+authors = ["Amanieu d'Antras <amanieu@gmail.com>"]
+description = "A Rust port of Google's SwissTable hash map"
+license = "Apache-2.0/MIT"
+repository = "https://github.com/rust-lang/hashbrown"
+readme = "README.md"
+keywords = ["hash", "no_std", "hashmap", "swisstable"]
+categories = ["data-structures", "no-std"]
+exclude = [".travis.yml", "bors.toml", "/ci/*"]
+edition = "2018"
+
+[dependencies]
+# For the default hasher
+ahash = { version = "0.4.4", optional = true, default-features = false }
+
+# For external trait impls
+rayon = { version = "1.0", optional = true }
+serde = { version = "1.0.25", default-features = false, optional = true }
+
+# When built as part of libstd
+core = { version = "1.0.0", optional = true, package = "rustc-std-workspace-core" }
+compiler_builtins = { version = "0.1.2", optional = true }
+alloc = { version = "1.0.0", optional = true, package = "rustc-std-workspace-alloc" }
+
+[dev-dependencies]
+lazy_static = "1.2"
+rand = { version = "0.7.3", features = ["small_rng"] }
+rayon = "1.0"
+rustc-hash = "=1.0"
+serde_test = "1.0"
+doc-comment = "0.3.1"
+
+[features]
+default = ["ahash", "inline-more"]
+
+ahash-compile-time-rng = ["ahash/compile-time-rng"]
+nightly = []
+rustc-internal-api = []
+rustc-dep-of-std = [
+    "nightly",
+    "core",
+    "compiler_builtins",
+    "alloc",
+    "rustc-internal-api",
+]
+raw = []
+
+# Enables usage of `#[inline]` on far more functions than by default in this
+# crate. This may lead to a performance increase but often comes at a compile
+# time cost.
+inline-more = []
+
+[package.metadata.docs.rs]
+features = ["nightly", "rayon", "serde", "raw"]
diff --git a/LICENSE b/LICENSE
new file mode 120000
index 0000000..6b579aa
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1 @@
+LICENSE-APACHE
\ No newline at end of file
diff --git a/LICENSE-APACHE b/LICENSE-APACHE
new file mode 100644
index 0000000..16fe87b
--- /dev/null
+++ b/LICENSE-APACHE
@@ -0,0 +1,201 @@
+                              Apache License
+                        Version 2.0, January 2004
+                     http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+   "License" shall mean the terms and conditions for use, reproduction,
+   and distribution as defined by Sections 1 through 9 of this document.
+
+   "Licensor" shall mean the copyright owner or entity authorized by
+   the copyright owner that is granting the License.
+
+   "Legal Entity" shall mean the union of the acting entity and all
+   other entities that control, are controlled by, or are under common
+   control with that entity. For the purposes of this definition,
+   "control" means (i) the power, direct or indirect, to cause the
+   direction or management of such entity, whether by contract or
+   otherwise, or (ii) ownership of fifty percent (50%) or more of the
+   outstanding shares, or (iii) beneficial ownership of such entity.
+
+   "You" (or "Your") shall mean an individual or Legal Entity
+   exercising permissions granted by this License.
+
+   "Source" form shall mean the preferred form for making modifications,
+   including but not limited to software source code, documentation
+   source, and configuration files.
+
+   "Object" form shall mean any form resulting from mechanical
+   transformation or translation of a Source form, including but
+   not limited to compiled object code, generated documentation,
+   and conversions to other media types.
+
+   "Work" shall mean the work of authorship, whether in Source or
+   Object form, made available under the License, as indicated by a
+   copyright notice that is included in or attached to the work
+   (an example is provided in the Appendix below).
+
+   "Derivative Works" shall mean any work, whether in Source or Object
+   form, that is based on (or derived from) the Work and for which the
+   editorial revisions, annotations, elaborations, or other modifications
+   represent, as a whole, an original work of authorship. For the purposes
+   of this License, Derivative Works shall not include works that remain
+   separable from, or merely link (or bind by name) to the interfaces of,
+   the Work and Derivative Works thereof.
+
+   "Contribution" shall mean any work of authorship, including
+   the original version of the Work and any modifications or additions
+   to that Work or Derivative Works thereof, that is intentionally
+   submitted to Licensor for inclusion in the Work by the copyright owner
+   or by an individual or Legal Entity authorized to submit on behalf of
+   the copyright owner. For the purposes of this definition, "submitted"
+   means any form of electronic, verbal, or written communication sent
+   to the Licensor or its representatives, including but not limited to
+   communication on electronic mailing lists, source code control systems,
+   and issue tracking systems that are managed by, or on behalf of, the
+   Licensor for the purpose of discussing and improving the Work, but
+   excluding communication that is conspicuously marked or otherwise
+   designated in writing by the copyright owner as "Not a Contribution."
+
+   "Contributor" shall mean Licensor and any individual or Legal Entity
+   on behalf of whom a Contribution has been received by Licensor and
+   subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   copyright license to reproduce, prepare Derivative Works of,
+   publicly display, publicly perform, sublicense, and distribute the
+   Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   (except as stated in this section) patent license to make, have made,
+   use, offer to sell, sell, import, and otherwise transfer the Work,
+   where such license applies only to those patent claims licensable
+   by such Contributor that are necessarily infringed by their
+   Contribution(s) alone or by combination of their Contribution(s)
+   with the Work to which such Contribution(s) was submitted. If You
+   institute patent litigation against any entity (including a
+   cross-claim or counterclaim in a lawsuit) alleging that the Work
+   or a Contribution incorporated within the Work constitutes direct
+   or contributory patent infringement, then any patent licenses
+   granted to You under this License for that Work shall terminate
+   as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+   Work or Derivative Works thereof in any medium, with or without
+   modifications, and in Source or Object form, provided that You
+   meet the following conditions:
+
+   (a) You must give any other recipients of the Work or
+       Derivative Works a copy of this License; and
+
+   (b) You must cause any modified files to carry prominent notices
+       stating that You changed the files; and
+
+   (c) You must retain, in the Source form of any Derivative Works
+       that You distribute, all copyright, patent, trademark, and
+       attribution notices from the Source form of the Work,
+       excluding those notices that do not pertain to any part of
+       the Derivative Works; and
+
+   (d) If the Work includes a "NOTICE" text file as part of its
+       distribution, then any Derivative Works that You distribute must
+       include a readable copy of the attribution notices contained
+       within such NOTICE file, excluding those notices that do not
+       pertain to any part of the Derivative Works, in at least one
+       of the following places: within a NOTICE text file distributed
+       as part of the Derivative Works; within the Source form or
+       documentation, if provided along with the Derivative Works; or,
+       within a display generated by the Derivative Works, if and
+       wherever such third-party notices normally appear. The contents
+       of the NOTICE file are for informational purposes only and
+       do not modify the License. You may add Your own attribution
+       notices within Derivative Works that You distribute, alongside
+       or as an addendum to the NOTICE text from the Work, provided
+       that such additional attribution notices cannot be construed
+       as modifying the License.
+
+   You may add Your own copyright statement to Your modifications and
+   may provide additional or different license terms and conditions
+   for use, reproduction, or distribution of Your modifications, or
+   for any such Derivative Works as a whole, provided Your use,
+   reproduction, and distribution of the Work otherwise complies with
+   the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+   any Contribution intentionally submitted for inclusion in the Work
+   by You to the Licensor shall be under the terms and conditions of
+   this License, without any additional terms or conditions.
+   Notwithstanding the above, nothing herein shall supersede or modify
+   the terms of any separate license agreement you may have executed
+   with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+   names, trademarks, service marks, or product names of the Licensor,
+   except as required for reasonable and customary use in describing the
+   origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+   agreed to in writing, Licensor provides the Work (and each
+   Contributor provides its Contributions) on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+   implied, including, without limitation, any warranties or conditions
+   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+   PARTICULAR PURPOSE. You are solely responsible for determining the
+   appropriateness of using or redistributing the Work and assume any
+   risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+   whether in tort (including negligence), contract, or otherwise,
+   unless required by applicable law (such as deliberate and grossly
+   negligent acts) or agreed to in writing, shall any Contributor be
+   liable to You for damages, including any direct, indirect, special,
+   incidental, or consequential damages of any character arising as a
+   result of this License or out of the use or inability to use the
+   Work (including but not limited to damages for loss of goodwill,
+   work stoppage, computer failure or malfunction, or any and all
+   other commercial damages or losses), even if such Contributor
+   has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+   the Work or Derivative Works thereof, You may choose to offer,
+   and charge a fee for, acceptance of support, warranty, indemnity,
+   or other liability obligations and/or rights consistent with this
+   License. However, in accepting such obligations, You may act only
+   on Your own behalf and on Your sole responsibility, not on behalf
+   of any other Contributor, and only if You agree to indemnify,
+   defend, and hold each Contributor harmless for any liability
+   incurred by, or claims asserted against, such Contributor by reason
+   of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+   To apply the Apache License to your work, attach the following
+   boilerplate notice, with the fields enclosed by brackets "[]"
+   replaced with your own identifying information. (Don't include
+   the brackets!)  The text should be enclosed in the appropriate
+   comment syntax for the file format. We also recommend that a
+   file or class name and description of purpose be included on the
+   same "printed page" as the copyright notice for easier
+   identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+	http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/LICENSE-MIT b/LICENSE-MIT
new file mode 100644
index 0000000..5afc2a7
--- /dev/null
+++ b/LICENSE-MIT
@@ -0,0 +1,25 @@
+Copyright (c) 2016 Amanieu d'Antras
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/METADATA b/METADATA
new file mode 100644
index 0000000..b3d1a7c
--- /dev/null
+++ b/METADATA
@@ -0,0 +1,19 @@
+name: "hashbrown"
+description: "A Rust port of Google\'s SwissTable hash map"
+third_party {
+  url {
+    type: HOMEPAGE
+    value: "https://crates.io/crates/hashbrown"
+  }
+  url {
+    type: ARCHIVE
+    value: "https://static.crates.io/crates/hashbrown/hashbrown-0.9.1.crate"
+  }
+  version: "0.9.1"
+  license_type: NOTICE
+  last_upgrade_date {
+    year: 2020
+    month: 11
+    day: 9
+  }
+}
diff --git a/MODULE_LICENSE_APACHE2 b/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/MODULE_LICENSE_APACHE2
diff --git a/OWNERS b/OWNERS
new file mode 100644
index 0000000..46fc303
--- /dev/null
+++ b/OWNERS
@@ -0,0 +1 @@
+include platform/prebuilts/rust:/OWNERS
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..2e43171
--- /dev/null
+++ b/README.md
@@ -0,0 +1,126 @@
+hashbrown
+=========
+
+[![Build Status](https://travis-ci.com/rust-lang/hashbrown.svg?branch=master)](https://travis-ci.com/rust-lang/hashbrown)
+[![Crates.io](https://img.shields.io/crates/v/hashbrown.svg)](https://crates.io/crates/hashbrown)
+[![Documentation](https://docs.rs/hashbrown/badge.svg)](https://docs.rs/hashbrown)
+[![Rust](https://img.shields.io/badge/rust-1.36.0%2B-blue.svg?maxAge=3600)](https://github.com/rust-lang/hashbrown)
+
+This crate is a Rust port of Google's high-performance [SwissTable] hash
+map, adapted to make it a drop-in replacement for Rust's standard `HashMap`
+and `HashSet` types.
+
+The original C++ version of SwissTable can be found [here], and this
+[CppCon talk] gives an overview of how the algorithm works.
+
+Since Rust 1.36, this is now the `HashMap` implementation for the Rust standard
+library. However, you may still want to use this crate instead, since it works
+in environments without `std`, such as embedded systems and kernels.
+
+[SwissTable]: https://abseil.io/blog/20180927-swisstables
+[here]: https://github.com/abseil/abseil-cpp/blob/master/absl/container/internal/raw_hash_set.h
+[CppCon talk]: https://www.youtube.com/watch?v=ncHmEUmJZf4
+
+## [Change log](CHANGELOG.md)
+
+## Features
+
+- Drop-in replacement for the standard library `HashMap` and `HashSet` types.
+- Uses `AHash` as the default hasher, which is much faster than SipHash; see the sketch below this list for swapping hashers.
+- Around 2x faster than the previous standard library `HashMap`.
+- Lower memory usage: only 1 byte of overhead per entry instead of 8.
+- Compatible with `#[no_std]` (but requires a global allocator with the `alloc` crate).
+- Empty hash maps do not allocate any memory.
+- SIMD lookups to scan multiple hash entries in parallel.
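+
+Because these types are drop-in replacements, swapping the hasher works just as
+it does with the standard library. A minimal sketch (assuming you want to trade
+raw speed for HashDoS resistance) plugs `std`'s `RandomState` back in:
+
+```rust
+use hashbrown::HashMap;
+use std::collections::hash_map::RandomState;
+
+// Explicitly select SipHash (via RandomState) instead of the default AHash.
+let mut map: HashMap<u32, &str, RandomState> = HashMap::with_hasher(RandomState::new());
+map.insert(1, "one");
+assert_eq!(map.get(&1), Some(&"one"));
+```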
+
+## Performance
+
+All numbers below compare against the previous implementation of `std::collections::HashMap` (Rust 1.35).
+
+With the hashbrown default AHash hasher (not HashDoS-resistant):
+
+```text
+ name                       oldstdhash ns/iter  hashbrown ns/iter  diff ns/iter   diff %  speedup 
+ insert_ahash_highbits        20,846              7,397                   -13,449  -64.52%   x 2.82 
+ insert_ahash_random          20,515              7,796                   -12,719  -62.00%   x 2.63 
+ insert_ahash_serial          21,668              7,264                   -14,404  -66.48%   x 2.98 
+ insert_erase_ahash_highbits  29,570              17,498                  -12,072  -40.83%   x 1.69 
+ insert_erase_ahash_random    39,569              17,474                  -22,095  -55.84%   x 2.26 
+ insert_erase_ahash_serial    32,073              17,332                  -14,741  -45.96%   x 1.85 
+ iter_ahash_highbits          1,572               2,087                       515   32.76%   x 0.75 
+ iter_ahash_random            1,609               2,074                       465   28.90%   x 0.78 
+ iter_ahash_serial            2,293               2,120                      -173   -7.54%   x 1.08 
+ lookup_ahash_highbits        3,460               4,403                       943   27.25%   x 0.79 
+ lookup_ahash_random          6,377               3,911                    -2,466  -38.67%   x 1.63 
+ lookup_ahash_serial          3,629               3,586                       -43   -1.18%   x 1.01 
+ lookup_fail_ahash_highbits   5,286               3,411                    -1,875  -35.47%   x 1.55 
+ lookup_fail_ahash_random     12,365              4,171                    -8,194  -66.27%   x 2.96 
+ lookup_fail_ahash_serial     4,902               3,240                    -1,662  -33.90%   x 1.51 
+```
+
+With the libstd default SipHash hasher (HashDoS-resistant):
+
+```text
+ name                       oldstdhash ns/iter  hashbrown ns/iter  diff ns/iter   diff %  speedup 
+ insert_std_highbits        32,598              20,199                  -12,399  -38.04%   x 1.61 
+ insert_std_random          29,824              20,760                   -9,064  -30.39%   x 1.44 
+ insert_std_serial          33,151              17,256                  -15,895  -47.95%   x 1.92 
+ insert_erase_std_highbits  74,731              48,735                  -25,996  -34.79%   x 1.53 
+ insert_erase_std_random    73,828              47,649                  -26,179  -35.46%   x 1.55 
+ insert_erase_std_serial    73,864              40,147                  -33,717  -45.65%   x 1.84 
+ iter_std_highbits          1,518               2,264                       746   49.14%   x 0.67 
+ iter_std_random            1,502               2,414                       912   60.72%   x 0.62 
+ iter_std_serial            6,361               2,118                    -4,243  -66.70%   x 3.00 
+ lookup_std_highbits        21,705              16,962                   -4,743  -21.85%   x 1.28 
+ lookup_std_random          21,654              17,158                   -4,496  -20.76%   x 1.26 
+ lookup_std_serial          18,726              14,509                   -4,217  -22.52%   x 1.29 
+ lookup_fail_std_highbits   25,852              17,323                   -8,529  -32.99%   x 1.49 
+ lookup_fail_std_random     25,913              17,760                   -8,153  -31.46%   x 1.46 
+ lookup_fail_std_serial     22,648              14,839                   -7,809  -34.48%   x 1.53 
+```
+
+## Usage
+
+Add this to your `Cargo.toml`:
+
+```toml
+[dependencies]
+hashbrown = "0.9"
+```
+
+Then:
+
+```rust
+use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert(1, "one");
+```
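+
+Since the API mirrors the standard library's, familiar patterns carry over
+unchanged. As an illustrative sketch (a hypothetical word-count example, not
+taken from the crate docs), the entry API works exactly as in `std`:
+
+```rust
+use hashbrown::HashMap;
+
+let mut counts: HashMap<&str, u32> = HashMap::new();
+for &word in &["apple", "pear", "apple"] {
+    // Insert 0 for unseen words, then increment in place.
+    *counts.entry(word).or_insert(0) += 1;
+}
+assert_eq!(counts["apple"], 2);
+```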
+
+This crate has the following Cargo features:
+
+- `nightly`: Enables nightly-only features: `#[may_dangle]`.
+- `serde`: Enables serde serialization support.
+- `rayon`: Enables rayon parallel iterator support.
+- `raw`: Enables access to the experimental and unsafe `RawTable` API.
+- `inline-more`: Adds inline hints to most functions, improving run-time performance at the cost
+  of compilation time. (enabled by default)
+- `ahash`: Compiles with ahash as the default hasher. (enabled by default)
+- `ahash-compile-time-rng`: Activates the `compile-time-rng` feature of ahash to increase
+   DoS resistance, but it can result in issues for `no_std` builds. More details in
+   [issue#124](https://github.com/rust-lang/hashbrown/issues/124). (disabled by default; see `default` in `Cargo.toml`)
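+
+For illustration only (the feature names come from this crate's `Cargo.toml`,
+but the particular combination is an assumption about your build), a dependency
+that opts out of the defaults and selects features explicitly might look like:
+
+```toml
+[dependencies]
+hashbrown = { version = "0.9", default-features = false, features = ["ahash", "serde"] }
+```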
+
+## License
+
+Licensed under either of:
+
+ * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
+ * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
+
+at your option.
+
+### Contribution
+
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any
+additional terms or conditions.
diff --git a/benches/bench.rs b/benches/bench.rs
new file mode 100644
index 0000000..771e716
--- /dev/null
+++ b/benches/bench.rs
@@ -0,0 +1,260 @@
+// This benchmark suite exercises the hash maps along several dimensions:
+//   Hasher: std default (SipHash) and crate default (AHash).
+//   Int key distribution: low bit heavy, top bit heavy, and random.
+//   Task: basic functionality: insert, insert_erase, lookup, lookup_fail, iter
+#![feature(test)]
+
+extern crate test;
+
+use test::{black_box, Bencher};
+
+use hashbrown::hash_map::DefaultHashBuilder;
+use hashbrown::HashMap;
+use std::collections::hash_map::RandomState;
+
+const SIZE: usize = 1000;
+
+// The default hashmap when using this crate directly.
+type AHashMap<K, V> = HashMap<K, V, DefaultHashBuilder>;
+// This uses the hashmap from this crate with the default hasher of the stdlib.
+type StdHashMap<K, V> = HashMap<K, V, RandomState>;
+
+// A random key iterator.
+#[derive(Clone, Copy)]
+struct RandomKeys {
+    state: usize,
+}
+
+impl RandomKeys {
+    fn new() -> Self {
+        RandomKeys { state: 0 }
+    }
+}
+
+impl Iterator for RandomKeys {
+    type Item = usize;
+    fn next(&mut self) -> Option<usize> {
+        // Add 1, then multiply by a 32-bit prime.
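+        // The update is an affine map with an odd multiplier, hence a bijection
+        // on usize: keys cannot repeat until the whole sequence cycles.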
+        self.state = self.state.wrapping_add(1).wrapping_mul(3787392781);
+        Some(self.state)
+    }
+}
+
+macro_rules! bench_suite {
+    ($bench_macro:ident, $bench_ahash_serial:ident, $bench_std_serial:ident,
+     $bench_ahash_highbits:ident, $bench_std_highbits:ident,
+     $bench_ahash_random:ident, $bench_std_random:ident) => {
+        $bench_macro!($bench_ahash_serial, AHashMap, 0..);
+        $bench_macro!($bench_std_serial, StdHashMap, 0..);
+        $bench_macro!(
+            $bench_ahash_highbits,
+            AHashMap,
+            (0..).map(usize::swap_bytes)
+        );
+        $bench_macro!(
+            $bench_std_highbits,
+            StdHashMap,
+            (0..).map(usize::swap_bytes)
+        );
+        $bench_macro!($bench_ahash_random, AHashMap, RandomKeys::new());
+        $bench_macro!($bench_std_random, StdHashMap, RandomKeys::new());
+    };
+}
+
+macro_rules! bench_insert {
+    ($name:ident, $maptype:ident, $keydist:expr) => {
+        #[bench]
+        fn $name(b: &mut Bencher) {
+            let mut m = $maptype::with_capacity_and_hasher(SIZE, Default::default());
+            b.iter(|| {
+                m.clear();
+                for i in ($keydist).take(SIZE) {
+                    m.insert(i, i);
+                }
+                black_box(&mut m);
+            })
+        }
+    };
+}
+
+bench_suite!(
+    bench_insert,
+    insert_ahash_serial,
+    insert_std_serial,
+    insert_ahash_highbits,
+    insert_std_highbits,
+    insert_ahash_random,
+    insert_std_random
+);
+
+macro_rules! bench_insert_erase {
+    ($name:ident, $maptype:ident, $keydist:expr) => {
+        #[bench]
+        fn $name(b: &mut Bencher) {
+            let mut base = $maptype::default();
+            for i in ($keydist).take(SIZE) {
+                base.insert(i, i);
+            }
+            let skip = $keydist.skip(SIZE);
+            b.iter(|| {
+                let mut m = base.clone();
+                let mut add_iter = skip.clone();
+                let mut remove_iter = $keydist;
+                // While keeping the size constant,
+                // replace the first keydist with the second.
+                for (add, remove) in (&mut add_iter).zip(&mut remove_iter).take(SIZE) {
+                    m.insert(add, add);
+                    black_box(m.remove(&remove));
+                }
+                black_box(m);
+            })
+        }
+    };
+}
+
+bench_suite!(
+    bench_insert_erase,
+    insert_erase_ahash_serial,
+    insert_erase_std_serial,
+    insert_erase_ahash_highbits,
+    insert_erase_std_highbits,
+    insert_erase_ahash_random,
+    insert_erase_std_random
+);
+
+macro_rules! bench_lookup {
+    ($name:ident, $maptype:ident, $keydist:expr) => {
+        #[bench]
+        fn $name(b: &mut Bencher) {
+            let mut m = $maptype::default();
+            for i in $keydist.take(SIZE) {
+                m.insert(i, i);
+            }
+
+            b.iter(|| {
+                for i in $keydist.take(SIZE) {
+                    black_box(m.get(&i));
+                }
+            })
+        }
+    };
+}
+
+bench_suite!(
+    bench_lookup,
+    lookup_ahash_serial,
+    lookup_std_serial,
+    lookup_ahash_highbits,
+    lookup_std_highbits,
+    lookup_ahash_random,
+    lookup_std_random
+);
+
+macro_rules! bench_lookup_fail {
+    ($name:ident, $maptype:ident, $keydist:expr) => {
+        #[bench]
+        fn $name(b: &mut Bencher) {
+            let mut m = $maptype::default();
+            let mut iter = $keydist;
+            for i in (&mut iter).take(SIZE) {
+                m.insert(i, i);
+            }
+
+            b.iter(|| {
+                for i in (&mut iter).take(SIZE) {
+                    black_box(m.get(&i));
+                }
+            })
+        }
+    };
+}
+
+bench_suite!(
+    bench_lookup_fail,
+    lookup_fail_ahash_serial,
+    lookup_fail_std_serial,
+    lookup_fail_ahash_highbits,
+    lookup_fail_std_highbits,
+    lookup_fail_ahash_random,
+    lookup_fail_std_random
+);
+
+macro_rules! bench_iter {
+    ($name:ident, $maptype:ident, $keydist:expr) => {
+        #[bench]
+        fn $name(b: &mut Bencher) {
+            let mut m = $maptype::default();
+            for i in ($keydist).take(SIZE) {
+                m.insert(i, i);
+            }
+
+            b.iter(|| {
+                for i in &m {
+                    black_box(i);
+                }
+            })
+        }
+    };
+}
+
+bench_suite!(
+    bench_iter,
+    iter_ahash_serial,
+    iter_std_serial,
+    iter_ahash_highbits,
+    iter_std_highbits,
+    iter_ahash_random,
+    iter_std_random
+);
+
+#[bench]
+fn clone_small(b: &mut Bencher) {
+    let mut m = HashMap::new();
+    for i in 0..10 {
+        m.insert(i, i);
+    }
+
+    b.iter(|| {
+        black_box(m.clone());
+    })
+}
+
+#[bench]
+fn clone_from_small(b: &mut Bencher) {
+    let mut m = HashMap::new();
+    let mut m2 = HashMap::new();
+    for i in 0..10 {
+        m.insert(i, i);
+    }
+
+    b.iter(|| {
+        m2.clone_from(&m);
+        black_box(&mut m2);
+    })
+}
+
+#[bench]
+fn clone_large(b: &mut Bencher) {
+    let mut m = HashMap::new();
+    for i in 0..1000 {
+        m.insert(i, i);
+    }
+
+    b.iter(|| {
+        black_box(m.clone());
+    })
+}
+
+#[bench]
+fn clone_from_large(b: &mut Bencher) {
+    let mut m = HashMap::new();
+    let mut m2 = HashMap::new();
+    for i in 0..1000 {
+        m.insert(i, i);
+    }
+
+    b.iter(|| {
+        m2.clone_from(&m);
+        black_box(&mut m2);
+    })
+}
diff --git a/clippy.toml b/clippy.toml
new file mode 100644
index 0000000..d98bf2c
--- /dev/null
+++ b/clippy.toml
@@ -0,0 +1 @@
+doc-valid-idents = [ "CppCon", "SwissTable", "SipHash", "HashDoS" ]
diff --git a/src/external_trait_impls/mod.rs b/src/external_trait_impls/mod.rs
new file mode 100644
index 0000000..ef49783
--- /dev/null
+++ b/src/external_trait_impls/mod.rs
@@ -0,0 +1,4 @@
+#[cfg(feature = "rayon")]
+pub(crate) mod rayon;
+#[cfg(feature = "serde")]
+mod serde;
diff --git a/src/external_trait_impls/rayon/helpers.rs b/src/external_trait_impls/rayon/helpers.rs
new file mode 100644
index 0000000..9382007
--- /dev/null
+++ b/src/external_trait_impls/rayon/helpers.rs
@@ -0,0 +1,26 @@
+use alloc::collections::LinkedList;
+use alloc::vec::Vec;
+
+use rayon::iter::{IntoParallelIterator, ParallelIterator};
+
+/// Helper that collects a parallel iterator into an intermediary list of
+/// per-job vectors, returning the list together with the total element count.
+pub(super) fn collect<I: IntoParallelIterator>(iter: I) -> (LinkedList<Vec<I::Item>>, usize) {
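+    // Each parallel job folds its items into its own Vec; `reduce` then stitches
+    // the per-job Vecs together with O(1) `LinkedList::append`, so combining
+    // partial results never copies the collected elements.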
+    let list = iter
+        .into_par_iter()
+        .fold(Vec::new, |mut vec, elem| {
+            vec.push(elem);
+            vec
+        })
+        .map(|vec| {
+            let mut list = LinkedList::new();
+            list.push_back(vec);
+            list
+        })
+        .reduce(LinkedList::new, |mut list1, mut list2| {
+            list1.append(&mut list2);
+            list1
+        });
+
+    let len = list.iter().map(Vec::len).sum();
+    (list, len)
+}
diff --git a/src/external_trait_impls/rayon/map.rs b/src/external_trait_impls/rayon/map.rs
new file mode 100644
index 0000000..334f8bb
--- /dev/null
+++ b/src/external_trait_impls/rayon/map.rs
@@ -0,0 +1,666 @@
+//! Rayon extensions for `HashMap`.
+
+use crate::hash_map::HashMap;
+use core::fmt;
+use core::hash::{BuildHasher, Hash};
+use rayon::iter::plumbing::UnindexedConsumer;
+use rayon::iter::{FromParallelIterator, IntoParallelIterator, ParallelExtend, ParallelIterator};
+
+/// Parallel iterator over shared references to entries in a map.
+///
+/// This iterator is created by the [`par_iter`] method on [`HashMap`]
+/// (provided by the [`IntoParallelRefIterator`] trait).
+/// See its documentation for more.
+///
+/// [`par_iter`]: /hashbrown/struct.HashMap.html#method.par_iter
+/// [`HashMap`]: /hashbrown/struct.HashMap.html
+/// [`IntoParallelRefIterator`]: https://docs.rs/rayon/1.0/rayon/iter/trait.IntoParallelRefIterator.html
+pub struct ParIter<'a, K, V, S> {
+    map: &'a HashMap<K, V, S>,
+}
+
+impl<'a, K: Sync, V: Sync, S: Sync> ParallelIterator for ParIter<'a, K, V, S> {
+    type Item = (&'a K, &'a V);
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drive_unindexed<C>(self, consumer: C) -> C::Result
+    where
+        C: UnindexedConsumer<Self::Item>,
+    {
+        unsafe { self.map.table.par_iter() }
+            .map(|x| unsafe {
+                let r = x.as_ref();
+                (&r.0, &r.1)
+            })
+            .drive_unindexed(consumer)
+    }
+}
+
+impl<K, V, S> Clone for ParIter<'_, K, V, S> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        ParIter { map: self.map }
+    }
+}
+
+impl<K: fmt::Debug + Eq + Hash, V: fmt::Debug, S: BuildHasher> fmt::Debug for ParIter<'_, K, V, S> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.map.iter().fmt(f)
+    }
+}
+
+/// Parallel iterator over shared references to keys in a map.
+///
+/// This iterator is created by the [`par_keys`] method on [`HashMap`].
+/// See its documentation for more.
+///
+/// [`par_keys`]: /hashbrown/struct.HashMap.html#method.par_keys
+/// [`HashMap`]: /hashbrown/struct.HashMap.html
+pub struct ParKeys<'a, K, V, S> {
+    map: &'a HashMap<K, V, S>,
+}
+
+impl<'a, K: Sync, V: Sync, S: Sync> ParallelIterator for ParKeys<'a, K, V, S> {
+    type Item = &'a K;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drive_unindexed<C>(self, consumer: C) -> C::Result
+    where
+        C: UnindexedConsumer<Self::Item>,
+    {
+        unsafe { self.map.table.par_iter() }
+            .map(|x| unsafe { &x.as_ref().0 })
+            .drive_unindexed(consumer)
+    }
+}
+
+impl<K, V, S> Clone for ParKeys<'_, K, V, S> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        ParKeys { map: self.map }
+    }
+}
+
+impl<K: fmt::Debug + Eq + Hash, V, S: BuildHasher> fmt::Debug for ParKeys<'_, K, V, S> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.map.keys().fmt(f)
+    }
+}
+
+/// Parallel iterator over shared references to values in a map.
+///
+/// This iterator is created by the [`par_values`] method on [`HashMap`].
+/// See its documentation for more.
+///
+/// [`par_values`]: /hashbrown/struct.HashMap.html#method.par_values
+/// [`HashMap`]: /hashbrown/struct.HashMap.html
+pub struct ParValues<'a, K, V, S> {
+    map: &'a HashMap<K, V, S>,
+}
+
+impl<'a, K: Sync, V: Sync, S: Sync> ParallelIterator for ParValues<'a, K, V, S> {
+    type Item = &'a V;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drive_unindexed<C>(self, consumer: C) -> C::Result
+    where
+        C: UnindexedConsumer<Self::Item>,
+    {
+        unsafe { self.map.table.par_iter() }
+            .map(|x| unsafe { &x.as_ref().1 })
+            .drive_unindexed(consumer)
+    }
+}
+
+impl<K, V, S> Clone for ParValues<'_, K, V, S> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        ParValues { map: self.map }
+    }
+}
+
+impl<K: Eq + Hash, V: fmt::Debug, S: BuildHasher> fmt::Debug for ParValues<'_, K, V, S> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.map.values().fmt(f)
+    }
+}
+
+/// Parallel iterator over mutable references to entries in a map.
+///
+/// This iterator is created by the [`par_iter_mut`] method on [`HashMap`]
+/// (provided by the [`IntoParallelRefMutIterator`] trait).
+/// See its documentation for more.
+///
+/// [`par_iter_mut`]: /hashbrown/struct.HashMap.html#method.par_iter_mut
+/// [`HashMap`]: /hashbrown/struct.HashMap.html
+/// [`IntoParallelRefMutIterator`]: https://docs.rs/rayon/1.0/rayon/iter/trait.IntoParallelRefMutIterator.html
+pub struct ParIterMut<'a, K, V, S> {
+    map: &'a mut HashMap<K, V, S>,
+}
+
+impl<'a, K: Send + Sync, V: Send, S: Send> ParallelIterator for ParIterMut<'a, K, V, S> {
+    type Item = (&'a K, &'a mut V);
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drive_unindexed<C>(self, consumer: C) -> C::Result
+    where
+        C: UnindexedConsumer<Self::Item>,
+    {
+        unsafe { self.map.table.par_iter() }
+            .map(|x| unsafe {
+                let r = x.as_mut();
+                (&r.0, &mut r.1)
+            })
+            .drive_unindexed(consumer)
+    }
+}
+
+impl<K: fmt::Debug + Eq + Hash, V: fmt::Debug, S: BuildHasher> fmt::Debug
+    for ParIterMut<'_, K, V, S>
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.map.iter().fmt(f)
+    }
+}
+
+/// Parallel iterator over mutable references to values in a map.
+///
+/// This iterator is created by the [`par_values_mut`] method on [`HashMap`].
+/// See its documentation for more.
+///
+/// [`par_values_mut`]: /hashbrown/struct.HashMap.html#method.par_values_mut
+/// [`HashMap`]: /hashbrown/struct.HashMap.html
+pub struct ParValuesMut<'a, K, V, S> {
+    map: &'a mut HashMap<K, V, S>,
+}
+
+impl<'a, K: Send, V: Send, S: Send> ParallelIterator for ParValuesMut<'a, K, V, S> {
+    type Item = &'a mut V;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drive_unindexed<C>(self, consumer: C) -> C::Result
+    where
+        C: UnindexedConsumer<Self::Item>,
+    {
+        unsafe { self.map.table.par_iter() }
+            .map(|x| unsafe { &mut x.as_mut().1 })
+            .drive_unindexed(consumer)
+    }
+}
+
+impl<K: Eq + Hash, V: fmt::Debug, S: BuildHasher> fmt::Debug for ParValuesMut<'_, K, V, S> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.map.values().fmt(f)
+    }
+}
+
+/// Parallel iterator over entries of a consumed map.
+///
+/// This iterator is created by the [`into_par_iter`] method on [`HashMap`]
+/// (provided by the [`IntoParallelIterator`] trait).
+/// See its documentation for more.
+///
+/// [`into_par_iter`]: /hashbrown/struct.HashMap.html#method.into_par_iter
+/// [`HashMap`]: /hashbrown/struct.HashMap.html
+/// [`IntoParallelIterator`]: https://docs.rs/rayon/1.0/rayon/iter/trait.IntoParallelIterator.html
+pub struct IntoParIter<K, V, S> {
+    map: HashMap<K, V, S>,
+}
+
+impl<K: Send, V: Send, S: Send> ParallelIterator for IntoParIter<K, V, S> {
+    type Item = (K, V);
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drive_unindexed<C>(self, consumer: C) -> C::Result
+    where
+        C: UnindexedConsumer<Self::Item>,
+    {
+        self.map.table.into_par_iter().drive_unindexed(consumer)
+    }
+}
+
+impl<K: fmt::Debug + Eq + Hash, V: fmt::Debug, S: BuildHasher> fmt::Debug for IntoParIter<K, V, S> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.map.iter().fmt(f)
+    }
+}
+
+/// Parallel draining iterator over entries of a map.
+///
+/// This iterator is created by the [`par_drain`] method on [`HashMap`].
+/// See its documentation for more.
+///
+/// [`par_drain`]: /hashbrown/struct.HashMap.html#method.par_drain
+/// [`HashMap`]: /hashbrown/struct.HashMap.html
+pub struct ParDrain<'a, K, V, S> {
+    map: &'a mut HashMap<K, V, S>,
+}
+
+impl<K: Send, V: Send, S: Send> ParallelIterator for ParDrain<'_, K, V, S> {
+    type Item = (K, V);
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drive_unindexed<C>(self, consumer: C) -> C::Result
+    where
+        C: UnindexedConsumer<Self::Item>,
+    {
+        self.map.table.par_drain().drive_unindexed(consumer)
+    }
+}
+
+impl<K: fmt::Debug + Eq + Hash, V: fmt::Debug, S: BuildHasher> fmt::Debug
+    for ParDrain<'_, K, V, S>
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.map.iter().fmt(f)
+    }
+}
+
+impl<K: Sync, V: Sync, S: Sync> HashMap<K, V, S> {
+    /// Visits (potentially in parallel) immutably borrowed keys in an arbitrary order.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn par_keys(&self) -> ParKeys<'_, K, V, S> {
+        ParKeys { map: self }
+    }
+
+    /// Visits (potentially in parallel) immutably borrowed values in an arbitrary order.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn par_values(&self) -> ParValues<'_, K, V, S> {
+        ParValues { map: self }
+    }
+}
+
+impl<K: Send, V: Send, S: Send> HashMap<K, V, S> {
+    /// Visits (potentially in parallel) mutably borrowed values in an arbitrary order.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn par_values_mut(&mut self) -> ParValuesMut<'_, K, V, S> {
+        ParValuesMut { map: self }
+    }
+
+    /// Consumes (potentially in parallel) all entries in an arbitrary order,
+    /// while preserving the map's allocated memory for reuse.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn par_drain(&mut self) -> ParDrain<'_, K, V, S> {
+        ParDrain { map: self }
+    }
+}
+
+impl<K, V, S> HashMap<K, V, S>
+where
+    K: Eq + Hash + Sync,
+    V: PartialEq + Sync,
+    S: BuildHasher + Sync,
+{
+    /// Returns `true` if the map is equal to another,
+    /// i.e. both maps contain the same keys mapped to the same values.
+    ///
+    /// This method runs in a potentially parallel fashion.
+    pub fn par_eq(&self, other: &Self) -> bool {
+        self.len() == other.len()
+            && self
+                .into_par_iter()
+                .all(|(key, value)| other.get(key).map_or(false, |v| *value == *v))
+    }
+}
+
+impl<K: Send, V: Send, S: Send> IntoParallelIterator for HashMap<K, V, S> {
+    type Item = (K, V);
+    type Iter = IntoParIter<K, V, S>;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn into_par_iter(self) -> Self::Iter {
+        IntoParIter { map: self }
+    }
+}
+
+impl<'a, K: Sync, V: Sync, S: Sync> IntoParallelIterator for &'a HashMap<K, V, S> {
+    type Item = (&'a K, &'a V);
+    type Iter = ParIter<'a, K, V, S>;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn into_par_iter(self) -> Self::Iter {
+        ParIter { map: self }
+    }
+}
+
+impl<'a, K: Send + Sync, V: Send, S: Send> IntoParallelIterator for &'a mut HashMap<K, V, S> {
+    type Item = (&'a K, &'a mut V);
+    type Iter = ParIterMut<'a, K, V, S>;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn into_par_iter(self) -> Self::Iter {
+        ParIterMut { map: self }
+    }
+}
+
+/// Collect (key, value) pairs from a parallel iterator into a
+/// hashmap. If multiple pairs correspond to the same key, then the
+/// ones produced earlier in the parallel iterator will be
+/// overwritten, just as with a sequential iterator.
+impl<K, V, S> FromParallelIterator<(K, V)> for HashMap<K, V, S>
+where
+    K: Eq + Hash + Send,
+    V: Send,
+    S: BuildHasher + Default,
+{
+    fn from_par_iter<P>(par_iter: P) -> Self
+    where
+        P: IntoParallelIterator<Item = (K, V)>,
+    {
+        let mut map = HashMap::default();
+        map.par_extend(par_iter);
+        map
+    }
+}
+
+/// Extend a hash map with items from a parallel iterator.
+impl<K, V, S> ParallelExtend<(K, V)> for HashMap<K, V, S>
+where
+    K: Eq + Hash + Send,
+    V: Send,
+    S: BuildHasher,
+{
+    fn par_extend<I>(&mut self, par_iter: I)
+    where
+        I: IntoParallelIterator<Item = (K, V)>,
+    {
+        extend(self, par_iter);
+    }
+}
+
+/// Extend a hash map with copied items from a parallel iterator.
+impl<'a, K, V, S> ParallelExtend<(&'a K, &'a V)> for HashMap<K, V, S>
+where
+    K: Copy + Eq + Hash + Sync,
+    V: Copy + Sync,
+    S: BuildHasher,
+{
+    fn par_extend<I>(&mut self, par_iter: I)
+    where
+        I: IntoParallelIterator<Item = (&'a K, &'a V)>,
+    {
+        extend(self, par_iter);
+    }
+}
+
+// This is the same strategy as for the standard `HashMap` -- hashbrown gains no
+// custom advantage here.
+fn extend<K, V, S, I>(map: &mut HashMap<K, V, S>, par_iter: I)
+where
+    K: Eq + Hash,
+    S: BuildHasher,
+    I: IntoParallelIterator,
+    HashMap<K, V, S>: Extend<I::Item>,
+{
+    let (list, len) = super::helpers::collect(par_iter);
+
+    // Keys may already be present, or may appear multiple times in the
+    // iterator. Reserve the entire length if the map is empty; otherwise
+    // reserve half the length (rounded up), so the map will only resize
+    // twice in the worst case.
+    let reserve = if map.is_empty() { len } else { (len + 1) / 2 };
+    map.reserve(reserve);
+    for vec in list {
+        map.extend(vec);
+    }
+}
+
+#[cfg(test)]
+mod test_par_map {
+    use alloc::vec::Vec;
+    use core::hash::{Hash, Hasher};
+    use core::sync::atomic::{AtomicUsize, Ordering};
+
+    use rayon::prelude::*;
+
+    use crate::hash_map::HashMap;
+
+    struct Dropable<'a> {
+        k: usize,
+        counter: &'a AtomicUsize,
+    }
+
+    impl Dropable<'_> {
+        fn new(k: usize, counter: &AtomicUsize) -> Dropable<'_> {
+            counter.fetch_add(1, Ordering::Relaxed);
+
+            Dropable { k, counter }
+        }
+    }
+
+    impl Drop for Dropable<'_> {
+        fn drop(&mut self) {
+            self.counter.fetch_sub(1, Ordering::Relaxed);
+        }
+    }
+
+    impl Clone for Dropable<'_> {
+        fn clone(&self) -> Self {
+            Dropable::new(self.k, self.counter)
+        }
+    }
+
+    impl Hash for Dropable<'_> {
+        fn hash<H>(&self, state: &mut H)
+        where
+            H: Hasher,
+        {
+            self.k.hash(state)
+        }
+    }
+
+    impl PartialEq for Dropable<'_> {
+        fn eq(&self, other: &Self) -> bool {
+            self.k == other.k
+        }
+    }
+
+    impl Eq for Dropable<'_> {}
+
+    #[test]
+    fn test_into_iter_drops() {
+        let key = AtomicUsize::new(0);
+        let value = AtomicUsize::new(0);
+
+        let hm = {
+            let mut hm = HashMap::new();
+
+            assert_eq!(key.load(Ordering::Relaxed), 0);
+            assert_eq!(value.load(Ordering::Relaxed), 0);
+
+            for i in 0..100 {
+                let d1 = Dropable::new(i, &key);
+                let d2 = Dropable::new(i + 100, &value);
+                hm.insert(d1, d2);
+            }
+
+            assert_eq!(key.load(Ordering::Relaxed), 100);
+            assert_eq!(value.load(Ordering::Relaxed), 100);
+
+            hm
+        };
+
+        // While we're at it, ensure that cloning doesn't throw off the drop counts.
+        drop(hm.clone());
+
+        assert_eq!(key.load(Ordering::Relaxed), 100);
+        assert_eq!(value.load(Ordering::Relaxed), 100);
+
+        // Ensure that dropping the iterator does not leak anything.
+        drop(hm.clone().into_par_iter());
+
+        {
+            assert_eq!(key.load(Ordering::Relaxed), 100);
+            assert_eq!(value.load(Ordering::Relaxed), 100);
+
+            // retain only half
+            let _v: Vec<_> = hm
+                .into_par_iter()
+                .filter(|&(ref key, _)| key.k < 50)
+                .collect();
+
+            assert_eq!(key.load(Ordering::Relaxed), 50);
+            assert_eq!(value.load(Ordering::Relaxed), 50);
+        };
+
+        assert_eq!(key.load(Ordering::Relaxed), 0);
+        assert_eq!(value.load(Ordering::Relaxed), 0);
+    }
+
+    #[test]
+    fn test_drain_drops() {
+        let key = AtomicUsize::new(0);
+        let value = AtomicUsize::new(0);
+
+        let mut hm = {
+            let mut hm = HashMap::new();
+
+            assert_eq!(key.load(Ordering::Relaxed), 0);
+            assert_eq!(value.load(Ordering::Relaxed), 0);
+
+            for i in 0..100 {
+                let d1 = Dropable::new(i, &key);
+                let d2 = Dropable::new(i + 100, &value);
+                hm.insert(d1, d2);
+            }
+
+            assert_eq!(key.load(Ordering::Relaxed), 100);
+            assert_eq!(value.load(Ordering::Relaxed), 100);
+
+            hm
+        };
+
+        // While we're at it, ensure that cloning doesn't throw off the drop counts.
+        drop(hm.clone());
+
+        assert_eq!(key.load(Ordering::Relaxed), 100);
+        assert_eq!(value.load(Ordering::Relaxed), 100);
+
+        // Ensure that dropping the drain iterator does not leak anything.
+        drop(hm.clone().par_drain());
+
+        {
+            assert_eq!(key.load(Ordering::Relaxed), 100);
+            assert_eq!(value.load(Ordering::Relaxed), 100);
+
+            // retain only half
+            let _v: Vec<_> = hm.drain().filter(|&(ref key, _)| key.k < 50).collect();
+            assert!(hm.is_empty());
+
+            assert_eq!(key.load(Ordering::Relaxed), 50);
+            assert_eq!(value.load(Ordering::Relaxed), 50);
+        };
+
+        assert_eq!(key.load(Ordering::Relaxed), 0);
+        assert_eq!(value.load(Ordering::Relaxed), 0);
+    }
+
+    #[test]
+    fn test_empty_iter() {
+        let mut m: HashMap<isize, bool> = HashMap::new();
+        assert_eq!(m.par_drain().count(), 0);
+        assert_eq!(m.par_keys().count(), 0);
+        assert_eq!(m.par_values().count(), 0);
+        assert_eq!(m.par_values_mut().count(), 0);
+        assert_eq!(m.par_iter().count(), 0);
+        assert_eq!(m.par_iter_mut().count(), 0);
+        assert_eq!(m.len(), 0);
+        assert!(m.is_empty());
+        assert_eq!(m.into_par_iter().count(), 0);
+    }
+
+    #[test]
+    fn test_iterate() {
+        let mut m = HashMap::with_capacity(4);
+        for i in 0..32 {
+            assert!(m.insert(i, i * 2).is_none());
+        }
+        assert_eq!(m.len(), 32);
+
+        let observed = AtomicUsize::new(0);
+
+        m.par_iter().for_each(|(k, v)| {
+            assert_eq!(*v, *k * 2);
+            observed.fetch_or(1 << *k, Ordering::Relaxed);
+        });
+        assert_eq!(observed.into_inner(), 0xFFFF_FFFF);
+    }
+
+    #[test]
+    fn test_keys() {
+        let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
+        let map: HashMap<_, _> = vec.into_par_iter().collect();
+        let keys: Vec<_> = map.par_keys().cloned().collect();
+        assert_eq!(keys.len(), 3);
+        assert!(keys.contains(&1));
+        assert!(keys.contains(&2));
+        assert!(keys.contains(&3));
+    }
+
+    #[test]
+    fn test_values() {
+        let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
+        let map: HashMap<_, _> = vec.into_par_iter().collect();
+        let values: Vec<_> = map.par_values().cloned().collect();
+        assert_eq!(values.len(), 3);
+        assert!(values.contains(&'a'));
+        assert!(values.contains(&'b'));
+        assert!(values.contains(&'c'));
+    }
+
+    #[test]
+    fn test_values_mut() {
+        let vec = vec![(1, 1), (2, 2), (3, 3)];
+        let mut map: HashMap<_, _> = vec.into_par_iter().collect();
+        map.par_values_mut().for_each(|value| *value = (*value) * 2);
+        let values: Vec<_> = map.par_values().cloned().collect();
+        assert_eq!(values.len(), 3);
+        assert!(values.contains(&2));
+        assert!(values.contains(&4));
+        assert!(values.contains(&6));
+    }
+
+    #[test]
+    fn test_eq() {
+        let mut m1 = HashMap::new();
+        m1.insert(1, 2);
+        m1.insert(2, 3);
+        m1.insert(3, 4);
+
+        let mut m2 = HashMap::new();
+        m2.insert(1, 2);
+        m2.insert(2, 3);
+
+        assert!(!m1.par_eq(&m2));
+
+        m2.insert(3, 4);
+
+        assert!(m1.par_eq(&m2));
+    }
+
+    #[test]
+    fn test_from_iter() {
+        let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
+
+        let map: HashMap<_, _> = xs.par_iter().cloned().collect();
+
+        for &(k, v) in &xs {
+            assert_eq!(map.get(&k), Some(&v));
+        }
+    }
+
+    #[test]
+    fn test_extend_ref() {
+        let mut a = HashMap::new();
+        a.insert(1, "one");
+        let mut b = HashMap::new();
+        b.insert(2, "two");
+        b.insert(3, "three");
+
+        a.par_extend(&b);
+
+        assert_eq!(a.len(), 3);
+        assert_eq!(a[&1], "one");
+        assert_eq!(a[&2], "two");
+        assert_eq!(a[&3], "three");
+    }
+}
diff --git a/src/external_trait_impls/rayon/mod.rs b/src/external_trait_impls/rayon/mod.rs
new file mode 100644
index 0000000..99337a1
--- /dev/null
+++ b/src/external_trait_impls/rayon/mod.rs
@@ -0,0 +1,4 @@
+mod helpers;
+pub(crate) mod map;
+pub(crate) mod raw;
+pub(crate) mod set;
diff --git a/src/external_trait_impls/rayon/raw.rs b/src/external_trait_impls/rayon/raw.rs
new file mode 100644
index 0000000..1bd2c17
--- /dev/null
+++ b/src/external_trait_impls/rayon/raw.rs
@@ -0,0 +1,199 @@
+use crate::raw::Bucket;
+use crate::raw::{RawIter, RawIterRange, RawTable};
+use crate::scopeguard::guard;
+use alloc::alloc::dealloc;
+use core::marker::PhantomData;
+use core::mem;
+use core::ptr::NonNull;
+use rayon::iter::{
+    plumbing::{self, Folder, UnindexedConsumer, UnindexedProducer},
+    ParallelIterator,
+};
+
+/// Parallel iterator which returns a raw pointer to every full bucket in the table.
+pub struct RawParIter<T> {
+    iter: RawIterRange<T>,
+}
+
+impl<T> From<RawIter<T>> for RawParIter<T> {
+    fn from(it: RawIter<T>) -> Self {
+        RawParIter { iter: it.iter }
+    }
+}
+
+impl<T> ParallelIterator for RawParIter<T> {
+    type Item = Bucket<T>;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drive_unindexed<C>(self, consumer: C) -> C::Result
+    where
+        C: UnindexedConsumer<Self::Item>,
+    {
+        let producer = ParIterProducer { iter: self.iter };
+        plumbing::bridge_unindexed(producer, consumer)
+    }
+}
+
+/// Producer which returns a `Bucket<T>` for every element.
+struct ParIterProducer<T> {
+    iter: RawIterRange<T>,
+}
+
+impl<T> UnindexedProducer for ParIterProducer<T> {
+    type Item = Bucket<T>;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn split(self) -> (Self, Option<Self>) {
+        let (left, right) = self.iter.split();
+        let left = ParIterProducer { iter: left };
+        let right = right.map(|right| ParIterProducer { iter: right });
+        (left, right)
+    }
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn fold_with<F>(self, folder: F) -> F
+    where
+        F: Folder<Self::Item>,
+    {
+        folder.consume_iter(self.iter)
+    }
+}
+
+/// Parallel iterator which consumes a table and returns elements.
+pub struct RawIntoParIter<T> {
+    table: RawTable<T>,
+}
+
+impl<T: Send> ParallelIterator for RawIntoParIter<T> {
+    type Item = T;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drive_unindexed<C>(self, consumer: C) -> C::Result
+    where
+        C: UnindexedConsumer<Self::Item>,
+    {
+        let iter = unsafe { self.table.iter().iter };
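+        // Free the table's storage once iteration finishes (or a consumer
+        // panics); the producer below is responsible for the elements
+        // themselves.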
+        let _guard = guard(self.table.into_alloc(), |alloc| {
+            if let Some((ptr, layout)) = *alloc {
+                unsafe {
+                    dealloc(ptr.as_ptr(), layout);
+                }
+            }
+        });
+        let producer = ParDrainProducer { iter };
+        plumbing::bridge_unindexed(producer, consumer)
+    }
+}
+
+/// Parallel iterator which consumes elements without freeing the table storage.
+pub struct RawParDrain<'a, T> {
+    // We don't use a &'a mut RawTable<T> because we want RawParDrain to be
+    // covariant over T.
+    table: NonNull<RawTable<T>>,
+    marker: PhantomData<&'a RawTable<T>>,
+}
+
+unsafe impl<T> Send for RawParDrain<'_, T> {}
+
+impl<T: Send> ParallelIterator for RawParDrain<'_, T> {
+    type Item = T;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drive_unindexed<C>(self, consumer: C) -> C::Result
+    where
+        C: UnindexedConsumer<Self::Item>,
+    {
+        let _guard = guard(self.table, |table| unsafe {
+            table.as_mut().clear_no_drop()
+        });
+        let iter = unsafe { self.table.as_ref().iter().iter };
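+        // The scope guard above now owns the cleanup, so defuse our own
+        // `Drop` impl, which would otherwise `clear` the table again.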
+        mem::forget(self);
+        let producer = ParDrainProducer { iter };
+        plumbing::bridge_unindexed(producer, consumer)
+    }
+}
+
+impl<T> Drop for RawParDrain<'_, T> {
+    fn drop(&mut self) {
+        // If drive_unindexed is not called then simply clear the table.
+        unsafe { self.table.as_mut().clear() }
+    }
+}
+
+/// Producer which will consume all elements in the range, even if it is dropped
+/// halfway through.
+struct ParDrainProducer<T> {
+    iter: RawIterRange<T>,
+}
+
+impl<T: Send> UnindexedProducer for ParDrainProducer<T> {
+    type Item = T;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn split(self) -> (Self, Option<Self>) {
+        let (left, right) = self.iter.clone().split();
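+        // The two halves take over ownership of the elements between them,
+        // so the original must not run its `Drop` impl and drop them twice.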
+        mem::forget(self);
+        let left = ParDrainProducer { iter: left };
+        let right = right.map(|right| ParDrainProducer { iter: right });
+        (left, right)
+    }
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn fold_with<F>(mut self, mut folder: F) -> F
+    where
+        F: Folder<Self::Item>,
+    {
+        // Make sure to modify the iterator in-place so that any remaining
+        // elements are processed in our Drop impl.
+        while let Some(item) = self.iter.next() {
+            folder = folder.consume(unsafe { item.read() });
+            if folder.full() {
+                return folder;
+            }
+        }
+
+        // If we processed all elements then we don't need to run the drop.
+        mem::forget(self);
+        folder
+    }
+}
+
+impl<T> Drop for ParDrainProducer<T> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drop(&mut self) {
+        // Drop all remaining elements
+        if mem::needs_drop::<T>() {
+            while let Some(item) = self.iter.next() {
+                unsafe {
+                    item.drop();
+                }
+            }
+        }
+    }
+}
+
+impl<T> RawTable<T> {
+    /// Returns a parallel iterator over the elements in a `RawTable`.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub unsafe fn par_iter(&self) -> RawParIter<T> {
+        RawParIter {
+            iter: self.iter().iter,
+        }
+    }
+
+    /// Returns a parallel iterator which consumes the `RawTable` and yields
+    /// its elements by value.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn into_par_iter(self) -> RawIntoParIter<T> {
+        RawIntoParIter { table: self }
+    }
+
+    /// Returns a parallel iterator which consumes all elements of a `RawTable`
+    /// without freeing its memory allocation.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn par_drain(&mut self) -> RawParDrain<'_, T> {
+        RawParDrain {
+            table: NonNull::from(self),
+            marker: PhantomData,
+        }
+    }
+}
diff --git a/src/external_trait_impls/rayon/set.rs b/src/external_trait_impls/rayon/set.rs
new file mode 100644
index 0000000..53d2660
--- /dev/null
+++ b/src/external_trait_impls/rayon/set.rs
@@ -0,0 +1,646 @@
+//! Rayon extensions for `HashSet`.
+
+use crate::hash_set::HashSet;
+use core::hash::{BuildHasher, Hash};
+use rayon::iter::plumbing::UnindexedConsumer;
+use rayon::iter::{FromParallelIterator, IntoParallelIterator, ParallelExtend, ParallelIterator};
+
+/// Parallel iterator over elements of a consumed set.
+///
+/// This iterator is created by the [`into_par_iter`] method on [`HashSet`]
+/// (provided by the [`IntoParallelIterator`] trait).
+/// See its documentation for more.
+///
+/// [`into_par_iter`]: /hashbrown/struct.HashSet.html#method.into_par_iter
+/// [`HashSet`]: /hashbrown/struct.HashSet.html
+/// [`IntoParallelIterator`]: https://docs.rs/rayon/1.0/rayon/iter/trait.IntoParallelIterator.html
+pub struct IntoParIter<T, S> {
+    set: HashSet<T, S>,
+}
+
+impl<T: Send, S: Send> ParallelIterator for IntoParIter<T, S> {
+    type Item = T;
+
+    fn drive_unindexed<C>(self, consumer: C) -> C::Result
+    where
+        C: UnindexedConsumer<Self::Item>,
+    {
+        self.set
+            .map
+            .into_par_iter()
+            .map(|(k, _)| k)
+            .drive_unindexed(consumer)
+    }
+}
+
+/// Parallel draining iterator over entries of a set.
+///
+/// This iterator is created by the [`par_drain`] method on [`HashSet`].
+/// See its documentation for more.
+///
+/// [`par_drain`]: /hashbrown/struct.HashSet.html#method.par_drain
+/// [`HashSet`]: /hashbrown/struct.HashSet.html
+pub struct ParDrain<'a, T, S> {
+    set: &'a mut HashSet<T, S>,
+}
+
+impl<T: Send, S: Send> ParallelIterator for ParDrain<'_, T, S> {
+    type Item = T;
+
+    fn drive_unindexed<C>(self, consumer: C) -> C::Result
+    where
+        C: UnindexedConsumer<Self::Item>,
+    {
+        self.set
+            .map
+            .par_drain()
+            .map(|(k, _)| k)
+            .drive_unindexed(consumer)
+    }
+}
+
+/// Parallel iterator over shared references to elements in a set.
+///
+/// This iterator is created by the [`par_iter`] method on [`HashSet`]
+/// (provided by the [`IntoParallelRefIterator`] trait).
+/// See its documentation for more.
+///
+/// [`par_iter`]: /hashbrown/struct.HashSet.html#method.par_iter
+/// [`HashSet`]: /hashbrown/struct.HashSet.html
+/// [`IntoParallelRefIterator`]: https://docs.rs/rayon/1.0/rayon/iter/trait.IntoParallelRefIterator.html
+pub struct ParIter<'a, T, S> {
+    set: &'a HashSet<T, S>,
+}
+
+impl<'a, T: Sync, S: Sync> ParallelIterator for ParIter<'a, T, S> {
+    type Item = &'a T;
+
+    fn drive_unindexed<C>(self, consumer: C) -> C::Result
+    where
+        C: UnindexedConsumer<Self::Item>,
+    {
+        self.set.map.par_keys().drive_unindexed(consumer)
+    }
+}
+
+/// Parallel iterator over shared references to elements in the difference of
+/// sets.
+///
+/// This iterator is created by the [`par_difference`] method on [`HashSet`].
+/// See its documentation for more.
+///
+/// [`par_difference`]: /hashbrown/struct.HashSet.html#method.par_difference
+/// [`HashSet`]: /hashbrown/struct.HashSet.html
+pub struct ParDifference<'a, T, S> {
+    a: &'a HashSet<T, S>,
+    b: &'a HashSet<T, S>,
+}
+
+impl<'a, T, S> ParallelIterator for ParDifference<'a, T, S>
+where
+    T: Eq + Hash + Sync,
+    S: BuildHasher + Sync,
+{
+    type Item = &'a T;
+
+    fn drive_unindexed<C>(self, consumer: C) -> C::Result
+    where
+        C: UnindexedConsumer<Self::Item>,
+    {
+        self.a
+            .into_par_iter()
+            .filter(|&x| !self.b.contains(x))
+            .drive_unindexed(consumer)
+    }
+}
+
+/// Parallel iterator over shared references to elements in the symmetric
+/// difference of sets.
+///
+/// This iterator is created by the [`par_symmetric_difference`] method on
+/// [`HashSet`].
+/// See its documentation for more.
+///
+/// [`par_symmetric_difference`]: /hashbrown/struct.HashSet.html#method.par_symmetric_difference
+/// [`HashSet`]: /hashbrown/struct.HashSet.html
+pub struct ParSymmetricDifference<'a, T, S> {
+    a: &'a HashSet<T, S>,
+    b: &'a HashSet<T, S>,
+}
+
+impl<'a, T, S> ParallelIterator for ParSymmetricDifference<'a, T, S>
+where
+    T: Eq + Hash + Sync,
+    S: BuildHasher + Sync,
+{
+    type Item = &'a T;
+
+    fn drive_unindexed<C>(self, consumer: C) -> C::Result
+    where
+        C: UnindexedConsumer<Self::Item>,
+    {
+        self.a
+            .par_difference(self.b)
+            .chain(self.b.par_difference(self.a))
+            .drive_unindexed(consumer)
+    }
+}
+
+/// Parallel iterator over shared references to elements in the intersection of
+/// sets.
+///
+/// This iterator is created by the [`par_intersection`] method on [`HashSet`].
+/// See its documentation for more.
+///
+/// [`par_intersection`]: /hashbrown/struct.HashSet.html#method.par_intersection
+/// [`HashSet`]: /hashbrown/struct.HashSet.html
+pub struct ParIntersection<'a, T, S> {
+    a: &'a HashSet<T, S>,
+    b: &'a HashSet<T, S>,
+}
+
+impl<'a, T, S> ParallelIterator for ParIntersection<'a, T, S>
+where
+    T: Eq + Hash + Sync,
+    S: BuildHasher + Sync,
+{
+    type Item = &'a T;
+
+    fn drive_unindexed<C>(self, consumer: C) -> C::Result
+    where
+        C: UnindexedConsumer<Self::Item>,
+    {
+        self.a
+            .into_par_iter()
+            .filter(|&x| self.b.contains(x))
+            .drive_unindexed(consumer)
+    }
+}
+
+/// Parallel iterator over shared references to elements in the union of sets.
+///
+/// This iterator is created by the [`par_union`] method on [`HashSet`].
+/// See its documentation for more.
+///
+/// [`par_union`]: /hashbrown/struct.HashSet.html#method.par_union
+/// [`HashSet`]: /hashbrown/struct.HashSet.html
+pub struct ParUnion<'a, T, S> {
+    a: &'a HashSet<T, S>,
+    b: &'a HashSet<T, S>,
+}
+
+impl<'a, T, S> ParallelIterator for ParUnion<'a, T, S>
+where
+    T: Eq + Hash + Sync,
+    S: BuildHasher + Sync,
+{
+    type Item = &'a T;
+
+    fn drive_unindexed<C>(self, consumer: C) -> C::Result
+    where
+        C: UnindexedConsumer<Self::Item>,
+    {
+        self.a
+            .into_par_iter()
+            .chain(self.b.par_difference(self.a))
+            .drive_unindexed(consumer)
+    }
+}
+
+impl<T, S> HashSet<T, S>
+where
+    T: Eq + Hash + Sync,
+    S: BuildHasher + Sync,
+{
+    /// Visits (potentially in parallel) the values representing the difference,
+    /// i.e. the values that are in `self` but not in `other`.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn par_difference<'a>(&'a self, other: &'a Self) -> ParDifference<'a, T, S> {
+        ParDifference { a: self, b: other }
+    }
+
+    /// Visits (potentially in parallel) the values representing the symmetric
+    /// difference, i.e. the values that are in `self` or in `other` but not in both.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn par_symmetric_difference<'a>(
+        &'a self,
+        other: &'a Self,
+    ) -> ParSymmetricDifference<'a, T, S> {
+        ParSymmetricDifference { a: self, b: other }
+    }
+
+    /// Visits (potentially in parallel) the values representing the
+    /// intersection, i.e. the values that are both in `self` and `other`.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn par_intersection<'a>(&'a self, other: &'a Self) -> ParIntersection<'a, T, S> {
+        ParIntersection { a: self, b: other }
+    }
+
+    /// Visits (potentially in parallel) the values representing the union,
+    /// i.e. all the values in `self` or `other`, without duplicates.
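+    ///
+    /// # Examples
+    ///
+    /// An illustrative sketch (assumes the `rayon` feature is enabled):
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// use rayon::prelude::*;
+    ///
+    /// let a: HashSet<i32> = [1, 2, 3].iter().cloned().collect();
+    /// let b: HashSet<i32> = [3, 4].iter().cloned().collect();
+    ///
+    /// let union: HashSet<i32> = a.par_union(&b).cloned().collect();
+    /// let expected: HashSet<i32> = [1, 2, 3, 4].iter().cloned().collect();
+    /// assert_eq!(union, expected);
+    /// ```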
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn par_union<'a>(&'a self, other: &'a Self) -> ParUnion<'a, T, S> {
+        ParUnion { a: self, b: other }
+    }
+
+    /// Returns `true` if `self` has no elements in common with `other`.
+    /// This is equivalent to checking for an empty intersection.
+    ///
+    /// This method runs in a potentially parallel fashion.
+    pub fn par_is_disjoint(&self, other: &Self) -> bool {
+        self.into_par_iter().all(|x| !other.contains(x))
+    }
+
+    /// Returns `true` if the set is a subset of another,
+    /// i.e. `other` contains at least all the values in `self`.
+    ///
+    /// This method runs in a potentially parallel fashion.
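+    ///
+    /// # Examples
+    ///
+    /// An illustrative sketch (assumes the `rayon` feature is enabled):
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let small: HashSet<i32> = [1, 2].iter().cloned().collect();
+    /// let big: HashSet<i32> = [1, 2, 3].iter().cloned().collect();
+    ///
+    /// assert!(small.par_is_subset(&big));
+    /// assert!(!big.par_is_subset(&small));
+    /// ```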
+    pub fn par_is_subset(&self, other: &Self) -> bool {
+        if self.len() <= other.len() {
+            self.into_par_iter().all(|x| other.contains(x))
+        } else {
+            false
+        }
+    }
+
+    /// Returns `true` if the set is a superset of another,
+    /// i.e. `self` contains at least all the values in `other`.
+    ///
+    /// This method runs in a potentially parallel fashion.
+    pub fn par_is_superset(&self, other: &Self) -> bool {
+        other.par_is_subset(self)
+    }
+
+    /// Returns `true` if the set is equal to another,
+    /// i.e. both sets contain the same values.
+    ///
+    /// This method runs in a potentially parallel fashion.
+    pub fn par_eq(&self, other: &Self) -> bool {
+        self.len() == other.len() && self.par_is_subset(other)
+    }
+}
+
+impl<T, S> HashSet<T, S>
+where
+    T: Eq + Hash + Send,
+    S: BuildHasher + Send,
+{
+    /// Consumes (potentially in parallel) all values in an arbitrary order,
+    /// while preserving the set's allocated memory for reuse.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn par_drain(&mut self) -> ParDrain<'_, T, S> {
+        ParDrain { set: self }
+    }
+}
+
+impl<T: Send, S: Send> IntoParallelIterator for HashSet<T, S> {
+    type Item = T;
+    type Iter = IntoParIter<T, S>;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn into_par_iter(self) -> Self::Iter {
+        IntoParIter { set: self }
+    }
+}
+
+impl<'a, T: Sync, S: Sync> IntoParallelIterator for &'a HashSet<T, S> {
+    type Item = &'a T;
+    type Iter = ParIter<'a, T, S>;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn into_par_iter(self) -> Self::Iter {
+        ParIter { set: self }
+    }
+}
+
+/// Collect values from a parallel iterator into a hashset.
+impl<T, S> FromParallelIterator<T> for HashSet<T, S>
+where
+    T: Eq + Hash + Send,
+    S: BuildHasher + Default,
+{
+    fn from_par_iter<P>(par_iter: P) -> Self
+    where
+        P: IntoParallelIterator<Item = T>,
+    {
+        let mut set = HashSet::default();
+        set.par_extend(par_iter);
+        set
+    }
+}
+
+/// Extend a hash set with items from a parallel iterator.
+impl<T, S> ParallelExtend<T> for HashSet<T, S>
+where
+    T: Eq + Hash + Send,
+    S: BuildHasher,
+{
+    fn par_extend<I>(&mut self, par_iter: I)
+    where
+        I: IntoParallelIterator<Item = T>,
+    {
+        extend(self, par_iter);
+    }
+}
+
+/// Extend a hash set with copied items from a parallel iterator.
+impl<'a, T, S> ParallelExtend<&'a T> for HashSet<T, S>
+where
+    T: 'a + Copy + Eq + Hash + Sync,
+    S: BuildHasher,
+{
+    fn par_extend<I>(&mut self, par_iter: I)
+    where
+        I: IntoParallelIterator<Item = &'a T>,
+    {
+        extend(self, par_iter);
+    }
+}
+
+// Same strategy as `Extend` on a sequential `HashSet` -- no parallel advantage.
+fn extend<T, S, I>(set: &mut HashSet<T, S>, par_iter: I)
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    I: IntoParallelIterator,
+    HashSet<T, S>: Extend<I::Item>,
+{
+    let (list, len) = super::helpers::collect(par_iter);
+
+    // Values may already be present, or may appear multiple times in the
+    // iterator. Reserve the entire length if the set is empty; otherwise
+    // reserve half the length (rounded up), so the set will only resize
+    // twice in the worst case.
+    let reserve = if set.is_empty() { len } else { (len + 1) / 2 };
+    set.reserve(reserve);
+    for vec in list {
+        set.extend(vec);
+    }
+}
+
+#[cfg(test)]
+mod test_par_set {
+    use alloc::vec::Vec;
+    use core::sync::atomic::{AtomicUsize, Ordering};
+
+    use rayon::prelude::*;
+
+    use crate::hash_set::HashSet;
+
+    #[test]
+    fn test_disjoint() {
+        let mut xs = HashSet::new();
+        let mut ys = HashSet::new();
+        assert!(xs.par_is_disjoint(&ys));
+        assert!(ys.par_is_disjoint(&xs));
+        assert!(xs.insert(5));
+        assert!(ys.insert(11));
+        assert!(xs.par_is_disjoint(&ys));
+        assert!(ys.par_is_disjoint(&xs));
+        assert!(xs.insert(7));
+        assert!(xs.insert(19));
+        assert!(xs.insert(4));
+        assert!(ys.insert(2));
+        assert!(ys.insert(-11));
+        assert!(xs.par_is_disjoint(&ys));
+        assert!(ys.par_is_disjoint(&xs));
+        assert!(ys.insert(7));
+        assert!(!xs.par_is_disjoint(&ys));
+        assert!(!ys.par_is_disjoint(&xs));
+    }
+
+    #[test]
+    fn test_subset_and_superset() {
+        let mut a = HashSet::new();
+        assert!(a.insert(0));
+        assert!(a.insert(5));
+        assert!(a.insert(11));
+        assert!(a.insert(7));
+
+        let mut b = HashSet::new();
+        assert!(b.insert(0));
+        assert!(b.insert(7));
+        assert!(b.insert(19));
+        assert!(b.insert(250));
+        assert!(b.insert(11));
+        assert!(b.insert(200));
+
+        assert!(!a.par_is_subset(&b));
+        assert!(!a.par_is_superset(&b));
+        assert!(!b.par_is_subset(&a));
+        assert!(!b.par_is_superset(&a));
+
+        assert!(b.insert(5));
+
+        assert!(a.par_is_subset(&b));
+        assert!(!a.par_is_superset(&b));
+        assert!(!b.par_is_subset(&a));
+        assert!(b.par_is_superset(&a));
+    }
+
+    #[test]
+    fn test_iterate() {
+        let mut a = HashSet::new();
+        for i in 0..32 {
+            assert!(a.insert(i));
+        }
+        let observed = AtomicUsize::new(0);
+        a.par_iter().for_each(|k| {
+            observed.fetch_or(1 << *k, Ordering::Relaxed);
+        });
+        assert_eq!(observed.into_inner(), 0xFFFF_FFFF);
+    }
+
+    #[test]
+    fn test_intersection() {
+        let mut a = HashSet::new();
+        let mut b = HashSet::new();
+
+        assert!(a.insert(11));
+        assert!(a.insert(1));
+        assert!(a.insert(3));
+        assert!(a.insert(77));
+        assert!(a.insert(103));
+        assert!(a.insert(5));
+        assert!(a.insert(-5));
+
+        assert!(b.insert(2));
+        assert!(b.insert(11));
+        assert!(b.insert(77));
+        assert!(b.insert(-9));
+        assert!(b.insert(-42));
+        assert!(b.insert(5));
+        assert!(b.insert(3));
+
+        let expected = [3, 5, 11, 77];
+        let i = a
+            .par_intersection(&b)
+            .map(|x| {
+                assert!(expected.contains(x));
+                1
+            })
+            .sum::<usize>();
+        assert_eq!(i, expected.len());
+    }
+
+    #[test]
+    fn test_difference() {
+        let mut a = HashSet::new();
+        let mut b = HashSet::new();
+
+        assert!(a.insert(1));
+        assert!(a.insert(3));
+        assert!(a.insert(5));
+        assert!(a.insert(9));
+        assert!(a.insert(11));
+
+        assert!(b.insert(3));
+        assert!(b.insert(9));
+
+        let expected = [1, 5, 11];
+        let i = a
+            .par_difference(&b)
+            .map(|x| {
+                assert!(expected.contains(x));
+                1
+            })
+            .sum::<usize>();
+        assert_eq!(i, expected.len());
+    }
+
+    #[test]
+    fn test_symmetric_difference() {
+        let mut a = HashSet::new();
+        let mut b = HashSet::new();
+
+        assert!(a.insert(1));
+        assert!(a.insert(3));
+        assert!(a.insert(5));
+        assert!(a.insert(9));
+        assert!(a.insert(11));
+
+        assert!(b.insert(-2));
+        assert!(b.insert(3));
+        assert!(b.insert(9));
+        assert!(b.insert(14));
+        assert!(b.insert(22));
+
+        let expected = [-2, 1, 5, 11, 14, 22];
+        let i = a
+            .par_symmetric_difference(&b)
+            .map(|x| {
+                assert!(expected.contains(x));
+                1
+            })
+            .sum::<usize>();
+        assert_eq!(i, expected.len());
+    }
+
+    #[test]
+    fn test_union() {
+        let mut a = HashSet::new();
+        let mut b = HashSet::new();
+
+        assert!(a.insert(1));
+        assert!(a.insert(3));
+        assert!(a.insert(5));
+        assert!(a.insert(9));
+        assert!(a.insert(11));
+        assert!(a.insert(16));
+        assert!(a.insert(19));
+        assert!(a.insert(24));
+
+        assert!(b.insert(-2));
+        assert!(b.insert(1));
+        assert!(b.insert(5));
+        assert!(b.insert(9));
+        assert!(b.insert(13));
+        assert!(b.insert(19));
+
+        let expected = [-2, 1, 3, 5, 9, 11, 13, 16, 19, 24];
+        let i = a
+            .par_union(&b)
+            .map(|x| {
+                assert!(expected.contains(x));
+                1
+            })
+            .sum::<usize>();
+        assert_eq!(i, expected.len());
+    }
+
+    #[test]
+    fn test_from_iter() {
+        let xs = [1, 2, 3, 4, 5, 6, 7, 8, 9];
+
+        let set: HashSet<_> = xs.par_iter().cloned().collect();
+
+        for x in &xs {
+            assert!(set.contains(x));
+        }
+    }
+
+    #[test]
+    fn test_move_iter() {
+        let hs = {
+            let mut hs = HashSet::new();
+
+            hs.insert('a');
+            hs.insert('b');
+
+            hs
+        };
+
+        let v = hs.into_par_iter().collect::<Vec<char>>();
+        assert!(v == ['a', 'b'] || v == ['b', 'a']);
+    }
+
+    #[test]
+    fn test_eq() {
+        // These constants once happened to expose a bug in insert().
+        // I'm keeping them around to prevent a regression.
+        let mut s1 = HashSet::new();
+
+        s1.insert(1);
+        s1.insert(2);
+        s1.insert(3);
+
+        let mut s2 = HashSet::new();
+
+        s2.insert(1);
+        s2.insert(2);
+
+        assert!(!s1.par_eq(&s2));
+
+        s2.insert(3);
+
+        assert!(s1.par_eq(&s2));
+    }
+
+    #[test]
+    fn test_extend_ref() {
+        let mut a = HashSet::new();
+        a.insert(1);
+
+        a.par_extend(&[2, 3, 4][..]);
+
+        assert_eq!(a.len(), 4);
+        assert!(a.contains(&1));
+        assert!(a.contains(&2));
+        assert!(a.contains(&3));
+        assert!(a.contains(&4));
+
+        let mut b = HashSet::new();
+        b.insert(5);
+        b.insert(6);
+
+        a.par_extend(&b);
+
+        assert_eq!(a.len(), 6);
+        assert!(a.contains(&1));
+        assert!(a.contains(&2));
+        assert!(a.contains(&3));
+        assert!(a.contains(&4));
+        assert!(a.contains(&5));
+        assert!(a.contains(&6));
+    }
+}
diff --git a/src/external_trait_impls/serde.rs b/src/external_trait_impls/serde.rs
new file mode 100644
index 0000000..7816e78
--- /dev/null
+++ b/src/external_trait_impls/serde.rs
@@ -0,0 +1,200 @@
+mod size_hint {
+    use core::cmp;
+
+    /// Caps the capacity preallocated from an (untrusted) size hint at 4096
+    /// entries, so a hostile hint cannot force a huge allocation -- a
+    /// denial-of-service vector.
+    ///
+    /// Original discussion: https://github.com/serde-rs/serde/issues/1114.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub(super) fn cautious(hint: Option<usize>) -> usize {
+        cmp::min(hint.unwrap_or(0), 4096)
+    }
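+
+    // For example, a hostile hint of `usize::MAX` is clamped to 4096
+    // preallocated entries; genuine elements beyond that simply grow the
+    // collection as usual.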
+}
+
+mod map {
+    use core::fmt;
+    use core::hash::{BuildHasher, Hash};
+    use core::marker::PhantomData;
+    use serde::de::{Deserialize, Deserializer, MapAccess, Visitor};
+    use serde::ser::{Serialize, Serializer};
+
+    use crate::hash_map::HashMap;
+
+    use super::size_hint;
+
+    impl<K, V, H> Serialize for HashMap<K, V, H>
+    where
+        K: Serialize + Eq + Hash,
+        V: Serialize,
+        H: BuildHasher,
+    {
+        #[cfg_attr(feature = "inline-more", inline)]
+        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+        where
+            S: Serializer,
+        {
+            serializer.collect_map(self)
+        }
+    }
+
+    impl<'de, K, V, S> Deserialize<'de> for HashMap<K, V, S>
+    where
+        K: Deserialize<'de> + Eq + Hash,
+        V: Deserialize<'de>,
+        S: BuildHasher + Default,
+    {
+        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+        where
+            D: Deserializer<'de>,
+        {
+            struct MapVisitor<K, V, S> {
+                marker: PhantomData<HashMap<K, V, S>>,
+            }
+
+            impl<'de, K, V, S> Visitor<'de> for MapVisitor<K, V, S>
+            where
+                K: Deserialize<'de> + Eq + Hash,
+                V: Deserialize<'de>,
+                S: BuildHasher + Default,
+            {
+                type Value = HashMap<K, V, S>;
+
+                fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+                    formatter.write_str("a map")
+                }
+
+                #[cfg_attr(feature = "inline-more", inline)]
+                fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
+                where
+                    A: MapAccess<'de>,
+                {
+                    let mut values = HashMap::with_capacity_and_hasher(
+                        size_hint::cautious(map.size_hint()),
+                        S::default(),
+                    );
+
+                    while let Some((key, value)) = map.next_entry()? {
+                        values.insert(key, value);
+                    }
+
+                    Ok(values)
+                }
+            }
+
+            let visitor = MapVisitor {
+                marker: PhantomData,
+            };
+            deserializer.deserialize_map(visitor)
+        }
+    }
+}
+
+mod set {
+    use core::fmt;
+    use core::hash::{BuildHasher, Hash};
+    use core::marker::PhantomData;
+    use serde::de::{Deserialize, Deserializer, SeqAccess, Visitor};
+    use serde::ser::{Serialize, Serializer};
+
+    use crate::hash_set::HashSet;
+
+    use super::size_hint;
+
+    impl<T, H> Serialize for HashSet<T, H>
+    where
+        T: Serialize + Eq + Hash,
+        H: BuildHasher,
+    {
+        #[cfg_attr(feature = "inline-more", inline)]
+        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+        where
+            S: Serializer,
+        {
+            serializer.collect_seq(self)
+        }
+    }
+
+    impl<'de, T, S> Deserialize<'de> for HashSet<T, S>
+    where
+        T: Deserialize<'de> + Eq + Hash,
+        S: BuildHasher + Default,
+    {
+        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+        where
+            D: Deserializer<'de>,
+        {
+            struct SeqVisitor<T, S> {
+                marker: PhantomData<HashSet<T, S>>,
+            }
+
+            impl<'de, T, S> Visitor<'de> for SeqVisitor<T, S>
+            where
+                T: Deserialize<'de> + Eq + Hash,
+                S: BuildHasher + Default,
+            {
+                type Value = HashSet<T, S>;
+
+                fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+                    formatter.write_str("a sequence")
+                }
+
+                #[cfg_attr(feature = "inline-more", inline)]
+                fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
+                where
+                    A: SeqAccess<'de>,
+                {
+                    let mut values = HashSet::with_capacity_and_hasher(
+                        size_hint::cautious(seq.size_hint()),
+                        S::default(),
+                    );
+
+                    while let Some(value) = seq.next_element()? {
+                        values.insert(value);
+                    }
+
+                    Ok(values)
+                }
+            }
+
+            let visitor = SeqVisitor {
+                marker: PhantomData,
+            };
+            deserializer.deserialize_seq(visitor)
+        }
+
+        fn deserialize_in_place<D>(deserializer: D, place: &mut Self) -> Result<(), D::Error>
+        where
+            D: Deserializer<'de>,
+        {
+            struct SeqInPlaceVisitor<'a, T, S>(&'a mut HashSet<T, S>);
+
+            impl<'a, 'de, T, S> Visitor<'de> for SeqInPlaceVisitor<'a, T, S>
+            where
+                T: Deserialize<'de> + Eq + Hash,
+                S: BuildHasher + Default,
+            {
+                type Value = ();
+
+                fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+                    formatter.write_str("a sequence")
+                }
+
+                #[cfg_attr(feature = "inline-more", inline)]
+                fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
+                where
+                    A: SeqAccess<'de>,
+                {
+                    self.0.clear();
+                    self.0.reserve(size_hint::cautious(seq.size_hint()));
+
+                    while let Some(value) = seq.next_element()? {
+                        self.0.insert(value);
+                    }
+
+                    Ok(())
+                }
+            }
+
+            deserializer.deserialize_seq(SeqInPlaceVisitor(place))
+        }
+    }
+}
diff --git a/src/lib.rs b/src/lib.rs
new file mode 100644
index 0000000..3aff40a
--- /dev/null
+++ b/src/lib.rs
@@ -0,0 +1,112 @@
+//! This crate is a Rust port of Google's high-performance [SwissTable] hash
+//! map, adapted to make it a drop-in replacement for Rust's standard `HashMap`
+//! and `HashSet` types.
+//!
+//! The original C++ version of [SwissTable] can be found [here], and this
+//! [CppCon talk] gives an overview of how the algorithm works.
+//!
+//! [SwissTable]: https://abseil.io/blog/20180927-swisstables
+//! [here]: https://github.com/abseil/abseil-cpp/blob/master/absl/container/internal/raw_hash_set.h
+//! [CppCon talk]: https://www.youtube.com/watch?v=ncHmEUmJZf4
+
+#![no_std]
+#![cfg_attr(
+    feature = "nightly",
+    feature(test, core_intrinsics, dropck_eyepatch, min_specialization, extend_one)
+)]
+#![allow(
+    clippy::doc_markdown,
+    clippy::module_name_repetitions,
+    clippy::must_use_candidate,
+    clippy::option_if_let_else
+)]
+#![warn(missing_docs)]
+#![warn(rust_2018_idioms)]
+
+#[cfg(test)]
+#[macro_use]
+extern crate std;
+
+#[cfg_attr(test, macro_use)]
+extern crate alloc;
+
+#[cfg(feature = "nightly")]
+#[cfg(doctest)]
+doc_comment::doctest!("../README.md");
+
+#[macro_use]
+mod macros;
+
+#[cfg(feature = "raw")]
+/// Experimental and unsafe `RawTable` API. This module is only available if the
+/// `raw` feature is enabled.
+pub mod raw {
+    // The RawTable API is still experimental and is not properly documented yet.
+    #[allow(missing_docs)]
+    #[path = "mod.rs"]
+    mod inner;
+    pub use inner::*;
+
+    #[cfg(feature = "rayon")]
+    pub mod rayon {
+        pub use crate::external_trait_impls::rayon::raw::*;
+    }
+}
+#[cfg(not(feature = "raw"))]
+mod raw;
+
+mod external_trait_impls;
+mod map;
+#[cfg(feature = "rustc-internal-api")]
+mod rustc_entry;
+mod scopeguard;
+mod set;
+
+pub mod hash_map {
+    //! A hash map implemented with quadratic probing and SIMD lookup.
+    pub use crate::map::*;
+
+    #[cfg(feature = "rustc-internal-api")]
+    pub use crate::rustc_entry::*;
+
+    #[cfg(feature = "rayon")]
+    /// [rayon]-based parallel iterator types for hash maps.
+    /// You will rarely need to interact with this module directly unless you
+    /// need to name one of the iterator types.
+    ///
+    /// [rayon]: https://docs.rs/rayon/1.0/rayon
+    pub mod rayon {
+        pub use crate::external_trait_impls::rayon::map::*;
+    }
+}
+pub mod hash_set {
+    //! A hash set implemented as a `HashMap` where the value is `()`.
+    pub use crate::set::*;
+
+    #[cfg(feature = "rayon")]
+    /// [rayon]-based parallel iterator types for hash sets.
+    /// You will rarely need to interact with this module directly unless you
+    /// need to name one of the iterator types.
+    ///
+    /// [rayon]: https://docs.rs/rayon/1.0/rayon
+    pub mod rayon {
+        pub use crate::external_trait_impls::rayon::set::*;
+    }
+}
+
+pub use crate::map::HashMap;
+pub use crate::set::HashSet;
+
+/// The error type for `try_reserve` methods.
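+///
+/// # Examples
+///
+/// An illustrative sketch:
+///
+/// ```
+/// use hashbrown::{HashMap, TryReserveError};
+///
+/// let mut map: HashMap<i32, i32> = HashMap::new();
+/// // An absurd reservation fails with an error instead of aborting.
+/// match map.try_reserve(usize::max_value()) {
+///     Err(TryReserveError::CapacityOverflow) => {}
+///     other => panic!("unexpected result: {:?}", other),
+/// }
+/// ```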
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub enum TryReserveError {
+    /// Error due to the computed capacity exceeding the collection's maximum
+    /// (usually `isize::MAX` bytes).
+    CapacityOverflow,
+
+    /// The memory allocator returned an error.
+    AllocError {
+        /// The layout of the allocation request that failed.
+        layout: alloc::alloc::Layout,
+    },
+}
diff --git a/src/macros.rs b/src/macros.rs
new file mode 100644
index 0000000..0279597
--- /dev/null
+++ b/src/macros.rs
@@ -0,0 +1,69 @@
+// See the cfg-if crate.
+macro_rules! cfg_if {
+    // match if/else chains with a final `else`
+    ($(
+        if #[cfg($($meta:meta),*)] { $($it:item)* }
+    ) else * else {
+        $($it2:item)*
+    }) => {
+        cfg_if! {
+            @__items
+            () ;
+            $( ( ($($meta),*) ($($it)*) ), )*
+            ( () ($($it2)*) ),
+        }
+    };
+
+    // match if/else chains lacking a final `else`
+    (
+        if #[cfg($($i_met:meta),*)] { $($i_it:item)* }
+        $(
+            else if #[cfg($($e_met:meta),*)] { $($e_it:item)* }
+        )*
+    ) => {
+        cfg_if! {
+            @__items
+            () ;
+            ( ($($i_met),*) ($($i_it)*) ),
+            $( ( ($($e_met),*) ($($e_it)*) ), )*
+            ( () () ),
+        }
+    };
+
+    // Internal and recursive macro to emit all the items
+    //
+    // Collects all the negated cfgs in a list at the beginning and after the
+    // semicolon is all the remaining items
+    (@__items ($($not:meta,)*) ; ) => {};
+    (@__items ($($not:meta,)*) ; ( ($($m:meta),*) ($($it:item)*) ), $($rest:tt)*) => {
+        // Emit all items within one block, applying an appropriate #[cfg]. The
+        // #[cfg] will require all `$m` matchers specified and must also negate
+        // all previous matchers.
+        cfg_if! { @__apply cfg(all($($m,)* not(any($($not),*)))), $($it)* }
+
+        // Recurse to emit all other items in `$rest`, and when we do so add all
+        // our `$m` matchers to the list of `$not` matchers as future emissions
+        // will have to negate everything we just matched as well.
+        cfg_if! { @__items ($($not,)* $($m,)*) ; $($rest)* }
+    };
+
+    // Internal macro to apply a cfg attribute to a list of items
+    (@__apply $m:meta, $($it:item)*) => {
+        $(#[$m] $it)*
+    };
+}
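+
+// An illustrative, hypothetical use of `cfg_if!` (the crate's real call sites
+// select between the SIMD and generic `RawTable` implementations):
+//
+// cfg_if! {
+//     if #[cfg(feature = "nightly")] {
+//         fn imp() { /* nightly-only implementation */ }
+//     } else {
+//         fn imp() { /* stable fallback */ }
+//     }
+// }
+//
+// Each branch is emitted under a `#[cfg]` that requires its own predicate
+// and negates all earlier ones, so exactly one branch is ever compiled.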
+
+// Helper macro for specialization. This also helps avoid parse errors if the
+// default fn syntax for specialization changes in the future.
+#[cfg(feature = "nightly")]
+macro_rules! default_fn {
+    ($($tt:tt)*) => {
+        default $($tt)*
+    }
+}
+#[cfg(not(feature = "nightly"))]
+macro_rules! default_fn {
+    ($($tt:tt)*) => {
+        $($tt)*
+    }
+}
diff --git a/src/map.rs b/src/map.rs
new file mode 100644
index 0000000..1ccba31
--- /dev/null
+++ b/src/map.rs
@@ -0,0 +1,4524 @@
+use crate::raw::{Bucket, RawDrain, RawIntoIter, RawIter, RawTable};
+use crate::TryReserveError;
+use core::borrow::Borrow;
+use core::fmt::{self, Debug};
+use core::hash::{BuildHasher, Hash, Hasher};
+use core::iter::{FromIterator, FusedIterator};
+use core::marker::PhantomData;
+use core::mem;
+use core::ops::Index;
+
+/// Default hasher for `HashMap`.
+#[cfg(feature = "ahash")]
+pub type DefaultHashBuilder = ahash::RandomState;
+
+/// Dummy default hasher for `HashMap`.
+#[cfg(not(feature = "ahash"))]
+pub enum DefaultHashBuilder {}
+
+/// A hash map implemented with quadratic probing and SIMD lookup.
+///
+/// The default hashing algorithm is currently [`AHash`], though this is
+/// subject to change at any point in the future. This hash function is very
+/// fast for all types of keys, but this algorithm will typically *not* protect
+/// against attacks such as HashDoS.
+///
+/// The hashing algorithm can be replaced on a per-`HashMap` basis using the
+/// [`default`], [`with_hasher`], and [`with_capacity_and_hasher`] methods. Many
+/// alternative algorithms are available on crates.io, such as the [`fnv`] crate.
+///
+/// It is required that the keys implement the [`Eq`] and [`Hash`] traits, although
+/// this can frequently be achieved by using `#[derive(PartialEq, Eq, Hash)]`.
+/// If you implement these yourself, it is important that the following
+/// property holds:
+///
+/// ```text
+/// k1 == k2 -> hash(k1) == hash(k2)
+/// ```
+///
+/// In other words, if two keys are equal, their hashes must be equal.
+///
+/// It is a logic error for a key to be modified in such a way that the key's
+/// hash, as determined by the [`Hash`] trait, or its equality, as determined by
+/// the [`Eq`] trait, changes while it is in the map. This is normally only
+/// possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
+///
+/// It is also a logic error for the [`Hash`] implementation of a key to panic.
+/// This is generally only possible if the trait is implemented manually. If a
+/// panic does occur then the contents of the `HashMap` may become corrupted and
+/// some items may be dropped from the table.
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::HashMap;
+///
+/// // Type inference lets us omit an explicit type signature (which
+/// // would be `HashMap<String, String>` in this example).
+/// let mut book_reviews = HashMap::new();
+///
+/// // Review some books.
+/// book_reviews.insert(
+///     "Adventures of Huckleberry Finn".to_string(),
+///     "My favorite book.".to_string(),
+/// );
+/// book_reviews.insert(
+///     "Grimms' Fairy Tales".to_string(),
+///     "Masterpiece.".to_string(),
+/// );
+/// book_reviews.insert(
+///     "Pride and Prejudice".to_string(),
+///     "Very enjoyable.".to_string(),
+/// );
+/// book_reviews.insert(
+///     "The Adventures of Sherlock Holmes".to_string(),
+///     "Eye lyked it alot.".to_string(),
+/// );
+///
+/// // Check for a specific one.
+/// // When collections store owned values (String), they can still be
+/// // queried using references (&str).
+/// if !book_reviews.contains_key("Les Misérables") {
+///     println!("We've got {} reviews, but Les Misérables ain't one.",
+///              book_reviews.len());
+/// }
+///
+/// // oops, this review has a lot of spelling mistakes, let's delete it.
+/// book_reviews.remove("The Adventures of Sherlock Holmes");
+///
+/// // Look up the values associated with some keys.
+/// let to_find = ["Pride and Prejudice", "Alice's Adventure in Wonderland"];
+/// for &book in &to_find {
+///     match book_reviews.get(book) {
+///         Some(review) => println!("{}: {}", book, review),
+///         None => println!("{} is unreviewed.", book)
+///     }
+/// }
+///
+/// // Look up the value for a key (will panic if the key is not found).
+/// println!("Review for Jane: {}", book_reviews["Pride and Prejudice"]);
+///
+/// // Iterate over everything.
+/// for (book, review) in &book_reviews {
+///     println!("{}: \"{}\"", book, review);
+/// }
+/// ```
+///
+/// `HashMap` also implements an [`Entry API`](#method.entry), which allows
+/// for more complex methods of getting, setting, updating and removing keys and
+/// their values:
+///
+/// ```
+/// use hashbrown::HashMap;
+///
+/// // type inference lets us omit an explicit type signature (which
+/// // would be `HashMap<&str, u8>` in this example).
+/// let mut player_stats = HashMap::new();
+///
+/// fn random_stat_buff() -> u8 {
+///     // could actually return some random value here - let's just return
+///     // some fixed value for now
+///     42
+/// }
+///
+/// // insert a key only if it doesn't already exist
+/// player_stats.entry("health").or_insert(100);
+///
+/// // insert a key using a function that provides a new value only if it
+/// // doesn't already exist
+/// player_stats.entry("defence").or_insert_with(random_stat_buff);
+///
+/// // update a key, guarding against the key possibly not being set
+/// let stat = player_stats.entry("attack").or_insert(100);
+/// *stat += random_stat_buff();
+/// ```
+///
+/// The easiest way to use `HashMap` with a custom key type is to derive [`Eq`] and [`Hash`].
+/// We must also derive [`PartialEq`].
+///
+/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+/// [`PartialEq`]: https://doc.rust-lang.org/std/cmp/trait.PartialEq.html
+/// [`RefCell`]: https://doc.rust-lang.org/std/cell/struct.RefCell.html
+/// [`Cell`]: https://doc.rust-lang.org/std/cell/struct.Cell.html
+/// [`default`]: #method.default
+/// [`with_hasher`]: #method.with_hasher
+/// [`with_capacity_and_hasher`]: #method.with_capacity_and_hasher
+/// [`fnv`]: https://crates.io/crates/fnv
+/// [`AHash`]: https://crates.io/crates/ahash
+///
+/// ```
+/// use hashbrown::HashMap;
+///
+/// #[derive(Hash, Eq, PartialEq, Debug)]
+/// struct Viking {
+///     name: String,
+///     country: String,
+/// }
+///
+/// impl Viking {
+///     /// Creates a new Viking.
+///     fn new(name: &str, country: &str) -> Viking {
+///         Viking { name: name.to_string(), country: country.to_string() }
+///     }
+/// }
+///
+/// // Use a HashMap to store the vikings' health points.
+/// let mut vikings = HashMap::new();
+///
+/// vikings.insert(Viking::new("Einar", "Norway"), 25);
+/// vikings.insert(Viking::new("Olaf", "Denmark"), 24);
+/// vikings.insert(Viking::new("Harald", "Iceland"), 12);
+///
+/// // Use derived implementation to print the status of the vikings.
+/// for (viking, health) in &vikings {
+///     println!("{:?} has {} hp", viking, health);
+/// }
+/// ```
+///
+/// A `HashMap` with a fixed list of elements can be initialized from an array:
+///
+/// ```
+/// use hashbrown::HashMap;
+///
+/// let timber_resources: HashMap<&str, i32> = [("Norway", 100), ("Denmark", 50), ("Iceland", 10)]
+///     .iter().cloned().collect();
+/// // use the values stored in map
+/// ```
+pub struct HashMap<K, V, S = DefaultHashBuilder> {
+    pub(crate) hash_builder: S,
+    pub(crate) table: RawTable<(K, V)>,
+}
+
+impl<K: Clone, V: Clone, S: Clone> Clone for HashMap<K, V, S> {
+    fn clone(&self) -> Self {
+        HashMap {
+            hash_builder: self.hash_builder.clone(),
+            table: self.table.clone(),
+        }
+    }
+
+    fn clone_from(&mut self, source: &Self) {
+        self.table.clone_from(&source.table);
+
+        // Update hash_builder only if we successfully cloned all elements.
+        self.hash_builder.clone_from(&source.hash_builder);
+    }
+}
+
+#[cfg_attr(feature = "inline-more", inline)]
+pub(crate) fn make_hash<K: Hash + ?Sized>(hash_builder: &impl BuildHasher, val: &K) -> u64 {
+    let mut state = hash_builder.build_hasher();
+    val.hash(&mut state);
+    state.finish()
+}
+
+#[cfg(feature = "ahash")]
+impl<K, V> HashMap<K, V, DefaultHashBuilder> {
+    /// Creates an empty `HashMap`.
+    ///
+    /// The hash map is initially created with a capacity of 0, so it will not allocate until it
+    /// is first inserted into.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// let mut map: HashMap<&str, i32> = HashMap::new();
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn new() -> Self {
+        Self::default()
+    }
+
+    /// Creates an empty `HashMap` with the specified capacity.
+    ///
+    /// The hash map will be able to hold at least `capacity` elements without
+    /// reallocating. If `capacity` is 0, the hash map will not allocate.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// let mut map: HashMap<&str, i32> = HashMap::with_capacity(10);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn with_capacity(capacity: usize) -> Self {
+        Self::with_capacity_and_hasher(capacity, DefaultHashBuilder::default())
+    }
+}
+
+impl<K, V, S> HashMap<K, V, S> {
+    /// Creates an empty `HashMap` which will use the given hash builder to hash
+    /// keys.
+    ///
+    /// The created map has the default initial capacity.
+    ///
+    /// Warning: `hash_builder` is normally randomly generated, and
+    /// is designed to allow HashMaps to be resistant to attacks that
+    /// cause many collisions and very poor performance. Setting it
+    /// manually using this function can expose a DoS attack vector.
+    ///
+    /// The `hash_builder` passed should implement the [`BuildHasher`] trait for
+    /// the HashMap to be useful; see its documentation for details.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::DefaultHashBuilder;
+    ///
+    /// let s = DefaultHashBuilder::default();
+    /// let mut map = HashMap::with_hasher(s);
+    /// map.insert(1, 2);
+    /// ```
+    ///
+    /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub const fn with_hasher(hash_builder: S) -> Self {
+        Self {
+            hash_builder,
+            table: RawTable::new(),
+        }
+    }
+
+    /// Creates an empty `HashMap` with the specified capacity, using `hash_builder`
+    /// to hash the keys.
+    ///
+    /// The hash map will be able to hold at least `capacity` elements without
+    /// reallocating. If `capacity` is 0, the hash map will not allocate.
+    ///
+    /// Warning: `hash_builder` is normally randomly generated, and
+    /// is designed to allow HashMaps to be resistant to attacks that
+    /// cause many collisions and very poor performance. Setting it
+    /// manually using this function can expose a DoS attack vector.
+    ///
+    /// The `hash_builder` passed should implement the [`BuildHasher`] trait for
+    /// the HashMap to be useful; see its documentation for details.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::DefaultHashBuilder;
+    ///
+    /// let s = DefaultHashBuilder::default();
+    /// let mut map = HashMap::with_capacity_and_hasher(10, s);
+    /// map.insert(1, 2);
+    /// ```
+    ///
+    /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> Self {
+        Self {
+            hash_builder,
+            table: RawTable::with_capacity(capacity),
+        }
+    }
+
+    /// Returns a reference to the map's [`BuildHasher`].
+    ///
+    /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::DefaultHashBuilder;
+    ///
+    /// let hasher = DefaultHashBuilder::default();
+    /// let map: HashMap<i32, i32> = HashMap::with_hasher(hasher);
+    /// let hasher: &DefaultHashBuilder = map.hasher();
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn hasher(&self) -> &S {
+        &self.hash_builder
+    }
+
+    /// Returns the number of elements the map can hold without reallocating.
+    ///
+    /// This number is a lower bound; the `HashMap<K, V>` might be able to hold
+    /// more, but is guaranteed to be able to hold at least this many.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// let map: HashMap<i32, i32> = HashMap::with_capacity(100);
+    /// assert!(map.capacity() >= 100);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn capacity(&self) -> usize {
+        self.table.capacity()
+    }
+
+    /// An iterator visiting all keys in arbitrary order.
+    /// The iterator element type is `&'a K`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert("a", 1);
+    /// map.insert("b", 2);
+    /// map.insert("c", 3);
+    ///
+    /// for key in map.keys() {
+    ///     println!("{}", key);
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn keys(&self) -> Keys<'_, K, V> {
+        Keys { inner: self.iter() }
+    }
+
+    /// An iterator visiting all values in arbitrary order.
+    /// The iterator element type is `&'a V`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert("a", 1);
+    /// map.insert("b", 2);
+    /// map.insert("c", 3);
+    ///
+    /// for val in map.values() {
+    ///     println!("{}", val);
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn values(&self) -> Values<'_, K, V> {
+        Values { inner: self.iter() }
+    }
+
+    /// An iterator visiting all values mutably in arbitrary order.
+    /// The iterator element type is `&'a mut V`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    ///
+    /// map.insert("a", 1);
+    /// map.insert("b", 2);
+    /// map.insert("c", 3);
+    ///
+    /// for val in map.values_mut() {
+    ///     *val = *val + 10;
+    /// }
+    ///
+    /// for val in map.values() {
+    ///     println!("{}", val);
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn values_mut(&mut self) -> ValuesMut<'_, K, V> {
+        ValuesMut {
+            inner: self.iter_mut(),
+        }
+    }
+
+    /// An iterator visiting all key-value pairs in arbitrary order.
+    /// The iterator element type is `(&'a K, &'a V)`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert("a", 1);
+    /// map.insert("b", 2);
+    /// map.insert("c", 3);
+    ///
+    /// for (key, val) in map.iter() {
+    ///     println!("key: {} val: {}", key, val);
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn iter(&self) -> Iter<'_, K, V> {
+        // Here we tie the lifetime of self to the iter.
+        unsafe {
+            Iter {
+                inner: self.table.iter(),
+                marker: PhantomData,
+            }
+        }
+    }
+
+    /// An iterator visiting all key-value pairs in arbitrary order,
+    /// with mutable references to the values.
+    /// The iterator element type is `(&'a K, &'a mut V)`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert("a", 1);
+    /// map.insert("b", 2);
+    /// map.insert("c", 3);
+    ///
+    /// // Update all values
+    /// for (_, val) in map.iter_mut() {
+    ///     *val *= 2;
+    /// }
+    ///
+    /// for (key, val) in &map {
+    ///     println!("key: {} val: {}", key, val);
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
+        // Here we tie the lifetime of self to the iter.
+        unsafe {
+            IterMut {
+                inner: self.table.iter(),
+                marker: PhantomData,
+            }
+        }
+    }
+
+    #[cfg(test)]
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn raw_capacity(&self) -> usize {
+        self.table.buckets()
+    }
+
+    /// Returns the number of elements in the map.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut a = HashMap::new();
+    /// assert_eq!(a.len(), 0);
+    /// a.insert(1, "a");
+    /// assert_eq!(a.len(), 1);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn len(&self) -> usize {
+        self.table.len()
+    }
+
+    /// Returns `true` if the map contains no elements.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut a = HashMap::new();
+    /// assert!(a.is_empty());
+    /// a.insert(1, "a");
+    /// assert!(!a.is_empty());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
+    /// Clears the map, returning all key-value pairs as an iterator. Keeps the
+    /// allocated memory for reuse.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut a = HashMap::new();
+    /// a.insert(1, "a");
+    /// a.insert(2, "b");
+    ///
+    /// for (k, v) in a.drain().take(1) {
+    ///     assert!(k == 1 || k == 2);
+    ///     assert!(v == "a" || v == "b");
+    /// }
+    ///
+    /// assert!(a.is_empty());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn drain(&mut self) -> Drain<'_, K, V> {
+        Drain {
+            inner: self.table.drain(),
+        }
+    }
+
+    /// Retains only the elements specified by the predicate.
+    ///
+    /// In other words, remove all pairs `(k, v)` such that `f(&k, &mut v)` returns `false`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<i32, i32> = (0..8).map(|x|(x, x*10)).collect();
+    /// map.retain(|&k, _| k % 2 == 0);
+    /// assert_eq!(map.len(), 4);
+    /// ```
+    pub fn retain<F>(&mut self, mut f: F)
+    where
+        F: FnMut(&K, &mut V) -> bool,
+    {
+        // Here we only use `iter` as a temporary, preventing use-after-free
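+        // Erasing an item that the iterator has already yielded does not
+        // invalidate the iterator, so the loop can keep advancing.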
+        unsafe {
+            for item in self.table.iter() {
+                let &mut (ref key, ref mut value) = item.as_mut();
+                if !f(key, value) {
+                    self.table.erase(item);
+                }
+            }
+        }
+    }
+
+    /// Drains elements for which the predicate returns `true`,
+    /// and returns an iterator over the removed items.
+    ///
+    /// In other words, move all pairs `(k, v)` such that `f(&k, &mut v)` returns `true` out
+    /// into another iterator.
+    ///
+    /// When the returned `DrainFilter` is dropped, any remaining elements that satisfy
+    /// the predicate are dropped from the table.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x)).collect();
+    /// let drained: HashMap<i32, i32> = map.drain_filter(|k, _v| k % 2 == 0).collect();
+    ///
+    /// let mut evens = drained.keys().cloned().collect::<Vec<_>>();
+    /// let mut odds = map.keys().cloned().collect::<Vec<_>>();
+    /// evens.sort();
+    /// odds.sort();
+    ///
+    /// assert_eq!(evens, vec![0, 2, 4, 6]);
+    /// assert_eq!(odds, vec![1, 3, 5, 7]);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn drain_filter<F>(&mut self, f: F) -> DrainFilter<'_, K, V, F>
+    where
+        F: FnMut(&K, &mut V) -> bool,
+    {
+        DrainFilter {
+            f,
+            inner: DrainFilterInner {
+                iter: unsafe { self.table.iter() },
+                table: &mut self.table,
+            },
+        }
+    }
+
+    /// Clears the map, removing all key-value pairs. Keeps the allocated memory
+    /// for reuse.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut a = HashMap::new();
+    /// a.insert(1, "a");
+    /// a.clear();
+    /// assert!(a.is_empty());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn clear(&mut self) {
+        self.table.clear();
+    }
+}
+
+impl<K, V, S> HashMap<K, V, S>
+where
+    K: Eq + Hash,
+    S: BuildHasher,
+{
+    /// Reserves capacity for at least `additional` more elements to be inserted
+    /// in the `HashMap`. The collection may reserve more space to avoid
+    /// frequent reallocations.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the new allocation size overflows [`usize`].
+    ///
+    /// [`usize`]: https://doc.rust-lang.org/std/primitive.usize.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// let mut map: HashMap<&str, i32> = HashMap::new();
+    /// map.reserve(10);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn reserve(&mut self, additional: usize) {
+        let hash_builder = &self.hash_builder;
+        self.table
+            .reserve(additional, |x| make_hash(hash_builder, &x.0));
+    }
+
+    /// Tries to reserve capacity for at least `additional` more elements to be inserted
+    /// in the given `HashMap<K,V>`. The collection may reserve more space to avoid
+    /// frequent reallocations.
+    ///
+    /// # Errors
+    ///
+    /// If the capacity overflows, or the allocator reports a failure, then an error
+    /// is returned.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// let mut map: HashMap<&str, isize> = HashMap::new();
+    /// map.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
+        let hash_builder = &self.hash_builder;
+        self.table
+            .try_reserve(additional, |x| make_hash(hash_builder, &x.0))
+    }
+
+    /// Shrinks the capacity of the map as much as possible. It will drop
+    /// down as much as possible while maintaining the internal rules
+    /// and possibly leaving some space in accordance with the resize policy.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
+    /// map.insert(1, 2);
+    /// map.insert(3, 4);
+    /// assert!(map.capacity() >= 100);
+    /// map.shrink_to_fit();
+    /// assert!(map.capacity() >= 2);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn shrink_to_fit(&mut self) {
+        let hash_builder = &self.hash_builder;
+        self.table.shrink_to(0, |x| make_hash(hash_builder, &x.0));
+    }
+
+    /// Shrinks the capacity of the map with a lower limit. It will drop
+    /// down no lower than the supplied limit while maintaining the internal rules
+    /// and possibly leaving some space in accordance with the resize policy.
+    ///
+    /// This function does nothing if the current capacity is smaller than the
+    /// supplied minimum capacity.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
+    /// map.insert(1, 2);
+    /// map.insert(3, 4);
+    /// assert!(map.capacity() >= 100);
+    /// map.shrink_to(10);
+    /// assert!(map.capacity() >= 10);
+    /// map.shrink_to(0);
+    /// assert!(map.capacity() >= 2);
+    /// map.shrink_to(10);
+    /// assert!(map.capacity() >= 2);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn shrink_to(&mut self, min_capacity: usize) {
+        let hash_builder = &self.hash_builder;
+        self.table
+            .shrink_to(min_capacity, |x| make_hash(hash_builder, &x.0));
+    }
+
+    /// Gets the given key's corresponding entry in the map for in-place manipulation.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut letters = HashMap::new();
+    ///
+    /// for ch in "a short treatise on fungi".chars() {
+    ///     let counter = letters.entry(ch).or_insert(0);
+    ///     *counter += 1;
+    /// }
+    ///
+    /// assert_eq!(letters[&'s'], 2);
+    /// assert_eq!(letters[&'t'], 3);
+    /// assert_eq!(letters[&'u'], 1);
+    /// assert_eq!(letters.get(&'y'), None);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S> {
+        let hash = make_hash(&self.hash_builder, &key);
+        if let Some(elem) = self.table.find(hash, |q| q.0.eq(&key)) {
+            Entry::Occupied(OccupiedEntry {
+                hash,
+                key: Some(key),
+                elem,
+                table: self,
+            })
+        } else {
+            Entry::Vacant(VacantEntry {
+                hash,
+                key,
+                table: self,
+            })
+        }
+    }
+
+    /// Returns a reference to the value corresponding to the key.
+    ///
+    /// The key may be any borrowed form of the map's key type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the key type.
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert(1, "a");
+    /// assert_eq!(map.get(&1), Some(&"a"));
+    /// assert_eq!(map.get(&2), None);
+    /// ```
+    #[inline]
+    pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V>
+    where
+        K: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.get_inner(k) {
+            Some(&(_, ref v)) => Some(v),
+            None => None,
+        }
+    }
+
+    /// Returns the key-value pair corresponding to the supplied key.
+    ///
+    /// The supplied key may be any borrowed form of the map's key type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the key type.
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert(1, "a");
+    /// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
+    /// assert_eq!(map.get_key_value(&2), None);
+    /// ```
+    #[inline]
+    pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
+    where
+        K: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.get_inner(k) {
+            Some(&(ref key, ref value)) => Some((key, value)),
+            None => None,
+        }
+    }
+
+    #[inline]
+    fn get_inner<Q: ?Sized>(&self, k: &Q) -> Option<&(K, V)>
+    where
+        K: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        let hash = make_hash(&self.hash_builder, k);
+        self.table.get(hash, |x| k.eq(x.0.borrow()))
+    }
+
+    /// Returns the key-value pair corresponding to the supplied key, with a mutable reference to the value.
+    ///
+    /// The supplied key may be any borrowed form of the map's key type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the key type.
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert(1, "a");
+    /// let (k, v) = map.get_key_value_mut(&1).unwrap();
+    /// assert_eq!(k, &1);
+    /// assert_eq!(v, &mut "a");
+    /// *v = "b";
+    /// assert_eq!(map.get_key_value_mut(&1), Some((&1, &mut "b")));
+    /// assert_eq!(map.get_key_value_mut(&2), None);
+    /// ```
+    #[inline]
+    pub fn get_key_value_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<(&K, &mut V)>
+    where
+        K: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.get_inner_mut(k) {
+            Some(&mut (ref key, ref mut value)) => Some((key, value)),
+            None => None,
+        }
+    }
+
+    /// Returns `true` if the map contains a value for the specified key.
+    ///
+    /// The key may be any borrowed form of the map's key type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the key type.
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert(1, "a");
+    /// assert_eq!(map.contains_key(&1), true);
+    /// assert_eq!(map.contains_key(&2), false);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn contains_key<Q: ?Sized>(&self, k: &Q) -> bool
+    where
+        K: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        self.get_inner(k).is_some()
+    }
+
+    /// Returns a mutable reference to the value corresponding to the key.
+    ///
+    /// The key may be any borrowed form of the map's key type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the key type.
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert(1, "a");
+    /// if let Some(x) = map.get_mut(&1) {
+    ///     *x = "b";
+    /// }
+    /// assert_eq!(map[&1], "b");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<&mut V>
+    where
+        K: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.get_inner_mut(k) {
+            Some(&mut (_, ref mut v)) => Some(v),
+            None => None,
+        }
+    }
+
+    #[inline]
+    fn get_inner_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<&mut (K, V)>
+    where
+        K: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        let hash = make_hash(&self.hash_builder, k);
+        self.table.get_mut(hash, |x| k.eq(x.0.borrow()))
+    }
+
+    /// Inserts a key-value pair into the map.
+    ///
+    /// If the map did not have this key present, [`None`] is returned.
+    ///
+    /// If the map did have this key present, the value is updated, and the old
+    /// value is returned. The key is not updated, though; this matters for
+    /// types that can be `==` without being identical. See the [module-level
+    /// documentation] for more.
+    ///
+    /// [`None`]: https://doc.rust-lang.org/std/option/enum.Option.html#variant.None
+    /// [module-level documentation]: index.html#insert-and-complex-keys
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// assert_eq!(map.insert(37, "a"), None);
+    /// assert_eq!(map.is_empty(), false);
+    ///
+    /// map.insert(37, "b");
+    /// assert_eq!(map.insert(37, "c"), Some("b"));
+    /// assert_eq!(map[&37], "c");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(&mut self, k: K, v: V) -> Option<V> {
+        let hash = make_hash(&self.hash_builder, &k);
+        if let Some((_, item)) = self.table.get_mut(hash, |x| k.eq(&x.0)) {
+            Some(mem::replace(item, v))
+        } else {
+            let hash_builder = &self.hash_builder;
+            self.table
+                .insert(hash, (k, v), |x| make_hash(hash_builder, &x.0));
+            None
+        }
+    }
+
+    /// Removes a key from the map, returning the value at the key if the key
+    /// was previously in the map.
+    ///
+    /// The key may be any borrowed form of the map's key type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the key type.
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert(1, "a");
+    /// assert_eq!(map.remove(&1), Some("a"));
+    /// assert_eq!(map.remove(&1), None);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove<Q: ?Sized>(&mut self, k: &Q) -> Option<V>
+    where
+        K: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.remove_entry(k) {
+            Some((_, v)) => Some(v),
+            None => None,
+        }
+    }
+
+    /// Removes a key from the map, returning the stored key and value if the
+    /// key was previously in the map.
+    ///
+    /// The key may be any borrowed form of the map's key type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the key type.
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert(1, "a");
+    /// assert_eq!(map.remove_entry(&1), Some((1, "a")));
+    /// assert_eq!(map.remove(&1), None);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove_entry<Q: ?Sized>(&mut self, k: &Q) -> Option<(K, V)>
+    where
+        K: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        let hash = make_hash(&self.hash_builder, &k);
+        self.table.remove_entry(hash, |x| k.eq(x.0.borrow()))
+    }
+}
+
+impl<K, V, S> HashMap<K, V, S> {
+    /// Creates a raw entry builder for the HashMap.
+    ///
+    /// Raw entries provide the lowest level of control for searching and
+    /// manipulating a map. They must be manually initialized with a hash and
+    /// then manually searched. After this, insertions into a vacant entry
+    /// still require an owned key to be provided.
+    ///
+    /// Raw entries are useful for such exotic situations as:
+    ///
+    /// * Hash memoization
+    /// * Deferring the creation of an owned key until it is known to be required
+    /// * Using a search key that doesn't work with the Borrow trait
+    /// * Using custom comparison logic without newtype wrappers
+    ///
+    /// Because raw entries provide much more low-level control, it's much easier
+    /// to put the HashMap into an inconsistent state which, while memory-safe,
+    /// will cause the map to produce seemingly random results. Higher-level and
+    /// more foolproof APIs like `entry` should be preferred when possible.
+    ///
+    /// In particular, the hash used to initialize the raw entry must still be
+    /// consistent with the hash of the key that is ultimately stored in the entry.
+    /// This is because implementations of HashMap may need to recompute hashes
+    /// when resizing, at which point only the keys are available.
+    ///
+    /// Raw entries give mutable access to the keys. This must not be used
+    /// to modify how the key would compare or hash, as the map will not re-evaluate
+    /// where the key should go, meaning the keys may become "lost" if their
+    /// location does not reflect their state. For instance, if you change a key
+    /// so that the map now contains keys which compare equal, search may start
+    /// acting erratically, with two keys randomly masking each other. Implementations
+    /// are free to assume this doesn't happen (within the limits of memory-safety).
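+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch of typical usage through `from_key`, which hashes the
+    /// key with the map's own hasher (the higher-level `entry` API is usually
+    /// preferable):
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::RawEntryMut;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.insert("a", 1);
+    ///
+    /// // Update an existing entry in place.
+    /// match map.raw_entry_mut().from_key("a") {
+    ///     RawEntryMut::Occupied(mut o) => *o.get_mut() += 9,
+    ///     RawEntryMut::Vacant(_) => unreachable!(),
+    /// }
+    /// assert_eq!(map["a"], 10);
+    ///
+    /// // A missing key yields a vacant entry; insertion requires an owned key.
+    /// if let RawEntryMut::Vacant(v) = map.raw_entry_mut().from_key("b") {
+    ///     v.insert("b", 2);
+    /// }
+    /// assert_eq!(map["b"], 2);
+    /// ```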
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<'_, K, V, S> {
+        RawEntryBuilderMut { map: self }
+    }
+
+    /// Creates a raw immutable entry builder for the HashMap.
+    ///
+    /// Raw entries provide the lowest level of control for searching and
+    /// manipulating a map. They must be manually initialized with a hash and
+    /// then manually searched.
+    ///
+    /// This is useful for:
+    /// * Hash memoization
+    /// * Using a search key that doesn't work with the Borrow trait
+    /// * Using custom comparison logic without newtype wrappers
+    ///
+    /// Unless you are in such a situation, higher-level and more foolproof APIs like
+    /// `get` should be preferred.
+    ///
+    /// Immutable raw entries have very limited use; you might instead want `raw_entry_mut`.
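+    ///
+    /// # Examples
+    ///
+    /// A small sketch of an immutable lookup by key (equivalent to
+    /// `get_key_value` when a `Borrow`-based search key suffices):
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.insert("a", 1);
+    ///
+    /// assert_eq!(map.raw_entry().from_key("a"), Some((&"a", &1)));
+    /// assert_eq!(map.raw_entry().from_key("b"), None);
+    /// ```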
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn raw_entry(&self) -> RawEntryBuilder<'_, K, V, S> {
+        RawEntryBuilder { map: self }
+    }
+}
+
+impl<K, V, S> PartialEq for HashMap<K, V, S>
+where
+    K: Eq + Hash,
+    V: PartialEq,
+    S: BuildHasher,
+{
+    fn eq(&self, other: &Self) -> bool {
+        if self.len() != other.len() {
+            return false;
+        }
+
+        self.iter()
+            .all(|(key, value)| other.get(key).map_or(false, |v| *value == *v))
+    }
+}
+
+impl<K, V, S> Eq for HashMap<K, V, S>
+where
+    K: Eq + Hash,
+    V: Eq,
+    S: BuildHasher,
+{
+}
+
+impl<K, V, S> Debug for HashMap<K, V, S>
+where
+    K: Debug,
+    V: Debug,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_map().entries(self.iter()).finish()
+    }
+}
+
+impl<K, V, S> Default for HashMap<K, V, S>
+where
+    S: Default,
+{
+    /// Creates an empty `HashMap<K, V, S>`, with the `Default` value for the hasher.
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn default() -> Self {
+        Self::with_hasher(Default::default())
+    }
+}
+
+impl<K, Q: ?Sized, V, S> Index<&Q> for HashMap<K, V, S>
+where
+    K: Eq + Hash + Borrow<Q>,
+    Q: Eq + Hash,
+    S: BuildHasher,
+{
+    type Output = V;
+
+    /// Returns a reference to the value corresponding to the supplied key.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the key is not present in the `HashMap`.
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn index(&self, key: &Q) -> &V {
+        self.get(key).expect("no entry found for key")
+    }
+}
+
+/// An iterator over the entries of a `HashMap`.
+///
+/// This `struct` is created by the [`iter`] method on [`HashMap`]. See its
+/// documentation for more.
+///
+/// [`iter`]: struct.HashMap.html#method.iter
+/// [`HashMap`]: struct.HashMap.html
+pub struct Iter<'a, K, V> {
+    inner: RawIter<(K, V)>,
+    marker: PhantomData<(&'a K, &'a V)>,
+}
+
+// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
+impl<K, V> Clone for Iter<'_, K, V> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Iter {
+            inner: self.inner.clone(),
+            marker: PhantomData,
+        }
+    }
+}
+
+impl<K: Debug, V: Debug> fmt::Debug for Iter<'_, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+/// A mutable iterator over the entries of a `HashMap`.
+///
+/// This `struct` is created by the [`iter_mut`] method on [`HashMap`]. See its
+/// documentation for more.
+///
+/// [`iter_mut`]: struct.HashMap.html#method.iter_mut
+/// [`HashMap`]: struct.HashMap.html
+pub struct IterMut<'a, K, V> {
+    inner: RawIter<(K, V)>,
+    // To ensure invariance with respect to V
+    marker: PhantomData<(&'a K, &'a mut V)>,
+}
+
+// We override the default Send impl which has K: Sync instead of K: Send. Both
+// are correct, but this one is more general since it allows keys which
+// implement Send but not Sync.
+unsafe impl<K: Send, V: Send> Send for IterMut<'_, K, V> {}
+
+impl<K, V> IterMut<'_, K, V> {
+    /// Returns an iterator of references over the remaining items.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub(super) fn iter(&self) -> Iter<'_, K, V> {
+        Iter {
+            inner: self.inner.clone(),
+            marker: PhantomData,
+        }
+    }
+}
+
+/// An owning iterator over the entries of a `HashMap`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`HashMap`]
+/// (provided by the `IntoIterator` trait). See its documentation for more.
+///
+/// [`into_iter`]: struct.HashMap.html#method.into_iter
+/// [`HashMap`]: struct.HashMap.html
+pub struct IntoIter<K, V> {
+    inner: RawIntoIter<(K, V)>,
+}
+
+impl<K, V> IntoIter<K, V> {
+    /// Returns an iterator of references over the remaining items.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub(super) fn iter(&self) -> Iter<'_, K, V> {
+        Iter {
+            inner: self.inner.iter(),
+            marker: PhantomData,
+        }
+    }
+}
+
+/// An iterator over the keys of a `HashMap`.
+///
+/// This `struct` is created by the [`keys`] method on [`HashMap`]. See its
+/// documentation for more.
+///
+/// [`keys`]: struct.HashMap.html#method.keys
+/// [`HashMap`]: struct.HashMap.html
+pub struct Keys<'a, K, V> {
+    inner: Iter<'a, K, V>,
+}
+
+// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
+impl<K, V> Clone for Keys<'_, K, V> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Keys {
+            inner: self.inner.clone(),
+        }
+    }
+}
+
+impl<K: Debug, V> fmt::Debug for Keys<'_, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+/// An iterator over the values of a `HashMap`.
+///
+/// This `struct` is created by the [`values`] method on [`HashMap`]. See its
+/// documentation for more.
+///
+/// [`values`]: struct.HashMap.html#method.values
+/// [`HashMap`]: struct.HashMap.html
+pub struct Values<'a, K, V> {
+    inner: Iter<'a, K, V>,
+}
+
+// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
+impl<K, V> Clone for Values<'_, K, V> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Values {
+            inner: self.inner.clone(),
+        }
+    }
+}
+
+impl<K, V: Debug> fmt::Debug for Values<'_, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+/// A draining iterator over the entries of a `HashMap`.
+///
+/// This `struct` is created by the [`drain`] method on [`HashMap`]. See its
+/// documentation for more.
+///
+/// [`drain`]: struct.HashMap.html#method.drain
+/// [`HashMap`]: struct.HashMap.html
+pub struct Drain<'a, K, V> {
+    inner: RawDrain<'a, (K, V)>,
+}
+
+impl<K, V> Drain<'_, K, V> {
+    /// Returns an iterator of references over the remaining items.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub(super) fn iter(&self) -> Iter<'_, K, V> {
+        Iter {
+            inner: self.inner.iter(),
+            marker: PhantomData,
+        }
+    }
+}
+
+/// A draining iterator over entries of a `HashMap` which satisfy the predicate `f`.
+///
+/// This `struct` is created by the [`drain_filter`] method on [`HashMap`]. See its
+/// documentation for more.
+///
+/// [`drain_filter`]: struct.HashMap.html#method.drain_filter
+/// [`HashMap`]: struct.HashMap.html
+pub struct DrainFilter<'a, K, V, F>
+where
+    F: FnMut(&K, &mut V) -> bool,
+{
+    f: F,
+    inner: DrainFilterInner<'a, K, V>,
+}
+
+impl<'a, K, V, F> Drop for DrainFilter<'a, K, V, F>
+where
+    F: FnMut(&K, &mut V) -> bool,
+{
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drop(&mut self) {
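+        // Consume every remaining matching item. The guard below ensures
+        // that if dropping a yielded item panics, the rest are still drained
+        // (and removed from the table) before the panic propagates.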
+        while let Some(item) = self.next() {
+            let guard = ConsumeAllOnDrop(self);
+            drop(item);
+            mem::forget(guard);
+        }
+    }
+}
+
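+/// A drop guard which, when dropped, exhausts the wrapped iterator,
+/// dropping every remaining item.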
+pub(super) struct ConsumeAllOnDrop<'a, T: Iterator>(pub &'a mut T);
+
+impl<T: Iterator> Drop for ConsumeAllOnDrop<'_, T> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drop(&mut self) {
+        self.0.for_each(drop)
+    }
+}
+
+impl<K, V, F> Iterator for DrainFilter<'_, K, V, F>
+where
+    F: FnMut(&K, &mut V) -> bool,
+{
+    type Item = (K, V);
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<Self::Item> {
+        self.inner.next(&mut self.f)
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (0, self.inner.iter.size_hint().1)
+    }
+}
+
+impl<K, V, F> FusedIterator for DrainFilter<'_, K, V, F> where F: FnMut(&K, &mut V) -> bool {}
+
+/// Portions of `DrainFilter` shared with `set::DrainFilter`
+pub(super) struct DrainFilterInner<'a, K, V> {
+    pub iter: RawIter<(K, V)>,
+    pub table: &'a mut RawTable<(K, V)>,
+}
+
+impl<K, V> DrainFilterInner<'_, K, V> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub(super) fn next<F>(&mut self, f: &mut F) -> Option<(K, V)>
+    where
+        F: FnMut(&K, &mut V) -> bool,
+    {
+        unsafe {
+            while let Some(item) = self.iter.next() {
+                let &mut (ref key, ref mut value) = item.as_mut();
+                if f(key, value) {
+                    return Some(self.table.remove(item));
+                }
+            }
+        }
+        None
+    }
+}
+
+/// A mutable iterator over the values of a `HashMap`.
+///
+/// This `struct` is created by the [`values_mut`] method on [`HashMap`]. See its
+/// documentation for more.
+///
+/// [`values_mut`]: struct.HashMap.html#method.values_mut
+/// [`HashMap`]: struct.HashMap.html
+pub struct ValuesMut<'a, K, V> {
+    inner: IterMut<'a, K, V>,
+}
+
+/// A builder for computing where in a [`HashMap`] a key-value pair would be stored.
+///
+/// See the [`HashMap::raw_entry_mut`] docs for usage examples.
+///
+/// [`HashMap::raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
+pub struct RawEntryBuilderMut<'a, K, V, S> {
+    map: &'a mut HashMap<K, V, S>,
+}
+
+/// A view into a single entry in a map, which may either be vacant or occupied.
+///
+/// This is a lower-level version of [`Entry`].
+///
+/// This `enum` is constructed through the [`raw_entry_mut`] method on [`HashMap`],
+/// then calling one of the methods of that [`RawEntryBuilderMut`].
+///
+/// [`HashMap`]: struct.HashMap.html
+/// [`Entry`]: enum.Entry.html
+/// [`raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
+/// [`RawEntryBuilderMut`]: struct.RawEntryBuilderMut.html
+pub enum RawEntryMut<'a, K, V, S> {
+    /// An occupied entry.
+    Occupied(RawOccupiedEntryMut<'a, K, V, S>),
+    /// A vacant entry.
+    Vacant(RawVacantEntryMut<'a, K, V, S>),
+}
+
+/// A view into an occupied entry in a `HashMap`.
+/// It is part of the [`RawEntryMut`] enum.
+///
+/// [`RawEntryMut`]: enum.RawEntryMut.html
+pub struct RawOccupiedEntryMut<'a, K, V, S> {
+    elem: Bucket<(K, V)>,
+    table: &'a mut RawTable<(K, V)>,
+    hash_builder: &'a S,
+}
+
+unsafe impl<K, V, S> Send for RawOccupiedEntryMut<'_, K, V, S>
+where
+    K: Send,
+    V: Send,
+    S: Sync,
+{
+}
+unsafe impl<K, V, S> Sync for RawOccupiedEntryMut<'_, K, V, S>
+where
+    K: Sync,
+    V: Sync,
+    S: Sync,
+{
+}
+
+/// A view into a vacant entry in a `HashMap`.
+/// It is part of the [`RawEntryMut`] enum.
+///
+/// [`RawEntryMut`]: enum.RawEntryMut.html
+pub struct RawVacantEntryMut<'a, K, V, S> {
+    table: &'a mut RawTable<(K, V)>,
+    hash_builder: &'a S,
+}
+
+/// A builder for computing where in a [`HashMap`] a key-value pair would be stored.
+///
+/// See the [`HashMap::raw_entry`] docs for usage examples.
+///
+/// [`HashMap::raw_entry`]: struct.HashMap.html#method.raw_entry
+pub struct RawEntryBuilder<'a, K, V, S> {
+    map: &'a HashMap<K, V, S>,
+}
+
+impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S> {
+    /// Creates a `RawEntryMut` from the given key.
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[allow(clippy::wrong_self_convention)]
+    pub fn from_key<Q: ?Sized>(self, k: &Q) -> RawEntryMut<'a, K, V, S>
+    where
+        S: BuildHasher,
+        K: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        let mut hasher = self.map.hash_builder.build_hasher();
+        k.hash(&mut hasher);
+        self.from_key_hashed_nocheck(hasher.finish(), k)
+    }
+
+    /// Creates a `RawEntryMut` from the given key and its hash.
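+    ///
+    /// # Examples
+    ///
+    /// A sketch of hash memoization: the key is hashed once up front and the
+    /// cached hash is reused. The hash must come from the map's own hasher:
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::RawEntryMut;
+    /// use std::hash::{BuildHasher, Hash, Hasher};
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.insert("a", 1);
+    ///
+    /// // Compute the hash with the map's hasher, then reuse it.
+    /// let mut hasher = map.hasher().build_hasher();
+    /// "a".hash(&mut hasher);
+    /// let hash = hasher.finish();
+    ///
+    /// match map.raw_entry_mut().from_key_hashed_nocheck(hash, "a") {
+    ///     RawEntryMut::Occupied(o) => assert_eq!(o.get(), &1),
+    ///     RawEntryMut::Vacant(_) => unreachable!(),
+    /// }
+    /// ```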
+    #[inline]
+    #[allow(clippy::wrong_self_convention)]
+    pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S>
+    where
+        K: Borrow<Q>,
+        Q: Eq,
+    {
+        self.from_hash(hash, |q| q.borrow().eq(k))
+    }
+}
+
+impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S> {
+    /// Creates a `RawEntryMut` from the given hash.
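+    ///
+    /// # Examples
+    ///
+    /// A sketch of searching by hash alone, with custom match logic in place
+    /// of the `Borrow` trait; the hash must still agree with the stored key:
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::RawEntryMut;
+    /// use std::hash::{BuildHasher, Hash, Hasher};
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// map.insert("key".to_string(), 1);
+    ///
+    /// // Hash the search key with the map's hasher.
+    /// let mut hasher = map.hasher().build_hasher();
+    /// "key".hash(&mut hasher);
+    /// let hash = hasher.finish();
+    ///
+    /// match map.raw_entry_mut().from_hash(hash, |k| k == "key") {
+    ///     RawEntryMut::Occupied(mut o) => *o.get_mut() += 1,
+    ///     RawEntryMut::Vacant(_) => unreachable!(),
+    /// }
+    /// assert_eq!(map["key"], 2);
+    /// ```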
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[allow(clippy::wrong_self_convention)]
+    pub fn from_hash<F>(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S>
+    where
+        for<'b> F: FnMut(&'b K) -> bool,
+    {
+        self.search(hash, is_match)
+    }
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn search<F>(self, hash: u64, mut is_match: F) -> RawEntryMut<'a, K, V, S>
+    where
+        for<'b> F: FnMut(&'b K) -> bool,
+    {
+        match self.map.table.find(hash, |(k, _)| is_match(k)) {
+            Some(elem) => RawEntryMut::Occupied(RawOccupiedEntryMut {
+                elem,
+                table: &mut self.map.table,
+                hash_builder: &self.map.hash_builder,
+            }),
+            None => RawEntryMut::Vacant(RawVacantEntryMut {
+                table: &mut self.map.table,
+                hash_builder: &self.map.hash_builder,
+            }),
+        }
+    }
+}
+
+impl<'a, K, V, S> RawEntryBuilder<'a, K, V, S> {
+    /// Access an entry by key.
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[allow(clippy::wrong_self_convention)]
+    pub fn from_key<Q: ?Sized>(self, k: &Q) -> Option<(&'a K, &'a V)>
+    where
+        S: BuildHasher,
+        K: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        let mut hasher = self.map.hash_builder.build_hasher();
+        k.hash(&mut hasher);
+        self.from_key_hashed_nocheck(hasher.finish(), k)
+    }
+
+    /// Access an entry by a key and its hash.
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[allow(clippy::wrong_self_convention)]
+    pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> Option<(&'a K, &'a V)>
+    where
+        K: Borrow<Q>,
+        Q: Eq,
+    {
+        self.from_hash(hash, |q| q.borrow().eq(k))
+    }
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn search<F>(self, hash: u64, mut is_match: F) -> Option<(&'a K, &'a V)>
+    where
+        F: FnMut(&K) -> bool,
+    {
+        match self.map.table.get(hash, |(k, _)| is_match(k)) {
+            Some(&(ref key, ref value)) => Some((key, value)),
+            None => None,
+        }
+    }
+
+    /// Access an entry by hash.
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[allow(clippy::wrong_self_convention)]
+    pub fn from_hash<F>(self, hash: u64, is_match: F) -> Option<(&'a K, &'a V)>
+    where
+        F: FnMut(&K) -> bool,
+    {
+        self.search(hash, is_match)
+    }
+}
+
+impl<'a, K, V, S> RawEntryMut<'a, K, V, S> {
+    /// Sets the value of the entry, and returns a RawOccupiedEntryMut.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// let entry = map.raw_entry_mut().from_key("horseyland").insert("horseyland", 37);
+    ///
+    /// assert_eq!(entry.remove_entry(), ("horseyland", 37));
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V, S>
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        match self {
+            RawEntryMut::Occupied(mut entry) => {
+                entry.insert(value);
+                entry
+            }
+            RawEntryMut::Vacant(entry) => entry.insert_entry(key, value),
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting the default if empty, and returns
+    /// mutable references to the key and value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 3);
+    /// assert_eq!(map["poneyland"], 3);
+    ///
+    /// *map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 10).1 *= 2;
+    /// assert_eq!(map["poneyland"], 6);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_insert(self, default_key: K, default_val: V) -> (&'a mut K, &'a mut V)
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        match self {
+            RawEntryMut::Occupied(entry) => entry.into_key_value(),
+            RawEntryMut::Vacant(entry) => entry.insert(default_key, default_val),
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting the result of the default function if empty,
+    /// and returns mutable references to the key and value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, String> = HashMap::new();
+    ///
+    /// map.raw_entry_mut().from_key("poneyland").or_insert_with(|| {
+    ///     ("poneyland", "hoho".to_string())
+    /// });
+    ///
+    /// assert_eq!(map["poneyland"], "hoho".to_string());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_insert_with<F>(self, default: F) -> (&'a mut K, &'a mut V)
+    where
+        F: FnOnce() -> (K, V),
+        K: Hash,
+        S: BuildHasher,
+    {
+        match self {
+            RawEntryMut::Occupied(entry) => entry.into_key_value(),
+            RawEntryMut::Vacant(entry) => {
+                let (k, v) = default();
+                entry.insert(k, v)
+            }
+        }
+    }
+
+    /// Provides in-place mutable access to an occupied entry before any
+    /// potential inserts into the map.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// map.raw_entry_mut()
+    ///    .from_key("poneyland")
+    ///    .and_modify(|_k, v| { *v += 1 })
+    ///    .or_insert("poneyland", 42);
+    /// assert_eq!(map["poneyland"], 42);
+    ///
+    /// map.raw_entry_mut()
+    ///    .from_key("poneyland")
+    ///    .and_modify(|_k, v| { *v += 1 })
+    ///    .or_insert("poneyland", 0);
+    /// assert_eq!(map["poneyland"], 43);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn and_modify<F>(self, f: F) -> Self
+    where
+        F: FnOnce(&mut K, &mut V),
+    {
+        match self {
+            RawEntryMut::Occupied(mut entry) => {
+                {
+                    let (k, v) = entry.get_key_value_mut();
+                    f(k, v);
+                }
+                RawEntryMut::Occupied(entry)
+            }
+            RawEntryMut::Vacant(entry) => RawEntryMut::Vacant(entry),
+        }
+    }
+
+    /// Provides shared access to the key and owned access to the value of
+    /// an occupied entry, and allows replacing or removing the entry based
+    /// on the returned `Option`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::RawEntryMut;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// let entry = map
+    ///     .raw_entry_mut()
+    ///     .from_key("poneyland")
+    ///     .and_replace_entry_with(|_k, _v| panic!());
+    ///
+    /// match entry {
+    ///     RawEntryMut::Vacant(_) => {},
+    ///     RawEntryMut::Occupied(_) => panic!(),
+    /// }
+    ///
+    /// map.insert("poneyland", 42);
+    ///
+    /// let entry = map
+    ///     .raw_entry_mut()
+    ///     .from_key("poneyland")
+    ///     .and_replace_entry_with(|k, v| {
+    ///         assert_eq!(k, &"poneyland");
+    ///         assert_eq!(v, 42);
+    ///         Some(v + 1)
+    ///     });
+    ///
+    /// match entry {
+    ///     RawEntryMut::Occupied(e) => {
+    ///         assert_eq!(e.key(), &"poneyland");
+    ///         assert_eq!(e.get(), &43);
+    ///     },
+    ///     RawEntryMut::Vacant(_) => panic!(),
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 43);
+    ///
+    /// let entry = map
+    ///     .raw_entry_mut()
+    ///     .from_key("poneyland")
+    ///     .and_replace_entry_with(|_k, _v| None);
+    ///
+    /// match entry {
+    ///     RawEntryMut::Vacant(_) => {},
+    ///     RawEntryMut::Occupied(_) => panic!(),
+    /// }
+    ///
+    /// assert!(!map.contains_key("poneyland"));
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn and_replace_entry_with<F>(self, f: F) -> Self
+    where
+        F: FnOnce(&K, V) -> Option<V>,
+    {
+        match self {
+            RawEntryMut::Occupied(entry) => entry.replace_entry_with(f),
+            RawEntryMut::Vacant(_) => self,
+        }
+    }
+}
+
+impl<'a, K, V, S> RawOccupiedEntryMut<'a, K, V, S> {
+    /// Gets a reference to the key in the entry.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn key(&self) -> &K {
+        unsafe { &self.elem.as_ref().0 }
+    }
+
+    /// Gets a mutable reference to the key in the entry.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn key_mut(&mut self) -> &mut K {
+        unsafe { &mut self.elem.as_mut().0 }
+    }
+
+    /// Converts the entry into a mutable reference to the key in the entry
+    /// with a lifetime bound to the map itself.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn into_key(self) -> &'a mut K {
+        unsafe { &mut self.elem.as_mut().0 }
+    }
+
+    /// Gets a reference to the value in the entry.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get(&self) -> &V {
+        unsafe { &self.elem.as_ref().1 }
+    }
+
+    /// Converts the `RawOccupiedEntryMut` into a mutable reference to the value
+    /// in the entry with a lifetime bound to the map itself.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn into_mut(self) -> &'a mut V {
+        unsafe { &mut self.elem.as_mut().1 }
+    }
+
+    /// Gets a mutable reference to the value in the entry.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get_mut(&mut self) -> &mut V {
+        unsafe { &mut self.elem.as_mut().1 }
+    }
+
+    /// Gets a reference to the key and value in the entry.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get_key_value(&mut self) -> (&K, &V) {
+        unsafe {
+            let &(ref key, ref value) = self.elem.as_ref();
+            (key, value)
+        }
+    }
+
+    /// Gets a mutable reference to the key and value in the entry.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get_key_value_mut(&mut self) -> (&mut K, &mut V) {
+        unsafe {
+            let &mut (ref mut key, ref mut value) = self.elem.as_mut();
+            (key, value)
+        }
+    }
+
+    /// Converts the `RawOccupiedEntryMut` into mutable references to the key and
+    /// value in the entry with a lifetime bound to the map itself.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn into_key_value(self) -> (&'a mut K, &'a mut V) {
+        unsafe {
+            let &mut (ref mut key, ref mut value) = self.elem.as_mut();
+            (key, value)
+        }
+    }
+
+    /// Sets the value of the entry, and returns the entry's old value.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(&mut self, value: V) -> V {
+        mem::replace(self.get_mut(), value)
+    }
+
+    /// Sets the key of the entry, and returns the entry's old key.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert_key(&mut self, key: K) -> K {
+        mem::replace(self.key_mut(), key)
+    }
+
+    /// Takes the value out of the entry, and returns it.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove(self) -> V {
+        self.remove_entry().1
+    }
+
+    /// Take the ownership of the key and value from the map.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove_entry(self) -> (K, V) {
+        unsafe { self.table.remove(self.elem) }
+    }
+
+    /// Provides shared access to the key and owned access to the value of
+    /// the entry, and allows replacing or removing the entry based on the
+    /// returned `Option`.
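+    ///
+    /// # Examples
+    ///
+    /// A short sketch: returning `Some` keeps the entry with a new value,
+    /// while returning `None` removes it:
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::RawEntryMut;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.insert("a", 1);
+    ///
+    /// if let RawEntryMut::Occupied(o) = map.raw_entry_mut().from_key("a") {
+    ///     match o.replace_entry_with(|_k, v| Some(v + 1)) {
+    ///         RawEntryMut::Occupied(o) => assert_eq!(o.get(), &2),
+    ///         RawEntryMut::Vacant(_) => unreachable!(),
+    ///     }
+    /// }
+    /// assert_eq!(map["a"], 2);
+    /// ```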
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn replace_entry_with<F>(self, f: F) -> RawEntryMut<'a, K, V, S>
+    where
+        F: FnOnce(&K, V) -> Option<V>,
+    {
+        unsafe {
+            let still_occupied = self
+                .table
+                .replace_bucket_with(self.elem.clone(), |(key, value)| {
+                    f(&key, value).map(|new_value| (key, new_value))
+                });
+
+            if still_occupied {
+                RawEntryMut::Occupied(self)
+            } else {
+                RawEntryMut::Vacant(RawVacantEntryMut {
+                    table: self.table,
+                    hash_builder: self.hash_builder,
+                })
+            }
+        }
+    }
+}
+
+impl<'a, K, V, S> RawVacantEntryMut<'a, K, V, S> {
+    /// Inserts the given key and value into the map,
+    /// and returns mutable references to them.
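+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch of filling a vacant raw entry:
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::RawEntryMut;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// if let RawEntryMut::Vacant(v) = map.raw_entry_mut().from_key("a") {
+    ///     let (key, value) = v.insert("a", 1);
+    ///     assert_eq!((*key, *value), ("a", 1));
+    /// }
+    /// assert_eq!(map["a"], 1);
+    /// ```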
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(self, key: K, value: V) -> (&'a mut K, &'a mut V)
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        let mut hasher = self.hash_builder.build_hasher();
+        key.hash(&mut hasher);
+        self.insert_hashed_nocheck(hasher.finish(), key, value)
+    }
+
+    /// Inserts the given key and value into the map using the provided hash,
+    /// and returns mutable references to them.
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[allow(clippy::shadow_unrelated)]
+    pub fn insert_hashed_nocheck(self, hash: u64, key: K, value: V) -> (&'a mut K, &'a mut V)
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        let hash_builder = self.hash_builder;
+        self.insert_with_hasher(hash, key, value, |k| make_hash(hash_builder, k))
+    }
+
+    /// Inserts the given key and value into the map, using `hash` for the new
+    /// entry and `hasher` to re-hash existing entries if the table must grow;
+    /// returns mutable references to the inserted key and value.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert_with_hasher<H>(
+        self,
+        hash: u64,
+        key: K,
+        value: V,
+        hasher: H,
+    ) -> (&'a mut K, &'a mut V)
+    where
+        H: Fn(&K) -> u64,
+    {
+        let &mut (ref mut k, ref mut v) = self
+            .table
+            .insert_entry(hash, (key, value), |x| hasher(&x.0));
+        (k, v)
+    }
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn insert_entry(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V, S>
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        let hash_builder = self.hash_builder;
+        let mut hasher = self.hash_builder.build_hasher();
+        key.hash(&mut hasher);
+
+        let elem = self.table.insert(hasher.finish(), (key, value), |k| {
+            make_hash(hash_builder, &k.0)
+        });
+        RawOccupiedEntryMut {
+            elem,
+            table: self.table,
+            hash_builder: self.hash_builder,
+        }
+    }
+}
+
+impl<K, V, S> Debug for RawEntryBuilderMut<'_, K, V, S> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RawEntryBuilder").finish()
+    }
+}
+
+impl<K: Debug, V: Debug, S> Debug for RawEntryMut<'_, K, V, S> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match *self {
+            RawEntryMut::Vacant(ref v) => f.debug_tuple("RawEntry").field(v).finish(),
+            RawEntryMut::Occupied(ref o) => f.debug_tuple("RawEntry").field(o).finish(),
+        }
+    }
+}
+
+impl<K: Debug, V: Debug, S> Debug for RawOccupiedEntryMut<'_, K, V, S> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RawOccupiedEntryMut")
+            .field("key", self.key())
+            .field("value", self.get())
+            .finish()
+    }
+}
+
+impl<K, V, S> Debug for RawVacantEntryMut<'_, K, V, S> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RawVacantEntryMut").finish()
+    }
+}
+
+impl<K, V, S> Debug for RawEntryBuilder<'_, K, V, S> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RawEntryBuilder").finish()
+    }
+}
+
+/// A view into a single entry in a map, which may either be vacant or occupied.
+///
+/// This `enum` is constructed from the [`entry`] method on [`HashMap`].
+///
+/// [`HashMap`]: struct.HashMap.html
+/// [`entry`]: struct.HashMap.html#method.entry
+pub enum Entry<'a, K, V, S> {
+    /// An occupied entry.
+    Occupied(OccupiedEntry<'a, K, V, S>),
+
+    /// A vacant entry.
+    Vacant(VacantEntry<'a, K, V, S>),
+}
+
+impl<K: Debug, V: Debug, S> Debug for Entry<'_, K, V, S> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match *self {
+            Entry::Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
+            Entry::Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(),
+        }
+    }
+}
+
+/// A view into an occupied entry in a `HashMap`.
+/// It is part of the [`Entry`] enum.
+///
+/// [`Entry`]: enum.Entry.html
+pub struct OccupiedEntry<'a, K, V, S> {
+    hash: u64,
+    key: Option<K>,
+    elem: Bucket<(K, V)>,
+    table: &'a mut HashMap<K, V, S>,
+}
+
+unsafe impl<K, V, S> Send for OccupiedEntry<'_, K, V, S>
+where
+    K: Send,
+    V: Send,
+    S: Send,
+{
+}
+unsafe impl<K, V, S> Sync for OccupiedEntry<'_, K, V, S>
+where
+    K: Sync,
+    V: Sync,
+    S: Sync,
+{
+}
+
+impl<K: Debug, V: Debug, S> Debug for OccupiedEntry<'_, K, V, S> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("OccupiedEntry")
+            .field("key", self.key())
+            .field("value", self.get())
+            .finish()
+    }
+}
+
+/// A view into a vacant entry in a `HashMap`.
+/// It is part of the [`Entry`] enum.
+///
+/// [`Entry`]: enum.Entry.html
+pub struct VacantEntry<'a, K, V, S> {
+    hash: u64,
+    key: K,
+    table: &'a mut HashMap<K, V, S>,
+}
+
+impl<K: Debug, V, S> Debug for VacantEntry<'_, K, V, S> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("VacantEntry").field(self.key()).finish()
+    }
+}
+
+impl<'a, K, V, S> IntoIterator for &'a HashMap<K, V, S> {
+    type Item = (&'a K, &'a V);
+    type IntoIter = Iter<'a, K, V>;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn into_iter(self) -> Iter<'a, K, V> {
+        self.iter()
+    }
+}
+
+impl<'a, K, V, S> IntoIterator for &'a mut HashMap<K, V, S> {
+    type Item = (&'a K, &'a mut V);
+    type IntoIter = IterMut<'a, K, V>;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn into_iter(self) -> IterMut<'a, K, V> {
+        self.iter_mut()
+    }
+}
+
+impl<K, V, S> IntoIterator for HashMap<K, V, S> {
+    type Item = (K, V);
+    type IntoIter = IntoIter<K, V>;
+
+    /// Creates a consuming iterator, that is, one that moves each key-value
+    /// pair out of the map in arbitrary order. The map cannot be used after
+    /// calling this.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert("a", 1);
+    /// map.insert("b", 2);
+    /// map.insert("c", 3);
+    ///
+    /// // Not possible with .iter()
+    /// let vec: Vec<(&str, i32)> = map.into_iter().collect();
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn into_iter(self) -> IntoIter<K, V> {
+        IntoIter {
+            inner: self.table.into_iter(),
+        }
+    }
+}
+
+impl<'a, K, V> Iterator for Iter<'a, K, V> {
+    type Item = (&'a K, &'a V);
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<(&'a K, &'a V)> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.inner.next() {
+            Some(x) => unsafe {
+                let r = x.as_ref();
+                Some((&r.0, &r.1))
+            },
+            None => None,
+        }
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+}
+impl<K, V> ExactSizeIterator for Iter<'_, K, V> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+
+impl<K, V> FusedIterator for Iter<'_, K, V> {}
+
+impl<'a, K, V> Iterator for IterMut<'a, K, V> {
+    type Item = (&'a K, &'a mut V);
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.inner.next() {
+            Some(x) => unsafe {
+                let r = x.as_mut();
+                Some((&r.0, &mut r.1))
+            },
+            None => None,
+        }
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+}
+impl<K, V> ExactSizeIterator for IterMut<'_, K, V> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+impl<K, V> FusedIterator for IterMut<'_, K, V> {}
+
+impl<K, V> fmt::Debug for IterMut<'_, K, V>
+where
+    K: fmt::Debug,
+    V: fmt::Debug,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.iter()).finish()
+    }
+}
+
+impl<K, V> Iterator for IntoIter<K, V> {
+    type Item = (K, V);
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<(K, V)> {
+        self.inner.next()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+}
+impl<K, V> ExactSizeIterator for IntoIter<K, V> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+impl<K, V> FusedIterator for IntoIter<K, V> {}
+
+impl<K: Debug, V: Debug> fmt::Debug for IntoIter<K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.iter()).finish()
+    }
+}
+
+impl<'a, K, V> Iterator for Keys<'a, K, V> {
+    type Item = &'a K;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<&'a K> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.inner.next() {
+            Some((k, _)) => Some(k),
+            None => None,
+        }
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+}
+impl<K, V> ExactSizeIterator for Keys<'_, K, V> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+impl<K, V> FusedIterator for Keys<'_, K, V> {}
+
+impl<'a, K, V> Iterator for Values<'a, K, V> {
+    type Item = &'a V;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<&'a V> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.inner.next() {
+            Some((_, v)) => Some(v),
+            None => None,
+        }
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+}
+impl<K, V> ExactSizeIterator for Values<'_, K, V> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+impl<K, V> FusedIterator for Values<'_, K, V> {}
+
+impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
+    type Item = &'a mut V;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<&'a mut V> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.inner.next() {
+            Some((_, v)) => Some(v),
+            None => None,
+        }
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+}
+impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+impl<K, V> FusedIterator for ValuesMut<'_, K, V> {}
+
+impl<K, V> fmt::Debug for ValuesMut<'_, K, V>
+where
+    K: fmt::Debug,
+    V: fmt::Debug,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.inner.iter()).finish()
+    }
+}
+
+impl<'a, K, V> Iterator for Drain<'a, K, V> {
+    type Item = (K, V);
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<(K, V)> {
+        self.inner.next()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+}
+impl<K, V> ExactSizeIterator for Drain<'_, K, V> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+impl<K, V> FusedIterator for Drain<'_, K, V> {}
+
+impl<K, V> fmt::Debug for Drain<'_, K, V>
+where
+    K: fmt::Debug,
+    V: fmt::Debug,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.iter()).finish()
+    }
+}
+
+impl<'a, K, V, S> Entry<'a, K, V, S> {
+    /// Sets the value of the entry, and returns an `OccupiedEntry`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// let entry = map.entry("horseyland").insert(37);
+    ///
+    /// assert_eq!(entry.key(), &"horseyland");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(self, value: V) -> OccupiedEntry<'a, K, V, S>
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        match self {
+            Entry::Occupied(mut entry) => {
+                entry.insert(value);
+                entry
+            }
+            Entry::Vacant(entry) => entry.insert_entry(value),
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting the default if empty, and returns
+    /// a mutable reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// map.entry("poneyland").or_insert(3);
+    /// assert_eq!(map["poneyland"], 3);
+    ///
+    /// *map.entry("poneyland").or_insert(10) *= 2;
+    /// assert_eq!(map["poneyland"], 6);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_insert(self, default: V) -> &'a mut V
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        match self {
+            Entry::Occupied(entry) => entry.into_mut(),
+            Entry::Vacant(entry) => entry.insert(default),
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting the result of the default function if empty,
+    /// and returns a mutable reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, String> = HashMap::new();
+    /// let s = "hoho".to_string();
+    ///
+    /// map.entry("poneyland").or_insert_with(|| s);
+    ///
+    /// assert_eq!(map["poneyland"], "hoho".to_string());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        match self {
+            Entry::Occupied(entry) => entry.into_mut(),
+            Entry::Vacant(entry) => entry.insert(default()),
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting, if empty, the result of the default function,
+    /// which takes the key as its argument, and returns a mutable reference to the value in the
+    /// entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, usize> = HashMap::new();
+    ///
+    /// map.entry("poneyland").or_insert_with_key(|key| key.chars().count());
+    ///
+    /// assert_eq!(map["poneyland"], 9);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_insert_with_key<F: FnOnce(&K) -> V>(self, default: F) -> &'a mut V
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        match self {
+            Entry::Occupied(entry) => entry.into_mut(),
+            Entry::Vacant(entry) => {
+                let value = default(entry.key());
+                entry.insert(value)
+            }
+        }
+    }
+
+    /// Returns a reference to this entry's key.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn key(&self) -> &K {
+        match *self {
+            Entry::Occupied(ref entry) => entry.key(),
+            Entry::Vacant(ref entry) => entry.key(),
+        }
+    }
+
+    /// Provides in-place mutable access to an occupied entry before any
+    /// potential inserts into the map.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// map.entry("poneyland")
+    ///    .and_modify(|e| { *e += 1 })
+    ///    .or_insert(42);
+    /// assert_eq!(map["poneyland"], 42);
+    ///
+    /// map.entry("poneyland")
+    ///    .and_modify(|e| { *e += 1 })
+    ///    .or_insert(42);
+    /// assert_eq!(map["poneyland"], 43);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn and_modify<F>(self, f: F) -> Self
+    where
+        F: FnOnce(&mut V),
+    {
+        match self {
+            Entry::Occupied(mut entry) => {
+                f(entry.get_mut());
+                Entry::Occupied(entry)
+            }
+            Entry::Vacant(entry) => Entry::Vacant(entry),
+        }
+    }
+
+    /// Provides shared access to the key and owned access to the value of
+    /// an occupied entry, allowing it to be replaced or removed based on the
+    /// value of the returned `Option`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::Entry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// let entry = map
+    ///     .entry("poneyland")
+    ///     .and_replace_entry_with(|_k, _v| panic!());
+    ///
+    /// match entry {
+    ///     Entry::Vacant(e) => {
+    ///         assert_eq!(e.key(), &"poneyland");
+    ///     }
+    ///     Entry::Occupied(_) => panic!(),
+    /// }
+    ///
+    /// map.insert("poneyland", 42);
+    ///
+    /// let entry = map
+    ///     .entry("poneyland")
+    ///     .and_replace_entry_with(|k, v| {
+    ///         assert_eq!(k, &"poneyland");
+    ///         assert_eq!(v, 42);
+    ///         Some(v + 1)
+    ///     });
+    ///
+    /// match entry {
+    ///     Entry::Occupied(e) => {
+    ///         assert_eq!(e.key(), &"poneyland");
+    ///         assert_eq!(e.get(), &43);
+    ///     }
+    ///     Entry::Vacant(_) => panic!(),
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 43);
+    ///
+    /// let entry = map
+    ///     .entry("poneyland")
+    ///     .and_replace_entry_with(|_k, _v| None);
+    ///
+    /// match entry {
+    ///     Entry::Vacant(e) => assert_eq!(e.key(), &"poneyland"),
+    ///     Entry::Occupied(_) => panic!(),
+    /// }
+    ///
+    /// assert!(!map.contains_key("poneyland"));
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn and_replace_entry_with<F>(self, f: F) -> Self
+    where
+        F: FnOnce(&K, V) -> Option<V>,
+    {
+        match self {
+            Entry::Occupied(entry) => entry.replace_entry_with(f),
+            Entry::Vacant(_) => self,
+        }
+    }
+}
+
+impl<'a, K, V: Default, S> Entry<'a, K, V, S> {
+    /// Ensures a value is in the entry by inserting the default value if empty,
+    /// and returns a mutable reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, Option<u32>> = HashMap::new();
+    /// map.entry("poneyland").or_default();
+    ///
+    /// assert_eq!(map["poneyland"], None);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_default(self) -> &'a mut V
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        match self {
+            Entry::Occupied(entry) => entry.into_mut(),
+            Entry::Vacant(entry) => entry.insert(Default::default()),
+        }
+    }
+}
+
+impl<'a, K, V, S> OccupiedEntry<'a, K, V, S> {
+    /// Gets a reference to the key in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.entry("poneyland").or_insert(12);
+    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn key(&self) -> &K {
+        unsafe { &self.elem.as_ref().0 }
+    }
+
+    /// Takes ownership of the key and value from the map.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::Entry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.entry("poneyland").or_insert(12);
+    ///
+    /// if let Entry::Occupied(o) = map.entry("poneyland") {
+    ///     // We delete the entry from the map.
+    ///     o.remove_entry();
+    /// }
+    ///
+    /// assert_eq!(map.contains_key("poneyland"), false);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove_entry(self) -> (K, V) {
+        unsafe { self.table.table.remove(self.elem) }
+    }
+
+    /// Gets a reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::Entry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.entry("poneyland").or_insert(12);
+    ///
+    /// if let Entry::Occupied(o) = map.entry("poneyland") {
+    ///     assert_eq!(o.get(), &12);
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get(&self) -> &V {
+        unsafe { &self.elem.as_ref().1 }
+    }
+
+    /// Gets a mutable reference to the value in the entry.
+    ///
+    /// If you need a reference to the `OccupiedEntry` which may outlive the
+    /// destruction of the `Entry` value, see [`into_mut`].
+    ///
+    /// [`into_mut`]: #method.into_mut
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::Entry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.entry("poneyland").or_insert(12);
+    ///
+    /// assert_eq!(map["poneyland"], 12);
+    /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
+    ///     *o.get_mut() += 10;
+    ///     assert_eq!(*o.get(), 22);
+    ///
+    ///     // We can use the same Entry multiple times.
+    ///     *o.get_mut() += 2;
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 24);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get_mut(&mut self) -> &mut V {
+        unsafe { &mut self.elem.as_mut().1 }
+    }
+
+    /// Converts the `OccupiedEntry` into a mutable reference to the value in the entry
+    /// with a lifetime bound to the map itself.
+    ///
+    /// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
+    ///
+    /// [`get_mut`]: #method.get_mut
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::Entry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.entry("poneyland").or_insert(12);
+    ///
+    /// assert_eq!(map["poneyland"], 12);
+    /// if let Entry::Occupied(o) = map.entry("poneyland") {
+    ///     *o.into_mut() += 10;
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 22);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn into_mut(self) -> &'a mut V {
+        unsafe { &mut self.elem.as_mut().1 }
+    }
+
+    /// Sets the value of the entry, and returns the entry's old value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::Entry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.entry("poneyland").or_insert(12);
+    ///
+    /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
+    ///     assert_eq!(o.insert(15), 12);
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 15);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(&mut self, mut value: V) -> V {
+        let old_value = self.get_mut();
+        mem::swap(&mut value, old_value);
+        value
+    }
+
+    /// Takes the value out of the entry, and returns it.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::Entry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.entry("poneyland").or_insert(12);
+    ///
+    /// if let Entry::Occupied(o) = map.entry("poneyland") {
+    ///     assert_eq!(o.remove(), 12);
+    /// }
+    ///
+    /// assert_eq!(map.contains_key("poneyland"), false);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove(self) -> V {
+        self.remove_entry().1
+    }
+
+    /// Replaces the entry, returning the old key and value. The new key in the hash map will be
+    /// the key used to create this entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{Entry, HashMap};
+    /// use std::rc::Rc;
+    ///
+    /// let mut map: HashMap<Rc<String>, u32> = HashMap::new();
+    /// map.insert(Rc::new("Stringthing".to_string()), 15);
+    ///
+    /// let my_key = Rc::new("Stringthing".to_string());
+    ///
+    /// if let Entry::Occupied(entry) = map.entry(my_key) {
+    ///     // Also replace the key with a handle to our other key.
+    ///     let (old_key, old_value): (Rc<String>, u32) = entry.replace_entry(16);
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn replace_entry(self, value: V) -> (K, V) {
+        let entry = unsafe { self.elem.as_mut() };
+
+        let old_key = mem::replace(&mut entry.0, self.key.unwrap());
+        let old_value = mem::replace(&mut entry.1, value);
+
+        (old_key, old_value)
+    }
+
+    /// Replaces the key in the hash map with the key used to create this entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{Entry, HashMap};
+    /// use std::rc::Rc;
+    ///
+    /// let mut map: HashMap<Rc<String>, u32> = HashMap::new();
+    /// let mut known_strings: Vec<Rc<String>> = Vec::new();
+    ///
+    /// // Initialise known strings, run program, etc.
+    ///
+    /// reclaim_memory(&mut map, &known_strings);
+    ///
+    /// fn reclaim_memory(map: &mut HashMap<Rc<String>, u32>, known_strings: &[Rc<String>] ) {
+    ///     for s in known_strings {
+    ///         if let Entry::Occupied(entry) = map.entry(s.clone()) {
+    ///             // Replaces the entry's key with our version of it in `known_strings`.
+    ///             entry.replace_key();
+    ///         }
+    ///     }
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn replace_key(self) -> K {
+        let entry = unsafe { self.elem.as_mut() };
+        mem::replace(&mut entry.0, self.key.unwrap())
+    }
+
+    /// Provides shared access to the key and owned access to the value of
+    /// the entry, allowing it to be replaced or removed based on the value
+    /// of the returned `Option`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::Entry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.insert("poneyland", 42);
+    ///
+    /// let entry = match map.entry("poneyland") {
+    ///     Entry::Occupied(e) => {
+    ///         e.replace_entry_with(|k, v| {
+    ///             assert_eq!(k, &"poneyland");
+    ///             assert_eq!(v, 42);
+    ///             Some(v + 1)
+    ///         })
+    ///     }
+    ///     Entry::Vacant(_) => panic!(),
+    /// };
+    ///
+    /// match entry {
+    ///     Entry::Occupied(e) => {
+    ///         assert_eq!(e.key(), &"poneyland");
+    ///         assert_eq!(e.get(), &43);
+    ///     }
+    ///     Entry::Vacant(_) => panic!(),
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 43);
+    ///
+    /// let entry = match map.entry("poneyland") {
+    ///     Entry::Occupied(e) => e.replace_entry_with(|_k, _v| None),
+    ///     Entry::Vacant(_) => panic!(),
+    /// };
+    ///
+    /// match entry {
+    ///     Entry::Vacant(e) => {
+    ///         assert_eq!(e.key(), &"poneyland");
+    ///     }
+    ///     Entry::Occupied(_) => panic!(),
+    /// }
+    ///
+    /// assert!(!map.contains_key("poneyland"));
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn replace_entry_with<F>(self, f: F) -> Entry<'a, K, V, S>
+    where
+        F: FnOnce(&K, V) -> Option<V>,
+    {
+        unsafe {
+            let mut spare_key = None;
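+
+            // `replace_bucket_with` removes the bucket when the closure
+            // returns `None`; in that case the key is stashed in `spare_key`
+            // so that a `VacantEntry` reusing the already-computed hash can
+            // be handed back instead of dropping the key.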
+
+            self.table
+                .table
+                .replace_bucket_with(self.elem.clone(), |(key, value)| {
+                    if let Some(new_value) = f(&key, value) {
+                        Some((key, new_value))
+                    } else {
+                        spare_key = Some(key);
+                        None
+                    }
+                });
+
+            if let Some(key) = spare_key {
+                Entry::Vacant(VacantEntry {
+                    hash: self.hash,
+                    key,
+                    table: self.table,
+                })
+            } else {
+                Entry::Occupied(self)
+            }
+        }
+    }
+}
+
+impl<'a, K, V, S> VacantEntry<'a, K, V, S> {
+    /// Gets a reference to the key that would be used when inserting a value
+    /// through the `VacantEntry`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn key(&self) -> &K {
+        &self.key
+    }
+
+    /// Takes ownership of the key.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::Entry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// if let Entry::Vacant(v) = map.entry("poneyland") {
+    ///     v.into_key();
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn into_key(self) -> K {
+        self.key
+    }
+
+    /// Sets the value of the entry with the `VacantEntry`'s key,
+    /// and returns a mutable reference to it.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::Entry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// if let Entry::Vacant(o) = map.entry("poneyland") {
+    ///     o.insert(37);
+    /// }
+    /// assert_eq!(map["poneyland"], 37);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(self, value: V) -> &'a mut V
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        let hash_builder = &self.table.hash_builder;
+        let table = &mut self.table.table;
+        let entry = table.insert_entry(self.hash, (self.key, value), |x| {
+            make_hash(hash_builder, &x.0)
+        });
+        &mut entry.1
+    }
+
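+    /// Like `insert`, but returns an `OccupiedEntry` for the inserted element
+    /// instead of a mutable reference to its value; used by `Entry::insert`.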
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn insert_entry(self, value: V) -> OccupiedEntry<'a, K, V, S>
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        let hash_builder = &self.table.hash_builder;
+        let elem = self.table.table.insert(self.hash, (self.key, value), |x| {
+            make_hash(hash_builder, &x.0)
+        });
+        OccupiedEntry {
+            hash: self.hash,
+            key: None,
+            elem,
+            table: self.table,
+        }
+    }
+}
+
+impl<K, V, S> FromIterator<(K, V)> for HashMap<K, V, S>
+where
+    K: Eq + Hash,
+    S: BuildHasher + Default,
+{
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
+        let iter = iter.into_iter();
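+        // Use the iterator's lower size hint as the initial capacity;
+        // duplicate keys in the iterator simply overwrite earlier values.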
+        let mut map = Self::with_capacity_and_hasher(iter.size_hint().0, S::default());
+        iter.for_each(|(k, v)| {
+            map.insert(k, v);
+        });
+        map
+    }
+}
+
+/// Inserts all new key-value pairs from the iterator and replaces the values of
+/// existing keys with the new values returned from the iterator.
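+///
+/// # Examples
+///
+/// A small sketch showing that the value of an existing key is replaced:
+///
+/// ```
+/// use hashbrown::HashMap;
+///
+/// let mut map: HashMap<&str, u32> = HashMap::new();
+/// map.insert("a", 1);
+///
+/// map.extend([("a", 10), ("b", 2)].iter().cloned());
+///
+/// assert_eq!(map["a"], 10);
+/// assert_eq!(map["b"], 2);
+/// ```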
+impl<K, V, S> Extend<(K, V)> for HashMap<K, V, S>
+where
+    K: Eq + Hash,
+    S: BuildHasher,
+{
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
+        // Keys may already be present, or may appear multiple times in the iterator.
+        // Reserve the entire hint lower bound if the map is empty.
+        // Otherwise reserve half the hint (rounded up), so the map
+        // will only resize twice in the worst case.
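+        // Illustrative worst case: extending a non-empty map with a lower
+        // size hint of 100 reserves 50 slots up front (at most one resize);
+        // since growth at least doubles capacity, inserting the remaining
+        // keys can then trigger at most one further resize.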
+        let iter = iter.into_iter();
+        let reserve = if self.is_empty() {
+            iter.size_hint().0
+        } else {
+            (iter.size_hint().0 + 1) / 2
+        };
+        self.reserve(reserve);
+        iter.for_each(move |(k, v)| {
+            self.insert(k, v);
+        });
+    }
+
+    #[inline]
+    #[cfg(feature = "nightly")]
+    fn extend_one(&mut self, (k, v): (K, V)) {
+        self.insert(k, v);
+    }
+
+    #[inline]
+    #[cfg(feature = "nightly")]
+    fn extend_reserve(&mut self, additional: usize) {
+        // Keys may already be present, or may appear multiple times in the iterator.
+        // Reserve the entire hint lower bound if the map is empty.
+        // Otherwise reserve half the hint (rounded up), so the map
+        // will only resize twice in the worst case.
+        let reserve = if self.is_empty() {
+            additional
+        } else {
+            (additional + 1) / 2
+        };
+        self.reserve(reserve);
+    }
+}
+
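+/// A copying variant of `Extend` for `Copy` keys and values: each borrowed
+/// pair is copied and forwarded to the owned-pair `extend` above.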
+impl<'a, K, V, S> Extend<(&'a K, &'a V)> for HashMap<K, V, S>
+where
+    K: Eq + Hash + Copy,
+    V: Copy,
+    S: BuildHasher,
+{
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn extend<T: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: T) {
+        self.extend(iter.into_iter().map(|(&key, &value)| (key, value)));
+    }
+
+    #[inline]
+    #[cfg(feature = "nightly")]
+    fn extend_one(&mut self, (k, v): (&'a K, &'a V)) {
+        self.insert(*k, *v);
+    }
+
+    #[inline]
+    #[cfg(feature = "nightly")]
+    fn extend_reserve(&mut self, additional: usize) {
+        Extend::<(K, V)>::extend_reserve(self, additional);
+    }
+}
+
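+// Compile-time check that the map and its iterator types remain covariant
+// over the lifetimes of their keys and values; these functions are never
+// called, hence `dead_code` is allowed.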
+#[allow(dead_code)]
+fn assert_covariance() {
+    fn map_key<'new>(v: HashMap<&'static str, u8>) -> HashMap<&'new str, u8> {
+        v
+    }
+    fn map_val<'new>(v: HashMap<u8, &'static str>) -> HashMap<u8, &'new str> {
+        v
+    }
+    fn iter_key<'a, 'new>(v: Iter<'a, &'static str, u8>) -> Iter<'a, &'new str, u8> {
+        v
+    }
+    fn iter_val<'a, 'new>(v: Iter<'a, u8, &'static str>) -> Iter<'a, u8, &'new str> {
+        v
+    }
+    fn into_iter_key<'new>(v: IntoIter<&'static str, u8>) -> IntoIter<&'new str, u8> {
+        v
+    }
+    fn into_iter_val<'new>(v: IntoIter<u8, &'static str>) -> IntoIter<u8, &'new str> {
+        v
+    }
+    fn keys_key<'a, 'new>(v: Keys<'a, &'static str, u8>) -> Keys<'a, &'new str, u8> {
+        v
+    }
+    fn keys_val<'a, 'new>(v: Keys<'a, u8, &'static str>) -> Keys<'a, u8, &'new str> {
+        v
+    }
+    fn values_key<'a, 'new>(v: Values<'a, &'static str, u8>) -> Values<'a, &'new str, u8> {
+        v
+    }
+    fn values_val<'a, 'new>(v: Values<'a, u8, &'static str>) -> Values<'a, u8, &'new str> {
+        v
+    }
+    fn drain<'new>(
+        d: Drain<'static, &'static str, &'static str>,
+    ) -> Drain<'new, &'new str, &'new str> {
+        d
+    }
+}
+
+#[cfg(test)]
+mod test_map {
+    use super::DefaultHashBuilder;
+    use super::Entry::{Occupied, Vacant};
+    use super::{HashMap, RawEntryMut};
+    use crate::TryReserveError::*;
+    use rand::{rngs::SmallRng, Rng, SeedableRng};
+    use std::cell::RefCell;
+    use std::usize;
+    use std::vec::Vec;
+
+    #[test]
+    fn test_zero_capacities() {
+        type HM = HashMap<i32, i32>;
+
+        let m = HM::new();
+        assert_eq!(m.capacity(), 0);
+
+        let m = HM::default();
+        assert_eq!(m.capacity(), 0);
+
+        let m = HM::with_hasher(DefaultHashBuilder::default());
+        assert_eq!(m.capacity(), 0);
+
+        let m = HM::with_capacity(0);
+        assert_eq!(m.capacity(), 0);
+
+        let m = HM::with_capacity_and_hasher(0, DefaultHashBuilder::default());
+        assert_eq!(m.capacity(), 0);
+
+        let mut m = HM::new();
+        m.insert(1, 1);
+        m.insert(2, 2);
+        m.remove(&1);
+        m.remove(&2);
+        m.shrink_to_fit();
+        assert_eq!(m.capacity(), 0);
+
+        let mut m = HM::new();
+        m.reserve(0);
+        assert_eq!(m.capacity(), 0);
+    }
+
+    #[test]
+    fn test_create_capacity_zero() {
+        let mut m = HashMap::with_capacity(0);
+
+        assert!(m.insert(1, 1).is_none());
+
+        assert!(m.contains_key(&1));
+        assert!(!m.contains_key(&0));
+    }
+
+    #[test]
+    fn test_insert() {
+        let mut m = HashMap::new();
+        assert_eq!(m.len(), 0);
+        assert!(m.insert(1, 2).is_none());
+        assert_eq!(m.len(), 1);
+        assert!(m.insert(2, 4).is_none());
+        assert_eq!(m.len(), 2);
+        assert_eq!(*m.get(&1).unwrap(), 2);
+        assert_eq!(*m.get(&2).unwrap(), 4);
+    }
+
+    #[test]
+    fn test_clone() {
+        let mut m = HashMap::new();
+        assert_eq!(m.len(), 0);
+        assert!(m.insert(1, 2).is_none());
+        assert_eq!(m.len(), 1);
+        assert!(m.insert(2, 4).is_none());
+        assert_eq!(m.len(), 2);
+        let m2 = m.clone();
+        assert_eq!(*m2.get(&1).unwrap(), 2);
+        assert_eq!(*m2.get(&2).unwrap(), 4);
+        assert_eq!(m2.len(), 2);
+    }
+
+    #[test]
+    fn test_clone_from() {
+        let mut m = HashMap::new();
+        let mut m2 = HashMap::new();
+        assert_eq!(m.len(), 0);
+        assert!(m.insert(1, 2).is_none());
+        assert_eq!(m.len(), 1);
+        assert!(m.insert(2, 4).is_none());
+        assert_eq!(m.len(), 2);
+        m2.clone_from(&m);
+        assert_eq!(*m2.get(&1).unwrap(), 2);
+        assert_eq!(*m2.get(&2).unwrap(), 4);
+        assert_eq!(m2.len(), 2);
+    }
+
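+    // DROP_VECTOR counts live `Droppable` instances per id: `Droppable::new`
+    // increments slot `k` and `Drop` decrements it, so a slot of 0 means each
+    // construction has been matched by a drop.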
+    thread_local! { static DROP_VECTOR: RefCell<Vec<i32>> = RefCell::new(Vec::new()) }
+
+    #[derive(Hash, PartialEq, Eq)]
+    struct Droppable {
+        k: usize,
+    }
+
+    impl Droppable {
+        fn new(k: usize) -> Droppable {
+            DROP_VECTOR.with(|slot| {
+                slot.borrow_mut()[k] += 1;
+            });
+
+            Droppable { k }
+        }
+    }
+
+    impl Drop for Droppable {
+        fn drop(&mut self) {
+            DROP_VECTOR.with(|slot| {
+                slot.borrow_mut()[self.k] -= 1;
+            });
+        }
+    }
+
+    impl Clone for Droppable {
+        fn clone(&self) -> Self {
+            Droppable::new(self.k)
+        }
+    }
+
+    #[test]
+    fn test_drops() {
+        DROP_VECTOR.with(|slot| {
+            *slot.borrow_mut() = vec![0; 200];
+        });
+
+        {
+            let mut m = HashMap::new();
+
+            DROP_VECTOR.with(|v| {
+                for i in 0..200 {
+                    assert_eq!(v.borrow()[i], 0);
+                }
+            });
+
+            for i in 0..100 {
+                let d1 = Droppable::new(i);
+                let d2 = Droppable::new(i + 100);
+                m.insert(d1, d2);
+            }
+
+            DROP_VECTOR.with(|v| {
+                for i in 0..200 {
+                    assert_eq!(v.borrow()[i], 1);
+                }
+            });
+
+            for i in 0..50 {
+                let k = Droppable::new(i);
+                let v = m.remove(&k);
+
+                assert!(v.is_some());
+
+                DROP_VECTOR.with(|v| {
+                    assert_eq!(v.borrow()[i], 1);
+                    assert_eq!(v.borrow()[i + 100], 1);
+                });
+            }
+
+            DROP_VECTOR.with(|v| {
+                for i in 0..50 {
+                    assert_eq!(v.borrow()[i], 0);
+                    assert_eq!(v.borrow()[i + 100], 0);
+                }
+
+                for i in 50..100 {
+                    assert_eq!(v.borrow()[i], 1);
+                    assert_eq!(v.borrow()[i + 100], 1);
+                }
+            });
+        }
+
+        DROP_VECTOR.with(|v| {
+            for i in 0..200 {
+                assert_eq!(v.borrow()[i], 0);
+            }
+        });
+    }
+
+    #[test]
+    fn test_into_iter_drops() {
+        DROP_VECTOR.with(|v| {
+            *v.borrow_mut() = vec![0; 200];
+        });
+
+        let hm = {
+            let mut hm = HashMap::new();
+
+            DROP_VECTOR.with(|v| {
+                for i in 0..200 {
+                    assert_eq!(v.borrow()[i], 0);
+                }
+            });
+
+            for i in 0..100 {
+                let d1 = Droppable::new(i);
+                let d2 = Droppable::new(i + 100);
+                hm.insert(d1, d2);
+            }
+
+            DROP_VECTOR.with(|v| {
+                for i in 0..200 {
+                    assert_eq!(v.borrow()[i], 1);
+                }
+            });
+
+            hm
+        };
+
+        // By the way, ensure that cloning doesn't screw up the dropping.
+        drop(hm.clone());
+
+        {
+            let mut half = hm.into_iter().take(50);
+
+            DROP_VECTOR.with(|v| {
+                for i in 0..200 {
+                    assert_eq!(v.borrow()[i], 1);
+                }
+            });
+
+            for _ in half.by_ref() {}
+
+            DROP_VECTOR.with(|v| {
+                let nk = (0..100).filter(|&i| v.borrow()[i] == 1).count();
+
+                let nv = (0..100).filter(|&i| v.borrow()[i + 100] == 1).count();
+
+                assert_eq!(nk, 50);
+                assert_eq!(nv, 50);
+            });
+        };
+
+        DROP_VECTOR.with(|v| {
+            for i in 0..200 {
+                assert_eq!(v.borrow()[i], 0);
+            }
+        });
+    }
+
+    #[test]
+    fn test_empty_remove() {
+        let mut m: HashMap<i32, bool> = HashMap::new();
+        assert_eq!(m.remove(&0), None);
+    }
+
+    #[test]
+    fn test_empty_entry() {
+        let mut m: HashMap<i32, bool> = HashMap::new();
+        match m.entry(0) {
+            Occupied(_) => panic!(),
+            Vacant(_) => {}
+        }
+        assert!(*m.entry(0).or_insert(true));
+        assert_eq!(m.len(), 1);
+    }
+
+    #[test]
+    fn test_empty_iter() {
+        let mut m: HashMap<i32, bool> = HashMap::new();
+        assert_eq!(m.drain().next(), None);
+        assert_eq!(m.keys().next(), None);
+        assert_eq!(m.values().next(), None);
+        assert_eq!(m.values_mut().next(), None);
+        assert_eq!(m.iter().next(), None);
+        assert_eq!(m.iter_mut().next(), None);
+        assert_eq!(m.len(), 0);
+        assert!(m.is_empty());
+        assert_eq!(m.into_iter().next(), None);
+    }
+
+    #[test]
+    #[cfg_attr(miri, ignore)] // FIXME: takes too long
+    fn test_lots_of_insertions() {
+        let mut m = HashMap::new();
+
+        // Try this a few times to make sure we never screw up the hashmap's
+        // internal state.
+        for _ in 0..10 {
+            assert!(m.is_empty());
+
+            for i in 1..1001 {
+                assert!(m.insert(i, i).is_none());
+
+                for j in 1..=i {
+                    let r = m.get(&j);
+                    assert_eq!(r, Some(&j));
+                }
+
+                for j in i + 1..1001 {
+                    let r = m.get(&j);
+                    assert_eq!(r, None);
+                }
+            }
+
+            for i in 1001..2001 {
+                assert!(!m.contains_key(&i));
+            }
+
+            // remove forwards
+            for i in 1..1001 {
+                assert!(m.remove(&i).is_some());
+
+                for j in 1..=i {
+                    assert!(!m.contains_key(&j));
+                }
+
+                for j in i + 1..1001 {
+                    assert!(m.contains_key(&j));
+                }
+            }
+
+            for i in 1..1001 {
+                assert!(!m.contains_key(&i));
+            }
+
+            for i in 1..1001 {
+                assert!(m.insert(i, i).is_none());
+            }
+
+            // remove backwards
+            for i in (1..1001).rev() {
+                assert!(m.remove(&i).is_some());
+
+                for j in i..1001 {
+                    assert!(!m.contains_key(&j));
+                }
+
+                for j in 1..i {
+                    assert!(m.contains_key(&j));
+                }
+            }
+        }
+    }
+
+    #[test]
+    fn test_find_mut() {
+        let mut m = HashMap::new();
+        assert!(m.insert(1, 12).is_none());
+        assert!(m.insert(2, 8).is_none());
+        assert!(m.insert(5, 14).is_none());
+        let new = 100;
+        match m.get_mut(&5) {
+            None => panic!(),
+            Some(x) => *x = new,
+        }
+        assert_eq!(m.get(&5), Some(&new));
+    }
+
+    #[test]
+    fn test_insert_overwrite() {
+        let mut m = HashMap::new();
+        assert!(m.insert(1, 2).is_none());
+        assert_eq!(*m.get(&1).unwrap(), 2);
+        assert!(m.insert(1, 3).is_some());
+        assert_eq!(*m.get(&1).unwrap(), 3);
+    }
+
+    #[test]
+    fn test_insert_conflicts() {
+        let mut m = HashMap::with_capacity(4);
+        assert!(m.insert(1, 2).is_none());
+        assert!(m.insert(5, 3).is_none());
+        assert!(m.insert(9, 4).is_none());
+        assert_eq!(*m.get(&9).unwrap(), 4);
+        assert_eq!(*m.get(&5).unwrap(), 3);
+        assert_eq!(*m.get(&1).unwrap(), 2);
+    }
+
+    #[test]
+    fn test_conflict_remove() {
+        let mut m = HashMap::with_capacity(4);
+        assert!(m.insert(1, 2).is_none());
+        assert_eq!(*m.get(&1).unwrap(), 2);
+        assert!(m.insert(5, 3).is_none());
+        assert_eq!(*m.get(&1).unwrap(), 2);
+        assert_eq!(*m.get(&5).unwrap(), 3);
+        assert!(m.insert(9, 4).is_none());
+        assert_eq!(*m.get(&1).unwrap(), 2);
+        assert_eq!(*m.get(&5).unwrap(), 3);
+        assert_eq!(*m.get(&9).unwrap(), 4);
+        assert!(m.remove(&1).is_some());
+        assert_eq!(*m.get(&9).unwrap(), 4);
+        assert_eq!(*m.get(&5).unwrap(), 3);
+    }
+
+    #[test]
+    fn test_is_empty() {
+        let mut m = HashMap::with_capacity(4);
+        assert!(m.insert(1, 2).is_none());
+        assert!(!m.is_empty());
+        assert!(m.remove(&1).is_some());
+        assert!(m.is_empty());
+    }
+
+    #[test]
+    fn test_remove() {
+        let mut m = HashMap::new();
+        m.insert(1, 2);
+        assert_eq!(m.remove(&1), Some(2));
+        assert_eq!(m.remove(&1), None);
+    }
+
+    #[test]
+    fn test_remove_entry() {
+        let mut m = HashMap::new();
+        m.insert(1, 2);
+        assert_eq!(m.remove_entry(&1), Some((1, 2)));
+        assert_eq!(m.remove(&1), None);
+    }
+
+    #[test]
+    fn test_iterate() {
+        let mut m = HashMap::with_capacity(4);
+        for i in 0..32 {
+            assert!(m.insert(i, i * 2).is_none());
+        }
+        assert_eq!(m.len(), 32);
+
+        let mut observed: u32 = 0;
+
+        for (k, v) in &m {
+            assert_eq!(*v, *k * 2);
+            observed |= 1 << *k;
+        }
+        assert_eq!(observed, 0xFFFF_FFFF);
+    }
+
+    #[test]
+    fn test_keys() {
+        let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
+        let map: HashMap<_, _> = vec.into_iter().collect();
+        let keys: Vec<_> = map.keys().cloned().collect();
+        assert_eq!(keys.len(), 3);
+        assert!(keys.contains(&1));
+        assert!(keys.contains(&2));
+        assert!(keys.contains(&3));
+    }
+
+    #[test]
+    fn test_values() {
+        let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
+        let map: HashMap<_, _> = vec.into_iter().collect();
+        let values: Vec<_> = map.values().cloned().collect();
+        assert_eq!(values.len(), 3);
+        assert!(values.contains(&'a'));
+        assert!(values.contains(&'b'));
+        assert!(values.contains(&'c'));
+    }
+
+    #[test]
+    fn test_values_mut() {
+        let vec = vec![(1, 1), (2, 2), (3, 3)];
+        let mut map: HashMap<_, _> = vec.into_iter().collect();
+        for value in map.values_mut() {
+            *value *= 2;
+        }
+        let values: Vec<_> = map.values().cloned().collect();
+        assert_eq!(values.len(), 3);
+        assert!(values.contains(&2));
+        assert!(values.contains(&4));
+        assert!(values.contains(&6));
+    }
+
+    #[test]
+    fn test_find() {
+        let mut m = HashMap::new();
+        assert!(m.get(&1).is_none());
+        m.insert(1, 2);
+        match m.get(&1) {
+            None => panic!(),
+            Some(v) => assert_eq!(*v, 2),
+        }
+    }
+
+    #[test]
+    fn test_eq() {
+        let mut m1 = HashMap::new();
+        m1.insert(1, 2);
+        m1.insert(2, 3);
+        m1.insert(3, 4);
+
+        let mut m2 = HashMap::new();
+        m2.insert(1, 2);
+        m2.insert(2, 3);
+
+        assert!(m1 != m2);
+
+        m2.insert(3, 4);
+
+        assert_eq!(m1, m2);
+    }
+
+    #[test]
+    fn test_show() {
+        let mut map = HashMap::new();
+        let empty: HashMap<i32, i32> = HashMap::new();
+
+        map.insert(1, 2);
+        map.insert(3, 4);
+
+        let map_str = format!("{:?}", map);
+
+        assert!(map_str == "{1: 2, 3: 4}" || map_str == "{3: 4, 1: 2}");
+        assert_eq!(format!("{:?}", empty), "{}");
+    }
+
+    #[test]
+    fn test_expand() {
+        let mut m = HashMap::new();
+
+        assert_eq!(m.len(), 0);
+        assert!(m.is_empty());
+
+        let mut i = 0;
+        let old_raw_cap = m.raw_capacity();
+        while old_raw_cap == m.raw_capacity() {
+            m.insert(i, i);
+            i += 1;
+        }
+
+        assert_eq!(m.len(), i);
+        assert!(!m.is_empty());
+    }
+
+    #[test]
+    fn test_behavior_resize_policy() {
+        let mut m = HashMap::new();
+
+        assert_eq!(m.len(), 0);
+        assert_eq!(m.raw_capacity(), 1);
+        assert!(m.is_empty());
+
+        m.insert(0, 0);
+        m.remove(&0);
+        assert!(m.is_empty());
+        let initial_raw_cap = m.raw_capacity();
+        m.reserve(initial_raw_cap);
+        let raw_cap = m.raw_capacity();
+
+        assert_eq!(raw_cap, initial_raw_cap * 2);
+
+        let mut i = 0;
+        for _ in 0..raw_cap * 3 / 4 {
+            m.insert(i, i);
+            i += 1;
+        }
+        // three quarters full
+
+        assert_eq!(m.len(), i);
+        assert_eq!(m.raw_capacity(), raw_cap);
+
+        for _ in 0..raw_cap / 4 {
+            m.insert(i, i);
+            i += 1;
+        }
+        // half full
+
+        let new_raw_cap = m.raw_capacity();
+        assert_eq!(new_raw_cap, raw_cap * 2);
+
+        for _ in 0..raw_cap / 2 - 1 {
+            i -= 1;
+            m.remove(&i);
+            assert_eq!(m.raw_capacity(), new_raw_cap);
+        }
+        // A little more than one quarter full.
+        m.shrink_to_fit();
+        assert_eq!(m.raw_capacity(), raw_cap);
+        // again, a little more than half full
+        for _ in 0..raw_cap / 2 {
+            i -= 1;
+            m.remove(&i);
+        }
+        m.shrink_to_fit();
+
+        assert_eq!(m.len(), i);
+        assert!(!m.is_empty());
+        assert_eq!(m.raw_capacity(), initial_raw_cap);
+    }
+
+    #[test]
+    fn test_reserve_shrink_to_fit() {
+        let mut m = HashMap::new();
+        m.insert(0, 0);
+        m.remove(&0);
+        assert!(m.capacity() >= m.len());
+        for i in 0..128 {
+            m.insert(i, i);
+        }
+        m.reserve(256);
+
+        let usable_cap = m.capacity();
+        for i in 128..(128 + 256) {
+            m.insert(i, i);
+            assert_eq!(m.capacity(), usable_cap);
+        }
+
+        for i in 100..(128 + 256) {
+            assert_eq!(m.remove(&i), Some(i));
+        }
+        m.shrink_to_fit();
+
+        assert_eq!(m.len(), 100);
+        assert!(!m.is_empty());
+        assert!(m.capacity() >= m.len());
+
+        for i in 0..100 {
+            assert_eq!(m.remove(&i), Some(i));
+        }
+        m.shrink_to_fit();
+        m.insert(0, 0);
+
+        assert_eq!(m.len(), 1);
+        assert!(m.capacity() >= m.len());
+        assert_eq!(m.remove(&0), Some(0));
+    }
+
+    #[test]
+    fn test_from_iter() {
+        let xs = [(1, 1), (2, 2), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
+
+        let map: HashMap<_, _> = xs.iter().cloned().collect();
+
+        for &(k, v) in &xs {
+            assert_eq!(map.get(&k), Some(&v));
+        }
+
+        assert_eq!(map.iter().len(), xs.len() - 1);
+    }
+
+    #[test]
+    fn test_size_hint() {
+        let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
+
+        let map: HashMap<_, _> = xs.iter().cloned().collect();
+
+        let mut iter = map.iter();
+
+        for _ in iter.by_ref().take(3) {}
+
+        assert_eq!(iter.size_hint(), (3, Some(3)));
+    }
+
+    #[test]
+    fn test_iter_len() {
+        let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
+
+        let map: HashMap<_, _> = xs.iter().cloned().collect();
+
+        let mut iter = map.iter();
+
+        for _ in iter.by_ref().take(3) {}
+
+        assert_eq!(iter.len(), 3);
+    }
+
+    #[test]
+    fn test_mut_size_hint() {
+        let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
+
+        let mut map: HashMap<_, _> = xs.iter().cloned().collect();
+
+        let mut iter = map.iter_mut();
+
+        for _ in iter.by_ref().take(3) {}
+
+        assert_eq!(iter.size_hint(), (3, Some(3)));
+    }
+
+    #[test]
+    fn test_iter_mut_len() {
+        let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
+
+        let mut map: HashMap<_, _> = xs.iter().cloned().collect();
+
+        let mut iter = map.iter_mut();
+
+        for _ in iter.by_ref().take(3) {}
+
+        assert_eq!(iter.len(), 3);
+    }
+
+    #[test]
+    fn test_index() {
+        let mut map = HashMap::new();
+
+        map.insert(1, 2);
+        map.insert(2, 1);
+        map.insert(3, 4);
+
+        assert_eq!(map[&2], 1);
+    }
+
+    #[test]
+    #[should_panic]
+    fn test_index_nonexistent() {
+        let mut map = HashMap::new();
+
+        map.insert(1, 2);
+        map.insert(2, 1);
+        map.insert(3, 4);
+
+        map[&4];
+    }
+
+    #[test]
+    fn test_entry() {
+        let xs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
+
+        let mut map: HashMap<_, _> = xs.iter().cloned().collect();
+
+        // Existing key (insert)
+        match map.entry(1) {
+            Vacant(_) => unreachable!(),
+            Occupied(mut view) => {
+                assert_eq!(view.get(), &10);
+                assert_eq!(view.insert(100), 10);
+            }
+        }
+        assert_eq!(map.get(&1).unwrap(), &100);
+        assert_eq!(map.len(), 6);
+
+        // Existing key (update)
+        match map.entry(2) {
+            Vacant(_) => unreachable!(),
+            Occupied(mut view) => {
+                let v = view.get_mut();
+                let new_v = (*v) * 10;
+                *v = new_v;
+            }
+        }
+        assert_eq!(map.get(&2).unwrap(), &200);
+        assert_eq!(map.len(), 6);
+
+        // Existing key (take)
+        match map.entry(3) {
+            Vacant(_) => unreachable!(),
+            Occupied(view) => {
+                assert_eq!(view.remove(), 30);
+            }
+        }
+        assert_eq!(map.get(&3), None);
+        assert_eq!(map.len(), 5);
+
+        // Nonexistent key (insert)
+        match map.entry(10) {
+            Occupied(_) => unreachable!(),
+            Vacant(view) => {
+                assert_eq!(*view.insert(1000), 1000);
+            }
+        }
+        assert_eq!(map.get(&10).unwrap(), &1000);
+        assert_eq!(map.len(), 6);
+    }
+
+    #[test]
+    fn test_entry_take_doesnt_corrupt() {
+        #![allow(deprecated)] // rand
+
+        // Test for #19292
+        fn check(m: &HashMap<i32, ()>) {
+            for k in m.keys() {
+                assert!(m.contains_key(k), "{} is in keys() but not in the map?", k);
+            }
+        }
+
+        let mut m = HashMap::new();
+
+        let mut rng = {
+            let seed = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
+            SmallRng::from_seed(seed)
+        };
+
+        // Populate the map with some items.
+        for _ in 0..50 {
+            let x = rng.gen_range(-10, 10);
+            m.insert(x, ());
+        }
+
+        for _ in 0..1000 {
+            let x = rng.gen_range(-10, 10);
+            match m.entry(x) {
+                Vacant(_) => {}
+                Occupied(e) => {
+                    e.remove();
+                }
+            }
+
+            check(&m);
+        }
+    }
+
+    #[test]
+    fn test_extend_ref() {
+        let mut a = HashMap::new();
+        a.insert(1, "one");
+        let mut b = HashMap::new();
+        b.insert(2, "two");
+        b.insert(3, "three");
+
+        a.extend(&b);
+
+        assert_eq!(a.len(), 3);
+        assert_eq!(a[&1], "one");
+        assert_eq!(a[&2], "two");
+        assert_eq!(a[&3], "three");
+    }
+
+    #[test]
+    fn test_capacity_not_less_than_len() {
+        let mut a = HashMap::new();
+        let mut item = 0;
+
+        for _ in 0..116 {
+            a.insert(item, 0);
+            item += 1;
+        }
+
+        assert!(a.capacity() > a.len());
+
+        let free = a.capacity() - a.len();
+        for _ in 0..free {
+            a.insert(item, 0);
+            item += 1;
+        }
+
+        assert_eq!(a.len(), a.capacity());
+
+        // Insert at capacity should cause allocation.
+        a.insert(item, 0);
+        assert!(a.capacity() > a.len());
+    }
+
+    #[test]
+    fn test_occupied_entry_key() {
+        let mut a = HashMap::new();
+        let key = "hello there";
+        let value = "value goes here";
+        assert!(a.is_empty());
+        a.insert(key.clone(), value.clone());
+        assert_eq!(a.len(), 1);
+        assert_eq!(a[key], value);
+
+        match a.entry(key.clone()) {
+            Vacant(_) => panic!(),
+            Occupied(e) => assert_eq!(key, *e.key()),
+        }
+        assert_eq!(a.len(), 1);
+        assert_eq!(a[key], value);
+    }
+
+    #[test]
+    fn test_vacant_entry_key() {
+        let mut a = HashMap::new();
+        let key = "hello there";
+        let value = "value goes here";
+
+        assert!(a.is_empty());
+        match a.entry(key.clone()) {
+            Occupied(_) => panic!(),
+            Vacant(e) => {
+                assert_eq!(key, *e.key());
+                e.insert(value.clone());
+            }
+        }
+        assert_eq!(a.len(), 1);
+        assert_eq!(a[key], value);
+    }
+
+    #[test]
+    fn test_occupied_entry_replace_entry_with() {
+        let mut a = HashMap::new();
+
+        let key = "a key";
+        let value = "an initial value";
+        let new_value = "a new value";
+
+        let entry = a.entry(key).insert(value).replace_entry_with(|k, v| {
+            assert_eq!(k, &key);
+            assert_eq!(v, value);
+            Some(new_value)
+        });
+
+        match entry {
+            Occupied(e) => {
+                assert_eq!(e.key(), &key);
+                assert_eq!(e.get(), &new_value);
+            }
+            Vacant(_) => panic!(),
+        }
+
+        assert_eq!(a[key], new_value);
+        assert_eq!(a.len(), 1);
+
+        let entry = match a.entry(key) {
+            Occupied(e) => e.replace_entry_with(|k, v| {
+                assert_eq!(k, &key);
+                assert_eq!(v, new_value);
+                None
+            }),
+            Vacant(_) => panic!(),
+        };
+
+        match entry {
+            Vacant(e) => assert_eq!(e.key(), &key),
+            Occupied(_) => panic!(),
+        }
+
+        assert!(!a.contains_key(key));
+        assert_eq!(a.len(), 0);
+    }
+
+    #[test]
+    fn test_entry_and_replace_entry_with() {
+        let mut a = HashMap::new();
+
+        let key = "a key";
+        let value = "an initial value";
+        let new_value = "a new value";
+
+        let entry = a.entry(key).and_replace_entry_with(|_, _| panic!());
+
+        match entry {
+            Vacant(e) => assert_eq!(e.key(), &key),
+            Occupied(_) => panic!(),
+        }
+
+        a.insert(key, value);
+
+        let entry = a.entry(key).and_replace_entry_with(|k, v| {
+            assert_eq!(k, &key);
+            assert_eq!(v, value);
+            Some(new_value)
+        });
+
+        match entry {
+            Occupied(e) => {
+                assert_eq!(e.key(), &key);
+                assert_eq!(e.get(), &new_value);
+            }
+            Vacant(_) => panic!(),
+        }
+
+        assert_eq!(a[key], new_value);
+        assert_eq!(a.len(), 1);
+
+        let entry = a.entry(key).and_replace_entry_with(|k, v| {
+            assert_eq!(k, &key);
+            assert_eq!(v, new_value);
+            None
+        });
+
+        match entry {
+            Vacant(e) => assert_eq!(e.key(), &key),
+            Occupied(_) => panic!(),
+        }
+
+        assert!(!a.contains_key(key));
+        assert_eq!(a.len(), 0);
+    }
+
+    #[test]
+    fn test_raw_occupied_entry_replace_entry_with() {
+        let mut a = HashMap::new();
+
+        let key = "a key";
+        let value = "an initial value";
+        let new_value = "a new value";
+
+        let entry = a
+            .raw_entry_mut()
+            .from_key(&key)
+            .insert(key, value)
+            .replace_entry_with(|k, v| {
+                assert_eq!(k, &key);
+                assert_eq!(v, value);
+                Some(new_value)
+            });
+
+        match entry {
+            RawEntryMut::Occupied(e) => {
+                assert_eq!(e.key(), &key);
+                assert_eq!(e.get(), &new_value);
+            }
+            RawEntryMut::Vacant(_) => panic!(),
+        }
+
+        assert_eq!(a[key], new_value);
+        assert_eq!(a.len(), 1);
+
+        let entry = match a.raw_entry_mut().from_key(&key) {
+            RawEntryMut::Occupied(e) => e.replace_entry_with(|k, v| {
+                assert_eq!(k, &key);
+                assert_eq!(v, new_value);
+                None
+            }),
+            RawEntryMut::Vacant(_) => panic!(),
+        };
+
+        match entry {
+            RawEntryMut::Vacant(_) => {}
+            RawEntryMut::Occupied(_) => panic!(),
+        }
+
+        assert!(!a.contains_key(key));
+        assert_eq!(a.len(), 0);
+    }
+
+    #[test]
+    fn test_raw_entry_and_replace_entry_with() {
+        let mut a = HashMap::new();
+
+        let key = "a key";
+        let value = "an initial value";
+        let new_value = "a new value";
+
+        let entry = a
+            .raw_entry_mut()
+            .from_key(&key)
+            .and_replace_entry_with(|_, _| panic!());
+
+        match entry {
+            RawEntryMut::Vacant(_) => {}
+            RawEntryMut::Occupied(_) => panic!(),
+        }
+
+        a.insert(key, value);
+
+        let entry = a
+            .raw_entry_mut()
+            .from_key(&key)
+            .and_replace_entry_with(|k, v| {
+                assert_eq!(k, &key);
+                assert_eq!(v, value);
+                Some(new_value)
+            });
+
+        match entry {
+            RawEntryMut::Occupied(e) => {
+                assert_eq!(e.key(), &key);
+                assert_eq!(e.get(), &new_value);
+            }
+            RawEntryMut::Vacant(_) => panic!(),
+        }
+
+        assert_eq!(a[key], new_value);
+        assert_eq!(a.len(), 1);
+
+        let entry = a
+            .raw_entry_mut()
+            .from_key(&key)
+            .and_replace_entry_with(|k, v| {
+                assert_eq!(k, &key);
+                assert_eq!(v, new_value);
+                None
+            });
+
+        match entry {
+            RawEntryMut::Vacant(_) => {}
+            RawEntryMut::Occupied(_) => panic!(),
+        }
+
+        assert!(!a.contains_key(key));
+        assert_eq!(a.len(), 0);
+    }
+
+    #[test]
+    fn test_replace_entry_with_doesnt_corrupt() {
+        #![allow(deprecated)] // rand
+        // Test for #19292
+        fn check(m: &HashMap<i32, ()>) {
+            for k in m.keys() {
+                assert!(m.contains_key(k), "{} is in keys() but not in the map?", k);
+            }
+        }
+
+        let mut m = HashMap::new();
+
+        let mut rng = {
+            let seed = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
+            SmallRng::from_seed(seed)
+        };
+
+        // Populate the map with some items.
+        for _ in 0..50 {
+            let x = rng.gen_range(-10, 10);
+            m.insert(x, ());
+        }
+
+        for _ in 0..1000 {
+            let x = rng.gen_range(-10, 10);
+            m.entry(x).and_replace_entry_with(|_, _| None);
+            check(&m);
+        }
+    }
+
+    #[test]
+    fn test_retain() {
+        let mut map: HashMap<i32, i32> = (0..100).map(|x| (x, x * 10)).collect();
+
+        map.retain(|&k, _| k % 2 == 0);
+        assert_eq!(map.len(), 50);
+        assert_eq!(map[&2], 20);
+        assert_eq!(map[&4], 40);
+        assert_eq!(map[&6], 60);
+    }
+
+    #[test]
+    fn test_drain_filter() {
+        {
+            let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x * 10)).collect();
+            let drained = map.drain_filter(|&k, _| k % 2 == 0);
+            let mut out = drained.collect::<Vec<_>>();
+            out.sort_unstable();
+            assert_eq!(vec![(0, 0), (2, 20), (4, 40), (6, 60)], out);
+            assert_eq!(map.len(), 4);
+        }
+        {
+            let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x * 10)).collect();
+            drop(map.drain_filter(|&k, _| k % 2 == 0));
+            assert_eq!(map.len(), 4);
+        }
+    }
+
+    #[test]
+    #[cfg_attr(miri, ignore)] // FIXME: no OOM signalling (https://github.com/rust-lang/miri/issues/613)
+    fn test_try_reserve() {
+        let mut empty_bytes: HashMap<u8, u8> = HashMap::new();
+
+        const MAX_USIZE: usize = usize::MAX;
+
+        if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_USIZE) {
+        } else {
+            panic!("usize::MAX should trigger an overflow!");
+        }
+
+        if let Err(AllocError { .. }) = empty_bytes.try_reserve(MAX_USIZE / 8) {
+        } else {
+            // This may succeed if there is enough free memory. Attempt to
+            // allocate a second hashmap to ensure the allocation will fail.
+            let mut empty_bytes2: HashMap<u8, u8> = HashMap::new();
+            if let Err(AllocError { .. }) = empty_bytes2.try_reserve(MAX_USIZE / 8) {
+            } else {
+                panic!("usize::MAX / 8 should trigger an OOM!");
+            }
+        }
+    }
+
+    #[test]
+    fn test_raw_entry() {
+        use super::RawEntryMut::{Occupied, Vacant};
+
+        let xs = [(1i32, 10i32), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
+
+        let mut map: HashMap<_, _> = xs.iter().cloned().collect();
+
+        let compute_hash = |map: &HashMap<i32, i32>, k: i32| -> u64 {
+            use core::hash::{BuildHasher, Hash, Hasher};
+
+            let mut hasher = map.hasher().build_hasher();
+            k.hash(&mut hasher);
+            hasher.finish()
+        };
+
+        // Existing key (insert)
+        match map.raw_entry_mut().from_key(&1) {
+            Vacant(_) => unreachable!(),
+            Occupied(mut view) => {
+                assert_eq!(view.get(), &10);
+                assert_eq!(view.insert(100), 10);
+            }
+        }
+        let hash1 = compute_hash(&map, 1);
+        assert_eq!(map.raw_entry().from_key(&1).unwrap(), (&1, &100));
+        assert_eq!(
+            map.raw_entry().from_hash(hash1, |k| *k == 1).unwrap(),
+            (&1, &100)
+        );
+        assert_eq!(
+            map.raw_entry().from_key_hashed_nocheck(hash1, &1).unwrap(),
+            (&1, &100)
+        );
+        assert_eq!(map.len(), 6);
+
+        // Existing key (update)
+        match map.raw_entry_mut().from_key(&2) {
+            Vacant(_) => unreachable!(),
+            Occupied(mut view) => {
+                let v = view.get_mut();
+                let new_v = (*v) * 10;
+                *v = new_v;
+            }
+        }
+        let hash2 = compute_hash(&map, 2);
+        assert_eq!(map.raw_entry().from_key(&2).unwrap(), (&2, &200));
+        assert_eq!(
+            map.raw_entry().from_hash(hash2, |k| *k == 2).unwrap(),
+            (&2, &200)
+        );
+        assert_eq!(
+            map.raw_entry().from_key_hashed_nocheck(hash2, &2).unwrap(),
+            (&2, &200)
+        );
+        assert_eq!(map.len(), 6);
+
+        // Existing key (take)
+        let hash3 = compute_hash(&map, 3);
+        match map.raw_entry_mut().from_key_hashed_nocheck(hash3, &3) {
+            Vacant(_) => unreachable!(),
+            Occupied(view) => {
+                assert_eq!(view.remove_entry(), (3, 30));
+            }
+        }
+        assert_eq!(map.raw_entry().from_key(&3), None);
+        assert_eq!(map.raw_entry().from_hash(hash3, |k| *k == 3), None);
+        assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash3, &3), None);
+        assert_eq!(map.len(), 5);
+
+        // Nonexistent key (insert)
+        match map.raw_entry_mut().from_key(&10) {
+            Occupied(_) => unreachable!(),
+            Vacant(view) => {
+                assert_eq!(view.insert(10, 1000), (&mut 10, &mut 1000));
+            }
+        }
+        assert_eq!(map.raw_entry().from_key(&10).unwrap(), (&10, &1000));
+        assert_eq!(map.len(), 6);
+
+        // Ensure all lookup methods produce equivalent results.
+        for k in 0..12 {
+            let hash = compute_hash(&map, k);
+            let v = map.get(&k).cloned();
+            let kv = v.as_ref().map(|v| (&k, v));
+
+            assert_eq!(map.raw_entry().from_key(&k), kv);
+            assert_eq!(map.raw_entry().from_hash(hash, |q| *q == k), kv);
+            assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &k), kv);
+
+            match map.raw_entry_mut().from_key(&k) {
+                Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
+                Vacant(_) => assert_eq!(v, None),
+            }
+            match map.raw_entry_mut().from_key_hashed_nocheck(hash, &k) {
+                Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
+                Vacant(_) => assert_eq!(v, None),
+            }
+            match map.raw_entry_mut().from_hash(hash, |q| *q == k) {
+                Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
+                Vacant(_) => assert_eq!(v, None),
+            }
+        }
+    }
+
+    #[test]
+    fn test_key_without_hash_impl() {
+        #[derive(Debug)]
+        struct IntWrapper(u64);
+
+        let mut m: HashMap<IntWrapper, (), ()> = HashMap::default();
+        {
+            assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_none());
+        }
+        {
+            let vacant_entry = match m.raw_entry_mut().from_hash(0, |k| k.0 == 0) {
+                RawEntryMut::Occupied(..) => panic!("Found entry for key 0"),
+                RawEntryMut::Vacant(e) => e,
+            };
+            vacant_entry.insert_with_hasher(0, IntWrapper(0), (), |k| k.0);
+        }
+        {
+            assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_some());
+            assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_none());
+            assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
+        }
+        {
+            let vacant_entry = match m.raw_entry_mut().from_hash(1, |k| k.0 == 1) {
+                RawEntryMut::Occupied(..) => panic!("Found entry for key 1"),
+                RawEntryMut::Vacant(e) => e,
+            };
+            vacant_entry.insert_with_hasher(1, IntWrapper(1), (), |k| k.0);
+        }
+        {
+            assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_some());
+            assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_some());
+            assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
+        }
+        {
+            let occupied_entry = match m.raw_entry_mut().from_hash(0, |k| k.0 == 0) {
+                RawEntryMut::Occupied(e) => e,
+                RawEntryMut::Vacant(..) => panic!("Couldn't find entry for key 0"),
+            };
+            occupied_entry.remove();
+        }
+        assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_none());
+        assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_some());
+        assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
+    }
+
+    #[test]
+    #[cfg(feature = "raw")]
+    fn test_into_iter_refresh() {
+        use core::hash::{BuildHasher, Hash, Hasher};
+
+        #[cfg(miri)]
+        const N: usize = 32;
+        #[cfg(not(miri))]
+        const N: usize = 128;
+
+        let mut rng = rand::thread_rng();
+        for n in 0..N {
+            let mut m = HashMap::new();
+            for i in 0..n {
+                assert!(m.insert(i, 2 * i).is_none());
+            }
+            let hasher = m.hasher().clone();
+
+            let mut it = unsafe { m.table.iter() };
+            assert_eq!(it.len(), n);
+
+            let mut i = 0;
+            let mut left = n;
+            let mut removed = Vec::new();
+            loop {
+                // occasionally remove some elements
+                if i < n && rng.gen_bool(0.1) {
+                    let mut hsh = hasher.build_hasher();
+                    i.hash(&mut hsh);
+                    let hash = hsh.finish();
+
+                    unsafe {
+                        let e = m.table.find(hash, |q| q.0.eq(&i));
+                        if let Some(e) = e {
+                            it.reflect_remove(&e);
+                            let t = m.table.remove(e);
+                            removed.push(t);
+                            left -= 1;
+                        } else {
+                            assert!(removed.contains(&(i, 2 * i)), "{} not in {:?}", i, removed);
+                            let e = m
+                                .table
+                                .insert(hash, (i, 2 * i), |x| super::make_hash(&hasher, &x.0));
+                            it.reflect_insert(&e);
+                            if let Some(p) = removed.iter().position(|e| e == &(i, 2 * i)) {
+                                removed.swap_remove(p);
+                            }
+                            left += 1;
+                        }
+                    }
+                }
+
+                let e = it.next();
+                if e.is_none() {
+                    break;
+                }
+                assert!(i < n);
+                let t = unsafe { e.unwrap().as_ref() };
+                assert!(!removed.contains(t));
+                let (k, v) = t;
+                assert_eq!(*v, 2 * k);
+                i += 1;
+            }
+            assert!(i <= n);
+
+            // just for safety:
+            assert_eq!(m.table.len(), left);
+        }
+    }
+
+    #[test]
+    fn test_const_with_hasher() {
+        use core::hash::BuildHasher;
+        use std::borrow::ToOwned;
+        use std::collections::hash_map::DefaultHasher;
+
+        #[derive(Clone)]
+        struct MyHasher;
+        impl BuildHasher for MyHasher {
+            type Hasher = DefaultHasher;
+
+            fn build_hasher(&self) -> DefaultHasher {
+                DefaultHasher::new()
+            }
+        }
+
+        const EMPTY_MAP: HashMap<u32, std::string::String, MyHasher> =
+            HashMap::with_hasher(MyHasher);
+
+        let mut map = EMPTY_MAP.clone();
+        map.insert(17, "seventeen".to_owned());
+        assert_eq!("seventeen", map[&17]);
+    }
+}
diff --git a/src/raw/bitmask.rs b/src/raw/bitmask.rs
new file mode 100644
index 0000000..99b2d53
--- /dev/null
+++ b/src/raw/bitmask.rs
@@ -0,0 +1,122 @@
+use super::imp::{BitMaskWord, BITMASK_MASK, BITMASK_STRIDE};
+#[cfg(feature = "nightly")]
+use core::intrinsics;
+
+/// A bit mask which contains the result of a `Match` operation on a `Group`
+/// and allows iterating through the matched entries.
+///
+/// The bit mask is arranged so that low-order bits represent lower memory
+/// addresses for group match results.
+///
+/// For implementation reasons, the bits in the set may be sparsely packed, so
+/// that only one bit per byte is used (the high bit, bit 7). If this is the
+/// case, `BITMASK_STRIDE` will be 8 to indicate a divide-by-8 should be
+/// performed on counts/indices to normalize this difference. `BITMASK_MASK` is
+/// similarly a mask of all the actually-used bits.
+#[derive(Copy, Clone)]
+pub struct BitMask(pub BitMaskWord);
+
+#[allow(clippy::use_self)]
+impl BitMask {
+    /// Returns a new `BitMask` with all bits inverted.
+    #[inline]
+    #[must_use]
+    pub fn invert(self) -> Self {
+        BitMask(self.0 ^ BITMASK_MASK)
+    }
+
+    /// Flip the bit in the mask for the entry at the given index.
+    ///
+    /// Returns the bit's previous state.
+    #[inline]
+    #[allow(clippy::cast_ptr_alignment)]
+    #[cfg(feature = "raw")]
+    pub unsafe fn flip(&mut self, index: usize) -> bool {
+        // NOTE: The + BITMASK_STRIDE - 1 is to set the high bit.
+        let mask = 1 << (index * BITMASK_STRIDE + BITMASK_STRIDE - 1);
+        self.0 ^= mask;
+        // The bit was set if the bit is now 0.
+        self.0 & mask == 0
+    }
+
+    /// Returns a new `BitMask` with the lowest bit removed.
+    #[inline]
+    #[must_use]
+    pub fn remove_lowest_bit(self) -> Self {
+        BitMask(self.0 & (self.0 - 1))
+    }
+    /// Returns whether the `BitMask` has at least one set bit.
+    #[inline]
+    pub fn any_bit_set(self) -> bool {
+        self.0 != 0
+    }
+
+    /// Returns the first set bit in the `BitMask`, if there is one.
+    #[inline]
+    pub fn lowest_set_bit(self) -> Option<usize> {
+        if self.0 == 0 {
+            None
+        } else {
+            Some(unsafe { self.lowest_set_bit_nonzero() })
+        }
+    }
+
+    /// Returns the first set bit in the `BitMask`, if there is one. The
+    /// bitmask must not be empty.
+    #[inline]
+    #[cfg(feature = "nightly")]
+    pub unsafe fn lowest_set_bit_nonzero(self) -> usize {
+        intrinsics::cttz_nonzero(self.0) as usize / BITMASK_STRIDE
+    }
+    #[inline]
+    #[cfg(not(feature = "nightly"))]
+    pub unsafe fn lowest_set_bit_nonzero(self) -> usize {
+        self.trailing_zeros()
+    }
+
+    /// Returns the number of trailing zeroes in the `BitMask`.
+    #[inline]
+    pub fn trailing_zeros(self) -> usize {
+        // ARM doesn't have a trailing_zeros instruction, and instead uses
+        // reverse_bits (RBIT) + leading_zeros (CLZ). However older ARM
+        // versions (pre-ARMv7) don't have RBIT and would need to emulate it.
+        // Since we only have 1 bit set in each byte on ARM, we can
+        // use swap_bytes (REV) + leading_zeros instead.
+        if cfg!(target_arch = "arm") && BITMASK_STRIDE % 8 == 0 {
+            self.0.swap_bytes().leading_zeros() as usize / BITMASK_STRIDE
+        } else {
+            self.0.trailing_zeros() as usize / BITMASK_STRIDE
+        }
+    }
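+
+    // Worked example (added for illustration; assumes the generic 64-bit
+    // implementation where BITMASK_STRIDE == 8): a match in byte 2 sets only
+    // bit 23, so 0x0000_0000_0080_0000u64.trailing_zeros() == 23 and
+    // 23 / 8 == 2 recovers the byte index. On the ARM path, swap_bytes()
+    // moves that byte to position 5, leading_zeros() is then 16, and
+    // 16 / 8 == 2 again.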
+
+    /// Returns the number of leading zeroes in the `BitMask`.
+    #[inline]
+    pub fn leading_zeros(self) -> usize {
+        self.0.leading_zeros() as usize / BITMASK_STRIDE
+    }
+}
+
+impl IntoIterator for BitMask {
+    type Item = usize;
+    type IntoIter = BitMaskIter;
+
+    #[inline]
+    fn into_iter(self) -> BitMaskIter {
+        BitMaskIter(self)
+    }
+}
+
+/// Iterator over the contents of a `BitMask`, returning the indices of set
+/// bits.
+pub struct BitMaskIter(BitMask);
+
+impl Iterator for BitMaskIter {
+    type Item = usize;
+
+    #[inline]
+    fn next(&mut self) -> Option<usize> {
+        let bit = self.0.lowest_set_bit()?;
+        self.0 = self.0.remove_lowest_bit();
+        Some(bit)
+    }
+}
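+
+// Usage sketch (added for illustration, not part of the original source;
+// assumes the generic implementation where BITMASK_STRIDE == 8):
+//
+//     let mask = BitMask(0x0000_8000_0080_0000);
+//     let hits: Vec<usize> = mask.into_iter().collect();
+//     assert_eq!(hits, vec![2, 5]); // the high bits of bytes 2 and 5 are set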
diff --git a/src/raw/generic.rs b/src/raw/generic.rs
new file mode 100644
index 0000000..26f8c58
--- /dev/null
+++ b/src/raw/generic.rs
@@ -0,0 +1,151 @@
+use super::bitmask::BitMask;
+use super::EMPTY;
+use core::{mem, ptr};
+
+// Use the native word size as the group size. Using a 64-bit group size on
+// a 32-bit architecture will just end up being more expensive because
+// shifts and multiplies will need to be emulated.
+#[cfg(any(
+    target_pointer_width = "64",
+    target_arch = "aarch64",
+    target_arch = "x86_64",
+))]
+type GroupWord = u64;
+#[cfg(all(
+    target_pointer_width = "32",
+    not(target_arch = "aarch64"),
+    not(target_arch = "x86_64"),
+))]
+type GroupWord = u32;
+
+pub type BitMaskWord = GroupWord;
+pub const BITMASK_STRIDE: usize = 8;
+// We only care about the highest bit of each byte for the mask.
+#[allow(clippy::cast_possible_truncation, clippy::unnecessary_cast)]
+pub const BITMASK_MASK: BitMaskWord = 0x8080_8080_8080_8080_u64 as GroupWord;
+
+/// Helper function to replicate a byte across a `GroupWord`.
+#[inline]
+fn repeat(byte: u8) -> GroupWord {
+    GroupWord::from_ne_bytes([byte; Group::WIDTH])
+}
+
+/// Abstraction over a group of control bytes which can be scanned in
+/// parallel.
+///
+/// This implementation uses a word-sized integer.
+#[derive(Copy, Clone)]
+pub struct Group(GroupWord);
+
+// We perform all operations in the native endianness, and convert to
+// little-endian just before creating a BitMask. This can potentially
+// enable the compiler to eliminate unnecessary byte swaps if we are
+// only checking whether a BitMask is empty.
+#[allow(clippy::use_self)]
+impl Group {
+    /// Number of bytes in the group.
+    pub const WIDTH: usize = mem::size_of::<Self>();
+
+    /// Returns a full group of empty bytes, suitable for use as the initial
+    /// value for an empty hash table.
+    ///
+    /// This is guaranteed to be aligned to the group size.
+    pub const fn static_empty() -> &'static [u8; Group::WIDTH] {
+        #[repr(C)]
+        struct AlignedBytes {
+            _align: [Group; 0],
+            bytes: [u8; Group::WIDTH],
+        }
+        const ALIGNED_BYTES: AlignedBytes = AlignedBytes {
+            _align: [],
+            bytes: [EMPTY; Group::WIDTH],
+        };
+        &ALIGNED_BYTES.bytes
+    }
+
+    /// Loads a group of bytes starting at the given address.
+    #[inline]
+    #[allow(clippy::cast_ptr_alignment)] // unaligned load
+    pub unsafe fn load(ptr: *const u8) -> Self {
+        Group(ptr::read_unaligned(ptr as *const _))
+    }
+
+    /// Loads a group of bytes starting at the given address, which must be
+    /// aligned to `mem::align_of::<Group>()`.
+    #[inline]
+    #[allow(clippy::cast_ptr_alignment)]
+    pub unsafe fn load_aligned(ptr: *const u8) -> Self {
+        // FIXME: use align_offset once it stabilizes
+        debug_assert_eq!(ptr as usize & (mem::align_of::<Self>() - 1), 0);
+        Group(ptr::read(ptr as *const _))
+    }
+
+    /// Stores the group of bytes to the given address, which must be
+    /// aligned to `mem::align_of::<Group>()`.
+    #[inline]
+    #[allow(clippy::cast_ptr_alignment)]
+    pub unsafe fn store_aligned(self, ptr: *mut u8) {
+        // FIXME: use align_offset once it stabilizes
+        debug_assert_eq!(ptr as usize & (mem::align_of::<Self>() - 1), 0);
+        ptr::write(ptr as *mut _, self.0);
+    }
+
+    /// Returns a `BitMask` indicating all bytes in the group which *may*
+    /// have the given value.
+    ///
+    /// This function may return a false positive in certain cases where
+    /// the byte in the group differs from the searched value only in its
+    /// lowest bit. This is fine because:
+    /// - This never happens for `EMPTY` and `DELETED`, only full entries.
+    /// - The check for key equality will catch these.
+    /// - This only happens if there is at least 1 true match.
+    /// - The chance of this happening is very low (< 1% chance per byte).
+    #[inline]
+    pub fn match_byte(self, byte: u8) -> BitMask {
+        // This algorithm is derived from
+        // http://graphics.stanford.edu/~seander/bithacks.html##ValueInWord
+        let cmp = self.0 ^ repeat(byte);
+        BitMask((cmp.wrapping_sub(repeat(0x01)) & !cmp & repeat(0x80)).to_le())
+    }
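+
+    // Worked example (added for illustration): if byte 0 of the group equals
+    // `byte`, then byte 0 of `cmp` is 0x00; subtracting repeat(0x01) turns it
+    // into 0xFF (borrowing from byte 1, which is where the documented false
+    // positives come from), and ANDing with !cmp and repeat(0x80) keeps only
+    // its high bit, i.e. bit 7, which the BitMask maps back to index 0.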
+
+    /// Returns a `BitMask` indicating all bytes in the group which are
+    /// `EMPTY`.
+    #[inline]
+    pub fn match_empty(self) -> BitMask {
+        // If the high bit is set, then the byte must be either:
+        // 1111_1111 (EMPTY) or 1000_0000 (DELETED).
+        // So we can just check if the top two bits are 1 by ANDing them.
+        BitMask((self.0 & (self.0 << 1) & repeat(0x80)).to_le())
+    }
+
+    /// Returns a `BitMask` indicating all bytes in the group which are
+    /// `EMPTY` or `DELETED`.
+    #[inline]
+    pub fn match_empty_or_deleted(self) -> BitMask {
+        // A byte is EMPTY or DELETED iff the high bit is set
+        BitMask((self.0 & repeat(0x80)).to_le())
+    }
+
+    /// Returns a `BitMask` indicating all bytes in the group which are full.
+    #[inline]
+    pub fn match_full(self) -> BitMask {
+        self.match_empty_or_deleted().invert()
+    }
+
+    /// Performs the following transformation on all bytes in the group:
+    /// - `EMPTY => EMPTY`
+    /// - `DELETED => EMPTY`
+    /// - `FULL => DELETED`
+    #[inline]
+    pub fn convert_special_to_empty_and_full_to_deleted(self) -> Self {
+        // Map high_bit = 1 (EMPTY or DELETED) to 1111_1111
+        // and high_bit = 0 (FULL) to 1000_0000
+        //
+        // Here's this logic expanded to concrete values:
+        //   let full = 1000_0000 (true) or 0000_0000 (false)
+        //   !1000_0000 + 1 = 0111_1111 + 1 = 1000_0000 (no carry)
+        //   !0000_0000 + 0 = 1111_1111 + 0 = 1111_1111 (no carry)
+        let full = !self.0 & repeat(0x80);
+        Group(!full + (full >> 7))
+    }
+}
diff --git a/src/raw/mod.rs b/src/raw/mod.rs
new file mode 100644
index 0000000..32fec98
--- /dev/null
+++ b/src/raw/mod.rs
@@ -0,0 +1,1924 @@
+use crate::alloc::alloc::{alloc, dealloc, handle_alloc_error};
+use crate::scopeguard::guard;
+use crate::TryReserveError;
+use core::alloc::Layout;
+use core::hint;
+use core::iter::FusedIterator;
+use core::marker::PhantomData;
+use core::mem;
+use core::mem::ManuallyDrop;
+use core::ptr::NonNull;
+
+cfg_if! {
+    // Use the SSE2 implementation if possible: it allows us to scan 16 buckets
+    // at once instead of 8. We don't bother with AVX since it would require
+    // runtime dispatch and wouldn't gain us much anyways: the probability of
+    // finding a match drops off drastically after the first few buckets.
+    //
+    // I attempted an implementation on ARM using NEON instructions, but it
+    // turns out that most NEON instructions have multi-cycle latency, which in
+    // the end outweighs any gains over the generic implementation.
+    if #[cfg(all(
+        target_feature = "sse2",
+        any(target_arch = "x86", target_arch = "x86_64"),
+        not(miri)
+    ))] {
+        mod sse2;
+        use sse2 as imp;
+    } else {
+        #[path = "generic.rs"]
+        mod generic;
+        use generic as imp;
+    }
+}
+
+mod bitmask;
+
+use self::bitmask::{BitMask, BitMaskIter};
+use self::imp::Group;
+
+// Branch prediction hint. This is currently only available on nightly but it
+// consistently improves performance by 10-15%.
+#[cfg(feature = "nightly")]
+use core::intrinsics::{likely, unlikely};
+#[cfg(not(feature = "nightly"))]
+#[inline]
+fn likely(b: bool) -> bool {
+    b
+}
+#[cfg(not(feature = "nightly"))]
+#[inline]
+fn unlikely(b: bool) -> bool {
+    b
+}
+
+#[cfg(feature = "nightly")]
+#[cfg_attr(feature = "inline-more", inline)]
+unsafe fn offset_from<T>(to: *const T, from: *const T) -> usize {
+    to.offset_from(from) as usize
+}
+#[cfg(not(feature = "nightly"))]
+#[cfg_attr(feature = "inline-more", inline)]
+unsafe fn offset_from<T>(to: *const T, from: *const T) -> usize {
+    (to as usize - from as usize) / mem::size_of::<T>()
+}
+
+/// Whether memory allocation errors should return an error or abort.
+#[derive(Copy, Clone)]
+enum Fallibility {
+    Fallible,
+    Infallible,
+}
+
+impl Fallibility {
+    /// Error to return on capacity overflow.
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn capacity_overflow(self) -> TryReserveError {
+        match self {
+            Fallibility::Fallible => TryReserveError::CapacityOverflow,
+            Fallibility::Infallible => panic!("Hash table capacity overflow"),
+        }
+    }
+
+    /// Error to return on allocation error.
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn alloc_err(self, layout: Layout) -> TryReserveError {
+        match self {
+            Fallibility::Fallible => TryReserveError::AllocError { layout },
+            Fallibility::Infallible => handle_alloc_error(layout),
+        }
+    }
+}
+
+/// Control byte value for an empty bucket.
+const EMPTY: u8 = 0b1111_1111;
+
+/// Control byte value for a deleted bucket.
+const DELETED: u8 = 0b1000_0000;
+
+/// Checks whether a control byte represents a full bucket (top bit is clear).
+#[inline]
+fn is_full(ctrl: u8) -> bool {
+    ctrl & 0x80 == 0
+}
+
+/// Checks whether a control byte represents a special value (top bit is set).
+#[inline]
+fn is_special(ctrl: u8) -> bool {
+    ctrl & 0x80 != 0
+}
+
+/// Checks whether a special control value is EMPTY (just check 1 bit).
+#[inline]
+fn special_is_empty(ctrl: u8) -> bool {
+    debug_assert!(is_special(ctrl));
+    ctrl & 0x01 != 0
+}
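+
+// Summary of the control byte encoding (added for illustration):
+//
+//     EMPTY   = 0b1111_1111  -> is_special, special_is_empty
+//     DELETED = 0b1000_0000  -> is_special, !special_is_empty
+//     h2(x)   = 0b0xxx_xxxx  -> is_full (top bit clear)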
+
+/// Primary hash function, used to select the initial bucket to probe from.
+#[inline]
+#[allow(clippy::cast_possible_truncation)]
+fn h1(hash: u64) -> usize {
+    // On 32-bit platforms we simply ignore the higher hash bits.
+    hash as usize
+}
+
+/// Secondary hash function, saved in the low 7 bits of the control byte.
+#[inline]
+#[allow(clippy::cast_possible_truncation)]
+fn h2(hash: u64) -> u8 {
+    // Grab the top 7 bits of the hash. While the hash is normally a full 64-bit
+    // value, some hash functions (such as FxHash) produce a usize result
+    // instead, which means that the top 32 bits are 0 on 32-bit platforms.
+    let hash_len = usize::min(mem::size_of::<usize>(), mem::size_of::<u64>());
+    let top7 = hash >> (hash_len * 8 - 7);
+    (top7 & 0x7f) as u8 // truncation
+}
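+
+// Worked example (added for illustration): on a 64-bit platform,
+// h2(0xFF00_0000_0000_0000) == (0xFF00_0000_0000_0000 >> 57) & 0x7f == 0x7f;
+// the top 7 bits of the hash end up in the low 7 bits of the result.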
+
+/// Probe sequence based on triangular numbers, which is guaranteed (since our
+/// table size is a power of two) to visit every group of elements exactly once.
+///
+/// A triangular probe has us jump by 1 more group every time. So first we
+/// jump by 1 group (meaning we just continue our linear scan), then 2 groups
+/// (skipping over 1 group), then 3 groups (skipping over 2 groups), and so on.
+///
+/// Proof that the probe will visit every group in the table:
+/// <https://fgiesen.wordpress.com/2015/02/22/triangular-numbers-mod-2n/>
+struct ProbeSeq {
+    bucket_mask: usize,
+    pos: usize,
+    stride: usize,
+}
+
+impl Iterator for ProbeSeq {
+    type Item = usize;
+
+    #[inline]
+    fn next(&mut self) -> Option<usize> {
+        // We should have found an empty bucket by now and ended the probe.
+        debug_assert!(
+            self.stride <= self.bucket_mask,
+            "Went past end of probe sequence"
+        );
+
+        let result = self.pos;
+        self.stride += Group::WIDTH;
+        self.pos += self.stride;
+        self.pos &= self.bucket_mask;
+        Some(result)
+    }
+}
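+
+// Worked example (added for illustration): with 64 buckets (bucket_mask ==
+// 63) and a group width of 16, probing from position 0 visits 0, 16, 48, 32.
+// The strides grow by one group width each step (triangular numbers), and
+// all four group-aligned positions are visited exactly once before the
+// debug assertion would fire.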
+
+/// Returns the number of buckets needed to hold the given number of items,
+/// taking the maximum load factor into account.
+///
+/// Returns `None` if an overflow occurs.
+// Workaround for emscripten bug emscripten-core/emscripten-fastcomp#258
+#[cfg_attr(target_os = "emscripten", inline(never))]
+#[cfg_attr(not(target_os = "emscripten"), inline)]
+fn capacity_to_buckets(cap: usize) -> Option<usize> {
+    debug_assert_ne!(cap, 0);
+
+    // For small tables we require at least 1 empty bucket so that lookups are
+    // guaranteed to terminate if an element doesn't exist in the table.
+    if cap < 8 {
+        // We don't bother with a table size of 2 buckets since that can only
+        // hold a single element. Instead we skip directly to a 4 bucket table
+        // which can hold 3 elements.
+        return Some(if cap < 4 { 4 } else { 8 });
+    }
+
+    // Otherwise require 1/8 buckets to be empty (87.5% load)
+    //
+    // Be careful when modifying this, calculate_layout relies on the
+    // overflow check here.
+    let adjusted_cap = cap.checked_mul(8)? / 7;
+
+    // Any overflows will have been caught by the checked_mul. Also, any
+    // rounding errors from the division above will be cleaned up by
+    // next_power_of_two (which can't overflow because of the previous division).
+    Some(adjusted_cap.next_power_of_two())
+}
+
+/// Returns the maximum effective capacity for the given bucket mask, taking
+/// the maximum load factor into account.
+#[inline]
+fn bucket_mask_to_capacity(bucket_mask: usize) -> usize {
+    if bucket_mask < 8 {
+        // For tables with 1/2/4/8 buckets, we always reserve one empty slot.
+        // Keep in mind that the bucket mask is one less than the bucket count.
+        bucket_mask
+    } else {
+        // For larger tables we reserve 12.5% of the slots as empty.
+        ((bucket_mask + 1) / 8) * 7
+    }
+}
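+
+// Round-trip example (added for illustration): capacity_to_buckets(28)
+// computes 28 * 8 / 7 == 32, which is already a power of two, and
+// bucket_mask_to_capacity(32 - 1) gives (32 / 8) * 7 == 28 back, so a
+// table sized for 28 items is full at exactly 87.5% load.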
+
+/// Returns a Layout which describes the allocation required for a hash table,
+/// and the offset of the control bytes in the allocation.
+/// (the offset is also one past the last element of the bucket array)
+///
+/// Returns `None` if an overflow occurs.
+#[cfg_attr(feature = "inline-more", inline)]
+#[cfg(feature = "nightly")]
+fn calculate_layout<T>(buckets: usize) -> Option<(Layout, usize)> {
+    debug_assert!(buckets.is_power_of_two());
+
+    // Array of buckets
+    let data = Layout::array::<T>(buckets).ok()?;
+
+    // Array of control bytes. This must be aligned to the group size.
+    //
+    // We add `Group::WIDTH` control bytes at the end of the array which
+    // replicate the bytes at the start of the array and thus avoids the need to
+    // perform bounds-checking while probing.
+    //
+    // There is no possible overflow here since buckets is a power of two and
+    // Group::WIDTH is a small number.
+    let ctrl = unsafe { Layout::from_size_align_unchecked(buckets + Group::WIDTH, Group::WIDTH) };
+
+    data.extend(ctrl).ok()
+}
+
+/// Returns a Layout which describes the allocation required for a hash table,
+/// and the offset of the control bytes in the allocation.
+/// (the offset is also one past the last element of the bucket array)
+///
+/// Returns `None` if an overflow occurs.
+#[cfg_attr(feature = "inline-more", inline)]
+#[cfg(not(feature = "nightly"))]
+fn calculate_layout<T>(buckets: usize) -> Option<(Layout, usize)> {
+    debug_assert!(buckets.is_power_of_two());
+
+    // Manual layout calculation since Layout methods are not yet stable.
+    let ctrl_align = usize::max(mem::align_of::<T>(), Group::WIDTH);
+    let ctrl_offset = mem::size_of::<T>()
+        .checked_mul(buckets)?
+        .checked_add(ctrl_align - 1)?
+        & !(ctrl_align - 1);
+    let len = ctrl_offset.checked_add(buckets + Group::WIDTH)?;
+
+    Some((
+        unsafe { Layout::from_size_align_unchecked(len, ctrl_align) },
+        ctrl_offset,
+    ))
+}
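+
+// Worked example (added for illustration; assumes a 64-bit target with
+// 16-byte groups): for T = u64 and buckets = 8, ctrl_align is
+// max(8, 16) == 16, ctrl_offset is 64 rounded up to a multiple of 16
+// (still 64), and the total length is 64 + (8 + 16) == 88 bytes.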
+
+/// A reference to a hash table bucket containing a `T`.
+///
+/// This is usually just a pointer to the element itself. However if the element
+/// is a ZST, then we instead track the index of the element in the table so
+/// that `erase` works properly.
+pub struct Bucket<T> {
+    // Actually it is a pointer to the next element rather than to the element
+    // itself. This is needed to maintain the pointer arithmetic invariants;
+    // keeping a direct pointer to the element would introduce difficulty.
+    // Using `NonNull` for variance and niche layout
+    ptr: NonNull<T>,
+}
+
+// This Send impl is needed for rayon support. This is safe since Bucket is
+// never exposed in a public API.
+unsafe impl<T> Send for Bucket<T> {}
+
+impl<T> Clone for Bucket<T> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Self { ptr: self.ptr }
+    }
+}
+
+impl<T> Bucket<T> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    unsafe fn from_base_index(base: NonNull<T>, index: usize) -> Self {
+        let ptr = if mem::size_of::<T>() == 0 {
+            // won't overflow because index must be less than length
+            (index + 1) as *mut T
+        } else {
+            base.as_ptr().sub(index)
+        };
+        Self {
+            ptr: NonNull::new_unchecked(ptr),
+        }
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    unsafe fn to_base_index(&self, base: NonNull<T>) -> usize {
+        if mem::size_of::<T>() == 0 {
+            self.ptr.as_ptr() as usize - 1
+        } else {
+            offset_from(base.as_ptr(), self.ptr.as_ptr())
+        }
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub unsafe fn as_ptr(&self) -> *mut T {
+        if mem::size_of::<T>() == 0 {
+            // Just return an arbitrary ZST pointer which is properly aligned
+            mem::align_of::<T>() as *mut T
+        } else {
+            self.ptr.as_ptr().sub(1)
+        }
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    unsafe fn next_n(&self, offset: usize) -> Self {
+        let ptr = if mem::size_of::<T>() == 0 {
+            (self.ptr.as_ptr() as usize + offset) as *mut T
+        } else {
+            self.ptr.as_ptr().sub(offset)
+        };
+        Self {
+            ptr: NonNull::new_unchecked(ptr),
+        }
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub unsafe fn drop(&self) {
+        self.as_ptr().drop_in_place();
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub unsafe fn read(&self) -> T {
+        self.as_ptr().read()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub unsafe fn write(&self, val: T) {
+        self.as_ptr().write(val);
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub unsafe fn as_ref<'a>(&self) -> &'a T {
+        &*self.as_ptr()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub unsafe fn as_mut<'a>(&self) -> &'a mut T {
+        &mut *self.as_ptr()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub unsafe fn copy_from_nonoverlapping(&self, other: &Self) {
+        self.as_ptr().copy_from_nonoverlapping(other.as_ptr(), 1);
+    }
+}
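+
+// Memory layout sketch (added for illustration): the data array grows
+// downwards from `data_end`, so for a sized T the bucket at index `i`
+// stores `ptr == data_end - i`, while the element itself lives one slot
+// lower at `data_end - i - 1`. `from_base_index`, `to_base_index` and
+// `as_ptr` above all follow from this single invariant.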
+
+/// A raw hash table with an unsafe API.
+pub struct RawTable<T> {
+    // Mask to get an index from a hash value. The value is one less than the
+    // number of buckets in the table.
+    bucket_mask: usize,
+
+    // [Padding], T1, T2, ..., Tlast, C1, C2, ...
+    //                                ^ points here
+    ctrl: NonNull<u8>,
+
+    // Number of elements that can be inserted before we need to grow the table
+    growth_left: usize,
+
+    // Number of elements in the table, only really used by len()
+    items: usize,
+
+    // Tell dropck that we own instances of T.
+    marker: PhantomData<T>,
+}
+
+impl<T> RawTable<T> {
+    /// Creates a new empty hash table without allocating any memory.
+    ///
+    /// In effect this returns a table with exactly 1 bucket. However we can
+    /// leave the data pointer dangling since that bucket is never written to
+    /// due to our load factor forcing us to always have at least 1 free bucket.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub const fn new() -> Self {
+        Self {
+            // Be careful to cast the entire slice to a raw pointer.
+            ctrl: unsafe { NonNull::new_unchecked(Group::static_empty() as *const _ as *mut u8) },
+            bucket_mask: 0,
+            items: 0,
+            growth_left: 0,
+            marker: PhantomData,
+        }
+    }
+
+    /// Allocates a new hash table with the given number of buckets.
+    ///
+    /// The control bytes are left uninitialized.
+    #[cfg_attr(feature = "inline-more", inline)]
+    unsafe fn new_uninitialized(
+        buckets: usize,
+        fallability: Fallibility,
+    ) -> Result<Self, TryReserveError> {
+        debug_assert!(buckets.is_power_of_two());
+
+        // Avoid `Option::ok_or_else` because it bloats LLVM IR.
+        let (layout, ctrl_offset) = match calculate_layout::<T>(buckets) {
+            Some(lco) => lco,
+            None => return Err(fallability.capacity_overflow()),
+        };
+        let ptr = match NonNull::new(alloc(layout)) {
+            Some(ptr) => ptr,
+            None => return Err(fallability.alloc_err(layout)),
+        };
+        let ctrl = NonNull::new_unchecked(ptr.as_ptr().add(ctrl_offset));
+        Ok(Self {
+            ctrl,
+            bucket_mask: buckets - 1,
+            items: 0,
+            growth_left: bucket_mask_to_capacity(buckets - 1),
+            marker: PhantomData,
+        })
+    }
+
+    /// Attempts to allocate a new hash table with at least enough capacity
+    /// for inserting the given number of elements without reallocating.
+    fn fallible_with_capacity(
+        capacity: usize,
+        fallability: Fallibility,
+    ) -> Result<Self, TryReserveError> {
+        if capacity == 0 {
+            Ok(Self::new())
+        } else {
+            unsafe {
+                // Avoid `Option::ok_or_else` because it bloats LLVM IR.
+                let buckets = match capacity_to_buckets(capacity) {
+                    Some(buckets) => buckets,
+                    None => return Err(fallability.capacity_overflow()),
+                };
+                let result = Self::new_uninitialized(buckets, fallability)?;
+                result.ctrl(0).write_bytes(EMPTY, result.num_ctrl_bytes());
+
+                Ok(result)
+            }
+        }
+    }
+
+    /// Attempts to allocate a new hash table with at least enough capacity
+    /// for inserting the given number of elements without reallocating.
+    #[cfg(feature = "raw")]
+    pub fn try_with_capacity(capacity: usize) -> Result<Self, TryReserveError> {
+        Self::fallible_with_capacity(capacity, Fallibility::Fallible)
+    }
+
+    /// Allocates a new hash table with at least enough capacity for inserting
+    /// the given number of elements without reallocating.
+    pub fn with_capacity(capacity: usize) -> Self {
+        // Avoid `Result::unwrap_or_else` because it bloats LLVM IR.
+        match Self::fallible_with_capacity(capacity, Fallibility::Infallible) {
+            Ok(capacity) => capacity,
+            Err(_) => unsafe { hint::unreachable_unchecked() },
+        }
+    }
+
+    /// Deallocates the table without dropping any entries.
+    #[cfg_attr(feature = "inline-more", inline)]
+    unsafe fn free_buckets(&mut self) {
+        // Avoid `Option::unwrap_or_else` because it bloats LLVM IR.
+        let (layout, ctrl_offset) = match calculate_layout::<T>(self.buckets()) {
+            Some(lco) => lco,
+            None => hint::unreachable_unchecked(),
+        };
+        dealloc(self.ctrl.as_ptr().sub(ctrl_offset), layout);
+    }
+
+    /// Returns pointer to one past last element of data table.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub unsafe fn data_end(&self) -> NonNull<T> {
+        NonNull::new_unchecked(self.ctrl.as_ptr() as *mut T)
+    }
+
+    /// Returns pointer to start of data table.
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[cfg(feature = "nightly")]
+    pub unsafe fn data_start(&self) -> *mut T {
+        self.data_end().as_ptr().wrapping_sub(self.buckets())
+    }
+
+    /// Returns the index of a bucket from a `Bucket`.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub unsafe fn bucket_index(&self, bucket: &Bucket<T>) -> usize {
+        bucket.to_base_index(self.data_end())
+    }
+
+    /// Returns a pointer to a control byte.
+    #[cfg_attr(feature = "inline-more", inline)]
+    unsafe fn ctrl(&self, index: usize) -> *mut u8 {
+        debug_assert!(index < self.num_ctrl_bytes());
+        self.ctrl.as_ptr().add(index)
+    }
+
+    /// Returns a pointer to an element in the table.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub unsafe fn bucket(&self, index: usize) -> Bucket<T> {
+        debug_assert_ne!(self.bucket_mask, 0);
+        debug_assert!(index < self.buckets());
+        Bucket::from_base_index(self.data_end(), index)
+    }
+
+    /// Erases an element from the table without dropping it.
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[deprecated(since = "0.8.1", note = "use erase or remove instead")]
+    pub unsafe fn erase_no_drop(&mut self, item: &Bucket<T>) {
+        let index = self.bucket_index(item);
+        debug_assert!(is_full(*self.ctrl(index)));
+        let index_before = index.wrapping_sub(Group::WIDTH) & self.bucket_mask;
+        let empty_before = Group::load(self.ctrl(index_before)).match_empty();
+        let empty_after = Group::load(self.ctrl(index)).match_empty();
+
+        // If we are inside a continuous block of Group::WIDTH full or deleted
+        // cells then a probe window may have seen a full block when trying to
+        // insert. We therefore need to keep that block non-empty so that
+        // lookups will continue searching to the next probe window.
+        //
+        // Note that in this context `leading_zeros` refers to the bytes at the
+        // end of a group, while `trailing_zeros` refers to the bytes at the
+        // beginning of a group.
+        let ctrl = if empty_before.leading_zeros() + empty_after.trailing_zeros() >= Group::WIDTH {
+            DELETED
+        } else {
+            self.growth_left += 1;
+            EMPTY
+        };
+        self.set_ctrl(index, ctrl);
+        self.items -= 1;
+    }
+
+    /// Erases an element from the table, dropping it in place.
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[allow(clippy::needless_pass_by_value)]
+    #[allow(deprecated)]
+    pub unsafe fn erase(&mut self, item: Bucket<T>) {
+        // Erase the element from the table first since drop might panic.
+        self.erase_no_drop(&item);
+        item.drop();
+    }
+
+    /// Finds and erases an element from the table, dropping it in place.
+    /// Returns true if an element was found.
+    #[cfg(feature = "raw")]
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn erase_entry(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> bool {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        if let Some(bucket) = self.find(hash, eq) {
+            unsafe { self.erase(bucket) };
+            true
+        } else {
+            false
+        }
+    }
+
+    /// Removes an element from the table, returning it.
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[allow(clippy::needless_pass_by_value)]
+    #[allow(deprecated)]
+    pub unsafe fn remove(&mut self, item: Bucket<T>) -> T {
+        self.erase_no_drop(&item);
+        item.read()
+    }
+
+    /// Finds and removes an element from the table, returning it.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove_entry(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<T> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.find(hash, eq) {
+            Some(bucket) => Some(unsafe { self.remove(bucket) }),
+            None => None,
+        }
+    }
+
+    /// Returns an iterator for a probe sequence on the table.
+    ///
+    /// This iterator never terminates, but is guaranteed to visit each bucket
+    /// group exactly once. The loop using `probe_seq` must terminate upon
+    /// reaching a group containing an empty bucket.
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn probe_seq(&self, hash: u64) -> ProbeSeq {
+        ProbeSeq {
+            bucket_mask: self.bucket_mask,
+            pos: h1(hash) & self.bucket_mask,
+            stride: 0,
+        }
+    }
+
+    /// Sets a control byte, and possibly also the replicated control byte at
+    /// the end of the array.
+    #[cfg_attr(feature = "inline-more", inline)]
+    unsafe fn set_ctrl(&self, index: usize, ctrl: u8) {
+        // Replicate the first Group::WIDTH control bytes at the end of
+        // the array without using a branch:
+        // - If index >= Group::WIDTH then index == index2.
+        // - Otherwise index2 == self.bucket_mask + 1 + index.
+        //
+        // The very last replicated control byte is never actually read because
+        // we mask the initial index for unaligned loads, but we write it
+        // anyways because it makes the set_ctrl implementation simpler.
+        //
+        // If there are fewer buckets than Group::WIDTH then this code will
+        // replicate the buckets at the end of the trailing group. For example
+        // with 2 buckets and a group size of 4, the control bytes will look
+        // like this:
+        //
+        //     Real    |             Replicated
+        // ---------------------------------------------
+        // | [A] | [B] | [EMPTY] | [EMPTY] | [A] | [B] |
+        // ---------------------------------------------
+        let index2 = ((index.wrapping_sub(Group::WIDTH)) & self.bucket_mask) + Group::WIDTH;
+
+        *self.ctrl(index) = ctrl;
+        *self.ctrl(index2) = ctrl;
+    }
+
+    /// Searches for an empty or deleted bucket which is suitable for inserting
+    /// a new element.
+    ///
+    /// There must be at least 1 empty bucket in the table.
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn find_insert_slot(&self, hash: u64) -> usize {
+        for pos in self.probe_seq(hash) {
+            unsafe {
+                let group = Group::load(self.ctrl(pos));
+                if let Some(bit) = group.match_empty_or_deleted().lowest_set_bit() {
+                    let result = (pos + bit) & self.bucket_mask;
+
+                    // In tables smaller than the group width, trailing control
+                    // bytes outside the range of the table are filled with
+                    // EMPTY entries. These will unfortunately trigger a
+                    // match, but once masked may point to a full bucket that
+                    // is already occupied. We detect this situation here and
+                    // perform a second scan starting at the beginning of the
+                    // table. This second scan is guaranteed to find an empty
+                    // slot (due to the load factor) before hitting the trailing
+                    // control bytes (containing EMPTY).
+                    if unlikely(is_full(*self.ctrl(result))) {
+                        debug_assert!(self.bucket_mask < Group::WIDTH);
+                        debug_assert_ne!(pos, 0);
+                        return Group::load_aligned(self.ctrl(0))
+                            .match_empty_or_deleted()
+                            .lowest_set_bit_nonzero();
+                    } else {
+                        return result;
+                    }
+                }
+            }
+        }
+
+        // probe_seq never returns.
+        unreachable!();
+    }
+
+    /// Marks all table buckets as empty without dropping their contents.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn clear_no_drop(&mut self) {
+        if !self.is_empty_singleton() {
+            unsafe {
+                self.ctrl(0).write_bytes(EMPTY, self.num_ctrl_bytes());
+            }
+        }
+        self.items = 0;
+        self.growth_left = bucket_mask_to_capacity(self.bucket_mask);
+    }
+
+    /// Removes all elements from the table without freeing the backing memory.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn clear(&mut self) {
+        // Ensure that the table is reset even if one of the drops panic
+        let self_ = guard(self, |self_| self_.clear_no_drop());
+
+        if mem::needs_drop::<T>() && self_.len() != 0 {
+            unsafe {
+                for item in self_.iter() {
+                    item.drop();
+                }
+            }
+        }
+    }
+
+    /// Shrinks the table to fit `max(self.len(), min_size)` elements.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn shrink_to(&mut self, min_size: usize, hasher: impl Fn(&T) -> u64) {
+        // Calculate the minimal number of elements that we need to reserve
+        // space for.
+        let min_size = usize::max(self.items, min_size);
+        if min_size == 0 {
+            *self = Self::new();
+            return;
+        }
+
+        // Calculate the number of buckets that we need for this number of
+        // elements. If the calculation overflows then the requested bucket
+        // count must be larger than what we have right now, and nothing needs to be
+        // done.
+        let min_buckets = match capacity_to_buckets(min_size) {
+            Some(buckets) => buckets,
+            None => return,
+        };
+
+        // If we have more buckets than we need, shrink the table.
+        if min_buckets < self.buckets() {
+            // Fast path if the table is empty
+            if self.items == 0 {
+                *self = Self::with_capacity(min_size)
+            } else {
+                // Avoid `Result::unwrap_or_else` because it bloats LLVM IR.
+                if self
+                    .resize(min_size, hasher, Fallibility::Infallible)
+                    .is_err()
+                {
+                    unsafe { hint::unreachable_unchecked() }
+                }
+            }
+        }
+    }
+
+    /// Ensures that at least `additional` items can be inserted into the table
+    /// without reallocation.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn reserve(&mut self, additional: usize, hasher: impl Fn(&T) -> u64) {
+        if additional > self.growth_left {
+            // Avoid `Result::unwrap_or_else` because it bloats LLVM IR.
+            if self
+                .reserve_rehash(additional, hasher, Fallibility::Infallible)
+                .is_err()
+            {
+                unsafe { hint::unreachable_unchecked() }
+            }
+        }
+    }
+
+    /// Tries to ensure that at least `additional` items can be inserted into
+    /// the table without reallocation.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn try_reserve(
+        &mut self,
+        additional: usize,
+        hasher: impl Fn(&T) -> u64,
+    ) -> Result<(), TryReserveError> {
+        if additional > self.growth_left {
+            self.reserve_rehash(additional, hasher, Fallibility::Fallible)
+        } else {
+            Ok(())
+        }
+    }
+
+    /// Out-of-line slow path for `reserve` and `try_reserve`.
+    #[cold]
+    #[inline(never)]
+    fn reserve_rehash(
+        &mut self,
+        additional: usize,
+        hasher: impl Fn(&T) -> u64,
+        fallability: Fallibility,
+    ) -> Result<(), TryReserveError> {
+        // Avoid `Option::ok_or_else` because it bloats LLVM IR.
+        let new_items = match self.items.checked_add(additional) {
+            Some(new_items) => new_items,
+            None => return Err(fallability.capacity_overflow()),
+        };
+        let full_capacity = bucket_mask_to_capacity(self.bucket_mask);
+        if new_items <= full_capacity / 2 {
+            // Rehash in-place without re-allocating if we have plenty of spare
+            // capacity that is locked up due to DELETED entries.
+            self.rehash_in_place(hasher);
+            Ok(())
+        } else {
+            // Otherwise, conservatively resize to at least the next size up
+            // to avoid churning deletes into frequent rehashes.
+            self.resize(
+                usize::max(new_items, full_capacity + 1),
+                hasher,
+                fallability,
+            )
+        }
+    }
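+
+    // Decision example (added for illustration): a table with 32 buckets has
+    // full_capacity == 28; holding 10 items and reserving 3 more gives
+    // new_items == 13 <= 28 / 2, so the table is rehashed in place to
+    // reclaim DELETED slots instead of being reallocated.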
+
+    /// Rehashes the contents of the table in place (i.e. without changing the
+    /// allocation).
+    ///
+    /// If `hasher` panics then some of the table's contents may be lost.
+    fn rehash_in_place(&mut self, hasher: impl Fn(&T) -> u64) {
+        unsafe {
+            // Bulk convert all full control bytes to DELETED, and all DELETED
+            // control bytes to EMPTY. This effectively frees up all buckets
+            // containing a DELETED entry.
+            for i in (0..self.buckets()).step_by(Group::WIDTH) {
+                let group = Group::load_aligned(self.ctrl(i));
+                let group = group.convert_special_to_empty_and_full_to_deleted();
+                group.store_aligned(self.ctrl(i));
+            }
+
+            // Fix up the trailing control bytes. See the comments in set_ctrl
+            // for the handling of tables smaller than the group width.
+            if self.buckets() < Group::WIDTH {
+                self.ctrl(0)
+                    .copy_to(self.ctrl(Group::WIDTH), self.buckets());
+            } else {
+                self.ctrl(0)
+                    .copy_to(self.ctrl(self.buckets()), Group::WIDTH);
+            }
+
+            // If the hash function panics then properly clean up any elements
+            // that we haven't rehashed yet. We unfortunately can't preserve the
+            // element since we lost their hash and have no way of recovering it
+            // without risking another panic.
+            let mut guard = guard(self, |self_| {
+                if mem::needs_drop::<T>() {
+                    for i in 0..self_.buckets() {
+                        if *self_.ctrl(i) == DELETED {
+                            self_.set_ctrl(i, EMPTY);
+                            self_.bucket(i).drop();
+                            self_.items -= 1;
+                        }
+                    }
+                }
+                self_.growth_left = bucket_mask_to_capacity(self_.bucket_mask) - self_.items;
+            });
+
+            // At this point, DELETED elements are elements that we haven't
+            // rehashed yet. Find them and re-insert them at their ideal
+            // position.
+            'outer: for i in 0..guard.buckets() {
+                if *guard.ctrl(i) != DELETED {
+                    continue;
+                }
+                'inner: loop {
+                    // Hash the current item
+                    let item = guard.bucket(i);
+                    let hash = hasher(item.as_ref());
+
+                    // Search for a suitable place to put it
+                    let new_i = guard.find_insert_slot(hash);
+
+                    // Probing works by scanning through all of the control
+                    // bytes in groups, which may not be aligned to the group
+                    // size. If both the new and old position fall within the
+                    // same unaligned group, then there is no benefit in moving
+                    // it and we can just continue to the next item.
+                    let probe_index = |pos: usize| {
+                        (pos.wrapping_sub(guard.probe_seq(hash).pos) & guard.bucket_mask)
+                            / Group::WIDTH
+                    };
+                    if likely(probe_index(i) == probe_index(new_i)) {
+                        guard.set_ctrl(i, h2(hash));
+                        continue 'outer;
+                    }
+
+                    // We are moving the current item to a new position. Write
+                    // our H2 to the control byte of the new position.
+                    let prev_ctrl = *guard.ctrl(new_i);
+                    guard.set_ctrl(new_i, h2(hash));
+
+                    if prev_ctrl == EMPTY {
+                        // If the target slot is empty, simply move the current
+                        // element into the new slot and clear the old control
+                        // byte.
+                        guard.set_ctrl(i, EMPTY);
+                        guard.bucket(new_i).copy_from_nonoverlapping(&item);
+                        continue 'outer;
+                    } else {
+                        // If the target slot is occupied, swap the two elements
+                        // and then continue processing the element that we just
+                        // swapped into the old slot.
+                        debug_assert_eq!(prev_ctrl, DELETED);
+                        mem::swap(guard.bucket(new_i).as_mut(), item.as_mut());
+                        continue 'inner;
+                    }
+                }
+            }
+
+            guard.growth_left = bucket_mask_to_capacity(guard.bucket_mask) - guard.items;
+            mem::forget(guard);
+        }
+    }
+
+    /// Allocates a new table of a different size and moves the contents of the
+    /// current table into it.
+    fn resize(
+        &mut self,
+        capacity: usize,
+        hasher: impl Fn(&T) -> u64,
+        fallibility: Fallibility,
+    ) -> Result<(), TryReserveError> {
+        unsafe {
+            debug_assert!(self.items <= capacity);
+
+            // Allocate and initialize the new table.
+            let mut new_table = Self::fallible_with_capacity(capacity, fallibility)?;
+            new_table.growth_left -= self.items;
+            new_table.items = self.items;
+
+            // The hash function may panic, in which case we simply free the new
+            // table without dropping any elements that may have been copied into
+            // it.
+            //
+            // This guard is also used to free the old table on success, see
+            // the comment at the bottom of this function.
+            let mut new_table = guard(ManuallyDrop::new(new_table), |new_table| {
+                if !new_table.is_empty_singleton() {
+                    new_table.free_buckets();
+                }
+            });
+
+            // Copy all elements to the new table.
+            for item in self.iter() {
+                // This may panic.
+                let hash = hasher(item.as_ref());
+
+                // We can use a simpler version of insert() here since:
+                // - there are no DELETED entries.
+                // - we know there is enough space in the table.
+                // - all elements are unique.
+                let index = new_table.find_insert_slot(hash);
+                new_table.set_ctrl(index, h2(hash));
+                new_table.bucket(index).copy_from_nonoverlapping(&item);
+            }
+
+            // We successfully copied all elements without panicking. Now replace
+            // self with the new table. The old table will have its memory freed but
+            // the items will not be dropped (since they have been moved into the
+            // new table).
+            mem::swap(self, &mut new_table);
+
+            Ok(())
+        }
+    }
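+
+    // `resize` is internal and is normally reached through the public
+    // `reserve` path. A minimal sketch of that path (assuming the `raw`
+    // feature is enabled; the identity hasher is purely illustrative):
+    //
+    //     let mut table: RawTable<u64> = RawTable::new();
+    //     table.reserve(100, |v| *v); // may allocate or resize internally
+    //     assert!(table.capacity() >= 100);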
+
+    /// Inserts a new element into the table, and returns its raw bucket.
+    ///
+    /// This does not check if the given element already exists in the table.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> Bucket<T> {
+        unsafe {
+            let mut index = self.find_insert_slot(hash);
+
+            // We can avoid growing the table once we have reached our load
+            // factor if we are replacing a tombstone. This works since the
+            // number of EMPTY slots does not change in this case.
+            let old_ctrl = *self.ctrl(index);
+            if unlikely(self.growth_left == 0 && special_is_empty(old_ctrl)) {
+                self.reserve(1, hasher);
+                index = self.find_insert_slot(hash);
+            }
+
+            let bucket = self.bucket(index);
+            self.growth_left -= special_is_empty(old_ctrl) as usize;
+            self.set_ctrl(index, h2(hash));
+            bucket.write(value);
+            self.items += 1;
+            bucket
+        }
+    }
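+
+    // A minimal usage sketch (assuming the `raw` feature is enabled; the
+    // identity hasher is purely illustrative, and the caller must pass the
+    // value's real hash):
+    //
+    //     let hasher = |v: &u64| *v;
+    //     let mut table: RawTable<u64> = RawTable::new();
+    //     let bucket = table.insert(hasher(&7), 7, hasher);
+    //     assert_eq!(unsafe { *bucket.as_ref() }, 7);
+    //     assert_eq!(table.len(), 1);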
+
+    /// Inserts a new element into the table, and returns a mutable reference to it.
+    ///
+    /// This does not check if the given element already exists in the table.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert_entry(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> &mut T {
+        unsafe { self.insert(hash, value, hasher).as_mut() }
+    }
+
+    /// Inserts a new element into the table, without growing the table.
+    ///
+    /// There must be enough space in the table to insert the new element.
+    ///
+    /// This does not check if the given element already exists in the table.
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[cfg(any(feature = "raw", feature = "rustc-internal-api"))]
+    pub fn insert_no_grow(&mut self, hash: u64, value: T) -> Bucket<T> {
+        unsafe {
+            let index = self.find_insert_slot(hash);
+            let bucket = self.bucket(index);
+
+            // If we are replacing a DELETED entry then we don't need to update
+            // the load counter.
+            let old_ctrl = *self.ctrl(index);
+            self.growth_left -= special_is_empty(old_ctrl) as usize;
+
+            self.set_ctrl(index, h2(hash));
+            bucket.write(value);
+            self.items += 1;
+            bucket
+        }
+    }
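+
+    // Usage sketch (assuming the `raw` feature; `table` and the identity
+    // `hasher` are as in the sketch above). The caller must reserve space
+    // first, since this method never grows the table:
+    //
+    //     table.reserve(1, hasher);
+    //     table.insert_no_grow(hasher(&8), 8); // will not reallocate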
+
+    /// Temporarily removes a bucket, applying the given function to the
+    /// removed element, and optionally puts the returned value back in the
+    /// same bucket.
+    ///
+    /// Returns `true` if the bucket still contains an element.
+    ///
+    /// This does not check if the given bucket is actually occupied.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub unsafe fn replace_bucket_with<F>(&mut self, bucket: Bucket<T>, f: F) -> bool
+    where
+        F: FnOnce(T) -> Option<T>,
+    {
+        let index = self.bucket_index(&bucket);
+        let old_ctrl = *self.ctrl(index);
+        debug_assert!(is_full(old_ctrl));
+        let old_growth_left = self.growth_left;
+        let item = self.remove(bucket);
+        if let Some(new_item) = f(item) {
+            self.growth_left = old_growth_left;
+            self.set_ctrl(index, old_ctrl);
+            self.items += 1;
+            self.bucket(index).write(new_item);
+            true
+        } else {
+            false
+        }
+    }
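+
+    // Usage sketch (assuming the `raw` feature; `table` and `hasher` as in
+    // the earlier sketches). Returning `Some` re-occupies the bucket, while
+    // returning `None` leaves it removed:
+    //
+    //     if let Some(bucket) = table.find(hasher(&7), |v| *v == 7) {
+    //         let kept = unsafe { table.replace_bucket_with(bucket, Some) };
+    //         assert!(kept);
+    //     }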
+
+    /// Searches for an element in the table.
+    #[inline]
+    pub fn find(&self, hash: u64, mut eq: impl FnMut(&T) -> bool) -> Option<Bucket<T>> {
+        unsafe {
+            for bucket in self.iter_hash(hash) {
+                let elm = bucket.as_ref();
+                if likely(eq(elm)) {
+                    return Some(bucket);
+                }
+            }
+            None
+        }
+    }
+
+    /// Gets a reference to an element in the table.
+    #[inline]
+    pub fn get(&self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&T> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.find(hash, eq) {
+            Some(bucket) => Some(unsafe { bucket.as_ref() }),
+            None => None,
+        }
+    }
+
+    /// Gets a mutable reference to an element in the table.
+    #[inline]
+    pub fn get_mut(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&mut T> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.find(hash, eq) {
+            Some(bucket) => Some(unsafe { bucket.as_mut() }),
+            None => None,
+        }
+    }
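+
+    // Usage sketch for the lookup family (assuming the `raw` feature;
+    // `table` and the identity `hasher` as above). The `eq` closure
+    // disambiguates hash collisions:
+    //
+    //     let hash = hasher(&7);
+    //     assert_eq!(table.get(hash, |v| *v == 7), Some(&7));
+    //     if let Some(v) = table.get_mut(hash, |v| *v == 7) {
+    //         // mutate `*v` here, as long as its hash stays unchanged
+    //     }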
+
+    /// Returns the number of elements the map can hold without reallocating.
+    ///
+    /// This number is a lower bound; the table might be able to hold
+    /// more, but is guaranteed to be able to hold at least this many.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn capacity(&self) -> usize {
+        self.items + self.growth_left
+    }
+
+    /// Returns the number of elements in the table.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn len(&self) -> usize {
+        self.items
+    }
+
+    /// Returns the number of buckets in the table.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn buckets(&self) -> usize {
+        self.bucket_mask + 1
+    }
+
+    /// Returns the number of control bytes in the table.
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn num_ctrl_bytes(&self) -> usize {
+        self.bucket_mask + 1 + Group::WIDTH
+    }
+
+    /// Returns whether this table points to the empty singleton with a capacity
+    /// of 0.
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn is_empty_singleton(&self) -> bool {
+        self.bucket_mask == 0
+    }
+
+    /// Returns an iterator over every element in the table. It is up to
+    /// the caller to ensure that the `RawTable` outlives the `RawIter`.
+    /// Because we cannot make the `next` method unsafe on the `RawIter`
+    /// struct, we have to make the `iter` method unsafe.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub unsafe fn iter(&self) -> RawIter<T> {
+        let data = Bucket::from_base_index(self.data_end(), 0);
+        RawIter {
+            iter: RawIterRange::new(self.ctrl.as_ptr(), data, self.buckets()),
+            items: self.items,
+        }
+    }
+
+    /// Returns an iterator over occupied buckets that could match a given hash.
+    ///
+    /// In rare cases, the iterator may return a bucket with a different hash.
+    ///
+    /// It is up to the caller to ensure that the `RawTable` outlives the
+    /// `RawIterHash`. Because we cannot make the `next` method unsafe on the
+    /// `RawIterHash` struct, we have to make the `iter_hash` method unsafe.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub unsafe fn iter_hash(&self, hash: u64) -> RawIterHash<'_, T> {
+        RawIterHash::new(self, hash)
+    }
+
+    /// Returns an iterator which removes all elements from the table without
+    /// freeing the memory.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn drain(&mut self) -> RawDrain<'_, T> {
+        unsafe {
+            let iter = self.iter();
+            self.drain_iter_from(iter)
+        }
+    }
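+
+    // Usage sketch (assuming the `raw` feature; `table` as above): draining
+    // empties the table but keeps its allocation for reuse:
+    //
+    //     let drained: Vec<u64> = table.drain().collect();
+    //     assert_eq!(table.len(), 0);
+    //     assert!(table.capacity() > 0); // allocation is retained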
+
+    /// Returns an iterator which removes all elements from the table without
+    /// freeing the memory.
+    ///
+    /// Iteration starts at the provided iterator's current location.
+    ///
+    /// It is up to the caller to ensure that the iterator is valid for this
+    /// `RawTable` and covers all items that remain in the table.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub unsafe fn drain_iter_from(&mut self, iter: RawIter<T>) -> RawDrain<'_, T> {
+        debug_assert_eq!(iter.len(), self.len());
+        RawDrain {
+            iter,
+            table: ManuallyDrop::new(mem::replace(self, Self::new())),
+            orig_table: NonNull::from(self),
+            marker: PhantomData,
+        }
+    }
+
+    /// Returns an iterator which consumes all elements from the table.
+    ///
+    /// Iteration starts at the provided iterator's current location.
+    ///
+    /// It is up to the caller to ensure that the iterator is valid for this
+    /// `RawTable` and covers all items that remain in the table.
+    pub unsafe fn into_iter_from(self, iter: RawIter<T>) -> RawIntoIter<T> {
+        debug_assert_eq!(iter.len(), self.len());
+
+        let alloc = self.into_alloc();
+        RawIntoIter {
+            iter,
+            alloc,
+            marker: PhantomData,
+        }
+    }
+
+    /// Converts the table into a raw allocation. The contents of the table
+    /// should be dropped using a `RawIter` before freeing the allocation.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub(crate) fn into_alloc(self) -> Option<(NonNull<u8>, Layout)> {
+        let alloc = if self.is_empty_singleton() {
+            None
+        } else {
+            // Avoid `Option::unwrap_or_else` because it bloats LLVM IR.
+            let (layout, ctrl_offset) = match calculate_layout::<T>(self.buckets()) {
+                Some(lco) => lco,
+                None => unsafe { hint::unreachable_unchecked() },
+            };
+            Some((
+                unsafe { NonNull::new_unchecked(self.ctrl.as_ptr().sub(ctrl_offset)) },
+                layout,
+            ))
+        };
+        mem::forget(self);
+        alloc
+    }
+}
+
+unsafe impl<T> Send for RawTable<T> where T: Send {}
+unsafe impl<T> Sync for RawTable<T> where T: Sync {}
+
+impl<T: Clone> Clone for RawTable<T> {
+    fn clone(&self) -> Self {
+        if self.is_empty_singleton() {
+            Self::new()
+        } else {
+            unsafe {
+                let mut new_table = ManuallyDrop::new(
+                    // Avoid `Result::unwrap_or_else` because it bloats LLVM IR.
+                    match Self::new_uninitialized(self.buckets(), Fallibility::Infallible) {
+                        Ok(table) => table,
+                        Err(_) => hint::unreachable_unchecked(),
+                    },
+                );
+
+                new_table.clone_from_spec(self, |new_table| {
+                    // We need to free the memory allocated for the new table.
+                    new_table.free_buckets();
+                });
+
+                // Return the newly created table.
+                ManuallyDrop::into_inner(new_table)
+            }
+        }
+    }
+
+    fn clone_from(&mut self, source: &Self) {
+        if source.is_empty_singleton() {
+            *self = Self::new();
+        } else {
+            unsafe {
+                // First, drop all our elements without clearing the control bytes.
+                if mem::needs_drop::<T>() && self.len() != 0 {
+                    for item in self.iter() {
+                        item.drop();
+                    }
+                }
+
+                // If necessary, resize our table to match the source.
+                if self.buckets() != source.buckets() {
+                    // Skip our drop by using ptr::write.
+                    if !self.is_empty_singleton() {
+                        self.free_buckets();
+                    }
+                    (self as *mut Self).write(
+                        // Avoid `Result::unwrap_or_else` because it bloats LLVM IR.
+                        match Self::new_uninitialized(source.buckets(), Fallibility::Infallible) {
+                            Ok(table) => table,
+                            Err(_) => hint::unreachable_unchecked(),
+                        },
+                    );
+                }
+
+                self.clone_from_spec(source, |self_| {
+                    // We need to leave the table in an empty state.
+                    self_.clear_no_drop()
+                });
+            }
+        }
+    }
+}
+
+/// Specialization of `clone_from` for `Copy` types
+trait RawTableClone {
+    unsafe fn clone_from_spec(&mut self, source: &Self, on_panic: impl FnMut(&mut Self));
+}
+impl<T: Clone> RawTableClone for RawTable<T> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    default_fn! {
+        unsafe fn clone_from_spec(&mut self, source: &Self, on_panic: impl FnMut(&mut Self)) {
+            self.clone_from_impl(source, on_panic);
+        }
+    }
+}
+#[cfg(feature = "nightly")]
+impl<T: Copy> RawTableClone for RawTable<T> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    unsafe fn clone_from_spec(&mut self, source: &Self, _on_panic: impl FnMut(&mut Self)) {
+        source
+            .ctrl(0)
+            .copy_to_nonoverlapping(self.ctrl(0), self.num_ctrl_bytes());
+        source
+            .data_start()
+            .copy_to_nonoverlapping(self.data_start(), self.buckets());
+
+        self.items = source.items;
+        self.growth_left = source.growth_left;
+    }
+}
+
+impl<T: Clone> RawTable<T> {
+    /// Common code for clone and clone_from. Assumes `self.buckets() == source.buckets()`.
+    #[cfg_attr(feature = "inline-more", inline)]
+    unsafe fn clone_from_impl(&mut self, source: &Self, mut on_panic: impl FnMut(&mut Self)) {
+        // Copy the control bytes unchanged, in a single pass.
+        source
+            .ctrl(0)
+            .copy_to_nonoverlapping(self.ctrl(0), self.num_ctrl_bytes());
+
+        // The cloning of elements may panic, in which case we need
+        // to make sure we drop only the elements that have been
+        // cloned so far.
+        let mut guard = guard((0, &mut *self), |(index, self_)| {
+            if mem::needs_drop::<T>() && self_.len() != 0 {
+                for i in 0..=*index {
+                    if is_full(*self_.ctrl(i)) {
+                        self_.bucket(i).drop();
+                    }
+                }
+            }
+
+            // Depending on whether we were called from clone or clone_from, we
+            // either need to free the memory for the destination table or just
+            // clear the control bytes.
+            on_panic(self_);
+        });
+
+        for from in source.iter() {
+            let index = source.bucket_index(&from);
+            let to = guard.1.bucket(index);
+            to.write(from.as_ref().clone());
+
+            // Update the index in case we need to unwind.
+            guard.0 = index;
+        }
+
+        // Successfully cloned all items, no need to clean up.
+        mem::forget(guard);
+
+        self.items = source.items;
+        self.growth_left = source.growth_left;
+    }
+
+    /// Variant of `clone_from` to use when a hasher is available.
+    #[cfg(feature = "raw")]
+    pub fn clone_from_with_hasher(&mut self, source: &Self, hasher: impl Fn(&T) -> u64) {
+        // If we have enough capacity in the table, just clear it and insert
+        // elements one by one. We don't do this if we have the same number of
+        // buckets as the source since we can just copy the contents directly
+        // in that case.
+        if self.buckets() != source.buckets()
+            && bucket_mask_to_capacity(self.bucket_mask) >= source.len()
+        {
+            self.clear();
+
+            let guard_self = guard(&mut *self, |self_| {
+                // Clear the partially copied table if a panic occurs, otherwise
+                // items and growth_left will be out of sync with the contents
+                // of the table.
+                self_.clear();
+            });
+
+            unsafe {
+                for item in source.iter() {
+                    // This may panic.
+                    let item = item.as_ref().clone();
+                    let hash = hasher(&item);
+
+                    // We can use a simpler version of insert() here since:
+                    // - there are no DELETED entries.
+                    // - we know there is enough space in the table.
+                    // - all elements are unique.
+                    let index = guard_self.find_insert_slot(hash);
+                    guard_self.set_ctrl(index, h2(hash));
+                    guard_self.bucket(index).write(item);
+                }
+            }
+
+            // Successfully cloned all items, no need to clean up.
+            mem::forget(guard_self);
+
+            self.items = source.items;
+            self.growth_left -= source.items;
+        } else {
+            self.clone_from(source);
+        }
+    }
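+
+    // Usage sketch (assuming the `raw` feature; `src` is some existing
+    // `RawTable<u64>` and the identity hasher is illustrative):
+    //
+    //     let hasher = |v: &u64| *v;
+    //     let mut dst: RawTable<u64> = RawTable::with_capacity(src.len());
+    //     dst.clone_from_with_hasher(&src, hasher);
+    //     assert_eq!(dst.len(), src.len());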
+}
+
+#[cfg(feature = "nightly")]
+unsafe impl<#[may_dangle] T> Drop for RawTable<T> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drop(&mut self) {
+        if !self.is_empty_singleton() {
+            unsafe {
+                if mem::needs_drop::<T>() && self.len() != 0 {
+                    for item in self.iter() {
+                        item.drop();
+                    }
+                }
+                self.free_buckets();
+            }
+        }
+    }
+}
+#[cfg(not(feature = "nightly"))]
+impl<T> Drop for RawTable<T> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drop(&mut self) {
+        if !self.is_empty_singleton() {
+            unsafe {
+                if mem::needs_drop::<T>() && self.len() != 0 {
+                    for item in self.iter() {
+                        item.drop();
+                    }
+                }
+                self.free_buckets();
+            }
+        }
+    }
+}
+
+impl<T> IntoIterator for RawTable<T> {
+    type Item = T;
+    type IntoIter = RawIntoIter<T>;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn into_iter(self) -> RawIntoIter<T> {
+        unsafe {
+            let iter = self.iter();
+            self.into_iter_from(iter)
+        }
+    }
+}
+
+/// Iterator over a sub-range of a table. Unlike `RawIter` this iterator does
+/// not track an item count.
+pub(crate) struct RawIterRange<T> {
+    // Mask of full buckets in the current group. Bits are cleared from this
+    // mask as each element is processed.
+    current_group: BitMask,
+
+    // Pointer to the buckets for the current group.
+    data: Bucket<T>,
+
+    // Pointer to the next group of control bytes.
+    // Must be aligned to the group size.
+    next_ctrl: *const u8,
+
+    // Pointer one past the last control byte of this range.
+    end: *const u8,
+}
+
+impl<T> RawIterRange<T> {
+    /// Returns a `RawIterRange` covering a subset of a table.
+    ///
+    /// The control byte address must be aligned to the group size.
+    #[cfg_attr(feature = "inline-more", inline)]
+    unsafe fn new(ctrl: *const u8, data: Bucket<T>, len: usize) -> Self {
+        debug_assert_ne!(len, 0);
+        debug_assert_eq!(ctrl as usize % Group::WIDTH, 0);
+        let end = ctrl.add(len);
+
+        // Load the first group and advance ctrl to point to the next group
+        let current_group = Group::load_aligned(ctrl).match_full();
+        let next_ctrl = ctrl.add(Group::WIDTH);
+
+        Self {
+            current_group,
+            data,
+            next_ctrl,
+            end,
+        }
+    }
+
+    /// Splits a `RawIterRange` into two halves.
+    ///
+    /// The second half is `None` if the remaining range is smaller than or
+    /// equal to the group width.
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[cfg(feature = "rayon")]
+    pub(crate) fn split(mut self) -> (Self, Option<RawIterRange<T>>) {
+        unsafe {
+            if self.end <= self.next_ctrl {
+                // Nothing to split if the group that we are currently
+                // processing is the last one.
+                (self, None)
+            } else {
+                // len is the remaining number of elements after the group that
+                // we are currently processing. It must be a multiple of the
+                // group size (small tables are caught by the check above).
+                let len = offset_from(self.end, self.next_ctrl);
+                debug_assert_eq!(len % Group::WIDTH, 0);
+
+                // Split the remaining elements into two halves, but round the
+                // midpoint down in case there is an odd number of groups
+                // remaining. This ensures that:
+                // - The tail is at least 1 group long.
+                // - The split is roughly even considering we still have the
+                //   current group to process.
+                let mid = (len / 2) & !(Group::WIDTH - 1);
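+                // For example, with Group::WIDTH == 16 and len == 48 (three
+                // groups remaining), mid = (48 / 2) & !15 = 16: the head
+                // keeps one further group and the tail receives the other two.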
+
+                let tail = Self::new(
+                    self.next_ctrl.add(mid),
+                    self.data.next_n(Group::WIDTH).next_n(mid),
+                    len - mid,
+                );
+                debug_assert_eq!(
+                    self.data.next_n(Group::WIDTH).next_n(mid).ptr,
+                    tail.data.ptr
+                );
+                debug_assert_eq!(self.end, tail.end);
+                self.end = self.next_ctrl.add(mid);
+                debug_assert_eq!(self.end.add(Group::WIDTH), tail.next_ctrl);
+                (self, Some(tail))
+            }
+        }
+    }
+}
+
+// We make raw iterators unconditionally Send and Sync, and let the PhantomData
+// in the actual iterator implementations determine the real Send/Sync bounds.
+unsafe impl<T> Send for RawIterRange<T> {}
+unsafe impl<T> Sync for RawIterRange<T> {}
+
+impl<T> Clone for RawIterRange<T> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Self {
+            data: self.data.clone(),
+            next_ctrl: self.next_ctrl,
+            current_group: self.current_group,
+            end: self.end,
+        }
+    }
+}
+
+impl<T> Iterator for RawIterRange<T> {
+    type Item = Bucket<T>;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<Bucket<T>> {
+        unsafe {
+            loop {
+                if let Some(index) = self.current_group.lowest_set_bit() {
+                    self.current_group = self.current_group.remove_lowest_bit();
+                    return Some(self.data.next_n(index));
+                }
+
+                if self.next_ctrl >= self.end {
+                    return None;
+                }
+
+                // We might read past self.end up to the next group boundary,
+                // but this is fine because it only occurs on tables smaller
+                // than the group size where the trailing control bytes are all
+                // EMPTY. On larger tables self.end is guaranteed to be aligned
+                // to the group size (since tables are power-of-two sized).
+                self.current_group = Group::load_aligned(self.next_ctrl).match_full();
+                self.data = self.data.next_n(Group::WIDTH);
+                self.next_ctrl = self.next_ctrl.add(Group::WIDTH);
+            }
+        }
+    }
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        // We don't have an item count, so just guess based on the range size.
+        (
+            0,
+            Some(unsafe { offset_from(self.end, self.next_ctrl) + Group::WIDTH }),
+        )
+    }
+}
+
+impl<T> FusedIterator for RawIterRange<T> {}
+
+/// Iterator which returns a raw pointer to every full bucket in the table.
+///
+/// For maximum flexibility this iterator is not bound by a lifetime, but you
+/// must observe several rules when using it:
+/// - You must not free the hash table while iterating (including via growing/shrinking).
+/// - It is fine to erase a bucket that has been yielded by the iterator.
+/// - Erasing a bucket that has not yet been yielded by the iterator may still
+///   result in the iterator yielding that bucket (unless `reflect_remove` is called).
+/// - It is unspecified whether an element inserted after the iterator was
+///   created will be yielded by that iterator (unless `reflect_insert` is called).
+/// - The order in which the iterator yields buckets is unspecified and may
+///   change in the future.
+pub struct RawIter<T> {
+    pub(crate) iter: RawIterRange<T>,
+    items: usize,
+}
+
+impl<T> RawIter<T> {
+    /// Refresh the iterator so that it reflects a removal from the given bucket.
+    ///
+    /// For the iterator to remain valid, this method must be called once
+    /// for each removed bucket before `next` is called again.
+    ///
+    /// This method should be called _before_ the removal is made. It is not necessary to call this
+    /// method if you are removing an item that this iterator yielded in the past.
+    #[cfg(feature = "raw")]
+    pub fn reflect_remove(&mut self, b: &Bucket<T>) {
+        self.reflect_toggle_full(b, false);
+    }
+
+    /// Refresh the iterator so that it reflects an insertion into the given bucket.
+    ///
+    /// For the iterator to remain valid, this method must be called once
+    /// for each insert before `next` is called again.
+    ///
+    /// This method does not guarantee that an insertion of a bucket with a greater
+    /// index than the last one yielded will be reflected in the iterator.
+    ///
+    /// This method should be called _after_ the given insert is made.
+    #[cfg(feature = "raw")]
+    pub fn reflect_insert(&mut self, b: &Bucket<T>) {
+        self.reflect_toggle_full(b, true);
+    }
+
+    /// Refresh the iterator so that it reflects a change to the state of the given bucket.
+    #[cfg(feature = "raw")]
+    fn reflect_toggle_full(&mut self, b: &Bucket<T>, is_insert: bool) {
+        unsafe {
+            if b.as_ptr() > self.iter.data.as_ptr() {
+                // The iterator has already passed the bucket's group.
+                // So the toggle isn't relevant to this iterator.
+                return;
+            }
+
+            if self.iter.next_ctrl < self.iter.end
+                && b.as_ptr() <= self.iter.data.next_n(Group::WIDTH).as_ptr()
+            {
+                // The iterator has not yet reached the bucket's group.
+                // We don't need to reload anything, but we do need to adjust the item count.
+
+                if cfg!(debug_assertions) {
+                    // Double-check that the user isn't lying to us by checking the bucket state.
+                    // To do that, we need to find its control byte. We know that self.iter.data is
+                    // at self.iter.next_ctrl - Group::WIDTH, so we work from there:
+                    let offset = offset_from(self.iter.data.as_ptr(), b.as_ptr());
+                    let ctrl = self.iter.next_ctrl.sub(Group::WIDTH).add(offset);
+                    // This method should be called _before_ a removal, or _after_ an insert,
+                    // so in both cases the ctrl byte should indicate that the bucket is full.
+                    assert!(is_full(*ctrl));
+                }
+
+                if is_insert {
+                    self.items += 1;
+                } else {
+                    self.items -= 1;
+                }
+
+                return;
+            }
+
+            // The iterator is at the bucket group that the toggled bucket is in.
+            // We need to do two things:
+            //
+            //  - Determine if the iterator already yielded the toggled bucket.
+            //    If it did, we're done.
+            //  - Otherwise, update the iterator cached group so that it won't
+            //    yield a to-be-removed bucket, or _will_ yield a to-be-added bucket.
+            //    We'll also need to update the item count accordingly.
+            if let Some(index) = self.iter.current_group.lowest_set_bit() {
+                let next_bucket = self.iter.data.next_n(index);
+                if b.as_ptr() > next_bucket.as_ptr() {
+                    // The toggled bucket is "before" the bucket the iterator would yield next. We
+                    // therefore don't need to do anything --- the iterator has already passed the
+                    // bucket in question.
+                    //
+                    // The item count must already be correct, since a removal or insert "prior" to
+                    // the iterator's position wouldn't affect the item count.
+                } else {
+                    // The removed bucket is an upcoming bucket. We need to make sure it does _not_
+                    // get yielded, and also that it's no longer included in the item count.
+                    //
+                    // NOTE: We can't just reload the group here, both since that might reflect
+                    // inserts we've already passed, and because that might inadvertently unset the
+                    // bits for _other_ removals. If we do that, we'd have to also decrement the
+                    // item count for those other bits that we unset. But the presumably subsequent
+                    // call to reflect for those buckets might _also_ decrement the item count.
+                    // Instead, we _just_ flip the bit for the particular bucket the caller asked
+                    // us to reflect.
+                    let our_bit = offset_from(self.iter.data.as_ptr(), b.as_ptr());
+                    let was_full = self.iter.current_group.flip(our_bit);
+                    debug_assert_ne!(was_full, is_insert);
+
+                    if is_insert {
+                        self.items += 1;
+                    } else {
+                        self.items -= 1;
+                    }
+
+                    if cfg!(debug_assertions) {
+                        if b.as_ptr() == next_bucket.as_ptr() {
+                            // The removed bucket should no longer be next
+                            debug_assert_ne!(self.iter.current_group.lowest_set_bit(), Some(index));
+                        } else {
+                            // We should not have changed what bucket comes next.
+                            debug_assert_eq!(self.iter.current_group.lowest_set_bit(), Some(index));
+                        }
+                    }
+                }
+            } else {
+                // We must have already iterated past the removed item.
+            }
+        }
+    }
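+
+    // Usage sketch (assuming the `raw` feature; `bucket` was found earlier
+    // and has not yet been yielded by `iter`):
+    //
+    //     let mut iter = unsafe { table.iter() };
+    //     iter.reflect_remove(&bucket); // call this _before_ the removal
+    //     unsafe { table.erase(bucket); }
+    //     // `iter` will now neither yield `bucket` nor miscount its length.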
+}
+
+impl<T> Clone for RawIter<T> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Self {
+            iter: self.iter.clone(),
+            items: self.items,
+        }
+    }
+}
+
+impl<T> Iterator for RawIter<T> {
+    type Item = Bucket<T>;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<Bucket<T>> {
+        if let Some(b) = self.iter.next() {
+            self.items -= 1;
+            Some(b)
+        } else {
+            // We don't check against items == 0 here to allow the
+            // compiler to optimize away the item count entirely if the
+            // iterator length is never queried.
+            debug_assert_eq!(self.items, 0);
+            None
+        }
+    }
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (self.items, Some(self.items))
+    }
+}
+
+impl<T> ExactSizeIterator for RawIter<T> {}
+impl<T> FusedIterator for RawIter<T> {}
+
+/// Iterator which consumes a table and returns elements.
+pub struct RawIntoIter<T> {
+    iter: RawIter<T>,
+    alloc: Option<(NonNull<u8>, Layout)>,
+    marker: PhantomData<T>,
+}
+
+impl<T> RawIntoIter<T> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn iter(&self) -> RawIter<T> {
+        self.iter.clone()
+    }
+}
+
+unsafe impl<T> Send for RawIntoIter<T> where T: Send {}
+unsafe impl<T> Sync for RawIntoIter<T> where T: Sync {}
+
+#[cfg(feature = "nightly")]
+unsafe impl<#[may_dangle] T> Drop for RawIntoIter<T> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drop(&mut self) {
+        unsafe {
+            // Drop all remaining elements
+            if mem::needs_drop::<T>() && self.iter.len() != 0 {
+                while let Some(item) = self.iter.next() {
+                    item.drop();
+                }
+            }
+
+            // Free the table
+            if let Some((ptr, layout)) = self.alloc {
+                dealloc(ptr.as_ptr(), layout);
+            }
+        }
+    }
+}
+#[cfg(not(feature = "nightly"))]
+impl<T> Drop for RawIntoIter<T> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drop(&mut self) {
+        unsafe {
+            // Drop all remaining elements
+            if mem::needs_drop::<T>() && self.iter.len() != 0 {
+                while let Some(item) = self.iter.next() {
+                    item.drop();
+                }
+            }
+
+            // Free the table
+            if let Some((ptr, layout)) = self.alloc {
+                dealloc(ptr.as_ptr(), layout);
+            }
+        }
+    }
+}
+
+impl<T> Iterator for RawIntoIter<T> {
+    type Item = T;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<T> {
+        unsafe { Some(self.iter.next()?.read()) }
+    }
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+
+impl<T> ExactSizeIterator for RawIntoIter<T> {}
+impl<T> FusedIterator for RawIntoIter<T> {}
+
+/// Iterator which consumes elements without freeing the table storage.
+pub struct RawDrain<'a, T> {
+    iter: RawIter<T>,
+
+    // The table is moved into the iterator for the duration of the drain. This
+    // ensures that an empty table is left if the drain iterator is leaked
+    // without dropping.
+    table: ManuallyDrop<RawTable<T>>,
+    orig_table: NonNull<RawTable<T>>,
+
+    // We don't use a &'a mut RawTable<T> because we want RawDrain to be
+    // covariant over T.
+    marker: PhantomData<&'a RawTable<T>>,
+}
+
+impl<T> RawDrain<'_, T> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn iter(&self) -> RawIter<T> {
+        self.iter.clone()
+    }
+}
+
+unsafe impl<T> Send for RawDrain<'_, T> where T: Send {}
+unsafe impl<T> Sync for RawDrain<'_, T> where T: Sync {}
+
+impl<T> Drop for RawDrain<'_, T> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drop(&mut self) {
+        unsafe {
+            // Drop all remaining elements. Note that this may panic.
+            if mem::needs_drop::<T>() && self.iter.len() != 0 {
+                while let Some(item) = self.iter.next() {
+                    item.drop();
+                }
+            }
+
+            // Reset the contents of the table now that all elements have been
+            // dropped.
+            self.table.clear_no_drop();
+
+            // Move the now empty table back to its original location.
+            self.orig_table
+                .as_ptr()
+                .copy_from_nonoverlapping(&*self.table, 1);
+        }
+    }
+}
+
+impl<T> Iterator for RawDrain<'_, T> {
+    type Item = T;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<T> {
+        unsafe {
+            let item = self.iter.next()?;
+            Some(item.read())
+        }
+    }
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+
+impl<T> ExactSizeIterator for RawDrain<'_, T> {}
+impl<T> FusedIterator for RawDrain<'_, T> {}
+
+/// Iterator over occupied buckets that could match a given hash.
+///
+/// In rare cases, the iterator may return a bucket with a different hash.
+pub struct RawIterHash<'a, T> {
+    table: &'a RawTable<T>,
+
+    // The top 7 bits of the hash.
+    h2_hash: u8,
+
+    // The sequence of groups to probe in the search.
+    probe_seq: ProbeSeq,
+
+    // The current group and its position.
+    pos: usize,
+    group: Group,
+
+    // The elements within the group with a matching h2-hash.
+    bitmask: BitMaskIter,
+}
+
+impl<'a, T> RawIterHash<'a, T> {
+    fn new(table: &'a RawTable<T>, hash: u64) -> Self {
+        unsafe {
+            let h2_hash = h2(hash);
+            let mut probe_seq = table.probe_seq(hash);
+            let pos = probe_seq.next().unwrap();
+            let group = Group::load(table.ctrl(pos));
+            let bitmask = group.match_byte(h2_hash).into_iter();
+
+            RawIterHash {
+                table,
+                h2_hash,
+                probe_seq,
+                pos,
+                group,
+                bitmask,
+            }
+        }
+    }
+}
+
+impl<'a, T> Iterator for RawIterHash<'a, T> {
+    type Item = Bucket<T>;
+
+    fn next(&mut self) -> Option<Bucket<T>> {
+        unsafe {
+            loop {
+                if let Some(bit) = self.bitmask.next() {
+                    let index = (self.pos + bit) & self.table.bucket_mask;
+                    let bucket = self.table.bucket(index);
+                    return Some(bucket);
+                }
+                if likely(self.group.match_empty().any_bit_set()) {
+                    return None;
+                }
+                self.pos = self.probe_seq.next().unwrap();
+                self.group = Group::load(self.table.ctrl(self.pos));
+                self.bitmask = self.group.match_byte(self.h2_hash).into_iter();
+            }
+        }
+    }
+}
diff --git a/src/raw/sse2.rs b/src/raw/sse2.rs
new file mode 100644
index 0000000..a27bc09
--- /dev/null
+++ b/src/raw/sse2.rs
@@ -0,0 +1,144 @@
+use super::bitmask::BitMask;
+use super::EMPTY;
+use core::mem;
+
+#[cfg(target_arch = "x86")]
+use core::arch::x86;
+#[cfg(target_arch = "x86_64")]
+use core::arch::x86_64 as x86;
+
+pub type BitMaskWord = u16;
+pub const BITMASK_STRIDE: usize = 1;
+pub const BITMASK_MASK: BitMaskWord = 0xffff;
+
+/// Abstraction over a group of control bytes which can be scanned in
+/// parallel.
+///
+/// This implementation uses a 128-bit SSE value.
+#[derive(Copy, Clone)]
+pub struct Group(x86::__m128i);
+
+// FIXME: https://github.com/rust-lang/rust-clippy/issues/3859
+#[allow(clippy::use_self)]
+impl Group {
+    /// Number of bytes in the group.
+    pub const WIDTH: usize = mem::size_of::<Self>();
+
+    /// Returns a full group of empty bytes, suitable for use as the initial
+    /// value for an empty hash table.
+    ///
+    /// This is guaranteed to be aligned to the group size.
+    pub const fn static_empty() -> &'static [u8; Group::WIDTH] {
+        #[repr(C)]
+        struct AlignedBytes {
+            _align: [Group; 0],
+            bytes: [u8; Group::WIDTH],
+        }
+        const ALIGNED_BYTES: AlignedBytes = AlignedBytes {
+            _align: [],
+            bytes: [EMPTY; Group::WIDTH],
+        };
+        &ALIGNED_BYTES.bytes
+    }
+
+    /// Loads a group of bytes starting at the given address.
+    #[inline]
+    #[allow(clippy::cast_ptr_alignment)] // unaligned load
+    pub unsafe fn load(ptr: *const u8) -> Self {
+        Group(x86::_mm_loadu_si128(ptr as *const _))
+    }
+
+    /// Loads a group of bytes starting at the given address, which must be
+    /// aligned to `mem::align_of::<Group>()`.
+    #[inline]
+    #[allow(clippy::cast_ptr_alignment)]
+    pub unsafe fn load_aligned(ptr: *const u8) -> Self {
+        // FIXME: use align_offset once it stabilizes
+        debug_assert_eq!(ptr as usize & (mem::align_of::<Self>() - 1), 0);
+        Group(x86::_mm_load_si128(ptr as *const _))
+    }
+
+    /// Stores the group of bytes to the given address, which must be
+    /// aligned to `mem::align_of::<Group>()`.
+    #[inline]
+    #[allow(clippy::cast_ptr_alignment)]
+    pub unsafe fn store_aligned(self, ptr: *mut u8) {
+        // FIXME: use align_offset once it stabilizes
+        debug_assert_eq!(ptr as usize & (mem::align_of::<Self>() - 1), 0);
+        x86::_mm_store_si128(ptr as *mut _, self.0);
+    }
+
+    /// Returns a `BitMask` indicating all bytes in the group which have
+    /// the given value.
+    #[inline]
+    pub fn match_byte(self, byte: u8) -> BitMask {
+        #[allow(
+            clippy::cast_possible_wrap, // byte: u8 as i8
+            // byte: i32 as u16
+            //   note: _mm_movemask_epi8 returns a 16-bit mask in a i32, the
+            //   upper 16-bits of the i32 are zeroed:
+            clippy::cast_sign_loss,
+            clippy::cast_possible_truncation
+        )]
+        unsafe {
+            let cmp = x86::_mm_cmpeq_epi8(self.0, x86::_mm_set1_epi8(byte as i8));
+            BitMask(x86::_mm_movemask_epi8(cmp) as u16)
+        }
+    }
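+
+    // Worked example: if the group's sixteen bytes are
+    //   [0x23, EMPTY, 0x23, DELETED, EMPTY, ..., EMPTY]
+    // then match_byte(0x23) returns the bitmask 0b0000_0000_0000_0101
+    // (bits 0 and 2 set).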
+
+    /// Returns a `BitMask` indicating all bytes in the group which are
+    /// `EMPTY`.
+    #[inline]
+    pub fn match_empty(self) -> BitMask {
+        self.match_byte(EMPTY)
+    }
+
+    /// Returns a `BitMask` indicating all bytes in the group which are
+    /// `EMPTY` or `DELETED`.
+    #[inline]
+    pub fn match_empty_or_deleted(self) -> BitMask {
+        #[allow(
+            // byte: i32 as u16
+            //   note: _mm_movemask_epi8 returns a 16-bit mask in a i32, the
+            //   upper 16-bits of the i32 are zeroed:
+            clippy::cast_sign_loss,
+            clippy::cast_possible_truncation
+        )]
+        unsafe {
+            // A byte is EMPTY or DELETED iff the high bit is set
+            BitMask(x86::_mm_movemask_epi8(self.0) as u16)
+        }
+    }
+
+    /// Returns a `BitMask` indicating all bytes in the group which are full.
+    #[inline]
+    pub fn match_full(&self) -> BitMask {
+        self.match_empty_or_deleted().invert()
+    }
+
+    /// Performs the following transformation on all bytes in the group:
+    /// - `EMPTY => EMPTY`
+    /// - `DELETED => EMPTY`
+    /// - `FULL => DELETED`
+    #[inline]
+    pub fn convert_special_to_empty_and_full_to_deleted(self) -> Self {
+        // Map high_bit = 1 (EMPTY or DELETED) to 1111_1111
+        // and high_bit = 0 (FULL) to 1000_0000
+        //
+        // Here's this logic expanded to concrete values:
+        //   special = if 0 > byte { 1111_1111 } else { 0000_0000 }
+        //   1111_1111 | 1000_0000 = 1111_1111
+        //   0000_0000 | 1000_0000 = 1000_0000
+        #[allow(
+            clippy::cast_possible_wrap, // byte: 0x80_u8 as i8
+        )]
+        unsafe {
+            let zero = x86::_mm_setzero_si128();
+            let special = x86::_mm_cmpgt_epi8(zero, self.0);
+            Group(x86::_mm_or_si128(
+                special,
+                x86::_mm_set1_epi8(0x80_u8 as i8),
+            ))
+        }
+    }
+}
diff --git a/src/rustc_entry.rs b/src/rustc_entry.rs
new file mode 100644
index 0000000..b6ea7bc
--- /dev/null
+++ b/src/rustc_entry.rs
@@ -0,0 +1,618 @@
+use self::RustcEntry::*;
+use crate::map::{make_hash, Drain, HashMap, IntoIter, Iter, IterMut};
+use crate::raw::{Bucket, RawTable};
+use core::fmt::{self, Debug};
+use core::hash::{BuildHasher, Hash};
+use core::mem;
+
+impl<K, V, S> HashMap<K, V, S>
+where
+    K: Eq + Hash,
+    S: BuildHasher,
+{
+    /// Gets the given key's corresponding entry in the map for in-place manipulation.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut letters = HashMap::new();
+    ///
+    /// for ch in "a short treatise on fungi".chars() {
+    ///     let counter = letters.rustc_entry(ch).or_insert(0);
+    ///     *counter += 1;
+    /// }
+    ///
+    /// assert_eq!(letters[&'s'], 2);
+    /// assert_eq!(letters[&'t'], 3);
+    /// assert_eq!(letters[&'u'], 1);
+    /// assert_eq!(letters.get(&'y'), None);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn rustc_entry(&mut self, key: K) -> RustcEntry<'_, K, V> {
+        let hash = make_hash(&self.hash_builder, &key);
+        if let Some(elem) = self.table.find(hash, |q| q.0.eq(&key)) {
+            RustcEntry::Occupied(RustcOccupiedEntry {
+                key: Some(key),
+                elem,
+                table: &mut self.table,
+            })
+        } else {
+            // Ideally we would put this in RustcVacantEntry::insert, but Entry is not
+            // generic over the BuildHasher and adding a generic parameter would be
+            // a breaking change.
+            self.reserve(1);
+
+            RustcEntry::Vacant(RustcVacantEntry {
+                hash,
+                key,
+                table: &mut self.table,
+            })
+        }
+    }
+}
+
+/// A view into a single entry in a map, which may either be vacant or occupied.
+///
+/// This `enum` is constructed from the [`entry`] method on [`HashMap`].
+///
+/// [`HashMap`]: struct.HashMap.html
+/// [`entry`]: struct.HashMap.html#method.rustc_entry
+pub enum RustcEntry<'a, K, V> {
+    /// An occupied entry.
+    Occupied(RustcOccupiedEntry<'a, K, V>),
+
+    /// A vacant entry.
+    Vacant(RustcVacantEntry<'a, K, V>),
+}
+
+impl<K: Debug, V: Debug> Debug for RustcEntry<'_, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match *self {
+            Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
+            Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(),
+        }
+    }
+}
+
+/// A view into an occupied entry in a `HashMap`.
+/// It is part of the [`RustcEntry`] enum.
+///
+/// [`RustcEntry`]: enum.RustcEntry.html
+pub struct RustcOccupiedEntry<'a, K, V> {
+    key: Option<K>,
+    elem: Bucket<(K, V)>,
+    table: &'a mut RawTable<(K, V)>,
+}
+
+unsafe impl<K, V> Send for RustcOccupiedEntry<'_, K, V>
+where
+    K: Send,
+    V: Send,
+{
+}
+unsafe impl<K, V> Sync for RustcOccupiedEntry<'_, K, V>
+where
+    K: Sync,
+    V: Sync,
+{
+}
+
+impl<K: Debug, V: Debug> Debug for RustcOccupiedEntry<'_, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("OccupiedEntry")
+            .field("key", self.key())
+            .field("value", self.get())
+            .finish()
+    }
+}
+
+/// A view into a vacant entry in a `HashMap`.
+/// It is part of the [`RustcEntry`] enum.
+///
+/// [`RustcEntry`]: enum.RustcEntry.html
+pub struct RustcVacantEntry<'a, K, V> {
+    hash: u64,
+    key: K,
+    table: &'a mut RawTable<(K, V)>,
+}
+
+impl<K: Debug, V> Debug for RustcVacantEntry<'_, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("VacantEntry").field(self.key()).finish()
+    }
+}
+
+impl<'a, K, V> RustcEntry<'a, K, V> {
+    /// Sets the value of the entry, and returns a `RustcOccupiedEntry`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// let entry = map.rustc_entry("horseyland").insert(37);
+    ///
+    /// assert_eq!(entry.key(), &"horseyland");
+    /// ```
+    pub fn insert(self, value: V) -> RustcOccupiedEntry<'a, K, V> {
+        match self {
+            Vacant(entry) => entry.insert_entry(value),
+            Occupied(mut entry) => {
+                entry.insert(value);
+                entry
+            }
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting the default if empty, and returns
+    /// a mutable reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// map.rustc_entry("poneyland").or_insert(3);
+    /// assert_eq!(map["poneyland"], 3);
+    ///
+    /// *map.rustc_entry("poneyland").or_insert(10) *= 2;
+    /// assert_eq!(map["poneyland"], 6);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_insert(self, default: V) -> &'a mut V
+    where
+        K: Hash,
+    {
+        match self {
+            Occupied(entry) => entry.into_mut(),
+            Vacant(entry) => entry.insert(default),
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting the result of the default function if empty,
+    /// and returns a mutable reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, String> = HashMap::new();
+    /// let s = "hoho".to_string();
+    ///
+    /// map.rustc_entry("poneyland").or_insert_with(|| s);
+    ///
+    /// assert_eq!(map["poneyland"], "hoho".to_string());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V
+    where
+        K: Hash,
+    {
+        match self {
+            Occupied(entry) => entry.into_mut(),
+            Vacant(entry) => entry.insert(default()),
+        }
+    }
+
+    /// Returns a reference to this entry's key.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// assert_eq!(map.rustc_entry("poneyland").key(), &"poneyland");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn key(&self) -> &K {
+        match *self {
+            Occupied(ref entry) => entry.key(),
+            Vacant(ref entry) => entry.key(),
+        }
+    }
+
+    /// Provides in-place mutable access to an occupied entry before any
+    /// potential inserts into the map.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// map.rustc_entry("poneyland")
+    ///    .and_modify(|e| { *e += 1 })
+    ///    .or_insert(42);
+    /// assert_eq!(map["poneyland"], 42);
+    ///
+    /// map.rustc_entry("poneyland")
+    ///    .and_modify(|e| { *e += 1 })
+    ///    .or_insert(42);
+    /// assert_eq!(map["poneyland"], 43);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn and_modify<F>(self, f: F) -> Self
+    where
+        F: FnOnce(&mut V),
+    {
+        match self {
+            Occupied(mut entry) => {
+                f(entry.get_mut());
+                Occupied(entry)
+            }
+            Vacant(entry) => Vacant(entry),
+        }
+    }
+}
+
+impl<'a, K, V: Default> RustcEntry<'a, K, V> {
+    /// Ensures a value is in the entry by inserting the default value if empty,
+    /// and returns a mutable reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # fn main() {
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, Option<u32>> = HashMap::new();
+    /// map.rustc_entry("poneyland").or_default();
+    ///
+    /// assert_eq!(map["poneyland"], None);
+    /// # }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_default(self) -> &'a mut V
+    where
+        K: Hash,
+    {
+        match self {
+            Occupied(entry) => entry.into_mut(),
+            Vacant(entry) => entry.insert(Default::default()),
+        }
+    }
+}
+
+impl<'a, K, V> RustcOccupiedEntry<'a, K, V> {
+    /// Gets a reference to the key in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.rustc_entry("poneyland").or_insert(12);
+    /// assert_eq!(map.rustc_entry("poneyland").key(), &"poneyland");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn key(&self) -> &K {
+        unsafe { &self.elem.as_ref().0 }
+    }
+
+    /// Takes ownership of the key and value from the map.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::RustcEntry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.rustc_entry("poneyland").or_insert(12);
+    ///
+    /// if let RustcEntry::Occupied(o) = map.rustc_entry("poneyland") {
+    ///     // We delete the entry from the map.
+    ///     o.remove_entry();
+    /// }
+    ///
+    /// assert_eq!(map.contains_key("poneyland"), false);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove_entry(self) -> (K, V) {
+        unsafe { self.table.remove(self.elem) }
+    }
+
+    /// Gets a reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::RustcEntry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.rustc_entry("poneyland").or_insert(12);
+    ///
+    /// if let RustcEntry::Occupied(o) = map.rustc_entry("poneyland") {
+    ///     assert_eq!(o.get(), &12);
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get(&self) -> &V {
+        unsafe { &self.elem.as_ref().1 }
+    }
+
+    /// Gets a mutable reference to the value in the entry.
+    ///
+    /// If you need a reference to the `RustcOccupiedEntry` which may outlive the
+    /// destruction of the `RustcEntry` value, see [`into_mut`].
+    ///
+    /// [`into_mut`]: #method.into_mut
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::RustcEntry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.rustc_entry("poneyland").or_insert(12);
+    ///
+    /// assert_eq!(map["poneyland"], 12);
+    /// if let RustcEntry::Occupied(mut o) = map.rustc_entry("poneyland") {
+    ///     *o.get_mut() += 10;
+    ///     assert_eq!(*o.get(), 22);
+    ///
+    ///     // We can use the same RustcEntry multiple times.
+    ///     *o.get_mut() += 2;
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 24);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get_mut(&mut self) -> &mut V {
+        unsafe { &mut self.elem.as_mut().1 }
+    }
+
+    /// Converts the `RustcOccupiedEntry` into a mutable reference to the value in the entry
+    /// with a lifetime bound to the map itself.
+    ///
+    /// If you need multiple references to the `RustcOccupiedEntry`, see [`get_mut`].
+    ///
+    /// [`get_mut`]: #method.get_mut
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::RustcEntry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.rustc_entry("poneyland").or_insert(12);
+    ///
+    /// assert_eq!(map["poneyland"], 12);
+    /// if let RustcEntry::Occupied(o) = map.rustc_entry("poneyland") {
+    ///     *o.into_mut() += 10;
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 22);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn into_mut(self) -> &'a mut V {
+        unsafe { &mut self.elem.as_mut().1 }
+    }
+
+    /// Sets the value of the entry, and returns the entry's old value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::RustcEntry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.rustc_entry("poneyland").or_insert(12);
+    ///
+    /// if let RustcEntry::Occupied(mut o) = map.rustc_entry("poneyland") {
+    ///     assert_eq!(o.insert(15), 12);
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 15);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(&mut self, mut value: V) -> V {
+        let old_value = self.get_mut();
+        mem::swap(&mut value, old_value);
+        value
+    }
+
+    /// Takes the value out of the entry, and returns it.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::RustcEntry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.rustc_entry("poneyland").or_insert(12);
+    ///
+    /// if let RustcEntry::Occupied(o) = map.rustc_entry("poneyland") {
+    ///     assert_eq!(o.remove(), 12);
+    /// }
+    ///
+    /// assert_eq!(map.contains_key("poneyland"), false);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove(self) -> V {
+        self.remove_entry().1
+    }
+
+    /// Replaces the entry, returning the old key and value. The new key in the hash map will be
+    /// the key used to create this entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{RustcEntry, HashMap};
+    /// use std::rc::Rc;
+    ///
+    /// let mut map: HashMap<Rc<String>, u32> = HashMap::new();
+    /// map.insert(Rc::new("Stringthing".to_string()), 15);
+    ///
+    /// let my_key = Rc::new("Stringthing".to_string());
+    ///
+    /// if let RustcEntry::Occupied(entry) = map.rustc_entry(my_key) {
+    ///     // Also replace the key with a handle to our other key.
+    ///     let (old_key, old_value): (Rc<String>, u32) = entry.replace_entry(16);
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn replace_entry(self, value: V) -> (K, V) {
+        let entry = unsafe { self.elem.as_mut() };
+
+        let old_key = mem::replace(&mut entry.0, self.key.unwrap());
+        let old_value = mem::replace(&mut entry.1, value);
+
+        (old_key, old_value)
+    }
+
+    /// Replaces the key in the hash map with the key used to create this entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{RustcEntry, HashMap};
+    /// use std::rc::Rc;
+    ///
+    /// let mut map: HashMap<Rc<String>, u32> = HashMap::new();
+    /// let mut known_strings: Vec<Rc<String>> = Vec::new();
+    ///
+    /// // Initialise known strings, run program, etc.
+    ///
+    /// reclaim_memory(&mut map, &known_strings);
+    ///
+    /// fn reclaim_memory(map: &mut HashMap<Rc<String>, u32>, known_strings: &[Rc<String>]) {
+    ///     for s in known_strings {
+    ///         if let RustcEntry::Occupied(entry) = map.rustc_entry(s.clone()) {
+    ///             // Replaces the entry's key with our version of it in `known_strings`.
+    ///             entry.replace_key();
+    ///         }
+    ///     }
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn replace_key(self) -> K {
+        let entry = unsafe { self.elem.as_mut() };
+        mem::replace(&mut entry.0, self.key.unwrap())
+    }
+}
+
+impl<'a, K, V> RustcVacantEntry<'a, K, V> {
+    /// Gets a reference to the key that would be used when inserting a value
+    /// through the `RustcVacantEntry`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// assert_eq!(map.rustc_entry("poneyland").key(), &"poneyland");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn key(&self) -> &K {
+        &self.key
+    }
+
+    /// Takes ownership of the key.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::RustcEntry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// if let RustcEntry::Vacant(v) = map.rustc_entry("poneyland") {
+    ///     v.into_key();
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn into_key(self) -> K {
+        self.key
+    }
+
+    /// Sets the value of the entry with the `RustcVacantEntry`'s key,
+    /// and returns a mutable reference to it.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::RustcEntry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// if let RustcEntry::Vacant(o) = map.rustc_entry("poneyland") {
+    ///     o.insert(37);
+    /// }
+    /// assert_eq!(map["poneyland"], 37);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(self, value: V) -> &'a mut V {
+        let bucket = self.table.insert_no_grow(self.hash, (self.key, value));
+        unsafe { &mut bucket.as_mut().1 }
+    }
+
+    /// Sets the value of the entry with the `RustcVacantEntry`'s key,
+    /// and returns a `RustcOccupiedEntry`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::RustcEntry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// if let RustcEntry::Vacant(v) = map.rustc_entry("poneyland") {
+    ///     let o = v.insert_entry(37);
+    ///     assert_eq!(o.get(), &37);
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert_entry(self, value: V) -> RustcOccupiedEntry<'a, K, V> {
+        let bucket = self.table.insert_no_grow(self.hash, (self.key, value));
+        RustcOccupiedEntry {
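+            // The key has just been moved into the table, so the occupied
+            // entry keeps no spare copy for `replace_key`/`replace_entry`.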
+            key: None,
+            elem: bucket,
+            table: self.table,
+        }
+    }
+}
+
+impl<K, V> IterMut<'_, K, V> {
+    /// Returns an iterator of references over the remaining items.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn rustc_iter(&self) -> Iter<'_, K, V> {
+        self.iter()
+    }
+}
+
+impl<K, V> IntoIter<K, V> {
+    /// Returns an iterator of references over the remaining items.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn rustc_iter(&self) -> Iter<'_, K, V> {
+        self.iter()
+    }
+}
+
+impl<K, V> Drain<'_, K, V> {
+    /// Returns an iterator of references over the remaining items.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn rustc_iter(&self) -> Iter<'_, K, V> {
+        self.iter()
+    }
+}
diff --git a/src/scopeguard.rs b/src/scopeguard.rs
new file mode 100644
index 0000000..32c9694
--- /dev/null
+++ b/src/scopeguard.rs
@@ -0,0 +1,49 @@
+// Extracted from the scopeguard crate
+use core::ops::{Deref, DerefMut};
+
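+/// Runs a cleanup closure over the wrapped value when the guard is dropped,
+/// even if the scope is exited by a panic.
+///
+/// A minimal usage sketch (illustrative only; this module is crate-internal,
+/// so the snippet is marked `ignore` rather than compiled as a doctest):
+///
+/// ```ignore
+/// let mut v = vec![1, 2, 3];
+/// {
+///     let mut g = guard(&mut v, |v| v.clear());
+///     g.push(4); // Deref/DerefMut expose the guarded value directly.
+/// } // `dropfn` runs here, clearing the Vec.
+/// ```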
+pub struct ScopeGuard<T, F>
+where
+    F: FnMut(&mut T),
+{
+    dropfn: F,
+    value: T,
+}
+
+#[cfg_attr(feature = "inline-more", inline)]
+pub fn guard<T, F>(value: T, dropfn: F) -> ScopeGuard<T, F>
+where
+    F: FnMut(&mut T),
+{
+    ScopeGuard { dropfn, value }
+}
+
+impl<T, F> Deref for ScopeGuard<T, F>
+where
+    F: FnMut(&mut T),
+{
+    type Target = T;
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn deref(&self) -> &T {
+        &self.value
+    }
+}
+
+impl<T, F> DerefMut for ScopeGuard<T, F>
+where
+    F: FnMut(&mut T),
+{
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn deref_mut(&mut self) -> &mut T {
+        &mut self.value
+    }
+}
+
+impl<T, F> Drop for ScopeGuard<T, F>
+where
+    F: FnMut(&mut T),
+{
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drop(&mut self) {
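+        // Run the cleanup closure exactly once as the guard leaves scope.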
+        (self.dropfn)(&mut self.value)
+    }
+}
diff --git a/src/set.rs b/src/set.rs
new file mode 100644
index 0000000..b8460fd
--- /dev/null
+++ b/src/set.rs
@@ -0,0 +1,2119 @@
+use crate::TryReserveError;
+use alloc::borrow::ToOwned;
+use core::borrow::Borrow;
+use core::fmt;
+use core::hash::{BuildHasher, Hash};
+use core::iter::{Chain, FromIterator, FusedIterator};
+use core::mem;
+use core::ops::{BitAnd, BitOr, BitXor, Sub};
+
+use super::map::{self, ConsumeAllOnDrop, DefaultHashBuilder, DrainFilterInner, HashMap, Keys};
+
+// Future Optimization (FIXME!)
+// =============================
+//
+// Iteration over zero sized values is a noop. There is no need
+// for `bucket.val` in the case of HashSet. I suppose we would need HKT
+// to get rid of it properly.
+
+/// A hash set implemented as a `HashMap` where the value is `()`.
+///
+/// As with the [`HashMap`] type, a `HashSet` requires that the elements
+/// implement the [`Eq`] and [`Hash`] traits. This can frequently be achieved by
+/// using `#[derive(PartialEq, Eq, Hash)]`. If you implement these yourself,
+/// it is important that the following property holds:
+///
+/// ```text
+/// k1 == k2 -> hash(k1) == hash(k2)
+/// ```
+///
+/// In other words, if two keys are equal, their hashes must be equal.
+///
+/// It is a logic error for an item to be modified in such a way that the
+/// item's hash, as determined by the [`Hash`] trait, or its equality, as
+/// determined by the [`Eq`] trait, changes while it is in the set. This is
+/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or
+/// unsafe code.
+///
+/// It is also a logic error for the [`Hash`] implementation of a key to panic.
+/// This is generally only possible if the trait is implemented manually. If a
+/// panic does occur then the contents of the `HashSet` may become corrupted and
+/// some items may be dropped from the table.
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::HashSet;
+/// // Type inference lets us omit an explicit type signature (which
+/// // would be `HashSet<String>` in this example).
+/// let mut books = HashSet::new();
+///
+/// // Add some books.
+/// books.insert("A Dance With Dragons".to_string());
+/// books.insert("To Kill a Mockingbird".to_string());
+/// books.insert("The Odyssey".to_string());
+/// books.insert("The Great Gatsby".to_string());
+///
+/// // Check for a specific one.
+/// if !books.contains("The Winds of Winter") {
+///     println!("We have {} books, but The Winds of Winter ain't one.",
+///              books.len());
+/// }
+///
+/// // Remove a book.
+/// books.remove("The Odyssey");
+///
+/// // Iterate over everything.
+/// for book in &books {
+///     println!("{}", book);
+/// }
+/// ```
+///
+/// The easiest way to use `HashSet` with a custom type is to derive
+/// [`Eq`] and [`Hash`]. We must also derive [`PartialEq`]; in the future this
+/// will be implied by [`Eq`].
+///
+/// ```
+/// use hashbrown::HashSet;
+/// #[derive(Hash, Eq, PartialEq, Debug)]
+/// struct Viking {
+///     name: String,
+///     power: usize,
+/// }
+///
+/// let mut vikings = HashSet::new();
+///
+/// vikings.insert(Viking { name: "Einar".to_string(), power: 9 });
+/// vikings.insert(Viking { name: "Einar".to_string(), power: 9 });
+/// vikings.insert(Viking { name: "Olaf".to_string(), power: 4 });
+/// vikings.insert(Viking { name: "Harald".to_string(), power: 8 });
+///
+/// // Use derived implementation to print the vikings.
+/// for x in &vikings {
+///     println!("{:?}", x);
+/// }
+/// ```
+///
+/// A `HashSet` with a fixed list of elements can be initialized from an array:
+///
+/// ```
+/// use hashbrown::HashSet;
+///
+/// let viking_names: HashSet<&'static str> =
+///     [ "Einar", "Olaf", "Harald" ].iter().cloned().collect();
+/// // use the values stored in the set
+/// ```
+///
+/// [`Cell`]: https://doc.rust-lang.org/std/cell/struct.Cell.html
+/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+/// [`HashMap`]: struct.HashMap.html
+/// [`PartialEq`]: https://doc.rust-lang.org/std/cmp/trait.PartialEq.html
+/// [`RefCell`]: https://doc.rust-lang.org/std/cell/struct.RefCell.html
+pub struct HashSet<T, S = DefaultHashBuilder> {
+    pub(crate) map: HashMap<T, (), S>,
+}
+
+impl<T: Clone, S: Clone> Clone for HashSet<T, S> {
+    fn clone(&self) -> Self {
+        HashSet {
+            map: self.map.clone(),
+        }
+    }
+
+    fn clone_from(&mut self, source: &Self) {
+        self.map.clone_from(&source.map);
+    }
+}
+
+#[cfg(feature = "ahash")]
+impl<T> HashSet<T, DefaultHashBuilder> {
+    /// Creates an empty `HashSet`.
+    ///
+    /// The hash set is initially created with a capacity of 0, so it will not allocate until it
+    /// is first inserted into.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let set: HashSet<i32> = HashSet::new();
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn new() -> Self {
+        Self {
+            map: HashMap::new(),
+        }
+    }
+
+    /// Creates an empty `HashSet` with the specified capacity.
+    ///
+    /// The hash set will be able to hold at least `capacity` elements without
+    /// reallocating. If `capacity` is 0, the hash set will not allocate.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let set: HashSet<i32> = HashSet::with_capacity(10);
+    /// assert!(set.capacity() >= 10);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn with_capacity(capacity: usize) -> Self {
+        Self {
+            map: HashMap::with_capacity(capacity),
+        }
+    }
+}
+
+impl<T, S> HashSet<T, S> {
+    /// Creates a new empty hash set which will use the given hasher to hash
+    /// keys.
+    ///
+    /// The hash set is also created with the default initial capacity.
+    ///
+    /// Warning: `hasher` is normally randomly generated, and
+    /// is designed to allow `HashSet`s to be resistant to attacks that
+    /// cause many collisions and very poor performance. Setting it
+    /// manually using this function can expose a DoS attack vector.
+    ///
+    /// The `hasher` passed should implement the [`BuildHasher`] trait for
+    /// the `HashSet` to be useful; see its documentation for details.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// use hashbrown::hash_map::DefaultHashBuilder;
+    ///
+    /// let s = DefaultHashBuilder::default();
+    /// let mut set = HashSet::with_hasher(s);
+    /// set.insert(2);
+    /// ```
+    ///
+    /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub const fn with_hasher(hasher: S) -> Self {
+        Self {
+            map: HashMap::with_hasher(hasher),
+        }
+    }
+
+    /// Creates an empty `HashSet` with the specified capacity, using
+    /// `hasher` to hash the keys.
+    ///
+    /// The hash set will be able to hold at least `capacity` elements without
+    /// reallocating. If `capacity` is 0, the hash set will not allocate.
+    ///
+    /// Warning: `hasher` is normally randomly generated, and
+    /// is designed to allow `HashSet`s to be resistant to attacks that
+    /// cause many collisions and very poor performance. Setting it
+    /// manually using this function can expose a DoS attack vector.
+    ///
+    /// The `hasher` passed should implement the [`BuildHasher`] trait for
+    /// the `HashSet` to be useful; see its documentation for details.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// use hashbrown::hash_map::DefaultHashBuilder;
+    ///
+    /// let s = DefaultHashBuilder::default();
+    /// let mut set = HashSet::with_capacity_and_hasher(10, s);
+    /// set.insert(1);
+    /// ```
+    ///
+    /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> Self {
+        Self {
+            map: HashMap::with_capacity_and_hasher(capacity, hasher),
+        }
+    }
+
+    /// Returns the number of elements the set can hold without reallocating.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let set: HashSet<i32> = HashSet::with_capacity(100);
+    /// assert!(set.capacity() >= 100);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn capacity(&self) -> usize {
+        self.map.capacity()
+    }
+
+    /// An iterator visiting all elements in arbitrary order.
+    /// The iterator element type is `&'a T`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let mut set = HashSet::new();
+    /// set.insert("a");
+    /// set.insert("b");
+    ///
+    /// // Will print in an arbitrary order.
+    /// for x in set.iter() {
+    ///     println!("{}", x);
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn iter(&self) -> Iter<'_, T> {
+        Iter {
+            iter: self.map.keys(),
+        }
+    }
+
+    /// Returns the number of elements in the set.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut v = HashSet::new();
+    /// assert_eq!(v.len(), 0);
+    /// v.insert(1);
+    /// assert_eq!(v.len(), 1);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn len(&self) -> usize {
+        self.map.len()
+    }
+
+    /// Returns `true` if the set contains no elements.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut v = HashSet::new();
+    /// assert!(v.is_empty());
+    /// v.insert(1);
+    /// assert!(!v.is_empty());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn is_empty(&self) -> bool {
+        self.map.is_empty()
+    }
+
+    /// Clears the set, returning all elements in an iterator.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set: HashSet<_> = [1, 2, 3].iter().cloned().collect();
+    /// assert!(!set.is_empty());
+    ///
+    /// // print 1, 2, 3 in an arbitrary order
+    /// for i in set.drain() {
+    ///     println!("{}", i);
+    /// }
+    ///
+    /// assert!(set.is_empty());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn drain(&mut self) -> Drain<'_, T> {
+        Drain {
+            iter: self.map.drain(),
+        }
+    }
+
+    /// Retains only the elements specified by the predicate.
+    ///
+    /// In other words, remove all elements `e` such that `f(&e)` returns `false`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let xs = [1,2,3,4,5,6];
+    /// let mut set: HashSet<i32> = xs.iter().cloned().collect();
+    /// set.retain(|&k| k % 2 == 0);
+    /// assert_eq!(set.len(), 3);
+    /// ```
+    pub fn retain<F>(&mut self, mut f: F)
+    where
+        F: FnMut(&T) -> bool,
+    {
+        self.map.retain(|k, _| f(k));
+    }
+
+    /// Drains elements for which the given predicate returns `true`,
+    /// and returns an iterator over the removed items.
+    ///
+    /// In other words, moves out all elements `e` for which `f(&e)` returns
+    /// `true` into another iterator.
+    ///
+    /// When the returned `DrainFilter` is dropped, any remaining elements that
+    /// satisfy the predicate are removed from the set and dropped.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set: HashSet<i32> = (0..8).collect();
+    /// let drained: HashSet<i32> = set.drain_filter(|v| v % 2 == 0).collect();
+    ///
+    /// let mut evens = drained.into_iter().collect::<Vec<_>>();
+    /// let mut odds = set.into_iter().collect::<Vec<_>>();
+    /// evens.sort();
+    /// odds.sort();
+    ///
+    /// assert_eq!(evens, vec![0, 2, 4, 6]);
+    /// assert_eq!(odds, vec![1, 3, 5, 7]);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn drain_filter<F>(&mut self, f: F) -> DrainFilter<'_, T, F>
+    where
+        F: FnMut(&T) -> bool,
+    {
+        DrainFilter {
+            f,
+            inner: DrainFilterInner {
+                iter: unsafe { self.map.table.iter() },
+                table: &mut self.map.table,
+            },
+        }
+    }
+
+    /// Clears the set, removing all values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut v = HashSet::new();
+    /// v.insert(1);
+    /// v.clear();
+    /// assert!(v.is_empty());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn clear(&mut self) {
+        self.map.clear()
+    }
+
+    /// Returns a reference to the set's [`BuildHasher`].
+    ///
+    /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// use hashbrown::hash_map::DefaultHashBuilder;
+    ///
+    /// let hasher = DefaultHashBuilder::default();
+    /// let set: HashSet<i32> = HashSet::with_hasher(hasher);
+    /// let hasher: &DefaultHashBuilder = set.hasher();
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn hasher(&self) -> &S {
+        self.map.hasher()
+    }
+}
+
+impl<T, S> HashSet<T, S>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+{
+    /// Reserves capacity for at least `additional` more elements to be inserted
+    /// in the `HashSet`. The collection may reserve more space to avoid
+    /// frequent reallocations.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the new allocation size overflows `usize`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let mut set: HashSet<i32> = HashSet::new();
+    /// set.reserve(10);
+    /// assert!(set.capacity() >= 10);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn reserve(&mut self, additional: usize) {
+        self.map.reserve(additional)
+    }
+
+    /// Tries to reserve capacity for at least `additional` more elements to be inserted
+    /// in the given `HashSet<T, S>`. The collection may reserve more space to avoid
+    /// frequent reallocations.
+    ///
+    /// # Errors
+    ///
+    /// If the capacity overflows, or the allocator reports a failure, then an error
+    /// is returned.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let mut set: HashSet<i32> = HashSet::new();
+    /// set.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
+        self.map.try_reserve(additional)
+    }
+
+    /// Shrinks the capacity of the set as much as possible. It will drop
+    /// down as much as possible while maintaining the internal rules
+    /// and possibly leaving some space in accordance with the resize policy.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set = HashSet::with_capacity(100);
+    /// set.insert(1);
+    /// set.insert(2);
+    /// assert!(set.capacity() >= 100);
+    /// set.shrink_to_fit();
+    /// assert!(set.capacity() >= 2);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn shrink_to_fit(&mut self) {
+        self.map.shrink_to_fit()
+    }
+
+    /// Shrinks the capacity of the set with a lower limit. It will drop
+    /// down no lower than the supplied limit while maintaining the internal rules
+    /// and possibly leaving some space in accordance with the resize policy.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the current capacity is smaller than the supplied
+    /// minimum capacity.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set = HashSet::with_capacity(100);
+    /// set.insert(1);
+    /// set.insert(2);
+    /// assert!(set.capacity() >= 100);
+    /// set.shrink_to(10);
+    /// assert!(set.capacity() >= 10);
+    /// set.shrink_to(0);
+    /// assert!(set.capacity() >= 2);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn shrink_to(&mut self, min_capacity: usize) {
+        self.map.shrink_to(min_capacity)
+    }
+
+    /// Visits the values representing the difference,
+    /// i.e., the values that are in `self` but not in `other`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let a: HashSet<_> = [1, 2, 3].iter().cloned().collect();
+    /// let b: HashSet<_> = [4, 2, 3, 4].iter().cloned().collect();
+    ///
+    /// // Can be seen as `a - b`.
+    /// for x in a.difference(&b) {
+    ///     println!("{}", x); // Print 1
+    /// }
+    ///
+    /// let diff: HashSet<_> = a.difference(&b).collect();
+    /// assert_eq!(diff, [1].iter().collect());
+    ///
+    /// // Note that difference is not symmetric,
+    /// // and `b - a` means something else:
+    /// let diff: HashSet<_> = b.difference(&a).collect();
+    /// assert_eq!(diff, [4].iter().collect());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn difference<'a>(&'a self, other: &'a Self) -> Difference<'a, T, S> {
+        Difference {
+            iter: self.iter(),
+            other,
+        }
+    }
+
+    /// Visits the values representing the symmetric difference,
+    /// i.e., the values that are in `self` or in `other` but not in both.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let a: HashSet<_> = [1, 2, 3].iter().cloned().collect();
+    /// let b: HashSet<_> = [4, 2, 3, 4].iter().cloned().collect();
+    ///
+    /// // Print 1, 4 in arbitrary order.
+    /// for x in a.symmetric_difference(&b) {
+    ///     println!("{}", x);
+    /// }
+    ///
+    /// let diff1: HashSet<_> = a.symmetric_difference(&b).collect();
+    /// let diff2: HashSet<_> = b.symmetric_difference(&a).collect();
+    ///
+    /// assert_eq!(diff1, diff2);
+    /// assert_eq!(diff1, [1, 4].iter().collect());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn symmetric_difference<'a>(&'a self, other: &'a Self) -> SymmetricDifference<'a, T, S> {
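+        // The two directed differences are disjoint, so chaining them
+        // yields each element of the symmetric difference exactly once.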
+        SymmetricDifference {
+            iter: self.difference(other).chain(other.difference(self)),
+        }
+    }
+
+    /// Visits the values representing the intersection,
+    /// i.e., the values that are both in `self` and `other`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let a: HashSet<_> = [1, 2, 3].iter().cloned().collect();
+    /// let b: HashSet<_> = [4, 2, 3, 4].iter().cloned().collect();
+    ///
+    /// // Print 2, 3 in arbitrary order.
+    /// for x in a.intersection(&b) {
+    ///     println!("{}", x);
+    /// }
+    ///
+    /// let intersection: HashSet<_> = a.intersection(&b).collect();
+    /// assert_eq!(intersection, [2, 3].iter().collect());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn intersection<'a>(&'a self, other: &'a Self) -> Intersection<'a, T, S> {
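+        // Iterate over the smaller set and probe the larger one, so the
+        // number of `contains` lookups is minimized.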
+        let (smaller, larger) = if self.len() <= other.len() {
+            (self, other)
+        } else {
+            (other, self)
+        };
+        Intersection {
+            iter: smaller.iter(),
+            other: larger,
+        }
+    }
+
+    /// Visits the values representing the union,
+    /// i.e., all the values in `self` or `other`, without duplicates.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let a: HashSet<_> = [1, 2, 3].iter().cloned().collect();
+    /// let b: HashSet<_> = [4, 2, 3, 4].iter().cloned().collect();
+    ///
+    /// // Print 1, 2, 3, 4 in arbitrary order.
+    /// for x in a.union(&b) {
+    ///     println!("{}", x);
+    /// }
+    ///
+    /// let union: HashSet<_> = a.union(&b).collect();
+    /// assert_eq!(union, [1, 2, 3, 4].iter().collect());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn union<'a>(&'a self, other: &'a Self) -> Union<'a, T, S> {
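+        // Yield the larger set in full (no lookups needed), then the
+        // elements of the smaller set that are missing from the larger one.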
+        let (smaller, larger) = if self.len() >= other.len() {
+            (self, other)
+        } else {
+            (other, self)
+        };
+        Union {
+            iter: larger.iter().chain(smaller.difference(larger)),
+        }
+    }
+
+    /// Returns `true` if the set contains a value.
+    ///
+    /// The value may be any borrowed form of the set's value type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the value type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let set: HashSet<_> = [1, 2, 3].iter().cloned().collect();
+    /// assert_eq!(set.contains(&1), true);
+    /// assert_eq!(set.contains(&4), false);
+    /// ```
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn contains<Q: ?Sized>(&self, value: &Q) -> bool
+    where
+        T: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        self.map.contains_key(value)
+    }
+
+    /// Returns a reference to the value in the set, if any, that is equal to the given value.
+    ///
+    /// The value may be any borrowed form of the set's value type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the value type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let set: HashSet<_> = [1, 2, 3].iter().cloned().collect();
+    /// assert_eq!(set.get(&2), Some(&2));
+    /// assert_eq!(set.get(&4), None);
+    /// ```
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get<Q: ?Sized>(&self, value: &Q) -> Option<&T>
+    where
+        T: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.map.get_key_value(value) {
+            Some((k, _)) => Some(k),
+            None => None,
+        }
+    }
+
+    /// Inserts the given `value` into the set if it is not present, then
+    /// returns a reference to the value in the set.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set: HashSet<_> = [1, 2, 3].iter().cloned().collect();
+    /// assert_eq!(set.len(), 3);
+    /// assert_eq!(set.get_or_insert(2), &2);
+    /// assert_eq!(set.get_or_insert(100), &100);
+    /// assert_eq!(set.len(), 4); // 100 was inserted
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get_or_insert(&mut self, value: T) -> &T {
+        // Although the raw entry gives us `&mut T`, we only return `&T` to be consistent with
+        // `get`. Key mutation is "raw" because you're not supposed to affect `Eq` or `Hash`.
+        self.map
+            .raw_entry_mut()
+            .from_key(&value)
+            .or_insert(value, ())
+            .0
+    }
+
+    /// Inserts an owned copy of the given `value` into the set if it is not
+    /// present, then returns a reference to the value in the set.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set: HashSet<String> = ["cat", "dog", "horse"]
+    ///     .iter().map(|&pet| pet.to_owned()).collect();
+    ///
+    /// assert_eq!(set.len(), 3);
+    /// for &pet in &["cat", "dog", "fish"] {
+    ///     let value = set.get_or_insert_owned(pet);
+    ///     assert_eq!(value, pet);
+    /// }
+    /// assert_eq!(set.len(), 4); // a new "fish" was inserted
+    /// ```
+    #[inline]
+    pub fn get_or_insert_owned<Q: ?Sized>(&mut self, value: &Q) -> &T
+    where
+        T: Borrow<Q>,
+        Q: Hash + Eq + ToOwned<Owned = T>,
+    {
+        // Although the raw entry gives us `&mut T`, we only return `&T` to be consistent with
+        // `get`. Key mutation is "raw" because you're not supposed to affect `Eq` or `Hash`.
+        self.map
+            .raw_entry_mut()
+            .from_key(value)
+            .or_insert_with(|| (value.to_owned(), ()))
+            .0
+    }
+
+    /// Inserts a value computed from `f` into the set if the given `value` is
+    /// not present, then returns a reference to the value in the set.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set: HashSet<String> = ["cat", "dog", "horse"]
+    ///     .iter().map(|&pet| pet.to_owned()).collect();
+    ///
+    /// assert_eq!(set.len(), 3);
+    /// for &pet in &["cat", "dog", "fish"] {
+    ///     let value = set.get_or_insert_with(pet, str::to_owned);
+    ///     assert_eq!(value, pet);
+    /// }
+    /// assert_eq!(set.len(), 4); // a new "fish" was inserted
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get_or_insert_with<Q: ?Sized, F>(&mut self, value: &Q, f: F) -> &T
+    where
+        T: Borrow<Q>,
+        Q: Hash + Eq,
+        F: FnOnce(&Q) -> T,
+    {
+        // Although the raw entry gives us `&mut T`, we only return `&T` to be consistent with
+        // `get`. Key mutation is "raw" because you're not supposed to affect `Eq` or `Hash`.
+        self.map
+            .raw_entry_mut()
+            .from_key(value)
+            .or_insert_with(|| (f(value), ()))
+            .0
+    }
+
+    /// Returns `true` if `self` has no elements in common with `other`.
+    /// This is equivalent to checking for an empty intersection.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let a: HashSet<_> = [1, 2, 3].iter().cloned().collect();
+    /// let mut b = HashSet::new();
+    ///
+    /// assert_eq!(a.is_disjoint(&b), true);
+    /// b.insert(4);
+    /// assert_eq!(a.is_disjoint(&b), true);
+    /// b.insert(1);
+    /// assert_eq!(a.is_disjoint(&b), false);
+    /// ```
+    pub fn is_disjoint(&self, other: &Self) -> bool {
+        self.iter().all(|v| !other.contains(v))
+    }
+
+    /// Returns `true` if the set is a subset of another,
+    /// i.e., `other` contains at least all the values in `self`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let sup: HashSet<_> = [1, 2, 3].iter().cloned().collect();
+    /// let mut set = HashSet::new();
+    ///
+    /// assert_eq!(set.is_subset(&sup), true);
+    /// set.insert(2);
+    /// assert_eq!(set.is_subset(&sup), true);
+    /// set.insert(4);
+    /// assert_eq!(set.is_subset(&sup), false);
+    /// ```
+    pub fn is_subset(&self, other: &Self) -> bool {
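+        // A set larger than `other` cannot be its subset, so the length
+        // check short-circuits the element scan.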
+        self.len() <= other.len() && self.iter().all(|v| other.contains(v))
+    }
+
+    /// Returns `true` if the set is a superset of another,
+    /// i.e., `self` contains at least all the values in `other`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let sub: HashSet<_> = [1, 2].iter().cloned().collect();
+    /// let mut set = HashSet::new();
+    ///
+    /// assert_eq!(set.is_superset(&sub), false);
+    ///
+    /// set.insert(0);
+    /// set.insert(1);
+    /// assert_eq!(set.is_superset(&sub), false);
+    ///
+    /// set.insert(2);
+    /// assert_eq!(set.is_superset(&sub), true);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn is_superset(&self, other: &Self) -> bool {
+        other.is_subset(self)
+    }
+
+    /// Adds a value to the set.
+    ///
+    /// If the set did not have this value present, `true` is returned.
+    ///
+    /// If the set did have this value present, `false` is returned.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set = HashSet::new();
+    ///
+    /// assert_eq!(set.insert(2), true);
+    /// assert_eq!(set.insert(2), false);
+    /// assert_eq!(set.len(), 1);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(&mut self, value: T) -> bool {
+        self.map.insert(value, ()).is_none()
+    }
+
+    /// Adds a value to the set, replacing the existing value, if any, that is equal to the given
+    /// one. Returns the replaced value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set = HashSet::new();
+    /// set.insert(Vec::<i32>::new());
+    ///
+    /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 0);
+    /// set.replace(Vec::with_capacity(10));
+    /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 10);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn replace(&mut self, value: T) -> Option<T> {
+        match self.map.entry(value) {
+            map::Entry::Occupied(occupied) => Some(occupied.replace_key()),
+            map::Entry::Vacant(vacant) => {
+                vacant.insert(());
+                None
+            }
+        }
+    }
+
+    /// Removes a value from the set. Returns whether the value was
+    /// present in the set.
+    ///
+    /// The value may be any borrowed form of the set's value type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the value type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set = HashSet::new();
+    ///
+    /// set.insert(2);
+    /// assert_eq!(set.remove(&2), true);
+    /// assert_eq!(set.remove(&2), false);
+    /// ```
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove<Q: ?Sized>(&mut self, value: &Q) -> bool
+    where
+        T: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        self.map.remove(value).is_some()
+    }
+
+    /// Removes and returns the value in the set, if any, that is equal to the given one.
+    ///
+    /// The value may be any borrowed form of the set's value type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the value type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set: HashSet<_> = [1, 2, 3].iter().cloned().collect();
+    /// assert_eq!(set.take(&2), Some(2));
+    /// assert_eq!(set.take(&2), None);
+    /// ```
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn take<Q: ?Sized>(&mut self, value: &Q) -> Option<T>
+    where
+        T: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.map.remove_entry(value) {
+            Some((k, _)) => Some(k),
+            None => None,
+        }
+    }
+}
+
+impl<T, S> PartialEq for HashSet<T, S>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+{
+    fn eq(&self, other: &Self) -> bool {
+        if self.len() != other.len() {
+            return false;
+        }
+
+        self.iter().all(|key| other.contains(key))
+    }
+}
+
+impl<T, S> Eq for HashSet<T, S>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+{
+}
+
+impl<T, S> fmt::Debug for HashSet<T, S>
+where
+    T: Eq + Hash + fmt::Debug,
+    S: BuildHasher,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_set().entries(self.iter()).finish()
+    }
+}
+
+impl<T, S> FromIterator<T> for HashSet<T, S>
+where
+    T: Eq + Hash,
+    S: BuildHasher + Default,
+{
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
+        let mut set = Self::with_hasher(Default::default());
+        set.extend(iter);
+        set
+    }
+}
+
+impl<T, S> Extend<T> for HashSet<T, S>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+{
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
+        self.map.extend(iter.into_iter().map(|k| (k, ())));
+    }
+
+    #[inline]
+    #[cfg(feature = "nightly")]
+    fn extend_one(&mut self, k: T) {
+        self.map.insert(k, ());
+    }
+
+    #[inline]
+    #[cfg(feature = "nightly")]
+    fn extend_reserve(&mut self, additional: usize) {
+        Extend::<(T, ())>::extend_reserve(&mut self.map, additional);
+    }
+}
+
+impl<'a, T, S> Extend<&'a T> for HashSet<T, S>
+where
+    T: 'a + Eq + Hash + Copy,
+    S: BuildHasher,
+{
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
+        self.extend(iter.into_iter().cloned());
+    }
+
+    #[inline]
+    #[cfg(feature = "nightly")]
+    fn extend_one(&mut self, k: &'a T) {
+        self.map.insert(*k, ());
+    }
+
+    #[inline]
+    #[cfg(feature = "nightly")]
+    fn extend_reserve(&mut self, additional: usize) {
+        Extend::<(T, ())>::extend_reserve(&mut self.map, additional);
+    }
+}
+
+impl<T, S> Default for HashSet<T, S>
+where
+    S: Default,
+{
+    /// Creates an empty `HashSet<T, S>` with the `Default` value for the hasher.
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn default() -> Self {
+        Self {
+            map: HashMap::default(),
+        }
+    }
+}
+
+impl<T, S> BitOr<&HashSet<T, S>> for &HashSet<T, S>
+where
+    T: Eq + Hash + Clone,
+    S: BuildHasher + Default,
+{
+    type Output = HashSet<T, S>;
+
+    /// Returns the union of `self` and `rhs` as a new `HashSet<T, S>`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
+    /// let b: HashSet<_> = vec![3, 4, 5].into_iter().collect();
+    ///
+    /// let set = &a | &b;
+    ///
+    /// let mut i = 0;
+    /// let expected = [1, 2, 3, 4, 5];
+    /// for x in &set {
+    ///     assert!(expected.contains(x));
+    ///     i += 1;
+    /// }
+    /// assert_eq!(i, expected.len());
+    /// ```
+    fn bitor(self, rhs: &HashSet<T, S>) -> HashSet<T, S> {
+        self.union(rhs).cloned().collect()
+    }
+}
+
+impl<T, S> BitAnd<&HashSet<T, S>> for &HashSet<T, S>
+where
+    T: Eq + Hash + Clone,
+    S: BuildHasher + Default,
+{
+    type Output = HashSet<T, S>;
+
+    /// Returns the intersection of `self` and `rhs` as a new `HashSet<T, S>`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
+    /// let b: HashSet<_> = vec![2, 3, 4].into_iter().collect();
+    ///
+    /// let set = &a & &b;
+    ///
+    /// let mut i = 0;
+    /// let expected = [2, 3];
+    /// for x in &set {
+    ///     assert!(expected.contains(x));
+    ///     i += 1;
+    /// }
+    /// assert_eq!(i, expected.len());
+    /// ```
+    fn bitand(self, rhs: &HashSet<T, S>) -> HashSet<T, S> {
+        self.intersection(rhs).cloned().collect()
+    }
+}
+
+impl<T, S> BitXor<&HashSet<T, S>> for &HashSet<T, S>
+where
+    T: Eq + Hash + Clone,
+    S: BuildHasher + Default,
+{
+    type Output = HashSet<T, S>;
+
+    /// Returns the symmetric difference of `self` and `rhs` as a new `HashSet<T, S>`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
+    /// let b: HashSet<_> = vec![3, 4, 5].into_iter().collect();
+    ///
+    /// let set = &a ^ &b;
+    ///
+    /// let mut i = 0;
+    /// let expected = [1, 2, 4, 5];
+    /// for x in &set {
+    ///     assert!(expected.contains(x));
+    ///     i += 1;
+    /// }
+    /// assert_eq!(i, expected.len());
+    /// ```
+    fn bitxor(self, rhs: &HashSet<T, S>) -> HashSet<T, S> {
+        self.symmetric_difference(rhs).cloned().collect()
+    }
+}
+
+impl<T, S> Sub<&HashSet<T, S>> for &HashSet<T, S>
+where
+    T: Eq + Hash + Clone,
+    S: BuildHasher + Default,
+{
+    type Output = HashSet<T, S>;
+
+    /// Returns the difference of `self` and `rhs` as a new `HashSet<T, S>`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
+    /// let b: HashSet<_> = vec![3, 4, 5].into_iter().collect();
+    ///
+    /// let set = &a - &b;
+    ///
+    /// let mut i = 0;
+    /// let expected = [1, 2];
+    /// for x in &set {
+    ///     assert!(expected.contains(x));
+    ///     i += 1;
+    /// }
+    /// assert_eq!(i, expected.len());
+    /// ```
+    fn sub(self, rhs: &HashSet<T, S>) -> HashSet<T, S> {
+        self.difference(rhs).cloned().collect()
+    }
+}
+
+/// An iterator over the items of a `HashSet`.
+///
+/// This `struct` is created by the [`iter`] method on [`HashSet`].
+/// See its documentation for more.
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`iter`]: struct.HashSet.html#method.iter
+pub struct Iter<'a, K> {
+    iter: Keys<'a, K, ()>,
+}
+
+/// An owning iterator over the items of a `HashSet`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`HashSet`]
+/// (provided by the `IntoIterator` trait). See its documentation for more.
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`into_iter`]: struct.HashSet.html#method.into_iter
+pub struct IntoIter<K> {
+    iter: map::IntoIter<K, ()>,
+}
+
+/// A draining iterator over the items of a `HashSet`.
+///
+/// This `struct` is created by the [`drain`] method on [`HashSet`].
+/// See its documentation for more.
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`drain`]: struct.HashSet.html#method.drain
+pub struct Drain<'a, K> {
+    iter: map::Drain<'a, K, ()>,
+}
+
+/// A draining iterator over entries of a `HashSet` which satisfy the predicate `f`.
+///
+/// This `struct` is created by the [`drain_filter`] method on [`HashSet`]. See its
+/// documentation for more.
+///
+/// [`drain_filter`]: struct.HashSet.html#method.drain_filter
+/// [`HashSet`]: struct.HashSet.html
+pub struct DrainFilter<'a, K, F>
+where
+    F: FnMut(&K) -> bool,
+{
+    f: F,
+    inner: DrainFilterInner<'a, K, ()>,
+}
+
+/// A lazy iterator producing elements in the intersection of `HashSet`s.
+///
+/// This `struct` is created by the [`intersection`] method on [`HashSet`].
+/// See its documentation for more.
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`intersection`]: struct.HashSet.html#method.intersection
+pub struct Intersection<'a, T, S> {
+    // iterator of the first set
+    iter: Iter<'a, T>,
+    // the second set
+    other: &'a HashSet<T, S>,
+}
+
+/// A lazy iterator producing elements in the difference of `HashSet`s.
+///
+/// This `struct` is created by the [`difference`] method on [`HashSet`].
+/// See its documentation for more.
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`difference`]: struct.HashSet.html#method.difference
+pub struct Difference<'a, T, S> {
+    // iterator of the first set
+    iter: Iter<'a, T>,
+    // the second set
+    other: &'a HashSet<T, S>,
+}
+
+/// A lazy iterator producing elements in the symmetric difference of `HashSet`s.
+///
+/// This `struct` is created by the [`symmetric_difference`] method on
+/// [`HashSet`]. See its documentation for more.
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`symmetric_difference`]: struct.HashSet.html#method.symmetric_difference
+pub struct SymmetricDifference<'a, T, S> {
+    iter: Chain<Difference<'a, T, S>, Difference<'a, T, S>>,
+}
+
+/// A lazy iterator producing elements in the union of `HashSet`s.
+///
+/// This `struct` is created by the [`union`] method on [`HashSet`].
+/// See its documentation for more.
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`union`]: struct.HashSet.html#method.union
+pub struct Union<'a, T, S> {
+    iter: Chain<Iter<'a, T>, Difference<'a, T, S>>,
+}
+
+impl<'a, T, S> IntoIterator for &'a HashSet<T, S> {
+    type Item = &'a T;
+    type IntoIter = Iter<'a, T>;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn into_iter(self) -> Iter<'a, T> {
+        self.iter()
+    }
+}
+
+impl<T, S> IntoIterator for HashSet<T, S> {
+    type Item = T;
+    type IntoIter = IntoIter<T>;
+
+    /// Creates a consuming iterator, that is, one that moves each value out
+    /// of the set in arbitrary order. The set cannot be used after calling
+    /// this.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let mut set = HashSet::new();
+    /// set.insert("a".to_string());
+    /// set.insert("b".to_string());
+    ///
+    /// // Not possible to collect to a Vec<String> with a regular `.iter()`.
+    /// let v: Vec<String> = set.into_iter().collect();
+    ///
+    /// // Will print in an arbitrary order.
+    /// for x in &v {
+    ///     println!("{}", x);
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn into_iter(self) -> IntoIter<T> {
+        IntoIter {
+            iter: self.map.into_iter(),
+        }
+    }
+}
+
+impl<K> Clone for Iter<'_, K> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Iter {
+            iter: self.iter.clone(),
+        }
+    }
+}
+impl<'a, K> Iterator for Iter<'a, K> {
+    type Item = &'a K;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<&'a K> {
+        self.iter.next()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+impl<'a, K> ExactSizeIterator for Iter<'a, K> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.iter.len()
+    }
+}
+impl<K> FusedIterator for Iter<'_, K> {}
+
+impl<K: fmt::Debug> fmt::Debug for Iter<'_, K> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+impl<K> Iterator for IntoIter<K> {
+    type Item = K;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<K> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.iter.next() {
+            Some((k, _)) => Some(k),
+            None => None,
+        }
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+impl<K> ExactSizeIterator for IntoIter<K> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.iter.len()
+    }
+}
+impl<K> FusedIterator for IntoIter<K> {}
+
+impl<K: fmt::Debug> fmt::Debug for IntoIter<K> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let entries_iter = self.iter.iter().map(|(k, _)| k);
+        f.debug_list().entries(entries_iter).finish()
+    }
+}
+
+impl<K> Iterator for Drain<'_, K> {
+    type Item = K;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<K> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.iter.next() {
+            Some((k, _)) => Some(k),
+            None => None,
+        }
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+impl<K> ExactSizeIterator for Drain<'_, K> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.iter.len()
+    }
+}
+impl<K> FusedIterator for Drain<'_, K> {}
+
+impl<K: fmt::Debug> fmt::Debug for Drain<'_, K> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let entries_iter = self.iter.iter().map(|(k, _)| k);
+        f.debug_list().entries(entries_iter).finish()
+    }
+}
+
+impl<'a, K, F> Drop for DrainFilter<'a, K, F>
+where
+    F: FnMut(&K) -> bool,
+{
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drop(&mut self) {
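+        // Keep draining the remaining matching items. If dropping `item`
+        // panics, the `ConsumeAllOnDrop` guard finishes consuming the
+        // iterator during unwinding so the table stays consistent; on the
+        // normal path the guard is forgotten and does nothing.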
+        while let Some(item) = self.next() {
+            let guard = ConsumeAllOnDrop(self);
+            drop(item);
+            mem::forget(guard);
+        }
+    }
+}
+
+impl<K, F> Iterator for DrainFilter<'_, K, F>
+where
+    F: FnMut(&K) -> bool,
+{
+    type Item = K;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<Self::Item> {
+        let f = &mut self.f;
+        let (k, _) = self.inner.next(&mut |k, _| f(k))?;
+        Some(k)
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
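+        // The predicate may reject every remaining item, so only the upper
+        // bound of the underlying iterator carries over.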
+        (0, self.inner.iter.size_hint().1)
+    }
+}
+
+impl<K, F> FusedIterator for DrainFilter<'_, K, F> where F: FnMut(&K) -> bool {}
+
+impl<T, S> Clone for Intersection<'_, T, S> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Intersection {
+            iter: self.iter.clone(),
+            ..*self
+        }
+    }
+}
+
+impl<'a, T, S> Iterator for Intersection<'a, T, S>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+{
+    type Item = &'a T;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<&'a T> {
+        loop {
+            let elt = self.iter.next()?;
+            if self.other.contains(elt) {
+                return Some(elt);
+            }
+        }
+    }
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
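+        // None of the remaining elements is guaranteed to be in the other
+        // set, so the lower bound is 0.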
+        let (_, upper) = self.iter.size_hint();
+        (0, upper)
+    }
+}
+
+impl<T, S> fmt::Debug for Intersection<'_, T, S>
+where
+    T: fmt::Debug + Eq + Hash,
+    S: BuildHasher,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+impl<T, S> FusedIterator for Intersection<'_, T, S>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+{
+}
+
+impl<T, S> Clone for Difference<'_, T, S> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Difference {
+            iter: self.iter.clone(),
+            ..*self
+        }
+    }
+}
+
+impl<'a, T, S> Iterator for Difference<'a, T, S>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+{
+    type Item = &'a T;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<&'a T> {
+        loop {
+            let elt = self.iter.next()?;
+            if !self.other.contains(elt) {
+                return Some(elt);
+            }
+        }
+    }
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let (_, upper) = self.iter.size_hint();
+        (0, upper)
+    }
+}
+
+impl<T, S> FusedIterator for Difference<'_, T, S>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+{
+}
+
+impl<T, S> fmt::Debug for Difference<'_, T, S>
+where
+    T: fmt::Debug + Eq + Hash,
+    S: BuildHasher,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+impl<T, S> Clone for SymmetricDifference<'_, T, S> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        SymmetricDifference {
+            iter: self.iter.clone(),
+        }
+    }
+}
+
+impl<'a, T, S> Iterator for SymmetricDifference<'a, T, S>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+{
+    type Item = &'a T;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<&'a T> {
+        self.iter.next()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+
+impl<T, S> FusedIterator for SymmetricDifference<'_, T, S>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+{
+}
+
+impl<T, S> fmt::Debug for SymmetricDifference<'_, T, S>
+where
+    T: fmt::Debug + Eq + Hash,
+    S: BuildHasher,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+impl<T, S> Clone for Union<'_, T, S> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Union {
+            iter: self.iter.clone(),
+        }
+    }
+}
+
+impl<T, S> FusedIterator for Union<'_, T, S>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+{
+}
+
+impl<T, S> fmt::Debug for Union<'_, T, S>
+where
+    T: fmt::Debug + Eq + Hash,
+    S: BuildHasher,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+impl<'a, T, S> Iterator for Union<'a, T, S>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+{
+    type Item = &'a T;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<&'a T> {
+        self.iter.next()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+
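+// Compile-time covariance check: these functions are never called, but they
+// only type-check if each iterator type is covariant, i.e. a value borrowing
+// `'static` data coerces to one with a shorter lifetime.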
+#[allow(dead_code)]
+fn assert_covariance() {
+    fn set<'new>(v: HashSet<&'static str>) -> HashSet<&'new str> {
+        v
+    }
+    fn iter<'a, 'new>(v: Iter<'a, &'static str>) -> Iter<'a, &'new str> {
+        v
+    }
+    fn into_iter<'new>(v: IntoIter<&'static str>) -> IntoIter<&'new str> {
+        v
+    }
+    fn difference<'a, 'new>(
+        v: Difference<'a, &'static str, DefaultHashBuilder>,
+    ) -> Difference<'a, &'new str, DefaultHashBuilder> {
+        v
+    }
+    fn symmetric_difference<'a, 'new>(
+        v: SymmetricDifference<'a, &'static str, DefaultHashBuilder>,
+    ) -> SymmetricDifference<'a, &'new str, DefaultHashBuilder> {
+        v
+    }
+    fn intersection<'a, 'new>(
+        v: Intersection<'a, &'static str, DefaultHashBuilder>,
+    ) -> Intersection<'a, &'new str, DefaultHashBuilder> {
+        v
+    }
+    fn union<'a, 'new>(
+        v: Union<'a, &'static str, DefaultHashBuilder>,
+    ) -> Union<'a, &'new str, DefaultHashBuilder> {
+        v
+    }
+    fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> {
+        d
+    }
+}
+
+#[cfg(test)]
+mod test_set {
+    use super::super::map::DefaultHashBuilder;
+    use super::HashSet;
+    use std::vec::Vec;
+
+    #[test]
+    fn test_zero_capacities() {
+        type HS = HashSet<i32>;
+
+        let s = HS::new();
+        assert_eq!(s.capacity(), 0);
+
+        let s = HS::default();
+        assert_eq!(s.capacity(), 0);
+
+        let s = HS::with_hasher(DefaultHashBuilder::default());
+        assert_eq!(s.capacity(), 0);
+
+        let s = HS::with_capacity(0);
+        assert_eq!(s.capacity(), 0);
+
+        let s = HS::with_capacity_and_hasher(0, DefaultHashBuilder::default());
+        assert_eq!(s.capacity(), 0);
+
+        let mut s = HS::new();
+        s.insert(1);
+        s.insert(2);
+        s.remove(&1);
+        s.remove(&2);
+        s.shrink_to_fit();
+        assert_eq!(s.capacity(), 0);
+
+        let mut s = HS::new();
+        s.reserve(0);
+        assert_eq!(s.capacity(), 0);
+    }
+
+    #[test]
+    fn test_disjoint() {
+        let mut xs = HashSet::new();
+        let mut ys = HashSet::new();
+        assert!(xs.is_disjoint(&ys));
+        assert!(ys.is_disjoint(&xs));
+        assert!(xs.insert(5));
+        assert!(ys.insert(11));
+        assert!(xs.is_disjoint(&ys));
+        assert!(ys.is_disjoint(&xs));
+        assert!(xs.insert(7));
+        assert!(xs.insert(19));
+        assert!(xs.insert(4));
+        assert!(ys.insert(2));
+        assert!(ys.insert(-11));
+        assert!(xs.is_disjoint(&ys));
+        assert!(ys.is_disjoint(&xs));
+        assert!(ys.insert(7));
+        assert!(!xs.is_disjoint(&ys));
+        assert!(!ys.is_disjoint(&xs));
+    }
+
+    #[test]
+    fn test_subset_and_superset() {
+        let mut a = HashSet::new();
+        assert!(a.insert(0));
+        assert!(a.insert(5));
+        assert!(a.insert(11));
+        assert!(a.insert(7));
+
+        let mut b = HashSet::new();
+        assert!(b.insert(0));
+        assert!(b.insert(7));
+        assert!(b.insert(19));
+        assert!(b.insert(250));
+        assert!(b.insert(11));
+        assert!(b.insert(200));
+
+        assert!(!a.is_subset(&b));
+        assert!(!a.is_superset(&b));
+        assert!(!b.is_subset(&a));
+        assert!(!b.is_superset(&a));
+
+        assert!(b.insert(5));
+
+        assert!(a.is_subset(&b));
+        assert!(!a.is_superset(&b));
+        assert!(!b.is_subset(&a));
+        assert!(b.is_superset(&a));
+    }
+
+    #[test]
+    fn test_iterate() {
+        let mut a = HashSet::new();
+        for i in 0..32 {
+            assert!(a.insert(i));
+        }
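+        // Each element sets a distinct bit, so `observed` is all ones exactly
+        // when iteration yielded every inserted element.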
+        let mut observed: u32 = 0;
+        for k in &a {
+            observed |= 1 << *k;
+        }
+        assert_eq!(observed, 0xFFFF_FFFF);
+    }
+
+    #[test]
+    fn test_intersection() {
+        let mut a = HashSet::new();
+        let mut b = HashSet::new();
+
+        assert!(a.insert(11));
+        assert!(a.insert(1));
+        assert!(a.insert(3));
+        assert!(a.insert(77));
+        assert!(a.insert(103));
+        assert!(a.insert(5));
+        assert!(a.insert(-5));
+
+        assert!(b.insert(2));
+        assert!(b.insert(11));
+        assert!(b.insert(77));
+        assert!(b.insert(-9));
+        assert!(b.insert(-42));
+        assert!(b.insert(5));
+        assert!(b.insert(3));
+
+        let mut i = 0;
+        let expected = [3, 5, 11, 77];
+        for x in a.intersection(&b) {
+            assert!(expected.contains(x));
+            i += 1
+        }
+        assert_eq!(i, expected.len());
+    }
+
+    #[test]
+    fn test_difference() {
+        let mut a = HashSet::new();
+        let mut b = HashSet::new();
+
+        assert!(a.insert(1));
+        assert!(a.insert(3));
+        assert!(a.insert(5));
+        assert!(a.insert(9));
+        assert!(a.insert(11));
+
+        assert!(b.insert(3));
+        assert!(b.insert(9));
+
+        let mut i = 0;
+        let expected = [1, 5, 11];
+        for x in a.difference(&b) {
+            assert!(expected.contains(x));
+            i += 1
+        }
+        assert_eq!(i, expected.len());
+    }
+
+    #[test]
+    fn test_symmetric_difference() {
+        let mut a = HashSet::new();
+        let mut b = HashSet::new();
+
+        assert!(a.insert(1));
+        assert!(a.insert(3));
+        assert!(a.insert(5));
+        assert!(a.insert(9));
+        assert!(a.insert(11));
+
+        assert!(b.insert(-2));
+        assert!(b.insert(3));
+        assert!(b.insert(9));
+        assert!(b.insert(14));
+        assert!(b.insert(22));
+
+        let mut i = 0;
+        let expected = [-2, 1, 5, 11, 14, 22];
+        for x in a.symmetric_difference(&b) {
+            assert!(expected.contains(x));
+            i += 1
+        }
+        assert_eq!(i, expected.len());
+    }
+
+    #[test]
+    fn test_union() {
+        let mut a = HashSet::new();
+        let mut b = HashSet::new();
+
+        assert!(a.insert(1));
+        assert!(a.insert(3));
+        assert!(a.insert(5));
+        assert!(a.insert(9));
+        assert!(a.insert(11));
+        assert!(a.insert(16));
+        assert!(a.insert(19));
+        assert!(a.insert(24));
+
+        assert!(b.insert(-2));
+        assert!(b.insert(1));
+        assert!(b.insert(5));
+        assert!(b.insert(9));
+        assert!(b.insert(13));
+        assert!(b.insert(19));
+
+        let mut i = 0;
+        let expected = [-2, 1, 3, 5, 9, 11, 13, 16, 19, 24];
+        for x in a.union(&b) {
+            assert!(expected.contains(x));
+            i += 1
+        }
+        assert_eq!(i, expected.len());
+    }
+
+    #[test]
+    fn test_from_iter() {
+        let xs = [1, 2, 2, 3, 4, 5, 6, 7, 8, 9];
+
+        let set: HashSet<_> = xs.iter().cloned().collect();
+
+        for x in &xs {
+            assert!(set.contains(x));
+        }
+
+        assert_eq!(set.iter().len(), xs.len() - 1);
+    }
+
+    #[test]
+    fn test_move_iter() {
+        let hs = {
+            let mut hs = HashSet::new();
+
+            hs.insert('a');
+            hs.insert('b');
+
+            hs
+        };
+
+        let v = hs.into_iter().collect::<Vec<char>>();
+        assert!(v == ['a', 'b'] || v == ['b', 'a']);
+    }
+
+    #[test]
+    fn test_eq() {
+        // These constants once happened to expose a bug in insert().
+        // I'm keeping them around to prevent a regression.
+        let mut s1 = HashSet::new();
+
+        s1.insert(1);
+        s1.insert(2);
+        s1.insert(3);
+
+        let mut s2 = HashSet::new();
+
+        s2.insert(1);
+        s2.insert(2);
+
+        assert!(s1 != s2);
+
+        s2.insert(3);
+
+        assert_eq!(s1, s2);
+    }
+
+    #[test]
+    fn test_show() {
+        let mut set = HashSet::new();
+        let empty = HashSet::<i32>::new();
+
+        set.insert(1);
+        set.insert(2);
+
+        let set_str = format!("{:?}", set);
+
+        assert!(set_str == "{1, 2}" || set_str == "{2, 1}");
+        assert_eq!(format!("{:?}", empty), "{}");
+    }
+
+    #[test]
+    fn test_trivial_drain() {
+        let mut s = HashSet::<i32>::new();
+        for _ in s.drain() {}
+        assert!(s.is_empty());
+        drop(s);
+
+        let mut s = HashSet::<i32>::new();
+        drop(s.drain());
+        assert!(s.is_empty());
+    }
+
+    #[test]
+    fn test_drain() {
+        let mut s: HashSet<_> = (1..100).collect();
+
+        // try this a bunch of times to make sure we don't screw up internal state.
+        for _ in 0..20 {
+            assert_eq!(s.len(), 99);
+
+            {
+                let mut last_i = 0;
+                let mut d = s.drain();
+                for (i, x) in d.by_ref().take(50).enumerate() {
+                    last_i = i;
+                    assert!(x != 0);
+                }
+                assert_eq!(last_i, 49);
+            }
+
+            for _ in &s {
+                panic!("s should be empty!");
+            }
+
+            // reset to try again.
+            s.extend(1..100);
+        }
+    }
+
+    #[test]
+    fn test_replace() {
+        use core::hash;
+
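+        // `Foo` hashes and compares by its first field only, so a `Foo` with
+        // a different second field is an "equal" element that `replace` must
+        // swap in, returning the old value.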
+        #[derive(Debug)]
+        struct Foo(&'static str, i32);
+
+        impl PartialEq for Foo {
+            fn eq(&self, other: &Self) -> bool {
+                self.0 == other.0
+            }
+        }
+
+        impl Eq for Foo {}
+
+        impl hash::Hash for Foo {
+            fn hash<H: hash::Hasher>(&self, h: &mut H) {
+                self.0.hash(h);
+            }
+        }
+
+        let mut s = HashSet::new();
+        assert_eq!(s.replace(Foo("a", 1)), None);
+        assert_eq!(s.len(), 1);
+        assert_eq!(s.replace(Foo("a", 2)), Some(Foo("a", 1)));
+        assert_eq!(s.len(), 1);
+
+        let mut it = s.iter();
+        assert_eq!(it.next(), Some(&Foo("a", 2)));
+        assert_eq!(it.next(), None);
+    }
+
+    #[test]
+    fn test_extend_ref() {
+        let mut a = HashSet::new();
+        a.insert(1);
+
+        a.extend(&[2, 3, 4]);
+
+        assert_eq!(a.len(), 4);
+        assert!(a.contains(&1));
+        assert!(a.contains(&2));
+        assert!(a.contains(&3));
+        assert!(a.contains(&4));
+
+        let mut b = HashSet::new();
+        b.insert(5);
+        b.insert(6);
+
+        a.extend(&b);
+
+        assert_eq!(a.len(), 6);
+        assert!(a.contains(&1));
+        assert!(a.contains(&2));
+        assert!(a.contains(&3));
+        assert!(a.contains(&4));
+        assert!(a.contains(&5));
+        assert!(a.contains(&6));
+    }
+
+    #[test]
+    fn test_retain() {
+        let xs = [1, 2, 3, 4, 5, 6];
+        let mut set: HashSet<i32> = xs.iter().cloned().collect();
+        set.retain(|&k| k % 2 == 0);
+        assert_eq!(set.len(), 3);
+        assert!(set.contains(&2));
+        assert!(set.contains(&4));
+        assert!(set.contains(&6));
+    }
+
+    #[test]
+    fn test_drain_filter() {
+        {
+            let mut set: HashSet<i32> = (0..8).collect();
+            let drained = set.drain_filter(|&k| k % 2 == 0);
+            let mut out = drained.collect::<Vec<_>>();
+            out.sort_unstable();
+            assert_eq!(vec![0, 2, 4, 6], out);
+            assert_eq!(set.len(), 4);
+        }
+        {
+            let mut set: HashSet<i32> = (0..8).collect();
+            drop(set.drain_filter(|&k| k % 2 == 0));
+            assert_eq!(set.len(), 4, "Removes matching items on drop");
+        }
+    }
+
+    #[test]
+    fn test_const_with_hasher() {
+        use core::hash::BuildHasher;
+        use std::collections::hash_map::DefaultHasher;
+
+        #[derive(Clone)]
+        struct MyHasher;
+        impl BuildHasher for MyHasher {
+            type Hasher = DefaultHasher;
+
+            fn build_hasher(&self) -> DefaultHasher {
+                DefaultHasher::new()
+            }
+        }
+
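+        // `HashSet::with_hasher` is a `const fn` (as of v0.9.0), so it can
+        // initialize a constant.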
+        const EMPTY_SET: HashSet<u32, MyHasher> = HashSet::with_hasher(MyHasher);
+
+        let mut set = EMPTY_SET.clone();
+        set.insert(19);
+        assert!(set.contains(&19));
+    }
+}
diff --git a/tests/hasher.rs b/tests/hasher.rs
new file mode 100644
index 0000000..e455e3d
--- /dev/null
+++ b/tests/hasher.rs
@@ -0,0 +1,65 @@
+//! Sanity check that alternate hashers work correctly.
+
+#![cfg(not(miri))] // FIXME: takes too long
+
+use hashbrown::HashSet;
+use std::hash::{BuildHasher, BuildHasherDefault, Hasher};
+
+fn check<S: BuildHasher + Default>() {
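+    // Insert a contiguous range of keys, then verify membership for each of
+    // them as well as for values just outside the range.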
+    let range = 0..1_000;
+
+    let mut set = HashSet::<i32, S>::default();
+    set.extend(range.clone());
+
+    assert!(!set.contains(&i32::min_value()));
+    assert!(!set.contains(&(range.start - 1)));
+    for i in range.clone() {
+        assert!(set.contains(&i));
+    }
+    assert!(!set.contains(&range.end));
+    assert!(!set.contains(&i32::max_value()));
+}
+
+/// Use hashbrown's default hasher.
+#[test]
+fn default() {
+    check::<hashbrown::hash_map::DefaultHashBuilder>();
+}
+
+/// Use std's default hasher.
+#[test]
+fn random_state() {
+    check::<std::collections::hash_map::RandomState>();
+}
+
+/// Use a constant 0 hash.
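+/// With a constant hash every element follows the same probe sequence, which
+/// exercises worst-case collision handling.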
+#[test]
+fn zero() {
+    #[derive(Default)]
+    struct ZeroHasher;
+
+    impl Hasher for ZeroHasher {
+        fn finish(&self) -> u64 {
+            0
+        }
+        fn write(&mut self, _: &[u8]) {}
+    }
+
+    check::<BuildHasherDefault<ZeroHasher>>();
+}
+
+/// Use a constant maximum hash.
+#[test]
+fn max() {
+    #[derive(Default)]
+    struct MaxHasher;
+
+    impl Hasher for MaxHasher {
+        fn finish(&self) -> u64 {
+            u64::max_value()
+        }
+        fn write(&mut self, _: &[u8]) {}
+    }
+
+    check::<BuildHasherDefault<MaxHasher>>();
+}
diff --git a/tests/rayon.rs b/tests/rayon.rs
new file mode 100644
index 0000000..39b4770
--- /dev/null
+++ b/tests/rayon.rs
@@ -0,0 +1,533 @@
+#![cfg(feature = "rayon")]
+
+#[macro_use]
+extern crate lazy_static;
+
+use hashbrown::{HashMap, HashSet};
+use rayon::iter::{
+    IntoParallelIterator, IntoParallelRefIterator, IntoParallelRefMutIterator, ParallelExtend,
+    ParallelIterator,
+};
+
+macro_rules! assert_eq3 {
+    ($e1:expr, $e2:expr, $e3:expr) => {{
+        assert_eq!($e1, $e2);
+        assert_eq!($e1, $e3);
+        assert_eq!($e2, $e3);
+    }};
+}
+
+lazy_static! {
+    static ref MAP_EMPTY: HashMap<char, u32> = HashMap::new();
+    static ref MAP: HashMap<char, u32> = {
+        let mut m = HashMap::new();
+        m.insert('b', 20);
+        m.insert('a', 10);
+        m.insert('c', 30);
+        m.insert('e', 50);
+        m.insert('f', 60);
+        m.insert('d', 40);
+        m
+    };
+}
+
+#[test]
+fn map_seq_par_equivalence_iter_empty() {
+    let vec_seq = MAP_EMPTY.iter().collect::<Vec<_>>();
+    let vec_par = MAP_EMPTY.par_iter().collect::<Vec<_>>();
+
+    assert_eq3!(vec_seq, vec_par, []);
+}
+
+#[test]
+fn map_seq_par_equivalence_iter() {
+    let mut vec_seq = MAP.iter().collect::<Vec<_>>();
+    let mut vec_par = MAP.par_iter().collect::<Vec<_>>();
+
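+    // The unsorted comparison relies on the parallel iterator visiting the
+    // table in the same order as the sequential one.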
+    assert_eq!(vec_seq, vec_par);
+
+    // Do not depend on the exact order of values
+    let expected_sorted = [
+        (&'a', &10),
+        (&'b', &20),
+        (&'c', &30),
+        (&'d', &40),
+        (&'e', &50),
+        (&'f', &60),
+    ];
+
+    vec_seq.sort_unstable();
+    vec_par.sort_unstable();
+
+    assert_eq3!(vec_seq, vec_par, expected_sorted);
+}
+
+#[test]
+fn map_seq_par_equivalence_keys_empty() {
+    let vec_seq = MAP_EMPTY.keys().collect::<Vec<&char>>();
+    let vec_par = MAP_EMPTY.par_keys().collect::<Vec<&char>>();
+
+    let expected: [&char; 0] = [];
+
+    assert_eq3!(vec_seq, vec_par, expected);
+}
+
+#[test]
+fn map_seq_par_equivalence_keys() {
+    let mut vec_seq = MAP.keys().collect::<Vec<_>>();
+    let mut vec_par = MAP.par_keys().collect::<Vec<_>>();
+
+    assert_eq!(vec_seq, vec_par);
+
+    // Do not depend on the exact order of values
+    let expected_sorted = [&'a', &'b', &'c', &'d', &'e', &'f'];
+
+    vec_seq.sort_unstable();
+    vec_par.sort_unstable();
+
+    assert_eq3!(vec_seq, vec_par, expected_sorted);
+}
+
+#[test]
+fn map_seq_par_equivalence_values_empty() {
+    let vec_seq = MAP_EMPTY.values().collect::<Vec<_>>();
+    let vec_par = MAP_EMPTY.par_values().collect::<Vec<_>>();
+
+    let expected: [&u32; 0] = [];
+
+    assert_eq3!(vec_seq, vec_par, expected);
+}
+
+#[test]
+fn map_seq_par_equivalence_values() {
+    let mut vec_seq = MAP.values().collect::<Vec<_>>();
+    let mut vec_par = MAP.par_values().collect::<Vec<_>>();
+
+    assert_eq!(vec_seq, vec_par);
+
+    // Do not depend on the exact order of values
+    let expected_sorted = [&10, &20, &30, &40, &50, &60];
+
+    vec_seq.sort_unstable();
+    vec_par.sort_unstable();
+
+    assert_eq3!(vec_seq, vec_par, expected_sorted);
+}
+
+#[test]
+fn map_seq_par_equivalence_iter_mut_empty() {
+    let mut map1 = MAP_EMPTY.clone();
+    let mut map2 = MAP_EMPTY.clone();
+
+    let vec_seq = map1.iter_mut().collect::<Vec<_>>();
+    let vec_par = map2.par_iter_mut().collect::<Vec<_>>();
+
+    assert_eq3!(vec_seq, vec_par, []);
+}
+
+#[test]
+fn map_seq_par_equivalence_iter_mut() {
+    let mut map1 = MAP.clone();
+    let mut map2 = MAP.clone();
+
+    let mut vec_seq = map1.iter_mut().collect::<Vec<_>>();
+    let mut vec_par = map2.par_iter_mut().collect::<Vec<_>>();
+
+    assert_eq!(vec_seq, vec_par);
+
+    // Do not depend on the exact order of values
+    let expected_sorted = [
+        (&'a', &mut 10),
+        (&'b', &mut 20),
+        (&'c', &mut 30),
+        (&'d', &mut 40),
+        (&'e', &mut 50),
+        (&'f', &mut 60),
+    ];
+
+    vec_seq.sort_unstable();
+    vec_par.sort_unstable();
+
+    assert_eq3!(vec_seq, vec_par, expected_sorted);
+}
+
+#[test]
+fn map_seq_par_equivalence_values_mut_empty() {
+    let mut map1 = MAP_EMPTY.clone();
+    let mut map2 = MAP_EMPTY.clone();
+
+    let vec_seq = map1.values_mut().collect::<Vec<_>>();
+    let vec_par = map2.par_values_mut().collect::<Vec<_>>();
+
+    let expected: [&u32; 0] = [];
+
+    assert_eq3!(vec_seq, vec_par, expected);
+}
+
+#[test]
+fn map_seq_par_equivalence_values_mut() {
+    let mut map1 = MAP.clone();
+    let mut map2 = MAP.clone();
+
+    let mut vec_seq = map1.values_mut().collect::<Vec<_>>();
+    let mut vec_par = map2.par_values_mut().collect::<Vec<_>>();
+
+    assert_eq!(vec_seq, vec_par);
+
+    // Do not depend on the exact order of values
+    let expected_sorted = [&mut 10, &mut 20, &mut 30, &mut 40, &mut 50, &mut 60];
+
+    vec_seq.sort_unstable();
+    vec_par.sort_unstable();
+
+    assert_eq3!(vec_seq, vec_par, expected_sorted);
+}
+
+#[test]
+fn map_seq_par_equivalence_into_iter_empty() {
+    let vec_seq = MAP_EMPTY.clone().into_iter().collect::<Vec<_>>();
+    let vec_par = MAP_EMPTY.clone().into_par_iter().collect::<Vec<_>>();
+
+    assert_eq3!(vec_seq, vec_par, []);
+}
+
+#[test]
+fn map_seq_par_equivalence_into_iter() {
+    let mut vec_seq = MAP.clone().into_iter().collect::<Vec<_>>();
+    let mut vec_par = MAP.clone().into_par_iter().collect::<Vec<_>>();
+
+    assert_eq!(vec_seq, vec_par);
+
+    // Do not depend on the exact order of values
+    let expected_sorted = [
+        ('a', 10),
+        ('b', 20),
+        ('c', 30),
+        ('d', 40),
+        ('e', 50),
+        ('f', 60),
+    ];
+
+    vec_seq.sort_unstable();
+    vec_par.sort_unstable();
+
+    assert_eq3!(vec_seq, vec_par, expected_sorted);
+}
+
+lazy_static! {
+    static ref MAP_VEC_EMPTY: Vec<(char, u32)> = vec![];
+    static ref MAP_VEC: Vec<(char, u32)> = vec![
+        ('b', 20),
+        ('a', 10),
+        ('c', 30),
+        ('e', 50),
+        ('f', 60),
+        ('d', 40),
+    ];
+}
+
+#[test]
+fn map_seq_par_equivalence_collect_empty() {
+    let map_expected = MAP_EMPTY.clone();
+    let map_seq = MAP_VEC_EMPTY.clone().into_iter().collect::<HashMap<_, _>>();
+    let map_par = MAP_VEC_EMPTY
+        .clone()
+        .into_par_iter()
+        .collect::<HashMap<_, _>>();
+
+    assert_eq!(map_seq, map_par);
+    assert_eq!(map_seq, map_expected);
+    assert_eq!(map_par, map_expected);
+}
+
+#[test]
+fn map_seq_par_equivalence_collect() {
+    let map_expected = MAP.clone();
+    let map_seq = MAP_VEC.clone().into_iter().collect::<HashMap<_, _>>();
+    let map_par = MAP_VEC.clone().into_par_iter().collect::<HashMap<_, _>>();
+
+    assert_eq!(map_seq, map_par);
+    assert_eq!(map_seq, map_expected);
+    assert_eq!(map_par, map_expected);
+}
+
+lazy_static! {
+    static ref MAP_EXISTING_EMPTY: HashMap<char, u32> = HashMap::new();
+    static ref MAP_EXISTING: HashMap<char, u32> = {
+        let mut m = HashMap::new();
+        m.insert('b', 20);
+        m.insert('a', 10);
+        m
+    };
+    static ref MAP_EXTENSION_EMPTY: Vec<(char, u32)> = vec![];
+    static ref MAP_EXTENSION: Vec<(char, u32)> = vec![('c', 30), ('e', 50), ('f', 60), ('d', 40),];
+}
+
+#[test]
+fn map_seq_par_equivalence_existing_empty_extend_empty() {
+    let expected = HashMap::new();
+    let mut map_seq = MAP_EXISTING_EMPTY.clone();
+    let mut map_par = MAP_EXISTING_EMPTY.clone();
+
+    map_seq.extend(MAP_EXTENSION_EMPTY.iter().cloned());
+    map_par.par_extend(MAP_EXTENSION_EMPTY.par_iter().cloned());
+
+    assert_eq3!(map_seq, map_par, expected);
+}
+
+#[test]
+fn map_seq_par_equivalence_existing_empty_extend() {
+    let expected = MAP_EXTENSION.iter().cloned().collect::<HashMap<_, _>>();
+    let mut map_seq = MAP_EXISTING_EMPTY.clone();
+    let mut map_par = MAP_EXISTING_EMPTY.clone();
+
+    map_seq.extend(MAP_EXTENSION.iter().cloned());
+    map_par.par_extend(MAP_EXTENSION.par_iter().cloned());
+
+    assert_eq3!(map_seq, map_par, expected);
+}
+
+#[test]
+fn map_seq_par_equivalence_existing_extend_empty() {
+    let expected = MAP_EXISTING.clone();
+    let mut map_seq = MAP_EXISTING.clone();
+    let mut map_par = MAP_EXISTING.clone();
+
+    map_seq.extend(MAP_EXTENSION_EMPTY.iter().cloned());
+    map_par.par_extend(MAP_EXTENSION_EMPTY.par_iter().cloned());
+
+    assert_eq3!(map_seq, map_par, expected);
+}
+
+#[test]
+fn map_seq_par_equivalence_existing_extend() {
+    let expected = MAP.clone();
+    let mut map_seq = MAP_EXISTING.clone();
+    let mut map_par = MAP_EXISTING.clone();
+
+    map_seq.extend(MAP_EXTENSION.iter().cloned());
+    map_par.par_extend(MAP_EXTENSION.par_iter().cloned());
+
+    assert_eq3!(map_seq, map_par, expected);
+}
+
+lazy_static! {
+    static ref SET_EMPTY: HashSet<char> = HashSet::new();
+    static ref SET: HashSet<char> = {
+        let mut s = HashSet::new();
+        s.insert('b');
+        s.insert('a');
+        s.insert('c');
+        s.insert('e');
+        s.insert('f');
+        s.insert('d');
+        s
+    };
+}
+
+#[test]
+fn set_seq_par_equivalence_iter_empty() {
+    let vec_seq = SET_EMPTY.iter().collect::<Vec<_>>();
+    let vec_par = SET_EMPTY.par_iter().collect::<Vec<_>>();
+
+    let expected: [&char; 0] = [];
+
+    assert_eq3!(vec_seq, vec_par, expected);
+}
+
+#[test]
+fn set_seq_par_equivalence_iter() {
+    let mut vec_seq = SET.iter().collect::<Vec<_>>();
+    let mut vec_par = SET.par_iter().collect::<Vec<_>>();
+
+    assert_eq!(vec_seq, vec_par);
+
+    // Do not depend on the exact order of values
+    let expected_sorted = [&'a', &'b', &'c', &'d', &'e', &'f'];
+
+    vec_seq.sort_unstable();
+    vec_par.sort_unstable();
+
+    assert_eq3!(vec_seq, vec_par, expected_sorted);
+}
+
+#[test]
+fn set_seq_par_equivalence_into_iter_empty() {
+    let vec_seq = SET_EMPTY.clone().into_iter().collect::<Vec<_>>();
+    let vec_par = SET_EMPTY.clone().into_par_iter().collect::<Vec<_>>();
+
+    assert_eq3!(vec_seq, vec_par, []);
+}
+
+#[test]
+fn set_seq_par_equivalence_into_iter() {
+    let mut vec_seq = SET.clone().into_iter().collect::<Vec<_>>();
+    let mut vec_par = SET.clone().into_par_iter().collect::<Vec<_>>();
+
+    assert_eq!(vec_seq, vec_par);
+
+    // Do not depend on the exact order of values
+    let expected_sorted = ['a', 'b', 'c', 'd', 'e', 'f'];
+
+    vec_seq.sort_unstable();
+    vec_par.sort_unstable();
+
+    assert_eq3!(vec_seq, vec_par, expected_sorted);
+}
+
+lazy_static! {
+    static ref SET_VEC_EMPTY: Vec<char> = vec![];
+    static ref SET_VEC: Vec<char> = vec!['b', 'a', 'c', 'e', 'f', 'd',];
+}
+
+#[test]
+fn set_seq_par_equivalence_collect_empty() {
+    let set_expected = SET_EMPTY.clone();
+    let set_seq = SET_VEC_EMPTY.clone().into_iter().collect::<HashSet<_>>();
+    let set_par = SET_VEC_EMPTY
+        .clone()
+        .into_par_iter()
+        .collect::<HashSet<_>>();
+
+    assert_eq!(set_seq, set_par);
+    assert_eq!(set_seq, set_expected);
+    assert_eq!(set_par, set_expected);
+}
+
+#[test]
+fn set_seq_par_equivalence_collect() {
+    let set_expected = SET.clone();
+    let set_seq = SET_VEC.clone().into_iter().collect::<HashSet<_>>();
+    let set_par = SET_VEC.clone().into_par_iter().collect::<HashSet<_>>();
+
+    assert_eq!(set_seq, set_par);
+    assert_eq!(set_seq, set_expected);
+    assert_eq!(set_par, set_expected);
+}
+
+lazy_static! {
+    static ref SET_EXISTING_EMPTY: HashSet<char> = HashSet::new();
+    static ref SET_EXISTING: HashSet<char> = {
+        let mut s = HashSet::new();
+        s.insert('b');
+        s.insert('a');
+        s
+    };
+    static ref SET_EXTENSION_EMPTY: Vec<char> = vec![];
+    static ref SET_EXTENSION: Vec<char> = vec!['c', 'e', 'f', 'd',];
+}
+
+#[test]
+fn set_seq_par_equivalence_existing_empty_extend_empty() {
+    let expected = HashSet::new();
+    let mut set_seq = SET_EXISTING_EMPTY.clone();
+    let mut set_par = SET_EXISTING_EMPTY.clone();
+
+    set_seq.extend(SET_EXTENSION_EMPTY.iter().cloned());
+    set_par.par_extend(SET_EXTENSION_EMPTY.par_iter().cloned());
+
+    assert_eq3!(set_seq, set_par, expected);
+}
+
+#[test]
+fn set_seq_par_equivalence_existing_empty_extend() {
+    let expected = SET_EXTENSION.iter().cloned().collect::<HashSet<_>>();
+    let mut set_seq = SET_EXISTING_EMPTY.clone();
+    let mut set_par = SET_EXISTING_EMPTY.clone();
+
+    set_seq.extend(SET_EXTENSION.iter().cloned());
+    set_par.par_extend(SET_EXTENSION.par_iter().cloned());
+
+    assert_eq3!(set_seq, set_par, expected);
+}
+
+#[test]
+fn set_seq_par_equivalence_existing_extend_empty() {
+    let expected = SET_EXISTING.clone();
+    let mut set_seq = SET_EXISTING.clone();
+    let mut set_par = SET_EXISTING.clone();
+
+    set_seq.extend(SET_EXTENSION_EMPTY.iter().cloned());
+    set_par.par_extend(SET_EXTENSION_EMPTY.par_iter().cloned());
+
+    assert_eq3!(set_seq, set_par, expected);
+}
+
+#[test]
+fn set_seq_par_equivalence_existing_extend() {
+    let expected = SET.clone();
+    let mut set_seq = SET_EXISTING.clone();
+    let mut set_par = SET_EXISTING.clone();
+
+    set_seq.extend(SET_EXTENSION.iter().cloned());
+    set_par.par_extend(SET_EXTENSION.par_iter().cloned());
+
+    assert_eq3!(set_seq, set_par, expected);
+}
+
+lazy_static! {
+    static ref SET_A: HashSet<char> = ['a', 'b', 'c', 'd'].iter().cloned().collect();
+    static ref SET_B: HashSet<char> = ['a', 'b', 'e', 'f'].iter().cloned().collect();
+    static ref SET_DIFF_AB: HashSet<char> = ['c', 'd'].iter().cloned().collect();
+    static ref SET_DIFF_BA: HashSet<char> = ['e', 'f'].iter().cloned().collect();
+    static ref SET_SYMM_DIFF_AB: HashSet<char> = ['c', 'd', 'e', 'f'].iter().cloned().collect();
+    static ref SET_INTERSECTION_AB: HashSet<char> = ['a', 'b'].iter().cloned().collect();
+    static ref SET_UNION_AB: HashSet<char> =
+        ['a', 'b', 'c', 'd', 'e', 'f'].iter().cloned().collect();
+}
+
+#[test]
+fn set_seq_par_equivalence_difference() {
+    let diff_ab_seq = SET_A.difference(&*SET_B).cloned().collect::<HashSet<_>>();
+    let diff_ab_par = SET_A
+        .par_difference(&*SET_B)
+        .cloned()
+        .collect::<HashSet<_>>();
+
+    assert_eq3!(diff_ab_seq, diff_ab_par, *SET_DIFF_AB);
+
+    let diff_ba_seq = SET_B.difference(&*SET_A).cloned().collect::<HashSet<_>>();
+    let diff_ba_par = SET_B
+        .par_difference(&*SET_A)
+        .cloned()
+        .collect::<HashSet<_>>();
+
+    assert_eq3!(diff_ba_seq, diff_ba_par, *SET_DIFF_BA);
+}
+
+#[test]
+fn set_seq_par_equivalence_symmetric_difference() {
+    let symm_diff_ab_seq = SET_A
+        .symmetric_difference(&*SET_B)
+        .cloned()
+        .collect::<HashSet<_>>();
+    let symm_diff_ab_par = SET_A
+        .par_symmetric_difference(&*SET_B)
+        .cloned()
+        .collect::<HashSet<_>>();
+
+    assert_eq3!(symm_diff_ab_seq, symm_diff_ab_par, *SET_SYMM_DIFF_AB);
+}
+
+#[test]
+fn set_seq_par_equivalence_intersection() {
+    let intersection_ab_seq = SET_A.intersection(&*SET_B).cloned().collect::<HashSet<_>>();
+    let intersection_ab_par = SET_A
+        .par_intersection(&*SET_B)
+        .cloned()
+        .collect::<HashSet<_>>();
+
+    assert_eq3!(
+        intersection_ab_seq,
+        intersection_ab_par,
+        *SET_INTERSECTION_AB
+    );
+}
+
+#[test]
+fn set_seq_par_equivalence_union() {
+    let union_ab_seq = SET_A.union(&*SET_B).cloned().collect::<HashSet<_>>();
+    let union_ab_par = SET_A.par_union(&*SET_B).cloned().collect::<HashSet<_>>();
+
+    assert_eq3!(union_ab_seq, union_ab_par, *SET_UNION_AB);
+}
diff --git a/tests/serde.rs b/tests/serde.rs
new file mode 100644
index 0000000..570bf70
--- /dev/null
+++ b/tests/serde.rs
@@ -0,0 +1,65 @@
+#![cfg(feature = "serde")]
+
+use core::hash::BuildHasherDefault;
+use hashbrown::{HashMap, HashSet};
+use rustc_hash::FxHasher;
+use serde_test::{assert_tokens, Token};
+
+// We use FxHasher for these tests because it is not randomly seeded, so the
+// iteration order (and thus the expected token order) is deterministic.
+type FxHashMap<K, V> = HashMap<K, V, BuildHasherDefault<FxHasher>>;
+type FxHashSet<T> = HashSet<T, BuildHasherDefault<FxHasher>>;
+
+#[test]
+fn map_serde_tokens_empty() {
+    let map = FxHashMap::<char, u32>::default();
+
+    assert_tokens(&map, &[Token::Map { len: Some(0) }, Token::MapEnd]);
+}
+
+#[test]
+fn map_serde_tokens() {
+    let mut map = FxHashMap::default();
+    map.insert('b', 20);
+    map.insert('a', 10);
+    map.insert('c', 30);
+
+    assert_tokens(
+        &map,
+        &[
+            Token::Map { len: Some(3) },
+            Token::Char('a'),
+            Token::I32(10),
+            Token::Char('b'),
+            Token::I32(20),
+            Token::Char('c'),
+            Token::I32(30),
+            Token::MapEnd,
+        ],
+    );
+}
+
+#[test]
+fn set_serde_tokens_empty() {
+    let set = FxHashSet::<u32>::default();
+
+    assert_tokens(&set, &[Token::Seq { len: Some(0) }, Token::SeqEnd]);
+}
+
+#[test]
+fn set_serde_tokens() {
+    let mut set = FxHashSet::default();
+    set.insert(20);
+    set.insert(10);
+    set.insert(30);
+
+    assert_tokens(
+        &set,
+        &[
+            Token::Seq { len: Some(3) },
+            Token::I32(20),
+            Token::I32(10),
+            Token::I32(30),
+            Token::SeqEnd,
+        ],
+    );
+}
diff --git a/tests/set.rs b/tests/set.rs
new file mode 100644
index 0000000..3fc0717
--- /dev/null
+++ b/tests/set.rs
@@ -0,0 +1,30 @@
+#![cfg(not(miri))] // FIXME: takes too long
+
+use hashbrown::HashSet;
+use rand::{distributions::Alphanumeric, rngs::SmallRng, Rng, SeedableRng};
+
+#[test]
+fn test_hashset_insert_remove() {
+    let mut m: HashSet<Vec<char>> = HashSet::new();
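+    // Fixed RNG seed so the generated keys are reproducible across runs.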
+    let seed: [u8; 16] = [
+        130, 220, 246, 217, 111, 124, 221, 189, 190, 234, 121, 93, 67, 95, 100, 43,
+    ];
+
+    let rng = &mut SmallRng::from_seed(seed);
+    let tx: Vec<Vec<char>> = (0..4096)
+        .map(|_| (rng.sample_iter(&Alphanumeric).take(32).collect()))
+        .collect();
+
+    for _ in 0..32 {
+        for i in 0..4096 {
+            assert!(!m.contains(&tx[i]));
+            assert!(m.insert(tx[i].clone()));
+        }
+        for i in 0..4096 {
+            println!("removing {} {:?}", i, tx[i]);
+            assert!(m.remove(&tx[i]));
+        }
+    }
+}