Merge pull request #268 from dingelish/update-toolchain-09-08
Upgrade toolchain to 2020-09-10
diff --git a/rust-toolchain b/rust-toolchain
index 4e74872..641cabd 100644
--- a/rust-toolchain
+++ b/rust-toolchain
@@ -1 +1 @@
-nightly-2020-04-07
+nightly-2020-09-10
diff --git a/samplecode/http_req/app/src/main.rs b/samplecode/http_req/app/src/main.rs
index f69e304..3a84949 100644
--- a/samplecode/http_req/app/src/main.rs
+++ b/samplecode/http_req/app/src/main.rs
@@ -60,7 +60,7 @@
let mut retval = sgx_status_t::SGX_SUCCESS;
- let hostname = "example.com";
+ let hostname = "www.rust-lang.org";
let port = 443;
let hostname = format!("https://{}:{}", hostname, port);
diff --git a/samplecode/machine-learning/enclave/Cargo.toml b/samplecode/machine-learning/enclave/Cargo.toml
index 1736fdd..9c9f363 100644
--- a/samplecode/machine-learning/enclave/Cargo.toml
+++ b/samplecode/machine-learning/enclave/Cargo.toml
@@ -10,9 +10,6 @@
[features]
default = []
-[profile.release]
-lto = true
-
[target.'cfg(not(target_env = "sgx"))'.dependencies]
sgx_tstd = { git = "https://github.com/apache/teaclave-sgx-sdk.git" }
sgx_types = { git = "https://github.com/apache/teaclave-sgx-sdk.git" }
diff --git a/samplecode/unit-test/enclave/src/test_exception.rs b/samplecode/unit-test/enclave/src/test_exception.rs
index 36d9819..a57579f 100644
--- a/samplecode/unit-test/enclave/src/test_exception.rs
+++ b/samplecode/unit-test/enclave/src/test_exception.rs
@@ -32,7 +32,7 @@
let td = enclave::SgxThreadData::current();
println!("test_abort stack: {:x}-{:x}", td.stack_base(), td.stack_limit());
- unsafe{ std::intrinsics::abort() }
+ std::intrinsics::abort()
}
pub fn test_exception_handler() {
diff --git a/sgx_alloc/src/lib.rs b/sgx_alloc/src/lib.rs
index 32d7165..412e458 100644
--- a/sgx_alloc/src/lib.rs
+++ b/sgx_alloc/src/lib.rs
@@ -30,6 +30,8 @@
#![feature(dropck_eyepatch)]
#![feature(allocator_api)]
#![feature(core_intrinsics)]
+#![feature(nonnull_slice_from_raw_parts)]
+#![feature(slice_ptr_get)]
extern crate alloc;
diff --git a/sgx_alloc/src/system.rs b/sgx_alloc/src/system.rs
index 5a5e688..47ac408 100644
--- a/sgx_alloc/src/system.rs
+++ b/sgx_alloc/src/system.rs
@@ -23,10 +23,10 @@
//! 2018-06-22 Add liballoc components here
use core::alloc::{
- AllocErr, AllocInit, AllocRef, GlobalAlloc, Layout, MemoryBlock, ReallocPlacement,
+ AllocErr, AllocRef, GlobalAlloc, Layout,
};
use core::intrinsics;
-use core::ptr::NonNull;
+use core::ptr::{self, NonNull};
// The minimum alignment guaranteed by the architecture. This value is used to
// add fast paths for low alignment values. In practice, the alignment is a
@@ -41,27 +41,83 @@
pub struct System;
-unsafe impl AllocRef for System {
+impl System {
#[inline]
- fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
- unsafe {
- let size = layout.size();
- if size == 0 {
- Ok(MemoryBlock {
- ptr: layout.dangling(),
- size: 0,
- })
- } else {
- let raw_ptr = match init {
- AllocInit::Uninitialized => GlobalAlloc::alloc(self, layout),
- AllocInit::Zeroed => GlobalAlloc::alloc_zeroed(self, layout),
+ fn alloc_impl(&mut self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocErr> {
+ match layout.size() {
+ 0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
+ // SAFETY: `layout` is non-zero in size,
+ size => unsafe {
+ let raw_ptr = if zeroed {
+ GlobalAlloc::alloc_zeroed(self, layout)
+ } else {
+ GlobalAlloc::alloc(self, layout)
};
let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
- Ok(MemoryBlock { ptr, size })
- }
+ Ok(NonNull::slice_from_raw_parts(ptr, size))
+ },
}
}
+ // SAFETY: Same as `AllocRef::grow`
+ #[inline]
+ unsafe fn grow_impl(
+ &mut self,
+ ptr: NonNull<u8>,
+ old_layout: Layout,
+ new_layout: Layout,
+ zeroed: bool,
+ ) -> Result<NonNull<[u8]>, AllocErr> {
+ debug_assert!(
+ new_layout.size() >= old_layout.size(),
+ "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
+ );
+
+ match old_layout.size() {
+ 0 => self.alloc_impl(new_layout, zeroed),
+
+ // SAFETY: `new_size` is non-zero, since it is greater than or equal to `old_size`
+ // (which is non-zero in this arm) as required by safety conditions. Other conditions must be upheld by the caller
+ old_size if old_layout.align() == new_layout.align() => {
+ let new_size = new_layout.size();
+
+ // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
+ intrinsics::assume(new_size >= old_layout.size());
+
+ let raw_ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), old_layout, new_size);
+ let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
+ if zeroed {
+ raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
+ }
+ Ok(NonNull::slice_from_raw_parts(ptr, new_size))
+ },
+
+ // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
+ // both the old and new memory allocation are valid for reads and writes for `old_size`
+ // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
+ // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
+ // for `dealloc` must be upheld by the caller.
+ old_size => {
+ let new_ptr = self.alloc_impl(new_layout, zeroed)?;
+ ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
+ self.dealloc(ptr, old_layout);
+ Ok(new_ptr)
+ },
+ }
+ }
+}
+
+unsafe impl AllocRef for System {
+ #[inline]
+ fn alloc(&mut self, layout: Layout) -> Result<NonNull<[u8]>, AllocErr> {
+ self.alloc_impl(layout, false)
+ }
+
+ #[inline]
+ fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<[u8]>, AllocErr> {
+ self.alloc_impl(layout, true)
+ }
+
#[inline]
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
if layout.size() != 0 {
@@ -73,78 +129,59 @@
unsafe fn grow(
&mut self,
ptr: NonNull<u8>,
- layout: Layout,
- new_size: usize,
- placement: ReallocPlacement,
- init: AllocInit,
- ) -> Result<MemoryBlock, AllocErr> {
- let size = layout.size();
- debug_assert!(
- new_size >= size,
- "`new_size` must be greater than or equal to `memory.size()`"
- );
+ old_layout: Layout,
+ new_layout: Layout,
+ ) -> Result<NonNull<[u8]>, AllocErr> {
+ // SAFETY: all conditions must be upheld by the caller
+ self.grow_impl(ptr, old_layout, new_layout, false)
+ }
- if size == new_size {
- return Ok(MemoryBlock { ptr, size });
- }
-
- match placement {
- ReallocPlacement::InPlace => Err(AllocErr),
- ReallocPlacement::MayMove if layout.size() == 0 => {
- let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
- self.alloc(new_layout, init)
- }
- ReallocPlacement::MayMove => {
- // `realloc` probably checks for `new_size > size` or something similar.
- intrinsics::assume(new_size > size);
- let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size);
- let memory = MemoryBlock {
- ptr: NonNull::new(ptr).ok_or(AllocErr)?,
- size: new_size,
- };
- init.init_offset(memory, size);
- Ok(memory)
- }
- }
+ #[inline]
+ unsafe fn grow_zeroed(
+ &mut self,
+ ptr: NonNull<u8>,
+ old_layout: Layout,
+ new_layout: Layout,
+ ) -> Result<NonNull<[u8]>, AllocErr> {
+ // SAFETY: all conditions must be upheld by the caller
+ self.grow_impl(ptr, old_layout, new_layout, true)
}
#[inline]
unsafe fn shrink(
&mut self,
ptr: NonNull<u8>,
- layout: Layout,
- new_size: usize,
- placement: ReallocPlacement,
- ) -> Result<MemoryBlock, AllocErr> {
- let size = layout.size();
- debug_assert!(
- new_size <= size,
- "`new_size` must be smaller than or equal to `memory.size()`"
- );
+ old_layout: Layout,
+ new_layout: Layout,
+ ) -> Result<NonNull<[u8]>, AllocErr> {
+ match new_layout.size() {
+ // SAFETY: conditions must be upheld by the caller
+ 0 => {
+ self.dealloc(ptr, old_layout);
+ Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
+ },
+ // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
+ new_size if old_layout.align() == new_layout.align() => {
+ // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
+ intrinsics::assume(new_size <= old_layout.size());
- if size == new_size {
- return Ok(MemoryBlock { ptr, size });
- }
+ let raw_ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), old_layout, new_size);
+ let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
+ Ok(NonNull::slice_from_raw_parts(ptr, new_size))
+ },
- match placement {
- ReallocPlacement::InPlace => Err(AllocErr),
- ReallocPlacement::MayMove if new_size == 0 => {
- self.dealloc(ptr, layout);
- Ok(MemoryBlock {
- ptr: layout.dangling(),
- size: 0,
- })
- }
- ReallocPlacement::MayMove => {
- // `realloc` probably checks for `new_size < size` or something similar.
- intrinsics::assume(new_size < size);
- let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size);
- Ok(MemoryBlock {
- ptr: NonNull::new(ptr).ok_or(AllocErr)?,
- size: new_size,
- })
- }
- }
+ // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
+ // both the old and new memory allocation are valid for reads and writes for `new_size`
+ // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
+ // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
+ // for `dealloc` must be upheld by the caller.
+ new_size => {
+ let new_ptr = self.alloc(new_layout)?;
+ ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
+ self.dealloc(ptr, old_layout);
+ Ok(new_ptr)
+ },
+ }
}
}
diff --git a/sgx_panic_abort/lib.rs b/sgx_panic_abort/lib.rs
index 6078713..95bf3e0 100644
--- a/sgx_panic_abort/lib.rs
+++ b/sgx_panic_abort/lib.rs
@@ -17,6 +17,7 @@
use core::any::Any;
#[rustc_std_internal_symbol]
+#[allow(improper_ctypes_definitions)]
pub unsafe extern "C" fn __rust_panic_cleanup(_: *mut u8) -> *mut (dyn Any + Send + 'static) {
unreachable!()
}
diff --git a/sgx_panic_unwind/lib.rs b/sgx_panic_unwind/lib.rs
index ecfb931..428b754 100644
--- a/sgx_panic_unwind/lib.rs
+++ b/sgx_panic_unwind/lib.rs
@@ -40,6 +40,7 @@
mod dwarf;
#[rustc_std_internal_symbol]
+#[allow(improper_ctypes_definitions)]
pub unsafe extern "C" fn __rust_panic_cleanup(payload: *mut u8) -> *mut (dyn Any + Send + 'static) {
Box::into_raw(imp::cleanup(payload))
}
diff --git a/sgx_trts/src/lib.rs b/sgx_trts/src/lib.rs
index 2a47e7d..e22093d 100644
--- a/sgx_trts/src/lib.rs
+++ b/sgx_trts/src/lib.rs
@@ -71,7 +71,7 @@
#![allow(non_camel_case_types)]
#![allow(non_upper_case_globals)]
#![allow(non_snake_case)]
-#![feature(specialization)]
+#![feature(min_specialization)]
#![feature(vec_into_raw_parts)]
#![feature(toowned_clone_into)]
diff --git a/sgx_tstd/hashbrown/.cargo_vcs_info.json b/sgx_tstd/hashbrown/.cargo_vcs_info.json
index 9859b5e..ec4027b 100644
--- a/sgx_tstd/hashbrown/.cargo_vcs_info.json
+++ b/sgx_tstd/hashbrown/.cargo_vcs_info.json
@@ -1,5 +1,5 @@
{
"git": {
- "sha1": "eaaa3667b72baaed53620e7f87e6fa0ca7824213"
+ "sha1": "d4bb3ea3321a73549aaf6bcc06e9e5e3e68f4063"
}
}
diff --git a/sgx_tstd/hashbrown/CHANGELOG.md b/sgx_tstd/hashbrown/CHANGELOG.md
index 1349370..e349925 100644
--- a/sgx_tstd/hashbrown/CHANGELOG.md
+++ b/sgx_tstd/hashbrown/CHANGELOG.md
@@ -7,6 +7,72 @@
## [Unreleased]
+## [v0.9.0] - 2020-09-03
+
+### Fixed
+- `drain_filter` now removes and yields items that do match the predicate,
+ rather than items that don't. This is a **breaking change** to match the
+ behavior of the `drain_filter` methods in `std`. (#187)
+
+### Added
+- Added `replace_entry_with` to `OccupiedEntry`, and `and_replace_entry_with` to `Entry`. (#190)
+- Implemented `FusedIterator` and `size_hint` for `DrainFilter`. (#188)
+
+### Changed
+- The minimum Rust version has been bumped to 1.36 (due to `crossbeam` dependency). (#193)
+- Updated `ahash` dependency to 0.4. (#198)
+- `HashMap::with_hasher` and `HashSet::with_hasher` are now `const fn`. (#195)
+- Removed `T: Hash + Eq` and `S: BuildHasher` bounds on `HashSet::new`,
+ `with_capacity`, `with_hasher`, and `with_capacity_and_hasher`. (#185)
+
+## [v0.8.2] - 2020-08-08
+
+### Changed
+- Avoid closures to improve compile times. (#183)
+- Do not iterate to drop if empty. (#182)
+
+## [v0.8.1] - 2020-07-16
+
+### Added
+- Added `erase` and `remove` to `RawTable`. (#171)
+- Added `try_with_capacity` to `RawTable`. (#174)
+- Added methods that allow re-using a `RawIter` for `RawDrain`,
+ `RawIntoIter`, and `RawParIter`. (#175)
+- Added `reflect_remove` and `reflect_insert` to `RawIter`. (#175)
+- Added a `drain_filter` function to `HashSet`. (#179)
+
+### Changed
+- Deprecated `RawTable::erase_no_drop` in favor of `erase` and `remove`. (#176)
+- `insert_no_grow` is now exposed under the `"raw"` feature. (#180)
+
+## [v0.8.0] - 2020-06-18
+
+### Fixed
+- Marked `RawTable::par_iter` as `unsafe`. (#157)
+
+### Changed
+- Reduced the size of `HashMap`. (#159)
+- No longer create tables with a capacity of 1 element. (#162)
+- Removed `K: Eq + Hash` bounds on `retain`. (#163)
+- Pulled in `HashMap` changes from rust-lang/rust (#164):
+ - `extend_one` support on nightly.
+ - `CollectionAllocErr` renamed to `TryReserveError`.
+ - Added `HashSet::get_or_insert_owned`.
+ - `Default` for `HashSet` no longer requires `T: Eq + Hash` and `S: BuildHasher`.
+
+## [v0.7.2] - 2020-04-27
+
+### Added
+- Added `or_insert_with_key` to `Entry`. (#152)
+
+### Fixed
+- Partially reverted `Clone` optimization which was unsound. (#154)
+
+### Changed
+- Disabled use of `const-random` by default, which prevented reproducible builds. (#155)
+- Optimized `repeat` function. (#150)
+- Use `NonNull` for buckets, which improves codegen for iterators. (#148)
+
## [v0.7.1] - 2020-03-16
### Added
@@ -183,7 +249,12 @@
- Initial release
-[Unreleased]: https://github.com/rust-lang/hashbrown/compare/v0.7.1...HEAD
+[Unreleased]: https://github.com/rust-lang/hashbrown/compare/v0.9.0...HEAD
+[v0.9.0]: https://github.com/rust-lang/hashbrown/compare/v0.8.2...v0.9.0
+[v0.8.2]: https://github.com/rust-lang/hashbrown/compare/v0.8.1...v0.8.2
+[v0.8.1]: https://github.com/rust-lang/hashbrown/compare/v0.8.0...v0.8.1
+[v0.8.0]: https://github.com/rust-lang/hashbrown/compare/v0.7.2...v0.8.0
+[v0.7.2]: https://github.com/rust-lang/hashbrown/compare/v0.7.1...v0.7.2
[v0.7.1]: https://github.com/rust-lang/hashbrown/compare/v0.7.0...v0.7.1
[v0.7.0]: https://github.com/rust-lang/hashbrown/compare/v0.6.3...v0.7.0
[v0.6.3]: https://github.com/rust-lang/hashbrown/compare/v0.6.2...v0.6.3
diff --git a/sgx_tstd/hashbrown/Cargo.toml b/sgx_tstd/hashbrown/Cargo.toml
index 43a18e9..6ed051b 100644
--- a/sgx_tstd/hashbrown/Cargo.toml
+++ b/sgx_tstd/hashbrown/Cargo.toml
@@ -13,9 +13,8 @@
[package]
edition = "2018"
name = "hashbrown_tstd"
-version = "0.7.1"
+version = "0.9.0"
authors = ["Amanieu d'Antras <amanieu@gmail.com>"]
-build = "build.rs"
exclude = [".travis.yml", "bors.toml", "/ci/*"]
description = "A Rust port of Google's SwissTable hash map"
readme = "README.md"
@@ -23,15 +22,27 @@
categories = ["data-structures", "no-std"]
license = "Apache-2.0/MIT"
repository = "https://github.com/rust-lang/hashbrown"
-
[package.metadata.docs.rs]
features = ["nightly", "rayon", "serde", "raw"]
-
[dependencies.ahash]
-version = "0.3.2"
+version = "0.4.4"
optional = true
default-features = false
+#[dependencies.alloc]
+#version = "1.0.0"
+#optional = true
+#package = "rustc-std-workspace-alloc"
+
+#[dependencies.compiler_builtins]
+#version = "0.1.2"
+#optional = true
+
+#[dependencies.core]
+#version = "1.0.0"
+#optional = true
+#package = "rustc-std-workspace-core"
+
[dependencies.rayon]
version = "1.0"
optional = true
@@ -40,7 +51,6 @@
version = "1.0.25"
optional = true
default-features = false
-
[dev-dependencies.doc-comment]
version = "0.3.1"
@@ -60,14 +70,14 @@
[dev-dependencies.serde_test]
version = "1.0"
-[build-dependencies.autocfg]
-version = "1"
-
[features]
ahash-compile-time-rng = ["ahash/compile-time-rng"]
-default = ["ahash", "ahash-compile-time-rng", "inline-more"]
+default = ["ahash", "inline-more"]
inline-more = []
nightly = []
raw = []
-rustc-dep-of-std = ["nightly", "rustc-internal-api"]
+rustc-dep-of-std = ["nightly", "core", "compiler_builtins", "alloc", "rustc-internal-api"]
rustc-internal-api = []
+core = []
+compiler_builtins = []
+alloc = []
diff --git a/sgx_tstd/hashbrown/Cargo.toml.orig b/sgx_tstd/hashbrown/Cargo.toml.orig
index 4d622df..25a1709 100644
--- a/sgx_tstd/hashbrown/Cargo.toml.orig
+++ b/sgx_tstd/hashbrown/Cargo.toml.orig
@@ -1,6 +1,6 @@
[package]
name = "hashbrown"
-version = "0.7.1"
+version = "0.9.0"
authors = ["Amanieu d'Antras <amanieu@gmail.com>"]
description = "A Rust port of Google's SwissTable hash map"
license = "Apache-2.0/MIT"
@@ -10,11 +10,10 @@
categories = ["data-structures", "no-std"]
exclude = [".travis.yml", "bors.toml", "/ci/*"]
edition = "2018"
-build = "build.rs"
[dependencies]
# For the default hasher
-ahash = { version = "0.3.2", optional = true, default-features = false }
+ahash = { version = "0.4.4", optional = true, default-features = false }
# For external trait impls
rayon = { version = "1.0", optional = true }
@@ -25,9 +24,6 @@
compiler_builtins = { version = "0.1.2", optional = true }
alloc = { version = "1.0.0", optional = true, package = "rustc-std-workspace-alloc" }
-[build-dependencies]
-autocfg = "1"
-
[dev-dependencies]
lazy_static = "1.2"
rand = { version = "0.7.3", features = ["small_rng"] }
@@ -37,16 +33,18 @@
doc-comment = "0.3.1"
[features]
-default = [
- "ahash",
- "ahash-compile-time-rng",
- "inline-more",
-]
+default = ["ahash", "inline-more"]
-ahash-compile-time-rng = [ "ahash/compile-time-rng" ]
+ahash-compile-time-rng = ["ahash/compile-time-rng"]
nightly = []
rustc-internal-api = []
-rustc-dep-of-std = ["nightly", "core", "compiler_builtins", "alloc", "rustc-internal-api"]
+rustc-dep-of-std = [
+ "nightly",
+ "core",
+ "compiler_builtins",
+ "alloc",
+ "rustc-internal-api",
+]
raw = []
# Enables usage of `#[inline]` on far more functions than by default in this
diff --git a/sgx_tstd/hashbrown/README.md b/sgx_tstd/hashbrown/README.md
index bbf6e6c..f3de05a 100644
--- a/sgx_tstd/hashbrown/README.md
+++ b/sgx_tstd/hashbrown/README.md
@@ -84,7 +84,7 @@
```toml
[dependencies]
-hashbrown = "0.7"
+hashbrown = "0.9"
```
Then:
diff --git a/sgx_tstd/hashbrown/build.rs b/sgx_tstd/hashbrown/build.rs
deleted file mode 100644
index 818b201..0000000
--- a/sgx_tstd/hashbrown/build.rs
+++ /dev/null
@@ -1,9 +0,0 @@
-fn main() {
- println!("cargo:rerun-if-changed=build.rs");
- let nightly = std::env::var_os("CARGO_FEATURE_NIGHTLY").is_some();
- let has_stable_alloc = || autocfg::new().probe_rustc_version(1, 36);
-
- if nightly || has_stable_alloc() {
- autocfg::emit("has_extern_crate_alloc")
- }
-}
diff --git a/sgx_tstd/hashbrown/src/external_trait_impls/rayon/map.rs b/sgx_tstd/hashbrown/src/external_trait_impls/rayon/map.rs
index 022e23e..334f8bb 100644
--- a/sgx_tstd/hashbrown/src/external_trait_impls/rayon/map.rs
+++ b/sgx_tstd/hashbrown/src/external_trait_impls/rayon/map.rs
@@ -27,9 +27,7 @@
where
C: UnindexedConsumer<Self::Item>,
{
- self.map
- .table
- .par_iter()
+ unsafe { self.map.table.par_iter() }
.map(|x| unsafe {
let r = x.as_ref();
(&r.0, &r.1)
@@ -70,9 +68,7 @@
where
C: UnindexedConsumer<Self::Item>,
{
- self.map
- .table
- .par_iter()
+ unsafe { self.map.table.par_iter() }
.map(|x| unsafe { &x.as_ref().0 })
.drive_unindexed(consumer)
}
@@ -110,9 +106,7 @@
where
C: UnindexedConsumer<Self::Item>,
{
- self.map
- .table
- .par_iter()
+ unsafe { self.map.table.par_iter() }
.map(|x| unsafe { &x.as_ref().1 })
.drive_unindexed(consumer)
}
@@ -152,9 +146,7 @@
where
C: UnindexedConsumer<Self::Item>,
{
- self.map
- .table
- .par_iter()
+ unsafe { self.map.table.par_iter() }
.map(|x| unsafe {
let r = x.as_mut();
(&r.0, &mut r.1)
@@ -190,9 +182,7 @@
where
C: UnindexedConsumer<Self::Item>,
{
- self.map
- .table
- .par_iter()
+ unsafe { self.map.table.par_iter() }
.map(|x| unsafe { &mut x.as_mut().1 })
.drive_unindexed(consumer)
}
diff --git a/sgx_tstd/hashbrown/src/external_trait_impls/rayon/raw.rs b/sgx_tstd/hashbrown/src/external_trait_impls/rayon/raw.rs
index c6fe09d..1bd2c17 100644
--- a/sgx_tstd/hashbrown/src/external_trait_impls/rayon/raw.rs
+++ b/sgx_tstd/hashbrown/src/external_trait_impls/rayon/raw.rs
@@ -1,5 +1,5 @@
use crate::raw::Bucket;
-use crate::raw::{RawIterRange, RawTable};
+use crate::raw::{RawIter, RawIterRange, RawTable};
use crate::scopeguard::guard;
use alloc::alloc::dealloc;
use core::marker::PhantomData;
@@ -15,6 +15,12 @@
iter: RawIterRange<T>,
}
+impl<T> From<RawIter<T>> for RawParIter<T> {
+ fn from(it: RawIter<T>) -> Self {
+ RawParIter { iter: it.iter }
+ }
+}
+
impl<T> ParallelIterator for RawParIter<T> {
type Item = Bucket<T>;
@@ -169,9 +175,9 @@
impl<T> RawTable<T> {
/// Returns a parallel iterator over the elements in a `RawTable`.
#[cfg_attr(feature = "inline-more", inline)]
- pub fn par_iter(&self) -> RawParIter<T> {
+ pub unsafe fn par_iter(&self) -> RawParIter<T> {
RawParIter {
- iter: unsafe { self.iter().iter },
+ iter: self.iter().iter,
}
}
diff --git a/sgx_tstd/hashbrown/src/lib.rs b/sgx_tstd/hashbrown/src/lib.rs
index 63b180c..3aff40a 100644
--- a/sgx_tstd/hashbrown/src/lib.rs
+++ b/sgx_tstd/hashbrown/src/lib.rs
@@ -12,20 +12,13 @@
#![no_std]
#![cfg_attr(
feature = "nightly",
- feature(
- alloc_layout_extra,
- allocator_api,
- ptr_offset_from,
- test,
- core_intrinsics,
- dropck_eyepatch,
- specialization,
- )
+ feature(test, core_intrinsics, dropck_eyepatch, min_specialization, extend_one)
)]
#![allow(
clippy::doc_markdown,
clippy::module_name_repetitions,
- clippy::must_use_candidate
+ clippy::must_use_candidate,
+ clippy::option_if_let_else
)]
#![warn(missing_docs)]
#![warn(rust_2018_idioms)]
@@ -34,11 +27,8 @@
#[macro_use]
extern crate std;
-#[cfg(has_extern_crate_alloc)]
#[cfg_attr(test, macro_use)]
extern crate alloc;
-#[cfg(not(has_extern_crate_alloc))]
-extern crate std as alloc;
#[cfg(feature = "nightly")]
#[cfg(doctest)]
@@ -107,14 +97,15 @@
pub use crate::map::HashMap;
pub use crate::set::HashSet;
-/// Augments `AllocErr` with a `CapacityOverflow` variant.
+/// The error type for `try_reserve` methods.
#[derive(Clone, PartialEq, Eq, Debug)]
-pub enum CollectionAllocErr {
+pub enum TryReserveError {
/// Error due to the computed capacity exceeding the collection's maximum
/// (usually `isize::MAX` bytes).
CapacityOverflow,
- /// Error due to the allocator.
- AllocErr {
+
+ /// The memory allocator returned an error
+ AllocError {
/// The layout of the allocation request that failed.
layout: alloc::alloc::Layout,
},
diff --git a/sgx_tstd/hashbrown/src/macros.rs b/sgx_tstd/hashbrown/src/macros.rs
index e743094..0279597 100644
--- a/sgx_tstd/hashbrown/src/macros.rs
+++ b/sgx_tstd/hashbrown/src/macros.rs
@@ -52,3 +52,18 @@
$(#[$m] $it)*
};
}
+
+// Helper macro for specialization. This also helps avoid parse errors if the
+// default fn syntax for specialization changes in the future.
+#[cfg(feature = "nightly")]
+macro_rules! default_fn {
+ ($($tt:tt)*) => {
+ default $($tt)*
+ }
+}
+#[cfg(not(feature = "nightly"))]
+macro_rules! default_fn {
+ ($($tt:tt)*) => {
+ $($tt)*
+ }
+}
diff --git a/sgx_tstd/hashbrown/src/map.rs b/sgx_tstd/hashbrown/src/map.rs
index edb9401..380d71a 100644
--- a/sgx_tstd/hashbrown/src/map.rs
+++ b/sgx_tstd/hashbrown/src/map.rs
@@ -1,5 +1,5 @@
use crate::raw::{Bucket, RawDrain, RawIntoIter, RawIter, RawTable};
-use crate::CollectionAllocErr;
+use crate::TryReserveError;
use core::borrow::Borrow;
use core::fmt::{self, Debug};
use core::hash::{BuildHasher, Hash, Hasher};
@@ -181,11 +181,8 @@
/// ```
/// use hashbrown::HashMap;
///
-/// let timber_resources: HashMap<&str, i32> =
-/// [("Norway", 100),
-/// ("Denmark", 50),
-/// ("Iceland", 10)]
-/// .iter().cloned().collect();
+/// let timber_resources: HashMap<&str, i32> = [("Norway", 100), ("Denmark", 50), ("Iceland", 10)]
+/// .iter().cloned().collect();
/// // use the values stored in map
/// ```
pub struct HashMap<K, V, S = DefaultHashBuilder> {
@@ -202,39 +199,10 @@
}
fn clone_from(&mut self, source: &Self) {
- // We clone the hash_builder first since this might panic and we don't
- // want the table to have elements hashed with the wrong hash_builder.
- let hash_builder = source.hash_builder.clone();
-
- #[cfg(not(feature = "nightly"))]
- {
- self.table.clone_from(&source.table);
- }
- #[cfg(feature = "nightly")]
- {
- trait HashClone<S> {
- fn clone_from(&mut self, source: &Self, hash_builder: &S);
- }
- impl<K: Clone, V: Clone, S> HashClone<S> for HashMap<K, V, S> {
- default fn clone_from(&mut self, source: &Self, _hash_builder: &S) {
- self.table.clone_from(&source.table);
- }
- }
- impl<K: Clone, V: Clone, S> HashClone<S> for HashMap<K, V, S>
- where
- K: Eq + Hash,
- S: BuildHasher,
- {
- fn clone_from(&mut self, source: &Self, hash_builder: &S) {
- self.table
- .clone_from_with_hasher(&source.table, |x| make_hash(hash_builder, &x.0));
- }
- }
- HashClone::clone_from(self, source, &hash_builder);
- }
+ self.table.clone_from(&source.table);
// Update hash_builder only if we successfully cloned all elements.
- self.hash_builder = hash_builder;
+ self.hash_builder.clone_from(&source.hash_builder);
}
}
@@ -291,6 +259,9 @@
/// cause many collisions and very poor performance. Setting it
/// manually using this function can expose a DoS attack vector.
///
+ /// The `hash_builder` passed should implement the [`BuildHasher`] trait for
+ /// the HashMap to be useful, see its documentation for details.
+ ///
/// # Examples
///
/// ```
@@ -301,8 +272,10 @@
/// let mut map = HashMap::with_hasher(s);
/// map.insert(1, 2);
/// ```
+ ///
+ /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
#[cfg_attr(feature = "inline-more", inline)]
- pub fn with_hasher(hash_builder: S) -> Self {
+ pub const fn with_hasher(hash_builder: S) -> Self {
Self {
hash_builder,
table: RawTable::new(),
@@ -320,6 +293,9 @@
/// cause many collisions and very poor performance. Setting it
/// manually using this function can expose a DoS attack vector.
///
+ /// The `hash_builder` passed should implement the [`BuildHasher`] trait for
+ /// the HashMap to be useful, see its documentation for details.
+ ///
/// # Examples
///
/// ```
@@ -330,6 +306,8 @@
/// let mut map = HashMap::with_capacity_and_hasher(10, s);
/// map.insert(1, 2);
/// ```
+ ///
+ /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
#[cfg_attr(feature = "inline-more", inline)]
pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> Self {
Self {
@@ -578,6 +556,73 @@
}
}
+ /// Retains only the elements specified by the predicate.
+ ///
+ /// In other words, remove all pairs `(k, v)` such that `f(&k,&mut v)` returns `false`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use hashbrown::HashMap;
+ ///
+ /// let mut map: HashMap<i32, i32> = (0..8).map(|x|(x, x*10)).collect();
+ /// map.retain(|&k, _| k % 2 == 0);
+ /// assert_eq!(map.len(), 4);
+ /// ```
+ pub fn retain<F>(&mut self, mut f: F)
+ where
+ F: FnMut(&K, &mut V) -> bool,
+ {
+ // Here we only use `iter` as a temporary, preventing use-after-free
+ unsafe {
+ for item in self.table.iter() {
+ let &mut (ref key, ref mut value) = item.as_mut();
+ if !f(key, value) {
+ self.table.erase(item);
+ }
+ }
+ }
+ }
+
+ /// Drains elements which are true under the given predicate,
+ /// and returns an iterator over the removed items.
+ ///
+ /// In other words, move all pairs `(k, v)` such that `f(&k,&mut v)` returns `true` out
+ /// into another iterator.
+ ///
+ /// When the returned DrainFilter is dropped, any remaining elements that satisfy
+ /// the predicate are dropped from the table.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use hashbrown::HashMap;
+ ///
+ /// let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x)).collect();
+ /// let drained: HashMap<i32, i32> = map.drain_filter(|k, _v| k % 2 == 0).collect();
+ ///
+ /// let mut evens = drained.keys().cloned().collect::<Vec<_>>();
+ /// let mut odds = map.keys().cloned().collect::<Vec<_>>();
+ /// evens.sort();
+ /// odds.sort();
+ ///
+ /// assert_eq!(evens, vec![0, 2, 4, 6]);
+ /// assert_eq!(odds, vec![1, 3, 5, 7]);
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn drain_filter<F>(&mut self, f: F) -> DrainFilter<'_, K, V, F>
+ where
+ F: FnMut(&K, &mut V) -> bool,
+ {
+ DrainFilter {
+ f,
+ inner: DrainFilterInner {
+ iter: unsafe { self.table.iter() },
+ table: &mut self.table,
+ },
+ }
+ }
+
/// Clears the map, removing all key-value pairs. Keeps the allocated memory
/// for reuse.
///
@@ -643,7 +688,7 @@
/// map.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?");
/// ```
#[cfg_attr(feature = "inline-more", inline)]
- pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
+ pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
let hash_builder = &self.hash_builder;
self.table
.try_reserve(additional, |x| make_hash(hash_builder, &x.0))
@@ -725,6 +770,7 @@
let hash = make_hash(&self.hash_builder, &key);
if let Some(elem) = self.table.find(hash, |q| q.0.eq(&key)) {
Entry::Occupied(OccupiedEntry {
+ hash,
key: Some(key),
elem,
table: self,
@@ -763,7 +809,11 @@
K: Borrow<Q>,
Q: Hash + Eq,
{
- self.get_key_value(k).map(|(_, v)| v)
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.get_key_value(k) {
+ Some((_, v)) => Some(v),
+ None => None,
+ }
}
/// Returns the key-value pair corresponding to the supplied key.
@@ -792,12 +842,14 @@
Q: Hash + Eq,
{
let hash = make_hash(&self.hash_builder, k);
- self.table
- .find(hash, |x| k.eq(x.0.borrow()))
- .map(|item| unsafe {
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.table.find(hash, |x| k.eq(x.0.borrow())) {
+ Some(item) => unsafe {
let &(ref key, ref value) = item.as_ref();
- (key, value)
- })
+ Some((key, value))
+ },
+ None => None,
+ }
}
/// Returns the key-value pair corresponding to the supplied key, with a mutable reference to value.
@@ -830,12 +882,14 @@
Q: Hash + Eq,
{
let hash = make_hash(&self.hash_builder, k);
- self.table
- .find(hash, |x| k.eq(x.0.borrow()))
- .map(|item| unsafe {
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.table.find(hash, |x| k.eq(x.0.borrow())) {
+ Some(item) => unsafe {
let &mut (ref key, ref mut value) = item.as_mut();
- (key, value)
- })
+ Some((key, value))
+ },
+ None => None,
+ }
}
/// Returns `true` if the map contains a value for the specified key.
@@ -894,9 +948,11 @@
Q: Hash + Eq,
{
let hash = make_hash(&self.hash_builder, k);
- self.table
- .find(hash, |x| k.eq(x.0.borrow()))
- .map(|item| unsafe { &mut item.as_mut().1 })
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.table.find(hash, |x| k.eq(x.0.borrow())) {
+ Some(item) => Some(unsafe { &mut item.as_mut().1 }),
+ None => None,
+ }
}
/// Inserts a key-value pair into the map.
@@ -965,7 +1021,11 @@
K: Borrow<Q>,
Q: Hash + Eq,
{
- self.remove_entry(k).map(|(_, v)| v)
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.remove_entry(k) {
+ Some((_, v)) => Some(v),
+ None => None,
+ }
}
/// Removes a key from the map, returning the stored key and value if the
@@ -997,72 +1057,12 @@
unsafe {
let hash = make_hash(&self.hash_builder, &k);
if let Some(item) = self.table.find(hash, |x| k.eq(x.0.borrow())) {
- self.table.erase_no_drop(&item);
- Some(item.read())
+ Some(self.table.remove(item))
} else {
None
}
}
}
-
- /// Retains only the elements specified by the predicate.
- ///
- /// In other words, remove all pairs `(k, v)` such that `f(&k,&mut v)` returns `false`.
- ///
- /// # Examples
- ///
- /// ```
- /// use hashbrown::HashMap;
- ///
- /// let mut map: HashMap<i32, i32> = (0..8).map(|x|(x, x*10)).collect();
- /// map.retain(|&k, _| k % 2 == 0);
- /// assert_eq!(map.len(), 4);
- /// ```
- pub fn retain<F>(&mut self, mut f: F)
- where
- F: FnMut(&K, &mut V) -> bool,
- {
- // Here we only use `iter` as a temporary, preventing use-after-free
- unsafe {
- for item in self.table.iter() {
- let &mut (ref key, ref mut value) = item.as_mut();
- if !f(key, value) {
- // Erase the element from the table first since drop might panic.
- self.table.erase_no_drop(&item);
- item.drop();
- }
- }
- }
- }
- /// Drains elements which are false under the given predicate,
- /// and returns an iterator over the removed items.
- ///
- /// In other words, move all pairs `(k, v)` such that `f(&k,&mut v)` returns `false` out
- /// into another iterator.
- ///
- /// When the returned DrainedFilter is dropped, the elements that don't satisfy
- /// the predicate are dropped from the table.
- ///
- /// # Examples
- ///
- /// ```
- /// use hashbrown::HashMap;
- ///
- /// let mut map: HashMap<i32, i32> = (0..8).map(|x|(x, x*10)).collect();
- /// let drained = map.drain_filter(|&k, _| k % 2 == 0);
- /// assert_eq!(drained.count(), 4);
- /// assert_eq!(map.len(), 4);
- /// ```
- pub fn drain_filter<F>(&mut self, f: F) -> DrainFilter<'_, K, V, F>
- where
- F: FnMut(&K, &mut V) -> bool,
- {
- DrainFilter {
- f,
- iter: unsafe { self.table.iter() },
- table: &mut self.table,
- }
- }
}
impl<K, V, S> HashMap<K, V, S> {
@@ -1247,7 +1247,7 @@
/// An owning iterator over the entries of a `HashMap`.
///
-/// This `struct` is created by the [`into_iter`] method on [`HashMap`][`HashMap`]
+/// This `struct` is created by the [`into_iter`] method on [`HashMap`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
/// [`into_iter`]: struct.HashMap.html#method.into_iter
@@ -1355,47 +1355,68 @@
F: FnMut(&K, &mut V) -> bool,
{
f: F,
- iter: RawIter<(K, V)>,
- table: &'a mut RawTable<(K, V)>,
+ inner: DrainFilterInner<'a, K, V>,
}
impl<'a, K, V, F> Drop for DrainFilter<'a, K, V, F>
where
F: FnMut(&K, &mut V) -> bool,
{
+ #[cfg_attr(feature = "inline-more", inline)]
fn drop(&mut self) {
- struct DropGuard<'r, 'a, K, V, F>(&'r mut DrainFilter<'a, K, V, F>)
- where
- F: FnMut(&K, &mut V) -> bool;
-
- impl<'r, 'a, K, V, F> Drop for DropGuard<'r, 'a, K, V, F>
- where
- F: FnMut(&K, &mut V) -> bool,
- {
- fn drop(&mut self) {
- while let Some(_) = self.0.next() {}
- }
- }
while let Some(item) = self.next() {
- let guard = DropGuard(self);
+ let guard = ConsumeAllOnDrop(self);
drop(item);
mem::forget(guard);
}
}
}
+pub(super) struct ConsumeAllOnDrop<'a, T: Iterator>(pub &'a mut T);
+
+impl<T: Iterator> Drop for ConsumeAllOnDrop<'_, T> {
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn drop(&mut self) {
+ self.0.for_each(drop)
+ }
+}
+
impl<K, V, F> Iterator for DrainFilter<'_, K, V, F>
where
F: FnMut(&K, &mut V) -> bool,
{
type Item = (K, V);
+
+ #[cfg_attr(feature = "inline-more", inline)]
fn next(&mut self) -> Option<Self::Item> {
+ self.inner.next(&mut self.f)
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (0, self.inner.iter.size_hint().1)
+ }
+}
+
+impl<K, V, F> FusedIterator for DrainFilter<'_, K, V, F> where F: FnMut(&K, &mut V) -> bool {}
+
+/// Portions of `DrainFilter` shared with `set::DrainFilter`
+pub(super) struct DrainFilterInner<'a, K, V> {
+ pub iter: RawIter<(K, V)>,
+ pub table: &'a mut RawTable<(K, V)>,
+}
+
+impl<K, V> DrainFilterInner<'_, K, V> {
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub(super) fn next<F>(&mut self, f: &mut F) -> Option<(K, V)>
+ where
+ F: FnMut(&K, &mut V) -> bool,
+ {
unsafe {
while let Some(item) = self.iter.next() {
let &mut (ref key, ref mut value) = item.as_mut();
- if !(self.f)(key, value) {
- self.table.erase_no_drop(&item);
- return Some(item.read());
+ if f(key, value) {
+ return Some(self.table.remove(item));
}
}
}
@@ -1436,7 +1457,7 @@
/// [`RawEntryBuilderMut`]: struct.RawEntryBuilderMut.html
pub enum RawEntryMut<'a, K, V, S> {
/// An occupied entry.
- Occupied(RawOccupiedEntryMut<'a, K, V>),
+ Occupied(RawOccupiedEntryMut<'a, K, V, S>),
/// A vacant entry.
Vacant(RawVacantEntryMut<'a, K, V, S>),
}
@@ -1445,21 +1466,24 @@
/// It is part of the [`RawEntryMut`] enum.
///
/// [`RawEntryMut`]: enum.RawEntryMut.html
-pub struct RawOccupiedEntryMut<'a, K, V> {
+pub struct RawOccupiedEntryMut<'a, K, V, S> {
elem: Bucket<(K, V)>,
table: &'a mut RawTable<(K, V)>,
+ hash_builder: &'a S,
}
-unsafe impl<K, V> Send for RawOccupiedEntryMut<'_, K, V>
+unsafe impl<K, V, S> Send for RawOccupiedEntryMut<'_, K, V, S>
where
K: Send,
V: Send,
+ S: Sync,
{
}
-unsafe impl<K, V> Sync for RawOccupiedEntryMut<'_, K, V>
+unsafe impl<K, V, S> Sync for RawOccupiedEntryMut<'_, K, V, S>
where
K: Sync,
V: Sync,
+ S: Sync,
{
}
@@ -1528,6 +1552,7 @@
Some(elem) => RawEntryMut::Occupied(RawOccupiedEntryMut {
elem,
table: &mut self.map.table,
+ hash_builder: &self.map.hash_builder,
}),
None => RawEntryMut::Vacant(RawVacantEntryMut {
table: &mut self.map.table,
@@ -1568,13 +1593,13 @@
where
F: FnMut(&K) -> bool,
{
- self.map
- .table
- .find(hash, |(k, _)| is_match(k))
- .map(|item| unsafe {
+ match self.map.table.find(hash, |(k, _)| is_match(k)) {
+ Some(item) => unsafe {
let &(ref key, ref value) = item.as_ref();
- (key, value)
- })
+ Some((key, value))
+ },
+ None => None,
+ }
}
/// Access an entry by hash.
@@ -1602,7 +1627,7 @@
/// assert_eq!(entry.remove_entry(), ("horseyland", 37));
/// ```
#[cfg_attr(feature = "inline-more", inline)]
- pub fn insert(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V>
+ pub fn insert(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V, S>
where
K: Hash,
S: BuildHasher,
@@ -1714,9 +1739,75 @@
RawEntryMut::Vacant(entry) => RawEntryMut::Vacant(entry),
}
}
+
+ /// Provides shared access to the key and owned access to the value of
+ /// an occupied entry and allows to replace or remove it based on the
+ /// value of the returned option.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use hashbrown::HashMap;
+ /// use hashbrown::hash_map::RawEntryMut;
+ ///
+ /// let mut map: HashMap<&str, u32> = HashMap::new();
+ ///
+ /// let entry = map
+ /// .raw_entry_mut()
+ /// .from_key("poneyland")
+ /// .and_replace_entry_with(|_k, _v| panic!());
+ ///
+ /// match entry {
+ /// RawEntryMut::Vacant(_) => {},
+ /// RawEntryMut::Occupied(_) => panic!(),
+ /// }
+ ///
+ /// map.insert("poneyland", 42);
+ ///
+ /// let entry = map
+ /// .raw_entry_mut()
+ /// .from_key("poneyland")
+ /// .and_replace_entry_with(|k, v| {
+ /// assert_eq!(k, &"poneyland");
+ /// assert_eq!(v, 42);
+ /// Some(v + 1)
+ /// });
+ ///
+ /// match entry {
+ /// RawEntryMut::Occupied(e) => {
+ /// assert_eq!(e.key(), &"poneyland");
+ /// assert_eq!(e.get(), &43);
+ /// },
+ /// RawEntryMut::Vacant(_) => panic!(),
+ /// }
+ ///
+ /// assert_eq!(map["poneyland"], 43);
+ ///
+ /// let entry = map
+ /// .raw_entry_mut()
+ /// .from_key("poneyland")
+ /// .and_replace_entry_with(|_k, _v| None);
+ ///
+ /// match entry {
+ /// RawEntryMut::Vacant(_) => {},
+ /// RawEntryMut::Occupied(_) => panic!(),
+ /// }
+ ///
+ /// assert!(!map.contains_key("poneyland"));
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn and_replace_entry_with<F>(self, f: F) -> Self
+ where
+ F: FnOnce(&K, V) -> Option<V>,
+ {
+ match self {
+ RawEntryMut::Occupied(entry) => entry.replace_entry_with(f),
+ RawEntryMut::Vacant(_) => self,
+ }
+ }
}
-impl<'a, K, V> RawOccupiedEntryMut<'a, K, V> {
+impl<'a, K, V, S> RawOccupiedEntryMut<'a, K, V, S> {
/// Gets a reference to the key in the entry.
#[cfg_attr(feature = "inline-more", inline)]
pub fn key(&self) -> &K {
@@ -1804,9 +1895,32 @@
/// Take the ownership of the key and value from the map.
#[cfg_attr(feature = "inline-more", inline)]
pub fn remove_entry(self) -> (K, V) {
+ unsafe { self.table.remove(self.elem) }
+ }
+
+ /// Provides shared access to the key and owned access to the value of
+ /// the entry and allows to replace or remove it based on the
+ /// value of the returned option.
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn replace_entry_with<F>(self, f: F) -> RawEntryMut<'a, K, V, S>
+ where
+ F: FnOnce(&K, V) -> Option<V>,
+ {
unsafe {
- self.table.erase_no_drop(&self.elem);
- self.elem.read()
+ let still_occupied = self
+ .table
+ .replace_bucket_with(self.elem.clone(), |(key, value)| {
+ f(&key, value).map(|new_value| (key, new_value))
+ });
+
+ if still_occupied {
+ RawEntryMut::Occupied(self)
+ } else {
+ RawEntryMut::Vacant(RawVacantEntryMut {
+ table: self.table,
+ hash_builder: self.hash_builder,
+ })
+ }
}
}
}
@@ -1858,7 +1972,7 @@
}
#[cfg_attr(feature = "inline-more", inline)]
- fn insert_entry(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V>
+ fn insert_entry(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V, S>
where
K: Hash,
S: BuildHasher,
@@ -1873,6 +1987,7 @@
RawOccupiedEntryMut {
elem,
table: self.table,
+ hash_builder: self.hash_builder,
}
}
}
@@ -1892,7 +2007,7 @@
}
}
-impl<K: Debug, V: Debug> Debug for RawOccupiedEntryMut<'_, K, V> {
+impl<K: Debug, V: Debug, S> Debug for RawOccupiedEntryMut<'_, K, V, S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("RawOccupiedEntryMut")
.field("key", self.key())
@@ -1941,6 +2056,7 @@
///
/// [`Entry`]: enum.Entry.html
pub struct OccupiedEntry<'a, K, V, S> {
+ hash: u64,
key: Option<K>,
elem: Bucket<(K, V)>,
table: &'a mut HashMap<K, V, S>,
@@ -2040,10 +2156,14 @@
#[cfg_attr(feature = "inline-more", inline)]
fn next(&mut self) -> Option<(&'a K, &'a V)> {
- self.inner.next().map(|x| unsafe {
- let r = x.as_ref();
- (&r.0, &r.1)
- })
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.inner.next() {
+ Some(x) => unsafe {
+ let r = x.as_ref();
+ Some((&r.0, &r.1))
+ },
+ None => None,
+ }
}
#[cfg_attr(feature = "inline-more", inline)]
fn size_hint(&self) -> (usize, Option<usize>) {
@@ -2064,10 +2184,14 @@
#[cfg_attr(feature = "inline-more", inline)]
fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
- self.inner.next().map(|x| unsafe {
- let r = x.as_mut();
- (&r.0, &mut r.1)
- })
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.inner.next() {
+ Some(x) => unsafe {
+ let r = x.as_mut();
+ Some((&r.0, &mut r.1))
+ },
+ None => None,
+ }
}
#[cfg_attr(feature = "inline-more", inline)]
fn size_hint(&self) -> (usize, Option<usize>) {
@@ -2123,7 +2247,11 @@
#[cfg_attr(feature = "inline-more", inline)]
fn next(&mut self) -> Option<&'a K> {
- self.inner.next().map(|(k, _)| k)
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.inner.next() {
+ Some((k, _)) => Some(k),
+ None => None,
+ }
}
#[cfg_attr(feature = "inline-more", inline)]
fn size_hint(&self) -> (usize, Option<usize>) {
@@ -2143,7 +2271,11 @@
#[cfg_attr(feature = "inline-more", inline)]
fn next(&mut self) -> Option<&'a V> {
- self.inner.next().map(|(_, v)| v)
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.inner.next() {
+ Some((_, v)) => Some(v),
+ None => None,
+ }
}
#[cfg_attr(feature = "inline-more", inline)]
fn size_hint(&self) -> (usize, Option<usize>) {
@@ -2163,7 +2295,11 @@
#[cfg_attr(feature = "inline-more", inline)]
fn next(&mut self) -> Option<&'a mut V> {
- self.inner.next().map(|(_, v)| v)
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.inner.next() {
+ Some((_, v)) => Some(v),
+ None => None,
+ }
}
#[cfg_attr(feature = "inline-more", inline)]
fn size_hint(&self) -> (usize, Option<usize>) {
@@ -2301,6 +2437,36 @@
}
}
+ /// Ensures a value is in the entry by inserting, if empty, the result of the default function,
+ /// which takes the key as its argument, and returns a mutable reference to the value in the
+ /// entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use hashbrown::HashMap;
+ ///
+ /// let mut map: HashMap<&str, usize> = HashMap::new();
+ ///
+ /// map.entry("poneyland").or_insert_with_key(|key| key.chars().count());
+ ///
+ /// assert_eq!(map["poneyland"], 9);
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn or_insert_with_key<F: FnOnce(&K) -> V>(self, default: F) -> &'a mut V
+ where
+ K: Hash,
+ S: BuildHasher,
+ {
+ match self {
+ Entry::Occupied(entry) => entry.into_mut(),
+ Entry::Vacant(entry) => {
+ let value = default(entry.key());
+ entry.insert(value)
+ }
+ }
+ }
+
/// Returns a reference to this entry's key.
///
/// # Examples
@@ -2352,6 +2518,71 @@
Entry::Vacant(entry) => Entry::Vacant(entry),
}
}
+
+ /// Provides shared access to the key and owned access to the value of
+ /// an occupied entry and allows to replace or remove it based on the
+ /// value of the returned option.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use hashbrown::HashMap;
+ /// use hashbrown::hash_map::Entry;
+ ///
+ /// let mut map: HashMap<&str, u32> = HashMap::new();
+ ///
+ /// let entry = map
+ /// .entry("poneyland")
+ /// .and_replace_entry_with(|_k, _v| panic!());
+ ///
+ /// match entry {
+ /// Entry::Vacant(e) => {
+ /// assert_eq!(e.key(), &"poneyland");
+ /// }
+ /// Entry::Occupied(_) => panic!(),
+ /// }
+ ///
+ /// map.insert("poneyland", 42);
+ ///
+ /// let entry = map
+ /// .entry("poneyland")
+ /// .and_replace_entry_with(|k, v| {
+ /// assert_eq!(k, &"poneyland");
+ /// assert_eq!(v, 42);
+ /// Some(v + 1)
+ /// });
+ ///
+ /// match entry {
+ /// Entry::Occupied(e) => {
+ /// assert_eq!(e.key(), &"poneyland");
+ /// assert_eq!(e.get(), &43);
+ /// }
+ /// Entry::Vacant(_) => panic!(),
+ /// }
+ ///
+ /// assert_eq!(map["poneyland"], 43);
+ ///
+ /// let entry = map
+ /// .entry("poneyland")
+ /// .and_replace_entry_with(|_k, _v| None);
+ ///
+ /// match entry {
+ /// Entry::Vacant(e) => assert_eq!(e.key(), &"poneyland"),
+ /// Entry::Occupied(_) => panic!(),
+ /// }
+ ///
+ /// assert!(!map.contains_key("poneyland"));
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn and_replace_entry_with<F>(self, f: F) -> Self
+ where
+ F: FnOnce(&K, V) -> Option<V>,
+ {
+ match self {
+ Entry::Occupied(entry) => entry.replace_entry_with(f),
+ Entry::Vacant(_) => self,
+ }
+ }
}
impl<'a, K, V: Default, S> Entry<'a, K, V, S> {
@@ -2418,10 +2649,7 @@
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn remove_entry(self) -> (K, V) {
- unsafe {
- self.table.table.erase_no_drop(&self.elem);
- self.elem.read()
- }
+ unsafe { self.table.table.remove(self.elem) }
}
/// Gets a reference to the value in the entry.
@@ -2609,6 +2837,85 @@
let entry = unsafe { self.elem.as_mut() };
mem::replace(&mut entry.0, self.key.unwrap())
}
+
+ /// Provides shared access to the key and owned access to the value of
+ /// the entry and allows to replace or remove it based on the
+ /// value of the returned option.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use hashbrown::HashMap;
+ /// use hashbrown::hash_map::Entry;
+ ///
+ /// let mut map: HashMap<&str, u32> = HashMap::new();
+ /// map.insert("poneyland", 42);
+ ///
+ /// let entry = match map.entry("poneyland") {
+ /// Entry::Occupied(e) => {
+ /// e.replace_entry_with(|k, v| {
+ /// assert_eq!(k, &"poneyland");
+ /// assert_eq!(v, 42);
+ /// Some(v + 1)
+ /// })
+ /// }
+ /// Entry::Vacant(_) => panic!(),
+ /// };
+ ///
+ /// match entry {
+ /// Entry::Occupied(e) => {
+ /// assert_eq!(e.key(), &"poneyland");
+ /// assert_eq!(e.get(), &43);
+ /// }
+ /// Entry::Vacant(_) => panic!(),
+ /// }
+ ///
+ /// assert_eq!(map["poneyland"], 43);
+ ///
+ /// let entry = match map.entry("poneyland") {
+ /// Entry::Occupied(e) => e.replace_entry_with(|_k, _v| None),
+ /// Entry::Vacant(_) => panic!(),
+ /// };
+ ///
+ /// match entry {
+ /// Entry::Vacant(e) => {
+ /// assert_eq!(e.key(), &"poneyland");
+ /// }
+ /// Entry::Occupied(_) => panic!(),
+ /// }
+ ///
+ /// assert!(!map.contains_key("poneyland"));
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn replace_entry_with<F>(self, f: F) -> Entry<'a, K, V, S>
+ where
+ F: FnOnce(&K, V) -> Option<V>,
+ {
+ unsafe {
+ let mut spare_key = None;
+
+ self.table
+ .table
+ .replace_bucket_with(self.elem.clone(), |(key, value)| {
+ if let Some(new_value) = f(&key, value) {
+ Some((key, new_value))
+ } else {
+ spare_key = Some(key);
+ None
+ }
+ });
+
+ if let Some(key) = spare_key {
+ Entry::Vacant(VacantEntry {
+ hash: self.hash,
+ key,
+ table: self.table,
+ })
+ } else {
+ Entry::Occupied(self)
+ }
+ }
+ }
}
impl<'a, K, V, S> VacantEntry<'a, K, V, S> {
@@ -2687,6 +2994,7 @@
make_hash(hash_builder, &x.0)
});
OccupiedEntry {
+ hash: self.hash,
key: None,
elem,
table: self.table,
@@ -2710,6 +3018,8 @@
}
}
+/// Inserts all new key-values from the iterator and replaces values with existing
+/// keys with new values returned from the iterator.
impl<K, V, S> Extend<(K, V)> for HashMap<K, V, S>
where
K: Eq + Hash,
@@ -2732,6 +3042,27 @@
self.insert(k, v);
});
}
+
+ #[inline]
+ #[cfg(feature = "nightly")]
+ fn extend_one(&mut self, (k, v): (K, V)) {
+ self.insert(k, v);
+ }
+
+ #[inline]
+ #[cfg(feature = "nightly")]
+ fn extend_reserve(&mut self, additional: usize) {
+ // Keys may be already present or show multiple times in the iterator.
+ // Reserve the entire hint lower bound if the map is empty.
+ // Otherwise reserve half the hint (rounded up), so the map
+ // will only resize twice in the worst case.
+ let reserve = if self.is_empty() {
+ additional
+ } else {
+ (additional + 1) / 2
+ };
+ self.reserve(reserve);
+ }
}
impl<'a, K, V, S> Extend<(&'a K, &'a V)> for HashMap<K, V, S>
@@ -2744,6 +3075,18 @@
fn extend<T: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: T) {
self.extend(iter.into_iter().map(|(&key, &value)| (key, value)));
}
+
+ #[inline]
+ #[cfg(feature = "nightly")]
+ fn extend_one(&mut self, (k, v): (&'a K, &'a V)) {
+ self.insert(*k, *v);
+ }
+
+ #[inline]
+ #[cfg(feature = "nightly")]
+ fn extend_reserve(&mut self, additional: usize) {
+ Extend::<(K, V)>::extend_reserve(self, additional);
+ }
}
#[allow(dead_code)]
@@ -2790,7 +3133,7 @@
use super::DefaultHashBuilder;
use super::Entry::{Occupied, Vacant};
use super::{HashMap, RawEntryMut};
- use crate::CollectionAllocErr::*;
+ use crate::TryReserveError::*;
use rand::{rngs::SmallRng, Rng, SeedableRng};
use std::cell::RefCell;
use std::usize;
@@ -3410,13 +3753,15 @@
#[test]
fn test_from_iter() {
- let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
+ let xs = [(1, 1), (2, 2), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let map: HashMap<_, _> = xs.iter().cloned().collect();
for &(k, v) in &xs {
assert_eq!(map.get(&k), Some(&v));
}
+
+ assert_eq!(map.iter().len(), xs.len() - 1);
}
#[test]
@@ -3658,6 +4003,233 @@
}
#[test]
+ fn test_occupied_entry_replace_entry_with() {
+ let mut a = HashMap::new();
+
+ let key = "a key";
+ let value = "an initial value";
+ let new_value = "a new value";
+
+ let entry = a.entry(key).insert(value).replace_entry_with(|k, v| {
+ assert_eq!(k, &key);
+ assert_eq!(v, value);
+ Some(new_value)
+ });
+
+ match entry {
+ Occupied(e) => {
+ assert_eq!(e.key(), &key);
+ assert_eq!(e.get(), &new_value);
+ }
+ Vacant(_) => panic!(),
+ }
+
+ assert_eq!(a[key], new_value);
+ assert_eq!(a.len(), 1);
+
+ let entry = match a.entry(key) {
+ Occupied(e) => e.replace_entry_with(|k, v| {
+ assert_eq!(k, &key);
+ assert_eq!(v, new_value);
+ None
+ }),
+ Vacant(_) => panic!(),
+ };
+
+ match entry {
+ Vacant(e) => assert_eq!(e.key(), &key),
+ Occupied(_) => panic!(),
+ }
+
+ assert!(!a.contains_key(key));
+ assert_eq!(a.len(), 0);
+ }
+
+ #[test]
+ fn test_entry_and_replace_entry_with() {
+ let mut a = HashMap::new();
+
+ let key = "a key";
+ let value = "an initial value";
+ let new_value = "a new value";
+
+ let entry = a.entry(key).and_replace_entry_with(|_, _| panic!());
+
+ match entry {
+ Vacant(e) => assert_eq!(e.key(), &key),
+ Occupied(_) => panic!(),
+ }
+
+ a.insert(key, value);
+
+ let entry = a.entry(key).and_replace_entry_with(|k, v| {
+ assert_eq!(k, &key);
+ assert_eq!(v, value);
+ Some(new_value)
+ });
+
+ match entry {
+ Occupied(e) => {
+ assert_eq!(e.key(), &key);
+ assert_eq!(e.get(), &new_value);
+ }
+ Vacant(_) => panic!(),
+ }
+
+ assert_eq!(a[key], new_value);
+ assert_eq!(a.len(), 1);
+
+ let entry = a.entry(key).and_replace_entry_with(|k, v| {
+ assert_eq!(k, &key);
+ assert_eq!(v, new_value);
+ None
+ });
+
+ match entry {
+ Vacant(e) => assert_eq!(e.key(), &key),
+ Occupied(_) => panic!(),
+ }
+
+ assert!(!a.contains_key(key));
+ assert_eq!(a.len(), 0);
+ }
+
+ #[test]
+ fn test_raw_occupied_entry_replace_entry_with() {
+ let mut a = HashMap::new();
+
+ let key = "a key";
+ let value = "an initial value";
+ let new_value = "a new value";
+
+ let entry = a
+ .raw_entry_mut()
+ .from_key(&key)
+ .insert(key, value)
+ .replace_entry_with(|k, v| {
+ assert_eq!(k, &key);
+ assert_eq!(v, value);
+ Some(new_value)
+ });
+
+ match entry {
+ RawEntryMut::Occupied(e) => {
+ assert_eq!(e.key(), &key);
+ assert_eq!(e.get(), &new_value);
+ }
+ RawEntryMut::Vacant(_) => panic!(),
+ }
+
+ assert_eq!(a[key], new_value);
+ assert_eq!(a.len(), 1);
+
+ let entry = match a.raw_entry_mut().from_key(&key) {
+ RawEntryMut::Occupied(e) => e.replace_entry_with(|k, v| {
+ assert_eq!(k, &key);
+ assert_eq!(v, new_value);
+ None
+ }),
+ RawEntryMut::Vacant(_) => panic!(),
+ };
+
+ match entry {
+ RawEntryMut::Vacant(_) => {}
+ RawEntryMut::Occupied(_) => panic!(),
+ }
+
+ assert!(!a.contains_key(key));
+ assert_eq!(a.len(), 0);
+ }
+
+ #[test]
+ fn test_raw_entry_and_replace_entry_with() {
+ let mut a = HashMap::new();
+
+ let key = "a key";
+ let value = "an initial value";
+ let new_value = "a new value";
+
+ let entry = a
+ .raw_entry_mut()
+ .from_key(&key)
+ .and_replace_entry_with(|_, _| panic!());
+
+ match entry {
+ RawEntryMut::Vacant(_) => {}
+ RawEntryMut::Occupied(_) => panic!(),
+ }
+
+ a.insert(key, value);
+
+ let entry = a
+ .raw_entry_mut()
+ .from_key(&key)
+ .and_replace_entry_with(|k, v| {
+ assert_eq!(k, &key);
+ assert_eq!(v, value);
+ Some(new_value)
+ });
+
+ match entry {
+ RawEntryMut::Occupied(e) => {
+ assert_eq!(e.key(), &key);
+ assert_eq!(e.get(), &new_value);
+ }
+ RawEntryMut::Vacant(_) => panic!(),
+ }
+
+ assert_eq!(a[key], new_value);
+ assert_eq!(a.len(), 1);
+
+ let entry = a
+ .raw_entry_mut()
+ .from_key(&key)
+ .and_replace_entry_with(|k, v| {
+ assert_eq!(k, &key);
+ assert_eq!(v, new_value);
+ None
+ });
+
+ match entry {
+ RawEntryMut::Vacant(_) => {}
+ RawEntryMut::Occupied(_) => panic!(),
+ }
+
+ assert!(!a.contains_key(key));
+ assert_eq!(a.len(), 0);
+ }
+
+ #[test]
+ fn test_replace_entry_with_doesnt_corrupt() {
+ #![allow(deprecated)] //rand
+ // Test for #19292
+ fn check(m: &HashMap<i32, ()>) {
+ for k in m.keys() {
+ assert!(m.contains_key(k), "{} is in keys() but not in the map?", k);
+ }
+ }
+
+ let mut m = HashMap::new();
+
+ let mut rng = {
+ let seed = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
+ SmallRng::from_seed(seed)
+ };
+
+ // Populate the map with some items.
+ for _ in 0..50 {
+ let x = rng.gen_range(-10, 10);
+ m.insert(x, ());
+ }
+
+ for _ in 0..1000 {
+ let x = rng.gen_range(-10, 10);
+ m.entry(x).and_replace_entry_with(|_, _| None);
+ check(&m);
+ }
+ }
+
+ #[test]
fn test_retain() {
let mut map: HashMap<i32, i32> = (0..100).map(|x| (x, x * 10)).collect();
@@ -3675,7 +4247,7 @@
let drained = map.drain_filter(|&k, _| k % 2 == 0);
let mut out = drained.collect::<Vec<_>>();
out.sort_unstable();
- assert_eq!(vec![(1, 10), (3, 30), (5, 50), (7, 70)], out);
+ assert_eq!(vec![(0, 0), (2, 20), (4, 40), (6, 60)], out);
assert_eq!(map.len(), 4);
}
{
@@ -3697,12 +4269,12 @@
panic!("usize::MAX should trigger an overflow!");
}
- if let Err(AllocErr { .. }) = empty_bytes.try_reserve(MAX_USIZE / 8) {
+ if let Err(AllocError { .. }) = empty_bytes.try_reserve(MAX_USIZE / 8) {
} else {
// This may succeed if there is enough free memory. Attempt to
// allocate a second hashmap to ensure the allocation will fail.
let mut empty_bytes2: HashMap<u8, u8> = HashMap::new();
- if let Err(AllocErr { .. }) = empty_bytes2.try_reserve(MAX_USIZE / 8) {
+ if let Err(AllocError { .. }) = empty_bytes2.try_reserve(MAX_USIZE / 8) {
} else {
panic!("usize::MAX / 8 should trigger an OOM!");
}
@@ -3858,4 +4430,98 @@
assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_some());
assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
}
+
+ #[test]
+ #[cfg(feature = "raw")]
+ fn test_into_iter_refresh() {
+ use core::hash::{BuildHasher, Hash, Hasher};
+
+ #[cfg(miri)]
+ const N: usize = 32;
+ #[cfg(not(miri))]
+ const N: usize = 128;
+
+ let mut rng = rand::thread_rng();
+ for n in 0..N {
+ let mut m = HashMap::new();
+ for i in 0..n {
+ assert!(m.insert(i, 2 * i).is_none());
+ }
+ let hasher = m.hasher().clone();
+
+ let mut it = unsafe { m.table.iter() };
+ assert_eq!(it.len(), n);
+
+ let mut i = 0;
+ let mut left = n;
+ let mut removed = Vec::new();
+ loop {
+ // occasionally remove some elements
+ if i < n && rng.gen_bool(0.1) {
+ let mut hsh = hasher.build_hasher();
+ i.hash(&mut hsh);
+ let hash = hsh.finish();
+
+ unsafe {
+ let e = m.table.find(hash, |q| q.0.eq(&i));
+ if let Some(e) = e {
+ it.reflect_remove(&e);
+ let t = m.table.remove(e);
+ removed.push(t);
+ left -= 1;
+ } else {
+ assert!(removed.contains(&(i, 2 * i)), "{} not in {:?}", i, removed);
+ let e = m
+ .table
+ .insert(hash, (i, 2 * i), |x| super::make_hash(&hasher, &x.0));
+ it.reflect_insert(&e);
+ if let Some(p) = removed.iter().position(|e| e == &(i, 2 * i)) {
+ removed.swap_remove(p);
+ }
+ left += 1;
+ }
+ }
+ }
+
+ let e = it.next();
+ if e.is_none() {
+ break;
+ }
+ assert!(i < n);
+ let t = unsafe { e.unwrap().as_ref() };
+ assert!(!removed.contains(t));
+ let (k, v) = t;
+ assert_eq!(*v, 2 * k);
+ i += 1;
+ }
+ assert!(i <= n);
+
+ // just for safety:
+ assert_eq!(m.table.len(), left);
+ }
+ }
+
+ #[test]
+ fn test_const_with_hasher() {
+ use core::hash::BuildHasher;
+ use std::borrow::ToOwned;
+ use std::collections::hash_map::DefaultHasher;
+
+ #[derive(Clone)]
+ struct MyHasher;
+ impl BuildHasher for MyHasher {
+ type Hasher = DefaultHasher;
+
+ fn build_hasher(&self) -> DefaultHasher {
+ DefaultHasher::new()
+ }
+ }
+
+ const EMPTY_MAP: HashMap<u32, std::string::String, MyHasher> =
+ HashMap::with_hasher(MyHasher);
+
+ let mut map = EMPTY_MAP.clone();
+ map.insert(17, "seventeen".to_owned());
+ assert_eq!("seventeen", map[&17]);
+ }
}
diff --git a/sgx_tstd/hashbrown/src/raw/bitmask.rs b/sgx_tstd/hashbrown/src/raw/bitmask.rs
index 6e61d43..99b2d53 100644
--- a/sgx_tstd/hashbrown/src/raw/bitmask.rs
+++ b/sgx_tstd/hashbrown/src/raw/bitmask.rs
@@ -25,6 +25,20 @@
BitMask(self.0 ^ BITMASK_MASK)
}
+ /// Flip the bit in the mask for the entry at the given index.
+ ///
+ /// Returns the bit's previous state.
+ #[inline]
+ #[allow(clippy::cast_ptr_alignment)]
+ #[cfg(feature = "raw")]
+ pub unsafe fn flip(&mut self, index: usize) -> bool {
+ // NOTE: The + BITMASK_STRIDE - 1 is to set the high bit.
+ let mask = 1 << (index * BITMASK_STRIDE + BITMASK_STRIDE - 1);
+ self.0 ^= mask;
+ // The bit was set if the bit is now 0.
+ self.0 & mask == 0
+ }
+
/// Returns a new `BitMask` with the lowest bit removed.
#[inline]
#[must_use]
diff --git a/sgx_tstd/hashbrown/src/raw/generic.rs b/sgx_tstd/hashbrown/src/raw/generic.rs
index dc5bdc1..26f8c58 100644
--- a/sgx_tstd/hashbrown/src/raw/generic.rs
+++ b/sgx_tstd/hashbrown/src/raw/generic.rs
@@ -27,11 +27,7 @@
/// Helper function to replicate a byte across a `GroupWord`.
#[inline]
fn repeat(byte: u8) -> GroupWord {
- let repeat = GroupWord::from(byte);
- let repeat = repeat | repeat.wrapping_shl(8);
- let repeat = repeat | repeat.wrapping_shl(16);
- // This last line is a no-op with a 32-bit GroupWord
- repeat | repeat.wrapping_shl(32)
+ GroupWord::from_ne_bytes([byte; Group::WIDTH])
}
/// Abstraction over a group of control bytes which can be scanned in
@@ -51,20 +47,20 @@
pub const WIDTH: usize = mem::size_of::<Self>();
/// Returns a full group of empty bytes, suitable for use as the initial
- /// value for an empty hash table. This value is explicitly declared as
- /// a static variable to ensure the address is consistent across dylibs.
+ /// value for an empty hash table.
///
/// This is guaranteed to be aligned to the group size.
- #[inline]
- pub fn static_empty() -> &'static [u8] {
- union AlignedBytes {
- _align: Group,
+ pub const fn static_empty() -> &'static [u8; Group::WIDTH] {
+ #[repr(C)]
+ struct AlignedBytes {
+ _align: [Group; 0],
bytes: [u8; Group::WIDTH],
};
- static ALIGNED_BYTES: AlignedBytes = AlignedBytes {
+ const ALIGNED_BYTES: AlignedBytes = AlignedBytes {
+ _align: [],
bytes: [EMPTY; Group::WIDTH],
};
- unsafe { &ALIGNED_BYTES.bytes }
+ &ALIGNED_BYTES.bytes
}
/// Loads a group of bytes starting at the given address.
diff --git a/sgx_tstd/hashbrown/src/raw/mod.rs b/sgx_tstd/hashbrown/src/raw/mod.rs
index f22a63d..fe95932 100644
--- a/sgx_tstd/hashbrown/src/raw/mod.rs
+++ b/sgx_tstd/hashbrown/src/raw/mod.rs
@@ -1,6 +1,6 @@
use crate::alloc::alloc::{alloc, dealloc, handle_alloc_error};
use crate::scopeguard::guard;
-use crate::CollectionAllocErr;
+use crate::TryReserveError;
use core::alloc::Layout;
use core::hint;
use core::iter::FusedIterator;
@@ -34,7 +34,7 @@
mod bitmask;
-use self::bitmask::BitMask;
+use self::bitmask::{BitMask, BitMaskIter};
use self::imp::Group;
// Branch prediction hint. This is currently only available on nightly but it
@@ -73,18 +73,18 @@
impl Fallibility {
/// Error to return on capacity overflow.
#[cfg_attr(feature = "inline-more", inline)]
- fn capacity_overflow(self) -> CollectionAllocErr {
+ fn capacity_overflow(self) -> TryReserveError {
match self {
- Fallibility::Fallible => CollectionAllocErr::CapacityOverflow,
+ Fallibility::Fallible => TryReserveError::CapacityOverflow,
Fallibility::Infallible => panic!("Hash table capacity overflow"),
}
}
/// Error to return on allocation error.
#[cfg_attr(feature = "inline-more", inline)]
- fn alloc_err(self, layout: Layout) -> CollectionAllocErr {
+ fn alloc_err(self, layout: Layout) -> TryReserveError {
match self {
- Fallibility::Fallible => CollectionAllocErr::AllocErr { layout },
+ Fallibility::Fallible => TryReserveError::AllocError { layout },
Fallibility::Infallible => handle_alloc_error(layout),
}
}
@@ -173,20 +173,26 @@
/// taking the maximum load factor into account.
///
/// Returns `None` if an overflow occurs.
-#[cfg_attr(feature = "inline-more", inline)]
// Workaround for emscripten bug emscripten-core/emscripten-fastcomp#258
#[cfg_attr(target_os = "emscripten", inline(never))]
+#[cfg_attr(not(target_os = "emscripten"), inline)]
fn capacity_to_buckets(cap: usize) -> Option<usize> {
- let adjusted_cap = if cap < 8 {
- // Need at least 1 free bucket on small tables
- cap + 1
- } else {
- // Otherwise require 1/8 buckets to be empty (87.5% load)
- //
- // Be careful when modifying this, calculate_layout relies on the
- // overflow check here.
- cap.checked_mul(8)? / 7
- };
+ debug_assert_ne!(cap, 0);
+
+ // For small tables we require at least 1 empty bucket so that lookups are
+ // guaranteed to terminate if an element doesn't exist in the table.
+ if cap < 8 {
+ // We don't bother with a table size of 2 buckets since that can only
+ // hold a single element. Instead we skip directly to a 4 bucket table
+ // which can hold 3 elements.
+ return Some(if cap < 4 { 4 } else { 8 });
+ }
+
+ // Otherwise require 1/8 buckets to be empty (87.5% load)
+ //
+ // Be careful when modifying this, calculate_layout relies on the
+ // overflow check here.
+ let adjusted_cap = cap.checked_mul(8)? / 7;
// Any overflows will have been caught by the checked_mul. Also, any
// rounding errors from the division above will be cleaned up by
@@ -196,7 +202,7 @@
/// Returns the maximum effective capacity for the given bucket mask, taking
/// the maximum load factor into account.
-#[cfg_attr(feature = "inline-more", inline)]
+#[inline]
fn bucket_mask_to_capacity(bucket_mask: usize) -> usize {
if bucket_mask < 8 {
// For tables with 1/2/4/8 buckets, we always reserve one empty slot.
@@ -208,8 +214,9 @@
}
}
-// Returns a Layout which describes the allocation required for a hash table,
-// and the offset of the buckets in the allocation.
+/// Returns a Layout which describes the allocation required for a hash table,
+/// and the offset of the control bytes in the allocation.
+/// (the offset is also one past last element of buckets)
///
/// Returns `None` if an overflow occurs.
#[cfg_attr(feature = "inline-more", inline)]
@@ -230,24 +237,30 @@
// Group::WIDTH is a small number.
let ctrl = unsafe { Layout::from_size_align_unchecked(buckets + Group::WIDTH, Group::WIDTH) };
- ctrl.extend(data).ok()
+ data.extend(ctrl).ok()
}
-// Returns a Layout which describes the allocation required for a hash table,
-// and the offset of the buckets in the allocation.
+/// Returns a Layout which describes the allocation required for a hash table,
+/// and the offset of the control bytes in the allocation.
+/// (the offset is also one past last element of buckets)
+///
+/// Returns `None` if an overflow occurs.
#[cfg_attr(feature = "inline-more", inline)]
#[cfg(not(feature = "nightly"))]
fn calculate_layout<T>(buckets: usize) -> Option<(Layout, usize)> {
debug_assert!(buckets.is_power_of_two());
// Manual layout calculation since Layout methods are not yet stable.
- let data_align = usize::max(mem::align_of::<T>(), Group::WIDTH);
- let data_offset = (buckets + Group::WIDTH).checked_add(data_align - 1)? & !(data_align - 1);
- let len = data_offset.checked_add(mem::size_of::<T>().checked_mul(buckets)?)?;
+ let ctrl_align = usize::max(mem::align_of::<T>(), Group::WIDTH);
+ let ctrl_offset = mem::size_of::<T>()
+ .checked_mul(buckets)?
+ .checked_add(ctrl_align - 1)?
+ & !(ctrl_align - 1);
+ let len = ctrl_offset.checked_add(buckets + Group::WIDTH)?;
Some((
- unsafe { Layout::from_size_align_unchecked(len, data_align) },
- data_offset,
+ unsafe { Layout::from_size_align_unchecked(len, ctrl_align) },
+ ctrl_offset,
))
}
@@ -257,8 +270,11 @@
/// is a ZST, then we instead track the index of the element in the table so
/// that `erase` works properly.
pub struct Bucket<T> {
- // Using *const for variance
- ptr: *const T,
+ // Actually this is a pointer to the slot *after* the element, not to the
+ // element itself; this is needed to maintain pointer arithmetic invariants,
+ // since keeping a direct pointer to the element introduces difficulty.
+ // Using `NonNull` for variance and niche layout
+ ptr: NonNull<T>,
}
// This Send impl is needed for rayon support. This is safe since Bucket is
@@ -274,13 +290,24 @@
impl<T> Bucket<T> {
#[cfg_attr(feature = "inline-more", inline)]
- unsafe fn from_base_index(base: *const T, index: usize) -> Self {
+ unsafe fn from_base_index(base: NonNull<T>, index: usize) -> Self {
let ptr = if mem::size_of::<T>() == 0 {
- index as *const T
+ // won't overflow because index must be less than length
+ (index + 1) as *mut T
} else {
- base.add(index)
+ base.as_ptr().sub(index)
};
- Self { ptr }
+ Self {
+ ptr: NonNull::new_unchecked(ptr),
+ }
+ }
+ #[cfg_attr(feature = "inline-more", inline)]
+ unsafe fn to_base_index(&self, base: NonNull<T>) -> usize {
+ if mem::size_of::<T>() == 0 {
+ self.ptr.as_ptr() as usize - 1
+ } else {
+ offset_from(base.as_ptr(), self.ptr.as_ptr())
+ }
}
#[cfg_attr(feature = "inline-more", inline)]
pub unsafe fn as_ptr(&self) -> *mut T {
@@ -288,17 +315,19 @@
// Just return an arbitrary ZST pointer which is properly aligned
mem::align_of::<T>() as *mut T
} else {
- self.ptr as *mut T
+ self.ptr.as_ptr().sub(1)
}
}
#[cfg_attr(feature = "inline-more", inline)]
- unsafe fn add(&self, offset: usize) -> Self {
+ unsafe fn next_n(&self, offset: usize) -> Self {
let ptr = if mem::size_of::<T>() == 0 {
- (self.ptr as usize + offset) as *const T
+ (self.ptr.as_ptr() as usize + offset) as *mut T
} else {
- self.ptr.add(offset)
+ self.ptr.as_ptr().sub(offset)
};
- Self { ptr }
+ Self {
+ ptr: NonNull::new_unchecked(ptr),
+ }
}
#[cfg_attr(feature = "inline-more", inline)]
pub unsafe fn drop(&self) {
@@ -332,12 +361,10 @@
// number of buckets in the table.
bucket_mask: usize,
- // Pointer to the array of control bytes
+ // [Padding], T1, T2, ..., Tlast, C1, C2, ...
+ // ^ points here
ctrl: NonNull<u8>,
- // Pointer to the array of buckets
- data: NonNull<T>,
-
// Number of elements that can be inserted before we need to grow the table
growth_left: usize,
@@ -355,11 +382,10 @@
/// leave the data pointer dangling since that bucket is never written to
/// due to our load factor forcing us to always have at least 1 free bucket.
#[cfg_attr(feature = "inline-more", inline)]
- pub fn new() -> Self {
+ pub const fn new() -> Self {
Self {
- data: NonNull::dangling(),
// Be careful to cast the entire slice to a raw pointer.
- ctrl: unsafe { NonNull::new_unchecked(Group::static_empty().as_ptr() as *mut u8) },
+ ctrl: unsafe { NonNull::new_unchecked(Group::static_empty() as *const _ as *mut u8) },
bucket_mask: 0,
items: 0,
growth_left: 0,
@@ -374,14 +400,20 @@
unsafe fn new_uninitialized(
buckets: usize,
fallability: Fallibility,
- ) -> Result<Self, CollectionAllocErr> {
+ ) -> Result<Self, TryReserveError> {
debug_assert!(buckets.is_power_of_two());
- let (layout, data_offset) =
- calculate_layout::<T>(buckets).ok_or_else(|| fallability.capacity_overflow())?;
- let ctrl = NonNull::new(alloc(layout)).ok_or_else(|| fallability.alloc_err(layout))?;
- let data = NonNull::new_unchecked(ctrl.as_ptr().add(data_offset) as *mut T);
+
+ // Avoid `Option::ok_or_else` because it bloats LLVM IR.
+ let (layout, ctrl_offset) = match calculate_layout::<T>(buckets) {
+ Some(lco) => lco,
+ None => return Err(fallability.capacity_overflow()),
+ };
+ let ptr = match NonNull::new(alloc(layout)) {
+ Some(ptr) => ptr,
+ None => return Err(fallability.alloc_err(layout)),
+ };
+ let ctrl = NonNull::new_unchecked(ptr.as_ptr().add(ctrl_offset));
Ok(Self {
- data,
ctrl,
bucket_mask: buckets - 1,
items: 0,
@@ -392,16 +424,19 @@
/// Attempts to allocate a new hash table with at least enough capacity
/// for inserting the given number of elements without reallocating.
- fn try_with_capacity(
+ fn fallible_with_capacity(
capacity: usize,
fallability: Fallibility,
- ) -> Result<Self, CollectionAllocErr> {
+ ) -> Result<Self, TryReserveError> {
if capacity == 0 {
Ok(Self::new())
} else {
unsafe {
- let buckets =
- capacity_to_buckets(capacity).ok_or_else(|| fallability.capacity_overflow())?;
+ // Avoid `Option::ok_or_else` because it bloats LLVM IR.
+ let buckets = match capacity_to_buckets(capacity) {
+ Some(buckets) => buckets,
+ None => return Err(fallability.capacity_overflow()),
+ };
let result = Self::new_uninitialized(buckets, fallability)?;
result.ctrl(0).write_bytes(EMPTY, result.num_ctrl_bytes());
@@ -410,29 +445,51 @@
}
}
+ /// Attempts to allocate a new hash table with at least enough capacity
+ /// for inserting the given number of elements without reallocating.
+ #[cfg(feature = "raw")]
+ pub fn try_with_capacity(capacity: usize) -> Result<Self, TryReserveError> {
+ Self::fallible_with_capacity(capacity, Fallibility::Fallible)
+ }
+
/// Allocates a new hash table with at least enough capacity for inserting
/// the given number of elements without reallocating.
pub fn with_capacity(capacity: usize) -> Self {
- Self::try_with_capacity(capacity, Fallibility::Infallible)
- .unwrap_or_else(|_| unsafe { hint::unreachable_unchecked() })
+ // Avoid `Result::unwrap_or_else` because it bloats LLVM IR.
+ match Self::fallible_with_capacity(capacity, Fallibility::Infallible) {
+ Ok(capacity) => capacity,
+ Err(_) => unsafe { hint::unreachable_unchecked() },
+ }
}
/// Deallocates the table without dropping any entries.
#[cfg_attr(feature = "inline-more", inline)]
unsafe fn free_buckets(&mut self) {
- let (layout, _) =
- calculate_layout::<T>(self.buckets()).unwrap_or_else(|| hint::unreachable_unchecked());
- dealloc(self.ctrl.as_ptr(), layout);
+ // Avoid `Option::unwrap_or_else` because it bloats LLVM IR.
+ let (layout, ctrl_offset) = match calculate_layout::<T>(self.buckets()) {
+ Some(lco) => lco,
+ None => hint::unreachable_unchecked(),
+ };
+ dealloc(self.ctrl.as_ptr().sub(ctrl_offset), layout);
+ }
+
+ /// Returns pointer to one past last element of data table.
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub unsafe fn data_end(&self) -> NonNull<T> {
+ NonNull::new_unchecked(self.ctrl.as_ptr() as *mut T)
+ }
+
+ /// Returns pointer to start of data table.
+ #[cfg_attr(feature = "inline-more", inline)]
+ #[cfg(feature = "nightly")]
+ pub unsafe fn data_start(&self) -> *mut T {
+ self.data_end().as_ptr().wrapping_sub(self.buckets())
}
/// Returns the index of a bucket from a `Bucket`.
#[cfg_attr(feature = "inline-more", inline)]
pub unsafe fn bucket_index(&self, bucket: &Bucket<T>) -> usize {
- if mem::size_of::<T>() == 0 {
- bucket.ptr as usize
- } else {
- offset_from(bucket.ptr, self.data.as_ptr())
- }
+ bucket.to_base_index(self.data_end())
}
/// Returns a pointer to a control byte.
@@ -447,11 +504,12 @@
pub unsafe fn bucket(&self, index: usize) -> Bucket<T> {
debug_assert_ne!(self.bucket_mask, 0);
debug_assert!(index < self.buckets());
- Bucket::from_base_index(self.data.as_ptr(), index)
+ Bucket::from_base_index(self.data_end(), index)
}
/// Erases an element from the table without dropping it.
#[cfg_attr(feature = "inline-more", inline)]
+ #[deprecated(since = "0.8.1", note = "use erase or remove instead")]
pub unsafe fn erase_no_drop(&mut self, item: &Bucket<T>) {
let index = self.bucket_index(item);
debug_assert!(is_full(*self.ctrl(index)));
@@ -477,6 +535,25 @@
self.items -= 1;
}
+ /// Erases an element from the table, dropping it in place.
+ #[cfg_attr(feature = "inline-more", inline)]
+ #[allow(clippy::needless_pass_by_value)]
+ #[allow(deprecated)]
+ pub unsafe fn erase(&mut self, item: Bucket<T>) {
+ // Erase the element from the table first since drop might panic.
+ self.erase_no_drop(&item);
+ item.drop();
+ }
+
+ /// Removes an element from the table, returning it.
+ #[cfg_attr(feature = "inline-more", inline)]
+ #[allow(clippy::needless_pass_by_value)]
+ #[allow(deprecated)]
+ pub unsafe fn remove(&mut self, item: Bucket<T>) -> T {
+ self.erase_no_drop(&item);
+ item.read()
+ }
+
/// Returns an iterator for a probe sequence on the table.
///
/// This iterator never terminates, but is guaranteed to visit each bucket
@@ -575,7 +652,7 @@
// Ensure that the table is reset even if one of the drops panic
let self_ = guard(self, |self_| self_.clear_no_drop());
- if mem::needs_drop::<T>() {
+ if mem::needs_drop::<T>() && self_.len() != 0 {
unsafe {
for item in self_.iter() {
item.drop();
@@ -610,8 +687,13 @@
if self.items == 0 {
*self = Self::with_capacity(min_size)
} else {
- self.resize(min_size, hasher, Fallibility::Infallible)
- .unwrap_or_else(|_| unsafe { hint::unreachable_unchecked() });
+ // Avoid `Result::unwrap_or_else` because it bloats LLVM IR.
+ if self
+ .resize(min_size, hasher, Fallibility::Infallible)
+ .is_err()
+ {
+ unsafe { hint::unreachable_unchecked() }
+ }
}
}
}
@@ -621,8 +703,13 @@
#[cfg_attr(feature = "inline-more", inline)]
pub fn reserve(&mut self, additional: usize, hasher: impl Fn(&T) -> u64) {
if additional > self.growth_left {
- self.reserve_rehash(additional, hasher, Fallibility::Infallible)
- .unwrap_or_else(|_| unsafe { hint::unreachable_unchecked() });
+ // Avoid `Result::unwrap_or_else` because it bloats LLVM IR.
+ if self
+ .reserve_rehash(additional, hasher, Fallibility::Infallible)
+ .is_err()
+ {
+ unsafe { hint::unreachable_unchecked() }
+ }
}
}
@@ -633,7 +720,7 @@
&mut self,
additional: usize,
hasher: impl Fn(&T) -> u64,
- ) -> Result<(), CollectionAllocErr> {
+ ) -> Result<(), TryReserveError> {
if additional > self.growth_left {
self.reserve_rehash(additional, hasher, Fallibility::Fallible)
} else {
@@ -649,12 +736,12 @@
additional: usize,
hasher: impl Fn(&T) -> u64,
fallability: Fallibility,
- ) -> Result<(), CollectionAllocErr> {
- let new_items = self
- .items
- .checked_add(additional)
- .ok_or_else(|| fallability.capacity_overflow())?;
-
+ ) -> Result<(), TryReserveError> {
+ // Avoid `Option::ok_or_else` because it bloats LLVM IR.
+ let new_items = match self.items.checked_add(additional) {
+ Some(new_items) => new_items,
+ None => return Err(fallability.capacity_overflow()),
+ };
let full_capacity = bucket_mask_to_capacity(self.bucket_mask);
if new_items <= full_capacity / 2 {
// Rehash in-place without re-allocating if we have plenty of spare
@@ -778,12 +865,12 @@
capacity: usize,
hasher: impl Fn(&T) -> u64,
fallability: Fallibility,
- ) -> Result<(), CollectionAllocErr> {
+ ) -> Result<(), TryReserveError> {
unsafe {
debug_assert!(self.items <= capacity);
// Allocate and initialize the new table.
- let mut new_table = Self::try_with_capacity(capacity, fallability)?;
+ let mut new_table = Self::fallible_with_capacity(capacity, fallability)?;
new_table.growth_left -= self.items;
new_table.items = self.items;
@@ -855,7 +942,7 @@
///
/// This does not check if the given element already exists in the table.
#[cfg_attr(feature = "inline-more", inline)]
- #[cfg(feature = "rustc-internal-api")]
+ #[cfg(any(feature = "raw", feature = "rustc-internal-api"))]
pub fn insert_no_grow(&mut self, hash: u64, value: T) -> Bucket<T> {
unsafe {
let index = self.find_insert_slot(hash);
@@ -873,27 +960,45 @@
}
}
+ /// Temporarily removes a bucket, applying the given function to the removed
+ /// element and optionally putting back the returned value in the same bucket.
+ ///
+ /// Returns `true` if the bucket still contains an element
+ ///
+ /// This does not check if the given bucket is actually occupied.
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub unsafe fn replace_bucket_with<F>(&mut self, bucket: Bucket<T>, f: F) -> bool
+ where
+ F: FnOnce(T) -> Option<T>,
+ {
+ let index = self.bucket_index(&bucket);
+ let old_ctrl = *self.ctrl(index);
+ debug_assert!(is_full(old_ctrl));
+ let old_growth_left = self.growth_left;
+ let item = self.remove(bucket);
+ if let Some(new_item) = f(item) {
+ self.growth_left = old_growth_left;
+ self.set_ctrl(index, old_ctrl);
+ self.items += 1;
+ self.bucket(index).write(new_item);
+ true
+ } else {
+ false
+ }
+ }
+
/// Searches for an element in the table.
#[inline]
pub fn find(&self, hash: u64, mut eq: impl FnMut(&T) -> bool) -> Option<Bucket<T>> {
unsafe {
- for pos in self.probe_seq(hash) {
- let group = Group::load(self.ctrl(pos));
- for bit in group.match_byte(h2(hash)) {
- let index = (pos + bit) & self.bucket_mask;
- let bucket = self.bucket(index);
- if likely(eq(bucket.as_ref())) {
- return Some(bucket);
- }
- }
- if likely(group.match_empty().any_bit_set()) {
- return None;
+ for bucket in self.iter_hash(hash) {
+ let elm = bucket.as_ref();
+ if likely(eq(elm)) {
+ return Some(bucket);
}
}
+ None
}
-
- // probe_seq never returns.
- unreachable!();
}
/// Returns the number of elements the map can hold without reallocating.
@@ -936,27 +1041,76 @@
/// struct, we have to make the `iter` method unsafe.
#[cfg_attr(feature = "inline-more", inline)]
pub unsafe fn iter(&self) -> RawIter<T> {
- let data = Bucket::from_base_index(self.data.as_ptr(), 0);
+ let data = Bucket::from_base_index(self.data_end(), 0);
RawIter {
iter: RawIterRange::new(self.ctrl.as_ptr(), data, self.buckets()),
items: self.items,
}
}
+ /// Returns an iterator over occupied buckets that could match a given hash.
+ ///
+ /// In rare cases, the iterator may return a bucket with a different hash.
+ ///
+ /// It is up to the caller to ensure that the `RawTable` outlives the
+ /// `RawIterHash`. Because we cannot make the `next` method unsafe on the
+ /// `RawIterHash` struct, we have to make the `iter_hash` method unsafe.
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub unsafe fn iter_hash(&self, hash: u64) -> RawIterHash<'_, T> {
+ RawIterHash::new(self, hash)
+ }
+
/// Returns an iterator which removes all elements from the table without
- /// freeing the memory. It is up to the caller to ensure that the `RawTable`
- /// outlives the `RawDrain`. Because we cannot make the `next` method unsafe
- /// on the `RawDrain`, we have to make the `drain` method unsafe.
+ /// freeing the memory.
+ ///
+ /// It is up to the caller to ensure that the `RawTable` outlives the `RawDrain`.
+ /// Because we cannot make the `next` method unsafe on the `RawDrain`,
+ /// we have to make the `drain` method unsafe.
#[cfg_attr(feature = "inline-more", inline)]
pub unsafe fn drain(&mut self) -> RawDrain<'_, T> {
+ let iter = self.iter();
+ self.drain_iter_from(iter)
+ }
+
+ /// Returns an iterator which removes all elements from the table without
+ /// freeing the memory.
+ ///
+ /// It is up to the caller to ensure that the `RawTable` outlives the `RawDrain`.
+ /// Because we cannot make the `next` method unsafe on the `RawDrain`,
+ /// we have to make the `drain` method unsafe.
+ ///
+ /// Iteration starts at the provided iterator's current location.
+ /// You must ensure that the iterator covers all items that remain in the table.
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub unsafe fn drain_iter_from(&mut self, iter: RawIter<T>) -> RawDrain<'_, T> {
+ debug_assert_eq!(iter.len(), self.len());
RawDrain {
- iter: self.iter(),
+ iter,
table: ManuallyDrop::new(mem::replace(self, Self::new())),
orig_table: NonNull::from(self),
marker: PhantomData,
}
}
+ /// Returns an iterator which consumes all elements from the table.
+ ///
+ /// It is up to the caller to ensure that the `RawTable` outlives the `RawIntoIter`.
+ /// Because we cannot make the `next` method unsafe on the `RawIntoIter`,
+ /// we have to make the `into_iter_from` method unsafe.
+ ///
+ /// Iteration starts at the provided iterator's current location.
+ /// You must ensure that the iterator covers all items that remain in the table.
+ pub unsafe fn into_iter_from(self, iter: RawIter<T>) -> RawIntoIter<T> {
+ debug_assert_eq!(iter.len(), self.len());
+
+ let alloc = self.into_alloc();
+ RawIntoIter {
+ iter,
+ alloc,
+ marker: PhantomData,
+ }
+ }
+
/// Converts the table into a raw allocation. The contents of the table
/// should be dropped using a `RawIter` before freeing the allocation.
#[cfg_attr(feature = "inline-more", inline)]
@@ -964,9 +1118,15 @@
let alloc = if self.is_empty_singleton() {
None
} else {
- let (layout, _) = calculate_layout::<T>(self.buckets())
- .unwrap_or_else(|| unsafe { hint::unreachable_unchecked() });
- Some((self.ctrl.cast(), layout))
+ // Avoid `Option::unwrap_or_else` because it bloats LLVM IR.
+ let (layout, ctrl_offset) = match calculate_layout::<T>(self.buckets()) {
+ Some(lco) => lco,
+ None => unsafe { hint::unreachable_unchecked() },
+ };
+ Some((
+ unsafe { NonNull::new_unchecked(self.ctrl.as_ptr().sub(ctrl_offset)) },
+ layout,
+ ))
};
mem::forget(self);
alloc
@@ -983,8 +1143,11 @@
} else {
unsafe {
let mut new_table = ManuallyDrop::new(
- Self::new_uninitialized(self.buckets(), Fallibility::Infallible)
- .unwrap_or_else(|_| hint::unreachable_unchecked()),
+ // Avoid `Result::ok_or_else` because it bloats LLVM IR.
+ match Self::new_uninitialized(self.buckets(), Fallibility::Infallible) {
+ Ok(table) => table,
+ Err(_) => hint::unreachable_unchecked(),
+ },
);
new_table.clone_from_spec(self, |new_table| {
@@ -1004,7 +1167,7 @@
} else {
unsafe {
// First, drop all our elements without clearing the control bytes.
- if mem::needs_drop::<T>() {
+ if mem::needs_drop::<T>() && self.len() != 0 {
for item in self.iter() {
item.drop();
}
@@ -1017,8 +1180,11 @@
self.free_buckets();
}
(self as *mut Self).write(
- Self::new_uninitialized(source.buckets(), Fallibility::Infallible)
- .unwrap_or_else(|_| hint::unreachable_unchecked()),
+ // Avoid `Result::unwrap_or_else` because it bloats LLVM IR.
+ match Self::new_uninitialized(source.buckets(), Fallibility::Infallible) {
+ Ok(table) => table,
+ Err(_) => hint::unreachable_unchecked(),
+ },
);
}
@@ -1036,16 +1202,11 @@
unsafe fn clone_from_spec(&mut self, source: &Self, on_panic: impl FnMut(&mut Self));
}
impl<T: Clone> RawTableClone for RawTable<T> {
- #[cfg(feature = "nightly")]
#[cfg_attr(feature = "inline-more", inline)]
- default unsafe fn clone_from_spec(&mut self, source: &Self, on_panic: impl FnMut(&mut Self)) {
- self.clone_from_impl(source, on_panic);
- }
-
- #[cfg(not(feature = "nightly"))]
- #[cfg_attr(feature = "inline-more", inline)]
- unsafe fn clone_from_spec(&mut self, source: &Self, on_panic: impl FnMut(&mut Self)) {
- self.clone_from_impl(source, on_panic);
+ default_fn! {
+ unsafe fn clone_from_spec(&mut self, source: &Self, on_panic: impl FnMut(&mut Self)) {
+ self.clone_from_impl(source, on_panic);
+ }
}
}
#[cfg(feature = "nightly")]
@@ -1056,9 +1217,8 @@
.ctrl(0)
.copy_to_nonoverlapping(self.ctrl(0), self.num_ctrl_bytes());
source
- .data
- .as_ptr()
- .copy_to_nonoverlapping(self.data.as_ptr(), self.buckets());
+ .data_start()
+ .copy_to_nonoverlapping(self.data_start(), self.buckets());
self.items = source.items;
self.growth_left = source.growth_left;
@@ -1078,7 +1238,7 @@
// to make sure we drop only the elements that have been
// cloned so far.
let mut guard = guard((0, &mut *self), |(index, self_)| {
- if mem::needs_drop::<T>() {
+ if mem::needs_drop::<T>() && self_.len() != 0 {
for i in 0..=*index {
if is_full(*self_.ctrl(i)) {
self_.bucket(i).drop();
@@ -1109,7 +1269,7 @@
}
/// Variant of `clone_from` to use when a hasher is available.
- #[cfg(any(feature = "nightly", feature = "raw"))]
+ #[cfg(feature = "raw")]
pub fn clone_from_with_hasher(&mut self, source: &Self, hasher: impl Fn(&T) -> u64) {
// If we have enough capacity in the table, just clear it and insert
// elements one by one. We don't do this if we have the same number of
@@ -1160,7 +1320,7 @@
fn drop(&mut self) {
if !self.is_empty_singleton() {
unsafe {
- if mem::needs_drop::<T>() {
+ if mem::needs_drop::<T>() && self.len() != 0 {
for item in self.iter() {
item.drop();
}
@@ -1176,7 +1336,7 @@
fn drop(&mut self) {
if !self.is_empty_singleton() {
unsafe {
- if mem::needs_drop::<T>() {
+ if mem::needs_drop::<T>() && self.len() != 0 {
for item in self.iter() {
item.drop();
}
@@ -1195,12 +1355,7 @@
fn into_iter(self) -> RawIntoIter<T> {
unsafe {
let iter = self.iter();
- let alloc = self.into_alloc();
- RawIntoIter {
- iter,
- alloc,
- marker: PhantomData,
- }
+ self.into_iter_from(iter)
}
}
}
@@ -1274,10 +1429,13 @@
let tail = Self::new(
self.next_ctrl.add(mid),
- self.data.add(Group::WIDTH).add(mid),
+ self.data.next_n(Group::WIDTH).next_n(mid),
len - mid,
);
- debug_assert_eq!(self.data.add(Group::WIDTH).add(mid).ptr, tail.data.ptr);
+ debug_assert_eq!(
+ self.data.next_n(Group::WIDTH).next_n(mid).ptr,
+ tail.data.ptr
+ );
debug_assert_eq!(self.end, tail.end);
self.end = self.next_ctrl.add(mid);
debug_assert_eq!(self.end.add(Group::WIDTH), tail.next_ctrl);
@@ -1313,7 +1471,7 @@
loop {
if let Some(index) = self.current_group.lowest_set_bit() {
self.current_group = self.current_group.remove_lowest_bit();
- return Some(self.data.add(index));
+ return Some(self.data.next_n(index));
}
if self.next_ctrl >= self.end {
@@ -1326,7 +1484,7 @@
// EMPTY. On larger tables self.end is guaranteed to be aligned
// to the group size (since tables are power-of-two sized).
self.current_group = Group::load_aligned(self.next_ctrl).match_full();
- self.data = self.data.add(Group::WIDTH);
+ self.data = self.data.next_n(Group::WIDTH);
self.next_ctrl = self.next_ctrl.add(Group::WIDTH);
}
}
@@ -1345,11 +1503,140 @@
impl<T> FusedIterator for RawIterRange<T> {}
/// Iterator which returns a raw pointer to every full bucket in the table.
+///
+/// For maximum flexibility this iterator is not bound by a lifetime, but you
+/// must observe several rules when using it:
+/// - You must not free the hash table while iterating (including via growing/shrinking).
+/// - It is fine to erase a bucket that has been yielded by the iterator.
+/// - Erasing a bucket that has not yet been yielded by the iterator may still
+/// result in the iterator yielding that bucket (unless `reflect_remove` is called).
+/// - It is unspecified whether an element inserted after the iterator was
+/// created will be yielded by that iterator (unless `reflect_insert` is called).
+/// - The order in which the iterator yields buckets is unspecified and may
+/// change in the future.
pub struct RawIter<T> {
pub(crate) iter: RawIterRange<T>,
items: usize,
}
+impl<T> RawIter<T> {
+ /// Refresh the iterator so that it reflects a removal from the given bucket.
+ ///
+ /// For the iterator to remain valid, this method must be called once
+ /// for each removed bucket before `next` is called again.
+ ///
+ /// This method should be called _before_ the removal is made. It is not necessary to call this
+ /// method if you are removing an item that this iterator yielded in the past.
+ #[cfg(feature = "raw")]
+ pub fn reflect_remove(&mut self, b: &Bucket<T>) {
+ self.reflect_toggle_full(b, false);
+ }
+
+ /// Refresh the iterator so that it reflects an insertion into the given bucket.
+ ///
+ /// For the iterator to remain valid, this method must be called once
+ /// for each insert before `next` is called again.
+ ///
+ /// This method does not guarantee that an insertion of a bucket with a greater
+ /// index than the last one yielded will be reflected in the iterator.
+ ///
+ /// This method should be called _after_ the given insert is made.
+ #[cfg(feature = "raw")]
+ pub fn reflect_insert(&mut self, b: &Bucket<T>) {
+ self.reflect_toggle_full(b, true);
+ }
+
+ /// Refresh the iterator so that it reflects a change to the state of the given bucket.
+ #[cfg(feature = "raw")]
+ fn reflect_toggle_full(&mut self, b: &Bucket<T>, is_insert: bool) {
+ unsafe {
+ if b.as_ptr() > self.iter.data.as_ptr() {
+ // The iterator has already passed the bucket's group.
+ // So the toggle isn't relevant to this iterator.
+ return;
+ }
+
+ if self.iter.next_ctrl < self.iter.end
+ && b.as_ptr() <= self.iter.data.next_n(Group::WIDTH).as_ptr()
+ {
+ // The iterator has not yet reached the bucket's group.
+ // We don't need to reload anything, but we do need to adjust the item count.
+
+ if cfg!(debug_assertions) {
+ // Double-check that the user isn't lying to us by checking the bucket state.
+ // To do that, we need to find its control byte. We know that self.iter.data is
+ // at self.iter.next_ctrl - Group::WIDTH, so we work from there:
+ let offset = offset_from(self.iter.data.as_ptr(), b.as_ptr());
+ let ctrl = self.iter.next_ctrl.sub(Group::WIDTH).add(offset);
+ // This method should be called _before_ a removal, or _after_ an insert,
+ // so in both cases the ctrl byte should indicate that the bucket is full.
+ assert!(is_full(*ctrl));
+ }
+
+ if is_insert {
+ self.items += 1;
+ } else {
+ self.items -= 1;
+ }
+
+ return;
+ }
+
+ // The iterator is at the bucket group that the toggled bucket is in.
+ // We need to do two things:
+ //
+ // - Determine if the iterator already yielded the toggled bucket.
+ // If it did, we're done.
+ // - Otherwise, update the iterator cached group so that it won't
+ // yield a to-be-removed bucket, or _will_ yield a to-be-added bucket.
+ // We'll also need to update the item count accordingly.
+ if let Some(index) = self.iter.current_group.lowest_set_bit() {
+ let next_bucket = self.iter.data.next_n(index);
+ if b.as_ptr() > next_bucket.as_ptr() {
+ // The toggled bucket is "before" the bucket the iterator would yield next. We
+ // therefore don't need to do anything --- the iterator has already passed the
+ // bucket in question.
+ //
+ // The item count must already be correct, since a removal or insert "prior" to
+ // the iterator's position wouldn't affect the item count.
+ } else {
+ // The removed bucket is an upcoming bucket. We need to make sure it does _not_
+ // get yielded, and also that it's no longer included in the item count.
+ //
+ // NOTE: We can't just reload the group here, both since that might reflect
+ // inserts we've already passed, and because that might inadvertently unset the
+ // bits for _other_ removals. If we do that, we'd have to also decrement the
+ // item count for those other bits that we unset. But the presumably subsequent
+ // call to reflect for those buckets might _also_ decrement the item count.
+ // Instead, we _just_ flip the bit for the particular bucket the caller asked
+ // us to reflect.
+ let our_bit = offset_from(self.iter.data.as_ptr(), b.as_ptr());
+ let was_full = self.iter.current_group.flip(our_bit);
+ debug_assert_ne!(was_full, is_insert);
+
+ if is_insert {
+ self.items += 1;
+ } else {
+ self.items -= 1;
+ }
+
+ if cfg!(debug_assertions) {
+ if b.as_ptr() == next_bucket.as_ptr() {
+ // The removed bucket should no longer be next
+ debug_assert_ne!(self.iter.current_group.lowest_set_bit(), Some(index));
+ } else {
+ // We should not have changed what bucket comes next.
+ debug_assert_eq!(self.iter.current_group.lowest_set_bit(), Some(index));
+ }
+ }
+ }
+ } else {
+ // We must have already iterated past the removed item.
+ }
+ }
+ }
+}
+
impl<T> Clone for RawIter<T> {
#[cfg_attr(feature = "inline-more", inline)]
fn clone(&self) -> Self {
@@ -1409,7 +1696,7 @@
fn drop(&mut self) {
unsafe {
// Drop all remaining elements
- if mem::needs_drop::<T>() {
+ if mem::needs_drop::<T>() && self.iter.len() != 0 {
while let Some(item) = self.iter.next() {
item.drop();
}
@@ -1428,7 +1715,7 @@
fn drop(&mut self) {
unsafe {
// Drop all remaining elements
- if mem::needs_drop::<T>() {
+ if mem::needs_drop::<T>() && self.iter.len() != 0 {
while let Some(item) = self.iter.next() {
item.drop();
}
@@ -1489,7 +1776,7 @@
fn drop(&mut self) {
unsafe {
// Drop all remaining elements. Note that this may panic.
- if mem::needs_drop::<T>() {
+ if mem::needs_drop::<T>() && self.iter.len() != 0 {
while let Some(item) = self.iter.next() {
item.drop();
}
@@ -1526,3 +1813,66 @@
impl<T> ExactSizeIterator for RawDrain<'_, T> {}
impl<T> FusedIterator for RawDrain<'_, T> {}
+
+/// Iterator over occupied buckets that could match a given hash.
+///
+/// In rare cases, the iterator may return a bucket with a different hash.
+pub struct RawIterHash<'a, T> {
+ table: &'a RawTable<T>,
+
+ // The top 7 bits of the hash.
+ h2_hash: u8,
+
+ // The sequence of groups to probe in the search.
+ probe_seq: ProbeSeq,
+
+ // The current group and its position.
+ pos: usize,
+ group: Group,
+
+ // The elements within the group with a matching h2-hash.
+ bitmask: BitMaskIter,
+}
+
+impl<'a, T> RawIterHash<'a, T> {
+ fn new(table: &'a RawTable<T>, hash: u64) -> Self {
+ unsafe {
+ let h2_hash = h2(hash);
+ let mut probe_seq = table.probe_seq(hash);
+ let pos = probe_seq.next().unwrap();
+ let group = Group::load(table.ctrl(pos));
+ let bitmask = group.match_byte(h2_hash).into_iter();
+
+ RawIterHash {
+ table,
+ h2_hash,
+ probe_seq,
+ pos,
+ group,
+ bitmask,
+ }
+ }
+ }
+}
+
+impl<'a, T> Iterator for RawIterHash<'a, T> {
+ type Item = Bucket<T>;
+
+ fn next(&mut self) -> Option<Bucket<T>> {
+ unsafe {
+ loop {
+ if let Some(bit) = self.bitmask.next() {
+ let index = (self.pos + bit) & self.table.bucket_mask;
+ let bucket = self.table.bucket(index);
+ return Some(bucket);
+ }
+ if likely(self.group.match_empty().any_bit_set()) {
+ return None;
+ }
+ self.pos = self.probe_seq.next().unwrap();
+ self.group = Group::load(self.table.ctrl(self.pos));
+ self.bitmask = self.group.match_byte(self.h2_hash).into_iter();
+ }
+ }
+ }
+}
diff --git a/sgx_tstd/hashbrown/src/raw/sse2.rs b/sgx_tstd/hashbrown/src/raw/sse2.rs
index 79b0aad..a27bc09 100644
--- a/sgx_tstd/hashbrown/src/raw/sse2.rs
+++ b/sgx_tstd/hashbrown/src/raw/sse2.rs
@@ -25,19 +25,20 @@
pub const WIDTH: usize = mem::size_of::<Self>();
/// Returns a full group of empty bytes, suitable for use as the initial
- /// value for an empty hash table. This value is explicitly declared as
- /// a static variable to ensure the address is consistent across dylibs.
+ /// value for an empty hash table.
///
/// This is guaranteed to be aligned to the group size.
- pub fn static_empty() -> &'static [u8] {
- union AlignedBytes {
- _align: Group,
+ pub const fn static_empty() -> &'static [u8; Group::WIDTH] {
+ #[repr(C)]
+ struct AlignedBytes {
+ _align: [Group; 0],
bytes: [u8; Group::WIDTH],
};
- static ALIGNED_BYTES: AlignedBytes = AlignedBytes {
+ const ALIGNED_BYTES: AlignedBytes = AlignedBytes {
+ _align: [],
bytes: [EMPTY; Group::WIDTH],
};
- unsafe { &ALIGNED_BYTES.bytes }
+ &ALIGNED_BYTES.bytes
}
/// Loads a group of bytes starting at the given address.
diff --git a/sgx_tstd/hashbrown/src/rustc_entry.rs b/sgx_tstd/hashbrown/src/rustc_entry.rs
index 39dc51a..b6ea7bc 100644
--- a/sgx_tstd/hashbrown/src/rustc_entry.rs
+++ b/sgx_tstd/hashbrown/src/rustc_entry.rs
@@ -318,10 +318,7 @@
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn remove_entry(self) -> (K, V) {
- unsafe {
- self.table.erase_no_drop(&self.elem);
- self.elem.read()
- }
+ unsafe { self.table.remove(self.elem) }
}
/// Gets a reference to the value in the entry.
diff --git a/sgx_tstd/hashbrown/src/set.rs b/sgx_tstd/hashbrown/src/set.rs
index e5a94c2..b8460fd 100644
--- a/sgx_tstd/hashbrown/src/set.rs
+++ b/sgx_tstd/hashbrown/src/set.rs
@@ -1,11 +1,13 @@
-use crate::CollectionAllocErr;
+use crate::TryReserveError;
+use alloc::borrow::ToOwned;
use core::borrow::Borrow;
use core::fmt;
use core::hash::{BuildHasher, Hash};
use core::iter::{Chain, FromIterator, FusedIterator};
+use core::mem;
use core::ops::{BitAnd, BitOr, BitXor, Sub};
-use super::map::{self, DefaultHashBuilder, HashMap, Keys};
+use super::map::{self, ConsumeAllOnDrop, DefaultHashBuilder, DrainFilterInner, HashMap, Keys};
// Future Optimization (FIXME!)
// =============================
@@ -98,11 +100,9 @@
/// ```
/// use hashbrown::HashSet;
///
-/// fn main() {
-/// let viking_names: HashSet<&'static str> =
-/// [ "Einar", "Olaf", "Harald" ].iter().cloned().collect();
-/// // use the values stored in the set
-/// }
+/// let viking_names: HashSet<&'static str> =
+/// [ "Einar", "Olaf", "Harald" ].iter().cloned().collect();
+/// // use the values stored in the set
/// ```
///
/// [`Cell`]: https://doc.rust-lang.org/std/cell/struct.Cell.html
@@ -128,7 +128,7 @@
}
#[cfg(feature = "ahash")]
-impl<T: Hash + Eq> HashSet<T, DefaultHashBuilder> {
+impl<T> HashSet<T, DefaultHashBuilder> {
/// Creates an empty `HashSet`.
///
/// The hash set is initially created with a capacity of 0, so it will not allocate until it
@@ -168,6 +168,72 @@
}
impl<T, S> HashSet<T, S> {
+ /// Creates a new empty hash set which will use the given hasher to hash
+ /// keys.
+ ///
+ /// The hash set is also created with the default initial capacity.
+ ///
+ /// Warning: `hasher` is normally randomly generated, and
+ /// is designed to allow `HashSet`s to be resistant to attacks that
+ /// cause many collisions and very poor performance. Setting it
+ /// manually using this function can expose a DoS attack vector.
+ ///
+ /// The `hash_builder` passed should implement the [`BuildHasher`] trait for
+ /// the HashSet to be useful, see its documentation for details.
+ ///
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use hashbrown::HashSet;
+ /// use hashbrown::hash_map::DefaultHashBuilder;
+ ///
+ /// let s = DefaultHashBuilder::default();
+ /// let mut set = HashSet::with_hasher(s);
+ /// set.insert(2);
+ /// ```
+ ///
+ /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub const fn with_hasher(hasher: S) -> Self {
+ Self {
+ map: HashMap::with_hasher(hasher),
+ }
+ }
+
+ /// Creates an empty `HashSet` with the specified capacity, using
+ /// `hasher` to hash the keys.
+ ///
+ /// The hash set will be able to hold at least `capacity` elements without
+ /// reallocating. If `capacity` is 0, the hash set will not allocate.
+ ///
+ /// Warning: `hasher` is normally randomly generated, and
+ /// is designed to allow `HashSet`s to be resistant to attacks that
+ /// cause many collisions and very poor performance. Setting it
+ /// manually using this function can expose a DoS attack vector.
+ ///
+ /// The `hash_builder` passed should implement the [`BuildHasher`] trait for
+ /// the HashSet to be useful, see its documentation for details.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use hashbrown::HashSet;
+ /// use hashbrown::hash_map::DefaultHashBuilder;
+ ///
+ /// let s = DefaultHashBuilder::default();
+ /// let mut set = HashSet::with_capacity_and_hasher(10, s);
+ /// set.insert(1);
+ /// ```
+ ///
+ /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> Self {
+ Self {
+ map: HashMap::with_capacity_and_hasher(capacity, hasher),
+ }
+ }
+
/// Returns the number of elements the set can hold without reallocating.
///
/// # Examples
@@ -263,6 +329,66 @@
}
}
+ /// Retains only the elements specified by the predicate.
+ ///
+ /// In other words, remove all elements `e` such that `f(&e)` returns `false`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use hashbrown::HashSet;
+ ///
+ /// let xs = [1,2,3,4,5,6];
+ /// let mut set: HashSet<i32> = xs.iter().cloned().collect();
+ /// set.retain(|&k| k % 2 == 0);
+ /// assert_eq!(set.len(), 3);
+ /// ```
+ pub fn retain<F>(&mut self, mut f: F)
+ where
+ F: FnMut(&T) -> bool,
+ {
+ self.map.retain(|k, _| f(k));
+ }
+
+ /// Drains elements which are true under the given predicate,
+ /// and returns an iterator over the removed items.
+ ///
+ /// In other words, move all elements `e` such that `f(&e)` returns `true` out
+ /// into another iterator.
+ ///
+ /// When the returned DrainFilter is dropped, any remaining elements that satisfy
+ /// the predicate are dropped from the set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use hashbrown::HashSet;
+ ///
+ /// let mut set: HashSet<i32> = (0..8).collect();
+ /// let drained: HashSet<i32> = set.drain_filter(|v| v % 2 == 0).collect();
+ ///
+ /// let mut evens = drained.into_iter().collect::<Vec<_>>();
+ /// let mut odds = set.into_iter().collect::<Vec<_>>();
+ /// evens.sort();
+ /// odds.sort();
+ ///
+ /// assert_eq!(evens, vec![0, 2, 4, 6]);
+ /// assert_eq!(odds, vec![1, 3, 5, 7]);
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn drain_filter<F>(&mut self, f: F) -> DrainFilter<'_, T, F>
+ where
+ F: FnMut(&T) -> bool,
+ {
+ DrainFilter {
+ f,
+ inner: DrainFilterInner {
+ iter: unsafe { self.map.table.iter() },
+ table: &mut self.map.table,
+ },
+ }
+ }
+
/// Clears the set, removing all values.
///
/// # Examples
@@ -279,67 +405,6 @@
pub fn clear(&mut self) {
self.map.clear()
}
-}
-
-impl<T, S> HashSet<T, S>
-where
- T: Eq + Hash,
- S: BuildHasher,
-{
- /// Creates a new empty hash set which will use the given hasher to hash
- /// keys.
- ///
- /// The hash set is also created with the default initial capacity.
- ///
- /// Warning: `hasher` is normally randomly generated, and
- /// is designed to allow `HashSet`s to be resistant to attacks that
- /// cause many collisions and very poor performance. Setting it
- /// manually using this function can expose a DoS attack vector.
- ///
- /// # Examples
- ///
- /// ```
- /// use hashbrown::HashSet;
- /// use hashbrown::hash_map::DefaultHashBuilder;
- ///
- /// let s = DefaultHashBuilder::default();
- /// let mut set = HashSet::with_hasher(s);
- /// set.insert(2);
- /// ```
- #[cfg_attr(feature = "inline-more", inline)]
- pub fn with_hasher(hasher: S) -> Self {
- Self {
- map: HashMap::with_hasher(hasher),
- }
- }
-
- /// Creates an empty `HashSet` with the specified capacity, using
- /// `hasher` to hash the keys.
- ///
- /// The hash set will be able to hold at least `capacity` elements without
- /// reallocating. If `capacity` is 0, the hash set will not allocate.
- ///
- /// Warning: `hasher` is normally randomly generated, and
- /// is designed to allow `HashSet`s to be resistant to attacks that
- /// cause many collisions and very poor performance. Setting it
- /// manually using this function can expose a DoS attack vector.
- ///
- /// # Examples
- ///
- /// ```
- /// use hashbrown::HashSet;
- /// use hashbrown::hash_map::DefaultHashBuilder;
- ///
- /// let s = DefaultHashBuilder::default();
- /// let mut set = HashSet::with_capacity_and_hasher(10, s);
- /// set.insert(1);
- /// ```
- #[cfg_attr(feature = "inline-more", inline)]
- pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> Self {
- Self {
- map: HashMap::with_capacity_and_hasher(capacity, hasher),
- }
- }
/// Returns a reference to the set's [`BuildHasher`].
///
@@ -359,7 +424,13 @@
pub fn hasher(&self) -> &S {
self.map.hasher()
}
+}
+impl<T, S> HashSet<T, S>
+where
+ T: Eq + Hash,
+ S: BuildHasher,
+{
/// Reserves capacity for at least `additional` more elements to be inserted
/// in the `HashSet`. The collection may reserve more space to avoid
/// frequent reallocations.
@@ -398,7 +469,7 @@
/// set.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?");
/// ```
#[cfg_attr(feature = "inline-more", inline)]
- pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
+ pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
self.map.try_reserve(additional)
}
@@ -559,7 +630,7 @@
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn union<'a>(&'a self, other: &'a Self) -> Union<'a, T, S> {
- let (smaller, larger) = if self.len() <= other.len() {
+ let (smaller, larger) = if self.len() >= other.len() {
(self, other)
} else {
(other, self)
@@ -620,7 +691,11 @@
T: Borrow<Q>,
Q: Hash + Eq,
{
- self.map.get_key_value(value).map(|(k, _)| k)
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.map.get_key_value(value) {
+ Some((k, _)) => Some(k),
+ None => None,
+ }
}
/// Inserts the given `value` into the set if it is not present, then
@@ -648,6 +723,39 @@
.0
}
+ /// Inserts an owned copy of the given `value` into the set if it is not
+ /// present, then returns a reference to the value in the set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use hashbrown::HashSet;
+ ///
+ /// let mut set: HashSet<String> = ["cat", "dog", "horse"]
+ /// .iter().map(|&pet| pet.to_owned()).collect();
+ ///
+ /// assert_eq!(set.len(), 3);
+ /// for &pet in &["cat", "dog", "fish"] {
+ /// let value = set.get_or_insert_owned(pet);
+ /// assert_eq!(value, pet);
+ /// }
+ /// assert_eq!(set.len(), 4); // a new "fish" was inserted
+ /// ```
+ #[inline]
+ pub fn get_or_insert_owned<Q: ?Sized>(&mut self, value: &Q) -> &T
+ where
+ T: Borrow<Q>,
+ Q: Hash + Eq + ToOwned<Owned = T>,
+ {
+ // Although the raw entry gives us `&mut T`, we only return `&T` to be consistent with
+ // `get`. Key mutation is "raw" because you're not supposed to affect `Eq` or `Hash`.
+ self.map
+ .raw_entry_mut()
+ .from_key(value)
+ .or_insert_with(|| (value.to_owned(), ()))
+ .0
+ }
+
/// Inserts a value computed from `f` into the set if the given `value` is
/// not present, then returns a reference to the value in the set.
///
@@ -851,28 +959,11 @@
T: Borrow<Q>,
Q: Hash + Eq,
{
- self.map.remove_entry(value).map(|(k, _)| k)
- }
-
- /// Retains only the elements specified by the predicate.
- ///
- /// In other words, remove all elements `e` such that `f(&e)` returns `false`.
- ///
- /// # Examples
- ///
- /// ```
- /// use hashbrown::HashSet;
- ///
- /// let xs = [1,2,3,4,5,6];
- /// let mut set: HashSet<i32> = xs.iter().cloned().collect();
- /// set.retain(|&k| k % 2 == 0);
- /// assert_eq!(set.len(), 3);
- /// ```
- pub fn retain<F>(&mut self, mut f: F)
- where
- F: FnMut(&T) -> bool,
- {
- self.map.retain(|k, _| f(k));
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.map.remove_entry(value) {
+ Some((k, _)) => Some(k),
+ None => None,
+ }
}
}
@@ -929,6 +1020,18 @@
fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
self.map.extend(iter.into_iter().map(|k| (k, ())));
}
+
+ #[inline]
+ #[cfg(feature = "nightly")]
+ fn extend_one(&mut self, k: T) {
+ self.map.insert(k, ());
+ }
+
+ #[inline]
+ #[cfg(feature = "nightly")]
+ fn extend_reserve(&mut self, additional: usize) {
+ Extend::<(T, ())>::extend_reserve(&mut self.map, additional);
+ }
}
impl<'a, T, S> Extend<&'a T> for HashSet<T, S>
@@ -940,12 +1043,23 @@
fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
self.extend(iter.into_iter().cloned());
}
+
+ #[inline]
+ #[cfg(feature = "nightly")]
+ fn extend_one(&mut self, k: &'a T) {
+ self.map.insert(*k, ());
+ }
+
+ #[inline]
+ #[cfg(feature = "nightly")]
+ fn extend_reserve(&mut self, additional: usize) {
+ Extend::<(T, ())>::extend_reserve(&mut self.map, additional);
+ }
}
impl<T, S> Default for HashSet<T, S>
where
- T: Eq + Hash,
- S: BuildHasher + Default,
+ S: Default,
{
/// Creates an empty `HashSet<T, S>` with the `Default` value for the hasher.
#[cfg_attr(feature = "inline-more", inline)]
@@ -1097,7 +1211,7 @@
/// An owning iterator over the items of a `HashSet`.
///
-/// This `struct` is created by the [`into_iter`] method on [`HashSet`][`HashSet`]
+/// This `struct` is created by the [`into_iter`] method on [`HashSet`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
/// [`HashSet`]: struct.HashSet.html
@@ -1117,6 +1231,21 @@
iter: map::Drain<'a, K, ()>,
}
+/// A draining iterator over entries of a `HashSet` which don't satisfy the predicate `f`.
+///
+/// This `struct` is created by the [`drain_filter`] method on [`HashSet`]. See its
+/// documentation for more.
+///
+/// [`drain_filter`]: struct.HashSet.html#method.drain_filter
+/// [`HashSet`]: struct.HashSet.html
+pub struct DrainFilter<'a, K, F>
+where
+ F: FnMut(&K) -> bool,
+{
+ f: F,
+ inner: DrainFilterInner<'a, K, ()>,
+}
+
/// A lazy iterator producing elements in the intersection of `HashSet`s.
///
/// This `struct` is created by the [`intersection`] method on [`HashSet`].
@@ -1248,7 +1377,11 @@
#[cfg_attr(feature = "inline-more", inline)]
fn next(&mut self) -> Option<K> {
- self.iter.next().map(|(k, _)| k)
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.iter.next() {
+ Some((k, _)) => Some(k),
+ None => None,
+ }
}
#[cfg_attr(feature = "inline-more", inline)]
fn size_hint(&self) -> (usize, Option<usize>) {
@@ -1275,7 +1408,11 @@
#[cfg_attr(feature = "inline-more", inline)]
fn next(&mut self) -> Option<K> {
- self.iter.next().map(|(k, _)| k)
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.iter.next() {
+ Some((k, _)) => Some(k),
+ None => None,
+ }
}
#[cfg_attr(feature = "inline-more", inline)]
fn size_hint(&self) -> (usize, Option<usize>) {
@@ -1297,6 +1434,41 @@
}
}
+impl<'a, K, F> Drop for DrainFilter<'a, K, F>
+where
+ F: FnMut(&K) -> bool,
+{
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn drop(&mut self) {
+ while let Some(item) = self.next() {
+ let guard = ConsumeAllOnDrop(self);
+ drop(item);
+ mem::forget(guard);
+ }
+ }
+}
+
+impl<K, F> Iterator for DrainFilter<'_, K, F>
+where
+ F: FnMut(&K) -> bool,
+{
+ type Item = K;
+
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn next(&mut self) -> Option<Self::Item> {
+ let f = &mut self.f;
+ let (k, _) = self.inner.next(&mut |k, _| f(k))?;
+ Some(k)
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (0, self.inner.iter.size_hint().1)
+ }
+}
+
+impl<K, F> FusedIterator for DrainFilter<'_, K, F> where F: FnMut(&K) -> bool {}
+
impl<T, S> Clone for Intersection<'_, T, S> {
#[cfg_attr(feature = "inline-more", inline)]
fn clone(&self) -> Self {
@@ -1734,13 +1906,15 @@
#[test]
fn test_from_iter() {
- let xs = [1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let xs = [1, 2, 2, 3, 4, 5, 6, 7, 8, 9];
let set: HashSet<_> = xs.iter().cloned().collect();
for x in &xs {
assert!(set.contains(x));
}
+
+ assert_eq!(set.iter().len(), xs.len() - 1);
}
#[test]
@@ -1903,4 +2077,43 @@
assert!(set.contains(&4));
assert!(set.contains(&6));
}
+
+ #[test]
+ fn test_drain_filter() {
+ {
+ let mut set: HashSet<i32> = (0..8).collect();
+ let drained = set.drain_filter(|&k| k % 2 == 0);
+ let mut out = drained.collect::<Vec<_>>();
+ out.sort_unstable();
+ assert_eq!(vec![0, 2, 4, 6], out);
+ assert_eq!(set.len(), 4);
+ }
+ {
+ let mut set: HashSet<i32> = (0..8).collect();
+ drop(set.drain_filter(|&k| k % 2 == 0));
+ assert_eq!(set.len(), 4, "Removes non-matching items on drop");
+ }
+ }
+
+ #[test]
+ fn test_const_with_hasher() {
+ use core::hash::BuildHasher;
+ use std::collections::hash_map::DefaultHasher;
+
+ #[derive(Clone)]
+ struct MyHasher;
+ impl BuildHasher for MyHasher {
+ type Hasher = DefaultHasher;
+
+ fn build_hasher(&self) -> DefaultHasher {
+ DefaultHasher::new()
+ }
+ }
+
+ const EMPTY_SET: HashSet<u32, MyHasher> = HashSet::with_hasher(MyHasher);
+
+ let mut set = EMPTY_SET.clone();
+ set.insert(19);
+ assert!(set.contains(&19));
+ }
}
diff --git a/sgx_tstd/src/collections/hash/map.rs b/sgx_tstd/src/collections/hash/map.rs
index fddb553..d74ccf3 100644
--- a/sgx_tstd/src/collections/hash/map.rs
+++ b/sgx_tstd/src/collections/hash/map.rs
@@ -1,21 +1,5 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License..
-
-// ignore-tidy-filelength
+//#[cfg(test)]
+//mod tests;
use self::Entry::*;
@@ -165,14 +149,11 @@
/// The easiest way to use `HashMap` with a custom key type is to derive [`Eq`] and [`Hash`].
/// We must also derive [`PartialEq`].
///
-/// [`Eq`]: ../../std/cmp/trait.Eq.html
-/// [`Hash`]: ../../std/hash/trait.Hash.html
-/// [`PartialEq`]: ../../std/cmp/trait.PartialEq.html
-/// [`RefCell`]: ../../std/cell/struct.RefCell.html
-/// [`Cell`]: ../../std/cell/struct.Cell.html
-/// [`default`]: #method.default
-/// [`with_hasher`]: #method.with_hasher
-/// [`with_capacity_and_hasher`]: #method.with_capacity_and_hasher
+/// [`RefCell`]: crate::cell::RefCell
+/// [`Cell`]: crate::cell::Cell
+/// [`default`]: Default::default
+/// [`with_hasher`]: Self::with_hasher
+/// [`with_capacity_and_hasher`]: Self::with_capacity_and_hasher
/// [`fnv`]: https://crates.io/crates/fnv
///
/// ```
@@ -215,6 +196,8 @@
/// ```
#[derive(Clone)]
+#[cfg_attr(not(test), rustc_diagnostic_item = "hashmap_type")]
+//#[stable(feature = "rust1", since = "1.0.0")]
pub struct HashMap<K, V, S = RandomState> {
base: base::HashMap<K, V, S>,
}
@@ -232,6 +215,7 @@
/// let mut map: HashMap<&str, i32> = HashMap::new();
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn new() -> HashMap<K, V, RandomState> {
Default::default()
}
@@ -248,6 +232,7 @@
/// let mut map: HashMap<&str, i32> = HashMap::with_capacity(10);
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn with_capacity(capacity: usize) -> HashMap<K, V, RandomState> {
HashMap::with_capacity_and_hasher(capacity, Default::default())
}
@@ -264,6 +249,9 @@
/// cause many collisions and very poor performance. Setting it
/// manually using this function can expose a DoS attack vector.
///
+ /// The `hash_builder` passed should implement the [`BuildHasher`] trait for
+ /// the HashMap to be useful, see its documentation for details.
+ ///
/// # Examples
///
/// ```
@@ -275,6 +263,7 @@
/// map.insert(1, 2);
/// ```
#[inline]
+ //#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
pub fn with_hasher(hash_builder: S) -> HashMap<K, V, S> {
HashMap { base: base::HashMap::with_hasher(hash_builder) }
}
@@ -290,6 +279,9 @@
/// cause many collisions and very poor performance. Setting it
/// manually using this function can expose a DoS attack vector.
///
+ /// The `hash_builder` passed should implement the [`BuildHasher`] trait for
+ /// the HashMap to be useful, see its documentation for details.
+ ///
/// # Examples
///
/// ```
@@ -301,6 +293,7 @@
/// map.insert(1, 2);
/// ```
#[inline]
+ //#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> HashMap<K, V, S> {
HashMap { base: base::HashMap::with_capacity_and_hasher(capacity, hash_builder) }
}
@@ -318,6 +311,7 @@
/// assert!(map.capacity() >= 100);
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn capacity(&self) -> usize {
self.base.capacity()
}
@@ -339,6 +333,7 @@
/// println!("{}", key);
/// }
/// ```
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn keys(&self) -> Keys<'_, K, V> {
Keys { inner: self.iter() }
}
@@ -360,6 +355,7 @@
/// println!("{}", val);
/// }
/// ```
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn values(&self) -> Values<'_, K, V> {
Values { inner: self.iter() }
}
@@ -386,6 +382,7 @@
/// println!("{}", val);
/// }
/// ```
+ //#[stable(feature = "map_values_mut", since = "1.10.0")]
pub fn values_mut(&mut self) -> ValuesMut<'_, K, V> {
ValuesMut { inner: self.iter_mut() }
}
@@ -407,6 +404,7 @@
/// println!("key: {} val: {}", key, val);
/// }
/// ```
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn iter(&self) -> Iter<'_, K, V> {
Iter { base: self.base.iter() }
}
@@ -434,6 +432,7 @@
/// println!("key: {} val: {}", key, val);
/// }
/// ```
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
IterMut { base: self.base.iter_mut() }
}
@@ -450,6 +449,7 @@
/// a.insert(1, "a");
/// assert_eq!(a.len(), 1);
/// ```
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn len(&self) -> usize {
self.base.len()
}
@@ -467,6 +467,7 @@
/// assert!(!a.is_empty());
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_empty(&self) -> bool {
self.base.is_empty()
}
@@ -491,10 +492,55 @@
/// assert!(a.is_empty());
/// ```
#[inline]
+ //#[stable(feature = "drain", since = "1.6.0")]
pub fn drain(&mut self) -> Drain<'_, K, V> {
Drain { base: self.base.drain() }
}
+ /// Creates an iterator which uses a closure to determine if an element should be removed.
+ ///
+ /// If the closure returns true, the element is removed from the map and yielded.
+ /// If the closure returns false, or panics, the element remains in the map and will not be
+ /// yielded.
+ ///
+ /// Note that `drain_filter` lets you mutate every value in the filter closure, regardless of
+ /// whether you choose to keep or remove it.
+ ///
+ /// If the iterator is only partially consumed or not consumed at all, each of the remaining
+ /// elements will still be subjected to the closure and removed and dropped if it returns true.
+ ///
+ /// It is unspecified how many more elements will be subjected to the closure
+ /// if a panic occurs in the closure, or a panic occurs while dropping an element,
+ /// or if the `DrainFilter` value is leaked.
+ ///
+ /// # Examples
+ ///
+ /// Splitting a map into even and odd keys, reusing the original map:
+ ///
+ /// ```
+ /// #![feature(hash_drain_filter)]
+ /// use std::collections::HashMap;
+ ///
+ /// let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x)).collect();
+ /// let drained: HashMap<i32, i32> = map.drain_filter(|k, _v| k % 2 == 0).collect();
+ ///
+ /// let mut evens = drained.keys().copied().collect::<Vec<_>>();
+ /// let mut odds = map.keys().copied().collect::<Vec<_>>();
+ /// evens.sort();
+ /// odds.sort();
+ ///
+ /// assert_eq!(evens, vec![0, 2, 4, 6]);
+ /// assert_eq!(odds, vec![1, 3, 5, 7]);
+ /// ```
+ #[inline]
+ //#[unstable(feature = "hash_drain_filter", issue = "59618")]
+ pub fn drain_filter<F>(&mut self, pred: F) -> DrainFilter<'_, K, V, F>
+ where
+ F: FnMut(&K, &mut V) -> bool,
+ {
+ DrainFilter { base: self.base.drain_filter(pred) }
+ }
+
/// Clears the map, removing all key-value pairs. Keeps the allocated memory
/// for reuse.
///
@@ -508,6 +554,7 @@
/// a.clear();
/// assert!(a.is_empty());
/// ```
+ //#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn clear(&mut self) {
self.base.clear();
@@ -515,8 +562,6 @@
/// Returns a reference to the map's [`BuildHasher`].
///
- /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
- ///
/// # Examples
///
/// ```
@@ -528,6 +573,7 @@
/// let hasher: &RandomState = map.hasher();
/// ```
#[inline]
+ //#[stable(feature = "hashmap_public_hasher", since = "1.9.0")]
pub fn hasher(&self) -> &S {
self.base.hasher()
}
@@ -546,8 +592,6 @@
///
/// Panics if the new allocation size overflows [`usize`].
///
- /// [`usize`]: ../../std/primitive.usize.html
- ///
/// # Examples
///
/// ```
@@ -556,6 +600,7 @@
/// map.reserve(10);
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve(&mut self, additional: usize) {
self.base.reserve(additional)
}
@@ -578,8 +623,9 @@
/// map.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?");
/// ```
#[inline]
+ //#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
- self.base.try_reserve(additional).map_err(map_collection_alloc_err)
+ self.base.try_reserve(additional).map_err(map_try_reserve_error)
}
/// Shrinks the capacity of the map as much as possible. It will drop
@@ -599,6 +645,7 @@
/// assert!(map.capacity() >= 2);
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn shrink_to_fit(&mut self) {
self.base.shrink_to_fit();
}
@@ -626,6 +673,7 @@
/// assert!(map.capacity() >= 2);
/// ```
#[inline]
+ //#[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
pub fn shrink_to(&mut self, min_capacity: usize) {
assert!(self.capacity() >= min_capacity, "Tried to shrink to a larger capacity");
self.base.shrink_to(min_capacity);
@@ -651,6 +699,7 @@
/// assert_eq!(letters.get(&'y'), None);
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn entry(&mut self, key: K) -> Entry<'_, K, V> {
map_entry(self.base.rustc_entry(key))
}
@@ -661,9 +710,6 @@
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
- /// [`Eq`]: ../../std/cmp/trait.Eq.html
- /// [`Hash`]: ../../std/hash/trait.Hash.html
- ///
/// # Examples
///
/// ```
@@ -674,6 +720,7 @@
/// assert_eq!(map.get(&1), Some(&"a"));
/// assert_eq!(map.get(&2), None);
/// ```
+ //#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V>
where
@@ -689,9 +736,6 @@
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
- /// [`Eq`]: ../../std/cmp/trait.Eq.html
- /// [`Hash`]: ../../std/hash/trait.Hash.html
- ///
/// # Examples
///
/// ```
@@ -702,6 +746,7 @@
/// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
/// assert_eq!(map.get_key_value(&2), None);
/// ```
+ //#[stable(feature = "map_get_key_value", since = "1.40.0")]
#[inline]
pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
where
@@ -717,9 +762,6 @@
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
- /// [`Eq`]: ../../std/cmp/trait.Eq.html
- /// [`Hash`]: ../../std/hash/trait.Hash.html
- ///
/// # Examples
///
/// ```
@@ -730,6 +772,7 @@
/// assert_eq!(map.contains_key(&1), true);
/// assert_eq!(map.contains_key(&2), false);
/// ```
+ //#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn contains_key<Q: ?Sized>(&self, k: &Q) -> bool
where
@@ -745,9 +788,6 @@
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
- /// [`Eq`]: ../../std/cmp/trait.Eq.html
- /// [`Hash`]: ../../std/hash/trait.Hash.html
- ///
/// # Examples
///
/// ```
@@ -760,6 +800,7 @@
/// }
/// assert_eq!(map[&1], "b");
/// ```
+ //#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn get_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<&mut V>
where
@@ -778,8 +819,7 @@
/// types that can be `==` without being identical. See the [module-level
/// documentation] for more.
///
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
- /// [module-level documentation]: index.html#insert-and-complex-keys
+ /// [module-level documentation]: crate::collections#insert-and-complex-keys
///
/// # Examples
///
@@ -794,6 +834,7 @@
/// assert_eq!(map.insert(37, "c"), Some("b"));
/// assert_eq!(map[&37], "c");
/// ```
+ //#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn insert(&mut self, k: K, v: V) -> Option<V> {
self.base.insert(k, v)
@@ -806,9 +847,6 @@
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
- /// [`Eq`]: ../../std/cmp/trait.Eq.html
- /// [`Hash`]: ../../std/hash/trait.Hash.html
- ///
/// # Examples
///
/// ```
@@ -819,6 +857,7 @@
/// assert_eq!(map.remove(&1), Some("a"));
/// assert_eq!(map.remove(&1), None);
/// ```
+ //#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn remove<Q: ?Sized>(&mut self, k: &Q) -> Option<V>
where
@@ -835,9 +874,6 @@
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
- /// [`Eq`]: ../../std/cmp/trait.Eq.html
- /// [`Hash`]: ../../std/hash/trait.Hash.html
- ///
/// # Examples
///
/// ```
@@ -850,6 +886,7 @@
/// assert_eq!(map.remove(&1), None);
/// # }
/// ```
+ //#[stable(feature = "hash_map_remove_entry", since = "1.27.0")]
#[inline]
pub fn remove_entry<Q: ?Sized>(&mut self, k: &Q) -> Option<(K, V)>
where
@@ -872,6 +909,7 @@
/// map.retain(|&k, _| k % 2 == 0);
/// assert_eq!(map.len(), 4);
/// ```
+ //#[stable(feature = "retain_hash_collection", since = "1.18.0")]
#[inline]
pub fn retain<F>(&mut self, f: F)
where
@@ -879,6 +917,52 @@
{
self.base.retain(f)
}
+
+ /// Creates a consuming iterator visiting all the keys in arbitrary order.
+ /// The map cannot be used after calling this.
+ /// The iterator element type is `K`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(map_into_keys_values)]
+ /// use std::collections::HashMap;
+ ///
+ /// let mut map = HashMap::new();
+ /// map.insert("a", 1);
+ /// map.insert("b", 2);
+ /// map.insert("c", 3);
+ ///
+ /// let vec: Vec<&str> = map.into_keys().collect();
+ /// ```
+ #[inline]
+ //#[unstable(feature = "map_into_keys_values", issue = "75294")]
+ pub fn into_keys(self) -> IntoKeys<K, V> {
+ IntoKeys { inner: self.into_iter() }
+ }
+
+ /// Creates a consuming iterator visiting all the values in arbitrary order.
+ /// The map cannot be used after calling this.
+ /// The iterator element type is `V`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(map_into_keys_values)]
+ /// use std::collections::HashMap;
+ ///
+ /// let mut map = HashMap::new();
+ /// map.insert("a", 1);
+ /// map.insert("b", 2);
+ /// map.insert("c", 3);
+ ///
+ /// let vec: Vec<i32> = map.into_values().collect();
+ /// ```
+ #[inline]
+ //#[unstable(feature = "map_into_keys_values", issue = "75294")]
+ pub fn into_values(self) -> IntoValues<K, V> {
+ IntoValues { inner: self.into_iter() }
+ }
}
impl<K, V, S> HashMap<K, V, S>
@@ -917,6 +1001,7 @@
/// acting erratically, with two keys randomly masking each other. Implementations
/// are free to assume this doesn't happen (within the limits of memory-safety).
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<'_, K, V, S> {
RawEntryBuilderMut { map: self }
}
@@ -937,11 +1022,13 @@
///
/// Immutable raw entries have very limited use; you might instead want `raw_entry_mut`.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn raw_entry(&self) -> RawEntryBuilder<'_, K, V, S> {
RawEntryBuilder { map: self }
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V, S> PartialEq for HashMap<K, V, S>
where
K: Eq + Hash,
@@ -957,6 +1044,7 @@
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V, S> Eq for HashMap<K, V, S>
where
K: Eq + Hash,
@@ -965,6 +1053,7 @@
{
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V, S> Debug for HashMap<K, V, S>
where
K: Debug,
@@ -975,6 +1064,7 @@
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V, S> Default for HashMap<K, V, S>
where
S: Default,
@@ -986,6 +1076,7 @@
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K, Q: ?Sized, V, S> Index<&Q> for HashMap<K, V, S>
where
K: Eq + Hash + Borrow<Q>,
@@ -1010,13 +1101,14 @@
/// This `struct` is created by the [`iter`] method on [`HashMap`]. See its
/// documentation for more.
///
-/// [`iter`]: struct.HashMap.html#method.iter
-/// [`HashMap`]: struct.HashMap.html
+/// [`iter`]: HashMap::iter
+//#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, K: 'a, V: 'a> {
base: base::Iter<'a, K, V>,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> Clone for Iter<'_, K, V> {
#[inline]
fn clone(&self) -> Self {
@@ -1024,6 +1116,7 @@
}
}
+//#[stable(feature = "std_debug", since = "1.16.0")]
impl<K: Debug, V: Debug> fmt::Debug for Iter<'_, K, V> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_list().entries(self.clone()).finish()
@@ -1035,8 +1128,8 @@
/// This `struct` is created by the [`iter_mut`] method on [`HashMap`]. See its
/// documentation for more.
///
-/// [`iter_mut`]: struct.HashMap.html#method.iter_mut
-/// [`HashMap`]: struct.HashMap.html
+/// [`iter_mut`]: HashMap::iter_mut
+//#[stable(feature = "rust1", since = "1.0.0")]
pub struct IterMut<'a, K: 'a, V: 'a> {
base: base::IterMut<'a, K, V>,
}
@@ -1054,8 +1147,8 @@
/// This `struct` is created by the [`into_iter`] method on [`HashMap`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
-/// [`into_iter`]: struct.HashMap.html#method.into_iter
-/// [`HashMap`]: struct.HashMap.html
+/// [`into_iter`]: IntoIterator::into_iter
+//#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<K, V> {
base: base::IntoIter<K, V>,
}
@@ -1073,13 +1166,14 @@
/// This `struct` is created by the [`keys`] method on [`HashMap`]. See its
/// documentation for more.
///
-/// [`keys`]: struct.HashMap.html#method.keys
-/// [`HashMap`]: struct.HashMap.html
+/// [`keys`]: HashMap::keys
+//#[stable(feature = "rust1", since = "1.0.0")]
pub struct Keys<'a, K: 'a, V: 'a> {
inner: Iter<'a, K, V>,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> Clone for Keys<'_, K, V> {
#[inline]
fn clone(&self) -> Self {
@@ -1087,6 +1181,7 @@
}
}
+//#[stable(feature = "std_debug", since = "1.16.0")]
impl<K: Debug, V> fmt::Debug for Keys<'_, K, V> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_list().entries(self.clone()).finish()
@@ -1098,13 +1193,14 @@
/// This `struct` is created by the [`values`] method on [`HashMap`]. See its
/// documentation for more.
///
-/// [`values`]: struct.HashMap.html#method.values
-/// [`HashMap`]: struct.HashMap.html
+/// [`values`]: HashMap::values
+//#[stable(feature = "rust1", since = "1.0.0")]
pub struct Values<'a, K: 'a, V: 'a> {
inner: Iter<'a, K, V>,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> Clone for Values<'_, K, V> {
#[inline]
fn clone(&self) -> Self {
@@ -1112,6 +1208,7 @@
}
}
+//#[stable(feature = "std_debug", since = "1.16.0")]
impl<K, V: Debug> fmt::Debug for Values<'_, K, V> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_list().entries(self.clone()).finish()
@@ -1123,8 +1220,8 @@
/// This `struct` is created by the [`drain`] method on [`HashMap`]. See its
/// documentation for more.
///
-/// [`drain`]: struct.HashMap.html#method.drain
-/// [`HashMap`]: struct.HashMap.html
+/// [`drain`]: HashMap::drain
+//#[stable(feature = "drain", since = "1.6.0")]
pub struct Drain<'a, K: 'a, V: 'a> {
base: base::Drain<'a, K, V>,
}
@@ -1137,23 +1234,59 @@
}
}
+/// A draining, filtering iterator over the entries of a `HashMap`.
+///
+/// This `struct` is created by the [`drain_filter`] method on [`HashMap`].
+///
+/// [`drain_filter`]: HashMap::drain_filter
+//#[unstable(feature = "hash_drain_filter", issue = "59618")]
+pub struct DrainFilter<'a, K, V, F>
+where
+ F: FnMut(&K, &mut V) -> bool,
+{
+ base: base::DrainFilter<'a, K, V, F>,
+}
+
/// A mutable iterator over the values of a `HashMap`.
///
/// This `struct` is created by the [`values_mut`] method on [`HashMap`]. See its
/// documentation for more.
///
-/// [`values_mut`]: struct.HashMap.html#method.values_mut
-/// [`HashMap`]: struct.HashMap.html
+/// [`values_mut`]: HashMap::values_mut
+//#[stable(feature = "map_values_mut", since = "1.10.0")]
pub struct ValuesMut<'a, K: 'a, V: 'a> {
inner: IterMut<'a, K, V>,
}
+/// An owning iterator over the keys of a `HashMap`.
+///
+/// This `struct` is created by the [`into_keys`] method on [`HashMap`].
+/// See its documentation for more.
+///
+/// [`into_keys`]: HashMap::into_keys
+//#[unstable(feature = "map_into_keys_values", issue = "75294")]
+pub struct IntoKeys<K, V> {
+ inner: IntoIter<K, V>,
+}
+
+/// An owning iterator over the values of a `HashMap`.
+///
+/// This `struct` is created by the [`into_values`] method on [`HashMap`].
+/// See its documentation for more.
+///
+/// [`into_values`]: HashMap::into_values
+//#[unstable(feature = "map_into_keys_values", issue = "75294")]
+pub struct IntoValues<K, V> {
+ inner: IntoIter<K, V>,
+}
+
/// A builder for computing where in a HashMap a key-value pair would be stored.
///
/// See the [`HashMap::raw_entry_mut`] docs for usage examples.
///
-/// [`HashMap::raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
+/// [`HashMap::raw_entry_mut`]: HashMap::raw_entry_mut
+//#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub struct RawEntryBuilderMut<'a, K: 'a, V: 'a, S: 'a> {
map: &'a mut HashMap<K, V, S>,
}
@@ -1165,29 +1298,27 @@
/// This `enum` is constructed through the [`raw_entry_mut`] method on [`HashMap`],
/// then calling one of the methods of that [`RawEntryBuilderMut`].
///
-/// [`HashMap`]: struct.HashMap.html
/// [`Entry`]: enum.Entry.html
-/// [`raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
+/// [`raw_entry_mut`]: HashMap::raw_entry_mut
/// [`RawEntryBuilderMut`]: struct.RawEntryBuilderMut.html
+//#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub enum RawEntryMut<'a, K: 'a, V: 'a, S: 'a> {
/// An occupied entry.
- Occupied(RawOccupiedEntryMut<'a, K, V>),
+ Occupied(RawOccupiedEntryMut<'a, K, V, S>),
/// A vacant entry.
Vacant(RawVacantEntryMut<'a, K, V, S>),
}
/// A view into an occupied entry in a `HashMap`.
/// It is part of the [`RawEntryMut`] enum.
-///
-/// [`RawEntryMut`]: enum.RawEntryMut.html
-pub struct RawOccupiedEntryMut<'a, K: 'a, V: 'a> {
- base: base::RawOccupiedEntryMut<'a, K, V>,
+//#[unstable(feature = "hash_raw_entry", issue = "56167")]
+pub struct RawOccupiedEntryMut<'a, K: 'a, V: 'a, S: 'a> {
+ base: base::RawOccupiedEntryMut<'a, K, V, S>,
}
/// A view into a vacant entry in a `HashMap`.
/// It is part of the [`RawEntryMut`] enum.
-///
-/// [`RawEntryMut`]: enum.RawEntryMut.html
+//#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub struct RawVacantEntryMut<'a, K: 'a, V: 'a, S: 'a> {
base: base::RawVacantEntryMut<'a, K, V, S>,
}
@@ -1196,7 +1327,8 @@
///
/// See the [`HashMap::raw_entry`] docs for usage examples.
///
-/// [`HashMap::raw_entry`]: struct.HashMap.html#method.raw_entry
+/// [`HashMap::raw_entry`]: HashMap::raw_entry
+//#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub struct RawEntryBuilder<'a, K: 'a, V: 'a, S: 'a> {
map: &'a HashMap<K, V, S>,
}
@@ -1207,6 +1339,7 @@
{
/// Creates a `RawEntryMut` from the given key.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn from_key<Q: ?Sized>(self, k: &Q) -> RawEntryMut<'a, K, V, S>
where
K: Borrow<Q>,
@@ -1217,6 +1350,7 @@
/// Creates a `RawEntryMut` from the given key and its hash.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S>
where
K: Borrow<Q>,
@@ -1227,6 +1361,7 @@
/// Creates a `RawEntryMut` from the given hash.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn from_hash<F>(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S>
where
for<'b> F: FnMut(&'b K) -> bool,
@@ -1241,6 +1376,7 @@
{
/// Access an entry by key.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn from_key<Q: ?Sized>(self, k: &Q) -> Option<(&'a K, &'a V)>
where
K: Borrow<Q>,
@@ -1251,6 +1387,7 @@
/// Access an entry by a key and its hash.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> Option<(&'a K, &'a V)>
where
K: Borrow<Q>,
@@ -1261,6 +1398,7 @@
/// Access an entry by hash.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn from_hash<F>(self, hash: u64, is_match: F) -> Option<(&'a K, &'a V)>
where
F: FnMut(&K) -> bool,
@@ -1288,6 +1426,7 @@
/// assert_eq!(map["poneyland"], 6);
/// ```
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn or_insert(self, default_key: K, default_val: V) -> (&'a mut K, &'a mut V)
where
K: Hash,
@@ -1317,6 +1456,7 @@
/// assert_eq!(map["poneyland"], "hoho".to_string());
/// ```
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn or_insert_with<F>(self, default: F) -> (&'a mut K, &'a mut V)
where
F: FnOnce() -> (K, V),
@@ -1356,6 +1496,7 @@
/// assert_eq!(map["poneyland"], 43);
/// ```
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn and_modify<F>(self, f: F) -> Self
where
F: FnOnce(&mut K, &mut V),
@@ -1373,15 +1514,17 @@
}
}
-impl<'a, K, V> RawOccupiedEntryMut<'a, K, V> {
+impl<'a, K, V, S> RawOccupiedEntryMut<'a, K, V, S> {
/// Gets a reference to the key in the entry.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn key(&self) -> &K {
self.base.key()
}
/// Gets a mutable reference to the key in the entry.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn key_mut(&mut self) -> &mut K {
self.base.key_mut()
}
@@ -1389,12 +1532,14 @@
/// Converts the entry into a mutable reference to the key in the entry
/// with a lifetime bound to the map itself.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn into_key(self) -> &'a mut K {
self.base.into_key()
}
/// Gets a reference to the value in the entry.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn get(&self) -> &V {
self.base.get()
}
@@ -1402,24 +1547,28 @@
/// Converts the OccupiedEntry into a mutable reference to the value in the entry
/// with a lifetime bound to the map itself.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn into_mut(self) -> &'a mut V {
self.base.into_mut()
}
/// Gets a mutable reference to the value in the entry.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn get_mut(&mut self) -> &mut V {
self.base.get_mut()
}
/// Gets a reference to the key and value in the entry.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn get_key_value(&mut self) -> (&K, &V) {
self.base.get_key_value()
}
/// Gets a mutable reference to the key and value in the entry.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn get_key_value_mut(&mut self) -> (&mut K, &mut V) {
self.base.get_key_value_mut()
}
@@ -1427,30 +1576,35 @@
/// Converts the OccupiedEntry into a mutable reference to the key and value in the entry
/// with a lifetime bound to the map itself.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn into_key_value(self) -> (&'a mut K, &'a mut V) {
self.base.into_key_value()
}
/// Sets the value of the entry, and returns the entry's old value.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn insert(&mut self, value: V) -> V {
self.base.insert(value)
}
/// Sets the value of the entry, and returns the entry's old value.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn insert_key(&mut self, key: K) -> K {
self.base.insert_key(key)
}
/// Takes the value out of the entry, and returns it.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn remove(self) -> V {
self.base.remove()
}
/// Take the ownership of the key and value from the map.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn remove_entry(self) -> (K, V) {
self.base.remove_entry()
}
@@ -1460,6 +1614,7 @@
/// Sets the value of the entry with the VacantEntry's key,
/// and returns a mutable reference to it.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn insert(self, key: K, value: V) -> (&'a mut K, &'a mut V)
where
K: Hash,
@@ -1471,6 +1626,7 @@
/// Sets the value of the entry with the VacantEntry's key,
/// and returns a mutable reference to it.
#[inline]
+ //#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn insert_hashed_nocheck(self, hash: u64, key: K, value: V) -> (&'a mut K, &'a mut V)
where
K: Hash,
@@ -1480,12 +1636,14 @@
}
}
+//#[unstable(feature = "hash_raw_entry", issue = "56167")]
impl<K, V, S> Debug for RawEntryBuilderMut<'_, K, V, S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("RawEntryBuilder").finish()
}
}
+//#[unstable(feature = "hash_raw_entry", issue = "56167")]
impl<K: Debug, V: Debug, S> Debug for RawEntryMut<'_, K, V, S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
@@ -1495,7 +1653,8 @@
}
}
-impl<K: Debug, V: Debug> Debug for RawOccupiedEntryMut<'_, K, V> {
+//#[unstable(feature = "hash_raw_entry", issue = "56167")]
+impl<K: Debug, V: Debug, S> Debug for RawOccupiedEntryMut<'_, K, V, S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("RawOccupiedEntryMut")
.field("key", self.key())
@@ -1504,12 +1663,14 @@
}
}
+//#[unstable(feature = "hash_raw_entry", issue = "56167")]
impl<K, V, S> Debug for RawVacantEntryMut<'_, K, V, S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("RawVacantEntryMut").finish()
}
}
+//#[unstable(feature = "hash_raw_entry", issue = "56167")]
impl<K, V, S> Debug for RawEntryBuilder<'_, K, V, S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("RawEntryBuilder").finish()
@@ -1520,16 +1681,19 @@
///
/// This `enum` is constructed from the [`entry`] method on [`HashMap`].
///
-/// [`HashMap`]: struct.HashMap.html
-/// [`entry`]: struct.HashMap.html#method.entry
+/// [`entry`]: HashMap::entry
+//#[stable(feature = "rust1", since = "1.0.0")]
pub enum Entry<'a, K: 'a, V: 'a> {
/// An occupied entry.
+ //#[stable(feature = "rust1", since = "1.0.0")]
Occupied(OccupiedEntry<'a, K, V>),
/// A vacant entry.
+ //#[stable(feature = "rust1", since = "1.0.0")]
Vacant(VacantEntry<'a, K, V>),
}
+//#[stable(feature = "debug_hash_map", since = "1.12.0")]
impl<K: Debug, V: Debug> Debug for Entry<'_, K, V> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
@@ -1543,10 +1707,12 @@
/// It is part of the [`Entry`] enum.
///
/// [`Entry`]: enum.Entry.html
+//#[stable(feature = "rust1", since = "1.0.0")]
pub struct OccupiedEntry<'a, K: 'a, V: 'a> {
base: base::RustcOccupiedEntry<'a, K, V>,
}
+//#[stable(feature = "debug_hash_map", since = "1.12.0")]
impl<K: Debug, V: Debug> Debug for OccupiedEntry<'_, K, V> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("OccupiedEntry").field("key", self.key()).field("value", self.get()).finish()
@@ -1557,16 +1723,19 @@
/// It is part of the [`Entry`] enum.
///
/// [`Entry`]: enum.Entry.html
+//#[stable(feature = "rust1", since = "1.0.0")]
pub struct VacantEntry<'a, K: 'a, V: 'a> {
base: base::RustcVacantEntry<'a, K, V>,
}
+//#[stable(feature = "debug_hash_map", since = "1.12.0")]
impl<K: Debug, V> Debug for VacantEntry<'_, K, V> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("VacantEntry").field(self.key()).finish()
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K, V, S> IntoIterator for &'a HashMap<K, V, S> {
type Item = (&'a K, &'a V);
type IntoIter = Iter<'a, K, V>;
@@ -1577,6 +1746,7 @@
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K, V, S> IntoIterator for &'a mut HashMap<K, V, S> {
type Item = (&'a K, &'a mut V);
type IntoIter = IterMut<'a, K, V>;
@@ -1587,6 +1757,7 @@
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V, S> IntoIterator for HashMap<K, V, S> {
type Item = (K, V);
type IntoIter = IntoIter<K, V>;
@@ -1614,6 +1785,7 @@
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K, V> Iterator for Iter<'a, K, V> {
type Item = (&'a K, &'a V);
@@ -1626,7 +1798,7 @@
self.base.size_hint()
}
}
-
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> ExactSizeIterator for Iter<'_, K, V> {
#[inline]
fn len(&self) -> usize {
@@ -1634,8 +1806,10 @@
}
}
+//#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for Iter<'_, K, V> {}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K, V> Iterator for IterMut<'a, K, V> {
type Item = (&'a K, &'a mut V);
@@ -1648,16 +1822,17 @@
self.base.size_hint()
}
}
-
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> ExactSizeIterator for IterMut<'_, K, V> {
#[inline]
fn len(&self) -> usize {
self.base.len()
}
}
-
+//#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for IterMut<'_, K, V> {}
+//#[stable(feature = "std_debug", since = "1.16.0")]
impl<K, V> fmt::Debug for IterMut<'_, K, V>
where
K: fmt::Debug,
@@ -1668,6 +1843,7 @@
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> Iterator for IntoIter<K, V> {
type Item = (K, V);
@@ -1680,22 +1856,24 @@
self.base.size_hint()
}
}
-
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> ExactSizeIterator for IntoIter<K, V> {
#[inline]
fn len(&self) -> usize {
self.base.len()
}
}
-
+//#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for IntoIter<K, V> {}
+//#[stable(feature = "std_debug", since = "1.16.0")]
impl<K: Debug, V: Debug> fmt::Debug for IntoIter<K, V> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_list().entries(self.iter()).finish()
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K, V> Iterator for Keys<'a, K, V> {
type Item = &'a K;
@@ -1708,16 +1886,17 @@
self.inner.size_hint()
}
}
-
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> ExactSizeIterator for Keys<'_, K, V> {
#[inline]
fn len(&self) -> usize {
self.inner.len()
}
}
-
+//#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for Keys<'_, K, V> {}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K, V> Iterator for Values<'a, K, V> {
type Item = &'a V;
@@ -1730,16 +1909,17 @@
self.inner.size_hint()
}
}
-
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> ExactSizeIterator for Values<'_, K, V> {
#[inline]
fn len(&self) -> usize {
self.inner.len()
}
}
-
+//#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for Values<'_, K, V> {}
+//#[stable(feature = "map_values_mut", since = "1.10.0")]
impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
type Item = &'a mut V;
@@ -1752,16 +1932,17 @@
self.inner.size_hint()
}
}
-
+//#[stable(feature = "map_values_mut", since = "1.10.0")]
impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V> {
#[inline]
fn len(&self) -> usize {
self.inner.len()
}
}
-
+//#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for ValuesMut<'_, K, V> {}
+//#[stable(feature = "std_debug", since = "1.16.0")]
impl<K, V> fmt::Debug for ValuesMut<'_, K, V>
where
K: fmt::Debug,
@@ -1772,6 +1953,67 @@
}
}
+//#[unstable(feature = "map_into_keys_values", issue = "75294")]
+impl<K, V> Iterator for IntoKeys<K, V> {
+ type Item = K;
+
+ #[inline]
+ fn next(&mut self) -> Option<K> {
+ self.inner.next().map(|(k, _)| k)
+ }
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+//#[unstable(feature = "map_into_keys_values", issue = "75294")]
+impl<K, V> ExactSizeIterator for IntoKeys<K, V> {
+ #[inline]
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+//#[unstable(feature = "map_into_keys_values", issue = "75294")]
+impl<K, V> FusedIterator for IntoKeys<K, V> {}
+
+//#[unstable(feature = "map_into_keys_values", issue = "75294")]
+impl<K: Debug, V: Debug> fmt::Debug for IntoKeys<K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.inner.iter().map(|(k, _)| k)).finish()
+ }
+}
+
+//#[unstable(feature = "map_into_keys_values", issue = "75294")]
+impl<K, V> Iterator for IntoValues<K, V> {
+ type Item = V;
+
+ #[inline]
+ fn next(&mut self) -> Option<V> {
+ self.inner.next().map(|(_, v)| v)
+ }
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+//#[unstable(feature = "map_into_keys_values", issue = "75294")]
+impl<K, V> ExactSizeIterator for IntoValues<K, V> {
+ #[inline]
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+//#[unstable(feature = "map_into_keys_values", issue = "75294")]
+impl<K, V> FusedIterator for IntoValues<K, V> {}
+
+//#[unstable(feature = "map_into_keys_values", issue = "75294")]
+impl<K: Debug, V: Debug> fmt::Debug for IntoValues<K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.inner.iter().map(|(_, v)| v)).finish()
+ }
+}
+
+//#[stable(feature = "drain", since = "1.6.0")]
impl<'a, K, V> Iterator for Drain<'a, K, V> {
type Item = (K, V);
@@ -1784,16 +2026,17 @@
self.base.size_hint()
}
}
-
+//#[stable(feature = "drain", since = "1.6.0")]
impl<K, V> ExactSizeIterator for Drain<'_, K, V> {
#[inline]
fn len(&self) -> usize {
self.base.len()
}
}
-
+//#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for Drain<'_, K, V> {}
+//#[stable(feature = "std_debug", since = "1.16.0")]
impl<K, V> fmt::Debug for Drain<'_, K, V>
where
K: fmt::Debug,
@@ -1804,7 +2047,38 @@
}
}
+//#[unstable(feature = "hash_drain_filter", issue = "59618")]
+impl<K, V, F> Iterator for DrainFilter<'_, K, V, F>
+where
+ F: FnMut(&K, &mut V) -> bool,
+{
+ type Item = (K, V);
+
+ #[inline]
+ fn next(&mut self) -> Option<(K, V)> {
+ self.base.next()
+ }
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.base.size_hint()
+ }
+}
+
+//#[unstable(feature = "hash_drain_filter", issue = "59618")]
+impl<K, V, F> FusedIterator for DrainFilter<'_, K, V, F> where F: FnMut(&K, &mut V) -> bool {}
+
+//#[unstable(feature = "hash_drain_filter", issue = "59618")]
+impl<'a, K, V, F> fmt::Debug for DrainFilter<'a, K, V, F>
+where
+ F: FnMut(&K, &mut V) -> bool,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.pad("DrainFilter { .. }")
+ }
+}
+
impl<'a, K, V> Entry<'a, K, V> {
+ //#[stable(feature = "rust1", since = "1.0.0")]
/// Ensures a value is in the entry by inserting the default if empty, and returns
/// a mutable reference to the value in the entry.
///
@@ -1829,6 +2103,7 @@
}
}
+ //#[stable(feature = "rust1", since = "1.0.0")]
/// Ensures a value is in the entry by inserting the result of the default function if empty,
/// and returns a mutable reference to the value in the entry.
///
@@ -1852,6 +2127,34 @@
}
}
+ //#[unstable(feature = "or_insert_with_key", issue = "71024")]
+ /// Ensures a value is in the entry by inserting, if empty, the result of the default function,
+ /// which takes the key as its argument, and returns a mutable reference to the value in the
+ /// entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(or_insert_with_key)]
+ /// use std::collections::HashMap;
+ ///
+ /// let mut map: HashMap<&str, usize> = HashMap::new();
+ ///
+ /// map.entry("poneyland").or_insert_with_key(|key| key.chars().count());
+ ///
+ /// assert_eq!(map["poneyland"], 9);
+ /// ```
+ #[inline]
+ pub fn or_insert_with_key<F: FnOnce(&K) -> V>(self, default: F) -> &'a mut V {
+ match self {
+ Occupied(entry) => entry.into_mut(),
+ Vacant(entry) => {
+ let value = default(entry.key());
+ entry.insert(value)
+ }
+ }
+ }
+
/// Returns a reference to this entry's key.
///
/// # Examples
@@ -1863,6 +2166,7 @@
/// assert_eq!(map.entry("poneyland").key(), &"poneyland");
/// ```
#[inline]
+ //#[stable(feature = "map_entry_keys", since = "1.10.0")]
pub fn key(&self) -> &K {
match *self {
Occupied(ref entry) => entry.key(),
@@ -1891,6 +2195,7 @@
/// assert_eq!(map["poneyland"], 43);
/// ```
#[inline]
+ //#[stable(feature = "entry_and_modify", since = "1.26.0")]
pub fn and_modify<F>(self, f: F) -> Self
where
F: FnOnce(&mut V),
@@ -1918,6 +2223,7 @@
/// assert_eq!(entry.key(), &"poneyland");
/// ```
#[inline]
+ //#[unstable(feature = "entry_insert", issue = "65225")]
pub fn insert(self, value: V) -> OccupiedEntry<'a, K, V> {
match self {
Occupied(mut entry) => {
@@ -1930,6 +2236,7 @@
}
impl<'a, K, V: Default> Entry<'a, K, V> {
+ //#[stable(feature = "entry_or_default", since = "1.28.0")]
/// Ensures a value is in the entry by inserting the default value if empty,
/// and returns a mutable reference to the value in the entry.
///
@@ -1967,6 +2274,7 @@
/// assert_eq!(map.entry("poneyland").key(), &"poneyland");
/// ```
#[inline]
+ //#[stable(feature = "map_entry_keys", since = "1.10.0")]
pub fn key(&self) -> &K {
self.base.key()
}
@@ -1990,6 +2298,7 @@
/// assert_eq!(map.contains_key("poneyland"), false);
/// ```
#[inline]
+ //#[stable(feature = "map_entry_recover_keys2", since = "1.12.0")]
pub fn remove_entry(self) -> (K, V) {
self.base.remove_entry()
}
@@ -2010,6 +2319,7 @@
/// }
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn get(&self) -> &V {
self.base.get()
}
@@ -2019,7 +2329,7 @@
/// If you need a reference to the `OccupiedEntry` which may outlive the
/// destruction of the `Entry` value, see [`into_mut`].
///
- /// [`into_mut`]: #method.into_mut
+ /// [`into_mut`]: Self::into_mut
///
/// # Examples
///
@@ -2042,6 +2352,7 @@
/// assert_eq!(map["poneyland"], 24);
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn get_mut(&mut self) -> &mut V {
self.base.get_mut()
}
@@ -2051,7 +2362,7 @@
///
/// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
///
- /// [`get_mut`]: #method.get_mut
+ /// [`get_mut`]: Self::get_mut
///
/// # Examples
///
@@ -2070,6 +2381,7 @@
/// assert_eq!(map["poneyland"], 22);
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn into_mut(self) -> &'a mut V {
self.base.into_mut()
}
@@ -2092,6 +2404,7 @@
/// assert_eq!(map["poneyland"], 15);
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(&mut self, value: V) -> V {
self.base.insert(value)
}
@@ -2114,6 +2427,7 @@
/// assert_eq!(map.contains_key("poneyland"), false);
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove(self) -> V {
self.base.remove()
}
@@ -2140,6 +2454,7 @@
///
/// ```
#[inline]
+ //#[unstable(feature = "map_entry_replace", issue = "44286")]
pub fn replace_entry(self, value: V) -> (K, V) {
self.base.replace_entry(value)
}
@@ -2154,7 +2469,7 @@
/// use std::rc::Rc;
///
/// let mut map: HashMap<Rc<String>, u32> = HashMap::new();
- /// let mut known_strings: Vec<Rc<String>> = Vec::new();
+ /// let known_strings: Vec<Rc<String>> = Vec::new();
///
/// // Initialise known strings, run program, etc.
///
@@ -2162,7 +2477,7 @@
///
/// fn reclaim_memory(map: &mut HashMap<Rc<String>, u32>, known_strings: &[Rc<String>] ) {
/// for s in known_strings {
- /// if let Entry::Occupied(entry) = map.entry(s.clone()) {
+ /// if let Entry::Occupied(entry) = map.entry(Rc::clone(s)) {
/// // Replaces the entry's key with our version of it in `known_strings`.
/// entry.replace_key();
/// }
@@ -2170,6 +2485,7 @@
/// }
/// ```
#[inline]
+ //#[unstable(feature = "map_entry_replace", issue = "44286")]
pub fn replace_key(self) -> K {
self.base.replace_key()
}
@@ -2188,6 +2504,7 @@
/// assert_eq!(map.entry("poneyland").key(), &"poneyland");
/// ```
#[inline]
+ //#[stable(feature = "map_entry_keys", since = "1.10.0")]
pub fn key(&self) -> &K {
self.base.key()
}
@@ -2207,6 +2524,7 @@
/// }
/// ```
#[inline]
+ //#[stable(feature = "map_entry_recover_keys2", since = "1.12.0")]
pub fn into_key(self) -> K {
self.base.into_key()
}
@@ -2228,6 +2546,7 @@
/// assert_eq!(map["poneyland"], 37);
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(self, value: V) -> &'a mut V {
self.base.insert(value)
}
@@ -2255,6 +2574,7 @@
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V, S> FromIterator<(K, V)> for HashMap<K, V, S>
where
K: Eq + Hash,
@@ -2269,6 +2589,7 @@
/// Inserts all new key-values from the iterator and replaces values with existing
/// keys with new values returned from the iterator.
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V, S> Extend<(K, V)> for HashMap<K, V, S>
where
K: Eq + Hash,
@@ -2278,8 +2599,27 @@
fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
self.base.extend(iter)
}
+
+ #[inline]
+ fn extend_one(&mut self, (k, v): (K, V)) {
+ self.base.insert(k, v);
+ }
+
+ #[inline]
+ fn extend_reserve(&mut self, additional: usize) {
+ // self.base.extend_reserve(additional);
+ // FIXME: hashbrown should implement this method.
+ // But until then, use the same reservation logic:
+
+ // Reserve the entire hint lower bound if the map is empty.
+ // Otherwise reserve half the hint (rounded up), so the map
+ // will only resize twice in the worst case.
+ let reserve = if self.is_empty() { additional } else { (additional + 1) / 2 };
+ self.base.reserve(reserve);
+ }
}
+//#[stable(feature = "hash_extend_copy", since = "1.4.0")]
impl<'a, K: 'a, V: 'a, S> Extend<(&'a K, &'a V)> for HashMap<K, V, S>
where
K: Eq + Hash + Copy,
@@ -2290,6 +2630,16 @@
fn extend<T: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: T) {
self.base.extend(iter)
}
+
+ #[inline]
+ fn extend_one(&mut self, (&k, &v): (&'a K, &'a V)) {
+ self.base.insert(k, v);
+ }
+
+ #[inline]
+ fn extend_reserve(&mut self, additional: usize) {
+ Extend::<(K, V)>::extend_reserve(self, additional)
+ }
}
/// `RandomState` is the default state for [`HashMap`] types.
@@ -2298,9 +2648,6 @@
/// [`Hasher`], but the hashers created by two different `RandomState`
/// instances are unlikely to produce the same result for the same values.
///
-/// [`HashMap`]: struct.HashMap.html
-/// [`Hasher`]: ../../hash/trait.Hasher.html
-///
/// # Examples
///
/// ```
@@ -2312,6 +2659,7 @@
/// map.insert(1, 2);
/// ```
#[derive(Clone)]
+//#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
pub struct RandomState {
k0: u64,
k1: u64,
@@ -2330,6 +2678,7 @@
#[inline]
#[allow(deprecated)]
// rand
+ //#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
pub fn new() -> RandomState {
// Historically this function did not cache keys from the OS and instead
// simply always called `rand::thread_rng().gen()` twice. In #31356 it
@@ -2354,6 +2703,7 @@
}
}
+//#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
impl BuildHasher for RandomState {
type Hasher = DefaultHasher;
#[inline]
@@ -2367,9 +2717,7 @@
///
/// The internal algorithm is not specified, and so it and its hashes should
/// not be relied upon over releases.
-///
-/// [`RandomState`]: struct.RandomState.html
-/// [`Hasher`]: ../../hash/trait.Hasher.html
+//#[stable(feature = "hashmap_default_hasher", since = "1.13.0")]
#[allow(deprecated)]
#[derive(Clone, Debug)]
pub struct DefaultHasher(SipHasher13);
@@ -2380,12 +2728,14 @@
/// This hasher is not guaranteed to be the same as all other
/// `DefaultHasher` instances, but is the same as all other `DefaultHasher`
/// instances created through `new` or `default`.
+ //#[stable(feature = "hashmap_default_hasher", since = "1.13.0")]
#[allow(deprecated)]
pub fn new() -> DefaultHasher {
DefaultHasher(SipHasher13::new_with_keys(0, 0))
}
}
+//#[stable(feature = "hashmap_default_hasher", since = "1.13.0")]
impl Default for DefaultHasher {
// FIXME: here should link `new` to [DefaultHasher::new], but it occurs intra-doc link
// resolution failure when re-exporting libstd items. When #56922 fixed,
@@ -2397,6 +2747,7 @@
}
}
+//#[stable(feature = "hashmap_default_hasher", since = "1.13.0")]
impl Hasher for DefaultHasher {
#[inline]
fn write(&mut self, msg: &[u8]) {
@@ -2409,6 +2760,7 @@
}
}
+//#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
impl Default for RandomState {
/// Constructs a new `RandomState`.
#[inline]
@@ -2417,6 +2769,7 @@
}
}
+//#[stable(feature = "std_debug", since = "1.16.0")]
impl fmt::Debug for RandomState {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.pad("RandomState { .. }")
@@ -2432,10 +2785,10 @@
}
#[inline]
-fn map_collection_alloc_err(err: hashbrown::CollectionAllocErr) -> TryReserveError {
+pub(super) fn map_try_reserve_error(err: hashbrown::TryReserveError) -> TryReserveError {
match err {
- hashbrown::CollectionAllocErr::CapacityOverflow => TryReserveError::CapacityOverflow,
- hashbrown::CollectionAllocErr::AllocErr { layout } => {
+ hashbrown::TryReserveError::CapacityOverflow => TryReserveError::CapacityOverflow,
+ hashbrown::TryReserveError::AllocError { layout } => {
TryReserveError::AllocError { layout, non_exhaustive: () }
}
}
@@ -2489,909 +2842,3 @@
d
}
}
-
-#[cfg(test)]
-mod test_map {
- use super::Entry::{Occupied, Vacant};
- use super::HashMap;
- use super::RandomState;
- use crate::cell::RefCell;
- use rand::{thread_rng, Rng};
- use realstd::collections::TryReserveError::*;
-
- // https://github.com/rust-lang/rust/issues/62301
- fn _assert_hashmap_is_unwind_safe() {
- fn assert_unwind_safe<T: crate::panic::UnwindSafe>() {}
- assert_unwind_safe::<HashMap<(), crate::cell::UnsafeCell<()>>>();
- }
-
- #[test]
- fn test_zero_capacities() {
- type HM = HashMap<i32, i32>;
-
- let m = HM::new();
- assert_eq!(m.capacity(), 0);
-
- let m = HM::default();
- assert_eq!(m.capacity(), 0);
-
- let m = HM::with_hasher(RandomState::new());
- assert_eq!(m.capacity(), 0);
-
- let m = HM::with_capacity(0);
- assert_eq!(m.capacity(), 0);
-
- let m = HM::with_capacity_and_hasher(0, RandomState::new());
- assert_eq!(m.capacity(), 0);
-
- let mut m = HM::new();
- m.insert(1, 1);
- m.insert(2, 2);
- m.remove(&1);
- m.remove(&2);
- m.shrink_to_fit();
- assert_eq!(m.capacity(), 0);
-
- let mut m = HM::new();
- m.reserve(0);
- assert_eq!(m.capacity(), 0);
- }
-
- #[test]
- fn test_create_capacity_zero() {
- let mut m = HashMap::with_capacity(0);
-
- assert!(m.insert(1, 1).is_none());
-
- assert!(m.contains_key(&1));
- assert!(!m.contains_key(&0));
- }
-
- #[test]
- fn test_insert() {
- let mut m = HashMap::new();
- assert_eq!(m.len(), 0);
- assert!(m.insert(1, 2).is_none());
- assert_eq!(m.len(), 1);
- assert!(m.insert(2, 4).is_none());
- assert_eq!(m.len(), 2);
- assert_eq!(*m.get(&1).unwrap(), 2);
- assert_eq!(*m.get(&2).unwrap(), 4);
- }
-
- #[test]
- fn test_clone() {
- let mut m = HashMap::new();
- assert_eq!(m.len(), 0);
- assert!(m.insert(1, 2).is_none());
- assert_eq!(m.len(), 1);
- assert!(m.insert(2, 4).is_none());
- assert_eq!(m.len(), 2);
- let m2 = m.clone();
- assert_eq!(*m2.get(&1).unwrap(), 2);
- assert_eq!(*m2.get(&2).unwrap(), 4);
- assert_eq!(m2.len(), 2);
- }
-
- thread_local! { static DROP_VECTOR: RefCell<Vec<i32>> = RefCell::new(Vec::new()) }
-
- #[derive(Hash, PartialEq, Eq)]
- struct Droppable {
- k: usize,
- }
-
- impl Droppable {
- fn new(k: usize) -> Droppable {
- DROP_VECTOR.with(|slot| {
- slot.borrow_mut()[k] += 1;
- });
-
- Droppable { k }
- }
- }
-
- impl Drop for Droppable {
- fn drop(&mut self) {
- DROP_VECTOR.with(|slot| {
- slot.borrow_mut()[self.k] -= 1;
- });
- }
- }
-
- impl Clone for Droppable {
- fn clone(&self) -> Droppable {
- Droppable::new(self.k)
- }
- }
-
- #[test]
- fn test_drops() {
- DROP_VECTOR.with(|slot| {
- *slot.borrow_mut() = vec![0; 200];
- });
-
- {
- let mut m = HashMap::new();
-
- DROP_VECTOR.with(|v| {
- for i in 0..200 {
- assert_eq!(v.borrow()[i], 0);
- }
- });
-
- for i in 0..100 {
- let d1 = Droppable::new(i);
- let d2 = Droppable::new(i + 100);
- m.insert(d1, d2);
- }
-
- DROP_VECTOR.with(|v| {
- for i in 0..200 {
- assert_eq!(v.borrow()[i], 1);
- }
- });
-
- for i in 0..50 {
- let k = Droppable::new(i);
- let v = m.remove(&k);
-
- assert!(v.is_some());
-
- DROP_VECTOR.with(|v| {
- assert_eq!(v.borrow()[i], 1);
- assert_eq!(v.borrow()[i + 100], 1);
- });
- }
-
- DROP_VECTOR.with(|v| {
- for i in 0..50 {
- assert_eq!(v.borrow()[i], 0);
- assert_eq!(v.borrow()[i + 100], 0);
- }
-
- for i in 50..100 {
- assert_eq!(v.borrow()[i], 1);
- assert_eq!(v.borrow()[i + 100], 1);
- }
- });
- }
-
- DROP_VECTOR.with(|v| {
- for i in 0..200 {
- assert_eq!(v.borrow()[i], 0);
- }
- });
- }
-
- #[test]
- fn test_into_iter_drops() {
- DROP_VECTOR.with(|v| {
- *v.borrow_mut() = vec![0; 200];
- });
-
- let hm = {
- let mut hm = HashMap::new();
-
- DROP_VECTOR.with(|v| {
- for i in 0..200 {
- assert_eq!(v.borrow()[i], 0);
- }
- });
-
- for i in 0..100 {
- let d1 = Droppable::new(i);
- let d2 = Droppable::new(i + 100);
- hm.insert(d1, d2);
- }
-
- DROP_VECTOR.with(|v| {
- for i in 0..200 {
- assert_eq!(v.borrow()[i], 1);
- }
- });
-
- hm
- };
-
- // By the way, ensure that cloning doesn't screw up the dropping.
- drop(hm.clone());
-
- {
- let mut half = hm.into_iter().take(50);
-
- DROP_VECTOR.with(|v| {
- for i in 0..200 {
- assert_eq!(v.borrow()[i], 1);
- }
- });
-
- for _ in half.by_ref() {}
-
- DROP_VECTOR.with(|v| {
- let nk = (0..100).filter(|&i| v.borrow()[i] == 1).count();
-
- let nv = (0..100).filter(|&i| v.borrow()[i + 100] == 1).count();
-
- assert_eq!(nk, 50);
- assert_eq!(nv, 50);
- });
- };
-
- DROP_VECTOR.with(|v| {
- for i in 0..200 {
- assert_eq!(v.borrow()[i], 0);
- }
- });
- }
-
- #[test]
- fn test_empty_remove() {
- let mut m: HashMap<i32, bool> = HashMap::new();
- assert_eq!(m.remove(&0), None);
- }
-
- #[test]
- fn test_empty_entry() {
- let mut m: HashMap<i32, bool> = HashMap::new();
- match m.entry(0) {
- Occupied(_) => panic!(),
- Vacant(_) => {}
- }
- assert!(*m.entry(0).or_insert(true));
- assert_eq!(m.len(), 1);
- }
-
- #[test]
- fn test_empty_iter() {
- let mut m: HashMap<i32, bool> = HashMap::new();
- assert_eq!(m.drain().next(), None);
- assert_eq!(m.keys().next(), None);
- assert_eq!(m.values().next(), None);
- assert_eq!(m.values_mut().next(), None);
- assert_eq!(m.iter().next(), None);
- assert_eq!(m.iter_mut().next(), None);
- assert_eq!(m.len(), 0);
- assert!(m.is_empty());
- assert_eq!(m.into_iter().next(), None);
- }
-
- #[test]
- fn test_lots_of_insertions() {
- let mut m = HashMap::new();
-
- // Try this a few times to make sure we never screw up the hashmap's
- // internal state.
- for _ in 0..10 {
- assert!(m.is_empty());
-
- for i in 1..1001 {
- assert!(m.insert(i, i).is_none());
-
- for j in 1..=i {
- let r = m.get(&j);
- assert_eq!(r, Some(&j));
- }
-
- for j in i + 1..1001 {
- let r = m.get(&j);
- assert_eq!(r, None);
- }
- }
-
- for i in 1001..2001 {
- assert!(!m.contains_key(&i));
- }
-
- // remove forwards
- for i in 1..1001 {
- assert!(m.remove(&i).is_some());
-
- for j in 1..=i {
- assert!(!m.contains_key(&j));
- }
-
- for j in i + 1..1001 {
- assert!(m.contains_key(&j));
- }
- }
-
- for i in 1..1001 {
- assert!(!m.contains_key(&i));
- }
-
- for i in 1..1001 {
- assert!(m.insert(i, i).is_none());
- }
-
- // remove backwards
- for i in (1..1001).rev() {
- assert!(m.remove(&i).is_some());
-
- for j in i..1001 {
- assert!(!m.contains_key(&j));
- }
-
- for j in 1..i {
- assert!(m.contains_key(&j));
- }
- }
- }
- }
-
- #[test]
- fn test_find_mut() {
- let mut m = HashMap::new();
- assert!(m.insert(1, 12).is_none());
- assert!(m.insert(2, 8).is_none());
- assert!(m.insert(5, 14).is_none());
- let new = 100;
- match m.get_mut(&5) {
- None => panic!(),
- Some(x) => *x = new,
- }
- assert_eq!(m.get(&5), Some(&new));
- }
-
- #[test]
- fn test_insert_overwrite() {
- let mut m = HashMap::new();
- assert!(m.insert(1, 2).is_none());
- assert_eq!(*m.get(&1).unwrap(), 2);
- assert!(!m.insert(1, 3).is_none());
- assert_eq!(*m.get(&1).unwrap(), 3);
- }
-
- #[test]
- fn test_insert_conflicts() {
- let mut m = HashMap::with_capacity(4);
- assert!(m.insert(1, 2).is_none());
- assert!(m.insert(5, 3).is_none());
- assert!(m.insert(9, 4).is_none());
- assert_eq!(*m.get(&9).unwrap(), 4);
- assert_eq!(*m.get(&5).unwrap(), 3);
- assert_eq!(*m.get(&1).unwrap(), 2);
- }
-
- #[test]
- fn test_conflict_remove() {
- let mut m = HashMap::with_capacity(4);
- assert!(m.insert(1, 2).is_none());
- assert_eq!(*m.get(&1).unwrap(), 2);
- assert!(m.insert(5, 3).is_none());
- assert_eq!(*m.get(&1).unwrap(), 2);
- assert_eq!(*m.get(&5).unwrap(), 3);
- assert!(m.insert(9, 4).is_none());
- assert_eq!(*m.get(&1).unwrap(), 2);
- assert_eq!(*m.get(&5).unwrap(), 3);
- assert_eq!(*m.get(&9).unwrap(), 4);
- assert!(m.remove(&1).is_some());
- assert_eq!(*m.get(&9).unwrap(), 4);
- assert_eq!(*m.get(&5).unwrap(), 3);
- }
-
- #[test]
- fn test_is_empty() {
- let mut m = HashMap::with_capacity(4);
- assert!(m.insert(1, 2).is_none());
- assert!(!m.is_empty());
- assert!(m.remove(&1).is_some());
- assert!(m.is_empty());
- }
-
- #[test]
- fn test_remove() {
- let mut m = HashMap::new();
- m.insert(1, 2);
- assert_eq!(m.remove(&1), Some(2));
- assert_eq!(m.remove(&1), None);
- }
-
- #[test]
- fn test_remove_entry() {
- let mut m = HashMap::new();
- m.insert(1, 2);
- assert_eq!(m.remove_entry(&1), Some((1, 2)));
- assert_eq!(m.remove(&1), None);
- }
-
- #[test]
- fn test_iterate() {
- let mut m = HashMap::with_capacity(4);
- for i in 0..32 {
- assert!(m.insert(i, i * 2).is_none());
- }
- assert_eq!(m.len(), 32);
-
- let mut observed: u32 = 0;
-
- for (k, v) in &m {
- assert_eq!(*v, *k * 2);
- observed |= 1 << *k;
- }
- assert_eq!(observed, 0xFFFF_FFFF);
- }
-
- #[test]
- fn test_keys() {
- let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
- let map: HashMap<_, _> = vec.into_iter().collect();
- let keys: Vec<_> = map.keys().cloned().collect();
- assert_eq!(keys.len(), 3);
- assert!(keys.contains(&1));
- assert!(keys.contains(&2));
- assert!(keys.contains(&3));
- }
-
- #[test]
- fn test_values() {
- let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
- let map: HashMap<_, _> = vec.into_iter().collect();
- let values: Vec<_> = map.values().cloned().collect();
- assert_eq!(values.len(), 3);
- assert!(values.contains(&'a'));
- assert!(values.contains(&'b'));
- assert!(values.contains(&'c'));
- }
-
- #[test]
- fn test_values_mut() {
- let vec = vec![(1, 1), (2, 2), (3, 3)];
- let mut map: HashMap<_, _> = vec.into_iter().collect();
- for value in map.values_mut() {
- *value = (*value) * 2
- }
- let values: Vec<_> = map.values().cloned().collect();
- assert_eq!(values.len(), 3);
- assert!(values.contains(&2));
- assert!(values.contains(&4));
- assert!(values.contains(&6));
- }
-
- #[test]
- fn test_find() {
- let mut m = HashMap::new();
- assert!(m.get(&1).is_none());
- m.insert(1, 2);
- match m.get(&1) {
- None => panic!(),
- Some(v) => assert_eq!(*v, 2),
- }
- }
-
- #[test]
- fn test_eq() {
- let mut m1 = HashMap::new();
- m1.insert(1, 2);
- m1.insert(2, 3);
- m1.insert(3, 4);
-
- let mut m2 = HashMap::new();
- m2.insert(1, 2);
- m2.insert(2, 3);
-
- assert!(m1 != m2);
-
- m2.insert(3, 4);
-
- assert_eq!(m1, m2);
- }
-
- #[test]
- fn test_show() {
- let mut map = HashMap::new();
- let empty: HashMap<i32, i32> = HashMap::new();
-
- map.insert(1, 2);
- map.insert(3, 4);
-
- let map_str = format!("{:?}", map);
-
- assert!(map_str == "{1: 2, 3: 4}" || map_str == "{3: 4, 1: 2}");
- assert_eq!(format!("{:?}", empty), "{}");
- }
-
- #[test]
- fn test_reserve_shrink_to_fit() {
- let mut m = HashMap::new();
- m.insert(0, 0);
- m.remove(&0);
- assert!(m.capacity() >= m.len());
- for i in 0..128 {
- m.insert(i, i);
- }
- m.reserve(256);
-
- let usable_cap = m.capacity();
- for i in 128..(128 + 256) {
- m.insert(i, i);
- assert_eq!(m.capacity(), usable_cap);
- }
-
- for i in 100..(128 + 256) {
- assert_eq!(m.remove(&i), Some(i));
- }
- m.shrink_to_fit();
-
- assert_eq!(m.len(), 100);
- assert!(!m.is_empty());
- assert!(m.capacity() >= m.len());
-
- for i in 0..100 {
- assert_eq!(m.remove(&i), Some(i));
- }
- m.shrink_to_fit();
- m.insert(0, 0);
-
- assert_eq!(m.len(), 1);
- assert!(m.capacity() >= m.len());
- assert_eq!(m.remove(&0), Some(0));
- }
-
- #[test]
- fn test_from_iter() {
- let xs = [(1, 1), (2, 2), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
-
- let map: HashMap<_, _> = xs.iter().cloned().collect();
-
- for &(k, v) in &xs {
- assert_eq!(map.get(&k), Some(&v));
- }
-
- assert_eq!(map.iter().len(), xs.len() - 1);
- }
-
- #[test]
- fn test_size_hint() {
- let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
-
- let map: HashMap<_, _> = xs.iter().cloned().collect();
-
- let mut iter = map.iter();
-
- for _ in iter.by_ref().take(3) {}
-
- assert_eq!(iter.size_hint(), (3, Some(3)));
- }
-
- #[test]
- fn test_iter_len() {
- let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
-
- let map: HashMap<_, _> = xs.iter().cloned().collect();
-
- let mut iter = map.iter();
-
- for _ in iter.by_ref().take(3) {}
-
- assert_eq!(iter.len(), 3);
- }
-
- #[test]
- fn test_mut_size_hint() {
- let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
-
- let mut map: HashMap<_, _> = xs.iter().cloned().collect();
-
- let mut iter = map.iter_mut();
-
- for _ in iter.by_ref().take(3) {}
-
- assert_eq!(iter.size_hint(), (3, Some(3)));
- }
-
- #[test]
- fn test_iter_mut_len() {
- let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
-
- let mut map: HashMap<_, _> = xs.iter().cloned().collect();
-
- let mut iter = map.iter_mut();
-
- for _ in iter.by_ref().take(3) {}
-
- assert_eq!(iter.len(), 3);
- }
-
- #[test]
- fn test_index() {
- let mut map = HashMap::new();
-
- map.insert(1, 2);
- map.insert(2, 1);
- map.insert(3, 4);
-
- assert_eq!(map[&2], 1);
- }
-
- #[test]
- #[should_panic]
- fn test_index_nonexistent() {
- let mut map = HashMap::new();
-
- map.insert(1, 2);
- map.insert(2, 1);
- map.insert(3, 4);
-
- map[&4];
- }
-
- #[test]
- fn test_entry() {
- let xs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
-
- let mut map: HashMap<_, _> = xs.iter().cloned().collect();
-
- // Existing key (insert)
- match map.entry(1) {
- Vacant(_) => unreachable!(),
- Occupied(mut view) => {
- assert_eq!(view.get(), &10);
- assert_eq!(view.insert(100), 10);
- }
- }
- assert_eq!(map.get(&1).unwrap(), &100);
- assert_eq!(map.len(), 6);
-
- // Existing key (update)
- match map.entry(2) {
- Vacant(_) => unreachable!(),
- Occupied(mut view) => {
- let v = view.get_mut();
- let new_v = (*v) * 10;
- *v = new_v;
- }
- }
- assert_eq!(map.get(&2).unwrap(), &200);
- assert_eq!(map.len(), 6);
-
- // Existing key (take)
- match map.entry(3) {
- Vacant(_) => unreachable!(),
- Occupied(view) => {
- assert_eq!(view.remove(), 30);
- }
- }
- assert_eq!(map.get(&3), None);
- assert_eq!(map.len(), 5);
-
- // Inexistent key (insert)
- match map.entry(10) {
- Occupied(_) => unreachable!(),
- Vacant(view) => {
- assert_eq!(*view.insert(1000), 1000);
- }
- }
- assert_eq!(map.get(&10).unwrap(), &1000);
- assert_eq!(map.len(), 6);
- }
-
- #[test]
- fn test_entry_take_doesnt_corrupt() {
- #![allow(deprecated)] //rand
- // Test for #19292
- fn check(m: &HashMap<i32, ()>) {
- for k in m.keys() {
- assert!(m.contains_key(k), "{} is in keys() but not in the map?", k);
- }
- }
-
- let mut m = HashMap::new();
- let mut rng = thread_rng();
-
- // Populate the map with some items.
- for _ in 0..50 {
- let x = rng.gen_range(-10, 10);
- m.insert(x, ());
- }
-
- for _ in 0..1000 {
- let x = rng.gen_range(-10, 10);
- match m.entry(x) {
- Vacant(_) => {}
- Occupied(e) => {
- e.remove();
- }
- }
-
- check(&m);
- }
- }
-
- #[test]
- fn test_extend_ref() {
- let mut a = HashMap::new();
- a.insert(1, "one");
- let mut b = HashMap::new();
- b.insert(2, "two");
- b.insert(3, "three");
-
- a.extend(&b);
-
- assert_eq!(a.len(), 3);
- assert_eq!(a[&1], "one");
- assert_eq!(a[&2], "two");
- assert_eq!(a[&3], "three");
- }
-
- #[test]
- fn test_capacity_not_less_than_len() {
- let mut a = HashMap::new();
- let mut item = 0;
-
- for _ in 0..116 {
- a.insert(item, 0);
- item += 1;
- }
-
- assert!(a.capacity() > a.len());
-
- let free = a.capacity() - a.len();
- for _ in 0..free {
- a.insert(item, 0);
- item += 1;
- }
-
- assert_eq!(a.len(), a.capacity());
-
- // Insert at capacity should cause allocation.
- a.insert(item, 0);
- assert!(a.capacity() > a.len());
- }
-
- #[test]
- fn test_occupied_entry_key() {
- let mut a = HashMap::new();
- let key = "hello there";
- let value = "value goes here";
- assert!(a.is_empty());
- a.insert(key.clone(), value.clone());
- assert_eq!(a.len(), 1);
- assert_eq!(a[key], value);
-
- match a.entry(key.clone()) {
- Vacant(_) => panic!(),
- Occupied(e) => assert_eq!(key, *e.key()),
- }
- assert_eq!(a.len(), 1);
- assert_eq!(a[key], value);
- }
-
- #[test]
- fn test_vacant_entry_key() {
- let mut a = HashMap::new();
- let key = "hello there";
- let value = "value goes here";
-
- assert!(a.is_empty());
- match a.entry(key.clone()) {
- Occupied(_) => panic!(),
- Vacant(e) => {
- assert_eq!(key, *e.key());
- e.insert(value.clone());
- }
- }
- assert_eq!(a.len(), 1);
- assert_eq!(a[key], value);
- }
-
- #[test]
- fn test_retain() {
- let mut map: HashMap<i32, i32> = (0..100).map(|x| (x, x * 10)).collect();
-
- map.retain(|&k, _| k % 2 == 0);
- assert_eq!(map.len(), 50);
- assert_eq!(map[&2], 20);
- assert_eq!(map[&4], 40);
- assert_eq!(map[&6], 60);
- }
-
- #[test]
- fn test_try_reserve() {
- let mut empty_bytes: HashMap<u8, u8> = HashMap::new();
-
- const MAX_USIZE: usize = usize::MAX;
-
- if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_USIZE) {
- } else {
- panic!("usize::MAX should trigger an overflow!");
- }
-
- if let Err(AllocError { .. }) = empty_bytes.try_reserve(MAX_USIZE / 8) {
- } else {
- panic!("usize::MAX / 8 should trigger an OOM!")
- }
- }
-
- #[test]
- fn test_raw_entry() {
- use super::RawEntryMut::{Occupied, Vacant};
-
- let xs = [(1i32, 10i32), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
-
- let mut map: HashMap<_, _> = xs.iter().cloned().collect();
-
- let compute_hash = |map: &HashMap<i32, i32>, k: i32| -> u64 {
- use core::hash::{BuildHasher, Hash, Hasher};
-
- let mut hasher = map.hasher().build_hasher();
- k.hash(&mut hasher);
- hasher.finish()
- };
-
- // Existing key (insert)
- match map.raw_entry_mut().from_key(&1) {
- Vacant(_) => unreachable!(),
- Occupied(mut view) => {
- assert_eq!(view.get(), &10);
- assert_eq!(view.insert(100), 10);
- }
- }
- let hash1 = compute_hash(&map, 1);
- assert_eq!(map.raw_entry().from_key(&1).unwrap(), (&1, &100));
- assert_eq!(map.raw_entry().from_hash(hash1, |k| *k == 1).unwrap(), (&1, &100));
- assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash1, &1).unwrap(), (&1, &100));
- assert_eq!(map.len(), 6);
-
- // Existing key (update)
- match map.raw_entry_mut().from_key(&2) {
- Vacant(_) => unreachable!(),
- Occupied(mut view) => {
- let v = view.get_mut();
- let new_v = (*v) * 10;
- *v = new_v;
- }
- }
- let hash2 = compute_hash(&map, 2);
- assert_eq!(map.raw_entry().from_key(&2).unwrap(), (&2, &200));
- assert_eq!(map.raw_entry().from_hash(hash2, |k| *k == 2).unwrap(), (&2, &200));
- assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash2, &2).unwrap(), (&2, &200));
- assert_eq!(map.len(), 6);
-
- // Existing key (take)
- let hash3 = compute_hash(&map, 3);
- match map.raw_entry_mut().from_key_hashed_nocheck(hash3, &3) {
- Vacant(_) => unreachable!(),
- Occupied(view) => {
- assert_eq!(view.remove_entry(), (3, 30));
- }
- }
- assert_eq!(map.raw_entry().from_key(&3), None);
- assert_eq!(map.raw_entry().from_hash(hash3, |k| *k == 3), None);
- assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash3, &3), None);
- assert_eq!(map.len(), 5);
-
- // Nonexistent key (insert)
- match map.raw_entry_mut().from_key(&10) {
- Occupied(_) => unreachable!(),
- Vacant(view) => {
- assert_eq!(view.insert(10, 1000), (&mut 10, &mut 1000));
- }
- }
- assert_eq!(map.raw_entry().from_key(&10).unwrap(), (&10, &1000));
- assert_eq!(map.len(), 6);
-
- // Ensure all lookup methods produce equivalent results.
- for k in 0..12 {
- let hash = compute_hash(&map, k);
- let v = map.get(&k).cloned();
- let kv = v.as_ref().map(|v| (&k, v));
-
- assert_eq!(map.raw_entry().from_key(&k), kv);
- assert_eq!(map.raw_entry().from_hash(hash, |q| *q == k), kv);
- assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &k), kv);
-
- match map.raw_entry_mut().from_key(&k) {
- Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
- Vacant(_) => assert_eq!(v, None),
- }
- match map.raw_entry_mut().from_key_hashed_nocheck(hash, &k) {
- Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
- Vacant(_) => assert_eq!(v, None),
- }
- match map.raw_entry_mut().from_hash(hash, |q| *q == k) {
- Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
- Vacant(_) => assert_eq!(v, None),
- }
- }
- }
-}
diff --git a/sgx_tstd/src/collections/hash/mod.rs b/sgx_tstd/src/collections/hash/mod.rs
index 2d01320..348820a 100644
--- a/sgx_tstd/src/collections/hash/mod.rs
+++ b/sgx_tstd/src/collections/hash/mod.rs
@@ -1,20 +1,3 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License..
-
//! Unordered containers, implemented as hash-tables
pub mod map;
diff --git a/sgx_tstd/src/collections/hash/set.rs b/sgx_tstd/src/collections/hash/set.rs
index 0cd331b..3f1edda 100644
--- a/sgx_tstd/src/collections/hash/set.rs
+++ b/sgx_tstd/src/collections/hash/set.rs
@@ -1,19 +1,7 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License..
+//#[cfg(test)]
+//mod tests;
+
+use hashbrown::hash_set as base;
use crate::borrow::Borrow;
use crate::collections::TryReserveError;
@@ -22,7 +10,7 @@
use crate::iter::{Chain, FromIterator, FusedIterator};
use crate::ops::{BitAnd, BitOr, BitXor, Sub};
-use super::map::{self, HashMap, Keys, RandomState};
+use super::map::{map_try_reserve_error, RandomState};
// Future Optimization (FIXME!)
// ============================
@@ -115,15 +103,14 @@
/// // use the values stored in the set
/// ```
///
-/// [`Cell`]: ../../std/cell/struct.Cell.html
-/// [`Eq`]: ../../std/cmp/trait.Eq.html
-/// [`Hash`]: ../../std/hash/trait.Hash.html
-/// [`HashMap`]: struct.HashMap.html
-/// [`PartialEq`]: ../../std/cmp/trait.PartialEq.html
-/// [`RefCell`]: ../../std/cell/struct.RefCell.html
+/// [`HashMap`]: crate::collections::HashMap
+/// [`RefCell`]: crate::cell::RefCell
+/// [`Cell`]: crate::cell::Cell
#[derive(Clone)]
+#[cfg_attr(not(test), rustc_diagnostic_item = "hashset_type")]
+//#[stable(feature = "rust1", since = "1.0.0")]
pub struct HashSet<T, S = RandomState> {
- map: HashMap<T, (), S>,
+ base: base::HashSet<T, S>,
}
impl<T> HashSet<T, RandomState> {
@@ -139,8 +126,9 @@
/// let set: HashSet<i32> = HashSet::new();
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn new() -> HashSet<T, RandomState> {
- HashSet { map: HashMap::new() }
+ Default::default()
}
/// Creates an empty `HashSet` with the specified capacity.
@@ -156,8 +144,9 @@
/// assert!(set.capacity() >= 10);
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn with_capacity(capacity: usize) -> HashSet<T, RandomState> {
- HashSet { map: HashMap::with_capacity(capacity) }
+ HashSet { base: base::HashSet::with_capacity_and_hasher(capacity, Default::default()) }
}
}
@@ -172,8 +161,9 @@
/// assert!(set.capacity() >= 100);
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn capacity(&self) -> usize {
- self.map.capacity()
+ self.base.capacity()
}
/// An iterator visiting all elements in arbitrary order.
@@ -193,8 +183,9 @@
/// }
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn iter(&self) -> Iter<'_, T> {
- Iter { iter: self.map.keys() }
+ Iter { base: self.base.iter() }
}
/// Returns the number of elements in the set.
@@ -210,8 +201,9 @@
/// assert_eq!(v.len(), 1);
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn len(&self) -> usize {
- self.map.len()
+ self.base.len()
}
/// Returns `true` if the set contains no elements.
@@ -227,8 +219,9 @@
/// assert!(!v.is_empty());
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_empty(&self) -> bool {
- self.map.is_empty()
+ self.base.is_empty()
}
/// Clears the set, returning all elements in an iterator.
@@ -249,8 +242,50 @@
/// assert!(set.is_empty());
/// ```
#[inline]
+ //#[stable(feature = "drain", since = "1.6.0")]
pub fn drain(&mut self) -> Drain<'_, T> {
- Drain { iter: self.map.drain() }
+ Drain { base: self.base.drain() }
+ }
+
+ /// Creates an iterator which uses a closure to determine if a value should be removed.
+ ///
+ /// If the closure returns true, then the value is removed and yielded.
+ /// If the closure returns false, the value will remain in the list and will not be yielded
+ /// by the iterator.
+ ///
+ /// If the iterator is only partially consumed or not consumed at all, each of the remaining
+ /// values will still be subjected to the closure and removed and dropped if it returns true.
+ ///
+ /// It is unspecified how many more values will be subjected to the closure
+ /// if a panic occurs in the closure, or if a panic occurs while dropping a value, or if the
+ /// `DrainFilter` itself is leaked.
+ ///
+ /// # Examples
+ ///
+ /// Splitting a set into even and odd values, reusing the original set:
+ ///
+ /// ```
+ /// #![feature(hash_drain_filter)]
+ /// use std::collections::HashSet;
+ ///
+ /// let mut set: HashSet<i32> = (0..8).collect();
+ /// let drained: HashSet<i32> = set.drain_filter(|v| v % 2 == 0).collect();
+ ///
+ /// let mut evens = drained.into_iter().collect::<Vec<_>>();
+ /// let mut odds = set.into_iter().collect::<Vec<_>>();
+ /// evens.sort();
+ /// odds.sort();
+ ///
+ /// assert_eq!(evens, vec![0, 2, 4, 6]);
+ /// assert_eq!(odds, vec![1, 3, 5, 7]);
+ /// ```
+ #[inline]
+ //#[unstable(feature = "hash_drain_filter", issue = "59618")]
+ pub fn drain_filter<F>(&mut self, pred: F) -> DrainFilter<'_, T, F>
+ where
+ F: FnMut(&T) -> bool,
+ {
+ DrainFilter { base: self.base.drain_filter(pred) }
}
/// Clears the set, removing all values.
@@ -266,8 +301,9 @@
/// assert!(v.is_empty());
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn clear(&mut self) {
- self.map.clear()
+ self.base.clear()
}
/// Creates a new empty hash set which will use the given hasher to hash
@@ -280,6 +316,9 @@
/// cause many collisions and very poor performance. Setting it
/// manually using this function can expose a DoS attack vector.
///
+ /// The `hash_builder` passed should implement the [`BuildHasher`] trait for
+ /// the HashMap to be useful, see its documentation for details.
+ ///
/// # Examples
///
/// ```
@@ -291,8 +330,9 @@
/// set.insert(2);
/// ```
#[inline]
+ //#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
pub fn with_hasher(hasher: S) -> HashSet<T, S> {
- HashSet { map: HashMap::with_hasher(hasher) }
+ HashSet { base: base::HashSet::with_hasher(hasher) }
}
/// Creates an empty `HashSet` with the specified capacity, using
@@ -306,6 +346,9 @@
/// cause many collisions and very poor performance. Setting it
/// manually using this function can expose a DoS attack vector.
///
+ /// The `hash_builder` passed should implement the [`BuildHasher`] trait for
+ /// the HashMap to be useful, see its documentation for details.
+ ///
/// # Examples
///
/// ```
@@ -317,14 +360,13 @@
/// set.insert(1);
/// ```
#[inline]
+ //#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> HashSet<T, S> {
- HashSet { map: HashMap::with_capacity_and_hasher(capacity, hasher) }
+ HashSet { base: base::HashSet::with_capacity_and_hasher(capacity, hasher) }
}
/// Returns a reference to the set's [`BuildHasher`].
///
- /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
- ///
/// # Examples
///
/// ```
@@ -336,8 +378,9 @@
/// let hasher: &RandomState = set.hasher();
/// ```
#[inline]
+ //#[stable(feature = "hashmap_public_hasher", since = "1.9.0")]
pub fn hasher(&self) -> &S {
- self.map.hasher()
+ self.base.hasher()
}
}
@@ -363,8 +406,9 @@
/// assert!(set.capacity() >= 10);
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve(&mut self, additional: usize) {
- self.map.reserve(additional)
+ self.base.reserve(additional)
}
/// Tries to reserve capacity for at least `additional` more elements to be inserted
@@ -385,8 +429,9 @@
/// set.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?");
/// ```
#[inline]
+ //#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
- self.map.try_reserve(additional)
+ self.base.try_reserve(additional).map_err(map_try_reserve_error)
}
/// Shrinks the capacity of the set as much as possible. It will drop
@@ -406,8 +451,9 @@
/// assert!(set.capacity() >= 2);
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn shrink_to_fit(&mut self) {
- self.map.shrink_to_fit()
+ self.base.shrink_to_fit()
}
/// Shrinks the capacity of the set with a lower limit. It will drop
@@ -433,8 +479,9 @@
/// assert!(set.capacity() >= 2);
/// ```
#[inline]
+ //#[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
pub fn shrink_to(&mut self, min_capacity: usize) {
- self.map.shrink_to(min_capacity)
+ self.base.shrink_to(min_capacity)
}
/// Visits the values representing the difference,
@@ -461,6 +508,7 @@
/// assert_eq!(diff, [4].iter().collect());
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn difference<'a>(&'a self, other: &'a HashSet<T, S>) -> Difference<'a, T, S> {
Difference { iter: self.iter(), other }
}
@@ -487,6 +535,7 @@
/// assert_eq!(diff1, [1, 4].iter().collect());
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn symmetric_difference<'a>(
&'a self,
other: &'a HashSet<T, S>,
@@ -513,6 +562,7 @@
/// assert_eq!(intersection, [2, 3].iter().collect());
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn intersection<'a>(&'a self, other: &'a HashSet<T, S>) -> Intersection<'a, T, S> {
if self.len() <= other.len() {
Intersection { iter: self.iter(), other }
@@ -540,6 +590,7 @@
/// assert_eq!(union, [1, 2, 3, 4].iter().collect());
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn union<'a>(&'a self, other: &'a HashSet<T, S>) -> Union<'a, T, S> {
if self.len() >= other.len() {
Union { iter: self.iter().chain(other.difference(self)) }
@@ -563,16 +614,14 @@
/// assert_eq!(set.contains(&1), true);
/// assert_eq!(set.contains(&4), false);
/// ```
- ///
- /// [`Eq`]: ../../std/cmp/trait.Eq.html
- /// [`Hash`]: ../../std/hash/trait.Hash.html
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn contains<Q: ?Sized>(&self, value: &Q) -> bool
where
T: Borrow<Q>,
Q: Hash + Eq,
{
- self.map.contains_key(value)
+ self.base.contains(value)
}
/// Returns a reference to the value in the set, if any, that is equal to the given value.
@@ -590,16 +639,14 @@
/// assert_eq!(set.get(&2), Some(&2));
/// assert_eq!(set.get(&4), None);
/// ```
- ///
- /// [`Eq`]: ../../std/cmp/trait.Eq.html
- /// [`Hash`]: ../../std/hash/trait.Hash.html
#[inline]
+ //#[stable(feature = "set_recovery", since = "1.9.0")]
pub fn get<Q: ?Sized>(&self, value: &Q) -> Option<&T>
where
T: Borrow<Q>,
Q: Hash + Eq,
{
- self.map.get_key_value(value).map(|(k, _)| k)
+ self.base.get(value)
}
/// Inserts the given `value` into the set if it is not present, then
@@ -619,10 +666,11 @@
/// assert_eq!(set.len(), 4); // 100 was inserted
/// ```
#[inline]
+ //#[unstable(feature = "hash_set_entry", issue = "60896")]
pub fn get_or_insert(&mut self, value: T) -> &T {
// Although the raw entry gives us `&mut T`, we only return `&T` to be consistent with
// `get`. Key mutation is "raw" because you're not supposed to affect `Eq` or `Hash`.
- self.map.raw_entry_mut().from_key(&value).or_insert(value, ()).0
+ self.base.get_or_insert(value)
}
/// Inserts an owned copy of the given `value` into the set if it is not
@@ -646,6 +694,7 @@
/// assert_eq!(set.len(), 4); // a new "fish" was inserted
/// ```
#[inline]
+ //#[unstable(feature = "hash_set_entry", issue = "60896")]
pub fn get_or_insert_owned<Q: ?Sized>(&mut self, value: &Q) -> &T
where
T: Borrow<Q>,
@@ -653,7 +702,7 @@
{
// Although the raw entry gives us `&mut T`, we only return `&T` to be consistent with
// `get`. Key mutation is "raw" because you're not supposed to affect `Eq` or `Hash`.
- self.map.raw_entry_mut().from_key(value).or_insert_with(|| (value.to_owned(), ())).0
+ self.base.get_or_insert_owned(value)
}
/// Inserts a value computed from `f` into the set if the given `value` is
@@ -677,6 +726,7 @@
/// assert_eq!(set.len(), 4); // a new "fish" was inserted
/// ```
#[inline]
+ //#[unstable(feature = "hash_set_entry", issue = "60896")]
pub fn get_or_insert_with<Q: ?Sized, F>(&mut self, value: &Q, f: F) -> &T
where
T: Borrow<Q>,
@@ -685,7 +735,7 @@
{
// Although the raw entry gives us `&mut T`, we only return `&T` to be consistent with
// `get`. Key mutation is "raw" because you're not supposed to affect `Eq` or `Hash`.
- self.map.raw_entry_mut().from_key(value).or_insert_with(|| (f(value), ())).0
+ self.base.get_or_insert_with(value, f)
}
/// Returns `true` if `self` has no elements in common with `other`.
@@ -705,6 +755,7 @@
/// b.insert(1);
/// assert_eq!(a.is_disjoint(&b), false);
/// ```
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_disjoint(&self, other: &HashSet<T, S>) -> bool {
if self.len() <= other.len() {
self.iter().all(|v| !other.contains(v))
@@ -730,6 +781,7 @@
/// set.insert(4);
/// assert_eq!(set.is_subset(&sup), false);
/// ```
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_subset(&self, other: &HashSet<T, S>) -> bool {
if self.len() <= other.len() { self.iter().all(|v| other.contains(v)) } else { false }
}
@@ -755,6 +807,7 @@
/// assert_eq!(set.is_superset(&sub), true);
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_superset(&self, other: &HashSet<T, S>) -> bool {
other.is_subset(self)
}
@@ -777,8 +830,9 @@
/// assert_eq!(set.len(), 1);
/// ```
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(&mut self, value: T) -> bool {
- self.map.insert(value, ()).is_none()
+ self.base.insert(value)
}
/// Adds a value to the set, replacing the existing value, if any, that is equal to the given
@@ -797,14 +851,9 @@
/// assert_eq!(set.get(&[][..]).unwrap().capacity(), 10);
/// ```
#[inline]
+ //#[stable(feature = "set_recovery", since = "1.9.0")]
pub fn replace(&mut self, value: T) -> Option<T> {
- match self.map.entry(value) {
- map::Entry::Occupied(occupied) => Some(occupied.replace_key()),
- map::Entry::Vacant(vacant) => {
- vacant.insert(());
- None
- }
- }
+ self.base.replace(value)
}
/// Removes a value from the set. Returns whether the value was
@@ -825,16 +874,14 @@
/// assert_eq!(set.remove(&2), true);
/// assert_eq!(set.remove(&2), false);
/// ```
- ///
- /// [`Eq`]: ../../std/cmp/trait.Eq.html
- /// [`Hash`]: ../../std/hash/trait.Hash.html
#[inline]
+ //#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove<Q: ?Sized>(&mut self, value: &Q) -> bool
where
T: Borrow<Q>,
Q: Hash + Eq,
{
- self.map.remove(value).is_some()
+ self.base.remove(value)
}
/// Removes and returns the value in the set, if any, that is equal to the given one.
@@ -852,16 +899,14 @@
/// assert_eq!(set.take(&2), Some(2));
/// assert_eq!(set.take(&2), None);
/// ```
- ///
- /// [`Eq`]: ../../std/cmp/trait.Eq.html
- /// [`Hash`]: ../../std/hash/trait.Hash.html
#[inline]
+ //#[stable(feature = "set_recovery", since = "1.9.0")]
pub fn take<Q: ?Sized>(&mut self, value: &Q) -> Option<T>
where
T: Borrow<Q>,
Q: Hash + Eq,
{
- self.map.remove_entry(value).map(|(k, _)| k)
+ self.base.take(value)
}
/// Retains only the elements specified by the predicate.
@@ -878,14 +923,16 @@
/// set.retain(|&k| k % 2 == 0);
/// assert_eq!(set.len(), 3);
/// ```
- pub fn retain<F>(&mut self, mut f: F)
+ //#[stable(feature = "retain_hash_collection", since = "1.18.0")]
+ pub fn retain<F>(&mut self, f: F)
where
F: FnMut(&T) -> bool,
{
- self.map.retain(|k, _| f(k));
+ self.base.retain(f)
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> PartialEq for HashSet<T, S>
where
T: Eq + Hash,
@@ -900,6 +947,7 @@
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> Eq for HashSet<T, S>
where
T: Eq + Hash,
@@ -907,6 +955,7 @@
{
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> fmt::Debug for HashSet<T, S>
where
T: fmt::Debug,
@@ -916,6 +965,7 @@
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> FromIterator<T> for HashSet<T, S>
where
T: Eq + Hash,
@@ -929,6 +979,7 @@
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> Extend<T> for HashSet<T, S>
where
T: Eq + Hash,
@@ -936,10 +987,21 @@
{
#[inline]
fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
- self.map.extend(iter.into_iter().map(|k| (k, ())));
+ self.base.extend(iter);
+ }
+
+ #[inline]
+ fn extend_one(&mut self, item: T) {
+ self.base.insert(item);
+ }
+
+ #[inline]
+ fn extend_reserve(&mut self, additional: usize) {
+ self.base.extend_reserve(additional);
}
}
+//#[stable(feature = "hash_extend_copy", since = "1.4.0")]
impl<'a, T, S> Extend<&'a T> for HashSet<T, S>
where
T: 'a + Eq + Hash + Copy,
@@ -949,8 +1011,19 @@
fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
self.extend(iter.into_iter().cloned());
}
+
+ #[inline]
+ fn extend_one(&mut self, &item: &'a T) {
+ self.base.insert(item);
+ }
+
+ #[inline]
+ fn extend_reserve(&mut self, additional: usize) {
+ Extend::<T>::extend_reserve(self, additional)
+ }
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> Default for HashSet<T, S>
where
S: Default,
@@ -958,10 +1031,11 @@
/// Creates an empty `HashSet<T, S>` with the `Default` value for the hasher.
#[inline]
fn default() -> HashSet<T, S> {
- HashSet { map: HashMap::default() }
+ HashSet { base: Default::default() }
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> BitOr<&HashSet<T, S>> for &HashSet<T, S>
where
T: Eq + Hash + Clone,
@@ -994,6 +1068,7 @@
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> BitAnd<&HashSet<T, S>> for &HashSet<T, S>
where
T: Eq + Hash + Clone,
@@ -1026,6 +1101,7 @@
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> BitXor<&HashSet<T, S>> for &HashSet<T, S>
where
T: Eq + Hash + Clone,
@@ -1058,6 +1134,7 @@
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> Sub<&HashSet<T, S>> for &HashSet<T, S>
where
T: Eq + Hash + Clone,
@@ -1095,10 +1172,10 @@
/// This `struct` is created by the [`iter`] method on [`HashSet`].
/// See its documentation for more.
///
-/// [`HashSet`]: struct.HashSet.html
-/// [`iter`]: struct.HashSet.html#method.iter
+/// [`iter`]: HashSet::iter
+//#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, K: 'a> {
- iter: Keys<'a, K, ()>,
+ base: base::Iter<'a, K>,
}
/// An owning iterator over the items of a `HashSet`.
@@ -1106,10 +1183,10 @@
/// This `struct` is created by the [`into_iter`] method on [`HashSet`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
-/// [`HashSet`]: struct.HashSet.html
-/// [`into_iter`]: struct.HashSet.html#method.into_iter
+/// [`into_iter`]: IntoIterator::into_iter
+//#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<K> {
- iter: map::IntoIter<K, ()>,
+ base: base::IntoIter<K>,
}
/// A draining iterator over the items of a `HashSet`.
@@ -1117,10 +1194,23 @@
/// This `struct` is created by the [`drain`] method on [`HashSet`].
/// See its documentation for more.
///
-/// [`HashSet`]: struct.HashSet.html
-/// [`drain`]: struct.HashSet.html#method.drain
+/// [`drain`]: HashSet::drain
+//#[stable(feature = "rust1", since = "1.0.0")]
pub struct Drain<'a, K: 'a> {
- iter: map::Drain<'a, K, ()>,
+ base: base::Drain<'a, K>,
+}
+
+/// A draining, filtering iterator over the items of a `HashSet`.
+///
+/// This `struct` is created by the [`drain_filter`] method on [`HashSet`].
+///
+/// [`drain_filter`]: HashSet::drain_filter
+//#[unstable(feature = "hash_drain_filter", issue = "59618")]
+pub struct DrainFilter<'a, K, F>
+where
+ F: FnMut(&K) -> bool,
+{
+ base: base::DrainFilter<'a, K, F>,
}
/// A lazy iterator producing elements in the intersection of `HashSet`s.
@@ -1128,8 +1218,8 @@
/// This `struct` is created by the [`intersection`] method on [`HashSet`].
/// See its documentation for more.
///
-/// [`HashSet`]: struct.HashSet.html
-/// [`intersection`]: struct.HashSet.html#method.intersection
+/// [`intersection`]: HashSet::intersection
+//#[stable(feature = "rust1", since = "1.0.0")]
pub struct Intersection<'a, T: 'a, S: 'a> {
// iterator of the first set
iter: Iter<'a, T>,
@@ -1142,8 +1232,8 @@
/// This `struct` is created by the [`difference`] method on [`HashSet`].
/// See its documentation for more.
///
-/// [`HashSet`]: struct.HashSet.html
-/// [`difference`]: struct.HashSet.html#method.difference
+/// [`difference`]: HashSet::difference
+//#[stable(feature = "rust1", since = "1.0.0")]
pub struct Difference<'a, T: 'a, S: 'a> {
// iterator of the first set
iter: Iter<'a, T>,
@@ -1156,8 +1246,8 @@
/// This `struct` is created by the [`symmetric_difference`] method on
/// [`HashSet`]. See its documentation for more.
///
-/// [`HashSet`]: struct.HashSet.html
-/// [`symmetric_difference`]: struct.HashSet.html#method.symmetric_difference
+/// [`symmetric_difference`]: HashSet::symmetric_difference
+//#[stable(feature = "rust1", since = "1.0.0")]
pub struct SymmetricDifference<'a, T: 'a, S: 'a> {
iter: Chain<Difference<'a, T, S>, Difference<'a, T, S>>,
}
@@ -1167,12 +1257,13 @@
/// This `struct` is created by the [`union`] method on [`HashSet`].
/// See its documentation for more.
///
-/// [`HashSet`]: struct.HashSet.html
-/// [`union`]: struct.HashSet.html#method.union
+/// [`union`]: HashSet::union
+//#[stable(feature = "rust1", since = "1.0.0")]
pub struct Union<'a, T: 'a, S: 'a> {
iter: Chain<Iter<'a, T>, Difference<'a, T, S>>,
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, S> IntoIterator for &'a HashSet<T, S> {
type Item = &'a T;
type IntoIter = Iter<'a, T>;
@@ -1183,6 +1274,7 @@
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> IntoIterator for HashSet<T, S> {
type Item = T;
type IntoIter = IntoIter<T>;
@@ -1209,103 +1301,138 @@
/// ```
#[inline]
fn into_iter(self) -> IntoIter<T> {
- IntoIter { iter: self.map.into_iter() }
+ IntoIter { base: self.base.into_iter() }
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K> Clone for Iter<'_, K> {
#[inline]
fn clone(&self) -> Self {
- Iter { iter: self.iter.clone() }
+ Iter { base: self.base.clone() }
}
}
-
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K> Iterator for Iter<'a, K> {
type Item = &'a K;
#[inline]
fn next(&mut self) -> Option<&'a K> {
- self.iter.next()
+ self.base.next()
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
- self.iter.size_hint()
+ self.base.size_hint()
}
}
-
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K> ExactSizeIterator for Iter<'_, K> {
#[inline]
fn len(&self) -> usize {
- self.iter.len()
+ self.base.len()
}
}
-
+//#[stable(feature = "fused", since = "1.26.0")]
impl<K> FusedIterator for Iter<'_, K> {}
+//#[stable(feature = "std_debug", since = "1.16.0")]
impl<K: fmt::Debug> fmt::Debug for Iter<'_, K> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_list().entries(self.clone()).finish()
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K> Iterator for IntoIter<K> {
type Item = K;
#[inline]
fn next(&mut self) -> Option<K> {
- self.iter.next().map(|(k, _)| k)
+ self.base.next()
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
- self.iter.size_hint()
+ self.base.size_hint()
}
}
-
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K> ExactSizeIterator for IntoIter<K> {
#[inline]
fn len(&self) -> usize {
- self.iter.len()
+ self.base.len()
}
}
-
+//#[stable(feature = "fused", since = "1.26.0")]
impl<K> FusedIterator for IntoIter<K> {}
+//#[stable(feature = "std_debug", since = "1.16.0")]
impl<K: fmt::Debug> fmt::Debug for IntoIter<K> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let entries_iter = self.iter.iter().map(|(k, _)| k);
- f.debug_list().entries(entries_iter).finish()
+ fmt::Debug::fmt(&self.base, f)
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K> Iterator for Drain<'a, K> {
type Item = K;
#[inline]
fn next(&mut self) -> Option<K> {
- self.iter.next().map(|(k, _)| k)
+ self.base.next()
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
- self.iter.size_hint()
+ self.base.size_hint()
}
}
-
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<K> ExactSizeIterator for Drain<'_, K> {
#[inline]
fn len(&self) -> usize {
- self.iter.len()
+ self.base.len()
}
}
-
+//#[stable(feature = "fused", since = "1.26.0")]
impl<K> FusedIterator for Drain<'_, K> {}
+//#[stable(feature = "std_debug", since = "1.16.0")]
impl<K: fmt::Debug> fmt::Debug for Drain<'_, K> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let entries_iter = self.iter.iter().map(|(k, _)| k);
- f.debug_list().entries(entries_iter).finish()
+ fmt::Debug::fmt(&self.base, f)
}
}
+//#[unstable(feature = "hash_drain_filter", issue = "59618")]
+impl<K, F> Iterator for DrainFilter<'_, K, F>
+where
+ F: FnMut(&K) -> bool,
+{
+ type Item = K;
+
+ #[inline]
+ fn next(&mut self) -> Option<K> {
+ self.base.next()
+ }
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.base.size_hint()
+ }
+}
+
+//#[unstable(feature = "hash_drain_filter", issue = "59618")]
+impl<K, F> FusedIterator for DrainFilter<'_, K, F> where F: FnMut(&K) -> bool {}
+
+//#[unstable(feature = "hash_drain_filter", issue = "59618")]
+impl<'a, K, F> fmt::Debug for DrainFilter<'a, K, F>
+where
+ F: FnMut(&K) -> bool,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.pad("DrainFilter { .. }")
+ }
+}
+
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> Clone for Intersection<'_, T, S> {
#[inline]
fn clone(&self) -> Self {
@@ -1313,6 +1440,7 @@
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, S> Iterator for Intersection<'a, T, S>
where
T: Eq + Hash,
@@ -1337,6 +1465,7 @@
}
}
+//#[stable(feature = "std_debug", since = "1.16.0")]
impl<T, S> fmt::Debug for Intersection<'_, T, S>
where
T: fmt::Debug + Eq + Hash,
@@ -1347,6 +1476,7 @@
}
}
+//#[stable(feature = "fused", since = "1.26.0")]
impl<T, S> FusedIterator for Intersection<'_, T, S>
where
T: Eq + Hash,
@@ -1354,6 +1484,7 @@
{
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> Clone for Difference<'_, T, S> {
#[inline]
fn clone(&self) -> Self {
@@ -1361,6 +1492,7 @@
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, S> Iterator for Difference<'a, T, S>
where
T: Eq + Hash,
@@ -1385,6 +1517,7 @@
}
}
+//#[stable(feature = "fused", since = "1.26.0")]
impl<T, S> FusedIterator for Difference<'_, T, S>
where
T: Eq + Hash,
@@ -1392,6 +1525,7 @@
{
}
+//#[stable(feature = "std_debug", since = "1.16.0")]
impl<T, S> fmt::Debug for Difference<'_, T, S>
where
T: fmt::Debug + Eq + Hash,
@@ -1402,6 +1536,7 @@
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> Clone for SymmetricDifference<'_, T, S> {
#[inline]
fn clone(&self) -> Self {
@@ -1409,6 +1544,7 @@
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, S> Iterator for SymmetricDifference<'a, T, S>
where
T: Eq + Hash,
@@ -1426,6 +1562,7 @@
}
}
+//#[stable(feature = "fused", since = "1.26.0")]
impl<T, S> FusedIterator for SymmetricDifference<'_, T, S>
where
T: Eq + Hash,
@@ -1433,6 +1570,7 @@
{
}
+//#[stable(feature = "std_debug", since = "1.16.0")]
impl<T, S> fmt::Debug for SymmetricDifference<'_, T, S>
where
T: fmt::Debug + Eq + Hash,
@@ -1443,6 +1581,7 @@
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> Clone for Union<'_, T, S> {
#[inline]
fn clone(&self) -> Self {
@@ -1450,6 +1589,7 @@
}
}
+//#[stable(feature = "fused", since = "1.26.0")]
impl<T, S> FusedIterator for Union<'_, T, S>
where
T: Eq + Hash,
@@ -1457,6 +1597,7 @@
{
}
+//#[stable(feature = "std_debug", since = "1.16.0")]
impl<T, S> fmt::Debug for Union<'_, T, S>
where
T: fmt::Debug + Eq + Hash,
@@ -1467,6 +1608,7 @@
}
}
+//#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, S> Iterator for Union<'a, T, S>
where
T: Eq + Hash,
@@ -1519,422 +1661,3 @@
d
}
}
-
-#[cfg(test)]
-mod test_set {
- use super::super::map::RandomState;
- use super::HashSet;
-
- #[test]
- fn test_zero_capacities() {
- type HS = HashSet<i32>;
-
- let s = HS::new();
- assert_eq!(s.capacity(), 0);
-
- let s = HS::default();
- assert_eq!(s.capacity(), 0);
-
- let s = HS::with_hasher(RandomState::new());
- assert_eq!(s.capacity(), 0);
-
- let s = HS::with_capacity(0);
- assert_eq!(s.capacity(), 0);
-
- let s = HS::with_capacity_and_hasher(0, RandomState::new());
- assert_eq!(s.capacity(), 0);
-
- let mut s = HS::new();
- s.insert(1);
- s.insert(2);
- s.remove(&1);
- s.remove(&2);
- s.shrink_to_fit();
- assert_eq!(s.capacity(), 0);
-
- let mut s = HS::new();
- s.reserve(0);
- assert_eq!(s.capacity(), 0);
- }
-
- #[test]
- fn test_disjoint() {
- let mut xs = HashSet::new();
- let mut ys = HashSet::new();
- assert!(xs.is_disjoint(&ys));
- assert!(ys.is_disjoint(&xs));
- assert!(xs.insert(5));
- assert!(ys.insert(11));
- assert!(xs.is_disjoint(&ys));
- assert!(ys.is_disjoint(&xs));
- assert!(xs.insert(7));
- assert!(xs.insert(19));
- assert!(xs.insert(4));
- assert!(ys.insert(2));
- assert!(ys.insert(-11));
- assert!(xs.is_disjoint(&ys));
- assert!(ys.is_disjoint(&xs));
- assert!(ys.insert(7));
- assert!(!xs.is_disjoint(&ys));
- assert!(!ys.is_disjoint(&xs));
- }
-
- #[test]
- fn test_subset_and_superset() {
- let mut a = HashSet::new();
- assert!(a.insert(0));
- assert!(a.insert(5));
- assert!(a.insert(11));
- assert!(a.insert(7));
-
- let mut b = HashSet::new();
- assert!(b.insert(0));
- assert!(b.insert(7));
- assert!(b.insert(19));
- assert!(b.insert(250));
- assert!(b.insert(11));
- assert!(b.insert(200));
-
- assert!(!a.is_subset(&b));
- assert!(!a.is_superset(&b));
- assert!(!b.is_subset(&a));
- assert!(!b.is_superset(&a));
-
- assert!(b.insert(5));
-
- assert!(a.is_subset(&b));
- assert!(!a.is_superset(&b));
- assert!(!b.is_subset(&a));
- assert!(b.is_superset(&a));
- }
-
- #[test]
- fn test_iterate() {
- let mut a = HashSet::new();
- for i in 0..32 {
- assert!(a.insert(i));
- }
- let mut observed: u32 = 0;
- for k in &a {
- observed |= 1 << *k;
- }
- assert_eq!(observed, 0xFFFF_FFFF);
- }
-
- #[test]
- fn test_intersection() {
- let mut a = HashSet::new();
- let mut b = HashSet::new();
- assert!(a.intersection(&b).next().is_none());
-
- assert!(a.insert(11));
- assert!(a.insert(1));
- assert!(a.insert(3));
- assert!(a.insert(77));
- assert!(a.insert(103));
- assert!(a.insert(5));
- assert!(a.insert(-5));
-
- assert!(b.insert(2));
- assert!(b.insert(11));
- assert!(b.insert(77));
- assert!(b.insert(-9));
- assert!(b.insert(-42));
- assert!(b.insert(5));
- assert!(b.insert(3));
-
- let mut i = 0;
- let expected = [3, 5, 11, 77];
- for x in a.intersection(&b) {
- assert!(expected.contains(x));
- i += 1
- }
- assert_eq!(i, expected.len());
-
- assert!(a.insert(9)); // make a bigger than b
-
- i = 0;
- for x in a.intersection(&b) {
- assert!(expected.contains(x));
- i += 1
- }
- assert_eq!(i, expected.len());
-
- i = 0;
- for x in b.intersection(&a) {
- assert!(expected.contains(x));
- i += 1
- }
- assert_eq!(i, expected.len());
- }
-
- #[test]
- fn test_difference() {
- let mut a = HashSet::new();
- let mut b = HashSet::new();
-
- assert!(a.insert(1));
- assert!(a.insert(3));
- assert!(a.insert(5));
- assert!(a.insert(9));
- assert!(a.insert(11));
-
- assert!(b.insert(3));
- assert!(b.insert(9));
-
- let mut i = 0;
- let expected = [1, 5, 11];
- for x in a.difference(&b) {
- assert!(expected.contains(x));
- i += 1
- }
- assert_eq!(i, expected.len());
- }
-
- #[test]
- fn test_symmetric_difference() {
- let mut a = HashSet::new();
- let mut b = HashSet::new();
-
- assert!(a.insert(1));
- assert!(a.insert(3));
- assert!(a.insert(5));
- assert!(a.insert(9));
- assert!(a.insert(11));
-
- assert!(b.insert(-2));
- assert!(b.insert(3));
- assert!(b.insert(9));
- assert!(b.insert(14));
- assert!(b.insert(22));
-
- let mut i = 0;
- let expected = [-2, 1, 5, 11, 14, 22];
- for x in a.symmetric_difference(&b) {
- assert!(expected.contains(x));
- i += 1
- }
- assert_eq!(i, expected.len());
- }
-
- #[test]
- fn test_union() {
- let mut a = HashSet::new();
- let mut b = HashSet::new();
- assert!(a.union(&b).next().is_none());
- assert!(b.union(&a).next().is_none());
-
- assert!(a.insert(1));
- assert!(a.insert(3));
- assert!(a.insert(11));
- assert!(a.insert(16));
- assert!(a.insert(19));
- assert!(a.insert(24));
-
- assert!(b.insert(-2));
- assert!(b.insert(1));
- assert!(b.insert(5));
- assert!(b.insert(9));
- assert!(b.insert(13));
- assert!(b.insert(19));
-
- let mut i = 0;
- let expected = [-2, 1, 3, 5, 9, 11, 13, 16, 19, 24];
- for x in a.union(&b) {
- assert!(expected.contains(x));
- i += 1
- }
- assert_eq!(i, expected.len());
-
- assert!(a.insert(9)); // make a bigger than b
- assert!(a.insert(5));
-
- i = 0;
- for x in a.union(&b) {
- assert!(expected.contains(x));
- i += 1
- }
- assert_eq!(i, expected.len());
-
- i = 0;
- for x in b.union(&a) {
- assert!(expected.contains(x));
- i += 1
- }
- assert_eq!(i, expected.len());
- }
-
- #[test]
- fn test_from_iter() {
- let xs = [1, 2, 2, 3, 4, 5, 6, 7, 8, 9];
-
- let set: HashSet<_> = xs.iter().cloned().collect();
-
- for x in &xs {
- assert!(set.contains(x));
- }
-
- assert_eq!(set.iter().len(), xs.len() - 1);
- }
-
- #[test]
- fn test_move_iter() {
- let hs = {
- let mut hs = HashSet::new();
-
- hs.insert('a');
- hs.insert('b');
-
- hs
- };
-
- let v = hs.into_iter().collect::<Vec<char>>();
- assert!(v == ['a', 'b'] || v == ['b', 'a']);
- }
-
- #[test]
- fn test_eq() {
- // These constants once happened to expose a bug in insert().
- // I'm keeping them around to prevent a regression.
- let mut s1 = HashSet::new();
-
- s1.insert(1);
- s1.insert(2);
- s1.insert(3);
-
- let mut s2 = HashSet::new();
-
- s2.insert(1);
- s2.insert(2);
-
- assert!(s1 != s2);
-
- s2.insert(3);
-
- assert_eq!(s1, s2);
- }
-
- #[test]
- fn test_show() {
- let mut set = HashSet::new();
- let empty = HashSet::<i32>::new();
-
- set.insert(1);
- set.insert(2);
-
- let set_str = format!("{:?}", set);
-
- assert!(set_str == "{1, 2}" || set_str == "{2, 1}");
- assert_eq!(format!("{:?}", empty), "{}");
- }
-
- #[test]
- fn test_trivial_drain() {
- let mut s = HashSet::<i32>::new();
- for _ in s.drain() {}
- assert!(s.is_empty());
- drop(s);
-
- let mut s = HashSet::<i32>::new();
- drop(s.drain());
- assert!(s.is_empty());
- }
-
- #[test]
- fn test_drain() {
- let mut s: HashSet<_> = (1..100).collect();
-
- // try this a bunch of times to make sure we don't screw up internal state.
- for _ in 0..20 {
- assert_eq!(s.len(), 99);
-
- {
- let mut last_i = 0;
- let mut d = s.drain();
- for (i, x) in d.by_ref().take(50).enumerate() {
- last_i = i;
- assert!(x != 0);
- }
- assert_eq!(last_i, 49);
- }
-
- for _ in &s {
- panic!("s should be empty!");
- }
-
- // reset to try again.
- s.extend(1..100);
- }
- }
-
- #[test]
- fn test_replace() {
- use crate::hash;
-
- #[derive(Debug)]
- struct Foo(&'static str, i32);
-
- impl PartialEq for Foo {
- fn eq(&self, other: &Self) -> bool {
- self.0 == other.0
- }
- }
-
- impl Eq for Foo {}
-
- impl hash::Hash for Foo {
- fn hash<H: hash::Hasher>(&self, h: &mut H) {
- self.0.hash(h);
- }
- }
-
- let mut s = HashSet::new();
- assert_eq!(s.replace(Foo("a", 1)), None);
- assert_eq!(s.len(), 1);
- assert_eq!(s.replace(Foo("a", 2)), Some(Foo("a", 1)));
- assert_eq!(s.len(), 1);
-
- let mut it = s.iter();
- assert_eq!(it.next(), Some(&Foo("a", 2)));
- assert_eq!(it.next(), None);
- }
-
- #[test]
- fn test_extend_ref() {
- let mut a = HashSet::new();
- a.insert(1);
-
- a.extend(&[2, 3, 4]);
-
- assert_eq!(a.len(), 4);
- assert!(a.contains(&1));
- assert!(a.contains(&2));
- assert!(a.contains(&3));
- assert!(a.contains(&4));
-
- let mut b = HashSet::new();
- b.insert(5);
- b.insert(6);
-
- a.extend(&b);
-
- assert_eq!(a.len(), 6);
- assert!(a.contains(&1));
- assert!(a.contains(&2));
- assert!(a.contains(&3));
- assert!(a.contains(&4));
- assert!(a.contains(&5));
- assert!(a.contains(&6));
- }
-
- #[test]
- fn test_retain() {
- let xs = [1, 2, 3, 4, 5, 6];
- let mut set: HashSet<i32> = xs.iter().cloned().collect();
- set.retain(|&k| k % 2 == 0);
- assert_eq!(set.len(), 3);
- assert!(set.contains(&2));
- assert!(set.contains(&4));
- assert!(set.contains(&6));
- }
-}
diff --git a/sgx_tstd/src/io/util.rs b/sgx_tstd/src/io/util.rs
index dfb92d2..ca5466b 100644
--- a/sgx_tstd/src/io/util.rs
+++ b/sgx_tstd/src/io/util.rs
@@ -52,18 +52,18 @@
// to uninitialized data, but within libstd we can rely on more guarantees
// than if this code were in an external lib.
unsafe {
- reader.initializer().initialize(buf.get_mut());
+ reader.initializer().initialize(buf.assume_init_mut());
}
let mut written = 0;
loop {
- let len = match reader.read(unsafe { buf.get_mut() }) {
+ let len = match reader.read(unsafe { buf.assume_init_mut() }) {
Ok(0) => return Ok(written),
Ok(len) => len,
Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,
Err(e) => return Err(e),
};
- writer.write_all(unsafe { &buf.get_ref()[..len] })?;
+ writer.write_all(unsafe { &buf.assume_init_ref()[..len] })?;
written += len as u64;
}
}
@@ -201,4 +201,4 @@
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.pad("Sink { .. }")
}
-}
\ No newline at end of file
+}
diff --git a/sgx_tstd/src/lib.rs b/sgx_tstd/src/lib.rs
index f8ff0f6..08b52e0 100644
--- a/sgx_tstd/src/lib.rs
+++ b/sgx_tstd/src/lib.rs
@@ -59,6 +59,7 @@
#![feature(core_intrinsics)]
#![feature(custom_test_frameworks)]
#![feature(dropck_eyepatch)]
+#![feature(extend_one)]
#![feature(fixed_size_array)]
#![feature(fn_traits)]
#![feature(generator_trait)]
@@ -83,7 +84,6 @@
#![feature(thread_local)]
#![feature(toowned_clone_into)]
#![feature(trace_macros)]
-#![feature(track_caller)]
#![feature(try_reserve)]
#![feature(unboxed_closures)]
#![feature(untagged_unions)]
diff --git a/sgx_tstd/src/panicking.rs b/sgx_tstd/src/panicking.rs
index 1e66cde..c219162 100644
--- a/sgx_tstd/src/panicking.rs
+++ b/sgx_tstd/src/panicking.rs
@@ -67,6 +67,11 @@
rtabort!("Rust panics must be rethrown");
}
+#[rustc_std_internal_symbol]
+extern "C" fn __rust_foreign_exception() -> ! {
+ rtabort!("Rust cannot catch foreign exceptions");
+}
+
static PANIC_HANDLER: AtomicPtr<()> = AtomicPtr::new(ptr::null_mut());
#[cfg(not(feature = "stdio"))]
diff --git a/sgx_tstd/src/sync/mpsc/shared.rs b/sgx_tstd/src/sync/mpsc/shared.rs
index 8f0f184..ee18611 100644
--- a/sgx_tstd/src/sync/mpsc/shared.rs
+++ b/sgx_tstd/src/sync/mpsc/shared.rs
@@ -369,9 +369,7 @@
// See comments on Arc::clone() on why we do this (for `mem::forget`).
if old_count > MAX_REFCOUNT {
- unsafe {
- abort();
- }
+ abort();
}
}
diff --git a/sgx_tstd/src/sync/mpsc/sync.rs b/sgx_tstd/src/sync/mpsc/sync.rs
index 07963fa..31fd4da 100644
--- a/sgx_tstd/src/sync/mpsc/sync.rs
+++ b/sgx_tstd/src/sync/mpsc/sync.rs
@@ -372,9 +372,7 @@
// See comments on Arc::clone() on why we do this (for `mem::forget`).
if old_count > MAX_REFCOUNT {
- unsafe {
- abort();
- }
+ abort();
}
}