diff --git a/benches/benches/benchmarks/aoc_2020_19b.rs b/benches/benches/benchmarks/aoc_2020_19b.rs index 096679f2b..c6aa6e91c 100644 --- a/benches/benches/benchmarks/aoc_2020_19b.rs +++ b/benches/benches/benchmarks/aoc_2020_19b.rs @@ -1,5 +1,7 @@ use criterion::Criterion; +use rune::alloc::TryClone; + criterion::criterion_group!(benches, aoc_2020_19b); const INPUT: &str = include_str!("data/aoc_2020_19b.txt"); @@ -8,7 +10,7 @@ fn aoc_2020_19b(b: &mut Criterion) { let mut data = rune::runtime::Vec::new(); for line in INPUT.split('\n').filter(|s| !s.is_empty()) { - data.push(line.to_owned().into()); + data.push(rune::to_value(line.to_owned()).unwrap()).unwrap(); } let mut vm = rune_vm! { @@ -234,6 +236,9 @@ fn aoc_2020_19b(b: &mut Criterion) { let entry = rune::Hash::type_hash(["main"]); b.bench_function("aoc_2020_19b", |b| { - b.iter(|| vm.call(entry, (data.clone(),)).expect("failed call")); + b.iter(|| { + vm.call(entry, (data.try_clone().unwrap(),)) + .expect("failed call") + }); }); } diff --git a/benches/benches/benchmarks/aoc_2020_1a.rs b/benches/benches/benchmarks/aoc_2020_1a.rs index 0bad64c7e..967ab7f84 100644 --- a/benches/benches/benchmarks/aoc_2020_1a.rs +++ b/benches/benches/benchmarks/aoc_2020_1a.rs @@ -4,6 +4,7 @@ use anyhow::Context; use criterion::Criterion; +use rune::alloc::TryClone; criterion::criterion_group!(benches, aoc_2020_1a); @@ -86,6 +87,9 @@ fn aoc_2020_1a(b: &mut Criterion) { let entry = rune::Hash::type_hash(["main"]); b.bench_function("aoc_2020_1a", |b| { - b.iter(|| vm.call(entry, (data.clone(),)).expect("failed call")); + b.iter(|| { + vm.call(entry, (data.try_clone().unwrap(),)) + .expect("failed call") + }); }); } diff --git a/benches/benches/benchmarks/aoc_2020_1b.rs b/benches/benches/benchmarks/aoc_2020_1b.rs index 5f46e82c3..a5b0ead68 100644 --- a/benches/benches/benchmarks/aoc_2020_1b.rs +++ b/benches/benches/benchmarks/aoc_2020_1b.rs @@ -4,6 +4,8 @@ use criterion::Criterion; +use rune::alloc::TryClone; + criterion::criterion_group!(benches, aoc_2020_1b); const INPUT: &str = include_str!("data/aoc_2020_1.txt"); @@ -60,6 +62,9 @@ fn aoc_2020_1b(b: &mut Criterion) { let entry = rune::Hash::type_hash(["main"]); b.bench_function("aoc_2020_1b", |b| { - b.iter(|| vm.call(entry, (data.clone(),)).expect("failed call")); + b.iter(|| { + vm.call(entry, (data.try_clone().unwrap(),)) + .expect("failed call") + }); }); } diff --git a/crates/rune-alloc/.gitignore b/crates/rune-alloc/.gitignore new file mode 100644 index 000000000..c2cdb236c --- /dev/null +++ b/crates/rune-alloc/.gitignore @@ -0,0 +1 @@ +/patches/ diff --git a/crates/rune-alloc/Cargo.toml b/crates/rune-alloc/Cargo.toml new file mode 100644 index 000000000..a73cd184b --- /dev/null +++ b/crates/rune-alloc/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "rune-alloc" +version = "0.12.3" +authors = ["John-John Tedro "] +edition = "2021" +rust-version = "1.70" +description = "The Rune Language, an embeddable dynamic programming language for Rust." 
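The benchmark changes above all follow one mechanical pattern: infallible operations that could abort on allocation failure (`clone`, `push`) are replaced by fallible `try_` counterparts whose errors are handled explicitly. A minimal sketch of the pattern, using the `rune::alloc::TryClone` import added above (the `unwrap` mirrors the benchmarks' own error handling):

```rust
use rune::alloc::TryClone;

fn clone_call_args(data: &rune::runtime::Vec) -> rune::runtime::Vec {
    // `try_clone` surfaces allocation failure as a `Result` value instead of
    // aborting the process inside `clone`.
    data.try_clone().unwrap()
}
```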
+documentation = "https://docs.rs/rune" +readme = "README.md" +homepage = "https://github.com/rune-rs/rune" +repository = "https://github.com/rune-rs/rune" +license = "MIT OR Apache-2.0" +keywords = ["language", "scripting", "scripting-language"] +categories = ["parser-implementations"] + +[features] +default = ["std", "serde"] +std = ["alloc", "ahash/std"] +alloc = [] + +[dependencies] +serde = { version = "1.0", optional = true } +ahash = { version = "0.8.3", default-features = false } + +[dev-dependencies] +rand = { version = "0.8.5", features = ["small_rng"] } diff --git a/crates/rune-alloc/src/alloc/allocator.rs b/crates/rune-alloc/src/alloc/allocator.rs new file mode 100644 index 000000000..c215e45c8 --- /dev/null +++ b/crates/rune-alloc/src/alloc/allocator.rs @@ -0,0 +1,383 @@ +//! Types used to govern how allocations are performed. + +use core::alloc::Layout; +use core::fmt; + +use crate::ptr::{self, invalid_mut, NonNull}; + +use ::rust_alloc::alloc::{alloc, alloc_zeroed, dealloc}; + +/// Error raised while allocating. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct AllocError { + pub(crate) layout: Layout, +} + +impl fmt::Display for AllocError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "Failed to allocate {} bytes of memory", + self.layout.size() + ) + } +} + +#[cfg(feature = "std")] +impl ::rust_std::error::Error for AllocError {} + +/// An implementation of `Allocator` can allocate, grow, shrink, and deallocate +/// arbitrary blocks of data described via [`Layout`]. +/// +/// `Allocator` is designed to be implemented on ZSTs, references, or smart +/// pointers because having an allocator like `MyAlloc([u8; N])` cannot be +/// moved, without updating the pointers to the allocated memory. +/// +/// Zero-sized allocations are allowed in `Allocator`. If an underlying +/// allocator does not support this (like jemalloc) or return a null pointer +/// (such as `libc::malloc`), this must be caught by the implementation. +/// +/// ### Currently allocated memory +/// +/// Some of the methods require that a memory block be *currently allocated* via +/// an allocator. This means that: +/// +/// * the starting address for that memory block was previously returned by +/// [`allocate`], [`grow`], or [`shrink`], and +/// +/// * the memory block has not been subsequently deallocated, where blocks are +/// either deallocated directly by being passed to [`deallocate`] or were +/// changed by being passed to [`grow`] or [`shrink`] that returns `Ok`. If +/// `grow` or `shrink` have returned `Err`, the passed pointer remains valid. +/// +/// [`allocate`]: Allocator::allocate +/// [`grow`]: Allocator::grow +/// [`shrink`]: Allocator::shrink +/// [`deallocate`]: Allocator::deallocate +/// +/// ### Memory fitting +/// +/// Some of the methods require that a layout *fit* a memory block. What it +/// means for a layout to "fit" a memory block means (or equivalently, for a +/// memory block to "fit" a layout) is that the following conditions must hold: +/// +/// * The block must be allocated with the same alignment as [`layout.align()`], +/// and +/// +/// * The provided [`layout.size()`] must fall in the range `min ..= max`, +/// where: +/// - `min` is the size of the layout most recently used to allocate the +/// block, and +/// - `max` is the latest actual size returned from [`allocate`], [`grow`], or +/// [`shrink`]. 
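To make the contract above concrete, here is a minimal sketch of a delegating allocator. It assumes `rune_alloc` re-exports `Allocator`, `AllocError` and `Global` at the crate root (the doc examples further down use exactly those paths) and that the `NonNull` in these signatures is `core::ptr::NonNull`:

```rust
use core::alloc::Layout;
use core::ptr::NonNull;
use core::sync::atomic::{AtomicUsize, Ordering};

use rune_alloc::{AllocError, Allocator, Global};

/// Forwards every request to `Global`, counting currently allocated blocks.
#[derive(Default)]
struct Counting {
    live: AtomicUsize,
}

unsafe impl Allocator for Counting {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        // Delegate, and count the block only once it is *currently allocated*.
        let ptr = Global.allocate(layout)?;
        self.live.fetch_add(1, Ordering::Relaxed);
        Ok(ptr)
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // SAFETY: `ptr` was returned by our own `allocate`, which forwards to
        // `Global`, so the caller's "currently allocated" and "fit"
        // obligations transfer directly to `Global`.
        unsafe { Global.deallocate(ptr, layout) };
        self.live.fetch_sub(1, Ordering::Relaxed);
    }
}
```

The default `grow`, `grow_zeroed` and `shrink` implementations route through `allocate` and `deallocate`, so the count stays consistent without overriding them; and via the blanket `impl Allocator for &A` below, a `&Counting` can be passed to any allocator-aware constructor such as `Box::try_new_in`.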
+/// +/// [`layout.align()`]: Layout::align +/// [`layout.size()`]: Layout::size +/// +/// # Safety +/// +/// * Memory blocks returned from an allocator that are [*currently allocated*] +/// must point to valid memory and retain their validity while they are +/// [*currently allocated*] and at least one of the instance and all of its +/// clones has not been dropped. +/// +/// * copying, cloning, or moving the allocator must not invalidate memory +/// blocks returned from this allocator. A copied or cloned allocator must +/// behave like the same allocator, and +/// +/// * any pointer to a memory block which is [*currently allocated*] may be +/// passed to any other method of the allocator. +/// +/// [*currently allocated*]: #currently-allocated-memory +pub unsafe trait Allocator { + /// Attempts to allocate a block of memory. + /// + /// On success, returns a [`NonNull<[u8]>`][NonNull] meeting the size and alignment guarantees of `layout`. + /// + /// The returned block may have a larger size than specified by `layout.size()`, and may or may + /// not have its contents initialized. + /// + /// # Errors + /// + /// Returning `Err` indicates that either memory is exhausted or `layout` does not meet + /// allocator's size or alignment constraints. + /// + /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or + /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement + /// this trait atop an underlying native allocation library that aborts on memory exhaustion.) + /// + /// Clients wishing to abort computation in response to an allocation error are encouraged to + /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar. + /// + /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html + fn allocate(&self, layout: Layout) -> Result, AllocError>; + + /// Behaves like `allocate`, but also ensures that the returned memory is zero-initialized. + /// + /// # Errors + /// + /// Returning `Err` indicates that either memory is exhausted or `layout` does not meet + /// allocator's size or alignment constraints. + /// + /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or + /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement + /// this trait atop an underlying native allocation library that aborts on memory exhaustion.) + /// + /// Clients wishing to abort computation in response to an allocation error are encouraged to + /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar. + /// + /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html + fn allocate_zeroed(&self, layout: Layout) -> Result, AllocError> { + let ptr = self.allocate(layout)?; + // SAFETY: `alloc` returns a valid memory block + unsafe { ptr.as_ptr().cast::().write_bytes(0, ptr.len()) } + Ok(ptr) + } + + /// Deallocates the memory referenced by `ptr`. + /// + /// # Safety + /// + /// * `ptr` must denote a block of memory [*currently allocated*] via this + /// allocator, and + /// * `layout` must [*fit*] that block of memory. + /// + /// [*currently allocated*]: #currently-allocated-memory + /// [*fit*]: #memory-fitting + unsafe fn deallocate(&self, ptr: NonNull, layout: Layout); + + /// Attempts to extend the memory block. + /// + /// Returns a new [`NonNull<[u8]>`][NonNull] containing a pointer and the actual size of the allocated + /// memory. 
The pointer is suitable for holding data described by `new_layout`. To accomplish + /// this, the allocator may extend the allocation referenced by `ptr` to fit the new layout. + /// + /// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been + /// transferred to this allocator. Any access to the old `ptr` is Undefined Behavior, even if the + /// allocation was grown in-place. The newly returned pointer is the only valid pointer + /// for accessing this memory now. + /// + /// If this method returns `Err`, then ownership of the memory block has not been transferred to + /// this allocator, and the contents of the memory block are unaltered. + /// + /// # Safety + /// + /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator. + /// * `old_layout` must [*fit*] that block of memory (The `new_layout` argument need not fit it.). + /// * `new_layout.size()` must be greater than or equal to `old_layout.size()`. + /// + /// Note that `new_layout.align()` need not be the same as `old_layout.align()`. + /// + /// [*currently allocated*]: #currently-allocated-memory + /// [*fit*]: #memory-fitting + /// + /// # Errors + /// + /// Returns `Err` if the new layout does not meet the allocator's size and alignment + /// constraints of the allocator, or if growing otherwise fails. + /// + /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or + /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement + /// this trait atop an underlying native allocation library that aborts on memory exhaustion.) + /// + /// Clients wishing to abort computation in response to an allocation error are encouraged to + /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar. + /// + /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html + unsafe fn grow( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, AllocError> { + debug_assert!( + new_layout.size() >= old_layout.size(), + "`new_layout.size()` must be greater than or equal to `old_layout.size()`" + ); + + let new_ptr = self.allocate(new_layout)?; + + // SAFETY: because `new_layout.size()` must be greater than or equal to + // `old_layout.size()`, both the old and new memory allocation are valid + // for reads and writes for `old_layout.size()` bytes. Also, because the + // old allocation wasn't yet deallocated, it cannot overlap `new_ptr`. + // Thus, the call to `copy_nonoverlapping` is safe. The safety contract + // for `dealloc` must be upheld by the caller. + unsafe { + ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr() as *mut u8, old_layout.size()); + self.deallocate(ptr, old_layout); + } + + Ok(new_ptr) + } + + /// Behaves like `grow`, but also ensures that the new contents are set to + /// zero before being returned. + /// + /// The memory block will contain the following contents after a successful + /// call to `grow_zeroed`: + /// * Bytes `0..old_layout.size()` are preserved from the original + /// allocation. + /// * Bytes `old_layout.size()..old_size` will either be preserved or + /// zeroed, depending on the allocator implementation. `old_size` refers + /// to the size of the memory block prior to the `grow_zeroed` call, + /// which may be larger than the size that was originally requested when + /// it was allocated. + /// * Bytes `old_size..new_size` are zeroed. 
`new_size` refers to the size + /// of the memory block returned by the `grow_zeroed` call. + /// + /// # Safety + /// + /// * `ptr` must denote a block of memory [*currently allocated*] via this + /// allocator. + /// * `old_layout` must [*fit*] that block of memory (The `new_layout` + /// argument need not fit it.). + /// * `new_layout.size()` must be greater than or equal to + /// `old_layout.size()`. + /// + /// Note that `new_layout.align()` need not be the same as + /// `old_layout.align()`. + /// + /// [*currently allocated*]: #currently-allocated-memory + /// [*fit*]: #memory-fitting + /// + /// # Errors + /// + /// Returns `Err` if the new layout does not meet the allocator's size and + /// alignment constraints of the allocator, or if growing otherwise fails. + /// + /// Implementations are encouraged to return `Err` on memory exhaustion + /// rather than panicking or aborting, but this is not a strict requirement. + /// (Specifically: it is *legal* to implement this trait atop an underlying + /// native allocation library that aborts on memory exhaustion.) + /// + /// Clients wishing to abort computation in response to an allocation error + /// are encouraged to call the [`handle_alloc_error`] function, rather than + /// directly invoking `panic!` or similar. + /// + /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html + unsafe fn shrink( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, AllocError> { + debug_assert!( + new_layout.size() <= old_layout.size(), + "`new_layout.size()` must be smaller than or equal to `old_layout.size()`" + ); + + let new_ptr = self.allocate(new_layout)?; + + // SAFETY: because `new_layout.size()` must be lower than or equal to + // `old_layout.size()`, both the old and new memory allocation are valid for reads and + // writes for `new_layout.size()` bytes. Also, because the old allocation wasn't yet + // deallocated, it cannot overlap `new_ptr`. Thus, the call to `copy_nonoverlapping` is + // safe. The safety contract for `dealloc` must be upheld by the caller. + unsafe { + ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr() as *mut u8, new_layout.size()); + self.deallocate(ptr, old_layout); + } + + Ok(new_ptr) + } +} + +unsafe impl Allocator for &A +where + A: Allocator + ?Sized, +{ + #[inline] + fn allocate(&self, layout: Layout) -> Result, AllocError> { + (**self).allocate(layout) + } + + #[inline] + fn allocate_zeroed(&self, layout: Layout) -> Result, AllocError> { + (**self).allocate_zeroed(layout) + } + + #[inline] + unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { + // SAFETY: the safety contract must be upheld by the caller + unsafe { (**self).deallocate(ptr, layout) } + } + + #[inline] + unsafe fn grow( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, AllocError> { + // SAFETY: the safety contract must be upheld by the caller + unsafe { (**self).grow(ptr, old_layout, new_layout) } + } + + #[inline] + unsafe fn shrink( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, AllocError> { + // SAFETY: the safety contract must be upheld by the caller + unsafe { (**self).shrink(ptr, old_layout, new_layout) } + } +} + +/// The default global allocator. +#[derive(Default, Debug, Clone)] +pub struct Global; + +impl Global { + #[inline] + fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result, AllocError> { + /// Creates a `NonNull` that is dangling, but well-aligned for this Layout. 
+ /// + /// Note that the pointer value may potentially represent a valid pointer, which + /// means this must not be used as a "not yet initialized" sentinel value. Types + /// that lazily allocate must track initialization by some other means. + pub(crate) const fn dangling(layout: &Layout) -> NonNull { + unsafe { NonNull::new_unchecked(invalid_mut::(layout.align())) } + } + + match layout.size() { + 0 => Ok(NonNull::slice_from_raw_parts(dangling(&layout), 0)), + // SAFETY: `layout` is non-zero in size, + size => unsafe { + let raw_ptr = if zeroed { + alloc_zeroed(layout) + } else { + alloc(layout) + }; + + let Some(ptr) = NonNull::new(raw_ptr) else { + return Err(AllocError { layout }); + }; + + Ok(NonNull::slice_from_raw_parts(ptr, size)) + }, + } + } +} + +unsafe impl Allocator for Global { + #[inline] + fn allocate(&self, layout: Layout) -> Result, AllocError> { + self.alloc_impl(layout, false) + } + + #[inline] + fn allocate_zeroed(&self, layout: Layout) -> Result, AllocError> { + self.alloc_impl(layout, true) + } + + unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { + if layout.size() != 0 { + // SAFETY: `layout` is non-zero in size, + // other conditions must be upheld by the caller + unsafe { dealloc(ptr.as_ptr(), layout) } + } + } +} diff --git a/crates/rune-alloc/src/alloc/borrow/mod.rs b/crates/rune-alloc/src/alloc/borrow/mod.rs new file mode 100644 index 000000000..877b3cfb8 --- /dev/null +++ b/crates/rune-alloc/src/alloc/borrow/mod.rs @@ -0,0 +1,2 @@ +pub use self::try_to_owned::TryToOwned; +pub(crate) mod try_to_owned; diff --git a/crates/rune-alloc/src/alloc/borrow/try_to_owned.rs b/crates/rune-alloc/src/alloc/borrow/try_to_owned.rs new file mode 100644 index 000000000..c77729e83 --- /dev/null +++ b/crates/rune-alloc/src/alloc/borrow/try_to_owned.rs @@ -0,0 +1,43 @@ +use core::borrow::Borrow; + +use crate::{Error, TryClone}; + +/// A generalization of `TryClone` to borrowed data. +/// +/// Some types make it possible to go from borrowed to owned, usually by +/// implementing the `TryClone` trait. But `TryClone` works only for going from +/// `&T` to `T`. The `ToOwned` trait generalizes `TryClone` to construct owned +/// data from any borrow of a given type. +pub trait TryToOwned { + /// The resulting type after obtaining ownership. + type Owned: Borrow; + + /// Creates owned data from borrowed data, usually by cloning. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::{Vec, String, TryToOwned}; + /// + /// let s: &str = "a"; + /// let ss: String = s.try_to_owned()?; + /// # let v: &[i32] = &[1, 2]; + /// # let vv: Vec = v.try_to_owned()?; + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + fn try_to_owned(&self) -> Result; +} + +impl TryToOwned for T +where + T: TryClone, +{ + type Owned = T; + + #[inline] + fn try_to_owned(&self) -> Result { + self.try_clone() + } +} diff --git a/crates/rune-alloc/src/alloc/boxed.rs b/crates/rune-alloc/src/alloc/boxed.rs new file mode 100644 index 000000000..253a789a7 --- /dev/null +++ b/crates/rune-alloc/src/alloc/boxed.rs @@ -0,0 +1,542 @@ +use core::alloc::Layout; +use core::fmt; +use core::mem; +use core::ops::{Deref, DerefMut}; + +use crate::alloc::raw_vec::RawVec; +use crate::alloc::{AllocError, Allocator, Error, Global, TryClone, Vec}; +use crate::ptr; +use crate::ptr::Unique; + +#[test] +fn ensure_niche_size() { + assert_eq!( + ::core::mem::size_of::>>(), + ::core::mem::size_of::>() + ); +} + +/// A pointer type that uniquely owns a heap allocation of type `T`. 
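+///
+/// Unlike the standard library's `Box`, the allocating constructors here are
+/// fallible: [`Box::new`] and [`Box::try_new_in`] return a `Result` and
+/// surface allocation failure as an [`AllocError`] instead of aborting.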
+pub struct Box { + ptr: Unique, + alloc: A, +} + +impl Box { + /// Allocates memory on the heap and then places `x` into it. + /// + /// This doesn't actually allocate if `T` is zero-sized. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::Box; + /// + /// let five = Box::new(5)?; + /// # Ok::<_, rune_alloc::AllocError>(()) + /// ``` + pub fn new(value: T) -> Result { + Self::try_new_in(value, Global) + } +} + +impl Box { + /// Allocates memory in the given allocator then places `x` into it, + /// returning an error if the allocation fails + /// + /// This doesn't actually allocate if `T` is zero-sized. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{Box, Global}; + /// + /// let five = Box::try_new_in(5, Global)?; + /// # Ok::<(), rune_alloc::AllocError>(()) + /// ``` + #[inline] + pub fn try_new_in(x: T, alloc: A) -> Result { + let mut boxed = Self::try_new_uninit_in(alloc)?; + + unsafe { + boxed.as_mut_ptr().write(x); + Ok(boxed.assume_init()) + } + } + + /// Constructs a new box with uninitialized contents in the provided + /// allocator, returning an error if the allocation fails + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{Box, Global}; + /// + /// let mut five = Box::::try_new_uninit_in(Global)?; + /// + /// let five: Box = unsafe { + /// // Deferred initialization: + /// five.as_mut_ptr().write(5); + /// + /// five.assume_init() + /// }; + /// + /// assert_eq!(*five, 5); + /// # Ok::<_, rune_alloc::AllocError>(()) + /// ``` + pub fn try_new_uninit_in(alloc: A) -> Result, A>, AllocError> + where + A: Allocator, + { + let layout = Layout::new::>(); + let ptr = alloc.allocate(layout)?.cast(); + unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) } + } + + /// Converts a `Box` into a `Box<[T]>` + /// + /// This conversion does not allocate on the heap and happens in place. + pub(crate) fn into_boxed_slice(boxed: Self) -> Box<[T], A> { + let (raw, alloc) = Box::into_raw_with_allocator(boxed); + unsafe { Box::from_raw_in(raw as *mut [T; 1], alloc) } + } + + /// Consumes the `Box`, returning the wrapped value. + #[inline] + pub fn into_inner(boxed: Self) -> T { + let this = mem::ManuallyDrop::new(boxed); + let value = unsafe { ptr::read(this.ptr.as_ptr()) }; + + // Free memory associated with the box. + // + // SAFETY: We own the box, so we know we can safely deallocate it. + unsafe { + let layout = for_value_raw(this.ptr.as_ptr()); + + if layout.size() != 0 { + this.alloc.deallocate(From::from(this.ptr.cast()), layout); + } + } + + value + } +} + +impl Box { + /// Consumes and leaks the `Box`, returning a mutable reference, `&'a mut + /// T`. Note that the type `T` must outlive the chosen lifetime `'a`. If the + /// type has only static references, or none at all, then this may be chosen + /// to be `'static`. + /// + /// This function is mainly useful for data that lives for the remainder of + /// the program's life. Dropping the returned reference will cause a memory + /// leak. If this is not acceptable, the reference should first be wrapped + /// with the [`Box::from_raw_in`] function producing a `Box`. This `Box` can + /// then be dropped which will properly destroy `T` and release the + /// allocated memory. + /// + /// Note: this is an associated function, which means that you have to call + /// it as `Box::leak(b)` instead of `b.leak()`. This is so that there is no + /// conflict with a method on the inner type. 
+ /// + /// # Examples + /// + /// Simple usage: + /// + /// ``` + /// use rune_alloc::Box; + /// + /// let x = Box::new(41)?; + /// let static_ref: &'static mut usize = Box::leak(x); + /// *static_ref += 1; + /// assert_eq!(*static_ref, 42); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// Unsized data: + /// + /// ``` + /// use rune_alloc::Box; + /// + /// let x = rune_alloc::try_vec![1, 2, 3].try_into_boxed_slice()?; + /// let static_ref = Box::leak(x); + /// static_ref[0] = 4; + /// assert_eq!(*static_ref, [4, 2, 3]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn leak<'a>(b: Self) -> &'a mut T + where + A: 'a, + { + unsafe { &mut *mem::ManuallyDrop::new(b).ptr.as_ptr() } + } + + /// Constructs a box from a raw pointer in the given allocator. + /// + /// After calling this function, the raw pointer is owned by the resulting + /// `Box`. Specifically, the `Box` destructor will call the destructor of + /// `T` and free the allocated memory. For this to be safe, the memory must + /// have been allocated in accordance with the [memory layout] used by `Box` + /// . + /// + /// # Safety + /// + /// This function is unsafe because improper use may lead to memory + /// problems. For example, a double-free may occur if the function is called + /// twice on the same raw pointer. + /// + /// # Examples + /// + /// Recreate a `Box` which was previously converted to a raw pointer using + /// [`Box::into_raw_with_allocator`]: + /// + /// ``` + /// use rune_alloc::{Box, Global}; + /// + /// let x = Box::try_new_in(5, Global)?; + /// let (ptr, alloc) = Box::into_raw_with_allocator(x); + /// let x = unsafe { Box::from_raw_in(ptr, alloc) }; + /// # Ok::<(), rune_alloc::AllocError>(()) + /// ``` + /// + /// Manually create a `Box` from scratch by using the system allocator: + /// + /// ``` + /// use core::alloc::Layout; + /// use rune_alloc::{Box, Allocator, Global}; + /// + /// unsafe { + /// let ptr = Global.allocate(Layout::new::())?.as_ptr() as *mut i32; + /// // In general .write is required to avoid attempting to destruct + /// // the (uninitialized) previous contents of `ptr`, though for this + /// // simple example `*ptr = 5` would have worked as well. + /// ptr.write(5); + /// let x = Box::from_raw_in(ptr, Global); + /// } + /// # Ok::<(), rune_alloc::AllocError>(()) + /// ``` + /// + /// [memory layout]: self#memory-layout + /// [`Layout`]: crate::Layout + #[inline] + pub unsafe fn from_raw_in(raw: *mut T, alloc: A) -> Self { + Self { + ptr: unsafe { Unique::new_unchecked(raw) }, + alloc, + } + } + + /// Consumes the `Box`, returning a wrapped raw pointer and the allocator. + /// + /// The pointer will be properly aligned and non-null. + /// + /// After calling this function, the caller is responsible for the + /// memory previously managed by the `Box`. In particular, the + /// caller should properly destroy `T` and release the memory, taking + /// into account the [memory layout] used by `Box`. The easiest way to + /// do this is to convert the raw pointer back into a `Box` with the + /// [`Box::from_raw_in`] function, allowing the `Box` destructor to perform + /// the cleanup. + /// + /// Note: this is an associated function, which means that you have + /// to call it as `Box::into_raw_with_allocator(b)` instead of `b.into_raw_with_allocator()`. This + /// is so that there is no conflict with a method on the inner type. 
+ /// + /// # Examples + /// + /// Converting the raw pointer back into a `Box` with [`Box::from_raw_in`] + /// for automatic cleanup: + /// + /// ``` + /// use rune_alloc::{Box, String, Global}; + /// + /// let x = Box::try_new_in(String::try_from("Hello")?, Global)?; + /// let (ptr, alloc) = Box::into_raw_with_allocator(x); + /// let x = unsafe { Box::from_raw_in(ptr, alloc) }; + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// Manual cleanup by explicitly running the destructor and deallocating the + /// memory: + /// + /// ``` + /// use core::alloc::Layout; + /// use core::ptr::{self, NonNull}; + /// use rune_alloc::{Allocator, Box, String, Global}; + /// + /// let x = Box::try_new_in(String::try_from("Hello")?, Global)?; + /// + /// let (ptr, alloc) = Box::into_raw_with_allocator(x); + /// + /// unsafe { + /// ptr::drop_in_place(ptr); + /// let non_null = NonNull::new_unchecked(ptr); + /// alloc.deallocate(non_null.cast(), Layout::new::()); + /// } + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// [memory layout]: self#memory-layout + #[inline] + pub fn into_raw_with_allocator(b: Self) -> (*mut T, A) { + let leaked = mem::ManuallyDrop::new(b); + // SAFETY: We prevent the alloc field from being dropped, so we can + // safely smuggle it out. + let alloc = unsafe { ptr::read(&leaked.alloc) }; + (leaked.ptr.as_ptr(), alloc) + } +} + +impl Box, A> { + /// Converts to `Box`. + /// + /// # Safety + /// + /// As with [`MaybeUninit::assume_init`], + /// it is up to the caller to guarantee that the value + /// really is in an initialized state. + /// Calling this when the content is not yet fully initialized + /// causes immediate undefined behavior. + /// + /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{Box, Global}; + /// + /// let mut five = Box::::try_new_uninit_in(Global)?; + /// + /// let five: Box = unsafe { + /// // Deferred initialization: + /// five.as_mut_ptr().write(5); + /// + /// five.assume_init() + /// }; + /// + /// assert_eq!(*five, 5); + /// # Ok::<_, rune_alloc::AllocError>(()) + /// ``` + #[inline] + pub unsafe fn assume_init(self) -> Box { + let (raw, alloc) = Box::into_raw_with_allocator(self); + unsafe { Box::from_raw_in(raw as *mut T, alloc) } + } +} + +impl Box<[T], A> { + /// Constructs a new boxed slice with uninitialized contents. Returns an error if + /// the allocation fails + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{Box, Global}; + /// + /// let mut values = Box::<[u32]>::try_new_uninit_slice_in(3, Global)?; + /// + /// let values = unsafe { + /// // Deferred initialization: + /// values[0].as_mut_ptr().write(1); + /// values[1].as_mut_ptr().write(2); + /// values[2].as_mut_ptr().write(3); + /// values.assume_init() + /// }; + /// + /// assert_eq!(*values, [1, 2, 3]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn try_new_uninit_slice_in( + len: usize, + alloc: A, + ) -> Result], A>, Error> { + unsafe { + let layout = match Layout::array::>(len) { + Ok(l) => l, + Err(_) => return Err(Error::LayoutError), + }; + let ptr = alloc.allocate(layout)?; + Ok(RawVec::from_raw_parts_in(ptr.as_ptr() as *mut _, len, alloc).into_box(len)) + } + } +} + +impl Box<[mem::MaybeUninit], A> { + /// Converts to `Box<[T], A>`. + /// + /// # Safety + /// + /// As with [`MaybeUninit::assume_init`], + /// it is up to the caller to guarantee that the values + /// really are in an initialized state. 
+ /// Calling this when the content is not yet fully initialized + /// causes immediate undefined behavior. + /// + /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{Box, Global}; + /// + /// let mut values = Box::<[u32]>::try_new_uninit_slice_in(3, Global)?; + /// + /// let values = unsafe { + /// // Deferred initialization: + /// values[0].as_mut_ptr().write(1); + /// values[1].as_mut_ptr().write(2); + /// values[2].as_mut_ptr().write(3); + /// values.assume_init() + /// }; + /// + /// assert_eq!(*values, [1, 2, 3]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub unsafe fn assume_init(self) -> Box<[T], A> { + let (raw, alloc) = Box::into_raw_with_allocator(self); + unsafe { Box::from_raw_in(raw as *mut [T], alloc) } + } +} + +impl TryClone for Box +where + T: TryClone, +{ + #[inline] + fn try_clone(&self) -> Result { + let value = (**self).try_clone()?; + let alloc = self.alloc.clone(); + Ok(Box::try_new_in(value, alloc)?) + } +} + +impl TryClone for Box<[T], A> +where + T: TryClone, +{ + #[inline] + fn try_clone(&self) -> Result { + let alloc = self.alloc.clone(); + let vec = crate::alloc::slice::to_vec(self, alloc)?; + vec.try_into_boxed_slice() + } +} + +impl Deref for Box { + type Target = T; + + #[inline] + fn deref(&self) -> &T { + unsafe { self.ptr.as_ref() } + } +} + +impl DerefMut for Box { + #[inline] + fn deref_mut(&mut self) -> &mut T { + unsafe { self.ptr.as_mut() } + } +} + +impl Drop for Box { + #[inline] + fn drop(&mut self) { + unsafe { + let ptr = self.ptr; + + if mem::needs_drop::() { + ptr::drop_in_place(ptr.as_ptr()); + } + + let layout = for_value_raw(ptr.as_ptr()); + + if layout.size() != 0 { + self.alloc.deallocate(From::from(ptr.cast()), layout); + } + } + } +} + +impl Default for Box<[T], Global> { + fn default() -> Self { + Box { + ptr: Unique::dangling_empty_slice(), + alloc: Global, + } + } +} + +impl fmt::Display for Box +where + T: fmt::Display, +{ + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + (**self).fmt(f) + } +} + +impl fmt::Debug for Box +where + T: fmt::Debug, +{ + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + (**self).fmt(f) + } +} + +impl From> for Box<[u8], A> { + fn from(value: Box) -> Self { + // SAFETY: `[u8]` is layout compatible with `str` and there are no + // checks needed. + unsafe { + let (ptr, alloc) = Box::into_raw_with_allocator(value); + Box::from_raw_in(ptr as *mut [u8], alloc) + } + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<::rust_alloc::boxed::Box<[T]>> for Box<[T]> { + type Error = Error; + + #[inline] + fn try_from(values: ::rust_alloc::boxed::Box<[T]>) -> Result { + let mut vec = Vec::try_with_capacity(values.len())?; + + for value in ::rust_alloc::vec::Vec::from(values) { + vec.try_push(value)?; + } + + vec.try_into_boxed_slice() + } +} + +impl TryFrom<[T; N]> for Box<[T]> { + type Error = Error; + + #[inline] + fn try_from(values: [T; N]) -> Result { + let mut vec = Vec::try_with_capacity(values.len())?; + + for value in values { + vec.try_push(value)?; + } + + vec.try_into_boxed_slice() + } +} + +unsafe fn for_value_raw(t: *const T) -> Layout { + // SAFETY: we pass along the prerequisites of these functions to the caller + // TODO: Use mem::{size_of_val_raw, align_of_val_raw} when they become + // stable, for now we privately know that this can safely be turned into a + // reference since it's only used while dropping an owned value of type `T`. 
+    let (size, align) = (mem::size_of_val(&*t), mem::align_of_val(&*t));
+    // SAFETY: see rationale in `new` for why this is using the unsafe variant
+    Layout::from_size_align_unchecked(size, align)
+}
diff --git a/crates/rune-alloc/src/alloc/btree/append.rs b/crates/rune-alloc/src/alloc/btree/append.rs
new file mode 100644
index 000000000..82de09728
--- /dev/null
+++ b/crates/rune-alloc/src/alloc/btree/append.rs
@@ -0,0 +1,128 @@
+use core::iter::FusedIterator;
+
+use super::merge_iter::MergeIterInner;
+use super::node::{self, Root};
+
+#[cfg(test)]
+use crate::alloc::testing::*;
+use crate::alloc::{AllocError, Allocator};
+
+impl<K, V> Root<K, V> {
+    /// Appends all key-value pairs from the union of two ascending iterators,
+    /// incrementing a `length` variable along the way. The latter makes it
+    /// easier for the caller to avoid a leak when a drop handler panics.
+    ///
+    /// If both iterators produce the same key, this method drops the pair from
+    /// the left iterator and appends the pair from the right iterator.
+    ///
+    /// If you want the tree to end up in a strictly ascending order, like for
+    /// a `BTreeMap`, both iterators should produce keys in strictly ascending
+    /// order, each greater than all keys in the tree, including any keys
+    /// already in the tree upon entry.
+    pub(crate) fn try_append_from_sorted_iters<I, A: Allocator>(
+        &mut self,
+        left: I,
+        right: I,
+        length: &mut usize,
+        alloc: &A,
+    ) -> Result<(), AllocError>
+    where
+        K: Ord,
+        I: Iterator<Item = (K, V)> + FusedIterator,
+    {
+        // We prepare to merge `left` and `right` into a sorted sequence in linear time.
+        let iter = MergeIter(MergeIterInner::new(left, right));
+
+        // Meanwhile, we build a tree from the sorted sequence in linear time.
+        self.try_bulk_push(iter, length, alloc)
+    }
+
+    /// Pushes all key-value pairs to the end of the tree, incrementing a
+    /// `length` variable along the way. The latter makes it easier for the
+    /// caller to avoid a leak when the iterator panics.
+    pub(crate) fn try_bulk_push<I, A: Allocator>(
+        &mut self,
+        iter: I,
+        length: &mut usize,
+        alloc: &A,
+    ) -> Result<(), AllocError>
+    where
+        I: Iterator<Item = (K, V)>,
+    {
+        let mut cur_node = self.borrow_mut().last_leaf_edge().into_node();
+        // Iterate through all key-value pairs, pushing them into nodes at the right level.
+        for (key, value) in iter {
+            // Try to push key-value pair into the current leaf node.
+            if cur_node.len() < node::CAPACITY {
+                cur_node.push(key, value);
+            } else {
+                // No space left, go up and push there.
+                let mut open_node;
+                let mut test_node = cur_node.forget_type();
+                loop {
+                    match test_node.ascend() {
+                        Ok(parent) => {
+                            let parent = parent.into_node();
+                            if parent.len() < node::CAPACITY {
+                                // Found a node with space left, push here.
+                                open_node = parent;
+                                break;
+                            } else {
+                                // Go up again.
+                                test_node = parent.forget_type();
+                            }
+                        }
+                        Err(_) => {
+                            // We are at the top, create a new root node and push there.
+                            open_node = self.push_internal_level(alloc)?;
+                            break;
+                        }
+                    }
+                }
+
+                // Push key-value pair and new right subtree.
+                let tree_height = open_node.height() - 1;
+                let mut right_tree = Root::new(alloc)?;
+
+                for _ in 0..tree_height {
+                    right_tree.push_internal_level(alloc)?;
+                }
+
+                open_node.push(key, value, right_tree);
+
+                // Go down to the right-most leaf again.
+                cur_node = open_node.forget_type().last_leaf_edge().into_node();
+            }
+
+            // Increment length every iteration, to make sure the map drops
+            // the appended elements even if advancing the iterator panics.
+            *length += 1;
+        }
+        self.fix_right_border_of_plentiful();
+        Ok(())
+    }
+
+    #[cfg(test)]
+    pub(crate) fn bulk_push<I, A: Allocator>(&mut self, iter: I, length: &mut usize, alloc: &A)
+    where
+        I: Iterator<Item = (K, V)>,
+    {
+        self.try_bulk_push(iter, length, alloc).abort()
+    }
+}
+
+// An iterator for merging two sorted sequences into one
+struct MergeIter<K, V, I: Iterator<Item = (K, V)>>(MergeIterInner<I>);
+
+impl<K: Ord, V, I> Iterator for MergeIter<K, V, I>
+where
+    I: Iterator<Item = (K, V)> + FusedIterator,
+{
+    type Item = (K, V);

+    /// If two keys are equal, returns the key-value pair from the right source.
+    fn next(&mut self) -> Option<(K, V)> {
+        let (a_next, b_next) = self.0.nexts(|a: &(K, V), b: &(K, V)| K::cmp(&a.0, &b.0));
+        b_next.or(a_next)
+    }
+}
diff --git a/crates/rune-alloc/src/alloc/btree/borrow.rs b/crates/rune-alloc/src/alloc/btree/borrow.rs
new file mode 100644
index 000000000..d1bafb9c0
--- /dev/null
+++ b/crates/rune-alloc/src/alloc/btree/borrow.rs
@@ -0,0 +1,73 @@
+use core::marker::PhantomData;
+
+use crate::ptr::NonNull;
+
+/// Models a reborrow of some unique reference, when you know that the reborrow
+/// and all its descendants (i.e., all pointers and references derived from it)
+/// will not be used any more at some point, after which you want to use the
+/// original unique reference again.
+///
+/// The borrow checker usually handles this stacking of borrows for you, but
+/// some control flows that accomplish this stacking are too complicated for
+/// the compiler to follow. A `DormantMutRef` allows you to check borrowing
+/// yourself, while still expressing its stacked nature, and encapsulating
+/// the raw pointer code needed to do this without undefined behavior.
+pub(crate) struct DormantMutRef<'a, T> {
+    ptr: NonNull<T>,
+    _marker: PhantomData<&'a mut T>,
+}
+
+unsafe impl<'a, T> Sync for DormantMutRef<'a, T> where &'a mut T: Sync {}
+unsafe impl<'a, T> Send for DormantMutRef<'a, T> where &'a mut T: Send {}
+
+impl<'a, T> DormantMutRef<'a, T> {
+    /// Capture a unique borrow, and immediately reborrow it. For the compiler,
+    /// the lifetime of the new reference is the same as the lifetime of the
+    /// original reference, but you promise to use it for a shorter period.
+    pub(crate) fn new(t: &'a mut T) -> (&'a mut T, Self) {
+        let ptr = NonNull::from(t);
+        // SAFETY: we hold the borrow throughout 'a via `_marker`, and we expose
+        // only this reference, so it is unique.
+        let new_ref = unsafe { &mut *ptr.as_ptr() };
+        (
+            new_ref,
+            Self {
+                ptr,
+                _marker: PhantomData,
+            },
+        )
+    }
+
+    /// Revert to the unique borrow initially captured.
+    ///
+    /// # Safety
+    ///
+    /// The reborrow must have ended, i.e., the reference returned by `new` and
+    /// all pointers and references derived from it, must not be used anymore.
+    pub(crate) unsafe fn awaken(self) -> &'a mut T {
+        // SAFETY: our own safety conditions imply this reference is again unique.
+        unsafe { &mut *self.ptr.as_ptr() }
+    }
+
+    /// Borrows a new mutable reference from the unique borrow initially captured.
+    ///
+    /// # Safety
+    ///
+    /// The reborrow must have ended, i.e., the reference returned by `new` and
+    /// all pointers and references derived from it, must not be used anymore.
+    pub(crate) unsafe fn reborrow(&mut self) -> &'a mut T {
+        // SAFETY: our own safety conditions imply this reference is again unique.
+        unsafe { &mut *self.ptr.as_ptr() }
+    }
+
+    /// Borrows a new shared reference from the unique borrow initially captured.
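A sketch of the control flow `DormantMutRef` enables, with a hypothetical helper over std's `Vec` for simplicity (the type is crate-private, so this illustrates the pattern rather than a public API; without `DormantMutRef`, the early `return` keeps `buf` borrowed across the later mutation and the borrow checker rejects the function):

```rust
fn first_or_insert(buf: &mut Vec<u8>) -> &mut u8 {
    let (reborrow, dormant) = DormantMutRef::new(buf);
    if let Some(first) = reborrow.first_mut() {
        // The reborrow escapes here; `dormant` is simply dropped.
        return first;
    }
    // SAFETY: on this path the reborrow and everything derived from it has
    // ended, so the original unique borrow may be revived.
    let buf = unsafe { dormant.awaken() };
    buf.push(0);
    &mut buf[0]
}
```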
+ /// + /// # Safety + /// + /// The reborrow must have ended, i.e., the reference returned by `new` and + /// all pointers and references derived from it, must not be used anymore. + pub(crate) unsafe fn reborrow_shared(&self) -> &'a T { + // SAFETY: our own safety conditions imply this reference is again unique. + unsafe { &*self.ptr.as_ptr() } + } +} diff --git a/crates/rune-alloc/src/alloc/btree/fix.rs b/crates/rune-alloc/src/alloc/btree/fix.rs new file mode 100644 index 000000000..cd1869067 --- /dev/null +++ b/crates/rune-alloc/src/alloc/btree/fix.rs @@ -0,0 +1,184 @@ +use super::map::MIN_LEN; +use super::node::{marker, ForceResult::*, Handle, LeftOrRight::*, NodeRef, Root}; + +use crate::alloc::Allocator; + +impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> { + /// Stocks up a possibly underfull node by merging with or stealing from a + /// sibling. If successful but at the cost of shrinking the parent node, + /// returns that shrunk parent node. Returns an `Err` if the node is + /// an empty root. + fn fix_node_through_parent( + self, + alloc: &A, + ) -> Result, K, V, marker::Internal>>, Self> { + let len = self.len(); + if len >= MIN_LEN { + Ok(None) + } else { + match self.choose_parent_kv() { + Ok(Left(mut left_parent_kv)) => { + if left_parent_kv.can_merge() { + let parent = left_parent_kv.merge_tracking_parent(alloc); + Ok(Some(parent)) + } else { + left_parent_kv.bulk_steal_left(MIN_LEN - len); + Ok(None) + } + } + Ok(Right(mut right_parent_kv)) => { + if right_parent_kv.can_merge() { + let parent = right_parent_kv.merge_tracking_parent(alloc); + Ok(Some(parent)) + } else { + right_parent_kv.bulk_steal_right(MIN_LEN - len); + Ok(None) + } + } + Err(root) => { + if len > 0 { + Ok(None) + } else { + Err(root) + } + } + } + } + } +} + +impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> { + /// Stocks up a possibly underfull node, and if that causes its parent node + /// to shrink, stocks up the parent, recursively. + /// Returns `true` if it fixed the tree, `false` if it couldn't because the + /// root node became empty. + /// + /// This method does not expect ancestors to already be underfull upon entry + /// and panics if it encounters an empty ancestor. + pub(crate) fn fix_node_and_affected_ancestors(mut self, alloc: &A) -> bool { + loop { + match self.fix_node_through_parent(alloc) { + Ok(Some(parent)) => self = parent.forget_type(), + Ok(None) => return true, + Err(_) => return false, + } + } + } +} + +impl Root { + /// Removes empty levels on the top, but keeps an empty leaf if the entire tree is empty. + pub(crate) fn fix_top(&mut self, alloc: &A) { + while self.height() > 0 && self.len() == 0 { + self.pop_internal_level(alloc); + } + } + + /// Stocks up or merge away any underfull nodes on the right border of the + /// tree. The other nodes, those that are not the root nor a rightmost edge, + /// must already have at least MIN_LEN elements. + pub(crate) fn fix_right_border(&mut self, alloc: &A) { + self.fix_top(alloc); + if self.len() > 0 { + self.borrow_mut() + .last_kv() + .fix_right_border_of_right_edge(alloc); + self.fix_top(alloc); + } + } + + /// The symmetric clone of `fix_right_border`. + pub(crate) fn fix_left_border(&mut self, alloc: &A) { + self.fix_top(alloc); + if self.len() > 0 { + self.borrow_mut() + .first_kv() + .fix_left_border_of_left_edge(alloc); + self.fix_top(alloc); + } + } + + /// Stocks up any underfull nodes on the right border of the tree. 
+ /// The other nodes, those that are neither the root nor a rightmost edge, + /// must be prepared to have up to MIN_LEN elements stolen. + pub(crate) fn fix_right_border_of_plentiful(&mut self) { + let mut cur_node = self.borrow_mut(); + while let Internal(internal) = cur_node.force() { + // Check if right-most child is underfull. + let mut last_kv = internal.last_kv().consider_for_balancing(); + debug_assert!(last_kv.left_child_len() >= MIN_LEN * 2); + let right_child_len = last_kv.right_child_len(); + if right_child_len < MIN_LEN { + // We need to steal. + last_kv.bulk_steal_left(MIN_LEN - right_child_len); + } + + // Go further down. + cur_node = last_kv.into_right_child(); + } + } +} + +impl<'a, K: 'a, V: 'a> Handle, K, V, marker::LeafOrInternal>, marker::KV> { + fn fix_left_border_of_left_edge(mut self, alloc: &A) { + while let Internal(internal_kv) = self.force() { + self = internal_kv.fix_left_child(alloc).first_kv(); + debug_assert!(self.reborrow().into_node().len() > MIN_LEN); + } + } + + fn fix_right_border_of_right_edge(mut self, alloc: &A) { + while let Internal(internal_kv) = self.force() { + self = internal_kv.fix_right_child(alloc).last_kv(); + debug_assert!(self.reborrow().into_node().len() > MIN_LEN); + } + } +} + +impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, marker::KV> { + /// Stocks up the left child, assuming the right child isn't underfull, and + /// provisions an extra element to allow merging its children in turn + /// without becoming underfull. + /// Returns the left child. + fn fix_left_child( + self, + alloc: &A, + ) -> NodeRef, K, V, marker::LeafOrInternal> { + let mut internal_kv = self.consider_for_balancing(); + let left_len = internal_kv.left_child_len(); + debug_assert!(internal_kv.right_child_len() >= MIN_LEN); + if internal_kv.can_merge() { + internal_kv.merge_tracking_child(alloc) + } else { + // `MIN_LEN + 1` to avoid readjust if merge happens on the next level. + let count = (MIN_LEN + 1).saturating_sub(left_len); + if count > 0 { + internal_kv.bulk_steal_right(count); + } + internal_kv.into_left_child() + } + } + + /// Stocks up the right child, assuming the left child isn't underfull, and + /// provisions an extra element to allow merging its children in turn + /// without becoming underfull. + /// Returns wherever the right child ended up. + fn fix_right_child( + self, + alloc: &A, + ) -> NodeRef, K, V, marker::LeafOrInternal> { + let mut internal_kv = self.consider_for_balancing(); + let right_len = internal_kv.right_child_len(); + debug_assert!(internal_kv.left_child_len() >= MIN_LEN); + if internal_kv.can_merge() { + internal_kv.merge_tracking_child(alloc) + } else { + // `MIN_LEN + 1` to avoid readjust if merge happens on the next level. + let count = (MIN_LEN + 1).saturating_sub(right_len); + if count > 0 { + internal_kv.bulk_steal_left(count); + } + internal_kv.into_right_child() + } + } +} diff --git a/crates/rune-alloc/src/alloc/btree/map.rs b/crates/rune-alloc/src/alloc/btree/map.rs new file mode 100644 index 000000000..7c2b8cc50 --- /dev/null +++ b/crates/rune-alloc/src/alloc/btree/map.rs @@ -0,0 +1,3699 @@ +//! An ordered map based on a B-Tree. 
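The steal-versus-merge decision in the rebalancing helpers above is pure arithmetic over node occupancy. As a worked illustration (the constants below are assumptions matching the standard library's B-tree layout with `B = 6`, not values quoted from this crate's `node` module):

```rust
const B: usize = 6;
const CAPACITY: usize = 2 * B - 1; // 11
const MIN_LEN: usize = B - 1; // 5, i.e. MIN_LEN_AFTER_SPLIT

/// Mirrors the `can_merge` test: a merge must fit the left child, the
/// separating key-value pair and the right child into a single node.
fn strategy(left_len: usize, right_len: usize) -> &'static str {
    if left_len + 1 + right_len <= CAPACITY {
        "merge"
    } else {
        "steal"
    }
}

fn main() {
    // A right child with 4 elements is underfull (4 < MIN_LEN).
    assert!(4 < MIN_LEN);
    assert_eq!(strategy(5, 4), "merge"); // 5 + 1 + 4 = 10 <= 11
    assert_eq!(strategy(9, 4), "steal"); // 9 + 1 + 4 = 14 > 11
}
```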
+ +use core::borrow::Borrow; +use core::cmp::Ordering; +use core::convert::Infallible; +use core::fmt::{self, Debug}; +use core::hash::{Hash, Hasher}; +use core::iter::FusedIterator; +use core::marker::PhantomData; +use core::mem::{self, ManuallyDrop}; +use core::ops::{Bound, Index, RangeBounds}; + +#[cfg(test)] +use crate::alloc::testing::*; +use crate::alloc::TryExtend; +use crate::ptr; + +use super::borrow::DormantMutRef; +use super::navigate::{LazyLeafRange, LeafRange}; +use super::node::{self, marker, ForceResult::*, Handle, NodeRef, Root}; +use super::search::{SearchBound, SearchResult::*}; +use super::set_val::SetValZST; +use super::Recover; + +use crate::alloc::{ + AllocError, Allocator, Box, CustomError, Error, Global, TryClone, TryFromIteratorIn, +}; + +pub use entry::{Entry, OccupiedEntry, OccupiedError, VacantEntry}; +mod entry; + +pub(crate) type CmpFn = fn(&mut C, &Q, &Q) -> Result; + +use Entry::*; + +macro_rules! into_iter { + ($this:expr) => {{ + let length = mem::take(&mut $this.length); + + if let Some(root) = $this.root.take() { + let full_range = root.into_dying().full_range(); + + IntoIter { + range: full_range, + length, + alloc: &*$this.alloc, + } + } else { + IntoIter { + range: LazyLeafRange::none(), + length: 0, + alloc: &*$this.alloc, + } + } + }}; +} + +#[inline(always)] +pub(crate) fn into_ok(result: Result) -> T { + match result { + Ok(value) => value, + Err(error) => match error {}, + } +} + +#[inline(always)] +pub(crate) fn infallible_cmp(_: &mut (), a: &T, b: &T) -> Result +where + T: Ord, +{ + Ok(a.cmp(b)) +} + +/// Minimum number of elements in a node that is not a root. +/// We might temporarily have fewer elements during methods. +pub(super) const MIN_LEN: usize = node::MIN_LEN_AFTER_SPLIT; + +// A tree in a `BTreeMap` is a tree in the `node` module with additional invariants: +// - Keys must appear in ascending order (according to the key's type). +// - Every non-leaf node contains at least 1 element (has at least 2 children). +// - Every non-root node contains at least MIN_LEN elements. +// +// An empty map is represented either by the absence of a root node or by a +// root node that is an empty leaf. + +/// An ordered map based on a [B-Tree]. +/// +/// B-Trees represent a fundamental compromise between cache-efficiency and actually minimizing +/// the amount of work performed in a search. In theory, a binary search tree (BST) is the optimal +/// choice for a sorted map, as a perfectly balanced BST performs the theoretical minimum amount of +/// comparisons necessary to find an element (log2n). However, in practice the way this +/// is done is *very* inefficient for modern computer architectures. In particular, every element +/// is stored in its own individually heap-allocated node. This means that every single insertion +/// triggers a heap-allocation, and every single comparison should be a cache-miss. Since these +/// are both notably expensive things to do in practice, we are forced to, at the very least, +/// reconsider the BST strategy. +/// +/// A B-Tree instead makes each node contain B-1 to 2B-1 elements in a contiguous array. By doing +/// this, we reduce the number of allocations by a factor of B, and improve cache efficiency in +/// searches. However, this does mean that searches will have to do *more* comparisons on average. +/// The precise number of comparisons depends on the node search strategy used. For optimal cache +/// efficiency, one could search the nodes linearly. 
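The `into_ok` / `infallible_cmp` pair defined above is the plumbing that lets one search implementation serve both fallible comparators (the `CmpFn` shape) and the plain `Ord` path, where `E = Infallible` makes the error branch vanish at compile time. A standalone sketch of the same trick, with all names illustrative rather than taken from the crate's internals:

```rust
use core::cmp::Ordering;
use core::convert::Infallible;

// A search routine written once, over a fallible comparator...
fn search_with<T, C, E>(
    cx: &mut C,
    items: &[T],
    needle: &T,
    cmp: fn(&mut C, &T, &T) -> Result<Ordering, E>,
) -> Result<Option<usize>, E> {
    for (i, item) in items.iter().enumerate() {
        if cmp(cx, item, needle)? == Ordering::Equal {
            return Ok(Some(i));
        }
    }
    Ok(None)
}

fn into_ok<T>(result: Result<T, Infallible>) -> T {
    match result {
        Ok(value) => value,
        Err(error) => match error {},
    }
}

// ...and reused infallibly: with `E = Infallible` the error arm is
// statically unreachable and `into_ok` unwraps without a panic path.
fn search<T: Ord>(items: &[T], needle: &T) -> Option<usize> {
    into_ok(search_with(&mut (), items, needle, |_, a, b| Ok(a.cmp(b))))
}

fn main() {
    assert_eq!(search(&[1, 3, 5], &5), Some(2));
}
```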
For optimal comparisons, one could search +/// the node using binary search. As a compromise, one could also perform a linear search +/// that initially only checks every ith element for some choice of i. +/// +/// Currently, our implementation simply performs naive linear search. This provides excellent +/// performance on *small* nodes of elements which are cheap to compare. However in the future we +/// would like to further explore choosing the optimal search strategy based on the choice of B, +/// and possibly other factors. Using linear search, searching for a random element is expected +/// to take B * log(n) comparisons, which is generally worse than a BST. In practice, +/// however, performance is excellent. +/// +/// It is a logic error for a key to be modified in such a way that the key's ordering relative to +/// any other key, as determined by the [`Ord`] trait, changes while it is in the map. This is +/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code. +/// The behavior resulting from such a logic error is not specified, but will be encapsulated to the +/// `BTreeMap` that observed the logic error and not result in undefined behavior. This could +/// include panics, incorrect results, aborts, memory leaks, and non-termination. +/// +/// Iterators obtained from functions such as [`BTreeMap::iter`], [`BTreeMap::values`], or +/// [`BTreeMap::keys`] produce their items in order by key, and take worst-case logarithmic and +/// amortized constant time per item returned. +/// +/// [B-Tree]: https://en.wikipedia.org/wiki/B-tree +/// [`Cell`]: core::cell::Cell +/// [`RefCell`]: core::cell::RefCell +/// +/// # Examples +/// +/// ``` +/// use rune_alloc::BTreeMap; +/// +/// // type inference lets us omit an explicit type signature (which +/// // would be `BTreeMap<&str, &str>` in this example). +/// let mut movie_reviews = BTreeMap::new(); +/// +/// // review some movies. +/// movie_reviews.try_insert("Office Space", "Deals with real issues in the workplace.")?; +/// movie_reviews.try_insert("Pulp Fiction", "Masterpiece.")?; +/// movie_reviews.try_insert("The Godfather", "Very enjoyable.")?; +/// movie_reviews.try_insert("The Blues Brothers", "Eye lyked it a lot.")?; +/// +/// // check for a specific one. +/// if !movie_reviews.contains_key("Les Misérables") { +/// println!("We've got {} reviews, but Les Misérables ain't one.", +/// movie_reviews.len()); +/// } +/// +/// // oops, this review has a lot of spelling mistakes, let's delete it. +/// movie_reviews.remove("The Blues Brothers"); +/// +/// // look up the values associated with some keys. +/// let to_find = ["Up!", "Office Space"]; +/// for movie in &to_find { +/// match movie_reviews.get(movie) { +/// Some(review) => println!("{movie}: {review}"), +/// None => println!("{movie} is unreviewed.") +/// } +/// } +/// +/// // Look up the value for a key (will panic if the key is not found). +/// println!("Movie review: {}", movie_reviews["Office Space"]); +/// +/// // iterate over everything. 
+/// for (movie, review) in &movie_reviews { +/// println!("{movie}: \"{review}\""); +/// } +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// A `BTreeMap` with a known list of items can be initialized from an array: +/// +/// ``` +/// use rune_alloc::BTreeMap; +/// +/// let solar_distance = BTreeMap::try_from([ +/// ("Mercury", 0.4), +/// ("Venus", 0.7), +/// ("Earth", 1.0), +/// ("Mars", 1.5), +/// ])?; +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// `BTreeMap` implements an [`Entry API`], which allows for complex +/// methods of getting, setting, updating and removing keys and their values: +/// +/// [`Entry API`]: BTreeMap::entry +/// +/// ``` +/// use rune_alloc::BTreeMap; +/// +/// // type inference lets us omit an explicit type signature (which +/// // would be `BTreeMap<&str, u8>` in this example). +/// let mut player_stats = BTreeMap::new(); +/// +/// fn random_stat_buff() -> u8 { +/// // could actually return some random value here - let's just return +/// // some fixed value for now +/// 42 +/// } +/// +/// // insert a key only if it doesn't already exist +/// player_stats.entry("health").or_try_insert(100)?; +/// +/// // insert a key using a function that provides a new value only if it +/// // doesn't already exist +/// player_stats.entry("defence").or_try_insert_with(random_stat_buff)?; +/// +/// // update a key, guarding against the key possibly not being set +/// let stat = player_stats.entry("attack").or_try_insert(100)?; +/// *stat += random_stat_buff(); +/// +/// // modify an entry before an insert with in-place mutation +/// player_stats.entry("mana").and_modify(|mana| *mana += 200).or_try_insert(100)?; +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +pub struct BTreeMap { + root: Option>, + length: usize, + /// `ManuallyDrop` to control drop order (needs to be dropped after all the nodes). + pub(super) alloc: ManuallyDrop, + // For dropck; the `Box` avoids making the `Unpin` impl more strict than before + _marker: PhantomData>, +} + +#[cfg(rune_nightly)] +unsafe impl<#[may_dangle] K, #[may_dangle] V, A: Allocator> Drop for BTreeMap { + fn drop(&mut self) { + drop(unsafe { ptr::read(self) }.into_iter()) + } +} + +#[cfg(not(rune_nightly))] +impl Drop for BTreeMap { + fn drop(&mut self) { + drop(unsafe { ptr::read(self) }.into_iter()) + } +} + +// FIXME: This implementation is "wrong", but changing it would be a breaking change. +// (The bounds of the automatic `UnwindSafe` implementation have been like this since Rust 1.50.) +// Maybe we can fix it nonetheless with a crater run, or if the `UnwindSafe` +// traits are deprecated, or disarmed (no longer causing hard errors) in the future. 
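The `ManuallyDrop` around `alloc` in the struct above removes the allocator from automatic field drop, so teardown can be sequenced by hand: the nodes must be released through a still-live allocator, and only then may the allocator itself go away. A minimal sketch of that idiom with hypothetical types (the real map instead funnels teardown through `into_iter()` in its `Drop` impl, shown above):

```rust
use core::mem::ManuallyDrop;

struct NodeAlloc;

impl NodeAlloc {
    fn free(&self, node: usize) {
        // Return `node` to the allocator; elided in this sketch.
        let _ = node;
    }
}

struct Tree {
    nodes: Vec<usize>, // stand-ins for owned node pointers
    alloc: ManuallyDrop<NodeAlloc>,
}

impl Drop for Tree {
    fn drop(&mut self) {
        // 1. Tear down the nodes while the allocator is still alive.
        for node in self.nodes.drain(..) {
            self.alloc.free(node);
        }
        // 2. Only now release the allocator itself.
        // SAFETY: `self.alloc` is not touched again after this point.
        unsafe { ManuallyDrop::drop(&mut self.alloc) };
    }
}
```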
+// FIXME: This implementation is "wrong", but changing it would be a breaking change.
+// (The bounds of the automatic `UnwindSafe` implementation have been like this since Rust 1.50.)
+// Maybe we can fix it nonetheless with a crater run, or if the `UnwindSafe`
+// traits are deprecated, or disarmed (no longer causing hard errors) in the future.
+impl<K, V, A: Allocator> core::panic::UnwindSafe for BTreeMap<K, V, A>
+where
+    A: core::panic::UnwindSafe,
+    K: core::panic::RefUnwindSafe,
+    V: core::panic::RefUnwindSafe,
+{
+}
+
+impl<K: TryClone, V: TryClone, A: Allocator + Clone> TryClone for BTreeMap<K, V, A> {
+    fn try_clone(&self) -> Result<BTreeMap<K, V, A>, Error> {
+        fn clone_subtree<'a, K: TryClone, V: TryClone, A: Allocator + Clone>(
+            node: NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>,
+            alloc: &A,
+        ) -> Result<BTreeMap<K, V, A>, Error>
+        where
+            K: 'a,
+            V: 'a,
+        {
+            match node.force() {
+                Leaf(leaf) => {
+                    let mut out_tree = BTreeMap {
+                        root: Some(Root::new(alloc)?),
+                        length: 0,
+                        alloc: ManuallyDrop::new(alloc.clone()),
+                        _marker: PhantomData,
+                    };
+
+                    {
+                        let root = out_tree.root.as_mut().unwrap(); // unwrap succeeds because we just wrapped
+                        let mut out_node = match root.borrow_mut().force() {
+                            Leaf(leaf) => leaf,
+                            Internal(_) => unreachable!(),
+                        };
+
+                        let mut in_edge = leaf.first_edge();
+                        while let Ok(kv) = in_edge.right_kv() {
+                            let (k, v) = kv.into_kv();
+                            in_edge = kv.right_edge();
+
+                            out_node.push(k.try_clone()?, v.try_clone()?);
+                            out_tree.length += 1;
+                        }
+                    }
+
+                    Ok(out_tree)
+                }
+                Internal(internal) => {
+                    let mut out_tree = clone_subtree(internal.first_edge().descend(), alloc)?;
+
+                    {
+                        let out_root = out_tree.root.as_mut().unwrap();
+                        let mut out_node = out_root.push_internal_level(alloc)?;
+                        let mut in_edge = internal.first_edge();
+                        while let Ok(kv) = in_edge.right_kv() {
+                            let (k, v) = kv.into_kv();
+                            in_edge = kv.right_edge();
+
+                            let k = (*k).try_clone()?;
+                            let v = (*v).try_clone()?;
+                            let subtree = clone_subtree(in_edge.descend(), alloc)?;
+
+                            // We can't destructure subtree directly
+                            // because BTreeMap implements Drop
+                            let (subroot, sublength) = unsafe {
+                                let subtree = ManuallyDrop::new(subtree);
+                                let root = ptr::read(&subtree.root);
+                                let length = subtree.length;
+                                (root, length)
+                            };
+
+                            let subroot = match subroot {
+                                Some(subroot) => subroot,
+                                None => Root::new(alloc)?,
+                            };
+
+                            out_node.push(k, v, subroot);
+                            out_tree.length += 1 + sublength;
+                        }
+                    }
+
+                    Ok(out_tree)
+                }
+            }
+        }
+
+        if self.is_empty() {
+            Ok(BTreeMap::new_in((*self.alloc).clone()))
+        } else {
+            clone_subtree(self.root.as_ref().unwrap().reborrow(), &*self.alloc) // unwrap succeeds because not empty
+        }
+    }
+}
+
+#[cfg(test)]
+impl<K: TryClone, V: TryClone, A: Allocator + Clone> Clone for BTreeMap<K, V, A> {
+    #[inline]
+    fn clone(&self) -> Self {
+        self.try_clone().abort()
+    }
+
+    #[inline]
+    fn clone_from(&mut self, source: &Self) {
+        self.try_clone_from(source).abort()
+    }
+}
+
+impl<K, Q: ?Sized, A: Allocator> Recover<Q> for BTreeMap<K, SetValZST, A>
+where
+    K: Borrow<Q>,
+{
+    type Key = K;
+
+    fn get<C: ?Sized, E>(&self, cx: &mut C, key: &Q, cmp: CmpFn<C, Q, E>) -> Result<Option<&K>, E> {
+        let Some(root_node) = self.root.as_ref() else {
+            return Ok(None);
+        };
+
+        let root_node = root_node.reborrow();
+
+        Ok(match root_node.search_tree(cx, key, cmp)? {
+            Found(handle) => Some(handle.into_kv().0),
+            GoDown(_) => None,
+        })
+    }
+
+    fn take<C: ?Sized, E>(
+        &mut self,
+        cx: &mut C,
+        key: &Q,
+        cmp: CmpFn<C, Q, E>,
+    ) -> Result<Option<Self::Key>, E> {
+        let (map, dormant_map) = DormantMutRef::new(self);
+
+        let Some(root_node) = map.root.as_mut() else {
+            return Ok(None);
+        };
+
+        let root_node = root_node.borrow_mut();
+
+        Ok(match root_node.search_tree(cx, key, cmp)?
{ + Found(handle) => { + let entry = OccupiedEntry { + handle, + dormant_map, + alloc: &*map.alloc, + _marker: PhantomData, + }; + + Some(entry.remove_kv().0) + } + GoDown(_) => None, + }) + } + + fn try_replace( + &mut self, + cx: &mut C, + key: K, + cmp: CmpFn, + ) -> Result, AllocError>, E> { + let (map, dormant_map) = DormantMutRef::new(self); + + let root_node = match &mut map.root { + Some(root) => root, + None => { + let root = match Root::new(&*map.alloc) { + Ok(root) => root, + Err(error) => return Ok(Err(error)), + }; + + map.root.insert(root) + } + }; + + let root_node = root_node.borrow_mut(); + + match root_node.search_tree(cx, key.borrow(), cmp)? { + Found(mut kv) => Ok(Ok(Some(mem::replace(kv.key_mut(), key)))), + GoDown(handle) => { + let entry = VacantEntry { + key, + handle: Some(handle), + dormant_map, + alloc: &*map.alloc, + _marker: PhantomData, + }; + + if let Err(error) = entry.try_insert(SetValZST) { + return Ok(Err(error)); + } + + Ok(Ok(None)) + } + } + } +} + +/// A raw iterator over a map where the caller is responsible for ensuring that +/// it doesn't outlive the data it's iterating over. +/// +/// See [BTreeMap::iter_raw]. +#[must_use = "iterators are lazy and do nothing unless consumed"] +pub struct IterRaw { + range: LazyLeafRange, + length: usize, +} + +impl Iterator for IterRaw { + type Item = (*const K, *const V); + + fn next(&mut self) -> Option<(*const K, *const V)> { + if self.length == 0 { + None + } else { + self.length -= 1; + Some(unsafe { self.range.next_unchecked() }) + } + } + + fn size_hint(&self) -> (usize, Option) { + (self.length, Some(self.length)) + } + + fn last(mut self) -> Option<(*const K, *const V)> { + self.next_back() + } +} + +impl FusedIterator for IterRaw {} + +impl DoubleEndedIterator for IterRaw { + fn next_back(&mut self) -> Option<(*const K, *const V)> { + if self.length == 0 { + None + } else { + self.length -= 1; + Some(unsafe { self.range.next_back_unchecked() }) + } + } +} + +impl ExactSizeIterator for IterRaw { + fn len(&self) -> usize { + self.length + } +} + +impl Clone for IterRaw { + fn clone(&self) -> Self { + IterRaw { + range: self.range.clone(), + length: self.length, + } + } +} + +/// An iterator over the entries of a `BTreeMap`. +/// +/// This `struct` is created by the [`iter`] method on [`BTreeMap`]. See its +/// documentation for more. +/// +/// [`iter`]: BTreeMap::iter +#[must_use = "iterators are lazy and do nothing unless consumed"] +pub struct Iter<'a, K: 'a, V: 'a> { + range: LazyLeafRange, K, V>, + length: usize, +} + +impl fmt::Debug for Iter<'_, K, V> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_list().entries(self.clone()).finish() + } +} + +impl<'a, K: 'a, V: 'a> Default for Iter<'a, K, V> { + /// Creates an empty `btree_map::Iter`. + /// + /// ``` + /// use rune_alloc::btree_map; + /// + /// let iter: btree_map::Iter<'_, u8, u8> = Default::default(); + /// assert_eq!(iter.len(), 0); + /// ``` + fn default() -> Self { + Iter { + range: Default::default(), + length: 0, + } + } +} + +/// A mutable iterator over the entries of a `BTreeMap`. +/// +/// This `struct` is created by the [`iter_mut`] method on [`BTreeMap`]. See its +/// documentation for more. 
+/// +/// [`iter_mut`]: BTreeMap::iter_mut +pub struct IterMut<'a, K: 'a, V: 'a> { + range: LazyLeafRange, K, V>, + length: usize, + + // Be invariant in `K` and `V` + _marker: PhantomData<&'a mut (K, V)>, +} + +impl fmt::Debug for IterMut<'_, K, V> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let range = Iter { + range: self.range.reborrow(), + length: self.length, + }; + f.debug_list().entries(range).finish() + } +} + +impl<'a, K: 'a, V: 'a> Default for IterMut<'a, K, V> { + /// Creates an empty `btree_map::IterMut`. + /// + /// ``` + /// use rune_alloc::btree_map; + /// + /// let iter: btree_map::IterMut<'_, u8, u8> = Default::default(); + /// assert_eq!(iter.len(), 0); + /// ``` + fn default() -> Self { + IterMut { + range: Default::default(), + length: 0, + _marker: PhantomData {}, + } + } +} + +/// An owning iterator over the entries of a `BTreeMap`. +/// +/// This `struct` is created by the [`into_iter`] method on [`BTreeMap`] +/// (provided by the [`IntoIterator`] trait). See its documentation for more. +/// +/// [`into_iter`]: IntoIterator::into_iter +pub struct IntoIter { + range: LazyLeafRange, + length: usize, + /// The BTreeMap will outlive this IntoIter so we don't care about drop order for `alloc`. + alloc: A, +} + +impl IntoIter { + /// Returns an iterator of references over the remaining items. + #[inline] + pub(super) fn iter(&self) -> Iter<'_, K, V> { + Iter { + range: self.range.reborrow(), + length: self.length, + } + } +} + +impl Debug for IntoIter { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_list().entries(self.iter()).finish() + } +} + +impl Default for IntoIter +where + A: Allocator + Default, +{ + /// Creates an empty `btree_map::IntoIter`. + /// + /// ``` + /// use rune_alloc::btree_map; + /// + /// let iter: btree_map::IntoIter = Default::default(); + /// assert_eq!(iter.len(), 0); + /// ``` + fn default() -> Self { + IntoIter { + range: Default::default(), + length: 0, + alloc: Default::default(), + } + } +} + +/// An iterator over the keys of a `BTreeMap`. +/// +/// This `struct` is created by the [`keys`] method on [`BTreeMap`]. See its +/// documentation for more. +/// +/// [`keys`]: BTreeMap::keys +#[must_use = "iterators are lazy and do nothing unless consumed"] +pub struct Keys<'a, K, V> { + inner: Iter<'a, K, V>, +} + +impl fmt::Debug for Keys<'_, K, V> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_list().entries(self.clone()).finish() + } +} + +/// An iterator over the values of a `BTreeMap`. +/// +/// This `struct` is created by the [`values`] method on [`BTreeMap`]. See its +/// documentation for more. +/// +/// [`values`]: BTreeMap::values +#[must_use = "iterators are lazy and do nothing unless consumed"] +pub struct Values<'a, K, V> { + inner: Iter<'a, K, V>, +} + +impl fmt::Debug for Values<'_, K, V> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_list().entries(self.clone()).finish() + } +} + +/// A mutable iterator over the values of a `BTreeMap`. +/// +/// This `struct` is created by the [`values_mut`] method on [`BTreeMap`]. See its +/// documentation for more. 
+/// +/// [`values_mut`]: BTreeMap::values_mut +#[must_use = "iterators are lazy and do nothing unless consumed"] +pub struct ValuesMut<'a, K, V> { + inner: IterMut<'a, K, V>, +} + +impl fmt::Debug for ValuesMut<'_, K, V> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_list() + .entries(self.inner.iter().map(|(_, val)| val)) + .finish() + } +} + +/// An owning iterator over the keys of a `BTreeMap`. +/// +/// This `struct` is created by the [`into_keys`] method on [`BTreeMap`]. See +/// its documentation for more. +/// +/// [`into_keys`]: BTreeMap::into_keys +#[must_use = "iterators are lazy and do nothing unless consumed"] +pub struct IntoKeys { + inner: IntoIter, +} + +impl fmt::Debug for IntoKeys { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_list() + .entries(self.inner.iter().map(|(key, _)| key)) + .finish() + } +} + +/// An owning iterator over the values of a `BTreeMap`. +/// +/// This `struct` is created by the [`into_values`] method on [`BTreeMap`]. See +/// its documentation for more. +/// +/// [`into_values`]: BTreeMap::into_values +#[must_use = "iterators are lazy and do nothing unless consumed"] +pub struct IntoValues { + inner: IntoIter, +} + +impl fmt::Debug for IntoValues { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_list() + .entries(self.inner.iter().map(|(_, val)| val)) + .finish() + } +} + +/// An iterator over a sub-range of entries in a `BTreeMap`. +/// +/// This `struct` is created by the [`range`] method on [`BTreeMap`]. See its +/// documentation for more. +/// +/// [`range`]: BTreeMap::range +#[must_use = "iterators are lazy and do nothing unless consumed"] +pub struct Range<'a, K: 'a, V: 'a> { + inner: LeafRange, K, V>, +} + +impl fmt::Debug for Range<'_, K, V> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_list().entries(self.clone()).finish() + } +} + +/// A mutable iterator over a sub-range of entries in a `BTreeMap`. +/// +/// This `struct` is created by the [`range_mut`] method on [`BTreeMap`]. See its +/// documentation for more. +/// +/// [`range_mut`]: BTreeMap::range_mut +#[must_use = "iterators are lazy and do nothing unless consumed"] +pub struct RangeMut<'a, K: 'a, V: 'a> { + inner: LeafRange, K, V>, + + // Be invariant in `K` and `V` + _marker: PhantomData<&'a mut (K, V)>, +} + +impl fmt::Debug for RangeMut<'_, K, V> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let range = Range { + inner: self.inner.reborrow(), + }; + f.debug_list().entries(range).finish() + } +} + +impl BTreeMap { + /// Makes a new, empty `BTreeMap`. + /// + /// Does not allocate anything on its own. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map = BTreeMap::new(); + /// + /// // entries can now be inserted into the empty map + /// map.try_insert(1, "a")?; + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[must_use] + pub const fn new() -> BTreeMap { + BTreeMap { + root: None, + length: 0, + alloc: ManuallyDrop::new(Global), + _marker: PhantomData, + } + } + + #[cfg(test)] + pub(crate) fn from(value: [(K, V); N]) -> Self + where + K: Ord, + { + Self::try_from(value).abort() + } +} + +impl BTreeMap { + /// Makes a new empty BTreeMap with a reasonable choice for B. 
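Since `new` above is a `const fn` that performs no allocation, an empty map can even back a `const` item; a small sketch (the item name is invented for illustration):

```rust
use rune_alloc::BTreeMap;

// Allowed because `BTreeMap::new` is `const` and allocates nothing.
const EMPTY: BTreeMap<u32, &str> = BTreeMap::new();

fn main() {
    assert!(EMPTY.is_empty());
    assert_eq!(EMPTY.len(), 0);
}
```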
+ /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::{BTreeMap, Global}; + /// + /// let mut map = BTreeMap::new_in(Global); + /// + /// // entries can now be inserted into the empty map + /// map.try_insert(1, "a")?; + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn new_in(alloc: A) -> BTreeMap { + BTreeMap { + root: None, + length: 0, + alloc: ManuallyDrop::new(alloc), + _marker: PhantomData, + } + } +} + +impl BTreeMap { + /// Clears the map, removing all elements. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut a = BTreeMap::new(); + /// a.try_insert(1, "a")?; + /// a.clear(); + /// assert!(a.is_empty()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn clear(&mut self) { + drop(into_iter!(self)); + } +} + +impl BTreeMap { + /// Returns a reference to the value corresponding to the key. + /// + /// The key may be any borrowed form of the map's key type, but the ordering + /// on the borrowed form *must* match the ordering on the key type. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map = BTreeMap::new(); + /// map.try_insert(1, "a")?; + /// assert_eq!(map.get(&1), Some(&"a")); + /// assert_eq!(map.get(&2), None); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn get(&self, key: &Q) -> Option<&V> + where + K: Borrow + Ord, + Q: Ord, + { + into_ok(self.get_with(&mut (), key, infallible_cmp)) + } + + pub(crate) fn get_with( + &self, + cx: &mut C, + key: &Q, + cmp: CmpFn, + ) -> Result, E> + where + K: Borrow, + { + let Some(root_node) = self.root.as_ref().map(NodeRef::reborrow) else { + return Ok(None); + }; + + Ok(match root_node.search_tree(cx, key, cmp)? { + Found(handle) => Some(handle.into_kv().1), + GoDown(_) => None, + }) + } + + /// Returns the key-value pair corresponding to the supplied key. + /// + /// The supplied key may be any borrowed form of the map's key type, but the ordering + /// on the borrowed form *must* match the ordering on the key type. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map = BTreeMap::new(); + /// map.try_insert(1, "a")?; + /// assert_eq!(map.get_key_value(&1), Some((&1, &"a"))); + /// assert_eq!(map.get_key_value(&2), None); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn get_key_value(&self, k: &Q) -> Option<(&K, &V)> + where + K: Borrow + Ord, + Q: Ord, + { + let root_node = self.root.as_ref()?.reborrow(); + match into_ok(root_node.search_tree(&mut (), k, infallible_cmp)) { + Found(handle) => Some(handle.into_kv()), + GoDown(_) => None, + } + } + + /// Returns the first key-value pair in the map. + /// The key in this pair is the minimum key in the map. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map = BTreeMap::new(); + /// assert_eq!(map.first_key_value(), None); + /// map.try_insert(1, "b")?; + /// map.try_insert(2, "a")?; + /// assert_eq!(map.first_key_value(), Some((&1, &"b"))); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn first_key_value(&self) -> Option<(&K, &V)> { + let root_node = self.root.as_ref()?.reborrow(); + root_node + .first_leaf_edge() + .right_kv() + .ok() + .map(Handle::into_kv) + } + + /// Returns the first entry in the map for in-place manipulation. + /// The key of this entry is the minimum key in the map. 
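The `Borrow`-based lookup methods above also work with owned key types; a sketch that assumes `rune_alloc::String` implements `Borrow<str>` the way `std::string::String` does:

```rust
use rune_alloc::{BTreeMap, String};

fn main() -> Result<(), rune_alloc::Error> {
    let mut map = BTreeMap::new();
    map.try_insert(String::try_from("alpha")?, 1)?;

    // Plain `&str` keys work for lookups even though the map owns
    // `String` keys, because both orderings agree.
    assert_eq!(map.get("alpha"), Some(&1));
    assert!(map.get_key_value("beta").is_none());
    Ok(())
}
```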
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map = BTreeMap::new(); + /// map.try_insert(1, "a")?; + /// map.try_insert(2, "b")?; + /// + /// if let Some(mut entry) = map.first_entry() { + /// if *entry.key() > 0 { + /// entry.insert("first"); + /// } + /// } + /// + /// assert_eq!(*map.get(&1).unwrap(), "first"); + /// assert_eq!(*map.get(&2).unwrap(), "b"); + /// # Ok::<_, rune_alloc::AllocError>(()) + /// ``` + pub fn first_entry(&mut self) -> Option> { + let (map, dormant_map) = DormantMutRef::new(self); + let root_node = map.root.as_mut()?.borrow_mut(); + let kv = root_node.first_leaf_edge().right_kv().ok()?; + Some(OccupiedEntry { + handle: kv.forget_node_type(), + dormant_map, + alloc: &*map.alloc, + _marker: PhantomData, + }) + } + + /// Removes and returns the first element in the map. + /// The key of this element is the minimum key that was in the map. + /// + /// # Examples + /// + /// Draining elements in ascending order, while keeping a usable map each iteration. + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map = BTreeMap::new(); + /// map.try_insert(1, "a")?; + /// map.try_insert(2, "b")?; + /// while let Some((key, _val)) = map.pop_first() { + /// assert!(map.iter().all(|(k, _v)| *k > key)); + /// } + /// assert!(map.is_empty()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn pop_first(&mut self) -> Option<(K, V)> { + self.first_entry().map(|entry| entry.remove_entry()) + } + + /// Returns the last key-value pair in the map. + /// The key in this pair is the maximum key in the map. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map = BTreeMap::new(); + /// map.try_insert(1, "b")?; + /// map.try_insert(2, "a")?; + /// assert_eq!(map.last_key_value(), Some((&2, &"a"))); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn last_key_value(&self) -> Option<(&K, &V)> { + let root_node = self.root.as_ref()?.reborrow(); + root_node + .last_leaf_edge() + .left_kv() + .ok() + .map(Handle::into_kv) + } + + /// Returns the last entry in the map for in-place manipulation. + /// The key of this entry is the maximum key in the map. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map = BTreeMap::new(); + /// map.try_insert(1, "a")?; + /// map.try_insert(2, "b")?; + /// + /// if let Some(mut entry) = map.last_entry() { + /// if *entry.key() > 0 { + /// entry.insert("last"); + /// } + /// } + /// + /// assert_eq!(*map.get(&1).unwrap(), "a"); + /// assert_eq!(*map.get(&2).unwrap(), "last"); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn last_entry(&mut self) -> Option> { + let (map, dormant_map) = DormantMutRef::new(self); + let root_node = map.root.as_mut()?.borrow_mut(); + let kv = root_node.last_leaf_edge().left_kv().ok()?; + Some(OccupiedEntry { + handle: kv.forget_node_type(), + dormant_map, + alloc: &*map.alloc, + _marker: PhantomData, + }) + } + + /// Removes and returns the last element in the map. + /// The key of this element is the maximum key that was in the map. + /// + /// # Examples + /// + /// Draining elements in descending order, while keeping a usable map each iteration. 
+ /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map = BTreeMap::new(); + /// map.try_insert(1, "a")?; + /// map.try_insert(2, "b")?; + /// + /// while let Some((key, _val)) = map.pop_last() { + /// assert!(map.iter().all(|(k, _v)| *k < key)); + /// } + /// + /// assert!(map.is_empty()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn pop_last(&mut self) -> Option<(K, V)> { + self.last_entry().map(|entry| entry.remove_entry()) + } + + /// Returns `true` if the map contains a value for the specified key. + /// + /// The key may be any borrowed form of the map's key type, but the ordering + /// on the borrowed form *must* match the ordering on the key type. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map = BTreeMap::new(); + /// map.try_insert(1, "a")?; + /// + /// assert_eq!(map.contains_key(&1), true); + /// assert_eq!(map.contains_key(&2), false); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn contains_key(&self, key: &Q) -> bool + where + K: Borrow + Ord, + Q: Ord, + { + into_ok(self.contains_key_with(&mut (), key, infallible_cmp)) + } + + pub(crate) fn contains_key_with( + &self, + cx: &mut C, + key: &Q, + cmp: CmpFn, + ) -> Result + where + K: Borrow + Ord, + Q: Ord, + { + Ok(self.get_with(cx, key, cmp)?.is_some()) + } + + /// Returns a mutable reference to the value corresponding to the key. + /// + /// The key may be any borrowed form of the map's key type, but the ordering + /// on the borrowed form *must* match the ordering on the key type. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map = BTreeMap::new(); + /// + /// map.try_insert(1, "a")?; + /// + /// if let Some(x) = map.get_mut(&1) { + /// *x = "b"; + /// } + /// + /// assert_eq!(map[&1], "b"); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + // See `get` for implementation notes, this is basically a copy-paste with mut's added + pub fn get_mut(&mut self, key: &Q) -> Option<&mut V> + where + K: Borrow + Ord, + Q: Ord, + { + into_ok(self.get_mut_with(&mut (), key, infallible_cmp)) + } + + pub(crate) fn get_mut_with( + &mut self, + cx: &mut C, + key: &Q, + cmp: CmpFn, + ) -> Result, E> + where + K: Borrow, + { + let Some(root_node) = self.root.as_mut().map(NodeRef::borrow_mut) else { + return Ok(None); + }; + + Ok(match root_node.search_tree(cx, key, cmp)? { + Found(handle) => Some(handle.into_val_mut()), + GoDown(_) => None, + }) + } + + /// Inserts a key-value pair into the map. + /// + /// If the map did not have this key present, `None` is returned. + /// + /// If the map did have this key present, the value is updated, and the old + /// value is returned. The key is not updated, though; this matters for + /// types that can be `==` without being identical. See the [module-level + /// documentation] for more. 
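To make the point above concrete, here is a sketch with a hypothetical `Label` key whose ordering and equality ignore its `note` field; updating the value keeps the originally stored key:

```rust
use core::cmp::Ordering;
use rune_alloc::BTreeMap;

// Hypothetical key type: ordering and equality look only at `id`, not `note`.
struct Label {
    id: u32,
    note: &'static str,
}

impl PartialEq for Label {
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
    }
}

impl Eq for Label {}

impl PartialOrd for Label {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Label {
    fn cmp(&self, other: &Self) -> Ordering {
        self.id.cmp(&other.id)
    }
}

fn main() -> Result<(), rune_alloc::Error> {
    let mut map = BTreeMap::new();
    map.try_insert(Label { id: 1, note: "original" }, "a")?;

    // The value is replaced, but the stored key keeps `note: "original"`.
    let old = map.try_insert(Label { id: 1, note: "replacement" }, "b")?;
    assert_eq!(old, Some("a"));

    let (key, value) = map.first_key_value().unwrap();
    assert_eq!((key.note, *value), ("original", "b"));
    Ok(())
}
```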
+ /// + /// [module-level documentation]: index.html#insert-and-complex-keys + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map = BTreeMap::new(); + /// assert_eq!(map.try_insert(37, "a")?, None); + /// assert_eq!(map.is_empty(), false); + /// + /// map.try_insert(37, "b")?; + /// assert_eq!(map.try_insert(37, "c")?, Some("b")); + /// assert_eq!(map[&37], "c"); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_insert(&mut self, key: K, value: V) -> Result, AllocError> + where + K: Ord, + { + match self.entry(key) { + Occupied(mut entry) => Ok(Some(entry.insert(value))), + Vacant(entry) => { + entry.try_insert(value)?; + Ok(None) + } + } + } + + #[cfg(test)] + pub(crate) fn insert(&mut self, key: K, value: V) -> Option + where + K: Ord, + { + self.try_insert(key, value).abort() + } + + /// Tries to insert a key-value pair into the map, and returns a mutable + /// reference to the value in the entry. + /// + /// If the map already had this key present, nothing is updated, and an + /// error containing the occupied entry and the value is returned. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::{BTreeMap, CustomError}; + /// + /// let mut map = BTreeMap::new(); + /// assert_eq!(map.try_insert_or(37, "a").unwrap(), &"a"); + /// + /// if let CustomError::Custom(err) = map.try_insert_or(37, "b").unwrap_err() { + /// assert_eq!(err.entry.key(), &37); + /// assert_eq!(err.entry.get(), &"a"); + /// assert_eq!(err.value, "b"); + /// } + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_insert_or( + &mut self, + key: K, + value: V, + ) -> Result<&mut V, CustomError>> + where + K: Ord, + { + match self.entry(key) { + Occupied(entry) => Err(CustomError::Custom(OccupiedError { entry, value })), + Vacant(entry) => Ok(entry.try_insert(value)?), + } + } + + #[cfg(test)] + pub(crate) fn insert_or( + &mut self, + key: K, + value: V, + ) -> Result<&mut V, OccupiedError<'_, K, V, A>> + where + K: Ord, + { + self.try_insert_or(key, value).custom_result() + } + + /// Removes a key from the map, returning the value at the key if the key + /// was previously in the map. + /// + /// The key may be any borrowed form of the map's key type, but the ordering + /// on the borrowed form *must* match the ordering on the key type. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map = BTreeMap::new(); + /// map.try_insert(1, "a")?; + /// assert_eq!(map.remove(&1), Some("a")); + /// assert_eq!(map.remove(&1), None); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn remove(&mut self, key: &Q) -> Option + where + K: Borrow + Ord, + Q: Ord, + { + self.remove_entry(key).map(|(_, v)| v) + } + + /// Removes a key from the map, returning the stored key and value if the key + /// was previously in the map. + /// + /// The key may be any borrowed form of the map's key type, but the ordering + /// on the borrowed form *must* match the ordering on the key type. 
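Removal accepts the same borrowed key forms as the lookups; a sketch that, like the lookup sketch earlier, assumes `rune_alloc::String: Borrow<str>`:

```rust
use rune_alloc::{BTreeMap, String};

fn main() -> Result<(), rune_alloc::Error> {
    let mut map = BTreeMap::new();
    map.try_insert(String::try_from("k")?, 1)?;

    // Querying with `&str` still hands back the owned `String` key.
    let (_key, value) = map.remove_entry("k").unwrap();
    assert_eq!(value, 1);
    assert!(map.is_empty());
    Ok(())
}
```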
+ /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map = BTreeMap::new(); + /// map.try_insert(1, "a")?; + /// assert_eq!(map.remove_entry(&1), Some((1, "a"))); + /// assert_eq!(map.remove_entry(&1), None); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn remove_entry(&mut self, key: &Q) -> Option<(K, V)> + where + Q: Ord, + K: Borrow + Ord, + { + into_ok(self.remove_entry_with(&mut (), key, infallible_cmp)) + } + + pub(crate) fn remove_entry_with( + &mut self, + cx: &mut C, + key: &Q, + cmp: CmpFn, + ) -> Result, E> + where + K: Borrow, + { + let (map, dormant_map) = DormantMutRef::new(self); + + let Some(root_node) = map.root.as_mut().map(NodeRef::borrow_mut) else { + return Ok(None); + }; + + Ok(match root_node.search_tree(cx, key, cmp)? { + Found(handle) => { + let entry = OccupiedEntry { + handle, + dormant_map, + alloc: &*map.alloc, + _marker: PhantomData, + }; + + Some(entry.remove_entry()) + } + GoDown(_) => None, + }) + } + + /// Retains only the elements specified by the predicate. + /// + /// In other words, remove all pairs `(k, v)` for which `f(&k, &mut v)` + /// returns `false`. The elements are visited in ascending key order. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{BTreeMap, IteratorExt}; + /// + /// let mut map: BTreeMap = (0..8).map(|x| (x, x*10)).try_collect()?; + /// // Keep only the elements with even-numbered keys. + /// map.retain(|&k, _| k % 2 == 0); + /// assert!(map.into_iter().eq(vec![(0, 0), (2, 20), (4, 40), (6, 60)])); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn retain(&mut self, mut f: F) + where + K: Ord, + F: FnMut(&K, &mut V) -> bool, + { + self.extract_if(|k, v| !f(k, v)).for_each(drop); + } + + /// Moves all elements from `other` into `self`, leaving `other` empty. + /// + /// If a key from `other` is already present in `self`, the respective + /// value from `self` will be overwritten with the respective value from `other`. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut a = BTreeMap::new(); + /// a.try_insert(1, "a")?; + /// a.try_insert(2, "b")?; + /// a.try_insert(3, "c")?; // Note: Key (3) also present in b. + /// + /// let mut b = BTreeMap::new(); + /// b.try_insert(3, "d")?; // Note: Key (3) also present in a. + /// b.try_insert(4, "e")?; + /// b.try_insert(5, "f")?; + /// + /// a.try_append(&mut b); + /// + /// assert_eq!(a.len(), 5); + /// assert_eq!(b.len(), 0); + /// + /// assert_eq!(a[&1], "a"); + /// assert_eq!(a[&2], "b"); + /// assert_eq!(a[&3], "d"); // Note: "c" has been overwritten. + /// assert_eq!(a[&4], "e"); + /// assert_eq!(a[&5], "f"); + /// # Ok::<_, rune_alloc::AllocError>(()) + /// ``` + pub fn try_append(&mut self, other: &mut Self) -> Result<(), AllocError> + where + K: Ord, + { + // Do we have to append anything at all? + if other.is_empty() { + return Ok(()); + } + + // We can just swap `self` and `other` if `self` is empty. 
+ if self.is_empty() { + mem::swap(self, other); + return Ok(()); + } + + let self_iter = into_iter!(self); + let other_iter = into_iter!(other); + + let root = match &mut self.root { + Some(root) => root, + None => self.root.insert(Root::new(&*self.alloc)?), + }; + + root.try_append_from_sorted_iters(self_iter, other_iter, &mut self.length, &*self.alloc) + } + + #[cfg(test)] + pub(crate) fn append(&mut self, other: &mut Self) + where + K: Ord, + { + self.try_append(other).abort() + } + + /// Constructs a double-ended iterator over a sub-range of elements in the map. + /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will + /// yield elements from min (inclusive) to max (exclusive). + /// The range may also be entered as `(Bound, Bound)`, so for example + /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive + /// range from 4 to 10. + /// + /// # Panics + /// + /// Panics if range `start > end`. + /// Panics if range `start == end` and both bounds are `Excluded`. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// use core::ops::Bound::Included; + /// + /// let mut map = BTreeMap::new(); + /// map.try_insert(3, "a")?; + /// map.try_insert(5, "b")?; + /// map.try_insert(8, "c")?; + /// + /// for (&key, &value) in map.range((Included(&4), Included(&8))) { + /// println!("{key}: {value}"); + /// } + /// + /// assert_eq!(Some((&5, &"b")), map.range(4..).next()); + /// # Ok::<_, rune_alloc::AllocError>(()) + /// ``` + pub fn range(&self, range: R) -> Range<'_, K, V> + where + Q: Ord, + K: Borrow + Ord, + R: RangeBounds, + { + into_ok(self.range_with(&mut (), range, infallible_cmp)) + } + + pub(crate) fn range_with( + &self, + cx: &mut C, + range: R, + cmp: CmpFn, + ) -> Result, E> + where + K: Borrow, + R: RangeBounds, + { + Ok(if let Some(root) = &self.root { + Range { + inner: root.reborrow().range_search(cx, range, cmp)?, + } + } else { + Range { + inner: LeafRange::none(), + } + }) + } + + /// Constructs a mutable double-ended iterator over a sub-range of elements in the map. + /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will + /// yield elements from min (inclusive) to max (exclusive). + /// The range may also be entered as `(Bound, Bound)`, so for example + /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive + /// range from 4 to 10. + /// + /// # Panics + /// + /// Panics if range `start > end`. + /// Panics if range `start == end` and both bounds are `Excluded`. 
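The range iterators compose with the usual `Iterator` adapters; a short sketch summing every value whose key lies strictly above 1:

```rust
use core::ops::Bound::{Excluded, Unbounded};

use rune_alloc::BTreeMap;

fn main() -> Result<(), rune_alloc::Error> {
    let mut map = BTreeMap::new();
    for (k, v) in [(1, 10), (4, 40), (8, 80)] {
        map.try_insert(k, v)?;
    }

    // Everything strictly above key 1, visited in ascending key order.
    let tail: i32 = map.range((Excluded(1), Unbounded)).map(|(_, v)| *v).sum();
    assert_eq!(tail, 120);
    Ok(())
}
```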
+ /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map: BTreeMap<&str, i32> = + /// [("Alice", 0), ("Bob", 0), ("Carol", 0), ("Cheryl", 0)].try_into()?; + /// + /// for (_, balance) in map.range_mut("B".."Cheryl") { + /// *balance += 100; + /// } + /// + /// for (name, balance) in &map { + /// println!("{name} => {balance}"); + /// } + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn range_mut(&mut self, range: R) -> RangeMut<'_, K, V> + where + Q: Ord, + K: Borrow + Ord, + R: RangeBounds, + { + into_ok(self.range_mut_with(&mut (), range, infallible_cmp)) + } + + pub(crate) fn range_mut_with( + &mut self, + cx: &mut C, + range: R, + cmp: CmpFn, + ) -> Result, E> + where + K: Borrow, + R: RangeBounds, + { + Ok(if let Some(root) = &mut self.root { + RangeMut { + inner: root.borrow_valmut().range_search(cx, range, cmp)?, + _marker: PhantomData, + } + } else { + RangeMut { + inner: LeafRange::none(), + _marker: PhantomData, + } + }) + } + + /// Gets the given key's corresponding entry in the map for in-place + /// manipulation. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut count: BTreeMap<&str, usize> = BTreeMap::new(); + /// + /// // count the number of occurrences of letters in the vec + /// for x in ["a", "b", "a", "c", "a", "b"] { + /// count.entry(x).and_modify(|curr| *curr += 1).or_try_insert(1)?; + /// } + /// + /// assert_eq!(count["a"], 3); + /// assert_eq!(count["b"], 2); + /// assert_eq!(count["c"], 1); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn entry(&mut self, key: K) -> Entry<'_, K, V, A> + where + K: Ord, + { + into_ok(self.entry_with(&mut (), key, infallible_cmp)) + } + + pub(crate) fn entry_with( + &mut self, + cx: &mut C, + key: K, + cmp: CmpFn, + ) -> Result, E> { + let (map, dormant_map) = DormantMutRef::new(self); + + Ok(match map.root { + None => Vacant(VacantEntry { + key, + handle: None, + dormant_map, + alloc: &*map.alloc, + _marker: PhantomData, + }), + + Some(ref mut root) => match root.borrow_mut().search_tree(cx, &key, cmp)? { + Found(handle) => Occupied(OccupiedEntry { + handle, + dormant_map, + alloc: &*map.alloc, + _marker: PhantomData, + }), + GoDown(handle) => Vacant(VacantEntry { + key, + handle: Some(handle), + dormant_map, + alloc: &*map.alloc, + _marker: PhantomData, + }), + }, + }) + } + + /// Splits the collection into two at the given key. Returns everything after the given key, + /// including the key. 
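Before the `try_split_off` examples, one more note on the `entry` API above: callers can match on the returned enum directly, just as `try_insert` does internally. A sketch, where the `rune_alloc::btree_map::Entry` import path is an assumption based on the module layout visible in these hunks:

```rust
use rune_alloc::BTreeMap;
// Assumed re-export; this file itself matches on `Occupied`/`Vacant`.
use rune_alloc::btree_map::Entry;

fn main() -> Result<(), rune_alloc::Error> {
    let mut map = BTreeMap::new();
    map.try_insert("hits", 1)?;

    match map.entry("hits") {
        // `insert` on an occupied entry swaps the value and returns the old one.
        Entry::Occupied(mut e) => {
            let old = e.insert(2);
            assert_eq!(old, 1);
        }
        // `try_insert` on a vacant entry is the fallible insertion path.
        Entry::Vacant(e) => {
            e.try_insert(0)?;
        }
    }

    assert_eq!(map["hits"], 2);
    Ok(())
}
```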
+ /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut a = BTreeMap::new(); + /// a.try_insert(1, "a")?; + /// a.try_insert(2, "b")?; + /// a.try_insert(3, "c")?; + /// a.try_insert(17, "d")?; + /// a.try_insert(41, "e")?; + /// + /// let b = a.try_split_off(&3)?; + /// + /// assert_eq!(a.len(), 2); + /// assert_eq!(b.len(), 3); + /// + /// assert_eq!(a[&1], "a"); + /// assert_eq!(a[&2], "b"); + /// + /// assert_eq!(b[&3], "c"); + /// assert_eq!(b[&17], "d"); + /// assert_eq!(b[&41], "e"); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_split_off(&mut self, key: &Q) -> Result + where + Q: Ord, + K: Borrow + Ord, + A: Clone, + { + into_ok(self.try_split_off_with(&mut (), key, infallible_cmp)) + } + + #[cfg(test)] + pub(crate) fn split_off(&mut self, key: &Q) -> Self + where + Q: Ord, + K: Borrow + Ord, + A: Clone, + { + self.try_split_off(key).abort() + } + + pub(crate) fn try_split_off_with( + &mut self, + cx: &mut C, + key: &Q, + cmp: CmpFn, + ) -> Result, E> + where + K: Borrow, + A: Clone, + { + if self.is_empty() { + return Ok(Ok(Self::new_in((*self.alloc).clone()))); + } + + let total_num = self.len(); + let left_root = self.root.as_mut().unwrap(); // unwrap succeeds because not empty + + let right_root = match left_root.split_off(cx, key, &*self.alloc, cmp)? { + Ok(right_root) => right_root, + Err(error) => return Ok(Err(Error::from(error))), + }; + + let (new_left_len, right_len) = Root::calc_split_length(total_num, left_root, &right_root); + self.length = new_left_len; + + Ok(Ok(BTreeMap { + root: Some(right_root), + length: right_len, + alloc: self.alloc.clone(), + _marker: PhantomData, + })) + } + + /// Creates an iterator that visits all elements (key-value pairs) in + /// ascending key order and uses a closure to determine if an element should + /// be removed. If the closure returns `true`, the element is removed from + /// the map and yielded. If the closure returns `false`, or panics, the + /// element remains in the map and will not be yielded. + /// + /// The iterator also lets you mutate the value of each element in the + /// closure, regardless of whether you choose to keep or remove it. + /// + /// If the returned `ExtractIf` is not exhausted, e.g. because it is dropped without iterating + /// or the iteration short-circuits, then the remaining elements will be retained. + /// Use [`retain`] with a negated predicate if you do not need the returned iterator. 
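That equivalence can be checked directly; a sketch comparing `retain` against an exhausted-and-dropped `extract_if` with the negated predicate:

```rust
use rune_alloc::{BTreeMap, IteratorExt};

fn main() -> Result<(), rune_alloc::Error> {
    let mut a: BTreeMap<i32, i32> = (0..8).map(|x| (x, x)).try_collect()?;
    let mut b: BTreeMap<i32, i32> = (0..8).map(|x| (x, x)).try_collect()?;

    a.retain(|k, _| k % 2 == 0);
    // Negated predicate, iterator exhausted and dropped: same end state.
    b.extract_if(|k, _| k % 2 != 0).for_each(drop);

    assert!(a.iter().eq(b.iter()));
    Ok(())
}
```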
+ /// + /// [`retain`]: BTreeMap::retain + /// + /// # Examples + /// + /// Splitting a map into even and odd keys, reusing the original map: + /// + /// ``` + /// use rune_alloc::{Vec, BTreeMap, IteratorExt}; + /// + /// let mut map: BTreeMap = (0..8).map(|x| (x, x)).try_collect()?; + /// let evens: BTreeMap<_, _> = map.extract_if(|k, _v| k % 2 == 0).try_collect()?; + /// let odds = map; + /// assert_eq!(evens.keys().copied().try_collect::>()?, [0, 2, 4, 6]); + /// assert_eq!(odds.keys().copied().try_collect::>()?, [1, 3, 5, 7]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn extract_if(&mut self, pred: F) -> ExtractIf<'_, K, V, F, A> + where + F: FnMut(&K, &mut V) -> bool, + { + let (inner, alloc) = self.extract_if_inner(); + ExtractIf { pred, inner, alloc } + } + + pub(super) fn extract_if_inner(&mut self) -> (ExtractIfInner<'_, K, V>, &A) { + if let Some(root) = self.root.as_mut() { + let (root, dormant_root) = DormantMutRef::new(root); + let front = root.borrow_mut().first_leaf_edge(); + ( + ExtractIfInner { + length: &mut self.length, + dormant_root: Some(dormant_root), + cur_leaf_edge: Some(front), + }, + &self.alloc, + ) + } else { + ( + ExtractIfInner { + length: &mut self.length, + dormant_root: None, + cur_leaf_edge: None, + }, + &self.alloc, + ) + } + } + + /// Creates a consuming iterator visiting all the keys, in sorted order. The + /// map cannot be used after calling this. The iterator element type is `K`. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{BTreeMap, Vec, IteratorExt}; + /// + /// let mut a = BTreeMap::new(); + /// a.try_insert(2, "b")?; + /// a.try_insert(1, "a")?; + /// + /// let keys: Vec = a.into_keys().try_collect()?; + /// assert_eq!(keys, [1, 2]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn into_keys(self) -> IntoKeys { + IntoKeys { + inner: self.into_iter(), + } + } + + /// Creates a consuming iterator visiting all the values, in order by key. + /// The map cannot be used after calling this. The iterator element type is + /// `V`. 
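Consuming iteration follows the same ascending key order as the borrowing iterators; a short sketch using the fallible collection helpers seen above:

```rust
use rune_alloc::{BTreeMap, IteratorExt, Vec};

fn main() -> Result<(), rune_alloc::Error> {
    let mut map = BTreeMap::new();
    map.try_insert(2, "two")?;
    map.try_insert(1, "one")?;

    // Ownership moves into the iterator; entries still come out in key order.
    let pairs: Vec<(i32, &str)> = map.into_iter().try_collect()?;
    assert_eq!(pairs, [(1, "one"), (2, "two")]);
    Ok(())
}
```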
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{BTreeMap, Vec, IteratorExt}; + /// + /// let mut a = BTreeMap::new(); + /// a.try_insert(1, "hello"); + /// a.try_insert(2, "goodbye"); + /// + /// let values: Vec<&str> = a.into_values().try_collect()?; + /// assert_eq!(values, ["hello", "goodbye"]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn into_values(self) -> IntoValues { + IntoValues { + inner: self.into_iter(), + } + } +} + +impl<'a, K, V, A: Allocator> IntoIterator for &'a BTreeMap { + type Item = (&'a K, &'a V); + type IntoIter = Iter<'a, K, V>; + + fn into_iter(self) -> Iter<'a, K, V> { + self.iter() + } +} + +impl<'a, K: 'a, V: 'a> Iterator for Iter<'a, K, V> { + type Item = (&'a K, &'a V); + + fn next(&mut self) -> Option<(&'a K, &'a V)> { + if self.length == 0 { + None + } else { + self.length -= 1; + Some(unsafe { self.range.next_unchecked() }) + } + } + + fn size_hint(&self) -> (usize, Option) { + (self.length, Some(self.length)) + } + + fn last(mut self) -> Option<(&'a K, &'a V)> { + self.next_back() + } + + fn min(mut self) -> Option<(&'a K, &'a V)> + where + (&'a K, &'a V): Ord, + { + self.next() + } + + fn max(mut self) -> Option<(&'a K, &'a V)> + where + (&'a K, &'a V): Ord, + { + self.next_back() + } +} + +impl FusedIterator for Iter<'_, K, V> {} + +impl<'a, K: 'a, V: 'a> DoubleEndedIterator for Iter<'a, K, V> { + fn next_back(&mut self) -> Option<(&'a K, &'a V)> { + if self.length == 0 { + None + } else { + self.length -= 1; + Some(unsafe { self.range.next_back_unchecked() }) + } + } +} + +impl ExactSizeIterator for Iter<'_, K, V> { + fn len(&self) -> usize { + self.length + } +} + +impl Clone for Iter<'_, K, V> { + fn clone(&self) -> Self { + Iter { + range: self.range.clone(), + length: self.length, + } + } +} + +impl<'a, K, V, A: Allocator> IntoIterator for &'a mut BTreeMap { + type Item = (&'a K, &'a mut V); + type IntoIter = IterMut<'a, K, V>; + + fn into_iter(self) -> IterMut<'a, K, V> { + self.iter_mut() + } +} + +impl<'a, K, V> Iterator for IterMut<'a, K, V> { + type Item = (&'a K, &'a mut V); + + fn next(&mut self) -> Option<(&'a K, &'a mut V)> { + if self.length == 0 { + None + } else { + self.length -= 1; + Some(unsafe { self.range.next_unchecked() }) + } + } + + fn size_hint(&self) -> (usize, Option) { + (self.length, Some(self.length)) + } + + fn last(mut self) -> Option<(&'a K, &'a mut V)> { + self.next_back() + } + + fn min(mut self) -> Option<(&'a K, &'a mut V)> + where + (&'a K, &'a mut V): Ord, + { + self.next() + } + + fn max(mut self) -> Option<(&'a K, &'a mut V)> + where + (&'a K, &'a mut V): Ord, + { + self.next_back() + } +} + +impl<'a, K, V> DoubleEndedIterator for IterMut<'a, K, V> { + fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> { + if self.length == 0 { + None + } else { + self.length -= 1; + Some(unsafe { self.range.next_back_unchecked() }) + } + } +} + +impl ExactSizeIterator for IterMut<'_, K, V> { + fn len(&self) -> usize { + self.length + } +} + +impl FusedIterator for IterMut<'_, K, V> {} + +impl<'a, K, V> IterMut<'a, K, V> { + /// Returns an iterator of references over the remaining items. 
+ #[inline] + pub(super) fn iter(&self) -> Iter<'_, K, V> { + Iter { + range: self.range.reborrow(), + length: self.length, + } + } +} + +impl IntoIterator for BTreeMap { + type Item = (K, V); + type IntoIter = IntoIter; + + fn into_iter(self) -> IntoIter { + let mut me = ManuallyDrop::new(self); + if let Some(root) = me.root.take() { + let full_range = root.into_dying().full_range(); + + IntoIter { + range: full_range, + length: me.length, + alloc: unsafe { ManuallyDrop::take(&mut me.alloc) }, + } + } else { + IntoIter { + range: LazyLeafRange::none(), + length: 0, + alloc: unsafe { ManuallyDrop::take(&mut me.alloc) }, + } + } + } +} + +impl Drop for IntoIter { + fn drop(&mut self) { + struct DropGuard<'a, K, V, A: Allocator>(&'a mut IntoIter); + + impl<'a, K, V, A: Allocator> Drop for DropGuard<'a, K, V, A> { + fn drop(&mut self) { + // Continue the same loop we perform below. This only runs when unwinding, so we + // don't have to care about panics this time (they'll abort). + while let Some(kv) = self.0.dying_next() { + // SAFETY: we consume the dying handle immediately. + unsafe { kv.drop_key_val() }; + } + } + } + + while let Some(kv) = self.dying_next() { + let guard = DropGuard(self); + // SAFETY: we don't touch the tree before consuming the dying handle. + unsafe { kv.drop_key_val() }; + mem::forget(guard); + } + } +} + +impl IntoIter { + /// Core of a `next` method returning a dying KV handle, + /// invalidated by further calls to this function and some others. + fn dying_next( + &mut self, + ) -> Option, marker::KV>> { + if self.length == 0 { + self.range.deallocating_end(&self.alloc); + None + } else { + self.length -= 1; + Some(unsafe { self.range.deallocating_next_unchecked(&self.alloc) }) + } + } + + /// Core of a `next_back` method returning a dying KV handle, + /// invalidated by further calls to this function and some others. + fn dying_next_back( + &mut self, + ) -> Option, marker::KV>> { + if self.length == 0 { + self.range.deallocating_end(&self.alloc); + None + } else { + self.length -= 1; + Some(unsafe { self.range.deallocating_next_back_unchecked(&self.alloc) }) + } + } +} + +impl Iterator for IntoIter { + type Item = (K, V); + + fn next(&mut self) -> Option<(K, V)> { + // SAFETY: we consume the dying handle immediately. + self.dying_next().map(unsafe { |kv| kv.into_key_val() }) + } + + fn size_hint(&self) -> (usize, Option) { + (self.length, Some(self.length)) + } +} + +impl DoubleEndedIterator for IntoIter { + fn next_back(&mut self) -> Option<(K, V)> { + // SAFETY: we consume the dying handle immediately. 
+ self.dying_next_back() + .map(unsafe { |kv| kv.into_key_val() }) + } +} + +impl ExactSizeIterator for IntoIter { + fn len(&self) -> usize { + self.length + } +} + +impl FusedIterator for IntoIter {} + +impl<'a, K, V> Iterator for Keys<'a, K, V> { + type Item = &'a K; + + fn next(&mut self) -> Option<&'a K> { + self.inner.next().map(|(k, _)| k) + } + + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } + + fn last(mut self) -> Option<&'a K> { + self.next_back() + } + + fn min(mut self) -> Option<&'a K> + where + &'a K: Ord, + { + self.next() + } + + fn max(mut self) -> Option<&'a K> + where + &'a K: Ord, + { + self.next_back() + } +} + +impl<'a, K, V> DoubleEndedIterator for Keys<'a, K, V> { + fn next_back(&mut self) -> Option<&'a K> { + self.inner.next_back().map(|(k, _)| k) + } +} + +impl ExactSizeIterator for Keys<'_, K, V> { + fn len(&self) -> usize { + self.inner.len() + } +} + +impl FusedIterator for Keys<'_, K, V> {} + +impl Clone for Keys<'_, K, V> { + fn clone(&self) -> Self { + Keys { + inner: self.inner.clone(), + } + } +} + +impl Default for Keys<'_, K, V> { + /// Creates an empty `btree_map::Keys`. + /// + /// ``` + /// use rune_alloc::btree_map; + /// + /// let iter: btree_map::Keys<'_, u8, u8> = Default::default(); + /// assert_eq!(iter.len(), 0); + /// ``` + fn default() -> Self { + Keys { + inner: Default::default(), + } + } +} + +impl<'a, K, V> Iterator for Values<'a, K, V> { + type Item = &'a V; + + fn next(&mut self) -> Option<&'a V> { + self.inner.next().map(|(_, v)| v) + } + + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } + + fn last(mut self) -> Option<&'a V> { + self.next_back() + } +} + +impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V> { + fn next_back(&mut self) -> Option<&'a V> { + self.inner.next_back().map(|(_, v)| v) + } +} + +impl ExactSizeIterator for Values<'_, K, V> { + fn len(&self) -> usize { + self.inner.len() + } +} + +impl FusedIterator for Values<'_, K, V> {} + +impl Clone for Values<'_, K, V> { + fn clone(&self) -> Self { + Values { + inner: self.inner.clone(), + } + } +} + +impl Default for Values<'_, K, V> { + /// Creates an empty `btree_map::Values`. + /// + /// ``` + /// use rune_alloc::btree_map; + /// + /// let iter: btree_map::Values<'_, u8, u8> = Default::default(); + /// assert_eq!(iter.len(), 0); + /// ``` + fn default() -> Self { + Values { + inner: Default::default(), + } + } +} + +/// An iterator produced by calling `extract_if` on BTreeMap. +#[must_use = "iterators are lazy and do nothing unless consumed"] +pub struct ExtractIf<'a, K, V, F, A: Allocator = Global> +where + F: 'a + FnMut(&K, &mut V) -> bool, +{ + pred: F, + inner: ExtractIfInner<'a, K, V>, + /// The BTreeMap will outlive this IntoIter so we don't care about drop order for `alloc`. + alloc: &'a A, +} + +/// Most of the implementation of ExtractIf are generic over the type +/// of the predicate, thus also serving for BTreeSet::ExtractIf. +pub(super) struct ExtractIfInner<'a, K, V> { + /// Reference to the length field in the borrowed map, updated live. + length: &'a mut usize, + /// Buried reference to the root field in the borrowed map. + /// Wrapped in `Option` to allow drop handler to `take` it. + dormant_root: Option>>, + /// Contains a leaf edge preceding the next element to be returned, or the last leaf edge. + /// Empty if the map has no root, if iteration went beyond the last leaf edge, + /// or if a panic occurred in the predicate. 
+ cur_leaf_edge: Option, K, V, marker::Leaf>, marker::Edge>>, +} + +impl fmt::Debug for ExtractIf<'_, K, V, F> +where + K: fmt::Debug, + V: fmt::Debug, + F: FnMut(&K, &mut V) -> bool, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("ExtractIf") + .field(&self.inner.peek()) + .finish() + } +} + +impl Iterator for ExtractIf<'_, K, V, F, A> +where + F: FnMut(&K, &mut V) -> bool, +{ + type Item = (K, V); + + fn next(&mut self) -> Option<(K, V)> { + self.inner.next(&mut self.pred, self.alloc) + } + + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } +} + +impl<'a, K, V> ExtractIfInner<'a, K, V> { + /// Allow Debug implementations to predict the next element. + pub(super) fn peek(&self) -> Option<(&K, &V)> { + let edge = self.cur_leaf_edge.as_ref()?; + edge.reborrow().next_kv().ok().map(Handle::into_kv) + } + + /// Implementation of a typical `ExtractIf::next` method, given the predicate. + pub(super) fn next(&mut self, pred: &mut F, alloc: &A) -> Option<(K, V)> + where + F: FnMut(&K, &mut V) -> bool, + { + while let Ok(mut kv) = self.cur_leaf_edge.take()?.next_kv() { + let (k, v) = kv.kv_mut(); + if pred(k, v) { + *self.length -= 1; + let (kv, pos) = kv.remove_kv_tracking( + || { + // SAFETY: we will touch the root in a way that will not + // invalidate the position returned. + let root = unsafe { self.dormant_root.take().unwrap().awaken() }; + root.pop_internal_level(alloc); + self.dormant_root = Some(DormantMutRef::new(root).1); + }, + alloc, + ); + self.cur_leaf_edge = Some(pos); + return Some(kv); + } + self.cur_leaf_edge = Some(kv.next_leaf_edge()); + } + None + } + + /// Implementation of a typical `ExtractIf::size_hint` method. + pub(super) fn size_hint(&self) -> (usize, Option) { + // In most of the btree iterators, `self.length` is the number of elements + // yet to be visited. Here, it includes elements that were visited and that + // the predicate decided not to drain. Making this upper bound more tight + // during iteration would require an extra field. + (0, Some(*self.length)) + } +} + +impl FusedIterator for ExtractIf<'_, K, V, F> where F: FnMut(&K, &mut V) -> bool {} + +impl<'a, K, V> Iterator for Range<'a, K, V> { + type Item = (&'a K, &'a V); + + fn next(&mut self) -> Option<(&'a K, &'a V)> { + self.inner.next_checked() + } + + fn last(mut self) -> Option<(&'a K, &'a V)> { + self.next_back() + } + + fn min(mut self) -> Option<(&'a K, &'a V)> + where + (&'a K, &'a V): Ord, + { + self.next() + } + + fn max(mut self) -> Option<(&'a K, &'a V)> + where + (&'a K, &'a V): Ord, + { + self.next_back() + } +} + +impl Default for Range<'_, K, V> { + /// Creates an empty [`Range`]. 
+ /// + /// ``` + /// use rune_alloc::btree_map; + /// + /// let iter: btree_map::Range<'_, u8, u8> = Default::default(); + /// assert_eq!(iter.count(), 0); + /// ``` + fn default() -> Self { + Range { + inner: Default::default(), + } + } +} + +impl<'a, K, V> Iterator for ValuesMut<'a, K, V> { + type Item = &'a mut V; + + fn next(&mut self) -> Option<&'a mut V> { + self.inner.next().map(|(_, v)| v) + } + + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } + + fn last(mut self) -> Option<&'a mut V> { + self.next_back() + } +} + +impl<'a, K, V> DoubleEndedIterator for ValuesMut<'a, K, V> { + fn next_back(&mut self) -> Option<&'a mut V> { + self.inner.next_back().map(|(_, v)| v) + } +} + +impl ExactSizeIterator for ValuesMut<'_, K, V> { + fn len(&self) -> usize { + self.inner.len() + } +} + +impl FusedIterator for ValuesMut<'_, K, V> {} + +impl Iterator for IntoKeys { + type Item = K; + + fn next(&mut self) -> Option { + self.inner.next().map(|(k, _)| k) + } + + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } + + fn last(mut self) -> Option { + self.next_back() + } + + fn min(mut self) -> Option + where + K: Ord, + { + self.next() + } + + fn max(mut self) -> Option + where + K: Ord, + { + self.next_back() + } +} + +impl DoubleEndedIterator for IntoKeys { + fn next_back(&mut self) -> Option { + self.inner.next_back().map(|(k, _)| k) + } +} + +impl ExactSizeIterator for IntoKeys { + fn len(&self) -> usize { + self.inner.len() + } +} + +impl FusedIterator for IntoKeys {} + +impl Default for IntoKeys +where + A: Allocator + Default + Clone, +{ + /// Creates an empty `btree_map::IntoKeys`. + /// + /// ``` + /// use rune_alloc::btree_map; + /// + /// let iter: btree_map::IntoKeys = Default::default(); + /// assert_eq!(iter.len(), 0); + /// ``` + fn default() -> Self { + IntoKeys { + inner: Default::default(), + } + } +} + +impl Iterator for IntoValues { + type Item = V; + + fn next(&mut self) -> Option { + self.inner.next().map(|(_, v)| v) + } + + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } + + fn last(mut self) -> Option { + self.next_back() + } +} + +impl DoubleEndedIterator for IntoValues { + fn next_back(&mut self) -> Option { + self.inner.next_back().map(|(_, v)| v) + } +} + +impl ExactSizeIterator for IntoValues { + fn len(&self) -> usize { + self.inner.len() + } +} + +impl FusedIterator for IntoValues {} + +impl Default for IntoValues +where + A: Allocator + Default + Clone, +{ + /// Creates an empty `btree_map::IntoValues`. 
+ /// + /// ``` + /// use rune_alloc::btree_map; + /// + /// let iter: btree_map::IntoValues = Default::default(); + /// assert_eq!(iter.len(), 0); + /// ``` + fn default() -> Self { + IntoValues { + inner: Default::default(), + } + } +} + +impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> { + fn next_back(&mut self) -> Option<(&'a K, &'a V)> { + self.inner.next_back_checked() + } +} + +impl FusedIterator for Range<'_, K, V> {} + +impl Clone for Range<'_, K, V> { + fn clone(&self) -> Self { + Range { + inner: self.inner.clone(), + } + } +} + +impl<'a, K, V> Iterator for RangeMut<'a, K, V> { + type Item = (&'a K, &'a mut V); + + fn next(&mut self) -> Option<(&'a K, &'a mut V)> { + self.inner.next_checked() + } + + fn last(mut self) -> Option<(&'a K, &'a mut V)> { + self.next_back() + } + + fn min(mut self) -> Option<(&'a K, &'a mut V)> + where + (&'a K, &'a mut V): Ord, + { + self.next() + } + + fn max(mut self) -> Option<(&'a K, &'a mut V)> + where + (&'a K, &'a mut V): Ord, + { + self.next_back() + } +} + +impl<'a, K, V> DoubleEndedIterator for RangeMut<'a, K, V> { + fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> { + self.inner.next_back_checked() + } +} + +impl FusedIterator for RangeMut<'_, K, V> {} + +impl TryExtend<(K, V)> for BTreeMap { + #[inline] + fn try_extend>(&mut self, iter: T) -> Result<(), Error> { + for (k, v) in iter { + self.try_insert(k, v)?; + } + + Ok(()) + } +} + +#[cfg(test)] +impl Extend<(K, V)> for BTreeMap { + #[inline] + fn extend>(&mut self, iter: T) { + self.try_extend(iter).abort(); + } +} + +impl<'a, K: Ord + Copy, V: Copy, A: Allocator + Clone> TryExtend<(&'a K, &'a V)> + for BTreeMap +{ + fn try_extend>(&mut self, iter: I) -> Result<(), Error> { + self.try_extend(iter.into_iter().map(|(&key, &value)| (key, value))) + } +} + +#[cfg(test)] +impl<'a, K: Ord + Copy, V: Copy, A: Allocator + Clone> Extend<(&'a K, &'a V)> + for BTreeMap +{ + fn extend>(&mut self, iter: I) { + self.try_extend(iter).abort(); + } +} + +impl Hash for BTreeMap { + fn hash(&self, state: &mut H) { + state.write_usize(self.len()); + for elt in self { + elt.hash(state); + } + } +} + +impl Default for BTreeMap { + /// Creates an empty `BTreeMap`. + fn default() -> BTreeMap { + BTreeMap::new() + } +} + +impl PartialEq for BTreeMap { + fn eq(&self, other: &BTreeMap) -> bool { + self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a == b) + } +} + +impl Eq for BTreeMap {} + +impl PartialOrd for BTreeMap { + #[inline] + fn partial_cmp(&self, other: &BTreeMap) -> Option { + self.iter().partial_cmp(other.iter()) + } +} + +impl Ord for BTreeMap { + #[inline] + fn cmp(&self, other: &BTreeMap) -> Ordering { + self.iter().cmp(other.iter()) + } +} + +impl Debug for BTreeMap { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_map().entries(self.iter()).finish() + } +} + +impl Index<&Q> for BTreeMap +where + K: Borrow + Ord, + Q: Ord, +{ + type Output = V; + + /// Returns a reference to the value corresponding to the supplied key. + /// + /// # Panics + /// + /// Panics if the key is not present in the `BTreeMap`. + #[inline] + fn index(&self, key: &Q) -> &V { + self.get(key).expect("no entry found for key") + } +} + +impl BTreeMap { + /// Gets an iterator over the entries of the map, sorted by key. 
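Bulk insertion goes through the fallible `TryExtend` implementation above, and `Index` panics on missing keys; a sketch, where the root-level `TryExtend` re-export is an assumption:

```rust
use rune_alloc::BTreeMap;
// Assumed root-level re-export of the trait implemented above.
use rune_alloc::TryExtend;

fn main() -> Result<(), rune_alloc::Error> {
    let mut map = BTreeMap::new();
    map.try_extend([(1, "a"), (2, "b")])?;

    // `Index` panics on a missing key, so it suits known-present lookups.
    assert_eq!(map[&1], "a");
    assert_eq!(map.len(), 2);
    Ok(())
}
```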
+ /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map = BTreeMap::new(); + /// map.try_insert(3, "c")?; + /// map.try_insert(2, "b")?; + /// map.try_insert(1, "a")?; + /// + /// for (key, value) in map.iter() { + /// println!("{key}: {value}"); + /// } + /// + /// let (first_key, first_value) = map.iter().next().unwrap(); + /// assert_eq!((*first_key, *first_value), (1, "a")); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn iter(&self) -> Iter<'_, K, V> { + if let Some(root) = &self.root { + let full_range = root.reborrow().full_range(); + + Iter { + range: full_range, + length: self.length, + } + } else { + Iter { + range: LazyLeafRange::none(), + length: 0, + } + } + } + + /// Perform a raw iteration over the btree. + /// + /// # Safety + /// + /// Caller must ensure that the returned iterator doesn't outlive `self`. + pub unsafe fn iter_raw(&self) -> IterRaw { + if let Some(root) = &self.root { + let full_range = root.raw().full_range(); + + IterRaw { + range: full_range, + length: self.length, + } + } else { + IterRaw { + range: LazyLeafRange::none(), + length: 0, + } + } + } + + /// Gets a mutable iterator over the entries of the map, sorted by key. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map = BTreeMap::try_from([ + /// ("a", 1), + /// ("b", 2), + /// ("c", 3), + /// ])?; + /// + /// // add 10 to the value if the key isn't "a" + /// for (key, value) in map.iter_mut() { + /// if key != &"a" { + /// *value += 10; + /// } + /// } + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn iter_mut(&mut self) -> IterMut<'_, K, V> { + if let Some(root) = &mut self.root { + let full_range = root.borrow_valmut().full_range(); + + IterMut { + range: full_range, + length: self.length, + _marker: PhantomData, + } + } else { + IterMut { + range: LazyLeafRange::none(), + length: 0, + _marker: PhantomData, + } + } + } + + /// Gets an iterator over the keys of the map, in sorted order. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut a = BTreeMap::new(); + /// a.try_insert(2, "b")?; + /// a.try_insert(1, "a")?; + /// + /// let keys: Vec<_> = a.keys().cloned().collect(); + /// assert_eq!(keys, [1, 2]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn keys(&self) -> Keys<'_, K, V> { + Keys { inner: self.iter() } + } + + /// Gets an iterator over the values of the map, in order by key. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::{BTreeMap, Vec, IteratorExt}; + /// + /// let mut a = BTreeMap::new(); + /// a.try_insert(1, "hello")?; + /// a.try_insert(2, "goodbye")?; + /// + /// let values: Vec<&str> = a.values().copied().try_collect()?; + /// assert_eq!(values, ["hello", "goodbye"]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn values(&self) -> Values<'_, K, V> { + Values { inner: self.iter() } + } + + /// Gets a mutable iterator over the values of the map, in order by key. 
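Among the iterators above, `iter_raw` is the only unsafe one and ships without an example; a sketch that honors its contract of not letting the iterator outlive the map:

```rust
use rune_alloc::BTreeMap;

fn main() -> Result<(), rune_alloc::Error> {
    let mut map = BTreeMap::new();
    map.try_insert(1, 10)?;
    map.try_insert(2, 20)?;

    let mut sum = 0;

    // SAFETY: the raw iterator is consumed before `map` is dropped, and the
    // map is not mutated while the yielded pointers are being read.
    unsafe {
        for (_k, v) in map.iter_raw() {
            sum += *v;
        }
    }

    assert_eq!(sum, 30);
    Ok(())
}
```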
+ /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::{BTreeMap, Vec, String, TryClone}; + /// + /// let mut a = BTreeMap::new(); + /// a.try_insert(1, String::try_from("hello")?)?; + /// a.try_insert(2, String::try_from("goodbye")?)?; + /// + /// for value in a.values_mut() { + /// value.try_push_str("!")?; + /// } + /// + /// let mut values = Vec::new(); + /// + /// for value in a.values() { + /// values.try_push(value.try_clone()?)?; + /// } + /// + /// assert_eq!(values, [String::try_from("hello!")?, + /// String::try_from("goodbye!")?]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn values_mut(&mut self) -> ValuesMut<'_, K, V> { + ValuesMut { + inner: self.iter_mut(), + } + } + + /// Returns the number of elements in the map. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut a = BTreeMap::new(); + /// assert_eq!(a.len(), 0); + /// a.try_insert(1, "a")?; + /// assert_eq!(a.len(), 1); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[must_use] + pub const fn len(&self) -> usize { + self.length + } + + /// Returns `true` if the map contains no elements. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut a = BTreeMap::new(); + /// assert!(a.is_empty()); + /// a.try_insert(1, "a")?; + /// assert!(!a.is_empty()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[must_use] + pub const fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Returns a [`Cursor`] pointing at the first element that is above the + /// given bound. + /// + /// If no such element exists then a cursor pointing at the "ghost" + /// non-element is returned. + /// + /// Passing [`Bound::Unbounded`] will return a cursor pointing at the first + /// element of the map. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// use std::ops::Bound; + /// + /// let mut a = BTreeMap::new(); + /// a.try_insert(1, "a")?; + /// a.try_insert(2, "b")?; + /// a.try_insert(3, "c")?; + /// a.try_insert(4, "c")?; + /// let cursor = a.lower_bound(Bound::Excluded(&2)); + /// assert_eq!(cursor.key(), Some(&3)); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn lower_bound(&self, bound: Bound<&Q>) -> Cursor<'_, K, V> + where + K: Borrow + Ord, + Q: Ord, + { + into_ok(self.lower_bound_with(&mut (), bound, infallible_cmp)) + } + + pub(crate) fn lower_bound_with( + &self, + cx: &mut C, + bound: Bound<&Q>, + cmp: CmpFn, + ) -> Result, E> + where + K: Borrow, + { + let Some(root_node) = self.root.as_ref().map(NodeRef::reborrow) else { + return Ok(Cursor { + current: None, + root: None, + }); + }; + + let edge = root_node.lower_bound(cx, SearchBound::from_range(bound), cmp)?; + + Ok(Cursor { + current: edge.next_kv().ok(), + root: self.root.as_ref(), + }) + } + + /// Returns a [`CursorMut`] pointing at the first element that is above the + /// given bound. + /// + /// If no such element exists then a cursor pointing at the "ghost" + /// non-element is returned. + /// + /// Passing [`Bound::Unbounded`] will return a cursor pointing at the first + /// element of the map. 
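+    ///
+    /// Unlike [`BTreeMap::lower_bound`], the returned cursor can mutate the
+    /// values it visits; keys remain read-only through the safe API.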
+ /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// use std::ops::Bound; + /// + /// let mut a = BTreeMap::new(); + /// a.try_insert(1, "a")?; + /// a.try_insert(2, "b")?; + /// a.try_insert(3, "c")?; + /// a.try_insert(4, "c")?; + /// let cursor = a.lower_bound_mut(Bound::Excluded(&2)); + /// assert_eq!(cursor.key(), Some(&3)); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn lower_bound_mut(&mut self, bound: Bound<&Q>) -> CursorMut<'_, K, V, A> + where + K: Borrow + Ord, + Q: Ord, + { + into_ok(self.lower_bound_mut_with(&mut (), bound, infallible_cmp)) + } + + pub(crate) fn lower_bound_mut_with( + &mut self, + cx: &mut C, + bound: Bound<&Q>, + cmp: CmpFn, + ) -> Result, E> + where + K: Borrow, + { + let (root, dormant_root) = DormantMutRef::new(&mut self.root); + + let Some(root_node) = root.as_mut().map(NodeRef::borrow_mut) else { + return Ok(CursorMut { + current: None, + root: dormant_root, + length: &mut self.length, + alloc: &mut *self.alloc, + }); + }; + + let edge = root_node.lower_bound(cx, SearchBound::from_range(bound), cmp)?; + + Ok(CursorMut { + current: edge.next_kv().ok(), + root: dormant_root, + length: &mut self.length, + alloc: &mut *self.alloc, + }) + } + + /// Returns a [`Cursor`] pointing at the last element that is below the + /// given bound. + /// + /// If no such element exists then a cursor pointing at the "ghost" + /// non-element is returned. + /// + /// Passing [`Bound::Unbounded`] will return a cursor pointing at the last + /// element of the map. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// use std::ops::Bound; + /// + /// let mut a = BTreeMap::new(); + /// a.try_insert(1, "a")?; + /// a.try_insert(2, "b")?; + /// a.try_insert(3, "c")?; + /// a.try_insert(4, "c")?; + /// let cursor = a.upper_bound(Bound::Excluded(&3)); + /// assert_eq!(cursor.key(), Some(&2)); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn upper_bound(&self, bound: Bound<&Q>) -> Cursor<'_, K, V> + where + K: Borrow + Ord, + Q: Ord, + { + into_ok(self.upper_bound_with(&mut (), bound, infallible_cmp)) + } + + pub(crate) fn upper_bound_with( + &self, + cx: &mut C, + bound: Bound<&Q>, + cmp: CmpFn, + ) -> Result, E> + where + K: Borrow, + { + let Some(root_node) = self.root.as_ref().map(NodeRef::reborrow) else { + return Ok(Cursor { + current: None, + root: None, + }); + }; + + let edge = root_node.upper_bound(cx, SearchBound::from_range(bound), cmp)?; + + Ok(Cursor { + current: edge.next_back_kv().ok(), + root: self.root.as_ref(), + }) + } + + /// Returns a [`CursorMut`] pointing at the last element that is below the + /// given bound. + /// + /// If no such element exists then a cursor pointing at the "ghost" + /// non-element is returned. + /// + /// Passing [`Bound::Unbounded`] will return a cursor pointing at the last + /// element of the map. 
+ /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// use std::ops::Bound; + /// + /// let mut a = BTreeMap::new(); + /// a.try_insert(1, "a")?; + /// a.try_insert(2, "b")?; + /// a.try_insert(3, "c")?; + /// a.try_insert(4, "c")?; + /// let cursor = a.upper_bound_mut(Bound::Excluded(&3)); + /// assert_eq!(cursor.key(), Some(&2)); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn upper_bound_mut(&mut self, bound: Bound<&Q>) -> CursorMut<'_, K, V, A> + where + K: Borrow, + Q: Ord, + { + into_ok(self.upper_bound_mut_with(&mut (), bound, infallible_cmp)) + } + + pub(crate) fn upper_bound_mut_with( + &mut self, + cx: &mut C, + bound: Bound<&Q>, + cmp: CmpFn, + ) -> Result, E> + where + K: Borrow, + { + let (root, dormant_root) = DormantMutRef::new(&mut self.root); + + let Some(root_node) = root.as_mut().map(NodeRef::borrow_mut) else { + return Ok(CursorMut { + current: None, + root: dormant_root, + length: &mut self.length, + alloc: &mut *self.alloc, + }); + }; + + let edge = root_node.upper_bound(cx, SearchBound::from_range(bound), cmp)?; + + Ok(CursorMut { + current: edge.next_back_kv().ok(), + root: dormant_root, + length: &mut self.length, + alloc: &mut *self.alloc, + }) + } +} + +/// A cursor over a `BTreeMap`. +/// +/// A `Cursor` is like an iterator, except that it can freely seek back-and-forth. +/// +/// Cursors always point to an element in the tree, and index in a logically circular way. +/// To accommodate this, there is a "ghost" non-element that yields `None` between the last and +/// first elements of the tree. +/// +/// A `Cursor` is created with the [`BTreeMap::lower_bound`] and [`BTreeMap::upper_bound`] methods. +pub struct Cursor<'a, K: 'a, V: 'a> { + current: Option, K, V, marker::LeafOrInternal>, marker::KV>>, + root: Option<&'a node::Root>, +} + +impl Clone for Cursor<'_, K, V> { + fn clone(&self) -> Self { + let Cursor { current, root } = *self; + Cursor { current, root } + } +} + +impl Debug for Cursor<'_, K, V> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("Cursor").field(&self.key_value()).finish() + } +} + +/// A cursor over a `BTreeMap` with editing operations. +/// +/// A `Cursor` is like an iterator, except that it can freely seek back-and-forth, and can +/// safely mutate the tree during iteration. This is because the lifetime of its yielded +/// references is tied to its own lifetime, instead of just the underlying tree. This means +/// cursors cannot yield multiple elements at once. +/// +/// Cursors always point to an element in the tree, and index in a logically circular way. +/// To accommodate this, there is a "ghost" non-element that yields `None` between the last and +/// first elements of the tree. +/// +/// A `Cursor` is created with the [`BTreeMap::lower_bound_mut`] and [`BTreeMap::upper_bound_mut`] +/// methods. +pub struct CursorMut<'a, K: 'a, V: 'a, A = Global> { + current: Option, K, V, marker::LeafOrInternal>, marker::KV>>, + root: DormantMutRef<'a, Option>>, + length: &'a mut usize, + alloc: &'a mut A, +} + +impl Debug for CursorMut<'_, K, V, A> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("CursorMut").field(&self.key_value()).finish() + } +} + +impl<'a, K, V> Cursor<'a, K, V> { + /// Moves the cursor to the next element of the `BTreeMap`. + /// + /// If the cursor is pointing to the "ghost" non-element then this will move it to + /// the first element of the `BTreeMap`. 
If it is pointing to the last + /// element of the `BTreeMap` then this will move it to the "ghost" non-element. + pub(crate) fn move_next(&mut self) { + match self.current.take() { + None => { + self.current = self.root.and_then(|root| { + root.reborrow() + .first_leaf_edge() + .forget_node_type() + .right_kv() + .ok() + }); + } + Some(current) => { + self.current = current.next_leaf_edge().next_kv().ok(); + } + } + } + + /// Moves the cursor to the previous element of the `BTreeMap`. + /// + /// If the cursor is pointing to the "ghost" non-element then this will move it to + /// the last element of the `BTreeMap`. If it is pointing to the first + /// element of the `BTreeMap` then this will move it to the "ghost" non-element. + pub(crate) fn move_prev(&mut self) { + match self.current.take() { + None => { + self.current = self.root.and_then(|root| { + root.reborrow() + .last_leaf_edge() + .forget_node_type() + .left_kv() + .ok() + }); + } + Some(current) => { + self.current = current.next_back_leaf_edge().next_back_kv().ok(); + } + } + } + + /// Returns a reference to the key of the element that the cursor is + /// currently pointing to. + /// + /// This returns `None` if the cursor is currently pointing to the "ghost" + /// non-element. + pub fn key(&self) -> Option<&'a K> { + self.current.as_ref().map(|current| current.into_kv().0) + } + + /// Returns a reference to the value of the element that the cursor is + /// currently pointing to. + /// + /// This returns `None` if the cursor is currently pointing to the "ghost" + /// non-element. + pub fn value(&self) -> Option<&'a V> { + self.current.as_ref().map(|current| current.into_kv().1) + } + + /// Returns a reference to the key and value of the element that the cursor + /// is currently pointing to. + /// + /// This returns `None` if the cursor is currently pointing to the "ghost" + /// non-element. + pub fn key_value(&self) -> Option<(&'a K, &'a V)> { + self.current.as_ref().map(|current| current.into_kv()) + } + + /// Returns a reference to the next element. + /// + /// If the cursor is pointing to the "ghost" non-element then this returns + /// the first element of the `BTreeMap`. If it is pointing to the last + /// element of the `BTreeMap` then this returns `None`. + pub(crate) fn peek_next(&self) -> Option<(&'a K, &'a V)> { + let mut next = self.clone(); + next.move_next(); + next.current.as_ref().map(|current| current.into_kv()) + } + + /// Returns a reference to the previous element. + /// + /// If the cursor is pointing to the "ghost" non-element then this returns + /// the last element of the `BTreeMap`. If it is pointing to the first + /// element of the `BTreeMap` then this returns `None`. + pub(crate) fn peek_prev(&self) -> Option<(&'a K, &'a V)> { + let mut prev = self.clone(); + prev.move_prev(); + prev.current.as_ref().map(|current| current.into_kv()) + } +} + +impl<'a, K, V, A> CursorMut<'a, K, V, A> { + /// Moves the cursor to the next element of the `BTreeMap`. + /// + /// If the cursor is pointing to the "ghost" non-element then this will move it to + /// the first element of the `BTreeMap`. If it is pointing to the last + /// element of the `BTreeMap` then this will move it to the "ghost" non-element. + pub(crate) fn move_next(&mut self) { + match self.current.take() { + None => { + // SAFETY: The previous borrow of root has ended. 
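+                // (Editorial note: the only handle derived from `root` is
+                // `self.current`, and it was moved out by `take()` above, so
+                // reborrowing the dormant reference cannot alias anything.)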
+ self.current = unsafe { self.root.reborrow() }.as_mut().and_then(|root| { + root.borrow_mut() + .first_leaf_edge() + .forget_node_type() + .right_kv() + .ok() + }); + } + Some(current) => { + self.current = current.next_leaf_edge().next_kv().ok(); + } + } + } + + /// Moves the cursor to the previous element of the `BTreeMap`. + /// + /// If the cursor is pointing to the "ghost" non-element then this will move it to + /// the last element of the `BTreeMap`. If it is pointing to the first + /// element of the `BTreeMap` then this will move it to the "ghost" non-element. + pub(crate) fn move_prev(&mut self) { + match self.current.take() { + None => { + // SAFETY: The previous borrow of root has ended. + self.current = unsafe { self.root.reborrow() }.as_mut().and_then(|root| { + root.borrow_mut() + .last_leaf_edge() + .forget_node_type() + .left_kv() + .ok() + }); + } + Some(current) => { + self.current = current.next_back_leaf_edge().next_back_kv().ok(); + } + } + } + + /// Returns a reference to the key of the element that the cursor is + /// currently pointing to. + /// + /// This returns `None` if the cursor is currently pointing to the "ghost" + /// non-element. + pub fn key(&self) -> Option<&K> { + self.current + .as_ref() + .map(|current| current.reborrow().into_kv().0) + } + + /// Returns a reference to the value of the element that the cursor is + /// currently pointing to. + /// + /// This returns `None` if the cursor is currently pointing to the "ghost" + /// non-element. + pub fn value(&self) -> Option<&V> { + self.current + .as_ref() + .map(|current| current.reborrow().into_kv().1) + } + + /// Returns a reference to the key and value of the element that the cursor + /// is currently pointing to. + /// + /// This returns `None` if the cursor is currently pointing to the "ghost" + /// non-element. + pub fn key_value(&self) -> Option<(&K, &V)> { + self.current + .as_ref() + .map(|current| current.reborrow().into_kv()) + } + + /// Returns a mutable reference to the value of the element that the cursor + /// is currently pointing to. + /// + /// This returns `None` if the cursor is currently pointing to the "ghost" + /// non-element. + pub fn value_mut(&mut self) -> Option<&mut V> { + self.current.as_mut().map(|current| current.kv_mut().1) + } + + /// Returns a reference to the key and mutable reference to the value of the + /// element that the cursor is currently pointing to. + /// + /// This returns `None` if the cursor is currently pointing to the "ghost" + /// non-element. + pub fn key_value_mut(&mut self) -> Option<(&K, &mut V)> { + self.current.as_mut().map(|current| { + let (k, v) = current.kv_mut(); + (&*k, v) + }) + } + + /// Returns a mutable reference to the key of the element that the cursor is + /// currently pointing to. + /// + /// This returns `None` if the cursor is currently pointing to the + /// "ghost" non-element. + /// + /// # Safety + /// + /// This can be used to modify the key, but you must ensure that the + /// `BTreeMap` invariants are maintained. Specifically: + /// + /// * The key must remain unique within the tree. + /// * The key must remain in sorted order with regards to other elements in + /// the tree. + pub(crate) unsafe fn key_mut_unchecked(&mut self) -> Option<&mut K> { + self.current.as_mut().map(|current| current.kv_mut().0) + } + + /// Returns a reference to the key and value of the next element. + /// + /// If the cursor is pointing to the "ghost" non-element then this returns + /// the first element of the `BTreeMap`. 
If it is pointing to the last + /// element of the `BTreeMap` then this returns `None`. + pub(crate) fn peek_next(&mut self) -> Option<(&K, &mut V)> { + let (k, v) = match self.current { + None => { + // SAFETY: The previous borrow of root has ended. + unsafe { self.root.reborrow() } + .as_mut()? + .borrow_mut() + .first_leaf_edge() + .next_kv() + .ok()? + .into_kv_valmut() + } + // SAFETY: We're not using this to mutate the tree. + Some(ref mut current) => unsafe { current.reborrow_mut() } + .next_leaf_edge() + .next_kv() + .ok()? + .into_kv_valmut(), + }; + Some((k, v)) + } + + /// Returns a reference to the key and value of the previous element. + /// + /// If the cursor is pointing to the "ghost" non-element then this returns + /// the last element of the `BTreeMap`. If it is pointing to the first + /// element of the `BTreeMap` then this returns `None`. + pub(crate) fn peek_prev(&mut self) -> Option<(&K, &mut V)> { + let (k, v) = match self.current.as_mut() { + None => { + // SAFETY: The previous borrow of root has ended. + unsafe { self.root.reborrow() } + .as_mut()? + .borrow_mut() + .last_leaf_edge() + .next_back_kv() + .ok()? + .into_kv_valmut() + } + Some(current) => { + // SAFETY: We're not using this to mutate the tree. + unsafe { current.reborrow_mut() } + .next_back_leaf_edge() + .next_back_kv() + .ok()? + .into_kv_valmut() + } + }; + Some((k, v)) + } + + /// Returns a read-only cursor pointing to the current element. + /// + /// The lifetime of the returned `Cursor` is bound to that of the + /// `CursorMut`, which means it cannot outlive the `CursorMut` and that the + /// `CursorMut` is frozen for the lifetime of the `Cursor`. + pub(crate) fn as_cursor(&self) -> Cursor<'_, K, V> { + Cursor { + // SAFETY: The tree is immutable while the cursor exists. + root: unsafe { self.root.reborrow_shared().as_ref() }, + current: self.current.as_ref().map(|current| current.reborrow()), + } + } +} + +// Now the tree editing operations +impl<'a, K: Ord, V, A: Allocator> CursorMut<'a, K, V, A> { + /// Inserts a new element into the `BTreeMap` after the current one. + /// + /// If the cursor is pointing at the "ghost" non-element then the new element is + /// inserted at the front of the `BTreeMap`. + /// + /// # Safety + /// + /// You must ensure that the `BTreeMap` invariants are maintained. + /// Specifically: + /// + /// * The key of the newly inserted element must be unique in the tree. + /// * All keys in the tree must remain in sorted order. + pub(crate) unsafe fn try_insert_after_unchecked( + &mut self, + key: K, + value: V, + ) -> Result<(), AllocError> { + let edge = match self.current.take() { + None => { + // SAFETY: We have no other reference to the tree. + match unsafe { self.root.reborrow() } { + root @ None => { + // Tree is empty, allocate a new root. + let mut node = NodeRef::new_leaf(self.alloc)?; + node.borrow_mut().push(key, value); + *root = Some(node.forget_type()); + *self.length += 1; + return Ok(()); + } + Some(root) => root.borrow_mut().first_leaf_edge(), + } + } + Some(current) => current.next_leaf_edge(), + }; + + let handle = edge.insert_recursing(key, value, self.alloc, |ins| { + drop(ins.left); + // SAFETY: The handle to the newly inserted value is always on a + // leaf node, so adding a new root node doesn't invalidate it. + let root = unsafe { self.root.reborrow().as_mut().unwrap() }; + root.push_internal_level(self.alloc)? 
+ .push(ins.kv.0, ins.kv.1, ins.right); + Ok(()) + })?; + self.current = handle.left_edge().next_back_kv().ok(); + *self.length += 1; + Ok(()) + } + + /// Inserts a new element into the `BTreeMap` before the current one. + /// + /// If the cursor is pointing at the "ghost" non-element then the new element is + /// inserted at the end of the `BTreeMap`. + /// + /// # Safety + /// + /// You must ensure that the `BTreeMap` invariants are maintained. + /// Specifically: + /// + /// * The key of the newly inserted element must be unique in the tree. + /// * All keys in the tree must remain in sorted order. + pub(crate) unsafe fn try_insert_before_unchecked( + &mut self, + key: K, + value: V, + ) -> Result<(), AllocError> { + let edge = match self.current.take() { + None => { + // SAFETY: We have no other reference to the tree. + match unsafe { self.root.reborrow() } { + root @ None => { + // Tree is empty, allocate a new root. + let mut node = NodeRef::new_leaf(self.alloc)?; + node.borrow_mut().push(key, value); + *root = Some(node.forget_type()); + *self.length += 1; + return Ok(()); + } + Some(root) => root.borrow_mut().last_leaf_edge(), + } + } + Some(current) => current.next_back_leaf_edge(), + }; + + let handle = edge.insert_recursing(key, value, self.alloc, |ins| { + drop(ins.left); + // SAFETY: The handle to the newly inserted value is always on a + // leaf node, so adding a new root node doesn't invalidate it. + let root = unsafe { self.root.reborrow().as_mut().unwrap() }; + root.push_internal_level(self.alloc)? + .push(ins.kv.0, ins.kv.1, ins.right); + Ok(()) + })?; + self.current = handle.right_edge().next_kv().ok(); + *self.length += 1; + Ok(()) + } + + /// Inserts a new element into the `BTreeMap` after the current one. + /// + /// If the cursor is pointing at the "ghost" non-element then the new element is + /// inserted at the front of the `BTreeMap`. + /// + /// # Panics + /// + /// This function panics if: + /// - the given key compares less than or equal to the current element (if + /// any). + /// - the given key compares greater than or equal to the next element (if + /// any). + pub(crate) fn try_insert_after(&mut self, key: K, value: V) -> Result<(), AllocError> { + if let Some(current) = self.key() { + if &key <= current { + panic!("key must be ordered above the current element"); + } + } + if let Some((next, _)) = self.peek_next() { + if &key >= next { + panic!("key must be ordered below the next element"); + } + } + unsafe { + self.try_insert_after_unchecked(key, value)?; + } + Ok(()) + } + + #[cfg(test)] + pub(crate) fn insert_after(&mut self, key: K, value: V) { + self.try_insert_after(key, value).abort() + } + + /// Inserts a new element into the `BTreeMap` before the current one. + /// + /// If the cursor is pointing at the "ghost" non-element then the new element is + /// inserted at the end of the `BTreeMap`. + /// + /// # Panics + /// + /// This function panics if: + /// - the given key compares greater than or equal to the current element + /// (if any). + /// - the given key compares less than or equal to the previous element (if + /// any). 
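+    ///
+    /// Allocation failure is reported through the returned `Result`; only the
+    /// ordering violations above cause a panic.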
+ pub(crate) fn try_insert_before(&mut self, key: K, value: V) -> Result<(), AllocError> { + if let Some(current) = self.key() { + if &key >= current { + panic!("key must be ordered below the current element"); + } + } + if let Some((prev, _)) = self.peek_prev() { + if &key <= prev { + panic!("key must be ordered above the previous element"); + } + } + unsafe { + self.try_insert_before_unchecked(key, value)?; + } + Ok(()) + } + + #[cfg(test)] + pub(crate) fn insert_before(&mut self, key: K, value: V) { + self.try_insert_before(key, value).abort() + } + + /// Removes the current element from the `BTreeMap`. + /// + /// The element that was removed is returned, and the cursor is + /// moved to point to the next element in the `BTreeMap`. + /// + /// If the cursor is currently pointing to the "ghost" non-element then no element + /// is removed and `None` is returned. The cursor is not moved in this case. + pub(crate) fn remove_current(&mut self) -> Option<(K, V)> { + let current = self.current.take()?; + let mut emptied_internal_root = false; + let (kv, pos) = current.remove_kv_tracking(|| emptied_internal_root = true, self.alloc); + self.current = pos.next_kv().ok(); + *self.length -= 1; + if emptied_internal_root { + // SAFETY: This is safe since current does not point within the now + // empty root node. + let root = unsafe { self.root.reborrow().as_mut().unwrap() }; + root.pop_internal_level(self.alloc); + } + Some(kv) + } + + /// Removes the current element from the `BTreeMap`. + /// + /// The element that was removed is returned, and the cursor is + /// moved to point to the previous element in the `BTreeMap`. + /// + /// If the cursor is currently pointing to the "ghost" non-element then no element + /// is removed and `None` is returned. The cursor is not moved in this case. + pub(crate) fn remove_current_and_move_back(&mut self) -> Option<(K, V)> { + let current = self.current.take()?; + let mut emptied_internal_root = false; + let (kv, pos) = current.remove_kv_tracking(|| emptied_internal_root = true, self.alloc); + self.current = pos.next_back_kv().ok(); + *self.length -= 1; + + if emptied_internal_root { + // SAFETY: This is safe since current does not point within the now + // empty root node. 
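+            // (Editorial note: `pop_internal_level` discards the emptied root
+            // and promotes its only child, shrinking the tree height by one.)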
+ let root = unsafe { self.root.reborrow().as_mut().unwrap() }; + root.pop_internal_level(self.alloc); + } + + Some(kv) + } +} + +impl TryFromIteratorIn<(K, V), A> for BTreeMap +where + K: Ord, +{ + #[inline] + fn try_from_iter_in(iter: I, alloc: A) -> Result + where + I: IntoIterator, + { + let mut this = BTreeMap::new_in(alloc); + + for (key, value) in iter { + this.try_insert(key, value)?; + } + + Ok(this) + } +} + +#[cfg(test)] +impl FromIterator<(K, V)> for BTreeMap +where + K: Ord, +{ + fn from_iter(iter: I) -> Self + where + I: IntoIterator, + { + Self::try_from_iter_in(iter, Global).abort() + } +} + +impl TryFrom<[(K, V); N]> for BTreeMap +where + K: Ord, +{ + type Error = Error; + + #[inline] + fn try_from(values: [(K, V); N]) -> Result { + let mut this = BTreeMap::new(); + + for (key, value) in values { + this.try_insert(key, value)?; + } + + Ok(this) + } +} + +#[cfg(test)] +mod tests; diff --git a/crates/rune-alloc/src/alloc/btree/map/entry.rs b/crates/rune-alloc/src/alloc/btree/map/entry.rs new file mode 100644 index 000000000..3d64b69af --- /dev/null +++ b/crates/rune-alloc/src/alloc/btree/map/entry.rs @@ -0,0 +1,548 @@ +use core::fmt::{self, Debug}; +use core::marker::PhantomData; +use core::mem; + +use crate::alloc::{Allocator, Global}; + +use super::super::borrow::DormantMutRef; +use super::super::node::{marker, Handle, NodeRef}; +use super::BTreeMap; + +#[cfg(test)] +use crate::alloc::testing::*; +use crate::alloc::AllocError; + +use Entry::*; + +/// A view into a single entry in a map, which may either be vacant or occupied. +/// +/// This `enum` is constructed from the [`entry`] method on [`BTreeMap`]. +/// +/// [`entry`]: BTreeMap::entry +pub enum Entry<'a, K: 'a, V: 'a, A: Allocator = Global> { + /// A vacant entry. + Vacant(VacantEntry<'a, K, V, A>), + + /// An occupied entry. + Occupied(OccupiedEntry<'a, K, V, A>), +} + +impl Debug for Entry<'_, K, V, A> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(), + Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(), + } + } +} + +/// A view into a vacant entry in a `BTreeMap`. +/// It is part of the [`Entry`] enum. +pub struct VacantEntry<'a, K, V, A: Allocator = Global> { + pub(super) key: K, + /// `None` for a (empty) map without root + pub(super) handle: Option, K, V, marker::Leaf>, marker::Edge>>, + pub(super) dormant_map: DormantMutRef<'a, BTreeMap>, + + /// The BTreeMap will outlive this IntoIter so we don't care about drop order for `alloc`. + pub(super) alloc: &'a A, + + // Be invariant in `K` and `V` + pub(super) _marker: PhantomData<&'a mut (K, V)>, +} + +impl Debug for VacantEntry<'_, K, V, A> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("VacantEntry").field(self.key()).finish() + } +} + +/// A view into an occupied entry in a `BTreeMap`. +/// It is part of the [`Entry`] enum. +pub struct OccupiedEntry<'a, K, V, A: Allocator = Global> { + pub(super) handle: Handle, K, V, marker::LeafOrInternal>, marker::KV>, + pub(super) dormant_map: DormantMutRef<'a, BTreeMap>, + + /// The BTreeMap will outlive this IntoIter so we don't care about drop order for `alloc`. 
+ pub(super) alloc: &'a A, + + // Be invariant in `K` and `V` + pub(super) _marker: PhantomData<&'a mut (K, V)>, +} + +impl Debug for OccupiedEntry<'_, K, V, A> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("OccupiedEntry") + .field("key", self.key()) + .field("value", self.get()) + .finish() + } +} + +/// The error returned by [`try_insert`](BTreeMap::try_insert) when the key already exists. +/// +/// Contains the occupied entry, and the value that was not inserted. +pub struct OccupiedError<'a, K: 'a, V: 'a, A: Allocator = Global> { + /// The entry in the map that was already occupied. + pub entry: OccupiedEntry<'a, K, V, A>, + /// The value which was not inserted, because the entry was already occupied. + pub value: V, +} + +impl Debug for OccupiedError<'_, K, V, A> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("OccupiedError") + .field("key", self.entry.key()) + .field("old_value", self.entry.get()) + .field("new_value", &self.value) + .finish() + } +} + +impl<'a, K: Debug + Ord, V: Debug, A: Allocator> fmt::Display for OccupiedError<'a, K, V, A> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "failed to insert {:?}, key {:?} already exists with value {:?}", + self.value, + self.entry.key(), + self.entry.get(), + ) + } +} + +impl<'a, K: Ord, V, A: Allocator> Entry<'a, K, V, A> { + /// Ensures a value is in the entry by inserting the default if empty, and + /// returns a mutable reference to the value in the entry. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// map.entry("poneyland").or_try_insert(12)?; + /// + /// assert_eq!(map["poneyland"], 12); + /// # Ok::<_, rune_alloc::AllocError>(()) + /// ``` + pub fn or_try_insert(self, default: V) -> Result<&'a mut V, AllocError> { + match self { + Occupied(entry) => Ok(entry.into_mut()), + Vacant(entry) => entry.try_insert(default), + } + } + + /// Ensures a value is in the entry by inserting the result of the default + /// function if empty, and returns a mutable reference to the value in the + /// entry. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map: BTreeMap<&str, String> = BTreeMap::new(); + /// let s = "hoho".to_string(); + /// + /// map.entry("poneyland").or_try_insert_with(|| s)?; + /// + /// assert_eq!(map["poneyland"], "hoho".to_string()); + /// # Ok::<_, rune_alloc::AllocError>(()) + /// ``` + pub fn or_try_insert_with V>(self, default: F) -> Result<&'a mut V, AllocError> { + match self { + Occupied(entry) => Ok(entry.into_mut()), + Vacant(entry) => entry.try_insert(default()), + } + } + + /// Ensures a value is in the entry by inserting, if empty, the result of + /// the default function. This method allows for generating key-derived + /// values for insertion by providing the default function a reference to + /// the key that was moved during the `.entry(key)` method call. + /// + /// The reference to the moved key is provided so that cloning or copying + /// the key is unnecessary, unlike with `.or_insert_with(|| ... )`. 
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// + /// map.entry("poneyland").or_try_insert_with_key(|key| key.chars().count())?; + /// + /// assert_eq!(map["poneyland"], 9); + /// # Ok::<_, rune_alloc::AllocError>(()) + /// ``` + #[inline] + pub fn or_try_insert_with_key V>( + self, + default: F, + ) -> Result<&'a mut V, AllocError> { + match self { + Occupied(entry) => Ok(entry.into_mut()), + Vacant(entry) => { + let value = default(entry.key()); + entry.try_insert(value) + } + } + } + + /// Returns a reference to this entry's key. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// assert_eq!(map.entry("poneyland").key(), &"poneyland"); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn key(&self) -> &K { + match *self { + Occupied(ref entry) => entry.key(), + Vacant(ref entry) => entry.key(), + } + } + + /// Provides in-place mutable access to an occupied entry before any + /// potential inserts into the map. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// + /// map.entry("poneyland") + /// .and_modify(|e| { *e += 1 }) + /// .or_try_insert(42)?; + /// assert_eq!(map["poneyland"], 42); + /// + /// map.entry("poneyland") + /// .and_modify(|e| { *e += 1 }) + /// .or_try_insert(42)?; + /// assert_eq!(map["poneyland"], 43); + /// # Ok::<(), rune_alloc::AllocError>(()) + /// ``` + pub fn and_modify(self, f: F) -> Self + where + F: FnOnce(&mut V), + { + match self { + Occupied(mut entry) => { + f(entry.get_mut()); + Occupied(entry) + } + Vacant(entry) => Vacant(entry), + } + } +} + +impl<'a, K: Ord, V: Default, A: Allocator> Entry<'a, K, V, A> { + /// Ensures a value is in the entry by inserting the default value if empty, + /// and returns a mutable reference to the value in the entry. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map: BTreeMap<&str, Option> = BTreeMap::new(); + /// map.entry("poneyland").or_try_default()?; + /// + /// assert_eq!(map["poneyland"], None); + /// # Ok::<(), rune_alloc::AllocError>(()) + /// ``` + pub fn or_try_default(self) -> Result<&'a mut V, AllocError> { + match self { + Occupied(entry) => Ok(entry.into_mut()), + Vacant(entry) => entry.try_insert(Default::default()), + } + } +} + +impl<'a, K, V, A: Allocator> VacantEntry<'a, K, V, A> { + /// Gets a reference to the key that would be used when inserting a value + /// through the VacantEntry. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// assert_eq!(map.entry("poneyland").key(), &"poneyland"); + /// ``` + pub fn key(&self) -> &K { + &self.key + } + + /// Take ownership of the key. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// use rune_alloc::btree_map::Entry; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// + /// if let Entry::Vacant(v) = map.entry("poneyland") { + /// v.into_key(); + /// } + /// ``` + pub fn into_key(self) -> K { + self.key + } + + /// Sets the value of the entry with the `VacantEntry`'s key, + /// and returns a mutable reference to it. 
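+    ///
+    /// Inserting may require allocating a new node or splitting an existing
+    /// one, which is why this operation is fallible.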
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// use rune_alloc::btree_map::Entry; + /// + /// let mut map: BTreeMap<&str, u32> = BTreeMap::new(); + /// + /// if let Entry::Vacant(o) = map.entry("poneyland") { + /// o.try_insert(37)?; + /// } + /// + /// assert_eq!(map["poneyland"], 37); + /// # Ok::<_, rune_alloc::AllocError>(()) + /// ``` + pub fn try_insert(mut self, value: V) -> Result<&'a mut V, AllocError> { + let out_ptr = match self.handle { + None => { + // SAFETY: There is no tree yet so no reference to it exists. + let map = unsafe { self.dormant_map.awaken() }; + let mut root = NodeRef::new_leaf(self.alloc)?; + let val_ptr = root.borrow_mut().push(self.key, value) as *mut V; + map.root = Some(root.forget_type()); + map.length = 1; + val_ptr + } + Some(handle) => { + let new_handle = handle.insert_recursing(self.key, value, self.alloc, |ins| { + drop(ins.left); + // SAFETY: Pushing a new root node doesn't invalidate + // handles to existing nodes. + let map = unsafe { self.dormant_map.reborrow() }; + let root = map.root.as_mut().unwrap(); // same as ins.left + root.push_internal_level(self.alloc)? + .push(ins.kv.0, ins.kv.1, ins.right); + Ok(()) + })?; + + // Get the pointer to the value + let val_ptr = new_handle.into_val_mut(); + + // SAFETY: We have consumed self.handle. + let map = unsafe { self.dormant_map.awaken() }; + map.length += 1; + val_ptr + } + }; + + // Now that we have finished growing the tree using borrowed references, + // dereference the pointer to a part of it, that we picked up along the way. + Ok(unsafe { &mut *out_ptr }) + } + + #[cfg(test)] + pub(crate) fn insert(self, value: V) -> &'a mut V { + self.try_insert(value).abort() + } +} + +impl<'a, K, V, A: Allocator> OccupiedEntry<'a, K, V, A> { + /// Gets a reference to the key in the entry. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// map.entry("poneyland").or_try_insert(12)?; + /// assert_eq!(map.entry("poneyland").key(), &"poneyland"); + /// # Ok::<_, rune_alloc::AllocError>(()) + /// ``` + #[must_use] + pub fn key(&self) -> &K { + self.handle.reborrow().into_kv().0 + } + + /// Take ownership of the key and value from the map. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// use rune_alloc::btree_map::Entry; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// map.entry("poneyland").or_try_insert(12)?; + /// + /// if let Entry::Occupied(o) = map.entry("poneyland") { + /// // We delete the entry from the map. + /// assert_eq!(o.remove_entry(), ("poneyland", 12)); + /// } + /// + /// // If now try to get the value, it will panic: + /// // println!("{}", map["poneyland"]); + /// # Ok::<_, rune_alloc::AllocError>(()) + /// ``` + pub fn remove_entry(self) -> (K, V) { + self.remove_kv() + } + + /// Gets a reference to the value in the entry. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// use rune_alloc::btree_map::Entry; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// map.entry("poneyland").or_try_insert(12)?; + /// + /// if let Entry::Occupied(o) = map.entry("poneyland") { + /// assert_eq!(o.get(), &12); + /// } + /// # Ok::<_, rune_alloc::AllocError>(()) + /// ``` + #[must_use] + pub fn get(&self) -> &V { + self.handle.reborrow().into_kv().1 + } + + /// Gets a mutable reference to the value in the entry. 
+ /// + /// If you need a reference to the `OccupiedEntry` that may outlive the + /// destruction of the `Entry` value, see [`into_mut`]. + /// + /// [`into_mut`]: OccupiedEntry::into_mut + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// use rune_alloc::btree_map::Entry; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// map.entry("poneyland").or_try_insert(12)?; + /// + /// assert_eq!(map["poneyland"], 12); + /// if let Entry::Occupied(mut o) = map.entry("poneyland") { + /// *o.get_mut() += 10; + /// assert_eq!(*o.get(), 22); + /// + /// // We can use the same Entry multiple times. + /// *o.get_mut() += 2; + /// } + /// assert_eq!(map["poneyland"], 24); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn get_mut(&mut self) -> &mut V { + self.handle.kv_mut().1 + } + + /// Converts the entry into a mutable reference to its value. + /// + /// If you need multiple references to the `OccupiedEntry`, see [`get_mut`]. + /// + /// [`get_mut`]: OccupiedEntry::get_mut + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// use rune_alloc::btree_map::Entry; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// map.entry("poneyland").or_try_insert(12)?; + /// + /// assert_eq!(map["poneyland"], 12); + /// if let Entry::Occupied(o) = map.entry("poneyland") { + /// *o.into_mut() += 10; + /// } + /// assert_eq!(map["poneyland"], 22); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[must_use = "`self` will be dropped if the result is not used"] + pub fn into_mut(self) -> &'a mut V { + self.handle.into_val_mut() + } + + /// Sets the value of the entry with the `OccupiedEntry`'s key, + /// and returns the entry's old value. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// use rune_alloc::btree_map::Entry; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// map.entry("poneyland").or_try_insert(12)?; + /// + /// if let Entry::Occupied(mut o) = map.entry("poneyland") { + /// assert_eq!(o.insert(15), 12); + /// } + /// + /// assert_eq!(map["poneyland"], 15); + /// # Ok::<_, rune_alloc::AllocError>(()) + /// ``` + pub fn insert(&mut self, value: V) -> V { + mem::replace(self.get_mut(), value) + } + + /// Takes the value of the entry out of the map, and returns it. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeMap; + /// use rune_alloc::btree_map::Entry; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// map.entry("poneyland").or_try_insert(12)?; + /// + /// if let Entry::Occupied(o) = map.entry("poneyland") { + /// assert_eq!(o.remove(), 12); + /// } + /// + /// // If we try to get "poneyland"'s value, it'll panic: + /// // println!("{}", map["poneyland"]); + /// # Ok::<_, rune_alloc::AllocError>(()) + /// ``` + pub fn remove(self) -> V { + self.remove_kv().1 + } + + // Body of `remove_entry`, probably separate because the name reflects the returned pair. + pub(super) fn remove_kv(self) -> (K, V) { + let mut emptied_internal_root = false; + let (old_kv, _) = self + .handle + .remove_kv_tracking(|| emptied_internal_root = true, self.alloc); + // SAFETY: we consumed the intermediate root borrow, `self.handle`. 
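+        // (Editorial note: awakening the dormant map reference recovers the
+        // original `&mut BTreeMap`, letting us update `length` and, if the
+        // root was emptied, shrink the tree below.)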
+ let map = unsafe { self.dormant_map.awaken() }; + map.length -= 1; + if emptied_internal_root { + let root = map.root.as_mut().unwrap(); + root.pop_internal_level(self.alloc); + } + old_kv + } +} diff --git a/crates/rune-alloc/src/alloc/btree/map/tests.rs b/crates/rune-alloc/src/alloc/btree/map/tests.rs new file mode 100644 index 000000000..2d60dd737 --- /dev/null +++ b/crates/rune-alloc/src/alloc/btree/map/tests.rs @@ -0,0 +1,2645 @@ +#![allow(clippy::ifs_same_cond)] +#![allow(clippy::redundant_closure)] +#![allow(clippy::useless_vec)] + +use core::fmt::Debug; +use core::sync::atomic::{AtomicUsize, Ordering::SeqCst}; + +use rust_alloc::boxed::Box; +use rust_alloc::rc::Rc; + +use ::rust_std::cmp::Ordering; +use ::rust_std::iter; +use ::rust_std::mem; +use ::rust_std::ops::Bound::{self, Excluded, Included, Unbounded}; +use ::rust_std::ops::RangeBounds; +use ::rust_std::panic::{catch_unwind, AssertUnwindSafe}; + +use rust_alloc::vec; + +use crate::alloc::TryClone; +use crate::string::{String, TryToString}; +use crate::testing::crash_test::{CrashTestDummy, Panic}; +use crate::testing::ord_chaos::{Cyclic3, Governed, Governor}; +use crate::testing::rng::DeterministicRng; +use crate::vec::Vec; + +use super::Entry::{Occupied, Vacant}; +use super::*; + +macro_rules! assert_matches { + ($expr:expr, $pat:pat) => { + assert!(matches!($expr, $pat)); + }; +} + +// Minimum number of elements to insert, to guarantee a tree with 2 levels, +// i.e., a tree who's root is an internal node at height 1, with edges to leaf nodes. +// It's not the minimum size: removing an element from such a tree does not always reduce height. +const MIN_INSERTS_HEIGHT_1: usize = node::CAPACITY + 1; + +// Minimum number of elements to insert in ascending order, to guarantee a tree with 3 levels, +// i.e., a tree who's root is an internal node at height 2, with edges to more internal nodes. +// It's not the minimum size: removing an element from such a tree does not always reduce height. +const MIN_INSERTS_HEIGHT_2: usize = 89; + +// Gathers all references from a mutable iterator and makes sure Miri notices if +// using them is dangerous. +fn test_all_refs<'a, T: 'a>(dummy: &mut T, iter: impl Iterator) { + // Gather all those references. + let mut refs: Vec<&mut T> = iter.collect(); + // Use them all. Twice, to be sure we got all interleavings. + for r in refs.iter_mut() { + mem::swap(dummy, r); + } + for r in refs { + mem::swap(dummy, r); + } +} + +impl BTreeMap { + // Panics if the map (or the code navigating it) is corrupted. + fn check_invariants(&self) { + if let Some(root) = &self.root { + let root_node = root.reborrow(); + + // Check the back pointers top-down, before we attempt to rely on + // more serious navigation code. + assert!(root_node.ascend().is_err()); + root_node.assert_back_pointers(); + + // Check consistency of `length` with what navigation code encounters. + assert_eq!(self.length, root_node.calc_length()); + + // Lastly, check the invariant causing the least harm. + root_node.assert_min_len(if root_node.height() > 0 { 1 } else { 0 }); + } else { + assert_eq!(self.length, 0); + } + + // Check that `assert_strictly_ascending` will encounter all keys. + assert_eq!(self.length, self.keys().count()); + } + + // Panics if the map is corrupted or if the keys are not in strictly + // ascending order, in the current opinion of the `Ord` implementation. + // If the `Ord` implementation violates transitivity, this method does not + // guarantee that all keys are unique, just that adjacent keys are unique. 
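+    // (The ord_chaos tests below exercise exactly such misbehaving `Ord`
+    // implementations.)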
+ fn check(&self) + where + K: Debug + Ord, + { + self.check_invariants(); + self.assert_strictly_ascending(); + } + + // Returns the height of the root, if any. + fn height(&self) -> Option { + self.root.as_ref().map(node::Root::height) + } + + fn dump_keys(&self) -> rust_alloc::string::String + where + K: Debug, + { + if let Some(root) = self.root.as_ref() { + root.reborrow().dump_keys() + } else { + rust_alloc::string::String::from("not yet allocated") + } + } + + // Panics if the keys are not in strictly ascending order. + fn assert_strictly_ascending(&self) + where + K: Debug + Ord, + { + let mut keys = self.keys(); + if let Some(mut previous) = keys.next() { + for next in keys { + assert!(previous < next, "{:?} >= {:?}", previous, next); + previous = next; + } + } + } + + // Transform the tree to minimize wasted space, obtaining fewer nodes that + // are mostly filled up to their capacity. The same compact tree could have + // been obtained by inserting keys in a shrewd order. + fn compact(&mut self) + where + K: Ord, + { + let iter = mem::take(self).into_iter(); + if iter.len() != 0 { + self.root.insert(Root::new(&*self.alloc).abort()).bulk_push( + iter, + &mut self.length, + &*self.alloc, + ); + } + } +} + +impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> { + fn assert_min_len(self, min_len: usize) { + assert!( + self.len() >= min_len, + "node len {} < {}", + self.len(), + min_len + ); + if let node::ForceResult::Internal(node) = self.force() { + for idx in 0..=node.len() { + let edge = unsafe { Handle::new_edge(node, idx) }; + edge.descend().assert_min_len(MIN_LEN); + } + } + } +} + +// Tests our value of MIN_INSERTS_HEIGHT_2. Failure may mean you just need to +// adapt that value to match a change in node::CAPACITY or the choices made +// during insertion, otherwise other test cases may fail or be less useful. +#[test] +fn test_levels() { + let mut map = BTreeMap::new(); + map.check(); + assert_eq!(map.height(), None); + assert_eq!(map.len(), 0); + + map.insert(0, ()); + while map.height() == Some(0) { + let last_key = *map.last_key_value().unwrap().0; + map.insert(last_key + 1, ()); + } + map.check(); + // Structure: + // - 1 element in internal root node with 2 children + // - 6 elements in left leaf child + // - 5 elements in right leaf child + assert_eq!(map.height(), Some(1)); + assert_eq!(map.len(), MIN_INSERTS_HEIGHT_1, "{}", map.dump_keys()); + + while map.height() == Some(1) { + let last_key = *map.last_key_value().unwrap().0; + map.insert(last_key + 1, ()); + } + map.check(); + // Structure: + // - 1 element in internal root node with 2 children + // - 6 elements in left internal child with 7 grandchildren + // - 42 elements in left child's 7 grandchildren with 6 elements each + // - 5 elements in right internal child with 6 grandchildren + // - 30 elements in right child's 5 first grandchildren with 6 elements each + // - 5 elements in right child's last grandchild + assert_eq!(map.height(), Some(2)); + assert_eq!(map.len(), MIN_INSERTS_HEIGHT_2, "{}", map.dump_keys()); +} + +// Ensures the testing infrastructure usually notices order violations. +#[test] +#[should_panic] +fn test_check_ord_chaos() { + let gov = Governor::new(); + let map = BTreeMap::from([(Governed(1, &gov), ()), (Governed(2, &gov), ())]); + gov.flip(); + map.check(); +} + +// Ensures the testing infrastructure doesn't always mind order violations. 
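+// (Unlike `check`, `check_invariants` validates structure only and never
+// compares keys, so a broken `Ord` implementation cannot make it panic.)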
+#[test] +fn test_check_invariants_ord_chaos() { + let gov = Governor::new(); + let map = BTreeMap::from([(Governed(1, &gov), ()), (Governed(2, &gov), ())]); + gov.flip(); + map.check_invariants(); +} + +#[test] +fn test_basic_large() { + let mut map = BTreeMap::new(); + // Miri is too slow + let size = if cfg!(miri) { + MIN_INSERTS_HEIGHT_2 + } else { + 10000 + }; + let size = size + (size % 2); // round up to even number + assert_eq!(map.len(), 0); + + for i in 0..size { + assert_eq!(map.insert(i, 10 * i), None); + assert_eq!(map.len(), i + 1); + } + + assert_eq!(map.first_key_value(), Some((&0, &0))); + assert_eq!( + map.last_key_value(), + Some((&(size - 1), &(10 * (size - 1)))) + ); + assert_eq!(map.first_entry().unwrap().key(), &0); + assert_eq!(map.last_entry().unwrap().key(), &(size - 1)); + + for i in 0..size { + assert_eq!(map.get(&i).unwrap(), &(i * 10)); + } + + for i in size..size * 2 { + assert_eq!(map.get(&i), None); + } + + for i in 0..size { + assert_eq!(map.insert(i, 100 * i), Some(10 * i)); + assert_eq!(map.len(), size); + } + + for i in 0..size { + assert_eq!(map.get(&i).unwrap(), &(i * 100)); + } + + for i in 0..size / 2 { + assert_eq!(map.remove(&(i * 2)), Some(i * 200)); + assert_eq!(map.len(), size - i - 1); + } + + for i in 0..size / 2 { + assert_eq!(map.get(&(2 * i)), None); + assert_eq!(map.get(&(2 * i + 1)).unwrap(), &(i * 200 + 100)); + } + + for i in 0..size / 2 { + assert_eq!(map.remove(&(2 * i)), None); + assert_eq!(map.remove(&(2 * i + 1)), Some(i * 200 + 100)); + assert_eq!(map.len(), size / 2 - i - 1); + } + map.check(); +} + +#[test] +fn test_basic_small() { + let mut map = BTreeMap::new(); + // Empty, root is absent (None): + assert_eq!(map.remove(&1), None); + assert_eq!(map.len(), 0); + assert_eq!(map.get(&1), None); + assert_eq!(map.get_mut(&1), None); + assert_eq!(map.first_key_value(), None); + assert_eq!(map.last_key_value(), None); + assert_eq!(map.keys().count(), 0); + assert_eq!(map.values().count(), 0); + assert_eq!(map.range(..).next(), None); + assert_eq!(map.range(..1).next(), None); + assert_eq!(map.range(1..).next(), None); + assert_eq!(map.range(1..=1).next(), None); + assert_eq!(map.range(1..2).next(), None); + assert_eq!(map.height(), None); + assert_eq!(map.insert(1, 1), None); + assert_eq!(map.height(), Some(0)); + map.check(); + + // 1 key-value pair: + assert_eq!(map.len(), 1); + assert_eq!(map.get(&1), Some(&1)); + assert_eq!(map.get_mut(&1), Some(&mut 1)); + assert_eq!(map.first_key_value(), Some((&1, &1))); + assert_eq!(map.last_key_value(), Some((&1, &1))); + assert_eq!(map.keys().collect::>(), vec![&1]); + assert_eq!(map.values().collect::>(), vec![&1]); + assert_eq!(map.insert(1, 2), Some(1)); + assert_eq!(map.len(), 1); + assert_eq!(map.get(&1), Some(&2)); + assert_eq!(map.get_mut(&1), Some(&mut 2)); + assert_eq!(map.first_key_value(), Some((&1, &2))); + assert_eq!(map.last_key_value(), Some((&1, &2))); + assert_eq!(map.keys().collect::>(), vec![&1]); + assert_eq!(map.values().collect::>(), vec![&2]); + assert_eq!(map.insert(2, 4), None); + assert_eq!(map.height(), Some(0)); + map.check(); + + // 2 key-value pairs: + assert_eq!(map.len(), 2); + assert_eq!(map.get(&2), Some(&4)); + assert_eq!(map.get_mut(&2), Some(&mut 4)); + assert_eq!(map.first_key_value(), Some((&1, &2))); + assert_eq!(map.last_key_value(), Some((&2, &4))); + assert_eq!(map.keys().collect::>(), vec![&1, &2]); + assert_eq!(map.values().collect::>(), vec![&2, &4]); + assert_eq!(map.remove(&1), Some(2)); + assert_eq!(map.height(), Some(0)); + map.check(); + + 
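+    // (Illustrative check, not part of the original patch: at this point the
+    // entry API agrees with the direct lookups above.)
+    assert_matches!(map.entry(1), Vacant(_));
+    assert_matches!(map.entry(2), Occupied(_));
+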
// 1 key-value pair: + assert_eq!(map.len(), 1); + assert_eq!(map.get(&1), None); + assert_eq!(map.get_mut(&1), None); + assert_eq!(map.get(&2), Some(&4)); + assert_eq!(map.get_mut(&2), Some(&mut 4)); + assert_eq!(map.first_key_value(), Some((&2, &4))); + assert_eq!(map.last_key_value(), Some((&2, &4))); + assert_eq!(map.keys().collect::>(), vec![&2]); + assert_eq!(map.values().collect::>(), vec![&4]); + assert_eq!(map.remove(&2), Some(4)); + assert_eq!(map.height(), Some(0)); + map.check(); + + // Empty but root is owned (Some(...)): + assert_eq!(map.len(), 0); + assert_eq!(map.get(&1), None); + assert_eq!(map.get_mut(&1), None); + assert_eq!(map.first_key_value(), None); + assert_eq!(map.last_key_value(), None); + assert_eq!(map.keys().count(), 0); + assert_eq!(map.values().count(), 0); + assert_eq!(map.range(..).next(), None); + assert_eq!(map.range(..1).next(), None); + assert_eq!(map.range(1..).next(), None); + assert_eq!(map.range(1..=1).next(), None); + assert_eq!(map.range(1..2).next(), None); + assert_eq!(map.remove(&1), None); + assert_eq!(map.height(), Some(0)); + map.check(); +} + +#[test] +fn test_iter() { + // Miri is too slow + let size = if cfg!(miri) { 200 } else { 10000 }; + let mut map = BTreeMap::from_iter((0..size).map(|i| (i, i))); + + fn test(size: usize, mut iter: T) + where + T: Iterator, + { + for i in 0..size { + assert_eq!(iter.size_hint(), (size - i, Some(size - i))); + assert_eq!(iter.next().unwrap(), (i, i)); + } + assert_eq!(iter.size_hint(), (0, Some(0))); + assert_eq!(iter.next(), None); + } + test(size, map.iter().map(|(&k, &v)| (k, v))); + test(size, map.iter_mut().map(|(&k, &mut v)| (k, v))); + test(size, map.into_iter()); +} + +#[test] +fn test_iter_rev() { + // Miri is too slow + let size = if cfg!(miri) { 200 } else { 10000 }; + let mut map = BTreeMap::from_iter((0..size).map(|i| (i, i))); + + fn test(size: usize, mut iter: T) + where + T: Iterator, + { + for i in 0..size { + assert_eq!(iter.size_hint(), (size - i, Some(size - i))); + assert_eq!(iter.next().unwrap(), (size - i - 1, size - i - 1)); + } + assert_eq!(iter.size_hint(), (0, Some(0))); + assert_eq!(iter.next(), None); + } + test(size, map.iter().rev().map(|(&k, &v)| (k, v))); + test(size, map.iter_mut().rev().map(|(&k, &mut v)| (k, v))); + test(size, map.into_iter().rev()); +} + +// Specifically tests iter_mut's ability to mutate the value of pairs in-line. 
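+// (It is generic over the key/value type so the same scenario can be replayed
+// at several sizes and alignments; see the `Align32` instantiations below.)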
+fn do_test_iter_mut_mutation(size: usize) +where + T: Copy + Debug + Ord + TryFrom, + >::Error: Debug, +{ + let zero = T::try_from(0).unwrap(); + let mut map = BTreeMap::from_iter((0..size).map(|i| (T::try_from(i).unwrap(), zero))); + + // Forward and backward iteration sees enough pairs (also tested elsewhere) + assert_eq!(map.iter_mut().count(), size); + assert_eq!(map.iter_mut().rev().count(), size); + + // Iterate forwards, trying to mutate to unique values + for (i, (k, v)) in map.iter_mut().enumerate() { + assert_eq!(*k, T::try_from(i).unwrap()); + assert_eq!(*v, zero); + *v = T::try_from(i + 1).unwrap(); + } + + // Iterate backwards, checking that mutations succeeded and trying to mutate again + for (i, (k, v)) in map.iter_mut().rev().enumerate() { + assert_eq!(*k, T::try_from(size - i - 1).unwrap()); + assert_eq!(*v, T::try_from(size - i).unwrap()); + *v = T::try_from(2 * size - i).unwrap(); + } + + // Check that backward mutations succeeded + for (i, (k, v)) in map.iter_mut().enumerate() { + assert_eq!(*k, T::try_from(i).unwrap()); + assert_eq!(*v, T::try_from(size + i + 1).unwrap()); + } + map.check(); +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)] +#[repr(align(32))] +struct Align32(usize); + +impl TryFrom for Align32 { + type Error = (); + + fn try_from(s: usize) -> Result { + Ok(Align32(s)) + } +} + +#[test] +fn test_iter_mut_mutation() { + // Check many alignments and trees with roots at various heights. + do_test_iter_mut_mutation::(0); + do_test_iter_mut_mutation::(1); + do_test_iter_mut_mutation::(MIN_INSERTS_HEIGHT_1); + do_test_iter_mut_mutation::(MIN_INSERTS_HEIGHT_2); + do_test_iter_mut_mutation::(1); + do_test_iter_mut_mutation::(MIN_INSERTS_HEIGHT_1); + do_test_iter_mut_mutation::(MIN_INSERTS_HEIGHT_2); + do_test_iter_mut_mutation::(1); + do_test_iter_mut_mutation::(MIN_INSERTS_HEIGHT_1); + do_test_iter_mut_mutation::(MIN_INSERTS_HEIGHT_2); + do_test_iter_mut_mutation::(1); + do_test_iter_mut_mutation::(MIN_INSERTS_HEIGHT_1); + do_test_iter_mut_mutation::(MIN_INSERTS_HEIGHT_2); + do_test_iter_mut_mutation::(1); + do_test_iter_mut_mutation::(MIN_INSERTS_HEIGHT_1); + do_test_iter_mut_mutation::(MIN_INSERTS_HEIGHT_2); + do_test_iter_mut_mutation::(1); + do_test_iter_mut_mutation::(MIN_INSERTS_HEIGHT_1); + do_test_iter_mut_mutation::(MIN_INSERTS_HEIGHT_2); +} + +#[test] +fn test_values_mut() { + let mut a = BTreeMap::from_iter((0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i))); + test_all_refs(&mut 13, a.values_mut()); + a.check(); +} + +#[test] +fn test_values_mut_mutation() { + let mut a = BTreeMap::new(); + a.insert(1, String::from("hello")); + a.insert(2, String::from("goodbye")); + + for value in a.values_mut() { + value.push_str("!"); + } + + let values = Vec::from_iter(a.values().cloned()); + assert_eq!(values, [String::from("hello!"), String::from("goodbye!")]); + a.check(); +} + +#[test] +fn test_iter_entering_root_twice() { + let mut map = BTreeMap::from([(0, 0), (1, 1)]); + let mut it = map.iter_mut(); + let front = it.next().unwrap(); + let back = it.next_back().unwrap(); + assert_eq!(front, (&0, &mut 0)); + assert_eq!(back, (&1, &mut 1)); + *front.1 = 24; + *back.1 = 42; + assert_eq!(front, (&0, &mut 24)); + assert_eq!(back, (&1, &mut 42)); + assert_eq!(it.next(), None); + assert_eq!(it.next_back(), None); + map.check(); +} + +#[test] +fn test_iter_descending_to_same_node_twice() { + let mut map = BTreeMap::from_iter((0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i))); + let mut it = map.iter_mut(); + // Descend into first child. 
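+    // (Keeping `front` alive while the iterator later re-enters the same leaf
+    // from the back is the aliasing pattern this test asks Miri to scrutinize.)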
+    let front = it.next().unwrap();
+    // Descend into first child again, after running through second child.
+    while it.next_back().is_some() {}
+    // Check immutable access.
+    assert_eq!(front, (&0, &mut 0));
+    // Perform mutable access.
+    *front.1 = 42;
+    map.check();
+}
+
+#[test]
+fn test_iter_mixed() {
+    // Miri is too slow
+    let size = if cfg!(miri) { 200 } else { 10000 };
+
+    let mut map = BTreeMap::from_iter((0..size).map(|i| (i, i)));
+
+    fn test<T>(size: usize, mut iter: T)
+    where
+        T: Iterator<Item = (usize, usize)> + DoubleEndedIterator,
+    {
+        for i in 0..size / 4 {
+            assert_eq!(iter.size_hint(), (size - i * 2, Some(size - i * 2)));
+            assert_eq!(iter.next().unwrap(), (i, i));
+            assert_eq!(iter.next_back().unwrap(), (size - i - 1, size - i - 1));
+        }
+        for i in size / 4..size * 3 / 4 {
+            assert_eq!(iter.size_hint(), (size * 3 / 4 - i, Some(size * 3 / 4 - i)));
+            assert_eq!(iter.next().unwrap(), (i, i));
+        }
+        assert_eq!(iter.size_hint(), (0, Some(0)));
+        assert_eq!(iter.next(), None);
+    }
+    test(size, map.iter().map(|(&k, &v)| (k, v)));
+    test(size, map.iter_mut().map(|(&k, &mut v)| (k, v)));
+    test(size, map.into_iter());
+}
+
+#[test]
+fn test_iter_min_max() {
+    let mut a = BTreeMap::new();
+    assert_eq!(a.iter().min(), None);
+    assert_eq!(a.iter().max(), None);
+    assert_eq!(a.iter_mut().min(), None);
+    assert_eq!(a.iter_mut().max(), None);
+    assert_eq!(a.range(..).min(), None);
+    assert_eq!(a.range(..).max(), None);
+    assert_eq!(a.range_mut(..).min(), None);
+    assert_eq!(a.range_mut(..).max(), None);
+    assert_eq!(a.keys().min(), None);
+    assert_eq!(a.keys().max(), None);
+    assert_eq!(a.values().min(), None);
+    assert_eq!(a.values().max(), None);
+    assert_eq!(a.values_mut().min(), None);
+    assert_eq!(a.values_mut().max(), None);
+    a.insert(1, 42);
+    a.insert(2, 24);
+    assert_eq!(a.iter().min(), Some((&1, &42)));
+    assert_eq!(a.iter().max(), Some((&2, &24)));
+    assert_eq!(a.iter_mut().min(), Some((&1, &mut 42)));
+    assert_eq!(a.iter_mut().max(), Some((&2, &mut 24)));
+    assert_eq!(a.range(..).min(), Some((&1, &42)));
+    assert_eq!(a.range(..).max(), Some((&2, &24)));
+    assert_eq!(a.range_mut(..).min(), Some((&1, &mut 42)));
+    assert_eq!(a.range_mut(..).max(), Some((&2, &mut 24)));
+    assert_eq!(a.keys().min(), Some(&1));
+    assert_eq!(a.keys().max(), Some(&2));
+    assert_eq!(a.values().min(), Some(&24));
+    assert_eq!(a.values().max(), Some(&42));
+    assert_eq!(a.values_mut().min(), Some(&mut 24));
+    assert_eq!(a.values_mut().max(), Some(&mut 42));
+    a.check();
+}
+
+fn range_keys(map: &BTreeMap<i32, i32>, range: impl RangeBounds<i32>) -> Vec<i32> {
+    Vec::from_iter(map.range(range).map(|(&k, &v)| {
+        assert_eq!(k, v);
+        k
+    }))
+}
+
+#[test]
+fn test_range_small() {
+    let size = 4;
+
+    let all = Vec::from_iter(1..=size);
+    let (first, last) = (vec![all[0]], vec![all[size as usize - 1]]);
+    let map = BTreeMap::from_iter(all.iter().copied().map(|i| (i, i)));
+
+    assert_eq!(range_keys(&map, (Excluded(0), Excluded(size + 1))), all);
+    assert_eq!(range_keys(&map, (Excluded(0), Included(size + 1))), all);
+    assert_eq!(range_keys(&map, (Excluded(0), Included(size))), all);
+    assert_eq!(range_keys(&map, (Excluded(0), Unbounded)), all);
+    assert_eq!(range_keys(&map, (Included(0), Excluded(size + 1))), all);
+    assert_eq!(range_keys(&map, (Included(0), Included(size + 1))), all);
+    assert_eq!(range_keys(&map, (Included(0), Included(size))), all);
+    assert_eq!(range_keys(&map, (Included(0), Unbounded)), all);
+    assert_eq!(range_keys(&map, (Included(1), Excluded(size + 1))), all);
+    assert_eq!(range_keys(&map, (Included(1), Included(size + 
1))), all); + assert_eq!(range_keys(&map, (Included(1), Included(size))), all); + assert_eq!(range_keys(&map, (Included(1), Unbounded)), all); + assert_eq!(range_keys(&map, (Unbounded, Excluded(size + 1))), all); + assert_eq!(range_keys(&map, (Unbounded, Included(size + 1))), all); + assert_eq!(range_keys(&map, (Unbounded, Included(size))), all); + assert_eq!(range_keys(&map, ..), all); + + assert_eq!(range_keys(&map, (Excluded(0), Excluded(1))), vec![]); + assert_eq!(range_keys(&map, (Excluded(0), Included(0))), vec![]); + assert_eq!(range_keys(&map, (Included(0), Included(0))), vec![]); + assert_eq!(range_keys(&map, (Included(0), Excluded(1))), vec![]); + assert_eq!(range_keys(&map, (Unbounded, Excluded(1))), vec![]); + assert_eq!(range_keys(&map, (Unbounded, Included(0))), vec![]); + assert_eq!(range_keys(&map, (Excluded(0), Excluded(2))), first); + assert_eq!(range_keys(&map, (Excluded(0), Included(1))), first); + assert_eq!(range_keys(&map, (Included(0), Excluded(2))), first); + assert_eq!(range_keys(&map, (Included(0), Included(1))), first); + assert_eq!(range_keys(&map, (Included(1), Excluded(2))), first); + assert_eq!(range_keys(&map, (Included(1), Included(1))), first); + assert_eq!(range_keys(&map, (Unbounded, Excluded(2))), first); + assert_eq!(range_keys(&map, (Unbounded, Included(1))), first); + assert_eq!( + range_keys(&map, (Excluded(size - 1), Excluded(size + 1))), + last + ); + assert_eq!( + range_keys(&map, (Excluded(size - 1), Included(size + 1))), + last + ); + assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size))), last); + assert_eq!(range_keys(&map, (Excluded(size - 1), Unbounded)), last); + assert_eq!(range_keys(&map, (Included(size), Excluded(size + 1))), last); + assert_eq!(range_keys(&map, (Included(size), Included(size + 1))), last); + assert_eq!(range_keys(&map, (Included(size), Included(size))), last); + assert_eq!(range_keys(&map, (Included(size), Unbounded)), last); + assert_eq!( + range_keys(&map, (Excluded(size), Excluded(size + 1))), + vec![] + ); + assert_eq!(range_keys(&map, (Excluded(size), Included(size))), vec![]); + assert_eq!(range_keys(&map, (Excluded(size), Unbounded)), vec![]); + assert_eq!( + range_keys(&map, (Included(size + 1), Excluded(size + 1))), + vec![] + ); + assert_eq!( + range_keys(&map, (Included(size + 1), Included(size + 1))), + vec![] + ); + assert_eq!(range_keys(&map, (Included(size + 1), Unbounded)), vec![]); + + assert_eq!(range_keys(&map, ..3), vec![1, 2]); + assert_eq!(range_keys(&map, 3..), vec![3, 4]); + assert_eq!(range_keys(&map, 2..=3), vec![2, 3]); +} + +#[test] +fn test_range_height_1() { + // Tests tree with a root and 2 leaves. We test around the middle of the + // keys because one of those is the single key in the root node. 
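+ // (MIN_INSERTS_HEIGHT_1 is the smallest number of insertions that forces the
+ // tree to grow from a lone root node to height 1.)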
+ let map = BTreeMap::from_iter((0..MIN_INSERTS_HEIGHT_1 as i32).map(|i| (i, i))); + let middle = MIN_INSERTS_HEIGHT_1 as i32 / 2; + for root in middle - 2..=middle + 2 { + assert_eq!( + range_keys(&map, (Excluded(root), Excluded(root + 1))), + vec![] + ); + assert_eq!( + range_keys(&map, (Excluded(root), Included(root + 1))), + vec![root + 1] + ); + assert_eq!( + range_keys(&map, (Included(root), Excluded(root + 1))), + vec![root] + ); + assert_eq!( + range_keys(&map, (Included(root), Included(root + 1))), + vec![root, root + 1] + ); + + assert_eq!( + range_keys(&map, (Excluded(root - 1), Excluded(root))), + vec![] + ); + assert_eq!( + range_keys(&map, (Included(root - 1), Excluded(root))), + vec![root - 1] + ); + assert_eq!( + range_keys(&map, (Excluded(root - 1), Included(root))), + vec![root] + ); + assert_eq!( + range_keys(&map, (Included(root - 1), Included(root))), + vec![root - 1, root] + ); + } +} + +#[test] +fn test_range_large() { + let size = 200; + + let all = Vec::from_iter(1..=size); + let (first, last) = (vec![all[0]], vec![all[size as usize - 1]]); + let map = BTreeMap::from_iter(all.iter().copied().map(|i| (i, i))); + + assert_eq!(range_keys(&map, (Excluded(0), Excluded(size + 1))), all); + assert_eq!(range_keys(&map, (Excluded(0), Included(size + 1))), all); + assert_eq!(range_keys(&map, (Excluded(0), Included(size))), all); + assert_eq!(range_keys(&map, (Excluded(0), Unbounded)), all); + assert_eq!(range_keys(&map, (Included(0), Excluded(size + 1))), all); + assert_eq!(range_keys(&map, (Included(0), Included(size + 1))), all); + assert_eq!(range_keys(&map, (Included(0), Included(size))), all); + assert_eq!(range_keys(&map, (Included(0), Unbounded)), all); + assert_eq!(range_keys(&map, (Included(1), Excluded(size + 1))), all); + assert_eq!(range_keys(&map, (Included(1), Included(size + 1))), all); + assert_eq!(range_keys(&map, (Included(1), Included(size))), all); + assert_eq!(range_keys(&map, (Included(1), Unbounded)), all); + assert_eq!(range_keys(&map, (Unbounded, Excluded(size + 1))), all); + assert_eq!(range_keys(&map, (Unbounded, Included(size + 1))), all); + assert_eq!(range_keys(&map, (Unbounded, Included(size))), all); + assert_eq!(range_keys(&map, ..), all); + + assert_eq!(range_keys(&map, (Excluded(0), Excluded(1))), vec![]); + assert_eq!(range_keys(&map, (Excluded(0), Included(0))), vec![]); + assert_eq!(range_keys(&map, (Included(0), Included(0))), vec![]); + assert_eq!(range_keys(&map, (Included(0), Excluded(1))), vec![]); + assert_eq!(range_keys(&map, (Unbounded, Excluded(1))), vec![]); + assert_eq!(range_keys(&map, (Unbounded, Included(0))), vec![]); + assert_eq!(range_keys(&map, (Excluded(0), Excluded(2))), first); + assert_eq!(range_keys(&map, (Excluded(0), Included(1))), first); + assert_eq!(range_keys(&map, (Included(0), Excluded(2))), first); + assert_eq!(range_keys(&map, (Included(0), Included(1))), first); + assert_eq!(range_keys(&map, (Included(1), Excluded(2))), first); + assert_eq!(range_keys(&map, (Included(1), Included(1))), first); + assert_eq!(range_keys(&map, (Unbounded, Excluded(2))), first); + assert_eq!(range_keys(&map, (Unbounded, Included(1))), first); + assert_eq!( + range_keys(&map, (Excluded(size - 1), Excluded(size + 1))), + last + ); + assert_eq!( + range_keys(&map, (Excluded(size - 1), Included(size + 1))), + last + ); + assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size))), last); + assert_eq!(range_keys(&map, (Excluded(size - 1), Unbounded)), last); + assert_eq!(range_keys(&map, (Included(size), Excluded(size + 
1))), last);
+    assert_eq!(range_keys(&map, (Included(size), Included(size + 1))), last);
+    assert_eq!(range_keys(&map, (Included(size), Included(size))), last);
+    assert_eq!(range_keys(&map, (Included(size), Unbounded)), last);
+    assert_eq!(
+        range_keys(&map, (Excluded(size), Excluded(size + 1))),
+        vec![]
+    );
+    assert_eq!(range_keys(&map, (Excluded(size), Included(size))), vec![]);
+    assert_eq!(range_keys(&map, (Excluded(size), Unbounded)), vec![]);
+    assert_eq!(
+        range_keys(&map, (Included(size + 1), Excluded(size + 1))),
+        vec![]
+    );
+    assert_eq!(
+        range_keys(&map, (Included(size + 1), Included(size + 1))),
+        vec![]
+    );
+    assert_eq!(range_keys(&map, (Included(size + 1), Unbounded)), vec![]);
+
+    fn check<'a, L, R>(lhs: L, rhs: R)
+    where
+        L: IntoIterator<Item = (&'a i32, &'a i32)>,
+        R: IntoIterator<Item = (&'a i32, &'a i32)>,
+    {
+        assert_eq!(Vec::from_iter(lhs), Vec::from_iter(rhs));
+    }
+
+    check(map.range(..=100), map.range(..101));
+    check(
+        map.range(5..=8),
+        vec![(&5, &5), (&6, &6), (&7, &7), (&8, &8)],
+    );
+    check(map.range(-1..=2), vec![(&1, &1), (&2, &2)]);
+}
+
+#[test]
+fn test_range_inclusive_max_value() {
+    let max = usize::MAX;
+    let map = BTreeMap::from([(max, 0)]);
+    assert_eq!(Vec::from_iter(map.range(max..=max)), &[(&max, &0)]);
+}
+
+#[test]
+fn test_range_equal_empty_cases() {
+    let map = BTreeMap::from_iter((0..5).map(|i| (i, i)));
+    assert_eq!(map.range((Included(2), Excluded(2))).next(), None);
+    assert_eq!(map.range((Excluded(2), Included(2))).next(), None);
+}
+
+#[test]
+#[should_panic]
+fn test_range_equal_excluded() {
+    let map = BTreeMap::from_iter((0..5).map(|i| (i, i)));
+    let _ = map.range((Excluded(2), Excluded(2)));
+}
+
+#[test]
+#[should_panic]
+fn test_range_backwards_1() {
+    let map = BTreeMap::from_iter((0..5).map(|i| (i, i)));
+    let _ = map.range((Included(3), Included(2)));
+}
+
+#[test]
+#[should_panic]
+fn test_range_backwards_2() {
+    let map = BTreeMap::from_iter((0..5).map(|i| (i, i)));
+    let _ = map.range((Included(3), Excluded(2)));
+}
+
+#[test]
+#[should_panic]
+fn test_range_backwards_3() {
+    let map = BTreeMap::from_iter((0..5).map(|i| (i, i)));
+    let _ = map.range((Excluded(3), Included(2)));
+}
+
+#[test]
+#[should_panic]
+fn test_range_backwards_4() {
+    let map = BTreeMap::from_iter((0..5).map(|i| (i, i)));
+    let _ = map.range((Excluded(3), Excluded(2)));
+}
+
+#[test]
+fn test_range_finding_ill_order_in_map() {
+    let mut map = BTreeMap::new();
+    map.insert(Cyclic3::B, ());
+    // Lacking static_assert, call `range` conditionally, to emphasise that
+    // we cause a different panic than `test_range_backwards_1` does.
+    // A more refined `should_panic` would be welcome.
+    if Cyclic3::C < Cyclic3::A {
+        let _ = map.range(Cyclic3::C..=Cyclic3::A);
+    }
+}
+
+#[test]
+fn test_range_finding_ill_order_in_range_ord() {
+    // Has proper order the first time asked, then flips around.
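+    // (COMPARES counts Ord::cmp calls: the first comparison is answered
+    // honestly and every later one is reversed, modelling a key type whose
+    // ordering is inconsistent between calls.)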
+    struct EvilTwin(i32);
+
+    impl PartialOrd for EvilTwin {
+        fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+            Some(self.cmp(other))
+        }
+    }
+
+    static COMPARES: AtomicUsize = AtomicUsize::new(0);
+    impl Ord for EvilTwin {
+        fn cmp(&self, other: &Self) -> Ordering {
+            let ord = self.0.cmp(&other.0);
+            if COMPARES.fetch_add(1, SeqCst) > 0 {
+                ord.reverse()
+            } else {
+                ord
+            }
+        }
+    }
+
+    impl PartialEq for EvilTwin {
+        fn eq(&self, other: &Self) -> bool {
+            self.0.eq(&other.0)
+        }
+    }
+
+    impl Eq for EvilTwin {}
+
+    #[derive(PartialEq, Eq, PartialOrd, Ord)]
+    struct CompositeKey(i32, EvilTwin);
+
+    impl Borrow<EvilTwin> for CompositeKey {
+        fn borrow(&self) -> &EvilTwin {
+            &self.1
+        }
+    }
+
+    let map = BTreeMap::from_iter((0..12).map(|i| (CompositeKey(i, EvilTwin(i)), ())));
+    let _ = map.range(EvilTwin(5)..=EvilTwin(7));
+}
+
+#[test]
+fn test_range_1000() {
+    // Miri is too slow
+    let size = if cfg!(miri) {
+        MIN_INSERTS_HEIGHT_2 as u32
+    } else {
+        1000
+    };
+    let map = BTreeMap::from_iter((0..size).map(|i| (i, i)));
+
+    fn test(map: &BTreeMap<u32, u32>, size: u32, min: Bound<&u32>, max: Bound<&u32>) {
+        let mut kvs = map.range((min, max)).map(|(&k, &v)| (k, v));
+        let mut pairs = (0..size).map(|i| (i, i));
+
+        for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) {
+            assert_eq!(kv, pair);
+        }
+        assert_eq!(kvs.next(), None);
+        assert_eq!(pairs.next(), None);
+    }
+    test(&map, size, Included(&0), Excluded(&size));
+    test(&map, size, Unbounded, Excluded(&size));
+    test(&map, size, Included(&0), Included(&(size - 1)));
+    test(&map, size, Unbounded, Included(&(size - 1)));
+    test(&map, size, Included(&0), Unbounded);
+    test(&map, size, Unbounded, Unbounded);
+}
+
+#[test]
+fn test_range_borrowed_key() {
+    let mut map = BTreeMap::new();
+    map.insert("aardvark".to_string(), 1);
+    map.insert("baboon".to_string(), 2);
+    map.insert("coyote".to_string(), 3);
+    map.insert("dingo".to_string(), 4);
+    // NOTE: would like to use simply "b".."d" here...
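+    // ...but that range would be bounded by `&str` references, and `String`
+    // only implements `Borrow<str>`, so the borrowed lookup type has to be
+    // spelled out with a turbofish below instead.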
+ let mut iter = map.range::((Included("b"), Excluded("d"))); + assert_eq!(iter.next(), Some((&"baboon".to_string(), &2))); + assert_eq!(iter.next(), Some((&"coyote".to_string(), &3))); + assert_eq!(iter.next(), None); +} + +#[test] +fn test_range() { + let size = 200; + // Miri is too slow + let step = if cfg!(miri) { 66 } else { 1 }; + let map = BTreeMap::from_iter((0..size).map(|i| (i, i))); + + for i in (0..size).step_by(step) { + for j in (i..size).step_by(step) { + let mut kvs = map + .range((Included(&i), Included(&j))) + .map(|(&k, &v)| (k, v)); + let mut pairs = (i..=j).map(|i| (i, i)); + + for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) { + assert_eq!(kv, pair); + } + assert_eq!(kvs.next(), None); + assert_eq!(pairs.next(), None); + } + } +} + +#[test] +fn test_range_mut() { + let size = 200; + // Miri is too slow + let step = if cfg!(miri) { 66 } else { 1 }; + let mut map = BTreeMap::from_iter((0..size).map(|i| (i, i))); + + for i in (0..size).step_by(step) { + for j in (i..size).step_by(step) { + let mut kvs = map + .range_mut((Included(&i), Included(&j))) + .map(|(&k, &mut v)| (k, v)); + let mut pairs = (i..=j).map(|i| (i, i)); + + for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) { + assert_eq!(kv, pair); + } + assert_eq!(kvs.next(), None); + assert_eq!(pairs.next(), None); + } + } + map.check(); +} + +#[should_panic(expected = "range start is greater than range end in BTree")] +#[test] +fn test_range_panic_1() { + let mut map = BTreeMap::new(); + map.insert(3, "a"); + map.insert(5, "b"); + map.insert(8, "c"); + + let _invalid_range = map.range((Included(&8), Included(&3))); +} + +#[should_panic(expected = "range start and end are equal and excluded in BTree")] +#[test] +fn test_range_panic_2() { + let mut map = BTreeMap::new(); + map.insert(3, "a"); + map.insert(5, "b"); + map.insert(8, "c"); + + let _invalid_range = map.range((Excluded(&5), Excluded(&5))); +} + +#[should_panic(expected = "range start and end are equal and excluded in BTree")] +#[test] +fn test_range_panic_3() { + let mut map: BTreeMap = BTreeMap::new(); + map.insert(3, ()); + map.insert(5, ()); + map.insert(8, ()); + + let _invalid_range = map.range((Excluded(&5), Excluded(&5))); +} + +#[test] +fn test_retain() { + let mut map = BTreeMap::from_iter((0..100).map(|x| (x, x * 10))); + + map.retain(|&k, _| k % 2 == 0); + assert_eq!(map.len(), 50); + assert_eq!(map[&2], 20); + assert_eq!(map[&4], 40); + assert_eq!(map[&6], 60); +} + +mod test_extract_if { + use super::*; + + #[test] + fn empty() { + let mut map: BTreeMap = BTreeMap::new(); + map.extract_if(|_, _| unreachable!("there's nothing to decide on")) + .for_each(drop); + assert_eq!(map.height(), None); + map.check(); + } + + // Explicitly consumes the iterator, where most test cases drop it instantly. + #[test] + fn consumed_keeping_all() { + let pairs = (0..3).map(|i| (i, i)); + let mut map = BTreeMap::from_iter(pairs); + assert!(map.extract_if(|_, _| false).eq(iter::empty())); + map.check(); + } + + // Explicitly consumes the iterator, where most test cases drop it instantly. + #[test] + fn consumed_removing_all() { + let pairs = (0..3).map(|i| (i, i)); + let mut map = BTreeMap::from_iter(pairs.clone()); + assert!(map.extract_if(|_, _| true).eq(pairs)); + assert!(map.is_empty()); + map.check(); + } + + // Explicitly consumes the iterator and modifies values through it. 
+ #[test] + fn mutating_and_keeping() { + let pairs = (0..3).map(|i| (i, i)); + let mut map = BTreeMap::from_iter(pairs); + assert!(map + .extract_if(|_, v| { + *v += 6; + false + }) + .eq(iter::empty())); + assert!(map.keys().copied().eq(0..3)); + assert!(map.values().copied().eq(6..9)); + map.check(); + } + + // Explicitly consumes the iterator and modifies values through it. + #[test] + fn mutating_and_removing() { + let pairs = (0..3).map(|i| (i, i)); + let mut map = BTreeMap::from_iter(pairs); + assert!(map + .extract_if(|_, v| { + *v += 6; + true + }) + .eq((0..3).map(|i| (i, i + 6)))); + assert!(map.is_empty()); + map.check(); + } + + #[test] + fn underfull_keeping_all() { + let pairs = (0..3).map(|i| (i, i)); + let mut map = BTreeMap::from_iter(pairs); + map.extract_if(|_, _| false).for_each(drop); + assert!(map.keys().copied().eq(0..3)); + map.check(); + } + + #[test] + fn underfull_removing_one() { + let pairs = (0..3).map(|i| (i, i)); + for doomed in 0..3 { + let mut map = BTreeMap::from_iter(pairs.clone()); + map.extract_if(|i, _| *i == doomed).for_each(drop); + assert_eq!(map.len(), 2); + map.check(); + } + } + + #[test] + fn underfull_keeping_one() { + let pairs = (0..3).map(|i| (i, i)); + for sacred in 0..3 { + let mut map = BTreeMap::from_iter(pairs.clone()); + map.extract_if(|i, _| *i != sacred).for_each(drop); + assert!(map.keys().copied().eq(sacred..=sacred)); + map.check(); + } + } + + #[test] + fn underfull_removing_all() { + let pairs = (0..3).map(|i| (i, i)); + let mut map = BTreeMap::from_iter(pairs); + map.extract_if(|_, _| true).for_each(drop); + assert!(map.is_empty()); + map.check(); + } + + #[test] + fn height_0_keeping_all() { + let pairs = (0..node::CAPACITY).map(|i| (i, i)); + let mut map = BTreeMap::from_iter(pairs); + map.extract_if(|_, _| false).for_each(drop); + assert!(map.keys().copied().eq(0..node::CAPACITY)); + map.check(); + } + + #[test] + fn height_0_removing_one() { + let pairs = (0..node::CAPACITY).map(|i| (i, i)); + for doomed in 0..node::CAPACITY { + let mut map = BTreeMap::from_iter(pairs.clone()); + map.extract_if(|i, _| *i == doomed).for_each(drop); + assert_eq!(map.len(), node::CAPACITY - 1); + map.check(); + } + } + + #[test] + fn height_0_keeping_one() { + let pairs = (0..node::CAPACITY).map(|i| (i, i)); + for sacred in 0..node::CAPACITY { + let mut map = BTreeMap::from_iter(pairs.clone()); + map.extract_if(|i, _| *i != sacred).for_each(drop); + assert!(map.keys().copied().eq(sacred..=sacred)); + map.check(); + } + } + + #[test] + fn height_0_removing_all() { + let pairs = (0..node::CAPACITY).map(|i| (i, i)); + let mut map = BTreeMap::from_iter(pairs); + map.extract_if(|_, _| true).for_each(drop); + assert!(map.is_empty()); + map.check(); + } + + #[test] + fn height_0_keeping_half() { + let mut map = BTreeMap::from_iter((0..16).map(|i| (i, i))); + assert_eq!(map.extract_if(|i, _| *i % 2 == 0).count(), 8); + assert_eq!(map.len(), 8); + map.check(); + } + + #[test] + fn height_1_removing_all() { + let pairs = (0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i)); + let mut map = BTreeMap::from_iter(pairs); + map.extract_if(|_, _| true).for_each(drop); + assert!(map.is_empty()); + map.check(); + } + + #[test] + fn height_1_removing_one() { + let pairs = (0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i)); + for doomed in 0..MIN_INSERTS_HEIGHT_1 { + let mut map = BTreeMap::from_iter(pairs.clone()); + map.extract_if(|i, _| *i == doomed).for_each(drop); + assert_eq!(map.len(), MIN_INSERTS_HEIGHT_1 - 1); + map.check(); + } + } + + #[test] + fn height_1_keeping_one() 
{ + let pairs = (0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i)); + for sacred in 0..MIN_INSERTS_HEIGHT_1 { + let mut map = BTreeMap::from_iter(pairs.clone()); + map.extract_if(|i, _| *i != sacred).for_each(drop); + assert!(map.keys().copied().eq(sacred..=sacred)); + map.check(); + } + } + + #[test] + fn height_2_removing_one() { + let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i)); + for doomed in (0..MIN_INSERTS_HEIGHT_2).step_by(12) { + let mut map = BTreeMap::from_iter(pairs.clone()); + map.extract_if(|i, _| *i == doomed).for_each(drop); + assert_eq!(map.len(), MIN_INSERTS_HEIGHT_2 - 1); + map.check(); + } + } + + #[test] + fn height_2_keeping_one() { + let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i)); + for sacred in (0..MIN_INSERTS_HEIGHT_2).step_by(12) { + let mut map = BTreeMap::from_iter(pairs.clone()); + map.extract_if(|i, _| *i != sacred).for_each(drop); + assert!(map.keys().copied().eq(sacred..=sacred)); + map.check(); + } + } + + #[test] + fn height_2_removing_all() { + let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i)); + let mut map = BTreeMap::from_iter(pairs); + map.extract_if(|_, _| true).for_each(drop); + assert!(map.is_empty()); + map.check(); + } + + #[test] + #[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] + fn drop_panic_leak() { + let a = CrashTestDummy::new(0); + let b = CrashTestDummy::new(1); + let c = CrashTestDummy::new(2); + let mut map = BTreeMap::new(); + map.insert(a.spawn(Panic::Never), ()); + map.insert(b.spawn(Panic::InDrop), ()); + map.insert(c.spawn(Panic::Never), ()); + + catch_unwind(move || map.extract_if(|dummy, _| dummy.query(true)).for_each(drop)) + .unwrap_err(); + + assert_eq!(a.queried(), 1); + assert_eq!(b.queried(), 1); + assert_eq!(c.queried(), 0); + assert_eq!(a.dropped(), 1); + assert_eq!(b.dropped(), 1); + assert_eq!(c.dropped(), 1); + } + + #[test] + #[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] + fn pred_panic_leak() { + let a = CrashTestDummy::new(0); + let b = CrashTestDummy::new(1); + let c = CrashTestDummy::new(2); + let mut map = BTreeMap::new(); + map.insert(a.spawn(Panic::Never), ()); + map.insert(b.spawn(Panic::InQuery), ()); + map.insert(c.spawn(Panic::InQuery), ()); + + catch_unwind(AssertUnwindSafe(|| { + map.extract_if(|dummy, _| dummy.query(true)).for_each(drop) + })) + .unwrap_err(); + + assert_eq!(a.queried(), 1); + assert_eq!(b.queried(), 1); + assert_eq!(c.queried(), 0); + assert_eq!(a.dropped(), 1); + assert_eq!(b.dropped(), 0); + assert_eq!(c.dropped(), 0); + assert_eq!(map.len(), 2); + assert_eq!(map.first_entry().unwrap().key().id(), 1); + assert_eq!(map.last_entry().unwrap().key().id(), 2); + map.check(); + } + + // Same as above, but attempt to use the iterator again after the panic in the predicate + #[test] + #[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] + fn pred_panic_reuse() { + let a = CrashTestDummy::new(0); + let b = CrashTestDummy::new(1); + let c = CrashTestDummy::new(2); + let mut map = BTreeMap::new(); + map.insert(a.spawn(Panic::Never), ()); + map.insert(b.spawn(Panic::InQuery), ()); + map.insert(c.spawn(Panic::InQuery), ()); + + { + let mut it = map.extract_if(|dummy, _| dummy.query(true)); + catch_unwind(AssertUnwindSafe(|| while it.next().is_some() {})).unwrap_err(); + // Iterator behaviour after a panic is explicitly unspecified, + // so this is just the current implementation: + let result = catch_unwind(AssertUnwindSafe(|| it.next())); + assert!(matches!(result, Ok(None))); + } + + 
assert_eq!(a.queried(), 1);
+        assert_eq!(b.queried(), 1);
+        assert_eq!(c.queried(), 0);
+        assert_eq!(a.dropped(), 1);
+        assert_eq!(b.dropped(), 0);
+        assert_eq!(c.dropped(), 0);
+        assert_eq!(map.len(), 2);
+        assert_eq!(map.first_entry().unwrap().key().id(), 1);
+        assert_eq!(map.last_entry().unwrap().key().id(), 2);
+        map.check();
+    }
+}
+
+#[test]
+fn test_borrow() {
+    // make sure these compile -- using the Borrow trait
+    {
+        let mut map = BTreeMap::new();
+        map.insert("0".to_string(), 1);
+        assert_eq!(map["0"], 1);
+    }
+
+    {
+        let mut map = BTreeMap::new();
+        map.insert(Box::new(0), 1);
+        assert_eq!(map[&0], 1);
+    }
+
+    {
+        let mut map = BTreeMap::new();
+        map.insert(Box::new([0, 1]) as Box<[i32]>, 1);
+        assert_eq!(map[&[0, 1][..]], 1);
+    }
+
+    {
+        let mut map = BTreeMap::new();
+        map.insert(Rc::new(0), 1);
+        assert_eq!(map[&0], 1);
+    }
+
+    #[allow(dead_code)]
+    fn get<T: Ord>(v: &BTreeMap<Box<T>, ()>, t: &T) {
+        let _ = v.get(t);
+    }
+
+    #[allow(dead_code)]
+    fn get_mut<T: Ord>(v: &mut BTreeMap<Box<T>, ()>, t: &T) {
+        let _ = v.get_mut(t);
+    }
+
+    #[allow(dead_code)]
+    fn get_key_value<T: Ord>(v: &BTreeMap<Box<T>, ()>, t: &T) {
+        let _ = v.get_key_value(t);
+    }
+
+    #[allow(dead_code)]
+    fn contains_key<T: Ord>(v: &BTreeMap<Box<T>, ()>, t: &T) {
+        let _ = v.contains_key(t);
+    }
+
+    #[allow(dead_code)]
+    fn range<T: Ord>(v: &BTreeMap<Box<T>, ()>, t: T) {
+        let _ = v.range(t..);
+    }
+
+    #[allow(dead_code)]
+    fn range_mut<T: Ord>(v: &mut BTreeMap<Box<T>, ()>, t: T) {
+        let _ = v.range_mut(t..);
+    }
+
+    #[allow(dead_code)]
+    fn remove<T: Ord>(v: &mut BTreeMap<Box<T>, ()>, t: &T) {
+        v.remove(t);
+    }
+
+    #[allow(dead_code)]
+    fn remove_entry<T: Ord>(v: &mut BTreeMap<Box<T>, ()>, t: &T) {
+        v.remove_entry(t);
+    }
+
+    #[allow(dead_code)]
+    fn split_off<T: Ord>(v: &mut BTreeMap<Box<T>, ()>, t: &T) {
+        v.split_off(t);
+    }
+}
+
+#[test]
+fn test_entry() {
+    let xs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
+
+    let mut map = BTreeMap::from(xs);
+
+    // Existing key (insert)
+    match map.entry(1) {
+        Vacant(_) => unreachable!(),
+        Occupied(mut view) => {
+            assert_eq!(view.get(), &10);
+            assert_eq!(view.insert(100), 10);
+        }
+    }
+    assert_eq!(map.get(&1).unwrap(), &100);
+    assert_eq!(map.len(), 6);
+
+    // Existing key (update)
+    match map.entry(2) {
+        Vacant(_) => unreachable!(),
+        Occupied(mut view) => {
+            let v = view.get_mut();
+            *v *= 10;
+        }
+    }
+    assert_eq!(map.get(&2).unwrap(), &200);
+    assert_eq!(map.len(), 6);
+    map.check();
+
+    // Existing key (take)
+    match map.entry(3) {
+        Vacant(_) => unreachable!(),
+        Occupied(view) => {
+            assert_eq!(view.remove(), 30);
+        }
+    }
+    assert_eq!(map.get(&3), None);
+    assert_eq!(map.len(), 5);
+    map.check();
+
+    // Inexistent key (insert)
+    match map.entry(10) {
+        Occupied(_) => unreachable!(),
+        Vacant(view) => {
+            assert_eq!(*view.insert(1000), 1000);
+        }
+    }
+    assert_eq!(map.get(&10).unwrap(), &1000);
+    assert_eq!(map.len(), 6);
+    map.check();
+}
+
+#[test]
+fn test_extend_ref() {
+    let mut a = BTreeMap::new();
+    a.insert(1, "one");
+    let mut b = BTreeMap::new();
+    b.insert(2, "two");
+    b.insert(3, "three");
+
+    a.extend(&b);
+
+    assert_eq!(a.len(), 3);
+    assert_eq!(a[&1], "one");
+    assert_eq!(a[&2], "two");
+    assert_eq!(a[&3], "three");
+    a.check();
+}
+
+#[test]
+fn test_zst() {
+    let mut m = BTreeMap::new();
+    assert_eq!(m.len(), 0);
+
+    assert_eq!(m.insert((), ()), None);
+    assert_eq!(m.len(), 1);
+
+    assert_eq!(m.insert((), ()), Some(()));
+    assert_eq!(m.len(), 1);
+    assert_eq!(m.iter().count(), 1);
+
+    m.clear();
+    assert_eq!(m.len(), 0);
+
+    for _ in 0..100 {
+        m.insert((), ());
+    }
+
+    assert_eq!(m.len(), 1);
+    assert_eq!(m.iter().count(), 1);
+    m.check();
+}
+
+// This test's only purpose is to ensure that zero-sized keys with nonsensical orderings
+// do not cause segfaults when used with zero-sized values. All other map behavior is
+// undefined.
+#[test]
+fn test_bad_zst() {
+    #[derive(Clone, Copy, Debug)]
+    struct Bad;
+
+    impl PartialEq for Bad {
+        fn eq(&self, _: &Self) -> bool {
+            false
+        }
+    }
+
+    impl Eq for Bad {}
+
+    impl PartialOrd for Bad {
+        fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+            Some(self.cmp(other))
+        }
+    }
+
+    impl Ord for Bad {
+        fn cmp(&self, _: &Self) -> Ordering {
+            Ordering::Less
+        }
+    }
+
+    let mut m = BTreeMap::new();
+
+    for _ in 0..100 {
+        m.insert(Bad, Bad);
+    }
+    m.check();
+}
+
+#[test]
+fn test_clear() {
+    let mut map = BTreeMap::new();
+    for &len in &[
+        MIN_INSERTS_HEIGHT_1,
+        MIN_INSERTS_HEIGHT_2,
+        0,
+        node::CAPACITY,
+    ] {
+        for i in 0..len {
+            map.insert(i, ());
+        }
+        assert_eq!(map.len(), len);
+        map.clear();
+        map.check();
+        assert_eq!(map.height(), None);
+    }
+}
+
+#[test]
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
+fn test_clear_drop_panic_leak() {
+    let a = CrashTestDummy::new(0);
+    let b = CrashTestDummy::new(1);
+    let c = CrashTestDummy::new(2);
+
+    let mut map = BTreeMap::new();
+    map.insert(a.spawn(Panic::Never), ());
+    map.insert(b.spawn(Panic::InDrop), ());
+    map.insert(c.spawn(Panic::Never), ());
+
+    catch_unwind(AssertUnwindSafe(|| map.clear())).unwrap_err();
+    assert_eq!(a.dropped(), 1);
+    assert_eq!(b.dropped(), 1);
+    assert_eq!(c.dropped(), 1);
+    assert_eq!(map.len(), 0);
+
+    drop(map);
+    assert_eq!(a.dropped(), 1);
+    assert_eq!(b.dropped(), 1);
+    assert_eq!(c.dropped(), 1);
+}
+
+#[test]
+fn test_clone() {
+    let mut map = BTreeMap::new();
+    let size = MIN_INSERTS_HEIGHT_1;
+    assert_eq!(map.len(), 0);
+
+    for i in 0..size {
+        assert_eq!(map.insert(i, 10 * i), None);
+        assert_eq!(map.len(), i + 1);
+        map.check();
+        assert_eq!(map, map.clone());
+    }
+
+    for i in 0..size {
+        assert_eq!(map.insert(i, 100 * i), Some(10 * i));
+        assert_eq!(map.len(), size);
+        map.check();
+        assert_eq!(map, map.clone());
+    }
+
+    for i in 0..size / 2 {
+        assert_eq!(map.remove(&(i * 2)), Some(i * 200));
+        assert_eq!(map.len(), size - i - 1);
+        map.check();
+        assert_eq!(map, map.clone());
+    }
+
+    for i in 0..size / 2 {
+        assert_eq!(map.remove(&(2 * i)), None);
+        assert_eq!(map.remove(&(2 * i + 1)), Some(i * 200 + 100));
+        assert_eq!(map.len(), size / 2 - i - 1);
+        map.check();
+        assert_eq!(map, map.clone());
+    }
+
+    // Test a tree with 2 semi-full levels and a tree with 3 levels.
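+    // (MIN_INSERTS_HEIGHT_2 - 1 keys keep the tree at height 1; inserting key
+    // 0 afterwards is what pushes it over to height 2.)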
+ map = BTreeMap::from_iter((1..MIN_INSERTS_HEIGHT_2).map(|i| (i, i))); + assert_eq!(map.len(), MIN_INSERTS_HEIGHT_2 - 1); + assert_eq!(map, map.clone()); + map.insert(0, 0); + assert_eq!(map.len(), MIN_INSERTS_HEIGHT_2); + assert_eq!(map, map.clone()); + map.check(); +} + +fn test_clone_panic_leak(size: usize) { + for i in 0..size { + let dummies = Vec::from_iter((0..size).map(|id| CrashTestDummy::new(id))); + let map = BTreeMap::from_iter(dummies.iter().map(|dummy| { + let panic = if dummy.id == i { + Panic::InClone + } else { + Panic::Never + }; + (dummy.spawn(panic), ()) + })); + + catch_unwind(|| map.clone()).unwrap_err(); + for d in &dummies { + assert_eq!( + d.cloned(), + if d.id <= i { 1 } else { 0 }, + "id={}/{}", + d.id, + i + ); + assert_eq!( + d.dropped(), + if d.id < i { 1 } else { 0 }, + "id={}/{}", + d.id, + i + ); + } + assert_eq!(map.len(), size); + + drop(map); + for d in &dummies { + assert_eq!( + d.cloned(), + if d.id <= i { 1 } else { 0 }, + "id={}/{}", + d.id, + i + ); + assert_eq!( + d.dropped(), + if d.id < i { 2 } else { 1 }, + "id={}/{}", + d.id, + i + ); + } + } +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] +fn test_clone_panic_leak_height_0() { + test_clone_panic_leak(3) +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] +fn test_clone_panic_leak_height_1() { + test_clone_panic_leak(MIN_INSERTS_HEIGHT_1) +} + +#[test] +fn test_clone_from() { + let mut map1 = BTreeMap::new(); + let max_size = MIN_INSERTS_HEIGHT_1; + + // Range to max_size inclusive, because i is the size of map1 being tested. + for i in 0..=max_size { + let mut map2 = BTreeMap::new(); + for j in 0..i { + let mut map1_copy = map2.clone(); + map1_copy.clone_from(&map1); // small cloned from large + assert_eq!(map1_copy, map1); + let mut map2_copy = map1.clone(); + map2_copy.clone_from(&map2); // large cloned from small + assert_eq!(map2_copy, map2); + map2.insert(100 * j + 1, 2 * j + 1); + } + map2.clone_from(&map1); // same length + map2.check(); + assert_eq!(map2, map1); + map1.insert(i, 10 * i); + map1.check(); + } +} + +#[allow(dead_code)] +fn assert_covariance() { + fn map_key<'new>(v: BTreeMap<&'static str, ()>) -> BTreeMap<&'new str, ()> { + v + } + fn map_val<'new>(v: BTreeMap<(), &'static str>) -> BTreeMap<(), &'new str> { + v + } + + fn iter_key<'a, 'new>(v: Iter<'a, &'static str, ()>) -> Iter<'a, &'new str, ()> { + v + } + fn iter_val<'a, 'new>(v: Iter<'a, (), &'static str>) -> Iter<'a, (), &'new str> { + v + } + + fn into_iter_key<'new>(v: IntoIter<&'static str, ()>) -> IntoIter<&'new str, ()> { + v + } + fn into_iter_val<'new>(v: IntoIter<(), &'static str>) -> IntoIter<(), &'new str> { + v + } + + fn into_keys_key<'new>(v: IntoKeys<&'static str, ()>) -> IntoKeys<&'new str, ()> { + v + } + fn into_keys_val<'new>(v: IntoKeys<(), &'static str>) -> IntoKeys<(), &'new str> { + v + } + + fn into_values_key<'new>(v: IntoValues<&'static str, ()>) -> IntoValues<&'new str, ()> { + v + } + fn into_values_val<'new>(v: IntoValues<(), &'static str>) -> IntoValues<(), &'new str> { + v + } + + fn range_key<'a, 'new>(v: Range<'a, &'static str, ()>) -> Range<'a, &'new str, ()> { + v + } + fn range_val<'a, 'new>(v: Range<'a, (), &'static str>) -> Range<'a, (), &'new str> { + v + } + + fn keys_key<'a, 'new>(v: Keys<'a, &'static str, ()>) -> Keys<'a, &'new str, ()> { + v + } + fn keys_val<'a, 'new>(v: Keys<'a, (), &'static str>) -> Keys<'a, (), &'new str> { + v + } + + fn values_key<'a, 'new>(v: Values<'a, &'static 
str, ()>) -> Values<'a, &'new str, ()> {
+        v
+    }
+    fn values_val<'a, 'new>(v: Values<'a, (), &'static str>) -> Values<'a, (), &'new str> {
+        v
+    }
+}
+
+#[allow(dead_code)]
+fn assert_sync() {
+    fn map<T: Sync>(v: &BTreeMap<T, T>) -> impl Sync + '_ {
+        v
+    }
+
+    fn into_iter<T: Sync>(v: BTreeMap<T, T>) -> impl Sync {
+        v.into_iter()
+    }
+
+    fn into_keys<T: Sync + Ord>(v: BTreeMap<T, T>) -> impl Sync {
+        v.into_keys()
+    }
+
+    fn into_values<T: Sync + Ord>(v: BTreeMap<T, T>) -> impl Sync {
+        v.into_values()
+    }
+
+    fn extract_if<T: Sync + Ord>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
+        v.extract_if(|_, _| false)
+    }
+
+    fn iter<T: Sync>(v: &BTreeMap<T, T>) -> impl Sync + '_ {
+        v.iter()
+    }
+
+    fn iter_mut<T: Sync>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
+        v.iter_mut()
+    }
+
+    fn keys<T: Sync>(v: &BTreeMap<T, T>) -> impl Sync + '_ {
+        v.keys()
+    }
+
+    fn values<T: Sync>(v: &BTreeMap<T, T>) -> impl Sync + '_ {
+        v.values()
+    }
+
+    fn values_mut<T: Sync>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
+        v.values_mut()
+    }
+
+    fn range<T: Sync + Ord>(v: &BTreeMap<T, T>) -> impl Sync + '_ {
+        v.range(..)
+    }
+
+    fn range_mut<T: Sync + Ord>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
+        v.range_mut(..)
+    }
+
+    fn entry<T: Sync + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
+        v.entry(Default::default())
+    }
+
+    fn occupied_entry<T: Sync + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
+        match v.entry(Default::default()) {
+            Occupied(entry) => entry,
+            _ => unreachable!(),
+        }
+    }
+
+    fn vacant_entry<T: Sync + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
+        match v.entry(Default::default()) {
+            Vacant(entry) => entry,
+            _ => unreachable!(),
+        }
+    }
+}
+
+#[allow(dead_code)]
+fn assert_send() {
+    fn map<T: Send>(v: BTreeMap<T, T>) -> impl Send {
+        v
+    }
+
+    fn into_iter<T: Send>(v: BTreeMap<T, T>) -> impl Send {
+        v.into_iter()
+    }
+
+    fn into_keys<T: Send + Ord>(v: BTreeMap<T, T>) -> impl Send {
+        v.into_keys()
+    }
+
+    fn into_values<T: Send + Ord>(v: BTreeMap<T, T>) -> impl Send {
+        v.into_values()
+    }
+
+    fn extract_if<T: Send + Ord>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
+        v.extract_if(|_, _| false)
+    }
+
+    fn iter<T: Send + Sync>(v: &BTreeMap<T, T>) -> impl Send + '_ {
+        v.iter()
+    }
+
+    fn iter_mut<T: Send>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
+        v.iter_mut()
+    }
+
+    fn keys<T: Send + Sync>(v: &BTreeMap<T, T>) -> impl Send + '_ {
+        v.keys()
+    }
+
+    fn values<T: Send + Sync>(v: &BTreeMap<T, T>) -> impl Send + '_ {
+        v.values()
+    }
+
+    fn values_mut<T: Send>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
+        v.values_mut()
+    }
+
+    fn range<T: Send + Sync + Ord>(v: &BTreeMap<T, T>) -> impl Send + '_ {
+        v.range(..)
+    }
+
+    fn range_mut<T: Send + Ord>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
+        v.range_mut(..)
+    }
+
+    fn entry<T: Send + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
+        v.entry(Default::default())
+    }
+
+    fn occupied_entry<T: Send + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
+        match v.entry(Default::default()) {
+            Occupied(entry) => entry,
+            _ => unreachable!(),
+        }
+    }
+
+    fn vacant_entry<T: Send + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
+        match v.entry(Default::default()) {
+            Vacant(entry) => entry,
+            _ => unreachable!(),
+        }
+    }
+}
+
+#[test]
+fn test_ord_absence() {
+    fn map<K>(mut map: BTreeMap<K, ()>) {
+        let _ = map.is_empty();
+        let _ = map.len();
+        map.clear();
+        let _ = map.iter();
+        let _ = map.iter_mut();
+        let _ = map.keys();
+        let _ = map.values();
+        let _ = map.values_mut();
+        if true {
+            let _ = map.into_values();
+        } else if true {
+            let _ = map.into_iter();
+        } else {
+            let _ = map.into_keys();
+        }
+    }
+
+    fn map_debug<K: Debug>(mut map: BTreeMap<K, ()>) {
+        rust_alloc::format!("{map:?}");
+        rust_alloc::format!("{:?}", map.iter());
+        rust_alloc::format!("{:?}", map.iter_mut());
+        rust_alloc::format!("{:?}", map.keys());
+        rust_alloc::format!("{:?}", map.values());
+        rust_alloc::format!("{:?}", map.values_mut());
+        if true {
+            rust_alloc::format!("{:?}", map.into_iter());
+        } else if true {
+            rust_alloc::format!("{:?}", map.into_keys());
+        } else {
+            rust_alloc::format!("{:?}", map.into_values());
+        }
+    }
+
+    fn map_clone<K: TryClone>(mut map: BTreeMap<K, ()>) {
+        map.clone_from(&map.clone());
+    }
+
+    #[derive(Debug, Clone)]
+    struct NonOrd;
+
+    impl TryClone for NonOrd {
+        fn try_clone(&self) -> Result<Self, Error> {
+            Ok(self.clone())
+        }
+    }
+
+    map(BTreeMap::<NonOrd, ()>::new());
+    map_debug(BTreeMap::<NonOrd, ()>::new());
+    map_clone(BTreeMap::<NonOrd, ()>::default());
+}
+
+#[test]
+fn test_occupied_entry_key() {
+    let mut a = BTreeMap::new();
+    let key = "hello there";
+    let value = "value goes here";
+    assert_eq!(a.height(), None);
+    a.insert(key, value);
+    assert_eq!(a.len(), 1);
+    assert_eq!(a[key], value);
+
+    match a.entry(key) {
+        Vacant(_) => panic!(),
+        Occupied(e) => assert_eq!(key, *e.key()),
+    }
+    assert_eq!(a.len(), 1);
+    assert_eq!(a[key], value);
+    a.check();
+}
+
+#[test]
+fn test_vacant_entry_key() {
+    let mut a = BTreeMap::new();
+    let key = "hello there";
+    let value = "value goes here";
+
+    assert_eq!(a.height(), None);
+    match a.entry(key) {
+        Occupied(_) => unreachable!(),
+        Vacant(e) => {
+            assert_eq!(key, *e.key());
+            e.insert(value);
+        }
+    }
+    assert_eq!(a.len(), 1);
+    assert_eq!(a[key], value);
+    a.check();
+}
+
+#[test]
+fn test_vacant_entry_no_insert() {
+    let mut a = BTreeMap::<&str, ()>::new();
+    let key = "hello there";
+
+    // Non-allocated
+    assert_eq!(a.height(), None);
+    match a.entry(key) {
+        Occupied(_) => unreachable!(),
+        Vacant(e) => assert_eq!(key, *e.key()),
+    }
+    // Ensures the tree has no root.
+    assert_eq!(a.height(), None);
+    a.check();
+
+    // Allocated but still empty
+    a.insert(key, ());
+    a.remove(&key);
+    assert_eq!(a.height(), Some(0));
+    assert!(a.is_empty());
+    match a.entry(key) {
+        Occupied(_) => unreachable!(),
+        Vacant(e) => assert_eq!(key, *e.key()),
+    }
+    // Ensures the allocated root is not changed.
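+    // (An empty tree that has never allocated reports height None, as asserted
+    // above; once a root node exists it stays allocated, so the height here
+    // remains Some(0) even though the map is empty.)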
+ assert_eq!(a.height(), Some(0)); + assert!(a.is_empty()); + a.check(); +} + +#[test] +fn test_first_last_entry() { + let mut a = BTreeMap::new(); + assert!(a.first_entry().is_none()); + assert!(a.last_entry().is_none()); + a.insert(1, 42); + assert_eq!(a.first_entry().unwrap().key(), &1); + assert_eq!(a.last_entry().unwrap().key(), &1); + a.insert(2, 24); + assert_eq!(a.first_entry().unwrap().key(), &1); + assert_eq!(a.last_entry().unwrap().key(), &2); + a.insert(0, 6); + assert_eq!(a.first_entry().unwrap().key(), &0); + assert_eq!(a.last_entry().unwrap().key(), &2); + let (k1, v1) = a.first_entry().unwrap().remove_entry(); + assert_eq!(k1, 0); + assert_eq!(v1, 6); + let (k2, v2) = a.last_entry().unwrap().remove_entry(); + assert_eq!(k2, 2); + assert_eq!(v2, 24); + assert_eq!(a.first_entry().unwrap().key(), &1); + assert_eq!(a.last_entry().unwrap().key(), &1); + a.check(); +} + +#[test] +fn test_pop_first_last() { + let mut map = BTreeMap::new(); + assert_eq!(map.pop_first(), None); + assert_eq!(map.pop_last(), None); + + map.insert(1, 10); + map.insert(2, 20); + map.insert(3, 30); + map.insert(4, 40); + + assert_eq!(map.len(), 4); + + let (key, val) = map.pop_first().unwrap(); + assert_eq!(key, 1); + assert_eq!(val, 10); + assert_eq!(map.len(), 3); + + let (key, val) = map.pop_first().unwrap(); + assert_eq!(key, 2); + assert_eq!(val, 20); + assert_eq!(map.len(), 2); + let (key, val) = map.pop_last().unwrap(); + assert_eq!(key, 4); + assert_eq!(val, 40); + assert_eq!(map.len(), 1); + + map.insert(5, 50); + map.insert(6, 60); + assert_eq!(map.len(), 3); + + let (key, val) = map.pop_first().unwrap(); + assert_eq!(key, 3); + assert_eq!(val, 30); + assert_eq!(map.len(), 2); + + let (key, val) = map.pop_last().unwrap(); + assert_eq!(key, 6); + assert_eq!(val, 60); + assert_eq!(map.len(), 1); + + let (key, val) = map.pop_last().unwrap(); + assert_eq!(key, 5); + assert_eq!(val, 50); + assert_eq!(map.len(), 0); + + assert_eq!(map.pop_first(), None); + assert_eq!(map.pop_last(), None); + + map.insert(7, 70); + map.insert(8, 80); + + let (key, val) = map.pop_last().unwrap(); + assert_eq!(key, 8); + assert_eq!(val, 80); + assert_eq!(map.len(), 1); + + let (key, val) = map.pop_last().unwrap(); + assert_eq!(key, 7); + assert_eq!(val, 70); + assert_eq!(map.len(), 0); + + assert_eq!(map.pop_first(), None); + assert_eq!(map.pop_last(), None); +} + +#[test] +fn test_get_key_value() { + let mut map = BTreeMap::new(); + + assert!(map.is_empty()); + assert_eq!(map.get_key_value(&1), None); + assert_eq!(map.get_key_value(&2), None); + + map.insert(1, 10); + map.insert(2, 20); + map.insert(3, 30); + + assert_eq!(map.len(), 3); + assert_eq!(map.get_key_value(&1), Some((&1, &10))); + assert_eq!(map.get_key_value(&3), Some((&3, &30))); + assert_eq!(map.get_key_value(&4), None); + + map.remove(&3); + + assert_eq!(map.len(), 2); + assert_eq!(map.get_key_value(&3), None); + assert_eq!(map.get_key_value(&2), Some((&2, &20))); +} + +#[test] +fn test_insert_into_full_height_0() { + let size = node::CAPACITY; + for pos in 0..=size { + let mut map = BTreeMap::from_iter((0..size).map(|i| (i * 2 + 1, ()))); + assert!(map.insert(pos * 2, ()).is_none()); + map.check(); + } +} + +#[test] +fn test_insert_into_full_height_1() { + let size = node::CAPACITY + 1 + node::CAPACITY; + for pos in 0..=size { + let mut map = BTreeMap::from_iter((0..size).map(|i| (i * 2 + 1, ()))); + map.compact(); + let root_node = map.root.as_ref().unwrap().reborrow(); + assert_eq!(root_node.len(), 1); + assert_eq!( + 
root_node.first_leaf_edge().into_node().len(), + node::CAPACITY + ); + assert_eq!(root_node.last_leaf_edge().into_node().len(), node::CAPACITY); + + assert!(map.insert(pos * 2, ()).is_none()); + map.check(); + } +} + +#[test] +fn test_try_insert() { + let mut map = BTreeMap::new(); + + assert!(map.is_empty()); + + assert_eq!(map.insert_or(1, 10).unwrap(), &10); + assert_eq!(map.insert_or(2, 20).unwrap(), &20); + + if let CustomError::Custom(err) = map.try_insert_or(2, 200).unwrap_err() { + assert_eq!(err.entry.key(), &2); + assert_eq!(err.entry.get(), &20); + assert_eq!(err.value, 200); + } else { + panic!() + } +} + +macro_rules! create_append_test { + ($name:ident, $len:expr) => { + #[test] + fn $name() { + let mut a = BTreeMap::new(); + for i in 0..8 { + a.insert(i, i); + } + + let mut b = BTreeMap::new(); + for i in 5..$len { + b.insert(i, 2 * i); + } + + a.append(&mut b); + + assert_eq!(a.len(), $len); + assert_eq!(b.len(), 0); + + for i in 0..$len { + if i < 5 { + assert_eq!(a[&i], i); + } else { + assert_eq!(a[&i], 2 * i); + } + } + + a.check(); + assert_eq!(a.remove(&($len - 1)), Some(2 * ($len - 1))); + assert_eq!(a.insert($len - 1, 20), None); + a.check(); + } + }; +} + +// These are mostly for testing the algorithm that "fixes" the right edge after insertion. +// Single node. +create_append_test!(test_append_9, 9); +// Two leafs that don't need fixing. +create_append_test!(test_append_17, 17); +// Two leafs where the second one ends up underfull and needs stealing at the end. +create_append_test!(test_append_14, 14); +// Two leafs where the second one ends up empty because the insertion finished at the root. +create_append_test!(test_append_12, 12); +// Three levels; insertion finished at the root. +create_append_test!(test_append_144, 144); +// Three levels; insertion finished at leaf while there is an empty node on the second level. +create_append_test!(test_append_145, 145); +// Tests for several randomly chosen sizes. 
+create_append_test!(test_append_170, 170); +create_append_test!(test_append_181, 181); +#[cfg(not(miri))] // Miri is too slow +create_append_test!(test_append_239, 239); +#[cfg(not(miri))] // Miri is too slow +create_append_test!(test_append_1700, 1700); + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] +fn test_append_drop_leak() { + let a = CrashTestDummy::new(0); + let b = CrashTestDummy::new(1); + let c = CrashTestDummy::new(2); + let mut left = BTreeMap::new(); + let mut right = BTreeMap::new(); + left.insert(a.spawn(Panic::Never), ()); + left.insert(b.spawn(Panic::InDrop), ()); // first duplicate key, dropped during append + left.insert(c.spawn(Panic::Never), ()); + right.insert(b.spawn(Panic::Never), ()); + right.insert(c.spawn(Panic::Never), ()); + + catch_unwind(move || left.append(&mut right)).unwrap_err(); + assert_eq!(a.dropped(), 1); + assert_eq!(b.dropped(), 1); // should be 2 were it not for Rust issue #47949 + assert_eq!(c.dropped(), 2); +} + +#[test] +fn test_append_ord_chaos() { + let mut map1 = BTreeMap::new(); + map1.insert(Cyclic3::A, ()); + map1.insert(Cyclic3::B, ()); + let mut map2 = BTreeMap::new(); + map2.insert(Cyclic3::A, ()); + map2.insert(Cyclic3::B, ()); + map2.insert(Cyclic3::C, ()); // lands first, before A + map2.insert(Cyclic3::B, ()); // lands first, before C + map1.check(); + map2.check(); // keys are not unique but still strictly ascending + assert_eq!(map1.len(), 2); + assert_eq!(map2.len(), 4); + map1.append(&mut map2); + assert_eq!(map1.len(), 5); + assert_eq!(map2.len(), 0); + map1.check(); + map2.check(); +} + +fn rand_data(len: usize) -> Vec<(u32, u32)> { + let mut rng = DeterministicRng::new(); + Vec::from_iter((0..len).map(|_| (rng.next(), rng.next()))) +} + +#[test] +fn test_split_off_empty_right() { + let mut data = rand_data(173); + + let mut map = BTreeMap::from_iter(data.clone()); + let right = map.split_off(&(data.iter().max().unwrap().0 + 1)); + map.check(); + right.check(); + + data.sort(); + assert!(map.into_iter().eq(data)); + assert!(right.into_iter().eq(None)); +} + +#[test] +fn test_split_off_empty_left() { + let mut data = rand_data(314); + + let mut map = BTreeMap::from_iter(data.clone()); + let right = map.split_off(&data.iter().min().unwrap().0); + map.check(); + right.check(); + + data.sort(); + assert!(map.into_iter().eq(None)); + assert!(right.into_iter().eq(data)); +} + +// In a tree with 3 levels, if all but a part of the first leaf node is split off, +// make sure fix_top eliminates both top levels. +#[test] +fn test_split_off_tiny_left_height_2() { + let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i)); + let mut left = BTreeMap::from_iter(pairs.clone()); + let right = left.split_off(&1); + left.check(); + right.check(); + assert_eq!(left.len(), 1); + assert_eq!(right.len(), MIN_INSERTS_HEIGHT_2 - 1); + assert_eq!(*left.first_key_value().unwrap().0, 0); + assert_eq!(*right.first_key_value().unwrap().0, 1); +} + +// In a tree with 3 levels, if only part of the last leaf node is split off, +// make sure fix_top eliminates both top levels. 
+#[test] +fn test_split_off_tiny_right_height_2() { + let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i)); + let last = MIN_INSERTS_HEIGHT_2 - 1; + let mut left = BTreeMap::from_iter(pairs.clone()); + assert_eq!(*left.last_key_value().unwrap().0, last); + let right = left.split_off(&last); + left.check(); + right.check(); + assert_eq!(left.len(), MIN_INSERTS_HEIGHT_2 - 1); + assert_eq!(right.len(), 1); + assert_eq!(*left.last_key_value().unwrap().0, last - 1); + assert_eq!(*right.last_key_value().unwrap().0, last); +} + +#[test] +fn test_split_off_halfway() { + let mut rng = DeterministicRng::new(); + for &len in &[node::CAPACITY, 25, 50, 75, 100] { + let mut data = Vec::from_iter((0..len).map(|_| (rng.next(), ()))); + // Insertion in non-ascending order creates some variation in node length. + let mut map = BTreeMap::from_iter(data.iter().copied()); + data.sort(); + let small_keys = data.iter().take(len / 2).map(|kv| kv.0); + let large_keys = data.iter().skip(len / 2).map(|kv| kv.0); + let split_key = large_keys.clone().next().unwrap(); + let right = map.split_off(&split_key); + map.check(); + right.check(); + assert!(map.keys().copied().eq(small_keys)); + assert!(right.keys().copied().eq(large_keys)); + } +} + +#[test] +fn test_split_off_large_random_sorted() { + // Miri is too slow + let mut data = if cfg!(miri) { + rand_data(529) + } else { + rand_data(1529) + }; + // special case with maximum height. + data.sort(); + + let mut map = BTreeMap::from_iter(data.clone()); + let key = data[data.len() / 2].0; + let right = map.split_off(&key); + map.check(); + right.check(); + + assert!(map + .into_iter() + .eq(data.clone().into_iter().filter(|x| x.0 < key))); + assert!(right + .into_iter() + .eq(data.into_iter().filter(|x| x.0 >= key))); +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] +fn test_into_iter_drop_leak_height_0() { + let a = CrashTestDummy::new(0); + let b = CrashTestDummy::new(1); + let c = CrashTestDummy::new(2); + let d = CrashTestDummy::new(3); + let e = CrashTestDummy::new(4); + let mut map = BTreeMap::new(); + map.insert("a", a.spawn(Panic::Never)); + map.insert("b", b.spawn(Panic::Never)); + map.insert("c", c.spawn(Panic::Never)); + map.insert("d", d.spawn(Panic::InDrop)); + map.insert("e", e.spawn(Panic::Never)); + + catch_unwind(move || drop(map.into_iter())).unwrap_err(); + + assert_eq!(a.dropped(), 1); + assert_eq!(b.dropped(), 1); + assert_eq!(c.dropped(), 1); + assert_eq!(d.dropped(), 1); + assert_eq!(e.dropped(), 1); +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] +fn test_into_iter_drop_leak_height_1() { + let size = MIN_INSERTS_HEIGHT_1; + for panic_point in vec![0, 1, size - 2, size - 1] { + let dummies = Vec::from_iter((0..size).map(|i| CrashTestDummy::new(i))); + let map = BTreeMap::from_iter((0..size).map(|i| { + let panic = if i == panic_point { + Panic::InDrop + } else { + Panic::Never + }; + (dummies[i].spawn(Panic::Never), dummies[i].spawn(panic)) + })); + catch_unwind(move || drop(map.into_iter())).unwrap_err(); + for i in 0..size { + assert_eq!(dummies[i].dropped(), 2); + } + } +} + +#[test] +fn test_into_keys() { + let map = BTreeMap::from([(1, 'a'), (2, 'b'), (3, 'c')]); + let keys = Vec::from_iter(map.into_keys()); + + assert_eq!(keys.len(), 3); + assert!(keys.contains(&1)); + assert!(keys.contains(&2)); + assert!(keys.contains(&3)); +} + +#[test] +fn test_into_values() { + let map = BTreeMap::from([(1, 'a'), (2, 'b'), (3, 'c')]); + let values = 
Vec::from_iter(map.into_values()); + + assert_eq!(values.len(), 3); + assert!(values.contains(&'a')); + assert!(values.contains(&'b')); + assert!(values.contains(&'c')); +} + +#[test] +fn test_insert_remove_intertwined() { + let loops = if cfg!(miri) { 100 } else { 1_000_000 }; + let mut map = BTreeMap::new(); + let mut i = 1; + let offset = 165; // somewhat arbitrarily chosen to cover some code paths + for _ in 0..loops { + i = (i + offset) & 0xFF; + map.insert(i, i); + map.remove(&(0xFF - i)); + } + map.check(); +} + +#[test] +fn test_insert_remove_intertwined_ord_chaos() { + let loops = if cfg!(miri) { 100 } else { 1_000_000 }; + let gov = Governor::new(); + let mut map = BTreeMap::new(); + let mut i = 1; + let offset = 165; // more arbitrarily copied from above + for _ in 0..loops { + i = (i + offset) & 0xFF; + map.insert(Governed(i, &gov), ()); + map.remove(&Governed(0xFF - i, &gov)); + gov.flip(); + } + map.check_invariants(); +} + +#[test] +fn from_array() { + let map = BTreeMap::from([(1, 2), (3, 4)]); + let unordered_duplicates = BTreeMap::from([(3, 4), (1, 2), (1, 2)]); + assert_eq!(map, unordered_duplicates); +} + +#[test] +fn test_cursor() { + let map = BTreeMap::from([(1, 'a'), (2, 'b'), (3, 'c')]); + + let mut cur = map.lower_bound(Bound::Unbounded); + assert_eq!(cur.key(), Some(&1)); + cur.move_next(); + assert_eq!(cur.key(), Some(&2)); + assert_eq!(cur.peek_next(), Some((&3, &'c'))); + cur.move_prev(); + assert_eq!(cur.key(), Some(&1)); + assert_eq!(cur.peek_prev(), None); + + let mut cur = map.upper_bound(Bound::Excluded(&1)); + assert_eq!(cur.key(), None); + cur.move_next(); + assert_eq!(cur.key(), Some(&1)); + cur.move_prev(); + assert_eq!(cur.key(), None); + assert_eq!(cur.peek_prev(), Some((&3, &'c'))); +} + +#[test] +fn test_cursor_mut() { + let mut map = BTreeMap::from([(1, 'a'), (3, 'c'), (5, 'e')]); + let mut cur = map.lower_bound_mut(Bound::Excluded(&3)); + assert_eq!(cur.key(), Some(&5)); + cur.insert_before(4, 'd'); + assert_eq!(cur.key(), Some(&5)); + assert_eq!(cur.peek_prev(), Some((&4, &mut 'd'))); + cur.move_next(); + assert_eq!(cur.key(), None); + cur.insert_before(6, 'f'); + assert_eq!(cur.key(), None); + assert_eq!(cur.remove_current(), None); + assert_eq!(cur.key(), None); + cur.insert_after(0, '?'); + assert_eq!(cur.key(), None); + assert_eq!( + map, + BTreeMap::from([(0, '?'), (1, 'a'), (3, 'c'), (4, 'd'), (5, 'e'), (6, 'f')]) + ); + + let mut cur = map.upper_bound_mut(Bound::Included(&5)); + assert_eq!(cur.key(), Some(&5)); + assert_eq!(cur.remove_current(), Some((5, 'e'))); + assert_eq!(cur.key(), Some(&6)); + assert_eq!(cur.remove_current_and_move_back(), Some((6, 'f'))); + assert_eq!(cur.key(), Some(&4)); + assert_eq!( + map, + BTreeMap::from([(0, '?'), (1, 'a'), (3, 'c'), (4, 'd')]) + ); +} + +#[should_panic(expected = "key must be ordered above the previous element")] +#[test] +fn test_cursor_mut_insert_before_1() { + let mut map = BTreeMap::from([(1, 'a'), (2, 'b'), (3, 'c')]); + let mut cur = map.upper_bound_mut(Bound::Included(&2)); + cur.insert_before(0, 'd'); +} + +#[should_panic(expected = "key must be ordered above the previous element")] +#[test] +fn test_cursor_mut_insert_before_2() { + let mut map = BTreeMap::from([(1, 'a'), (2, 'b'), (3, 'c')]); + let mut cur = map.upper_bound_mut(Bound::Included(&2)); + cur.insert_before(1, 'd'); +} + +#[should_panic(expected = "key must be ordered below the current element")] +#[test] +fn test_cursor_mut_insert_before_3() { + let mut map = BTreeMap::from([(1, 'a'), (2, 'b'), (3, 'c')]); + let mut 
cur = map.upper_bound_mut(Bound::Included(&2));
+    cur.insert_before(2, 'd');
+}
+
+#[should_panic(expected = "key must be ordered below the current element")]
+#[test]
+fn test_cursor_mut_insert_before_4() {
+    let mut map = BTreeMap::from([(1, 'a'), (2, 'b'), (3, 'c')]);
+    let mut cur = map.upper_bound_mut(Bound::Included(&2));
+    cur.insert_before(3, 'd');
+}
+
+#[should_panic(expected = "key must be ordered above the current element")]
+#[test]
+fn test_cursor_mut_insert_after_1() {
+    let mut map = BTreeMap::from([(1, 'a'), (2, 'b'), (3, 'c')]);
+    let mut cur = map.upper_bound_mut(Bound::Included(&2));
+    cur.insert_after(1, 'd');
+}
+
+#[should_panic(expected = "key must be ordered above the current element")]
+#[test]
+fn test_cursor_mut_insert_after_2() {
+    let mut map = BTreeMap::from([(1, 'a'), (2, 'b'), (3, 'c')]);
+    let mut cur = map.upper_bound_mut(Bound::Included(&2));
+    cur.insert_after(2, 'd');
+}
+
+#[should_panic(expected = "key must be ordered below the next element")]
+#[test]
+fn test_cursor_mut_insert_after_3() {
+    let mut map = BTreeMap::from([(1, 'a'), (2, 'b'), (3, 'c')]);
+    let mut cur = map.upper_bound_mut(Bound::Included(&2));
+    cur.insert_after(3, 'd');
+}
+
+#[should_panic(expected = "key must be ordered below the next element")]
+#[test]
+fn test_cursor_mut_insert_after_4() {
+    let mut map = BTreeMap::from([(1, 'a'), (2, 'b'), (3, 'c')]);
+    let mut cur = map.upper_bound_mut(Bound::Included(&2));
+    cur.insert_after(4, 'd');
+}
+
+#[test]
+fn cursor_peek_prev_agrees_with_cursor_mut() {
+    let mut map = BTreeMap::from([(1, 1), (2, 2), (3, 3)]);
+
+    let cursor = map.lower_bound(Bound::Excluded(&3));
+    assert!(cursor.key().is_none());
+
+    let prev = cursor.peek_prev();
+    assert_matches!(prev, Some((&3, _)));
+
+    // Shadow names so the two parts of this test match.
+    let mut cursor = map.lower_bound_mut(Bound::Excluded(&3));
+    assert!(cursor.key().is_none());
+
+    let prev = cursor.peek_prev();
+    assert_matches!(prev, Some((&3, _)));
+}
diff --git a/crates/rune-alloc/src/alloc/btree/mem.rs b/crates/rune-alloc/src/alloc/btree/mem.rs
new file mode 100644
index 000000000..650c78abc
--- /dev/null
+++ b/crates/rune-alloc/src/alloc/btree/mem.rs
@@ -0,0 +1,55 @@
+use core::mem;
+
+use crate::ptr;
+
+/// This replaces the value behind the `v` unique reference by calling the
+/// relevant function.
+///
+/// If a panic occurs in the `change` closure, the entire process will be aborted.
+#[allow(dead_code)] // keep as illustration and for future use
+#[inline]
+pub(crate) fn take_mut<T, E>(v: &mut T, change: impl FnOnce(T) -> Result<T, E>) -> Result<(), E> {
+    replace(v, |value| Ok((change(value)?, ())))
+}
+
+/// This replaces the value behind the `v` unique reference by calling the
+/// relevant function, and returns a result obtained along the way.
+///
+/// If a panic occurs in the `change` closure, the entire process will be aborted.
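+///
+/// For illustration, a sketch of typical usage (hypothetical caller code, not
+/// exercised by this module itself):
+///
+/// ```ignore
+/// let mut value = String::from("foo");
+/// // Swap in a new value while extracting the old value's length.
+/// let old_len = replace(&mut value, |s| {
+///     let n = s.len();
+///     Ok::<_, core::convert::Infallible>((String::from("bar"), n))
+/// })
+/// .unwrap();
+/// assert_eq!(old_len, 3);
+/// assert_eq!(value, "bar");
+/// ```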
+#[inline]
+pub(crate) fn replace<T, R, E>(
+    v: &mut T,
+    change: impl FnOnce(T) -> Result<(T, R), E>,
+) -> Result<R, E> {
+    struct PanicGuard;
+
+    impl Drop for PanicGuard {
+        fn drop(&mut self) {
+            ::rust_std::process::abort()
+        }
+    }
+
+    let guard = PanicGuard;
+    let value = unsafe { ptr::read(v) };
+    let (new_value, ret) = change(value)?;
+    unsafe {
+        ptr::write(v, new_value);
+    }
+    mem::forget(guard);
+    Ok(ret)
+}
diff --git a/crates/rune-alloc/src/alloc/btree/merge_iter.rs b/crates/rune-alloc/src/alloc/btree/merge_iter.rs
new file mode 100644
index 000000000..6ba3c1660
--- /dev/null
+++ b/crates/rune-alloc/src/alloc/btree/merge_iter.rs
@@ -0,0 +1,106 @@
+use core::cmp::Ordering;
+use core::fmt::{self, Debug};
+use core::iter::FusedIterator;
+
+/// Core of an iterator that merges the output of two strictly ascending iterators,
+/// for instance a union or a symmetric difference.
+pub(crate) struct MergeIterInner<I: Iterator> {
+    a: I,
+    b: I,
+    peeked: Option<Peeked<I>>,
+}
+
+/// Benchmarks faster than wrapping both iterators in a Peekable,
+/// probably because we can afford to impose a FusedIterator bound.
+#[derive(Clone, Debug)]
+enum Peeked<I: Iterator> {
+    A(I::Item),
+    B(I::Item),
+}
+
+impl<I: Iterator> Clone for MergeIterInner<I>
+where
+    I: Clone,
+    I::Item: Clone,
+{
+    fn clone(&self) -> Self {
+        Self {
+            a: self.a.clone(),
+            b: self.b.clone(),
+            peeked: self.peeked.clone(),
+        }
+    }
+}
+
+impl<I: Iterator> Debug for MergeIterInner<I>
+where
+    I: Debug,
+    I::Item: Debug,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("MergeIterInner")
+            .field(&self.a)
+            .field(&self.b)
+            .field(&self.peeked)
+            .finish()
+    }
+}
+
+impl<I: Iterator> MergeIterInner<I> {
+    /// Creates a new core for an iterator merging a pair of sources.
+    pub(crate) fn new(a: I, b: I) -> Self {
+        MergeIterInner { a, b, peeked: None }
+    }
+
+    /// Returns the next pair of items stemming from the pair of sources
+    /// being merged. If both returned options contain a value, that value
+    /// is equal and occurs in both sources. If one of the returned options
+    /// contains a value, that value doesn't occur in the other source (or
+    /// the sources are not strictly ascending). If neither returned option
+    /// contains a value, iteration has finished and subsequent calls will
+    /// return the same empty pair.
+    pub(crate) fn nexts<Cmp: Fn(&I::Item, &I::Item) -> Ordering>(
+        &mut self,
+        cmp: Cmp,
+    ) -> (Option<I::Item>, Option<I::Item>)
+    where
+        I: FusedIterator,
+    {
+        let mut a_next;
+        let mut b_next;
+        match self.peeked.take() {
+            Some(Peeked::A(next)) => {
+                a_next = Some(next);
+                b_next = self.b.next();
+            }
+            Some(Peeked::B(next)) => {
+                b_next = Some(next);
+                a_next = self.a.next();
+            }
+            None => {
+                a_next = self.a.next();
+                b_next = self.b.next();
+            }
+        }
+        if let (Some(ref a1), Some(ref b1)) = (&a_next, &b_next) {
+            match cmp(a1, b1) {
+                Ordering::Less => self.peeked = b_next.take().map(Peeked::B),
+                Ordering::Greater => self.peeked = a_next.take().map(Peeked::A),
+                Ordering::Equal => (),
+            }
+        }
+        (a_next, b_next)
+    }
+
+    /// Returns a pair of upper bounds for the `size_hint` of the final iterator.
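+    // Behaviour sketch of `nexts` on ascending inputs (illustrative only,
+    // not code from the upstream file):
+    //
+    //     let mut m = MergeIterInner::new([1, 3].into_iter(), [2, 3].into_iter());
+    //     assert_eq!(m.nexts(|a, b| a.cmp(b)), (Some(1), None));
+    //     assert_eq!(m.nexts(|a, b| a.cmp(b)), (None, Some(2)));
+    //     assert_eq!(m.nexts(|a, b| a.cmp(b)), (Some(3), Some(3)));
+    //     assert_eq!(m.nexts(|a, b| a.cmp(b)), (None, None));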
+ pub(crate) fn lens(&self) -> (usize, usize) + where + I: ExactSizeIterator, + { + match self.peeked { + Some(Peeked::A(_)) => (1 + self.a.len(), self.b.len()), + Some(Peeked::B(_)) => (self.a.len(), 1 + self.b.len()), + _ => (self.a.len(), self.b.len()), + } + } +} diff --git a/crates/rune-alloc/src/alloc/btree/mod.rs b/crates/rune-alloc/src/alloc/btree/mod.rs new file mode 100644 index 000000000..d1835294e --- /dev/null +++ b/crates/rune-alloc/src/alloc/btree/mod.rs @@ -0,0 +1,42 @@ +mod append; +mod borrow; +mod fix; +pub mod map; +mod mem; +mod merge_iter; +mod navigate; +mod node; +mod remove; +mod search; +pub mod set; +mod set_val; +mod split; + +use core::cmp::Ordering; + +use crate::alloc::AllocError; + +trait Recover { + type Key; + + fn get( + &self, + cx: &mut C, + key: &Q, + cmp: fn(&mut C, &Q, &Q) -> Result, + ) -> Result, E>; + + fn take( + &mut self, + cx: &mut C, + key: &Q, + cmp: fn(&mut C, &Q, &Q) -> Result, + ) -> Result, E>; + + fn try_replace( + &mut self, + cx: &mut C, + key: Self::Key, + cmp: fn(&mut C, &Q, &Q) -> Result, + ) -> Result, AllocError>, E>; +} diff --git a/crates/rune-alloc/src/alloc/btree/navigate.rs b/crates/rune-alloc/src/alloc/btree/navigate.rs new file mode 100644 index 000000000..2e6f2077b --- /dev/null +++ b/crates/rune-alloc/src/alloc/btree/navigate.rs @@ -0,0 +1,904 @@ +use core::borrow::Borrow; +use core::cmp::Ordering; +use core::hint; +use core::ops::RangeBounds; + +use crate::alloc::Allocator; +use crate::ptr; + +use super::node::{marker, ForceResult::*, Handle, NodeRef}; +use super::search::SearchBound; + +enum Never {} + +fn into_ok(result: Result) -> T { + match result { + Ok(value) => value, + Err(never) => match never {}, + } +} + +// `front` and `back` are always both `None` or both `Some`. +pub(crate) struct LeafRange { + front: Option, marker::Edge>>, + back: Option, marker::Edge>>, +} + +impl<'a, K: 'a, V: 'a> Clone for LeafRange, K, V> { + fn clone(&self) -> Self { + LeafRange { + front: self.front, + back: self.back, + } + } +} + +impl Default for LeafRange { + fn default() -> Self { + LeafRange { + front: None, + back: None, + } + } +} + +impl LeafRange { + pub(crate) fn none() -> Self { + LeafRange { + front: None, + back: None, + } + } + + fn is_empty(&self) -> bool { + self.front == self.back + } + + /// Temporarily takes out another, immutable equivalent of the same range. + pub(crate) fn reborrow(&self) -> LeafRange, K, V> { + LeafRange { + front: self.front.as_ref().map(|f| f.reborrow()), + back: self.back.as_ref().map(|b| b.reborrow()), + } + } +} + +impl<'a, K, V> LeafRange, K, V> { + #[inline] + pub(crate) fn next_checked(&mut self) -> Option<(&'a K, &'a V)> { + self.perform_next_checked(|kv| kv.into_kv()) + } + + #[inline] + pub(crate) fn next_back_checked(&mut self) -> Option<(&'a K, &'a V)> { + self.perform_next_back_checked(|kv| kv.into_kv()) + } +} + +impl<'a, K, V> LeafRange, K, V> { + #[inline] + pub(crate) fn next_checked(&mut self) -> Option<(&'a K, &'a mut V)> { + self.perform_next_checked(|kv| unsafe { ptr::read(kv) }.into_kv_valmut()) + } + + #[inline] + pub(crate) fn next_back_checked(&mut self) -> Option<(&'a K, &'a mut V)> { + self.perform_next_back_checked(|kv| unsafe { ptr::read(kv) }.into_kv_valmut()) + } +} + +impl LeafRange { + /// If possible, extract some result from the following KV and move to the edge beyond it. 
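+    ///
+    /// `next_checked` above instantiates `f` with `|kv| kv.into_kv()`, so a
+    /// sketch of the overall flow is (hypothetical `range`, not a doctest):
+    ///
+    /// ```ignore
+    /// while let Some((key, value)) = range.next_checked() {
+    ///     // `range.front` now sits on the edge just past this KV.
+    /// }
+    /// ```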
+ fn perform_next_checked(&mut self, f: F) -> Option + where + F: Fn(&Handle, marker::KV>) -> R, + { + if self.is_empty() { + None + } else { + into_ok(super::mem::replace(self.front.as_mut().unwrap(), |front| { + let kv = front.next_kv().ok().unwrap(); + let result = f(&kv); + Ok((kv.next_leaf_edge(), Some(result))) + })) + } + } + + /// If possible, extract some result from the preceding KV and move to the edge beyond it. + fn perform_next_back_checked(&mut self, f: F) -> Option + where + F: Fn(&Handle, marker::KV>) -> R, + { + if self.is_empty() { + None + } else { + into_ok(super::mem::replace(self.back.as_mut().unwrap(), |back| { + let kv = back.next_back_kv().ok().unwrap(); + let result = f(&kv); + Ok((kv.next_back_leaf_edge(), Some(result))) + })) + } + } +} + +enum LazyLeafHandle { + Root(NodeRef), // not yet descended + Edge(Handle, marker::Edge>), +} + +impl<'a, K: 'a, V: 'a> Clone for LazyLeafHandle, K, V> { + fn clone(&self) -> Self { + match self { + LazyLeafHandle::Root(root) => LazyLeafHandle::Root(*root), + LazyLeafHandle::Edge(edge) => LazyLeafHandle::Edge(*edge), + } + } +} + +impl Clone for LazyLeafHandle { + fn clone(&self) -> Self { + match self { + LazyLeafHandle::Root(root) => LazyLeafHandle::Root(*root), + LazyLeafHandle::Edge(edge) => LazyLeafHandle::Edge(*edge), + } + } +} + +impl LazyLeafHandle { + fn reborrow(&self) -> LazyLeafHandle, K, V> { + match self { + LazyLeafHandle::Root(root) => LazyLeafHandle::Root(root.reborrow()), + LazyLeafHandle::Edge(edge) => LazyLeafHandle::Edge(edge.reborrow()), + } + } +} + +// `front` and `back` are always both `None` or both `Some`. +pub(crate) struct LazyLeafRange { + front: Option>, + back: Option>, +} + +impl Default for LazyLeafRange { + fn default() -> Self { + LazyLeafRange { + front: None, + back: None, + } + } +} + +impl<'a, K: 'a, V: 'a> Clone for LazyLeafRange, K, V> { + fn clone(&self) -> Self { + LazyLeafRange { + front: self.front.clone(), + back: self.back.clone(), + } + } +} + +impl Clone for LazyLeafRange { + fn clone(&self) -> Self { + LazyLeafRange { + front: self.front.clone(), + back: self.back.clone(), + } + } +} + +impl LazyLeafRange { + pub(crate) fn none() -> Self { + LazyLeafRange { + front: None, + back: None, + } + } + + /// Temporarily takes out another, immutable equivalent of the same range. 
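+    /// The reborrowed copy shares the lazy state: an un-descended
+    /// `LazyLeafHandle::Root` stays un-descended in both copies until
+    /// `init_front`/`init_back` is called on one of them.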
+ pub(crate) fn reborrow(&self) -> LazyLeafRange, K, V> { + LazyLeafRange { + front: self.front.as_ref().map(|f| f.reborrow()), + back: self.back.as_ref().map(|b| b.reborrow()), + } + } +} + +impl<'a, K, V> LazyLeafRange, K, V> { + #[inline] + pub(crate) unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) { + unsafe { self.init_front().unwrap().next_unchecked() } + } + + #[inline] + pub(crate) unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) { + unsafe { self.init_back().unwrap().next_back_unchecked() } + } +} + +impl LazyLeafRange { + #[inline] + pub(crate) unsafe fn next_unchecked(&mut self) -> (*const K, *const V) { + unsafe { self.init_front().unwrap().next_unchecked() } + } + + #[inline] + pub(crate) unsafe fn next_back_unchecked(&mut self) -> (*const K, *const V) { + unsafe { self.init_back().unwrap().next_back_unchecked() } + } +} + +impl<'a, K, V> LazyLeafRange, K, V> { + #[inline] + pub(crate) unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) { + unsafe { self.init_front().unwrap().next_unchecked() } + } + + #[inline] + pub(crate) unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) { + unsafe { self.init_back().unwrap().next_back_unchecked() } + } +} + +impl LazyLeafRange { + fn take_front( + &mut self, + ) -> Option, marker::Edge>> { + match self.front.take()? { + LazyLeafHandle::Root(root) => Some(root.first_leaf_edge()), + LazyLeafHandle::Edge(edge) => Some(edge), + } + } + + #[inline] + pub(crate) unsafe fn deallocating_next_unchecked( + &mut self, + alloc: &A, + ) -> Handle, marker::KV> { + debug_assert!(self.front.is_some()); + let front = self.init_front().unwrap(); + unsafe { front.deallocating_next_unchecked(alloc) } + } + + #[inline] + pub(crate) unsafe fn deallocating_next_back_unchecked( + &mut self, + alloc: &A, + ) -> Handle, marker::KV> { + debug_assert!(self.back.is_some()); + let back = self.init_back().unwrap(); + unsafe { back.deallocating_next_back_unchecked(alloc) } + } + + #[inline] + pub(crate) fn deallocating_end(&mut self, alloc: &A) { + if let Some(front) = self.take_front() { + front.deallocating_end(alloc) + } + } +} + +impl LazyLeafRange { + fn init_front( + &mut self, + ) -> Option<&mut Handle, marker::Edge>> { + if let Some(LazyLeafHandle::Root(root)) = &self.front { + self.front = Some(LazyLeafHandle::Edge( + unsafe { ptr::read(root) }.first_leaf_edge(), + )); + } + match &mut self.front { + None => None, + Some(LazyLeafHandle::Edge(edge)) => Some(edge), + // SAFETY: the code above would have replaced it. + Some(LazyLeafHandle::Root(_)) => unsafe { hint::unreachable_unchecked() }, + } + } + + fn init_back( + &mut self, + ) -> Option<&mut Handle, marker::Edge>> { + if let Some(LazyLeafHandle::Root(root)) = &self.back { + self.back = Some(LazyLeafHandle::Edge( + unsafe { ptr::read(root) }.last_leaf_edge(), + )); + } + match &mut self.back { + None => None, + Some(LazyLeafHandle::Edge(edge)) => Some(edge), + // SAFETY: the code above would have replaced it. + Some(LazyLeafHandle::Root(_)) => unsafe { hint::unreachable_unchecked() }, + } + } +} + +impl NodeRef { + /// Finds the distinct leaf edges delimiting a specified range in a tree. + /// + /// If such distinct edges exist, returns them in ascending order, meaning + /// that a non-zero number of calls to `next_unchecked` on the `front` of + /// the result and/or calls to `next_back_unchecked` on the `back` of the + /// result will eventually reach the same edge. 
+ /// + /// If there are no such edges, i.e., if the tree contains no key within + /// the range, returns an empty `front` and `back`. + /// + /// # Safety + /// Unless `BorrowType` is `Immut`, do not use the handles to visit the same + /// KV twice. + unsafe fn find_leaf_edges_spanning_range( + self, + cx: &mut C, + range: R, + cmp: fn(&mut C, &Q, &Q) -> Result, + ) -> Result, E> + where + K: Borrow, + R: RangeBounds, + { + match self.search_tree_for_bifurcation(cx, &range, cmp)? { + Err(_) => Ok(LeafRange::none()), + Ok(( + node, + lower_edge_idx, + upper_edge_idx, + mut lower_child_bound, + mut upper_child_bound, + )) => { + let mut lower_edge = unsafe { Handle::new_edge(ptr::read(&node), lower_edge_idx) }; + let mut upper_edge = unsafe { Handle::new_edge(node, upper_edge_idx) }; + loop { + match (lower_edge.force(), upper_edge.force()) { + (Leaf(f), Leaf(b)) => { + return Ok(LeafRange { + front: Some(f), + back: Some(b), + }) + } + (Internal(f), Internal(b)) => { + (lower_edge, lower_child_bound) = + f.descend() + .find_lower_bound_edge(cx, lower_child_bound, cmp)?; + (upper_edge, upper_child_bound) = + b.descend() + .find_upper_bound_edge(cx, upper_child_bound, cmp)?; + } + _ => unreachable!("BTreeMap has different depths"), + } + } + } + } + } +} + +fn full_range( + root1: NodeRef, + root2: NodeRef, +) -> LazyLeafRange { + LazyLeafRange { + front: Some(LazyLeafHandle::Root(root1)), + back: Some(LazyLeafHandle::Root(root2)), + } +} + +impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> { + /// Finds the pair of leaf edges delimiting a specific range in a tree. + /// + /// The result is meaningful only if the tree is ordered by key, like the tree + /// in a `BTreeMap` is. + pub(crate) fn range_search( + self, + cx: &mut C, + range: R, + cmp: fn(&mut C, &Q, &Q) -> Result, + ) -> Result, K, V>, E> + where + K: Borrow, + R: RangeBounds, + { + // SAFETY: our borrow type is immutable. + unsafe { self.find_leaf_edges_spanning_range(cx, range, cmp) } + } + + /// Finds the pair of leaf edges delimiting an entire tree. + pub(crate) fn full_range(self) -> LazyLeafRange, K, V> { + full_range(self, self) + } +} + +impl NodeRef { + /// Finds the pair of leaf edges delimiting an entire tree. + pub(crate) fn full_range(self) -> LazyLeafRange { + full_range(self, self) + } +} + +impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> { + /// Splits a unique reference into a pair of leaf edges delimiting a specified range. + /// The result are non-unique references allowing (some) mutation, which must be used + /// carefully. + /// + /// The result is meaningful only if the tree is ordered by key, like the tree + /// in a `BTreeMap` is. + /// + /// # Safety + /// Do not use the duplicate handles to visit the same KV twice. + pub(crate) fn range_search( + self, + cx: &mut C, + range: R, + cmp: fn(&mut C, &Q, &Q) -> Result, + ) -> Result, K, V>, E> + where + K: Borrow, + R: RangeBounds, + { + unsafe { self.find_leaf_edges_spanning_range(cx, range, cmp) } + } + + /// Splits a unique reference into a pair of leaf edges delimiting the full range of the tree. + /// The results are non-unique references allowing mutation (of values only), so must be used + /// with care. + pub(crate) fn full_range(self) -> LazyLeafRange, K, V> { + // We duplicate the root NodeRef here -- we will never visit the same KV + // twice, and never end up with overlapping value references. 
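+        // The duplication itself is a plain bitwise copy via `ptr::read`;
+        // soundness rests entirely on the promise above, since both copies
+        // alias the same allocation.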
+ let self2 = unsafe { ptr::read(&self) }; + full_range(self, self2) + } +} + +impl NodeRef { + /// Splits a unique reference into a pair of leaf edges delimiting the full range of the tree. + /// The results are non-unique references allowing massively destructive mutation, so must be + /// used with the utmost care. + pub(crate) fn full_range(self) -> LazyLeafRange { + // We duplicate the root NodeRef here -- we will never access it in a way + // that overlaps references obtained from the root. + let self2 = unsafe { ptr::read(&self) }; + full_range(self, self2) + } +} + +impl + Handle, marker::Edge> +{ + /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV + /// on the right side, which is either in the same leaf node or in an ancestor node. + /// If the leaf edge is the last one in the tree, returns [`Result::Err`] with the root node. + pub(crate) fn next_kv( + self, + ) -> Result< + Handle, marker::KV>, + NodeRef, + > { + let mut edge = self.forget_node_type(); + loop { + edge = match edge.right_kv() { + Ok(kv) => return Ok(kv), + Err(last_edge) => match last_edge.into_node().ascend() { + Ok(parent_edge) => parent_edge.forget_node_type(), + Err(root) => return Err(root), + }, + } + } + } + + /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV + /// on the left side, which is either in the same leaf node or in an ancestor node. + /// If the leaf edge is the first one in the tree, returns [`Result::Err`] with the root node. + pub(crate) fn next_back_kv( + self, + ) -> Result< + Handle, marker::KV>, + NodeRef, + > { + let mut edge = self.forget_node_type(); + loop { + edge = match edge.left_kv() { + Ok(kv) => return Ok(kv), + Err(last_edge) => match last_edge.into_node().ascend() { + Ok(parent_edge) => parent_edge.forget_node_type(), + Err(root) => return Err(root), + }, + } + } + } +} + +impl + Handle, marker::Edge> +{ + /// Given an internal edge handle, returns [`Result::Ok`] with a handle to the neighboring KV + /// on the right side, which is either in the same internal node or in an ancestor node. + /// If the internal edge is the last one in the tree, returns [`Result::Err`] with the root node. + fn next_kv( + self, + ) -> Result< + Handle, marker::KV>, + NodeRef, + > { + let mut edge = self; + loop { + edge = match edge.right_kv() { + Ok(internal_kv) => return Ok(internal_kv), + Err(last_edge) => match last_edge.into_node().ascend() { + Ok(parent_edge) => parent_edge, + Err(root) => return Err(root), + }, + } + } + } +} + +impl Handle, marker::Edge> { + /// Given a leaf edge handle into a dying tree, returns the next leaf edge + /// on the right side, and the key-value pair in between, if they exist. + /// + /// If the given edge is the last one in a leaf, this method deallocates + /// the leaf, as well as any ancestor nodes whose last edge was reached. + /// This implies that if no more key-value pair follows, the entire tree + /// will have been deallocated and there is nothing left to return. + /// + /// # Safety + /// - The given edge must not have been previously returned by counterpart + /// `deallocating_next_back`. + /// - The returned KV handle is only valid to access the key and value, + /// and only valid until the next call to a `deallocating_` method. 
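+    ///
+    /// A sketch of the intended shape of a consuming traversal (hypothetical;
+    /// `alloc` and the final disposal of each KV are assumed context):
+    ///
+    /// ```ignore
+    /// let mut edge = root.into_dying().first_leaf_edge();
+    /// while let Some((next, kv)) = unsafe { edge.deallocating_next(&alloc) } {
+    ///     // take ownership of the key and value out of `kv`, then continue
+    ///     edge = next;
+    /// }
+    /// ```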
+ unsafe fn deallocating_next( + self, + alloc: &A, + ) -> Option<( + Self, + Handle, marker::KV>, + )> { + let mut edge = self.forget_node_type(); + loop { + edge = match edge.right_kv() { + Ok(kv) => return Some((unsafe { ptr::read(&kv) }.next_leaf_edge(), kv)), + Err(last_edge) => { + match unsafe { last_edge.into_node().deallocate_and_ascend(alloc) } { + Some(parent_edge) => parent_edge.forget_node_type(), + None => return None, + } + } + } + } + } + + /// Given a leaf edge handle into a dying tree, returns the next leaf edge + /// on the left side, and the key-value pair in between, if they exist. + /// + /// If the given edge is the first one in a leaf, this method deallocates + /// the leaf, as well as any ancestor nodes whose first edge was reached. + /// This implies that if no more key-value pair follows, the entire tree + /// will have been deallocated and there is nothing left to return. + /// + /// # Safety + /// - The given edge must not have been previously returned by counterpart + /// `deallocating_next`. + /// - The returned KV handle is only valid to access the key and value, + /// and only valid until the next call to a `deallocating_` method. + unsafe fn deallocating_next_back( + self, + alloc: &A, + ) -> Option<( + Self, + Handle, marker::KV>, + )> { + let mut edge = self.forget_node_type(); + loop { + edge = match edge.left_kv() { + Ok(kv) => return Some((unsafe { ptr::read(&kv) }.next_back_leaf_edge(), kv)), + Err(last_edge) => { + match unsafe { last_edge.into_node().deallocate_and_ascend(alloc) } { + Some(parent_edge) => parent_edge.forget_node_type(), + None => return None, + } + } + } + } + } + + /// Deallocates a pile of nodes from the leaf up to the root. + /// This is the only way to deallocate the remainder of a tree after + /// `deallocating_next` and `deallocating_next_back` have been nibbling at + /// both sides of the tree, and have hit the same edge. As it is intended + /// only to be called when all keys and values have been returned, + /// no cleanup is done on any of the keys or values. + fn deallocating_end(self, alloc: &A) { + let mut edge = self.forget_node_type(); + while let Some(parent_edge) = unsafe { edge.into_node().deallocate_and_ascend(alloc) } { + edge = parent_edge.forget_node_type(); + } + } +} + +impl<'a, K, V> Handle, K, V, marker::Leaf>, marker::Edge> { + /// Moves the leaf edge handle to the next leaf edge and returns references to the + /// key and value in between. + /// + /// # Safety + /// There must be another KV in the direction travelled. + unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) { + into_ok(super::mem::replace(self, |leaf_edge| { + let kv = leaf_edge.next_kv().ok().unwrap(); + Ok((kv.next_leaf_edge(), kv.into_kv())) + })) + } + + /// Moves the leaf edge handle to the previous leaf edge and returns references to the + /// key and value in between. + /// + /// # Safety + /// There must be another KV in the direction travelled. + unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) { + into_ok(super::mem::replace(self, |leaf_edge| { + let kv = leaf_edge.next_back_kv().ok().unwrap(); + Ok((kv.next_back_leaf_edge(), kv.into_kv())) + })) + } +} + +impl Handle, marker::Edge> { + /// Moves the leaf edge handle to the next leaf edge and returns references to the + /// key and value in between. + /// + /// # Safety + /// There must be another KV in the direction travelled. 
+ unsafe fn next_unchecked(&mut self) -> (*const K, *const V) { + into_ok(super::mem::replace(self, |leaf_edge| { + let kv = leaf_edge.next_kv().ok().unwrap(); + Ok((kv.next_leaf_edge(), kv.into_kv_raw())) + })) + } + + /// Moves the leaf edge handle to the previous leaf edge and returns references to the + /// key and value in between. + /// + /// # Safety + /// There must be another KV in the direction travelled. + unsafe fn next_back_unchecked(&mut self) -> (*const K, *const V) { + into_ok(super::mem::replace(self, |leaf_edge| { + let kv = leaf_edge.next_back_kv().ok().unwrap(); + Ok((kv.next_back_leaf_edge(), kv.into_kv_raw())) + })) + } +} + +impl<'a, K, V> Handle, K, V, marker::Leaf>, marker::Edge> { + /// Moves the leaf edge handle to the next leaf edge and returns references to the + /// key and value in between. + /// + /// # Safety + /// There must be another KV in the direction travelled. + unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) { + let kv = into_ok(super::mem::replace(self, |leaf_edge| { + let kv = leaf_edge.next_kv().ok().unwrap(); + Ok((unsafe { ptr::read(&kv) }.next_leaf_edge(), kv)) + })); + // Doing this last is faster, according to benchmarks. + kv.into_kv_valmut() + } + + /// Moves the leaf edge handle to the previous leaf and returns references to the + /// key and value in between. + /// + /// # Safety + /// There must be another KV in the direction travelled. + unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) { + let kv = into_ok(super::mem::replace(self, |leaf_edge| { + let kv = leaf_edge.next_back_kv().ok().unwrap(); + Ok((unsafe { ptr::read(&kv) }.next_back_leaf_edge(), kv)) + })); + // Doing this last is faster, according to benchmarks. + kv.into_kv_valmut() + } +} + +impl Handle, marker::Edge> { + /// Moves the leaf edge handle to the next leaf edge and returns the key and value + /// in between, deallocating any node left behind while leaving the corresponding + /// edge in its parent node dangling. + /// + /// # Safety + /// - There must be another KV in the direction travelled. + /// - That KV was not previously returned by counterpart + /// `deallocating_next_back_unchecked` on any copy of the handles + /// being used to traverse the tree. + /// + /// The only safe way to proceed with the updated handle is to compare it, drop it, + /// or call this method or counterpart `deallocating_next_back_unchecked` again. + unsafe fn deallocating_next_unchecked( + &mut self, + alloc: &A, + ) -> Handle, marker::KV> { + into_ok(super::mem::replace(self, |leaf_edge| unsafe { + Ok(leaf_edge.deallocating_next(alloc).unwrap()) + })) + } + + /// Moves the leaf edge handle to the previous leaf edge and returns the key and value + /// in between, deallocating any node left behind while leaving the corresponding + /// edge in its parent node dangling. + /// + /// # Safety + /// - There must be another KV in the direction travelled. + /// - That leaf edge was not previously returned by counterpart + /// `deallocating_next_unchecked` on any copy of the handles + /// being used to traverse the tree. + /// + /// The only safe way to proceed with the updated handle is to compare it, drop it, + /// or call this method or counterpart `deallocating_next_unchecked` again. 
+ unsafe fn deallocating_next_back_unchecked( + &mut self, + alloc: &A, + ) -> Handle, marker::KV> { + into_ok(super::mem::replace(self, |leaf_edge| unsafe { + Ok::<_, Never>(leaf_edge.deallocating_next_back(alloc).unwrap()) + })) + } +} + +impl NodeRef { + /// Returns the leftmost leaf edge in or underneath a node - in other words, the edge + /// you need first when navigating forward (or last when navigating backward). + #[inline] + pub(crate) fn first_leaf_edge( + self, + ) -> Handle, marker::Edge> { + let mut node = self; + loop { + match node.force() { + Leaf(leaf) => return leaf.first_edge(), + Internal(internal) => node = internal.first_edge().descend(), + } + } + } + + /// Returns the rightmost leaf edge in or underneath a node - in other words, the edge + /// you need last when navigating forward (or first when navigating backward). + #[inline] + pub(crate) fn last_leaf_edge( + self, + ) -> Handle, marker::Edge> { + let mut node = self; + loop { + match node.force() { + Leaf(leaf) => return leaf.last_edge(), + Internal(internal) => node = internal.last_edge().descend(), + } + } + } +} + +pub(crate) enum Position { + Leaf(NodeRef), + Internal(NodeRef), + InternalKV(Handle, marker::KV>), +} + +impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> { + /// Visits leaf nodes and internal KVs in order of ascending keys, and also + /// visits internal nodes as a whole in a depth first order, meaning that + /// internal nodes precede their individual KVs and their child nodes. + pub(crate) fn visit_nodes_in_order(self, mut visit: F) + where + F: FnMut(Position, K, V>), + { + match self.force() { + Leaf(leaf) => visit(Position::Leaf(leaf)), + Internal(internal) => { + visit(Position::Internal(internal)); + let mut edge = internal.first_edge(); + loop { + edge = match edge.descend().force() { + Leaf(leaf) => { + visit(Position::Leaf(leaf)); + match edge.next_kv() { + Ok(kv) => { + visit(Position::InternalKV(kv)); + kv.right_edge() + } + Err(_) => return, + } + } + Internal(internal) => { + visit(Position::Internal(internal)); + internal.first_edge() + } + } + } + } + } + } + + /// Calculates the number of elements in a (sub)tree. + pub(crate) fn calc_length(self) -> usize { + let mut result = 0; + self.visit_nodes_in_order(|pos| match pos { + Position::Leaf(node) => result += node.len(), + Position::Internal(node) => result += node.len(), + Position::InternalKV(_) => (), + }); + result + } +} + +impl + Handle, marker::KV> +{ + /// Returns the leaf edge closest to a KV for forward navigation. + pub(crate) fn next_leaf_edge( + self, + ) -> Handle, marker::Edge> { + match self.force() { + Leaf(leaf_kv) => leaf_kv.right_edge(), + Internal(internal_kv) => { + let next_internal_edge = internal_kv.right_edge(); + next_internal_edge.descend().first_leaf_edge() + } + } + } + + /// Returns the leaf edge closest to a KV for backward navigation. + pub(crate) fn next_back_leaf_edge( + self, + ) -> Handle, marker::Edge> { + match self.force() { + Leaf(leaf_kv) => leaf_kv.left_edge(), + Internal(internal_kv) => { + let next_internal_edge = internal_kv.left_edge(); + next_internal_edge.descend().last_leaf_edge() + } + } + } +} + +impl NodeRef { + /// Returns the leaf edge corresponding to the first point at which the + /// given bound is true. 
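+    ///
+    /// For example (sketch): with keys `1, 2, 4` in the tree,
+    /// `SearchBound::Included(&3)` resolves to the leaf edge directly to the
+    /// left of `4`, so forward iteration from the returned edge starts at `4`.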
+ pub(crate) fn lower_bound( + self, + cx: &mut C, + mut bound: SearchBound<&Q>, + cmp: fn(&mut C, &Q, &Q) -> Result, + ) -> Result, marker::Edge>, E> + where + K: Borrow, + { + let mut node = self; + loop { + let (edge, new_bound) = node.find_lower_bound_edge(cx, bound, cmp)?; + match edge.force() { + Leaf(edge) => return Ok(edge), + Internal(edge) => { + node = edge.descend(); + bound = new_bound; + } + } + } + } + + /// Returns the leaf edge corresponding to the last point at which the + /// given bound is true. + pub(crate) fn upper_bound( + self, + cx: &mut C, + mut bound: SearchBound<&Q>, + cmp: fn(&mut C, &Q, &Q) -> Result, + ) -> Result, marker::Edge>, E> + where + K: Borrow, + { + let mut node = self; + loop { + let (edge, new_bound) = node.find_upper_bound_edge(cx, bound, cmp)?; + match edge.force() { + Leaf(edge) => return Ok(edge), + Internal(edge) => { + node = edge.descend(); + bound = new_bound; + } + } + } + } +} diff --git a/crates/rune-alloc/src/alloc/btree/node.rs b/crates/rune-alloc/src/alloc/btree/node.rs new file mode 100644 index 000000000..d75542077 --- /dev/null +++ b/crates/rune-alloc/src/alloc/btree/node.rs @@ -0,0 +1,2066 @@ +// This is an attempt at an implementation following the ideal +// +// ``` +// struct BTreeMap { +// height: usize, +// root: Option>> +// } +// +// struct Node { +// keys: [K; 2 * B - 1], +// vals: [V; 2 * B - 1], +// edges: [if height > 0 { Box> } else { () }; 2 * B], +// parent: Option<(NonNull>, u16)>, +// len: u16, +// } +// ``` +// +// Since Rust doesn't actually have dependent types and polymorphic recursion, +// we make do with lots of unsafety. + +// A major goal of this module is to avoid complexity by treating the tree as a generic (if +// weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such, +// this module doesn't care whether the entries are sorted, which nodes can be underfull, or +// even what underfull means. However, we do rely on a few invariants: +// +// - Trees must have uniform depth/height. This means that every path down to a leaf from a +// given node has exactly the same length. +// - A node of length `n` has `n` keys, `n` values, and `n + 1` edges. +// This implies that even an empty node has at least one edge. +// For a leaf node, "having an edge" only means we can identify a position in the node, +// since leaf edges are empty and need no data representation. In an internal node, +// an edge both identifies a position and contains a pointer to a child node. + +use core::alloc::Layout; +use core::marker::PhantomData; +use core::mem::{self, MaybeUninit}; +use core::slice::SliceIndex; + +use crate::alloc::{AllocError, Allocator}; +use crate::ptr::{self, NonNull}; + +const B: usize = 6; +pub(crate) const CAPACITY: usize = 2 * B - 1; +pub(crate) const MIN_LEN_AFTER_SPLIT: usize = B - 1; +const KV_IDX_CENTER: usize = B - 1; +const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1; +const EDGE_IDX_LEFT_OF_CENTER_N1: usize = EDGE_IDX_LEFT_OF_CENTER - 1; +const EDGE_IDX_RIGHT_OF_CENTER: usize = B; + +/// The underlying representation of leaf nodes and part of the representation of internal nodes. +struct LeafNode { + /// We want to be covariant in `K` and `V`. + parent: Option>>, + + /// This node's index into the parent node's `edges` array. + /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`. + /// This is only guaranteed to be initialized when `parent` is non-null. + parent_idx: MaybeUninit, + + /// The number of keys and values this node stores. 
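+    /// With `B = 6` this never exceeds `CAPACITY = 11`, so `u16` is ample.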
+ len: u16, + + /// The arrays storing the actual data of the node. Only the first `len` elements of each + /// array are initialized and valid. + keys: [MaybeUninit; CAPACITY], + vals: [MaybeUninit; CAPACITY], +} + +impl LeafNode { + /// Initializes a new `LeafNode` in-place. + unsafe fn init(this: *mut Self) { + // As a general policy, we leave fields uninitialized if they can be, as this should + // be both slightly faster and easier to track in Valgrind. + unsafe { + // parent_idx, keys, and vals are all MaybeUninit + ptr::addr_of_mut!((*this).parent).write(None); + ptr::addr_of_mut!((*this).len).write(0); + } + } + + /// Creates a new boxed `LeafNode`. + fn new(alloc: &A) -> Result, AllocError> { + unsafe { + let layout = Layout::new::(); + let ptr = alloc.allocate(layout)?.cast::(); + LeafNode::init(ptr.as_ptr()); + Ok(ptr) + } + } +} + +/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden +/// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an +/// `InternalNode` can be directly cast to a pointer to the underlying `LeafNode` portion of the +/// node, allowing code to act on leaf and internal nodes generically without having to even check +/// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`. +#[repr(C)] +// gdb_providers.py uses this type name for introspection. +struct InternalNode { + data: LeafNode, + + /// The pointers to the children of this node. `len + 1` of these are considered + /// initialized and valid, except that near the end, while the tree is held + /// through borrow type `Dying`, some of these pointers are dangling. + edges: [MaybeUninit>; 2 * B], +} + +impl InternalNode { + /// Creates a new boxed `InternalNode`. + /// + /// # Safety + /// An invariant of internal nodes is that they have at least one + /// initialized and valid edge. This function does not set up + /// such an edge. + unsafe fn new(alloc: &A) -> Result, AllocError> { + unsafe { + let layout = Layout::new::(); + let ptr = alloc.allocate(layout)?.cast::(); + // We only need to initialize the data; the edges are MaybeUninit. + LeafNode::init(ptr::addr_of_mut!((*ptr.as_ptr()).data)); + Ok(ptr) + } + } +} + +/// A managed, non-null pointer to a node. This is either an owned pointer to +/// `LeafNode` or an owned pointer to `InternalNode`. +/// +/// However, `BoxedNode` contains no information as to which of the two types +/// of nodes it actually contains, and, partially due to this lack of information, +/// is not a separate type and has no destructor. +type BoxedNode = NonNull>; + +// N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType` +// is `Mut`. This is technically wrong, but cannot result in any unsafety due to +// internal use of `NodeRef` because we stay completely generic over `K` and `V`. +// However, whenever a public type wraps `NodeRef`, make sure that it has the +// correct variance. +/// +/// A reference to a node. +/// +/// This type has a number of parameters that controls how it acts: +/// - `BorrowType`: A dummy type that describes the kind of borrow and carries a lifetime. +/// - When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`. +/// - When this is `ValMut<'a>`, the `NodeRef` acts roughly like `&'a Node` +/// with respect to keys and tree structure, but also allows many +/// mutable references to values throughout the tree to coexist. 
+/// - When this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`, +/// although insert methods allow a mutable pointer to a value to coexist. +/// - When this is `Owned`, the `NodeRef` acts roughly like `Box`, +/// but does not have a destructor, and must be cleaned up manually. +/// - When this is `Dying`, the `NodeRef` still acts roughly like `Box`, +/// but has methods to destroy the tree bit by bit, and ordinary methods, +/// while not marked as unsafe to call, can invoke UB if called incorrectly. +/// Since any `NodeRef` allows navigating through the tree, `BorrowType` +/// effectively applies to the entire tree, not just to the node itself. +/// - `K` and `V`: These are the types of keys and values stored in the nodes. +/// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is +/// `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the +/// `NodeRef` points to an internal node, and when this is `LeafOrInternal` the +/// `NodeRef` could be pointing to either type of node. +/// `Type` is named `NodeType` when used outside `NodeRef`. +/// +/// Both `BorrowType` and `NodeType` restrict what methods we implement, to +/// exploit static type safety. There are limitations in the way we can apply +/// such restrictions: +/// - For each type parameter, we can only define a method either generically +/// or for one particular type. For example, we cannot define a method like +/// `into_kv` generically for all `BorrowType`, or once for all types that +/// carry a lifetime, because we want it to return `&'a` references. +/// Therefore, we define it only for the least powerful type `Immut<'a>`. +/// - We cannot get implicit coercion from say `Mut<'a>` to `Immut<'a>`. +/// Therefore, we have to explicitly call `reborrow` on a more powerful +/// `NodeRef` in order to reach a method like `into_kv`. +/// +/// All methods on `NodeRef` that return some kind of reference, either: +/// - Take `self` by value, and return the lifetime carried by `BorrowType`. +/// Sometimes, to invoke such a method, we need to call `reborrow_mut`. +/// - Take `self` by reference, and (implicitly) return that reference's +/// lifetime, instead of the lifetime carried by `BorrowType`. That way, +/// the borrow checker guarantees that the `NodeRef` remains borrowed as long +/// as the returned reference is used. +/// The methods supporting insert bend this rule by returning a raw pointer, +/// i.e., a reference without any lifetime. +pub(crate) struct NodeRef { + /// The number of levels that the node and the level of leaves are apart, a + /// constant of the node that cannot be entirely described by `Type`, and that + /// the node itself does not store. We only need to store the height of the root + /// node, and derive every other node's height from it. + /// Must be zero if `Type` is `Leaf` and non-zero if `Type` is `Internal`. + height: usize, + /// The pointer to the leaf or internal node. The definition of `InternalNode` + /// ensures that the pointer is valid either way. + node: NonNull>, + _marker: PhantomData<(BorrowType, Type)>, +} + +/// The root node of an owned tree. +/// +/// Note that this does not have a destructor, and must be cleaned up manually. 
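+///
+/// A sketch of what "cleaned up manually" amounts to (hypothetical; the real
+/// drop path lives in the map implementation, and `alloc` is assumed context):
+///
+/// ```ignore
+/// let mut range = root.into_dying().full_range();
+/// // ... drain the range with the `deallocating_` methods, then free the
+/// // remaining spine of nodes:
+/// range.deallocating_end(&alloc);
+/// ```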
+pub(crate) type Root = NodeRef; + +impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef, K, V, Type> {} +impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef, K, V, Type> { + fn clone(&self) -> Self { + *self + } +} + +impl Copy for NodeRef {} +impl Clone for NodeRef { + fn clone(&self) -> Self { + *self + } +} + +unsafe impl Sync for NodeRef {} + +unsafe impl Send for NodeRef, K, V, Type> {} +unsafe impl Send for NodeRef {} +unsafe impl Send for NodeRef, K, V, Type> {} +unsafe impl Send for NodeRef, K, V, Type> {} +unsafe impl Send for NodeRef {} +unsafe impl Send for NodeRef {} + +impl NodeRef { + pub(crate) fn new_leaf(alloc: &A) -> Result { + Ok(Self::from_new_leaf(LeafNode::new(alloc)?)) + } + + fn from_new_leaf(leaf: NonNull>) -> Self { + NodeRef { + height: 0, + node: leaf, + _marker: PhantomData, + } + } +} + +impl NodeRef { + fn new_internal(child: Root, alloc: &A) -> Result { + let mut new_node = unsafe { InternalNode::new(alloc)? }; + + // SAFETY: new_node has been initialized to the point where we can + // construct a reference to it. + unsafe { + let new_node = new_node.as_mut(); + new_node.edges[0].write(child.node); + } + + Ok(unsafe { NodeRef::from_new_internal(new_node, child.height + 1) }) + } + + /// # Safety + /// `height` must not be zero. + unsafe fn from_new_internal(internal: NonNull>, height: usize) -> Self { + debug_assert!(height > 0); + let node = internal.cast(); + let mut this = NodeRef { + height, + node, + _marker: PhantomData, + }; + this.borrow_mut().correct_all_childrens_parent_links(); + this + } +} + +impl NodeRef { + /// Unpack a node reference that was packed as `NodeRef::parent`. + fn from_internal(node: NonNull>, height: usize) -> Self { + debug_assert!(height > 0); + NodeRef { + height, + node: node.cast(), + _marker: PhantomData, + } + } +} + +impl NodeRef { + /// Exposes the data of an internal node. + /// + /// Returns a raw ptr to avoid invalidating other references to this node. + fn as_internal_ptr(this: &Self) -> *mut InternalNode { + // SAFETY: the static node type is `Internal`. + this.node.as_ptr() as *mut InternalNode + } +} + +impl<'a, K, V> NodeRef, K, V, marker::Internal> { + /// Borrows exclusive access to the data of an internal node. + fn as_internal_mut(&mut self) -> &mut InternalNode { + let ptr = Self::as_internal_ptr(self); + unsafe { &mut *ptr } + } +} + +impl NodeRef { + /// Finds the length of the node. This is the number of keys or values. + /// The number of edges is `len() + 1`. + /// Note that, despite being safe, calling this function can have the side effect + /// of invalidating mutable references that unsafe code has created. + pub(crate) fn len(&self) -> usize { + // Crucially, we only access the `len` field here. If BorrowType is marker::ValMut, + // there might be outstanding mutable references to values that we must not invalidate. + unsafe { usize::from((*Self::as_leaf_ptr(self)).len) } + } + + /// Returns the number of levels that the node and leaves are apart. Zero + /// height means the node is a leaf itself. If you picture trees with the + /// root on top, the number says at which elevation the node appears. + /// If you picture trees with leaves on top, the number says how high + /// the tree extends above the node. + pub(crate) fn height(&self) -> usize { + self.height + } + + /// Temporarily takes out another, immutable reference to the same node. 
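+    ///
+    /// Unlike `reborrow_mut` further down, this is safe: an `Immut` view can
+    /// neither mutate nor free the node, so handing out a second one cannot
+    /// break uniqueness.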
+ pub(crate) fn reborrow(&self) -> NodeRef, K, V, Type> { + NodeRef { + height: self.height, + node: self.node, + _marker: PhantomData, + } + } + + /// Temporarily takes out another, raw pointer to the same node. + pub(crate) fn raw(&self) -> NodeRef { + NodeRef { + height: self.height, + node: self.node, + _marker: PhantomData, + } + } + + /// Exposes the leaf portion of any leaf or internal node. + /// + /// Returns a raw ptr to avoid invalidating other references to this node. + fn as_leaf_ptr(this: &Self) -> *mut LeafNode { + // The node must be valid for at least the LeafNode portion. + // This is not a reference in the NodeRef type because we don't know if + // it should be unique or shared. + this.node.as_ptr() + } +} + +impl NodeRef { + /// Finds the parent of the current node. Returns `Ok(handle)` if the current + /// node actually has a parent, where `handle` points to the edge of the parent + /// that points to the current node. Returns `Err(self)` if the current node has + /// no parent, giving back the original `NodeRef`. + /// + /// The method name assumes you picture trees with the root node on top. + /// + /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should + /// both, upon success, do nothing. + pub(crate) fn ascend( + self, + ) -> Result, marker::Edge>, Self> { + assert!(BorrowType::TRAVERSAL_PERMIT); + + // We need to use raw pointers to nodes because, if BorrowType is marker::ValMut, + // there might be outstanding mutable references to values that we must not invalidate. + let leaf_ptr: *const _ = Self::as_leaf_ptr(&self); + unsafe { (*leaf_ptr).parent } + .as_ref() + .map(|parent| Handle { + node: NodeRef::from_internal(*parent, self.height + 1), + idx: unsafe { usize::from((*leaf_ptr).parent_idx.assume_init()) }, + _marker: PhantomData, + }) + .ok_or(self) + } + + pub(crate) fn first_edge(self) -> Handle { + unsafe { Handle::new_edge(self, 0) } + } + + pub(crate) fn last_edge(self) -> Handle { + let len = self.len(); + unsafe { Handle::new_edge(self, len) } + } + + /// Note that `self` must be nonempty. + pub(crate) fn first_kv(self) -> Handle { + let len = self.len(); + assert!(len > 0); + unsafe { Handle::new_kv(self, 0) } + } + + /// Note that `self` must be nonempty. + pub(crate) fn last_kv(self) -> Handle { + let len = self.len(); + assert!(len > 0); + unsafe { Handle::new_kv(self, len - 1) } + } +} + +impl NodeRef { + /// Could be a public implementation of PartialEq, but only used in this module. + fn eq(&self, other: &Self) -> bool { + let Self { + node, + height, + _marker, + } = self; + if node.eq(&other.node) { + debug_assert_eq!(*height, other.height); + true + } else { + false + } + } +} + +impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { + /// Exposes the leaf portion of any leaf or internal node in an immutable tree. + fn into_leaf(self) -> &'a LeafNode { + let ptr = Self::as_leaf_ptr(&self); + // SAFETY: there can be no mutable references into this tree borrowed as `Immut`. + unsafe { &*ptr } + } + + /// Borrows a view into the keys stored in the node. + pub(crate) fn keys(&self) -> &[K] { + const unsafe fn slice_assume_init_ref(slice: &[MaybeUninit]) -> &[T] { + // SAFETY: casting `slice` to a `*const [T]` is safe since the + // caller guarantees that `slice` is initialized, and `MaybeUninit` + // is guaranteed to have the same layout as `T`. The pointer + // obtained is valid since it refers to memory owned by `slice` + // which is a reference and thus guaranteed to be valid for reads. 
+ unsafe { &*(slice as *const [MaybeUninit] as *const [T]) } + } + + let leaf = self.into_leaf(); + + unsafe { slice_assume_init_ref(leaf.keys.get_unchecked(..usize::from(leaf.len))) } + } +} + +impl NodeRef { + /// Exposes the leaf portion of any leaf or internal node in an immutable tree. + fn into_leaf(self) -> *const LeafNode { + let ptr = Self::as_leaf_ptr(&self); + // SAFETY: there can be no mutable references into this tree borrowed as `Raw`. + ptr as *const _ + } +} + +impl NodeRef { + /// Similar to `ascend`, gets a reference to a node's parent node, but also + /// deallocates the current node in the process. This is unsafe because the + /// current node will still be accessible despite being deallocated. + pub(crate) unsafe fn deallocate_and_ascend( + self, + alloc: &A, + ) -> Option, marker::Edge>> { + let height = self.height; + let node = self.node; + let ret = self.ascend().ok(); + unsafe { + alloc.deallocate( + node.cast(), + if height > 0 { + Layout::new::>() + } else { + Layout::new::>() + }, + ); + } + ret + } +} + +impl<'a, K, V, Type> NodeRef, K, V, Type> { + /// Temporarily takes out another mutable reference to the same node. Beware, as + /// this method is very dangerous, doubly so since it might not immediately appear + /// dangerous. + /// + /// Because mutable pointers can roam anywhere around the tree, the returned + /// pointer can easily be used to make the original pointer dangling, out of + /// bounds, or invalid under stacked borrow rules. + // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` + // that restricts the use of navigation methods on reborrowed pointers, + // preventing this unsafety. + unsafe fn reborrow_mut(&mut self) -> NodeRef, K, V, Type> { + NodeRef { + height: self.height, + node: self.node, + _marker: PhantomData, + } + } + + /// Borrows exclusive access to the leaf portion of a leaf or internal node. + fn as_leaf_mut(&mut self) -> &mut LeafNode { + let ptr = Self::as_leaf_ptr(self); + // SAFETY: we have exclusive access to the entire node. + unsafe { &mut *ptr } + } + + /// Offers exclusive access to the leaf portion of a leaf or internal node. + fn into_leaf_mut(self) -> &'a mut LeafNode { + let ptr = Self::as_leaf_ptr(&self); + // SAFETY: we have exclusive access to the entire node. + unsafe { &mut *ptr } + } + + /// Returns a dormant copy of this node with its lifetime erased which can + /// be reawakened later. + pub(crate) fn dormant(&self) -> NodeRef { + NodeRef { + height: self.height, + node: self.node, + _marker: PhantomData, + } + } +} + +impl NodeRef { + /// Revert to the unique borrow initially captured. + /// + /// # Safety + /// + /// The reborrow must have ended, i.e., the reference returned by `new` and + /// all pointers and references derived from it, must not be used anymore. + pub(crate) unsafe fn awaken<'a>(self) -> NodeRef, K, V, Type> { + NodeRef { + height: self.height, + node: self.node, + _marker: PhantomData, + } + } +} + +impl NodeRef { + /// Borrows exclusive access to the leaf portion of a dying leaf or internal node. + fn as_leaf_dying(&mut self) -> &mut LeafNode { + let ptr = Self::as_leaf_ptr(self); + // SAFETY: we have exclusive access to the entire node. + unsafe { &mut *ptr } + } +} + +impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { + /// Borrows exclusive access to an element of the key storage area. 
+ /// + /// # Safety + /// `index` is in bounds of 0..CAPACITY + unsafe fn key_area_mut(&mut self, index: I) -> &mut Output + where + I: SliceIndex<[MaybeUninit], Output = Output>, + { + // SAFETY: the caller will not be able to call further methods on self + // until the key slice reference is dropped, as we have unique access + // for the lifetime of the borrow. + unsafe { + self.as_leaf_mut() + .keys + .as_mut_slice() + .get_unchecked_mut(index) + } + } + + /// Borrows exclusive access to an element or slice of the node's value storage area. + /// + /// # Safety + /// `index` is in bounds of 0..CAPACITY + unsafe fn val_area_mut(&mut self, index: I) -> &mut Output + where + I: SliceIndex<[MaybeUninit], Output = Output>, + { + // SAFETY: the caller will not be able to call further methods on self + // until the value slice reference is dropped, as we have unique access + // for the lifetime of the borrow. + unsafe { + self.as_leaf_mut() + .vals + .as_mut_slice() + .get_unchecked_mut(index) + } + } +} + +impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Internal> { + /// Borrows exclusive access to an element or slice of the node's storage area for edge contents. + /// + /// # Safety + /// `index` is in bounds of 0..CAPACITY + 1 + unsafe fn edge_area_mut(&mut self, index: I) -> &mut Output + where + I: SliceIndex<[MaybeUninit>], Output = Output>, + { + // SAFETY: the caller will not be able to call further methods on self + // until the edge slice reference is dropped, as we have unique access + // for the lifetime of the borrow. + unsafe { + self.as_internal_mut() + .edges + .as_mut_slice() + .get_unchecked_mut(index) + } + } +} + +impl<'a, K, V, Type> NodeRef, K, V, Type> { + /// # Safety + /// - The node has more than `idx` initialized elements. + unsafe fn into_key_val_mut_at(self, idx: usize) -> (&'a K, &'a mut V) { + // We only create a reference to the one element we are interested in, + // to avoid aliasing with outstanding references to other elements, + // in particular, those returned to the caller in earlier iterations. + let leaf = Self::as_leaf_ptr(&self); + let keys = unsafe { ptr::addr_of!((*leaf).keys) }; + let vals = unsafe { ptr::addr_of_mut!((*leaf).vals) }; + // We must coerce to unsized array pointers because of Rust issue #74679. + let keys: *const [_] = keys; + let vals: *mut [_] = vals; + let key = unsafe { (*(keys as *const MaybeUninit).wrapping_add(idx)).assume_init_ref() }; + let val = unsafe { (*(vals as *mut MaybeUninit).wrapping_add(idx)).assume_init_mut() }; + (key, val) + } +} + +impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { + /// Borrows exclusive access to the length of the node. + pub(crate) fn len_mut(&mut self) -> &mut u16 { + &mut self.as_leaf_mut().len + } +} + +impl<'a, K, V> NodeRef, K, V, marker::Internal> { + /// # Safety + /// Every item returned by `range` is a valid edge index for the node. + unsafe fn correct_childrens_parent_links>(&mut self, range: R) { + for i in range { + debug_assert!(i <= self.len()); + unsafe { Handle::new_edge(self.reborrow_mut(), i) }.correct_parent_link(); + } + } + + fn correct_all_childrens_parent_links(&mut self) { + let len = self.len(); + unsafe { self.correct_childrens_parent_links(0..=len) }; + } +} + +impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> { + /// Sets the node's link to its parent edge, + /// without invalidating other references to the node. 
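+    ///
+    /// Upholds the invariant documented on `LeafNode`: afterwards,
+    /// `*node.parent.edges[node.parent_idx]` is again the node itself.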
+ fn set_parent_link(&mut self, parent: NonNull>, parent_idx: usize) { + let leaf = Self::as_leaf_mut(self); + leaf.parent = Some(parent); + leaf.parent_idx.write(parent_idx as u16); + } +} + +impl NodeRef { + /// Clears the root's link to its parent edge. + fn clear_parent_link(&mut self) { + let mut root_node = self.borrow_mut(); + let leaf = root_node.as_leaf_mut(); + leaf.parent = None; + } +} + +impl NodeRef { + /// Returns a new owned tree, with its own root node that is initially empty. + pub(crate) fn new(alloc: &A) -> Result { + Ok(NodeRef::new_leaf(alloc)?.forget_type()) + } + + /// Adds a new internal node with a single edge pointing to the previous root node, + /// make that new node the root node, and return it. This increases the height by 1 + /// and is the opposite of `pop_internal_level`. + pub(crate) fn push_internal_level( + &mut self, + alloc: &A, + ) -> Result, K, V, marker::Internal>, AllocError> { + super::mem::take_mut(self, |old_root| { + Ok(NodeRef::new_internal(old_root, alloc)?.forget_type()) + })?; + + // `self.borrow_mut()`, except that we just forgot we're internal now: + Ok(NodeRef { + height: self.height, + node: self.node, + _marker: PhantomData, + }) + } + + /// Removes the internal root node, using its first child as the new root node. + /// As it is intended only to be called when the root node has only one child, + /// no cleanup is done on any of the keys, values and other children. + /// This decreases the height by 1 and is the opposite of `push_internal_level`. + /// + /// Requires exclusive access to the `NodeRef` object but not to the root node; + /// it will not invalidate other handles or references to the root node. + /// + /// Panics if there is no internal level, i.e., if the root node is a leaf. + pub(crate) fn pop_internal_level(&mut self, alloc: &A) { + assert!(self.height > 0); + + let top = self.node; + + // SAFETY: we asserted to be internal. + let internal_self = unsafe { self.borrow_mut().cast_to_internal_unchecked() }; + // SAFETY: we borrowed `self` exclusively and its borrow type is exclusive. + let internal_node = unsafe { &mut *NodeRef::as_internal_ptr(&internal_self) }; + // SAFETY: the first edge is always initialized. + self.node = unsafe { internal_node.edges[0].assume_init_read() }; + self.height -= 1; + self.clear_parent_link(); + + unsafe { + alloc.deallocate(top.cast(), Layout::new::>()); + } + } +} + +impl NodeRef { + /// Mutably borrows the owned root node. Unlike `reborrow_mut`, this is safe + /// because the return value cannot be used to destroy the root, and there + /// cannot be other references to the tree. + pub(crate) fn borrow_mut(&mut self) -> NodeRef, K, V, Type> { + NodeRef { + height: self.height, + node: self.node, + _marker: PhantomData, + } + } + + /// Slightly mutably borrows the owned root node. + pub(crate) fn borrow_valmut(&mut self) -> NodeRef, K, V, Type> { + NodeRef { + height: self.height, + node: self.node, + _marker: PhantomData, + } + } + + /// Irreversibly transitions to a reference that permits traversal and offers + /// destructive methods and little else. + pub(crate) fn into_dying(self) -> NodeRef { + NodeRef { + height: self.height, + node: self.node, + _marker: PhantomData, + } + } +} + +impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Leaf> { + /// Adds a key-value pair to the end of the node, and returns + /// the mutable reference of the inserted value. 
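+    ///
+    /// Usage sketch (hypothetical `alloc`; the call panics once the node is
+    /// already at `CAPACITY`):
+    ///
+    /// ```ignore
+    /// let mut root = NodeRef::new_leaf(&alloc)?;
+    /// let val = root.borrow_mut().push(1, "one");
+    /// *val = "uno";
+    /// ```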
+ pub(crate) fn push(&mut self, key: K, val: V) -> &mut V { + let len = self.len_mut(); + let idx = usize::from(*len); + assert!(idx < CAPACITY); + *len += 1; + unsafe { + self.key_area_mut(idx).write(key); + self.val_area_mut(idx).write(val) + } + } +} + +impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Internal> { + /// Adds a key-value pair, and an edge to go to the right of that pair, + /// to the end of the node. + pub(crate) fn push(&mut self, key: K, val: V, edge: Root) { + assert!(edge.height == self.height - 1); + + let len = self.len_mut(); + let idx = usize::from(*len); + assert!(idx < CAPACITY); + *len += 1; + unsafe { + self.key_area_mut(idx).write(key); + self.val_area_mut(idx).write(val); + self.edge_area_mut(idx + 1).write(edge.node); + Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link(); + } + } +} + +impl NodeRef { + /// Removes any static information asserting that this node is a `Leaf` node. + pub(crate) fn forget_type(self) -> NodeRef { + NodeRef { + height: self.height, + node: self.node, + _marker: PhantomData, + } + } +} + +impl NodeRef { + /// Removes any static information asserting that this node is an `Internal` node. + pub(crate) fn forget_type(self) -> NodeRef { + NodeRef { + height: self.height, + node: self.node, + _marker: PhantomData, + } + } +} + +impl NodeRef { + /// Checks whether a node is an `Internal` node or a `Leaf` node. + pub(crate) fn force( + self, + ) -> ForceResult< + NodeRef, + NodeRef, + > { + if self.height == 0 { + ForceResult::Leaf(NodeRef { + height: self.height, + node: self.node, + _marker: PhantomData, + }) + } else { + ForceResult::Internal(NodeRef { + height: self.height, + node: self.node, + _marker: PhantomData, + }) + } + } +} + +impl<'a, K, V> NodeRef, K, V, marker::LeafOrInternal> { + /// Unsafely asserts to the compiler the static information that this node is a `Leaf`. + unsafe fn cast_to_leaf_unchecked(self) -> NodeRef, K, V, marker::Leaf> { + debug_assert!(self.height == 0); + NodeRef { + height: self.height, + node: self.node, + _marker: PhantomData, + } + } + + /// Unsafely asserts to the compiler the static information that this node is an `Internal`. + unsafe fn cast_to_internal_unchecked(self) -> NodeRef, K, V, marker::Internal> { + debug_assert!(self.height > 0); + NodeRef { + height: self.height, + node: self.node, + _marker: PhantomData, + } + } +} + +/// A reference to a specific key-value pair or edge within a node. The `Node` parameter +/// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key-value +/// pair) or `Edge` (signifying a handle on an edge). +/// +/// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to +/// a child node, these represent the spaces where child pointers would go between the key-value +/// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one +/// to the left of the node, one between the two pairs, and one at the right of the node. +pub(crate) struct Handle { + node: Node, + idx: usize, + _marker: PhantomData, +} + +impl Copy for Handle {} +// We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be +// `Clone`able is when it is an immutable reference and therefore `Copy`. +impl Clone for Handle { + fn clone(&self) -> Self { + *self + } +} + +impl Handle { + /// Retrieves the node that contains the edge or key-value pair this handle points to. 
+ pub(crate) fn into_node(self) -> Node { + self.node + } + + /// Returns the position of this handle in the node. + pub(crate) fn idx(&self) -> usize { + self.idx + } +} + +impl Handle, marker::KV> { + /// Creates a new handle to a key-value pair in `node`. + /// Unsafe because the caller must ensure that `idx < node.len()`. + pub(crate) unsafe fn new_kv(node: NodeRef, idx: usize) -> Self { + debug_assert!(idx < node.len()); + + Handle { + node, + idx, + _marker: PhantomData, + } + } + + pub(crate) fn left_edge(self) -> Handle, marker::Edge> { + unsafe { Handle::new_edge(self.node, self.idx) } + } + + pub(crate) fn right_edge(self) -> Handle, marker::Edge> { + unsafe { Handle::new_edge(self.node, self.idx + 1) } + } +} + +impl PartialEq + for Handle, HandleType> +{ + fn eq(&self, other: &Self) -> bool { + let Self { node, idx, _marker } = self; + node.eq(&other.node) && *idx == other.idx + } +} + +impl + Handle, HandleType> +{ + /// Temporarily takes out another immutable handle on the same location. + pub(crate) fn reborrow( + &self, + ) -> Handle, K, V, NodeType>, HandleType> { + // We can't use Handle::new_kv or Handle::new_edge because we don't know our type + Handle { + node: self.node.reborrow(), + idx: self.idx, + _marker: PhantomData, + } + } +} + +impl<'a, K, V, NodeType, HandleType> Handle, K, V, NodeType>, HandleType> { + /// Temporarily takes out another mutable handle on the same location. Beware, as + /// this method is very dangerous, doubly so since it might not immediately appear + /// dangerous. + /// + /// For details, see `NodeRef::reborrow_mut`. + pub(crate) unsafe fn reborrow_mut( + &mut self, + ) -> Handle, K, V, NodeType>, HandleType> { + // We can't use Handle::new_kv or Handle::new_edge because we don't know our type + Handle { + node: unsafe { self.node.reborrow_mut() }, + idx: self.idx, + _marker: PhantomData, + } + } + + /// Returns a dormant copy of this handle which can be reawakened later. + /// + /// See `DormantMutRef` for more details. + pub(crate) fn dormant( + &self, + ) -> Handle, HandleType> { + Handle { + node: self.node.dormant(), + idx: self.idx, + _marker: PhantomData, + } + } +} + +impl Handle, HandleType> { + /// Revert to the unique borrow initially captured. + /// + /// # Safety + /// + /// The reborrow must have ended, i.e., the reference returned by `new` and + /// all pointers and references derived from it, must not be used anymore. + pub(crate) unsafe fn awaken<'a>( + self, + ) -> Handle, K, V, NodeType>, HandleType> { + Handle { + node: unsafe { self.node.awaken() }, + idx: self.idx, + _marker: PhantomData, + } + } +} + +impl Handle, marker::Edge> { + /// Creates a new handle to an edge in `node`. + /// Unsafe because the caller must ensure that `idx <= node.len()`. + pub(crate) unsafe fn new_edge(node: NodeRef, idx: usize) -> Self { + debug_assert!(idx <= node.len()); + + Handle { + node, + idx, + _marker: PhantomData, + } + } + + pub(crate) fn left_kv( + self, + ) -> Result, marker::KV>, Self> { + if self.idx > 0 { + Ok(unsafe { Handle::new_kv(self.node, self.idx - 1) }) + } else { + Err(self) + } + } + + pub(crate) fn right_kv( + self, + ) -> Result, marker::KV>, Self> { + if self.idx < self.node.len() { + Ok(unsafe { Handle::new_kv(self.node, self.idx) }) + } else { + Err(self) + } + } +} + +pub(crate) enum LeftOrRight { + Left(T), + Right(T), +} + +/// Given an edge index where we want to insert into a node filled to capacity, +/// computes a sensible KV index of a split point and where to perform the insertion. 
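+/// For example, assuming the usual `B == 6` node size (`CAPACITY == 11`,
+/// `KV_IDX_CENTER == 5`): inserting at edge index 0 yields
+/// `(4, LeftOrRight::Left(0))`, while inserting at the far right (edge
+/// index 11) yields `(6, LeftOrRight::Right(4))`.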
+/// The goal of the split point is for its key and value to end up in a parent node; +/// the keys, values and edges to the left of the split point become the left child; +/// the keys, values and edges to the right of the split point become the right child. +fn splitpoint(edge_idx: usize) -> (usize, LeftOrRight) { + debug_assert!(edge_idx <= CAPACITY); + // Rust issue #74834 tries to explain these symmetric rules. + match edge_idx { + 0..=EDGE_IDX_LEFT_OF_CENTER_N1 => (KV_IDX_CENTER - 1, LeftOrRight::Left(edge_idx)), + EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Left(edge_idx)), + EDGE_IDX_RIGHT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Right(0)), + _ => ( + KV_IDX_CENTER + 1, + LeftOrRight::Right(edge_idx - (KV_IDX_CENTER + 1 + 1)), + ), + } +} + +impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, marker::Edge> { + /// Inserts a new key-value pair between the key-value pairs to the right and left of + /// this edge. This method assumes that there is enough space in the node for the new + /// pair to fit. + unsafe fn insert_fit( + mut self, + key: K, + val: V, + ) -> Handle, K, V, marker::Leaf>, marker::KV> { + debug_assert!(self.node.len() < CAPACITY); + let new_len = self.node.len() + 1; + + unsafe { + slice_insert(self.node.key_area_mut(..new_len), self.idx, key); + slice_insert(self.node.val_area_mut(..new_len), self.idx, val); + *self.node.len_mut() = new_len as u16; + + Handle::new_kv(self.node, self.idx) + } + } +} + +impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, marker::Edge> { + /// Inserts a new key-value pair between the key-value pairs to the right and left of + /// this edge. This method splits the node if there isn't enough room. + /// + /// Returns a dormant handle to the inserted node which can be reawakened + /// once splitting is complete. + fn insert( + self, + key: K, + val: V, + alloc: &A, + ) -> Result< + ( + Option>, + Handle, marker::KV>, + ), + AllocError, + > { + if self.node.len() < CAPACITY { + // SAFETY: There is enough space in the node for insertion. + let handle = unsafe { self.insert_fit(key, val) }; + Ok((None, handle.dormant())) + } else { + let (middle_kv_idx, insertion) = splitpoint(self.idx); + let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) }; + let mut result = middle.split(alloc)?; + let insertion_edge = match insertion { + LeftOrRight::Left(insert_idx) => unsafe { + Handle::new_edge(result.left.reborrow_mut(), insert_idx) + }, + LeftOrRight::Right(insert_idx) => unsafe { + Handle::new_edge(result.right.borrow_mut(), insert_idx) + }, + }; + // SAFETY: We just split the node, so there is enough space for + // insertion. + let handle = unsafe { insertion_edge.insert_fit(key, val).dormant() }; + Ok((Some(result), handle)) + } + } +} + +impl<'a, K, V> Handle, K, V, marker::Internal>, marker::Edge> { + /// Fixes the parent pointer and index in the child node that this edge + /// links to. This is useful when the ordering of edges has been changed, + fn correct_parent_link(self) { + // Create backpointer without invalidating other references to the node. + let ptr = unsafe { NonNull::new_unchecked(NodeRef::as_internal_ptr(&self.node)) }; + let idx = self.idx; + let mut child = self.descend(); + child.set_parent_link(ptr, idx); + } +} + +impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, marker::Edge> { + /// Inserts a new key-value pair and an edge that will go to the right of that new pair + /// between this edge and the key-value pair to the right of this edge. 
This method assumes + /// that there is enough space in the node for the new pair to fit. + fn insert_fit(&mut self, key: K, val: V, edge: Root) { + debug_assert!(self.node.len() < CAPACITY); + debug_assert!(edge.height == self.node.height - 1); + let new_len = self.node.len() + 1; + + unsafe { + slice_insert(self.node.key_area_mut(..new_len), self.idx, key); + slice_insert(self.node.val_area_mut(..new_len), self.idx, val); + slice_insert( + self.node.edge_area_mut(..new_len + 1), + self.idx + 1, + edge.node, + ); + *self.node.len_mut() = new_len as u16; + + self.node + .correct_childrens_parent_links(self.idx + 1..new_len + 1); + } + } + + /// Inserts a new key-value pair and an edge that will go to the right of that new pair + /// between this edge and the key-value pair to the right of this edge. This method splits + /// the node if there isn't enough room. + fn insert( + mut self, + key: K, + val: V, + edge: Root, + alloc: &A, + ) -> Result>, AllocError> { + assert!(edge.height == self.node.height - 1); + + if self.node.len() < CAPACITY { + self.insert_fit(key, val, edge); + Ok(None) + } else { + let (middle_kv_idx, insertion) = splitpoint(self.idx); + let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) }; + let mut result = middle.split(alloc)?; + let mut insertion_edge = match insertion { + LeftOrRight::Left(insert_idx) => unsafe { + Handle::new_edge(result.left.reborrow_mut(), insert_idx) + }, + LeftOrRight::Right(insert_idx) => unsafe { + Handle::new_edge(result.right.borrow_mut(), insert_idx) + }, + }; + insertion_edge.insert_fit(key, val, edge); + Ok(Some(result)) + } + } +} + +impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, marker::Edge> { + /// Inserts a new key-value pair between the key-value pairs to the right and left of + /// this edge. This method splits the node if there isn't enough room, and tries to + /// insert the split off portion into the parent node recursively, until the root is reached. + /// + /// If the returned result is some `SplitResult`, the `left` field will be the root node. + /// The returned pointer points to the inserted value, which in the case of `SplitResult` + /// is in the `left` or `right` tree. + pub(crate) fn insert_recursing( + self, + key: K, + value: V, + alloc: &A, + split_root: impl FnOnce(SplitResult<'a, K, V, marker::LeafOrInternal>) -> Result<(), AllocError>, + ) -> Result, K, V, marker::Leaf>, marker::KV>, AllocError> { + let (mut split, handle) = match self.insert(key, value, alloc)? { + // SAFETY: we have finished splitting and can now re-awaken the + // handle to the inserted element. + (None, handle) => return Ok(unsafe { handle.awaken() }), + (Some(split), handle) => (split.forget_node_type(), handle), + }; + + loop { + split = match split.left.ascend() { + Ok(parent) => { + match parent.insert(split.kv.0, split.kv.1, split.right, alloc)? { + // SAFETY: we have finished splitting and can now re-awaken the + // handle to the inserted element. + None => return Ok(unsafe { handle.awaken() }), + Some(split) => split.forget_node_type(), + } + } + Err(root) => { + split_root(SplitResult { + left: root, + ..split + })?; + // SAFETY: we have finished splitting and can now re-awaken the + // handle to the inserted element. + return Ok(unsafe { handle.awaken() }); + } + }; + } + } +} + +impl + Handle, marker::Edge> +{ + /// Finds the node pointed to by this edge. + /// + /// The method name assumes you picture trees with the root node on top. 
+ /// + /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should + /// both, upon success, do nothing. + pub(crate) fn descend(self) -> NodeRef { + assert!(BorrowType::TRAVERSAL_PERMIT); + + // We need to use raw pointers to nodes because, if BorrowType is + // marker::ValMut, there might be outstanding mutable references to + // values that we must not invalidate. There's no worry accessing the + // height field because that value is copied. Beware that, once the + // node pointer is dereferenced, we access the edges array with a + // reference (Rust issue #73987) and invalidate any other references + // to or inside the array, should any be around. + let parent_ptr = NodeRef::as_internal_ptr(&self.node); + let node = unsafe { + (*parent_ptr) + .edges + .get_unchecked(self.idx) + .assume_init_read() + }; + NodeRef { + node, + height: self.node.height - 1, + _marker: PhantomData, + } + } +} + +impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::KV> { + pub(crate) fn into_kv(self) -> (&'a K, &'a V) { + debug_assert!(self.idx < self.node.len()); + let leaf = self.node.into_leaf(); + let k = unsafe { leaf.keys.get_unchecked(self.idx).assume_init_ref() }; + let v = unsafe { leaf.vals.get_unchecked(self.idx).assume_init_ref() }; + (k, v) + } +} + +impl Handle, marker::KV> { + pub(crate) fn into_kv_raw(self) -> (*const K, *const V) { + debug_assert!(self.idx < self.node.len()); + let leaf = self.node.into_leaf(); + let k = unsafe { (*leaf).keys.get_unchecked(self.idx).assume_init_ref() }; + let v = unsafe { (*leaf).vals.get_unchecked(self.idx).assume_init_ref() }; + (k, v) + } +} + +impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::KV> { + pub(crate) fn key_mut(&mut self) -> &mut K { + unsafe { self.node.key_area_mut(self.idx).assume_init_mut() } + } + + pub(crate) fn into_val_mut(self) -> &'a mut V { + debug_assert!(self.idx < self.node.len()); + let leaf = self.node.into_leaf_mut(); + unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() } + } + + pub(crate) fn into_kv_valmut(self) -> (&'a K, &'a mut V) { + debug_assert!(self.idx < self.node.len()); + let leaf = self.node.into_leaf_mut(); + let k = unsafe { leaf.keys.get_unchecked(self.idx).assume_init_ref() }; + let v = unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() }; + (k, v) + } +} + +impl<'a, K, V, NodeType> Handle, K, V, NodeType>, marker::KV> { + pub(crate) fn into_kv_valmut(self) -> (&'a K, &'a mut V) { + unsafe { self.node.into_key_val_mut_at(self.idx) } + } +} + +impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::KV> { + pub(crate) fn kv_mut(&mut self) -> (&mut K, &mut V) { + debug_assert!(self.idx < self.node.len()); + // We cannot call separate key and value methods, because calling the second one + // invalidates the reference returned by the first. + unsafe { + let leaf = self.node.as_leaf_mut(); + let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_mut(); + let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_mut(); + (key, val) + } + } + + /// Replaces the key and value that the KV handle refers to. + pub(crate) fn replace_kv(&mut self, k: K, v: V) -> (K, V) { + let (key, val) = self.kv_mut(); + (mem::replace(key, k), mem::replace(val, v)) + } +} + +impl Handle, marker::KV> { + /// Extracts the key and value that the KV handle refers to. + /// # Safety + /// The node that the handle refers to must not yet have been deallocated. 
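+    ///
+    /// As a standalone illustration of the `assume_init_read` pattern used
+    /// here: it moves the value out while leaving the storage bytes behind,
+    /// so the slot must afterwards be treated as uninitialized (hypothetical
+    /// example, not this crate's API):
+    ///
+    /// ```
+    /// use core::mem::MaybeUninit;
+    ///
+    /// let mut slot = MaybeUninit::new(String::from("key"));
+    /// // Moves the `String` out; reading `slot` again would double-drop it.
+    /// let value = unsafe { slot.assume_init_read() };
+    /// assert_eq!(value, "key");
+    /// ```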
+ pub(crate) unsafe fn into_key_val(mut self) -> (K, V) { + debug_assert!(self.idx < self.node.len()); + let leaf = self.node.as_leaf_dying(); + unsafe { + let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_read(); + let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_read(); + (key, val) + } + } + + /// Drops the key and value that the KV handle refers to. + /// # Safety + /// The node that the handle refers to must not yet have been deallocated. + #[inline] + pub(crate) unsafe fn drop_key_val(mut self) { + debug_assert!(self.idx < self.node.len()); + let leaf = self.node.as_leaf_dying(); + unsafe { + leaf.keys.get_unchecked_mut(self.idx).assume_init_drop(); + leaf.vals.get_unchecked_mut(self.idx).assume_init_drop(); + } + } +} + +impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::KV> { + /// Helps implementations of `split` for a particular `NodeType`, + /// by taking care of leaf data. + fn split_leaf_data(&mut self, new_node: &mut LeafNode) -> (K, V) { + debug_assert!(self.idx < self.node.len()); + let old_len = self.node.len(); + let new_len = old_len - self.idx - 1; + new_node.len = new_len as u16; + unsafe { + let k = self.node.key_area_mut(self.idx).assume_init_read(); + let v = self.node.val_area_mut(self.idx).assume_init_read(); + + move_to_slice( + self.node.key_area_mut(self.idx + 1..old_len), + &mut new_node.keys[..new_len], + ); + move_to_slice( + self.node.val_area_mut(self.idx + 1..old_len), + &mut new_node.vals[..new_len], + ); + + *self.node.len_mut() = self.idx as u16; + (k, v) + } + } +} + +impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, marker::KV> { + /// Splits the underlying node into three parts: + /// + /// - The node is truncated to only contain the key-value pairs to the left of + /// this handle. + /// - The key and value pointed to by this handle are extracted. + /// - All the key-value pairs to the right of this handle are put into a newly + /// allocated node. + pub(crate) fn split( + mut self, + alloc: &A, + ) -> Result, AllocError> { + let mut new_node = LeafNode::new(alloc)?; + + let kv = self.split_leaf_data(unsafe { new_node.as_mut() }); + + let right = NodeRef::from_new_leaf(new_node); + + Ok(SplitResult { + left: self.node, + kv, + right, + }) + } + + /// Removes the key-value pair pointed to by this handle and returns it, along with the edge + /// that the key-value pair collapsed into. + pub(crate) fn remove( + mut self, + ) -> ( + (K, V), + Handle, K, V, marker::Leaf>, marker::Edge>, + ) { + let old_len = self.node.len(); + unsafe { + let k = slice_remove(self.node.key_area_mut(..old_len), self.idx); + let v = slice_remove(self.node.val_area_mut(..old_len), self.idx); + *self.node.len_mut() = (old_len - 1) as u16; + ((k, v), self.left_edge()) + } + } +} + +impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, marker::KV> { + /// Splits the underlying node into three parts: + /// + /// - The node is truncated to only contain the edges and key-value pairs to the + /// left of this handle. + /// - The key and value pointed to by this handle are extracted. + /// - All the edges and key-value pairs to the right of this handle are put into + /// a newly allocated node. + pub(crate) fn split( + mut self, + alloc: &A, + ) -> Result, AllocError> { + let old_len = self.node.len(); + unsafe { + let mut new_node = InternalNode::new(alloc)?; + + // SAFETY: new_node has been initialized to the point where we can + // construct a reference to it. 
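+            //
+            // The KV at `self.idx` moves up to the parent, while the edges at
+            // `self.idx + 1 ..= old_len` move into the new right sibling: one
+            // more edge than the `new_len` key-value pairs it receives.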
+ let kv = { + let new_node = new_node.as_mut(); + let kv = self.split_leaf_data(&mut new_node.data); + let new_len = usize::from(new_node.data.len); + move_to_slice( + self.node.edge_area_mut(self.idx + 1..old_len + 1), + &mut new_node.edges[..new_len + 1], + ); + kv + }; + + let height = self.node.height; + let right = NodeRef::from_new_internal(new_node, height); + + Ok(SplitResult { + left: self.node, + kv, + right, + }) + } + } +} + +/// Represents a session for evaluating and performing a balancing operation +/// around an internal key-value pair. +pub(crate) struct BalancingContext<'a, K, V> { + parent: Handle, K, V, marker::Internal>, marker::KV>, + left_child: NodeRef, K, V, marker::LeafOrInternal>, + right_child: NodeRef, K, V, marker::LeafOrInternal>, +} + +impl<'a, K, V> Handle, K, V, marker::Internal>, marker::KV> { + pub(crate) fn consider_for_balancing(self) -> BalancingContext<'a, K, V> { + let self1 = unsafe { ptr::read(&self) }; + let self2 = unsafe { ptr::read(&self) }; + BalancingContext { + parent: self, + left_child: self1.left_edge().descend(), + right_child: self2.right_edge().descend(), + } + } +} + +impl<'a, K, V> NodeRef, K, V, marker::LeafOrInternal> { + /// Chooses a balancing context involving the node as a child, thus between + /// the KV immediately to the left or to the right in the parent node. + /// Returns an `Err` if there is no parent. + /// Panics if the parent is empty. + /// + /// Prefers the left side, to be optimal if the given node is somehow + /// underfull, meaning here only that it has fewer elements than its left + /// sibling and than its right sibling, if they exist. In that case, + /// merging with the left sibling is faster, since we only need to move + /// the node's N elements, instead of shifting them to the right and moving + /// more than N elements in front. Stealing from the left sibling is also + /// typically faster, since we only need to shift the node's N elements to + /// the right, instead of shifting at least N of the sibling's elements to + /// the left. + pub(crate) fn choose_parent_kv(self) -> Result>, Self> { + match unsafe { ptr::read(&self) }.ascend() { + Ok(parent_edge) => match parent_edge.left_kv() { + Ok(left_parent_kv) => Ok(LeftOrRight::Left(BalancingContext { + parent: unsafe { ptr::read(&left_parent_kv) }, + left_child: left_parent_kv.left_edge().descend(), + right_child: self, + })), + Err(parent_edge) => match parent_edge.right_kv() { + Ok(right_parent_kv) => Ok(LeftOrRight::Right(BalancingContext { + parent: unsafe { ptr::read(&right_parent_kv) }, + left_child: self, + right_child: right_parent_kv.right_edge().descend(), + })), + Err(_) => unreachable!("empty internal node"), + }, + }, + Err(root) => Err(root), + } + } +} + +impl<'a, K, V> BalancingContext<'a, K, V> { + pub(crate) fn left_child_len(&self) -> usize { + self.left_child.len() + } + + pub(crate) fn right_child_len(&self) -> usize { + self.right_child.len() + } + + pub(crate) fn into_left_child(self) -> NodeRef, K, V, marker::LeafOrInternal> { + self.left_child + } + + pub(crate) fn into_right_child(self) -> NodeRef, K, V, marker::LeafOrInternal> { + self.right_child + } + + /// Returns whether merging is possible, i.e., whether there is enough room + /// in a node to combine the central KV with both adjacent child nodes. 
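+    ///
+    /// For example, with the usual `CAPACITY` of 11 and two minimal children
+    /// of length 5 each, merging is always possible: `5 + 1 + 5 == 11`
+    /// exactly fills the left child.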
+ pub(crate) fn can_merge(&self) -> bool { + self.left_child.len() + 1 + self.right_child.len() <= CAPACITY + } +} + +impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> { + /// Performs a merge and lets a closure decide what to return. + fn do_merge< + F: FnOnce( + NodeRef, K, V, marker::Internal>, + NodeRef, K, V, marker::LeafOrInternal>, + ) -> R, + R, + A: Allocator, + >( + self, + result: F, + alloc: &A, + ) -> R { + let Handle { + node: mut parent_node, + idx: parent_idx, + _marker, + } = self.parent; + let old_parent_len = parent_node.len(); + let mut left_node = self.left_child; + let old_left_len = left_node.len(); + let mut right_node = self.right_child; + let right_len = right_node.len(); + let new_left_len = old_left_len + 1 + right_len; + + assert!(new_left_len <= CAPACITY); + + unsafe { + *left_node.len_mut() = new_left_len as u16; + + let parent_key = slice_remove(parent_node.key_area_mut(..old_parent_len), parent_idx); + left_node.key_area_mut(old_left_len).write(parent_key); + move_to_slice( + right_node.key_area_mut(..right_len), + left_node.key_area_mut(old_left_len + 1..new_left_len), + ); + + let parent_val = slice_remove(parent_node.val_area_mut(..old_parent_len), parent_idx); + left_node.val_area_mut(old_left_len).write(parent_val); + move_to_slice( + right_node.val_area_mut(..right_len), + left_node.val_area_mut(old_left_len + 1..new_left_len), + ); + + slice_remove( + parent_node.edge_area_mut(..old_parent_len + 1), + parent_idx + 1, + ); + parent_node.correct_childrens_parent_links(parent_idx + 1..old_parent_len); + *parent_node.len_mut() -= 1; + + if parent_node.height > 1 { + // SAFETY: the height of the nodes being merged is one below the height + // of the node of this edge, thus above zero, so they are internal. + let mut left_node = left_node.reborrow_mut().cast_to_internal_unchecked(); + let mut right_node = right_node.cast_to_internal_unchecked(); + move_to_slice( + right_node.edge_area_mut(..right_len + 1), + left_node.edge_area_mut(old_left_len + 1..new_left_len + 1), + ); + + left_node.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1); + + alloc.deallocate(right_node.node.cast(), Layout::new::>()); + } else { + alloc.deallocate(right_node.node.cast(), Layout::new::>()); + } + } + + result(parent_node, left_node) + } + + /// Merges the parent's key-value pair and both adjacent child nodes into + /// the left child node and returns the shrunk parent node. + /// + /// Panics unless we `.can_merge()`. + pub(crate) fn merge_tracking_parent( + self, + alloc: &A, + ) -> NodeRef, K, V, marker::Internal> { + self.do_merge(|parent, _child| parent, alloc) + } + + /// Merges the parent's key-value pair and both adjacent child nodes into + /// the left child node and returns that child node. + /// + /// Panics unless we `.can_merge()`. + pub(crate) fn merge_tracking_child( + self, + alloc: &A, + ) -> NodeRef, K, V, marker::LeafOrInternal> { + self.do_merge(|_parent, child| child, alloc) + } + + /// Merges the parent's key-value pair and both adjacent child nodes into + /// the left child node and returns the edge handle in that child node + /// where the tracked child edge ended up, + /// + /// Panics unless we `.can_merge()`. 
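+    ///
+    /// For example, merging children of lengths 5 and 5 while tracking
+    /// `LeftOrRight::Right(2)` leaves the tracked edge at index
+    /// `5 + 1 + 2 == 8` of the merged child.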
+ pub(crate) fn merge_tracking_child_edge( + self, + track_edge_idx: LeftOrRight, + alloc: &A, + ) -> Handle, K, V, marker::LeafOrInternal>, marker::Edge> { + let old_left_len = self.left_child.len(); + let right_len = self.right_child.len(); + assert!(match track_edge_idx { + LeftOrRight::Left(idx) => idx <= old_left_len, + LeftOrRight::Right(idx) => idx <= right_len, + }); + let child = self.merge_tracking_child(alloc); + let new_idx = match track_edge_idx { + LeftOrRight::Left(idx) => idx, + LeftOrRight::Right(idx) => old_left_len + 1 + idx, + }; + unsafe { Handle::new_edge(child, new_idx) } + } + + /// Removes a key-value pair from the left child and places it in the key-value storage + /// of the parent, while pushing the old parent key-value pair into the right child. + /// Returns a handle to the edge in the right child corresponding to where the original + /// edge specified by `track_right_edge_idx` ended up. + pub(crate) fn steal_left( + mut self, + track_right_edge_idx: usize, + ) -> Handle, K, V, marker::LeafOrInternal>, marker::Edge> { + self.bulk_steal_left(1); + unsafe { Handle::new_edge(self.right_child, 1 + track_right_edge_idx) } + } + + /// Removes a key-value pair from the right child and places it in the key-value storage + /// of the parent, while pushing the old parent key-value pair onto the left child. + /// Returns a handle to the edge in the left child specified by `track_left_edge_idx`, + /// which didn't move. + pub(crate) fn steal_right( + mut self, + track_left_edge_idx: usize, + ) -> Handle, K, V, marker::LeafOrInternal>, marker::Edge> { + self.bulk_steal_right(1); + unsafe { Handle::new_edge(self.left_child, track_left_edge_idx) } + } + + /// This does stealing similar to `steal_left` but steals multiple elements at once. + pub(crate) fn bulk_steal_left(&mut self, count: usize) { + assert!(count > 0); + unsafe { + let left_node = &mut self.left_child; + let old_left_len = left_node.len(); + let right_node = &mut self.right_child; + let old_right_len = right_node.len(); + + // Make sure that we may steal safely. + assert!(old_right_len + count <= CAPACITY); + assert!(old_left_len >= count); + + let new_left_len = old_left_len - count; + let new_right_len = old_right_len + count; + *left_node.len_mut() = new_left_len as u16; + *right_node.len_mut() = new_right_len as u16; + + // Move leaf data. + { + // Make room for stolen elements in the right child. + slice_shr(right_node.key_area_mut(..new_right_len), count); + slice_shr(right_node.val_area_mut(..new_right_len), count); + + // Move elements from the left child to the right one. + move_to_slice( + left_node.key_area_mut(new_left_len + 1..old_left_len), + right_node.key_area_mut(..count - 1), + ); + move_to_slice( + left_node.val_area_mut(new_left_len + 1..old_left_len), + right_node.val_area_mut(..count - 1), + ); + + // Move the left-most stolen pair to the parent. + let k = left_node.key_area_mut(new_left_len).assume_init_read(); + let v = left_node.val_area_mut(new_left_len).assume_init_read(); + let (k, v) = self.parent.replace_kv(k, v); + + // Move parent's key-value pair to the right child. + right_node.key_area_mut(count - 1).write(k); + right_node.val_area_mut(count - 1).write(v); + } + + match ( + left_node.reborrow_mut().force(), + right_node.reborrow_mut().force(), + ) { + (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => { + // Make room for stolen edges. + slice_shr(right.edge_area_mut(..new_right_len + 1), count); + + // Steal edges. 
+ move_to_slice( + left.edge_area_mut(new_left_len + 1..old_left_len + 1), + right.edge_area_mut(..count), + ); + + right.correct_childrens_parent_links(0..new_right_len + 1); + } + (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {} + _ => unreachable!(), + } + } + } + + /// The symmetric clone of `bulk_steal_left`. + pub(crate) fn bulk_steal_right(&mut self, count: usize) { + assert!(count > 0); + unsafe { + let left_node = &mut self.left_child; + let old_left_len = left_node.len(); + let right_node = &mut self.right_child; + let old_right_len = right_node.len(); + + // Make sure that we may steal safely. + assert!(old_left_len + count <= CAPACITY); + assert!(old_right_len >= count); + + let new_left_len = old_left_len + count; + let new_right_len = old_right_len - count; + *left_node.len_mut() = new_left_len as u16; + *right_node.len_mut() = new_right_len as u16; + + // Move leaf data. + { + // Move the right-most stolen pair to the parent. + let k = right_node.key_area_mut(count - 1).assume_init_read(); + let v = right_node.val_area_mut(count - 1).assume_init_read(); + let (k, v) = self.parent.replace_kv(k, v); + + // Move parent's key-value pair to the left child. + left_node.key_area_mut(old_left_len).write(k); + left_node.val_area_mut(old_left_len).write(v); + + // Move elements from the right child to the left one. + move_to_slice( + right_node.key_area_mut(..count - 1), + left_node.key_area_mut(old_left_len + 1..new_left_len), + ); + move_to_slice( + right_node.val_area_mut(..count - 1), + left_node.val_area_mut(old_left_len + 1..new_left_len), + ); + + // Fill gap where stolen elements used to be. + slice_shl(right_node.key_area_mut(..old_right_len), count); + slice_shl(right_node.val_area_mut(..old_right_len), count); + } + + match ( + left_node.reborrow_mut().force(), + right_node.reborrow_mut().force(), + ) { + (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => { + // Steal edges. + move_to_slice( + right.edge_area_mut(..count), + left.edge_area_mut(old_left_len + 1..new_left_len + 1), + ); + + // Fill gap where stolen edges used to be. + slice_shl(right.edge_area_mut(..old_right_len + 1), count); + + left.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1); + right.correct_childrens_parent_links(0..new_right_len + 1); + } + (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {} + _ => unreachable!(), + } + } + } +} + +impl Handle, marker::Edge> { + pub(crate) fn forget_node_type( + self, + ) -> Handle, marker::Edge> { + unsafe { Handle::new_edge(self.node.forget_type(), self.idx) } + } +} + +impl Handle, marker::Edge> { + pub(crate) fn forget_node_type( + self, + ) -> Handle, marker::Edge> { + unsafe { Handle::new_edge(self.node.forget_type(), self.idx) } + } +} + +impl Handle, marker::KV> { + pub(crate) fn forget_node_type( + self, + ) -> Handle, marker::KV> { + unsafe { Handle::new_kv(self.node.forget_type(), self.idx) } + } +} + +impl Handle, Type> { + /// Checks whether the underlying node is an `Internal` node or a `Leaf` node. 
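+    /// This only inspects the stored height (leaves are at height 0), so it
+    /// costs a single comparison at runtime.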
+ pub(crate) fn force( + self, + ) -> ForceResult< + Handle, Type>, + Handle, Type>, + > { + match self.node.force() { + ForceResult::Leaf(node) => ForceResult::Leaf(Handle { + node, + idx: self.idx, + _marker: PhantomData, + }), + ForceResult::Internal(node) => ForceResult::Internal(Handle { + node, + idx: self.idx, + _marker: PhantomData, + }), + } + } +} + +impl<'a, K, V, Type> Handle, K, V, marker::LeafOrInternal>, Type> { + /// Unsafely asserts to the compiler the static information that the handle's node is a `Leaf`. + pub(crate) unsafe fn cast_to_leaf_unchecked( + self, + ) -> Handle, K, V, marker::Leaf>, Type> { + let node = unsafe { self.node.cast_to_leaf_unchecked() }; + Handle { + node, + idx: self.idx, + _marker: PhantomData, + } + } +} + +impl<'a, K, V> Handle, K, V, marker::LeafOrInternal>, marker::Edge> { + /// Move the suffix after `self` from one node to another one. `right` must be empty. + /// The first edge of `right` remains unchanged. + pub(crate) fn move_suffix( + &mut self, + right: &mut NodeRef, K, V, marker::LeafOrInternal>, + ) { + unsafe { + let new_left_len = self.idx; + let mut left_node = self.reborrow_mut().into_node(); + let old_left_len = left_node.len(); + + let new_right_len = old_left_len - new_left_len; + let mut right_node = right.reborrow_mut(); + + assert!(right_node.len() == 0); + assert!(left_node.height == right_node.height); + + if new_right_len > 0 { + *left_node.len_mut() = new_left_len as u16; + *right_node.len_mut() = new_right_len as u16; + + move_to_slice( + left_node.key_area_mut(new_left_len..old_left_len), + right_node.key_area_mut(..new_right_len), + ); + move_to_slice( + left_node.val_area_mut(new_left_len..old_left_len), + right_node.val_area_mut(..new_right_len), + ); + match (left_node.force(), right_node.force()) { + (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => { + move_to_slice( + left.edge_area_mut(new_left_len + 1..old_left_len + 1), + right.edge_area_mut(1..new_right_len + 1), + ); + right.correct_childrens_parent_links(1..new_right_len + 1); + } + (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {} + _ => unreachable!(), + } + } + } + } +} + +pub(crate) enum ForceResult { + Leaf(Leaf), + Internal(Internal), +} + +/// Result of insertion, when a node needed to expand beyond its capacity. +pub(crate) struct SplitResult<'a, K, V, NodeType> { + // Altered node in existing tree with elements and edges that belong to the left of `kv`. + pub(crate) left: NodeRef, K, V, NodeType>, + // Some key and value that existed before and were split off, to be inserted elsewhere. + pub(crate) kv: (K, V), + // Owned, unattached, new node with elements and edges that belong to the right of `kv`. 
+    pub(crate) right: NodeRef<marker::Owned, K, V, NodeType>,
+}
+
+impl<'a, K, V> SplitResult<'a, K, V, marker::Leaf> {
+    pub(crate) fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
+        SplitResult {
+            left: self.left.forget_type(),
+            kv: self.kv,
+            right: self.right.forget_type(),
+        }
+    }
+}
+
+impl<'a, K, V> SplitResult<'a, K, V, marker::Internal> {
+    pub(crate) fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
+        SplitResult {
+            left: self.left.forget_type(),
+            kv: self.kv,
+            right: self.right.forget_type(),
+        }
+    }
+}
+
+pub(crate) mod marker {
+    use core::marker::PhantomData;
+
+    pub(crate) enum Leaf {}
+    pub(crate) enum Internal {}
+    pub(crate) enum LeafOrInternal {}
+
+    pub(crate) enum Owned {}
+    pub(crate) enum Dying {}
+    pub(crate) enum DormantMut {}
+    pub(crate) struct Immut<'a>(PhantomData<&'a ()>);
+    pub(crate) struct Mut<'a>(PhantomData<&'a mut ()>);
+    pub(crate) struct ValMut<'a>(PhantomData<&'a mut ()>);
+    pub(crate) struct Raw;
+
+    pub(crate) trait BorrowType {
+        /// If node references of this borrow type allow traversing to other
+        /// nodes in the tree, this constant is set to `true`. It can be used
+        /// for a compile-time assertion.
+        const TRAVERSAL_PERMIT: bool = true;
+    }
+    impl BorrowType for Owned {
+        /// Reject traversal, because it isn't needed. Instead traversal
+        /// happens using the result of `borrow_mut`.
+        /// By disabling traversal, and only creating new references to roots,
+        /// we know that every reference of the `Owned` type is to a root node.
+        const TRAVERSAL_PERMIT: bool = false;
+    }
+    impl BorrowType for Dying {}
+    impl<'a> BorrowType for Immut<'a> {}
+    impl BorrowType for Raw {}
+    impl<'a> BorrowType for Mut<'a> {}
+    impl<'a> BorrowType for ValMut<'a> {}
+    impl BorrowType for DormantMut {}
+
+    pub(crate) enum KV {}
+    pub(crate) enum Edge {}
+}
+
+/// Inserts a value into a slice of initialized elements followed by one uninitialized element.
+///
+/// # Safety
+/// The slice has more than `idx` elements.
+unsafe fn slice_insert<T>(slice: &mut [MaybeUninit<T>], idx: usize, val: T) {
+    unsafe {
+        let len = slice.len();
+        debug_assert!(len > idx);
+        let slice_ptr = slice.as_mut_ptr();
+        if len > idx + 1 {
+            ptr::copy(slice_ptr.add(idx), slice_ptr.add(idx + 1), len - idx - 1);
+        }
+        (*slice_ptr.add(idx)).write(val);
+    }
+}
+
+/// Removes and returns a value from a slice of all initialized elements, leaving behind one
+/// trailing uninitialized element.
+///
+/// # Safety
+/// The slice has more than `idx` elements.
+unsafe fn slice_remove<T>(slice: &mut [MaybeUninit<T>], idx: usize) -> T {
+    unsafe {
+        let len = slice.len();
+        debug_assert!(idx < len);
+        let slice_ptr = slice.as_mut_ptr();
+        let ret = (*slice_ptr.add(idx)).assume_init_read();
+        ptr::copy(slice_ptr.add(idx + 1), slice_ptr.add(idx), len - idx - 1);
+        ret
+    }
+}
+
+/// Shifts the elements in a slice `distance` positions to the left.
+///
+/// # Safety
+/// The slice has at least `distance` elements.
+unsafe fn slice_shl<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
+    unsafe {
+        let slice_ptr = slice.as_mut_ptr();
+        ptr::copy(slice_ptr.add(distance), slice_ptr, slice.len() - distance);
+    }
+}
+
+/// Shifts the elements in a slice `distance` positions to the right.
+///
+/// # Safety
+/// The slice has at least `distance` elements.
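+/// Afterwards the first `distance` slots still hold bitwise copies of the old
+/// front elements; callers such as `bulk_steal_left` treat them as
+/// uninitialized and overwrite them before use.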
+unsafe fn slice_shr<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
+    unsafe {
+        let slice_ptr = slice.as_mut_ptr();
+        ptr::copy(slice_ptr, slice_ptr.add(distance), slice.len() - distance);
+    }
+}
+
+/// Moves all values from a slice of initialized elements to a slice
+/// of uninitialized elements, leaving behind `src` as all uninitialized.
+/// Works like `dst.copy_from_slice(src)` but does not require `T` to be `Copy`.
+fn move_to_slice<T>(src: &[MaybeUninit<T>], dst: &mut [MaybeUninit<T>]) {
+    assert!(src.len() == dst.len());
+    unsafe {
+        ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), src.len());
+    }
+}
+
+#[cfg(test)]
+mod tests;
diff --git a/crates/rune-alloc/src/alloc/btree/node/tests.rs b/crates/rune-alloc/src/alloc/btree/node/tests.rs
new file mode 100644
index 000000000..4932088c3
--- /dev/null
+++ b/crates/rune-alloc/src/alloc/btree/node/tests.rs
@@ -0,0 +1,118 @@
+use core::fmt;
+
+use rust_alloc::string::String;
+
+use crate::alloc::testing::*;
+use crate::alloc::Global;
+
+use super::super::navigate;
+use super::*;
+
+impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
+    // Asserts that the back pointer in each reachable node points to its parent.
+    pub fn assert_back_pointers(self) {
+        if let ForceResult::Internal(node) = self.force() {
+            for idx in 0..=node.len() {
+                let edge = unsafe { Handle::new_edge(node, idx) };
+                let child = edge.descend();
+                assert!(child.ascend().ok() == Some(edge));
+                child.assert_back_pointers();
+            }
+        }
+    }
+
+    // Renders a multi-line display of the keys in order and in tree hierarchy,
+    // picturing the tree growing sideways from its root on the left to its
+    // leaves on the right.
+    pub fn dump_keys(self) -> String
+    where
+        K: fmt::Debug,
+    {
+        let mut result = String::new();
+        self.visit_nodes_in_order(|pos| match pos {
+            navigate::Position::Leaf(leaf) => {
+                let depth = self.height();
+                let indent = "  ".repeat(depth);
+                result += &rust_alloc::format!("\n{}{:?}", indent, leaf.keys());
+            }
+            navigate::Position::Internal(_) => {}
+            navigate::Position::InternalKV(kv) => {
+                let depth = self.height() - kv.into_node().height();
+                let indent = "  ".repeat(depth);
+                result += &rust_alloc::format!("\n{}{:?}", indent, kv.into_kv().0);
+            }
+        });
+        result
+    }
+}
+
+#[test]
+fn test_splitpoint() {
+    for idx in 0..=CAPACITY {
+        let (middle_kv_idx, insertion) = splitpoint(idx);
+
+        // Simulate performing the split:
+        let mut left_len = middle_kv_idx;
+        let mut right_len = CAPACITY - middle_kv_idx - 1;
+        match insertion {
+            LeftOrRight::Left(edge_idx) => {
+                assert!(edge_idx <= left_len);
+                left_len += 1;
+            }
+            LeftOrRight::Right(edge_idx) => {
+                assert!(edge_idx <= right_len);
+                right_len += 1;
+            }
+        }
+        assert!(left_len >= MIN_LEN_AFTER_SPLIT);
+        assert!(right_len >= MIN_LEN_AFTER_SPLIT);
+        assert!(left_len + right_len == CAPACITY);
+    }
+}
+
+#[test]
+fn test_partial_eq() {
+    let mut root1 = NodeRef::new_leaf(&Global).abort();
+    root1.borrow_mut().push(1, ());
+    let mut root1 = NodeRef::new_internal(root1.forget_type(), &Global)
+        .abort()
+        .forget_type();
+    let root2 = Root::new(&Global).abort();
+    root1.reborrow().assert_back_pointers();
+    root2.reborrow().assert_back_pointers();
+
+    let leaf_edge_1a = root1.reborrow().first_leaf_edge().forget_node_type();
+    let leaf_edge_1b = root1.reborrow().last_leaf_edge().forget_node_type();
+    let top_edge_1 = root1.reborrow().first_edge();
+    let top_edge_2 = root2.reborrow().first_edge();
+
+    assert!(leaf_edge_1a == leaf_edge_1a);
+    assert!(leaf_edge_1a != leaf_edge_1b);
+    assert!(leaf_edge_1a !=
top_edge_1); + assert!(leaf_edge_1a != top_edge_2); + assert!(top_edge_1 == top_edge_1); + assert!(top_edge_1 != top_edge_2); + + root1.pop_internal_level(&Global); + unsafe { root1.into_dying().deallocate_and_ascend(&Global) }; + unsafe { root2.into_dying().deallocate_and_ascend(&Global) }; +} + +#[test] +#[cfg(target_arch = "x86_64")] +#[cfg_attr(miri, ignore)] // We'd like to run Miri with layout randomization +fn test_sizes() { + assert_eq!(core::mem::size_of::>(), 16); + assert_eq!( + core::mem::size_of::>(), + 16 + CAPACITY * 2 * 8 + ); + assert_eq!( + core::mem::size_of::>(), + 16 + (CAPACITY + 1) * 8 + ); + assert_eq!( + core::mem::size_of::>(), + 16 + (CAPACITY * 3 + 1) * 8 + ); +} diff --git a/crates/rune-alloc/src/alloc/btree/remove.rs b/crates/rune-alloc/src/alloc/btree/remove.rs new file mode 100644 index 000000000..3d343d71d --- /dev/null +++ b/crates/rune-alloc/src/alloc/btree/remove.rs @@ -0,0 +1,109 @@ +use super::map::MIN_LEN; +use super::node::{marker, ForceResult::*, Handle, LeftOrRight::*, NodeRef}; + +use crate::alloc::Allocator; + +impl<'a, K: 'a, V: 'a> Handle, K, V, marker::LeafOrInternal>, marker::KV> { + /// Removes a key-value pair from the tree, and returns that pair, as well as + /// the leaf edge corresponding to that former pair. It's possible this empties + /// a root node that is internal, which the caller should pop from the map + /// holding the tree. The caller should also decrement the map's length. + pub(crate) fn remove_kv_tracking( + self, + handle_emptied_internal_root: F, + alloc: &A, + ) -> ( + (K, V), + Handle, K, V, marker::Leaf>, marker::Edge>, + ) { + match self.force() { + Leaf(node) => node.remove_leaf_kv(handle_emptied_internal_root, alloc), + Internal(node) => node.remove_internal_kv(handle_emptied_internal_root, alloc), + } + } +} + +impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, marker::KV> { + fn remove_leaf_kv( + self, + handle_emptied_internal_root: F, + alloc: &A, + ) -> ( + (K, V), + Handle, K, V, marker::Leaf>, marker::Edge>, + ) { + let (old_kv, mut pos) = self.remove(); + let len = pos.reborrow().into_node().len(); + if len < MIN_LEN { + let idx = pos.idx(); + // We have to temporarily forget the child type, because there is no + // distinct node type for the immediate parents of a leaf. + let new_pos = match pos.into_node().forget_type().choose_parent_kv() { + Ok(Left(left_parent_kv)) => { + debug_assert!(left_parent_kv.right_child_len() == MIN_LEN - 1); + if left_parent_kv.can_merge() { + left_parent_kv.merge_tracking_child_edge(Right(idx), alloc) + } else { + debug_assert!(left_parent_kv.left_child_len() > MIN_LEN); + left_parent_kv.steal_left(idx) + } + } + Ok(Right(right_parent_kv)) => { + debug_assert!(right_parent_kv.left_child_len() == MIN_LEN - 1); + if right_parent_kv.can_merge() { + right_parent_kv.merge_tracking_child_edge(Left(idx), alloc) + } else { + debug_assert!(right_parent_kv.right_child_len() > MIN_LEN); + right_parent_kv.steal_right(idx) + } + } + Err(pos) => unsafe { Handle::new_edge(pos, idx) }, + }; + // SAFETY: `new_pos` is the leaf we started from or a sibling. + pos = unsafe { new_pos.cast_to_leaf_unchecked() }; + + // Only if we merged, the parent (if any) has shrunk, but skipping + // the following step otherwise does not pay off in benchmarks. + // + // SAFETY: We won't destroy or rearrange the leaf where `pos` is at + // by handling its parent recursively; at worst we will destroy or + // rearrange the parent through the grandparent, thus change the + // link to the parent inside the leaf. 
+ if let Ok(parent) = unsafe { pos.reborrow_mut() }.into_node().ascend() { + if !parent + .into_node() + .forget_type() + .fix_node_and_affected_ancestors(alloc) + { + handle_emptied_internal_root(); + } + } + } + (old_kv, pos) + } +} + +impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, marker::KV> { + fn remove_internal_kv( + self, + handle_emptied_internal_root: F, + alloc: &A, + ) -> ( + (K, V), + Handle, K, V, marker::Leaf>, marker::Edge>, + ) { + // Remove an adjacent KV from its leaf and then put it back in place of + // the element we were asked to remove. Prefer the left adjacent KV, + // for the reasons listed in `choose_parent_kv`. + let left_leaf_kv = self.left_edge().descend().last_leaf_edge().left_kv(); + let left_leaf_kv = unsafe { left_leaf_kv.ok().unwrap_unchecked() }; + let (left_kv, left_hole) = left_leaf_kv.remove_leaf_kv(handle_emptied_internal_root, alloc); + + // The internal node may have been stolen from or merged. Go back right + // to find where the original KV ended up. + let mut internal = unsafe { left_hole.next_kv().ok().unwrap_unchecked() }; + let old_kv = internal.replace_kv(left_kv.0, left_kv.1); + let pos = internal.next_leaf_edge(); + (old_kv, pos) + } +} diff --git a/crates/rune-alloc/src/alloc/btree/search.rs b/crates/rune-alloc/src/alloc/btree/search.rs new file mode 100644 index 000000000..68e740d2a --- /dev/null +++ b/crates/rune-alloc/src/alloc/btree/search.rs @@ -0,0 +1,298 @@ +use core::borrow::Borrow; +use core::cmp::Ordering; +use core::ops::{Bound, RangeBounds}; + +use super::node::{marker, ForceResult::*, Handle, NodeRef}; + +use SearchBound::*; +use SearchResult::*; + +pub(crate) enum SearchBound { + /// An inclusive bound to look for, just like `Bound::Included(T)`. + Included(T), + /// An exclusive bound to look for, just like `Bound::Excluded(T)`. + Excluded(T), + /// An unconditional inclusive bound, just like `Bound::Unbounded`. + AllIncluded, + /// An unconditional exclusive bound. + AllExcluded, +} + +impl SearchBound { + pub(crate) fn from_range(range_bound: Bound) -> Self { + match range_bound { + Bound::Included(t) => Included(t), + Bound::Excluded(t) => Excluded(t), + Bound::Unbounded => AllIncluded, + } + } +} + +pub(crate) enum SearchResult { + Found(Handle, marker::KV>), + GoDown(Handle, marker::Edge>), +} + +pub(crate) enum IndexResult { + KV(usize), + Edge(usize), +} + +impl NodeRef { + /// Looks up a given key in a (sub)tree headed by the node, recursively. + /// Returns a `Found` with the handle of the matching KV, if any. Otherwise, + /// returns a `GoDown` with the handle of the leaf edge where the key belongs. + /// + /// The result is meaningful only if the tree is ordered by key, like the tree + /// in a `BTreeMap` is. + pub(crate) fn search_tree( + mut self, + cx: &mut C, + key: &Q, + cmp: fn(&mut C, &Q, &Q) -> Result, + ) -> Result, E> + where + K: Borrow, + { + loop { + self = match self.search_node(cx, key, cmp)? { + Found(handle) => return Ok(Found(handle)), + GoDown(handle) => match handle.force() { + Leaf(leaf) => return Ok(GoDown(leaf)), + Internal(internal) => internal.descend(), + }, + } + } + } + + /// Descends to the nearest node where the edge matching the lower bound + /// of the range is different from the edge matching the upper bound, i.e., + /// the nearest node that has at least one key contained in the range. 
+ /// + /// If found, returns an `Ok` with that node, the strictly ascending pair of + /// edge indices in the node delimiting the range, and the corresponding + /// pair of bounds for continuing the search in the child nodes, in case + /// the node is internal. + /// + /// If not found, returns an `Err` with the leaf edge matching the entire + /// range. + /// + /// As a diagnostic service, panics if the range specifies impossible bounds. + /// + /// The result is meaningful only if the tree is ordered by key. + pub(crate) fn search_tree_for_bifurcation<'r, C: ?Sized, Q: ?Sized, R, E>( + mut self, + cx: &mut C, + range: &'r R, + cmp: fn(&mut C, &Q, &Q) -> Result, + ) -> Result< + Result< + ( + NodeRef, + usize, + usize, + SearchBound<&'r Q>, + SearchBound<&'r Q>, + ), + Handle, marker::Edge>, + >, + E, + > + where + K: Borrow, + R: RangeBounds, + { + // Inlining these variables should be avoided. We assume the bounds reported by `range` + // remain the same, but an adversarial implementation could change between calls (#81138). + let (start, end) = (range.start_bound(), range.end_bound()); + match (start, end) { + (Bound::Excluded(s), Bound::Excluded(e)) + if matches!(cmp(cx, s, e)?, Ordering::Equal) => + { + panic!("range start and end are equal and excluded in BTree") + } + (Bound::Included(s) | Bound::Excluded(s), Bound::Included(e) | Bound::Excluded(e)) + if matches!(cmp(cx, s, e)?, Ordering::Greater) => + { + panic!("range start is greater than range end in BTree") + } + _ => {} + } + let mut lower_bound = SearchBound::from_range(start); + let mut upper_bound = SearchBound::from_range(end); + loop { + let (lower_edge_idx, lower_child_bound) = + self.find_lower_bound_index(cx, lower_bound, cmp)?; + let (upper_edge_idx, upper_child_bound) = + unsafe { self.find_upper_bound_index(cx, upper_bound, lower_edge_idx, cmp)? }; + if lower_edge_idx < upper_edge_idx { + return Ok(Ok(( + self, + lower_edge_idx, + upper_edge_idx, + lower_child_bound, + upper_child_bound, + ))); + } + debug_assert_eq!(lower_edge_idx, upper_edge_idx); + let common_edge = unsafe { Handle::new_edge(self, lower_edge_idx) }; + match common_edge.force() { + Leaf(common_edge) => return Ok(Err(common_edge)), + Internal(common_edge) => { + self = common_edge.descend(); + lower_bound = lower_child_bound; + upper_bound = upper_child_bound; + } + } + } + } + + /// Finds an edge in the node delimiting the lower bound of a range. + /// Also returns the lower bound to be used for continuing the search in + /// the matching child node, if `self` is an internal node. + /// + /// The result is meaningful only if the tree is ordered by key. + pub(crate) fn find_lower_bound_edge<'r, C: ?Sized, Q: ?Sized, E>( + self, + cx: &mut C, + bound: SearchBound<&'r Q>, + cmp: fn(&mut C, &Q, &Q) -> Result, + ) -> Result<(Handle, SearchBound<&'r Q>), E> + where + K: Borrow, + { + let (edge_idx, bound) = self.find_lower_bound_index(cx, bound, cmp)?; + let edge = unsafe { Handle::new_edge(self, edge_idx) }; + Ok((edge, bound)) + } + + /// Clone of `find_lower_bound_edge` for the upper bound. + pub(crate) fn find_upper_bound_edge<'r, C: ?Sized, Q: ?Sized, E>( + self, + cx: &mut C, + bound: SearchBound<&'r Q>, + cmp: fn(&mut C, &Q, &Q) -> Result, + ) -> Result<(Handle, SearchBound<&'r Q>), E> + where + K: Borrow, + { + let (edge_idx, bound) = unsafe { self.find_upper_bound_index(cx, bound, 0, cmp)? 
}; + let edge = unsafe { Handle::new_edge(self, edge_idx) }; + Ok((edge, bound)) + } +} + +impl NodeRef { + /// Looks up a given key in the node, without recursion. + /// Returns a `Found` with the handle of the matching KV, if any. Otherwise, + /// returns a `GoDown` with the handle of the edge where the key might be found + /// (if the node is internal) or where the key can be inserted. + /// + /// The result is meaningful only if the tree is ordered by key, like the tree + /// in a `BTreeMap` is. + pub(crate) fn search_node( + self, + cx: &mut C, + key: &Q, + cmp: fn(&mut C, &Q, &Q) -> Result, + ) -> Result, E> + where + K: Borrow, + { + Ok(match unsafe { self.find_key_index(cx, key, 0, cmp)? } { + IndexResult::KV(idx) => Found(unsafe { Handle::new_kv(self, idx) }), + IndexResult::Edge(idx) => GoDown(unsafe { Handle::new_edge(self, idx) }), + }) + } + + /// Returns either the KV index in the node at which the key (or an equivalent) + /// exists, or the edge index where the key belongs, starting from a particular index. + /// + /// The result is meaningful only if the tree is ordered by key, like the tree + /// in a `BTreeMap` is. + /// + /// # Safety + /// `start_index` must be a valid edge index for the node. + unsafe fn find_key_index( + &self, + cx: &mut C, + key: &Q, + start_index: usize, + cmp: fn(&mut C, &Q, &Q) -> Result, + ) -> Result + where + K: Borrow, + { + let node = self.reborrow(); + let keys = node.keys(); + debug_assert!(start_index <= keys.len()); + for (offset, k) in unsafe { keys.get_unchecked(start_index..) } + .iter() + .enumerate() + { + match cmp(cx, key, k.borrow())? { + Ordering::Greater => {} + Ordering::Equal => return Ok(IndexResult::KV(start_index + offset)), + Ordering::Less => return Ok(IndexResult::Edge(start_index + offset)), + } + } + Ok(IndexResult::Edge(keys.len())) + } + + /// Finds an edge index in the node delimiting the lower bound of a range. + /// Also returns the lower bound to be used for continuing the search in + /// the matching child node, if `self` is an internal node. + /// + /// The result is meaningful only if the tree is ordered by key. + fn find_lower_bound_index<'r, C: ?Sized, Q: ?Sized, E>( + &self, + cx: &mut C, + bound: SearchBound<&'r Q>, + cmp: fn(&mut C, &Q, &Q) -> Result, + ) -> Result<(usize, SearchBound<&'r Q>), E> + where + K: Borrow, + { + Ok(match bound { + Included(key) => match unsafe { self.find_key_index(cx, key, 0, cmp)? } { + IndexResult::KV(idx) => (idx, AllExcluded), + IndexResult::Edge(idx) => (idx, bound), + }, + Excluded(key) => match unsafe { self.find_key_index(cx, key, 0, cmp)? } { + IndexResult::KV(idx) => (idx + 1, AllIncluded), + IndexResult::Edge(idx) => (idx, bound), + }, + AllIncluded => (0, AllIncluded), + AllExcluded => (self.len(), AllExcluded), + }) + } + + /// Mirror image of `find_lower_bound_index` for the upper bound, + /// with an additional parameter to skip part of the key array. + /// + /// # Safety + /// `start_index` must be a valid edge index for the node. + unsafe fn find_upper_bound_index<'r, C: ?Sized, Q: ?Sized, E>( + &self, + cx: &mut C, + bound: SearchBound<&'r Q>, + start_index: usize, + cmp: fn(&mut C, &Q, &Q) -> Result, + ) -> Result<(usize, SearchBound<&'r Q>), E> + where + K: Borrow, + { + Ok(match bound { + Included(key) => match unsafe { self.find_key_index(cx, key, start_index, cmp)? } { + IndexResult::KV(idx) => (idx + 1, AllExcluded), + IndexResult::Edge(idx) => (idx, bound), + }, + Excluded(key) => match unsafe { self.find_key_index(cx, key, start_index, cmp)? 
} { + IndexResult::KV(idx) => (idx, AllIncluded), + IndexResult::Edge(idx) => (idx, bound), + }, + AllIncluded => (self.len(), AllIncluded), + AllExcluded => (start_index, AllExcluded), + }) + } +} diff --git a/crates/rune-alloc/src/alloc/btree/set.rs b/crates/rune-alloc/src/alloc/btree/set.rs new file mode 100644 index 000000000..7e803b8fa --- /dev/null +++ b/crates/rune-alloc/src/alloc/btree/set.rs @@ -0,0 +1,1866 @@ +//! An ordered set based on a B-Tree. + +use core::borrow::Borrow; +use core::cmp::Ordering::{self, Equal, Greater, Less}; +use core::cmp::{max, min}; +use core::fmt; +use core::hash::{Hash, Hasher}; +use core::iter::{FusedIterator, Peekable}; +use core::ops::RangeBounds; + +use super::map::{infallible_cmp, into_ok, BTreeMap, CmpFn, Keys}; +use super::merge_iter::MergeIterInner; +use super::set_val::SetValZST; +use super::Recover; + +#[cfg(test)] +use crate::alloc::testing::*; +use crate::alloc::{AllocError, Allocator, Error, Global, TryClone, TryExtend, TryFromIteratorIn}; + +/// An ordered set based on a B-Tree. +/// +/// See [`BTreeMap`]'s documentation for a detailed discussion of this collection's performance +/// benefits and drawbacks. +/// +/// It is a logic error for an item to be modified in such a way that the item's ordering relative +/// to any other item, as determined by the [`Ord`] trait, changes while it is in the set. This is +/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code. +/// The behavior resulting from such a logic error is not specified, but will be encapsulated to the +/// `BTreeSet` that observed the logic error and not result in undefined behavior. This could +/// include panics, incorrect results, aborts, memory leaks, and non-termination. +/// +/// Iterators returned by [`BTreeSet::iter`] produce their items in order, and take worst-case +/// logarithmic and amortized constant time per item returned. +/// +/// [`Cell`]: core::cell::Cell +/// [`RefCell`]: core::cell::RefCell +/// +/// # Examples +/// +/// ``` +/// use rune_alloc::BTreeSet; +/// +/// // Type inference lets us omit an explicit type signature (which +/// // would be `BTreeSet<&str>` in this example). +/// let mut books = BTreeSet::new(); +/// +/// // Add some books. +/// books.try_insert("A Dance With Dragons")?; +/// books.try_insert("To Kill a Mockingbird")?; +/// books.try_insert("The Odyssey")?; +/// books.try_insert("The Great Gatsby")?; +/// +/// // Check for a specific one. +/// if !books.contains("The Winds of Winter") { +/// println!("We have {} books, but The Winds of Winter ain't one.", +/// books.len()); +/// } +/// +/// // Remove a book. +/// books.remove("The Odyssey"); +/// +/// // Iterate over everything. 
+/// for book in &books { +/// println!("{book}"); +/// } +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// A `BTreeSet` with a known list of items can be initialized from an array: +/// +/// ``` +/// use rune_alloc::BTreeSet; +/// +/// let set = BTreeSet::try_from([1, 2, 3])?; +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +pub struct BTreeSet { + map: BTreeMap, +} + +impl Hash for BTreeSet { + fn hash(&self, state: &mut H) { + self.map.hash(state) + } +} + +impl PartialEq for BTreeSet { + fn eq(&self, other: &BTreeSet) -> bool { + self.map.eq(&other.map) + } +} + +impl Eq for BTreeSet {} + +impl PartialOrd for BTreeSet { + fn partial_cmp(&self, other: &BTreeSet) -> Option { + self.map.partial_cmp(&other.map) + } +} + +impl Ord for BTreeSet { + fn cmp(&self, other: &BTreeSet) -> Ordering { + self.map.cmp(&other.map) + } +} + +impl TryClone for BTreeSet +where + T: TryClone, +{ + fn try_clone(&self) -> Result { + Ok(BTreeSet { + map: self.map.try_clone()?, + }) + } +} + +#[cfg(test)] +impl Clone for BTreeSet +where + T: TryClone, +{ + fn clone(&self) -> Self { + self.try_clone().abort() + } +} + +/// An iterator over the items of a `BTreeSet`. +/// +/// This `struct` is created by the [`iter`] method on [`BTreeSet`]. See its +/// documentation for more. +/// +/// [`iter`]: BTreeSet::iter +#[must_use = "iterators are lazy and do nothing unless consumed"] +pub struct Iter<'a, T: 'a> { + iter: Keys<'a, T, SetValZST>, +} + +impl fmt::Debug for Iter<'_, T> +where + T: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("Iter").field(&self.iter.clone()).finish() + } +} + +/// An owning iterator over the items of a `BTreeSet`. +/// +/// This `struct` is created by the [`into_iter`] method on [`BTreeSet`] +/// (provided by the [`IntoIterator`] trait). See its documentation for more. +/// +/// [`into_iter`]: BTreeSet#method.into_iter +#[derive(Debug)] +pub struct IntoIter { + iter: super::map::IntoIter, +} + +/// An iterator over a sub-range of items in a `BTreeSet`. +/// +/// This `struct` is created by the [`range`] method on [`BTreeSet`]. +/// See its documentation for more. +/// +/// [`range`]: BTreeSet::range +#[must_use = "iterators are lazy and do nothing unless consumed"] +#[derive(Debug)] +pub struct Range<'a, T: 'a> { + iter: super::map::Range<'a, T, SetValZST>, +} + +/// A lazy iterator producing elements in the difference of `BTreeSet`s. +/// +/// This `struct` is created by the [`difference`] method on [`BTreeSet`]. +/// See its documentation for more. 
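+///
+/// A usage sketch (assuming this iterator behaves like the standard
+/// library's `BTreeSet::difference`):
+///
+/// ```
+/// use rune_alloc::BTreeSet;
+///
+/// let a = BTreeSet::try_from([1, 2, 3])?;
+/// let b = BTreeSet::try_from([2, 3, 4])?;
+///
+/// let diff: Vec<_> = a.difference(&b).collect();
+/// assert_eq!(diff, [&1]);
+/// # Ok::<_, rune_alloc::Error>(())
+/// ```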
+/// +/// [`difference`]: BTreeSet::difference +#[must_use = "this returns the difference as an iterator, \ + without modifying either input set"] +pub struct Difference<'a, T: 'a, A: Allocator = Global> { + inner: DifferenceInner<'a, T, A>, +} + +enum DifferenceInner<'a, T: 'a, A: Allocator> { + Stitch { + // iterate all of `self` and some of `other`, spotting matches along the way + self_iter: Iter<'a, T>, + other_iter: Peekable>, + }, + Search { + // iterate `self`, look up in `other` + self_iter: Iter<'a, T>, + other_set: &'a BTreeSet, + }, + Iterate(Iter<'a, T>), // simply produce all elements in `self` +} + +// Explicit Debug impl necessary because of issue #26925 +impl fmt::Debug for DifferenceInner<'_, T, A> +where + T: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + DifferenceInner::Stitch { + self_iter, + other_iter, + } => f + .debug_struct("Stitch") + .field("self_iter", self_iter) + .field("other_iter", other_iter) + .finish(), + DifferenceInner::Search { + self_iter, + other_set, + } => f + .debug_struct("Search") + .field("self_iter", self_iter) + .field("other_iter", other_set) + .finish(), + DifferenceInner::Iterate(x) => f.debug_tuple("Iterate").field(x).finish(), + } + } +} + +impl fmt::Debug for Difference<'_, T, A> +where + T: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("Difference").field(&self.inner).finish() + } +} + +/// A lazy iterator producing elements in the symmetric difference of `BTreeSet`s. +/// +/// This `struct` is created by the [`symmetric_difference`] method on +/// [`BTreeSet`]. See its documentation for more. +/// +/// [`symmetric_difference`]: BTreeSet::symmetric_difference +#[must_use = "this returns the difference as an iterator, \ + without modifying either input set"] +pub struct SymmetricDifference<'a, T: 'a>(MergeIterInner>); + +impl fmt::Debug for SymmetricDifference<'_, T> +where + T: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("SymmetricDifference").field(&self.0).finish() + } +} + +/// A lazy iterator producing elements in the intersection of `BTreeSet`s. +/// +/// This `struct` is created by the [`intersection`] method on [`BTreeSet`]. +/// See its documentation for more. 
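+///
+/// The iterator short-circuits where it can: if the two sets cover
+/// disjoint ranges, that is already decided by their smallest and largest
+/// elements. A minimal sketch:
+///
+/// ```
+/// use rune_alloc::BTreeSet;
+///
+/// let a = BTreeSet::try_from([1, 2])?;
+/// let b = BTreeSet::try_from([10, 20])?;
+///
+/// // Disjoint ranges are detected up front from the set bounds.
+/// let mut common = a.intersection(&b);
+/// assert_eq!(common.size_hint(), (0, Some(0)));
+/// assert_eq!(common.next(), None);
+/// # Ok::<_, rune_alloc::Error>(())
+/// ```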
+/// +/// [`intersection`]: BTreeSet::intersection +#[must_use = "this returns the intersection as an iterator, \ + without modifying either input set"] +pub struct Intersection<'a, T: 'a, A: Allocator = Global> { + inner: IntersectionInner<'a, T, A>, +} + +enum IntersectionInner<'a, T: 'a, A: Allocator> { + Stitch { + // iterate similarly sized sets jointly, spotting matches along the way + a: Iter<'a, T>, + b: Iter<'a, T>, + }, + Search { + // iterate a small set, look up in the large set + small_iter: Iter<'a, T>, + large_set: &'a BTreeSet, + }, + Answer(Option<&'a T>), // return a specific element or emptiness +} + +// Explicit Debug impl necessary because of issue #26925 +impl fmt::Debug for IntersectionInner<'_, T, A> +where + T: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + IntersectionInner::Stitch { a, b } => f + .debug_struct("Stitch") + .field("a", a) + .field("b", b) + .finish(), + IntersectionInner::Search { + small_iter, + large_set, + } => f + .debug_struct("Search") + .field("small_iter", small_iter) + .field("large_set", large_set) + .finish(), + IntersectionInner::Answer(x) => f.debug_tuple("Answer").field(x).finish(), + } + } +} + +impl fmt::Debug for Intersection<'_, T, A> +where + T: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("Intersection").field(&self.inner).finish() + } +} + +/// A lazy iterator producing elements in the union of `BTreeSet`s. +/// +/// This `struct` is created by the [`union`] method on [`BTreeSet`]. +/// See its documentation for more. +/// +/// [`union`]: BTreeSet::union +#[must_use = "this returns the union as an iterator, \ + without modifying either input set"] +pub struct Union<'a, T: 'a>(MergeIterInner>); + +impl fmt::Debug for Union<'_, T> +where + T: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("Union").field(&self.0).finish() + } +} + +// This constant is used by functions that compare two sets. +// It estimates the relative size at which searching performs better +// than iterating, based on the benchmarks in +// https://github.com/ssomers/rust_bench_btreeset_intersection. +// It's used to divide rather than multiply sizes, to rule out overflow, +// and it's a power of two to make that division cheap. +const ITER_PERFORMANCE_TIPPING_SIZE_DIFF: usize = 16; + +impl BTreeSet { + /// Makes a new, empty `BTreeSet`. + /// + /// Does not allocate anything on its own. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeSet; + /// + /// let mut set: BTreeSet = BTreeSet::new(); + /// ``` + #[must_use] + pub const fn new() -> BTreeSet { + BTreeSet { + map: BTreeMap::new(), + } + } + + #[cfg(test)] + pub(crate) fn from(values: [T; N]) -> Self + where + T: Ord, + { + Self::try_from(values).abort() + } +} + +impl BTreeSet { + /// Makes a new `BTreeSet` with a reasonable choice of B. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{BTreeSet, Global}; + /// + /// let mut set: BTreeSet = BTreeSet::new_in(Global); + /// ``` + pub fn new_in(alloc: A) -> BTreeSet { + BTreeSet { + map: BTreeMap::new_in(alloc), + } + } + + /// Constructs a double-ended iterator over a sub-range of elements in the set. + /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will + /// yield elements from min (inclusive) to max (exclusive). 
+ /// The range may also be entered as `(Bound, Bound)`, so for example + /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive + /// range from 4 to 10. + /// + /// # Panics + /// + /// Panics if range `start > end`. + /// Panics if range `start == end` and both bounds are `Excluded`. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeSet; + /// use std::ops::Bound::Included; + /// + /// let mut set = BTreeSet::new(); + /// set.try_insert(3)?; + /// set.try_insert(5)?; + /// set.try_insert(8)?; + /// for &elem in set.range((Included(&4), Included(&8))) { + /// println!("{elem}"); + /// } + /// assert_eq!(Some(&5), set.range(4..).next()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn range(&self, range: R) -> Range<'_, T> + where + K: Ord, + T: Borrow + Ord, + R: RangeBounds, + { + Range { + iter: self.map.range(range), + } + } + + /// Visits the elements representing the difference, + /// i.e., the elements that are in `self` but not in `other`, + /// in ascending order. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{BTreeSet, Vec, IteratorExt}; + /// + /// let mut a = BTreeSet::new(); + /// a.try_insert(1)?; + /// a.try_insert(2)?; + /// + /// let mut b = BTreeSet::new(); + /// b.try_insert(2)?; + /// b.try_insert(3)?; + /// + /// let diff: Vec<_> = a.difference(&b).cloned().try_collect()?; + /// assert_eq!(diff, [1]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn difference<'a>(&'a self, other: &'a BTreeSet) -> Difference<'a, T, A> + where + T: Ord, + { + let (self_min, self_max) = + if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) { + (self_min, self_max) + } else { + return Difference { + inner: DifferenceInner::Iterate(self.iter()), + }; + }; + let (other_min, other_max) = + if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) { + (other_min, other_max) + } else { + return Difference { + inner: DifferenceInner::Iterate(self.iter()), + }; + }; + Difference { + inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) { + (Greater, _) | (_, Less) => DifferenceInner::Iterate(self.iter()), + (Equal, _) => { + let mut self_iter = self.iter(); + self_iter.next(); + DifferenceInner::Iterate(self_iter) + } + (_, Equal) => { + let mut self_iter = self.iter(); + self_iter.next_back(); + DifferenceInner::Iterate(self_iter) + } + _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => { + DifferenceInner::Search { + self_iter: self.iter(), + other_set: other, + } + } + _ => DifferenceInner::Stitch { + self_iter: self.iter(), + other_iter: other.iter().peekable(), + }, + }, + } + } + + /// Visits the elements representing the symmetric difference, + /// i.e., the elements that are in `self` or in `other` but not in both, + /// in ascending order. 
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{BTreeSet, Vec, IteratorExt}; + /// + /// let mut a = BTreeSet::new(); + /// a.try_insert(1)?; + /// a.try_insert(2)?; + /// + /// let mut b = BTreeSet::new(); + /// b.try_insert(2)?; + /// b.try_insert(3)?; + /// + /// let sym_diff: Vec<_> = a.symmetric_difference(&b).cloned().try_collect()?; + /// assert_eq!(sym_diff, [1, 3]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn symmetric_difference<'a>( + &'a self, + other: &'a BTreeSet, + ) -> SymmetricDifference<'a, T> + where + T: Ord, + { + SymmetricDifference(MergeIterInner::new(self.iter(), other.iter())) + } + + /// Visits the elements representing the intersection, + /// i.e., the elements that are both in `self` and `other`, + /// in ascending order. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{BTreeSet, Vec, IteratorExt}; + /// + /// let mut a = BTreeSet::new(); + /// a.try_insert(1)?; + /// a.try_insert(2)?; + /// + /// let mut b = BTreeSet::new(); + /// b.try_insert(2)?; + /// b.try_insert(3)?; + /// + /// let intersection: Vec<_> = a.intersection(&b).cloned().try_collect()?; + /// assert_eq!(intersection, [2]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn intersection<'a>(&'a self, other: &'a BTreeSet) -> Intersection<'a, T, A> + where + T: Ord, + { + let (self_min, self_max) = + if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) { + (self_min, self_max) + } else { + return Intersection { + inner: IntersectionInner::Answer(None), + }; + }; + let (other_min, other_max) = + if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) { + (other_min, other_max) + } else { + return Intersection { + inner: IntersectionInner::Answer(None), + }; + }; + Intersection { + inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) { + (Greater, _) | (_, Less) => IntersectionInner::Answer(None), + (Equal, _) => IntersectionInner::Answer(Some(self_min)), + (_, Equal) => IntersectionInner::Answer(Some(self_max)), + _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => { + IntersectionInner::Search { + small_iter: self.iter(), + large_set: other, + } + } + _ if other.len() <= self.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => { + IntersectionInner::Search { + small_iter: other.iter(), + large_set: self, + } + } + _ => IntersectionInner::Stitch { + a: self.iter(), + b: other.iter(), + }, + }, + } + } + + /// Visits the elements representing the union, + /// i.e., all the elements in `self` or `other`, without duplicates, + /// in ascending order. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{BTreeSet, Vec, IteratorExt}; + /// + /// let mut a = BTreeSet::new(); + /// a.try_insert(1)?; + /// + /// let mut b = BTreeSet::new(); + /// b.try_insert(2)?; + /// + /// let union: Vec<_> = a.union(&b).cloned().try_collect()?; + /// assert_eq!(union, [1, 2]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn union<'a>(&'a self, other: &'a BTreeSet) -> Union<'a, T> + where + T: Ord, + { + Union(MergeIterInner::new(self.iter(), other.iter())) + } + + /// Clears the set, removing all elements. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{BTreeSet, Vec, IteratorExt}; + /// + /// let mut v = BTreeSet::new(); + /// v.try_insert(1)?; + /// v.clear(); + /// assert!(v.is_empty()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn clear(&mut self) { + self.map.clear() + } + + /// Returns `true` if the set contains an element equal to the value. 
+ /// + /// The value may be any borrowed form of the set's element type, + /// but the ordering on the borrowed form *must* match the + /// ordering on the element type. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeSet; + /// + /// let set = BTreeSet::try_from([1, 2, 3])?; + /// assert_eq!(set.contains(&1), true); + /// assert_eq!(set.contains(&4), false); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn contains(&self, value: &Q) -> bool + where + T: Borrow + Ord, + Q: Ord, + { + self.map.contains_key(value) + } + + /// Returns a reference to the element in the set, if any, that is equal to + /// the value. + /// + /// The value may be any borrowed form of the set's element type, + /// but the ordering on the borrowed form *must* match the + /// ordering on the element type. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeSet; + /// + /// let set = BTreeSet::try_from([1, 2, 3])?; + /// assert_eq!(set.get(&2), Some(&2)); + /// assert_eq!(set.get(&4), None); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn get(&self, value: &Q) -> Option<&T> + where + T: Borrow + Ord, + Q: Ord, + { + into_ok(self.get_with(&mut (), value, infallible_cmp)) + } + + pub(crate) fn get_with( + &self, + cx: &mut C, + value: &Q, + cmp: CmpFn, + ) -> Result, E> + where + T: Borrow, + { + Recover::get(&self.map, cx, value, cmp) + } + + /// Returns `true` if `self` has no elements in common with `other`. This is + /// equivalent to checking for an empty intersection. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeSet; + /// + /// let a = BTreeSet::try_from([1, 2, 3])?; + /// let mut b = BTreeSet::new(); + /// + /// assert_eq!(a.is_disjoint(&b), true); + /// b.try_insert(4)?; + /// assert_eq!(a.is_disjoint(&b), true); + /// b.try_insert(1)?; + /// assert_eq!(a.is_disjoint(&b), false); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[must_use] + pub fn is_disjoint(&self, other: &BTreeSet) -> bool + where + T: Ord, + { + self.intersection(other).next().is_none() + } + + /// Returns `true` if the set is a subset of another, + /// i.e., `other` contains at least all the elements in `self`. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeSet; + /// + /// let sup = BTreeSet::try_from([1, 2, 3])?; + /// let mut set = BTreeSet::new(); + /// + /// assert_eq!(set.is_subset(&sup), true); + /// set.try_insert(2)?; + /// assert_eq!(set.is_subset(&sup), true); + /// set.try_insert(4)?; + /// assert_eq!(set.is_subset(&sup), false); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[must_use] + pub fn is_subset(&self, other: &BTreeSet) -> bool + where + T: Ord, + { + // Same result as self.difference(other).next().is_none() + // but the code below is faster (hugely in some cases). 
+ if self.len() > other.len() { + return false; + } + let (self_min, self_max) = + if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) { + (self_min, self_max) + } else { + return true; // self is empty + }; + let (other_min, other_max) = + if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) { + (other_min, other_max) + } else { + return false; // other is empty + }; + let mut self_iter = self.iter(); + match self_min.cmp(other_min) { + Less => return false, + Equal => { + self_iter.next(); + } + Greater => (), + } + match self_max.cmp(other_max) { + Greater => return false, + Equal => { + self_iter.next_back(); + } + Less => (), + } + if self_iter.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF { + for next in self_iter { + if !other.contains(next) { + return false; + } + } + } else { + let mut other_iter = other.iter(); + other_iter.next(); + other_iter.next_back(); + let mut self_next = self_iter.next(); + while let Some(self1) = self_next { + match other_iter.next().map_or(Less, |other1| self1.cmp(other1)) { + Less => return false, + Equal => self_next = self_iter.next(), + Greater => (), + } + } + } + true + } + + /// Returns `true` if the set is a superset of another, + /// i.e., `self` contains at least all the elements in `other`. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeSet; + /// + /// let sub = BTreeSet::try_from([1, 2])?; + /// let mut set = BTreeSet::new(); + /// + /// assert_eq!(set.is_superset(&sub), false); + /// + /// set.try_insert(0)?; + /// set.try_insert(1)?; + /// assert_eq!(set.is_superset(&sub), false); + /// + /// set.try_insert(2)?; + /// assert_eq!(set.is_superset(&sub), true); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[must_use] + pub fn is_superset(&self, other: &BTreeSet) -> bool + where + T: Ord, + { + other.is_subset(self) + } + + /// Returns a reference to the first element in the set, if any. + /// This element is always the minimum of all elements in the set. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeSet; + /// + /// let mut set = BTreeSet::new(); + /// assert_eq!(set.first(), None); + /// set.try_insert(1)?; + /// assert_eq!(set.first(), Some(&1)); + /// set.try_insert(2)?; + /// assert_eq!(set.first(), Some(&1)); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[must_use] + pub fn first(&self) -> Option<&T> + where + T: Ord, + { + self.map.first_key_value().map(|(k, _)| k) + } + + /// Returns a reference to the last element in the set, if any. + /// This element is always the maximum of all elements in the set. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeSet; + /// + /// let mut set = BTreeSet::new(); + /// assert_eq!(set.last(), None); + /// set.try_insert(1)?; + /// assert_eq!(set.last(), Some(&1)); + /// set.try_insert(2)?; + /// assert_eq!(set.last(), Some(&2)); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[must_use] + pub fn last(&self) -> Option<&T> + where + T: Ord, + { + self.map.last_key_value().map(|(k, _)| k) + } + + /// Removes the first element from the set and returns it, if any. + /// The first element is always the minimum element in the set. 
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeSet; + /// + /// let mut set = BTreeSet::new(); + /// + /// set.try_insert(1)?; + /// + /// while let Some(n) = set.pop_first() { + /// assert_eq!(n, 1); + /// } + /// + /// assert!(set.is_empty()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn pop_first(&mut self) -> Option + where + T: Ord, + { + self.map.pop_first().map(|kv| kv.0) + } + + /// Removes the last element from the set and returns it, if any. The last + /// element is always the maximum element in the set. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeSet; + /// + /// let mut set = BTreeSet::new(); + /// + /// set.try_insert(1)?; + /// + /// while let Some(n) = set.pop_last() { + /// assert_eq!(n, 1); + /// } + /// + /// assert!(set.is_empty()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn pop_last(&mut self) -> Option + where + T: Ord, + { + self.map.pop_last().map(|kv| kv.0) + } + + /// Adds a value to the set. + /// + /// Returns whether the value was newly inserted. That is: + /// + /// - If the set did not previously contain an equal value, `true` is + /// returned. + /// - If the set already contained an equal value, `false` is returned, and + /// the entry is not updated. + /// + /// See the [module-level documentation] for more. + /// + /// [module-level documentation]: index.html#insert-and-complex-keys + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeSet; + /// + /// let mut set = BTreeSet::new(); + /// + /// assert_eq!(set.try_insert(2)?, true); + /// assert_eq!(set.try_insert(2)?, false); + /// assert_eq!(set.len(), 1); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_insert(&mut self, value: T) -> Result + where + T: Ord, + { + Ok(self.map.try_insert(value, SetValZST)?.is_none()) + } + + #[cfg(test)] + pub(crate) fn insert(&mut self, value: T) -> bool + where + T: Ord, + { + self.try_insert(value).abort() + } + + /// Adds a value to the set, replacing the existing element, if any, that is + /// equal to the value. Returns the replaced element. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{Vec, BTreeSet}; + /// + /// let mut set = BTreeSet::new(); + /// set.try_insert(Vec::::new())?; + /// + /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 0); + /// set.try_replace(Vec::try_with_capacity(10)?)?; + /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 10); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_replace(&mut self, value: T) -> Result, AllocError> + where + T: Ord, + { + into_ok(self.try_replace_with(&mut (), value, infallible_cmp)) + } + + #[cfg(test)] + pub(crate) fn replace(&mut self, value: T) -> Option + where + T: Ord, + { + self.try_replace(value).abort() + } + + pub(crate) fn try_replace_with( + &mut self, + cx: &mut C, + value: T, + cmp: CmpFn, + ) -> Result, AllocError>, E> { + Recover::try_replace(&mut self.map, cx, value, cmp) + } + + /// If the set contains an element equal to the value, removes it from the + /// set and drops it. Returns whether such an element was present. + /// + /// The value may be any borrowed form of the set's element type, + /// but the ordering on the borrowed form *must* match the + /// ordering on the element type. 
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeSet; + /// + /// let mut set = BTreeSet::new(); + /// + /// set.try_insert(2)?; + /// assert_eq!(set.remove(&2), true); + /// assert_eq!(set.remove(&2), false); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn remove(&mut self, value: &Q) -> bool + where + T: Borrow + Ord, + Q: Ord, + { + self.map.remove(value).is_some() + } + + /// Removes and returns the element in the set, if any, that is equal to + /// the value. + /// + /// The value may be any borrowed form of the set's element type, + /// but the ordering on the borrowed form *must* match the + /// ordering on the element type. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeSet; + /// + /// let mut set = BTreeSet::try_from([1, 2, 3])?; + /// assert_eq!(set.take(&2), Some(2)); + /// assert_eq!(set.take(&2), None); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn take(&mut self, value: &Q) -> Option + where + T: Borrow + Ord, + Q: Ord, + { + into_ok(self.take_with(&mut (), value, infallible_cmp)) + } + + pub(crate) fn take_with( + &mut self, + cx: &mut C, + value: &Q, + cmp: CmpFn, + ) -> Result, E> + where + T: Borrow, + { + Recover::take(&mut self.map, cx, value, cmp) + } + + /// Retains only the elements specified by the predicate. + /// + /// In other words, remove all elements `e` for which `f(&e)` returns `false`. + /// The elements are visited in ascending order. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeSet; + /// + /// let mut set = BTreeSet::try_from([1, 2, 3, 4, 5, 6])?; + /// // Keep only the even numbers. + /// set.retain(|&k| k % 2 == 0); + /// assert!(set.iter().eq([2, 4, 6].iter())); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn retain(&mut self, mut f: F) + where + T: Ord, + F: FnMut(&T) -> bool, + { + self.extract_if(|v| !f(v)).for_each(drop); + } + + /// Moves all elements from `other` into `self`, leaving `other` empty. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeSet; + /// + /// let mut a = BTreeSet::new(); + /// a.try_insert(1)?; + /// a.try_insert(2)?; + /// a.try_insert(3)?; + /// + /// let mut b = BTreeSet::new(); + /// b.try_insert(3)?; + /// b.try_insert(4)?; + /// b.try_insert(5)?; + /// + /// a.try_append(&mut b)?; + /// + /// assert_eq!(a.len(), 5); + /// assert_eq!(b.len(), 0); + /// + /// assert!(a.contains(&1)); + /// assert!(a.contains(&2)); + /// assert!(a.contains(&3)); + /// assert!(a.contains(&4)); + /// assert!(a.contains(&5)); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_append(&mut self, other: &mut Self) -> Result<(), AllocError> + where + T: Ord, + { + self.map.try_append(&mut other.map) + } + + #[cfg(test)] + pub(crate) fn append(&mut self, other: &mut Self) + where + T: Ord, + { + self.try_append(other).abort() + } + + /// Splits the collection into two at the value. Returns a new collection + /// with all elements greater than or equal to the value. 
+ /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::BTreeSet; + /// + /// let mut a = BTreeSet::new(); + /// a.try_insert(1)?; + /// a.try_insert(2)?; + /// a.try_insert(3)?; + /// a.try_insert(17)?; + /// a.try_insert(41)?; + /// + /// let b = a.try_split_off(&3)?; + /// + /// assert_eq!(a.len(), 2); + /// assert_eq!(b.len(), 3); + /// + /// assert!(a.contains(&1)); + /// assert!(a.contains(&2)); + /// + /// assert!(b.contains(&3)); + /// assert!(b.contains(&17)); + /// assert!(b.contains(&41)); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_split_off(&mut self, value: &Q) -> Result + where + T: Borrow + Ord, + A: Clone, + { + Ok(BTreeSet { + map: self.map.try_split_off(value)?, + }) + } + + #[cfg(test)] + pub(crate) fn split_off(&mut self, value: &Q) -> Self + where + T: Borrow + Ord, + A: Clone, + { + self.try_split_off(value).abort() + } + + /// Creates an iterator that visits all elements in ascending order and + /// uses a closure to determine if an element should be removed. + /// + /// If the closure returns `true`, the element is removed from the set and + /// yielded. If the closure returns `false`, or panics, the element remains + /// in the set and will not be yielded. + /// + /// If the returned `ExtractIf` is not exhausted, e.g. because it is dropped without iterating + /// or the iteration short-circuits, then the remaining elements will be retained. + /// Use [`retain`] with a negated predicate if you do not need the returned iterator. + /// + /// [`retain`]: BTreeSet::retain + /// # Examples + /// + /// Splitting a set into even and odd values, reusing the original set: + /// + /// ``` + /// use rune_alloc::{BTreeSet, Vec, IteratorExt}; + /// + /// let mut set: BTreeSet = (0..8).try_collect()?; + /// let evens: BTreeSet<_> = set.extract_if(|v| v % 2 == 0).try_collect()?; + /// let odds = set; + /// assert_eq!(evens.into_iter().try_collect::>()?, [0, 2, 4, 6]); + /// assert_eq!(odds.into_iter().try_collect::>()?, [1, 3, 5, 7]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn extract_if<'a, F>(&'a mut self, pred: F) -> ExtractIf<'a, T, F, A> + where + T: Ord, + F: 'a + FnMut(&T) -> bool, + { + let (inner, alloc) = self.map.extract_if_inner(); + ExtractIf { pred, inner, alloc } + } + + /// Gets an iterator that visits the elements in the `BTreeSet` in ascending + /// order. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeSet; + /// + /// let set = BTreeSet::try_from([1, 2, 3])?; + /// let mut set_iter = set.iter(); + /// assert_eq!(set_iter.next(), Some(&1)); + /// assert_eq!(set_iter.next(), Some(&2)); + /// assert_eq!(set_iter.next(), Some(&3)); + /// assert_eq!(set_iter.next(), None); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// Values returned by the iterator are returned in ascending order: + /// + /// ``` + /// use rune_alloc::BTreeSet; + /// + /// let set = BTreeSet::try_from([3, 1, 2])?; + /// let mut set_iter = set.iter(); + /// assert_eq!(set_iter.next(), Some(&1)); + /// assert_eq!(set_iter.next(), Some(&2)); + /// assert_eq!(set_iter.next(), Some(&3)); + /// assert_eq!(set_iter.next(), None); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn iter(&self) -> Iter<'_, T> { + Iter { + iter: self.map.keys(), + } + } + + /// Returns the number of elements in the set. 
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeSet; + /// + /// let mut v = BTreeSet::new(); + /// assert_eq!(v.len(), 0); + /// v.try_insert(1)?; + /// assert_eq!(v.len(), 1); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[must_use] + pub const fn len(&self) -> usize { + self.map.len() + } + + /// Returns `true` if the set contains no elements. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::BTreeSet; + /// + /// let mut v = BTreeSet::new(); + /// assert!(v.is_empty()); + /// v.try_insert(1)?; + /// assert!(!v.is_empty()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[must_use] + pub const fn is_empty(&self) -> bool { + self.len() == 0 + } +} + +impl IntoIterator for BTreeSet { + type Item = T; + type IntoIter = IntoIter; + + /// Gets an iterator for moving out the `BTreeSet`'s contents. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{BTreeSet, Vec, IteratorExt}; + /// + /// let set = BTreeSet::try_from([1, 2, 3, 4])?; + /// + /// let v: Vec<_> = set.into_iter().try_collect()?; + /// assert_eq!(v, [1, 2, 3, 4]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + fn into_iter(self) -> IntoIter { + IntoIter { + iter: self.map.into_iter(), + } + } +} + +impl<'a, T, A: Allocator> IntoIterator for &'a BTreeSet { + type Item = &'a T; + type IntoIter = Iter<'a, T>; + + fn into_iter(self) -> Iter<'a, T> { + self.iter() + } +} + +/// An iterator produced by calling `extract_if` on BTreeSet. +#[must_use = "iterators are lazy and do nothing unless consumed"] +pub struct ExtractIf<'a, T, F, A: Allocator = Global> +where + T: 'a, + F: 'a + FnMut(&T) -> bool, +{ + pred: F, + inner: super::map::ExtractIfInner<'a, T, SetValZST>, + /// The BTreeMap will outlive this IntoIter so we don't care about drop order for `alloc`. + alloc: &'a A, +} + +impl fmt::Debug for ExtractIf<'_, T, F, A> +where + T: fmt::Debug, + F: FnMut(&T) -> bool, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("ExtractIf") + .field(&self.inner.peek().map(|(k, _)| k)) + .finish() + } +} + +impl<'a, T, F, A: Allocator> Iterator for ExtractIf<'_, T, F, A> +where + F: 'a + FnMut(&T) -> bool, +{ + type Item = T; + + fn next(&mut self) -> Option { + let pred = &mut self.pred; + let mut mapped_pred = |k: &T, _v: &mut SetValZST| pred(k); + self.inner + .next(&mut mapped_pred, self.alloc) + .map(|(k, _)| k) + } + + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } +} + +impl FusedIterator for ExtractIf<'_, T, F, A> where F: FnMut(&T) -> bool {} + +impl TryExtend for BTreeSet +where + T: Ord, +{ + #[inline] + fn try_extend>(&mut self, iter: Iter) -> Result<(), Error> { + for elem in iter { + self.try_insert(elem)?; + } + + Ok(()) + } +} + +#[cfg(test)] +impl Extend for BTreeSet +where + T: Ord, +{ + #[inline] + fn extend>(&mut self, iter: Iter) { + self.try_extend(iter).abort() + } +} + +impl<'a, T, A: Allocator> TryExtend<&'a T> for BTreeSet +where + T: 'a + Ord + Copy, +{ + fn try_extend>(&mut self, iter: I) -> Result<(), Error> { + self.try_extend(iter.into_iter().copied()) + } +} + +#[cfg(test)] +impl<'a, T, A: Allocator> Extend<&'a T> for BTreeSet +where + T: 'a + Ord + Copy, +{ + fn extend>(&mut self, iter: I) { + self.try_extend(iter).abort() + } +} + +impl Default for BTreeSet { + /// Creates an empty `BTreeSet`. 
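+    ///
+    /// A small usage sketch, in the style of the other `Default` examples
+    /// in this module:
+    ///
+    /// ```
+    /// use rune_alloc::BTreeSet;
+    ///
+    /// let set: BTreeSet<i32> = BTreeSet::default();
+    /// assert!(set.is_empty());
+    /// ```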
+ fn default() -> BTreeSet { + BTreeSet::new() + } +} + +impl fmt::Debug for BTreeSet +where + T: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_set().entries(self.iter()).finish() + } +} + +impl Clone for Iter<'_, T> { + fn clone(&self) -> Self { + Iter { + iter: self.iter.clone(), + } + } +} +impl<'a, T> Iterator for Iter<'a, T> { + type Item = &'a T; + + fn next(&mut self) -> Option<&'a T> { + self.iter.next() + } + + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } + + fn last(mut self) -> Option<&'a T> { + self.next_back() + } + + fn min(mut self) -> Option<&'a T> + where + &'a T: Ord, + { + self.next() + } + + fn max(mut self) -> Option<&'a T> + where + &'a T: Ord, + { + self.next_back() + } +} + +impl<'a, T> DoubleEndedIterator for Iter<'a, T> { + fn next_back(&mut self) -> Option<&'a T> { + self.iter.next_back() + } +} +impl ExactSizeIterator for Iter<'_, T> { + fn len(&self) -> usize { + self.iter.len() + } +} + +impl FusedIterator for Iter<'_, T> {} + +impl Iterator for IntoIter { + type Item = T; + + fn next(&mut self) -> Option { + self.iter.next().map(|(k, _)| k) + } + + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } +} + +impl Default for Iter<'_, T> { + /// Creates an empty `btree_set::Iter`. + /// + /// ``` + /// use rune_alloc::btree_set; + /// + /// let iter: btree_set::Iter<'_, u8> = Default::default(); + /// assert_eq!(iter.len(), 0); + /// ``` + fn default() -> Self { + Iter { + iter: Default::default(), + } + } +} + +impl DoubleEndedIterator for IntoIter { + fn next_back(&mut self) -> Option { + self.iter.next_back().map(|(k, _)| k) + } +} +impl ExactSizeIterator for IntoIter { + fn len(&self) -> usize { + self.iter.len() + } +} + +impl FusedIterator for IntoIter {} + +impl Default for IntoIter +where + A: Allocator + Default + Clone, +{ + /// Creates an empty `btree_set::IntoIter`. + /// + /// ``` + /// use rune_alloc::btree_set; + /// + /// let iter: btree_set::IntoIter = Default::default(); + /// assert_eq!(iter.len(), 0); + /// ``` + fn default() -> Self { + IntoIter { + iter: Default::default(), + } + } +} + +impl Clone for Range<'_, T> { + fn clone(&self) -> Self { + Range { + iter: self.iter.clone(), + } + } +} + +impl<'a, T> Iterator for Range<'a, T> { + type Item = &'a T; + + fn next(&mut self) -> Option<&'a T> { + self.iter.next().map(|(k, _)| k) + } + + fn last(mut self) -> Option<&'a T> { + self.next_back() + } + + fn min(mut self) -> Option<&'a T> + where + &'a T: Ord, + { + self.next() + } + + fn max(mut self) -> Option<&'a T> + where + &'a T: Ord, + { + self.next_back() + } +} + +impl<'a, T> DoubleEndedIterator for Range<'a, T> { + fn next_back(&mut self) -> Option<&'a T> { + self.iter.next_back().map(|(k, _)| k) + } +} + +impl FusedIterator for Range<'_, T> {} + +impl Default for Range<'_, T> { + /// Creates an empty `btree_set::Range`. 
+ /// + /// ``` + /// use rune_alloc::btree_set; + /// + /// let iter: btree_set::Range<'_, u8> = Default::default(); + /// assert_eq!(iter.count(), 0); + /// ``` + fn default() -> Self { + Range { + iter: Default::default(), + } + } +} + +impl Clone for Difference<'_, T, A> { + fn clone(&self) -> Self { + Difference { + inner: match &self.inner { + DifferenceInner::Stitch { + self_iter, + other_iter, + } => DifferenceInner::Stitch { + self_iter: self_iter.clone(), + other_iter: other_iter.clone(), + }, + DifferenceInner::Search { + self_iter, + other_set, + } => DifferenceInner::Search { + self_iter: self_iter.clone(), + other_set, + }, + DifferenceInner::Iterate(iter) => DifferenceInner::Iterate(iter.clone()), + }, + } + } +} + +impl<'a, T: Ord, A: Allocator> Iterator for Difference<'a, T, A> { + type Item = &'a T; + + fn next(&mut self) -> Option<&'a T> { + match &mut self.inner { + DifferenceInner::Stitch { + self_iter, + other_iter, + } => { + let mut self_next = self_iter.next()?; + + loop { + match other_iter + .peek() + .map_or(Less, |other_next| self_next.cmp(other_next)) + { + Less => return Some(self_next), + Equal => { + self_next = self_iter.next()?; + other_iter.next(); + } + Greater => { + other_iter.next(); + } + } + } + } + DifferenceInner::Search { + self_iter, + other_set, + } => loop { + let self_next = self_iter.next()?; + + if !other_set.contains(self_next) { + return Some(self_next); + } + }, + DifferenceInner::Iterate(iter) => iter.next(), + } + } + + fn size_hint(&self) -> (usize, Option) { + let (self_len, other_len) = match &self.inner { + DifferenceInner::Stitch { + self_iter, + other_iter, + } => (self_iter.len(), other_iter.len()), + DifferenceInner::Search { + self_iter, + other_set, + } => (self_iter.len(), other_set.len()), + DifferenceInner::Iterate(iter) => (iter.len(), 0), + }; + (self_len.saturating_sub(other_len), Some(self_len)) + } + + fn min(mut self) -> Option<&'a T> { + self.next() + } +} + +impl FusedIterator for Difference<'_, T, A> {} + +impl Clone for SymmetricDifference<'_, T> { + fn clone(&self) -> Self { + SymmetricDifference(self.0.clone()) + } +} + +impl<'a, T: Ord> Iterator for SymmetricDifference<'a, T> { + type Item = &'a T; + + fn next(&mut self) -> Option<&'a T> { + loop { + let (a_next, b_next) = self.0.nexts(Self::Item::cmp); + if a_next.and(b_next).is_none() { + return a_next.or(b_next); + } + } + } + + fn size_hint(&self) -> (usize, Option) { + let (a_len, b_len) = self.0.lens(); + // No checked_add, because even if a and b refer to the same set, + // and T is a zero-sized type, the storage overhead of sets limits + // the number of elements to less than half the range of usize. 
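+        // Hence `a_len + b_len` stays well below `usize::MAX` and cannot
+        // overflow.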
+ (0, Some(a_len + b_len)) + } + + fn min(mut self) -> Option<&'a T> { + self.next() + } +} + +impl FusedIterator for SymmetricDifference<'_, T> {} + +impl Clone for Intersection<'_, T, A> { + fn clone(&self) -> Self { + Intersection { + inner: match &self.inner { + IntersectionInner::Stitch { a, b } => IntersectionInner::Stitch { + a: a.clone(), + b: b.clone(), + }, + IntersectionInner::Search { + small_iter, + large_set, + } => IntersectionInner::Search { + small_iter: small_iter.clone(), + large_set, + }, + IntersectionInner::Answer(answer) => IntersectionInner::Answer(*answer), + }, + } + } +} +impl<'a, T: Ord, A: Allocator> Iterator for Intersection<'a, T, A> { + type Item = &'a T; + + fn next(&mut self) -> Option<&'a T> { + match &mut self.inner { + IntersectionInner::Stitch { a, b } => { + let mut a_next = a.next()?; + let mut b_next = b.next()?; + loop { + match a_next.cmp(b_next) { + Less => a_next = a.next()?, + Greater => b_next = b.next()?, + Equal => return Some(a_next), + } + } + } + IntersectionInner::Search { + small_iter, + large_set, + } => loop { + let small_next = small_iter.next()?; + if large_set.contains(small_next) { + return Some(small_next); + } + }, + IntersectionInner::Answer(answer) => answer.take(), + } + } + + fn size_hint(&self) -> (usize, Option) { + match &self.inner { + IntersectionInner::Stitch { a, b } => (0, Some(min(a.len(), b.len()))), + IntersectionInner::Search { small_iter, .. } => (0, Some(small_iter.len())), + IntersectionInner::Answer(None) => (0, Some(0)), + IntersectionInner::Answer(Some(_)) => (1, Some(1)), + } + } + + fn min(mut self) -> Option<&'a T> { + self.next() + } +} + +impl FusedIterator for Intersection<'_, T, A> {} + +impl Clone for Union<'_, T> { + fn clone(&self) -> Self { + Union(self.0.clone()) + } +} +impl<'a, T: Ord> Iterator for Union<'a, T> { + type Item = &'a T; + + fn next(&mut self) -> Option<&'a T> { + let (a_next, b_next) = self.0.nexts(Self::Item::cmp); + a_next.or(b_next) + } + + fn size_hint(&self) -> (usize, Option) { + let (a_len, b_len) = self.0.lens(); + // No checked_add - see SymmetricDifference::size_hint. 
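+        // Every element of the larger input is yielded at least once, so
+        // `max(a_len, b_len)` is a sound lower bound.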
+ (max(a_len, b_len), Some(a_len + b_len)) + } + + fn min(mut self) -> Option<&'a T> { + self.next() + } +} + +impl FusedIterator for Union<'_, T> {} + +impl TryFromIteratorIn for BTreeSet +where + T: Ord, +{ + #[inline] + fn try_from_iter_in(iter: I, alloc: A) -> Result + where + I: IntoIterator, + { + let mut this = BTreeSet::new_in(alloc); + + for value in iter { + this.try_insert(value)?; + } + + Ok(this) + } +} + +#[cfg(test)] +impl FromIterator for BTreeSet +where + T: Ord, +{ + fn from_iter(iter: I) -> Self + where + I: IntoIterator, + { + Self::try_from_iter_in(iter, Global).abort() + } +} + +impl TryFrom<[T; N]> for BTreeSet +where + T: Ord, +{ + type Error = Error; + + #[inline] + fn try_from(values: [T; N]) -> Result { + let mut this = BTreeSet::new(); + + for value in values { + this.try_insert(value)?; + } + + Ok(this) + } +} + +#[cfg(test)] +mod tests; diff --git a/crates/rune-alloc/src/alloc/btree/set/tests.rs b/crates/rune-alloc/src/alloc/btree/set/tests.rs new file mode 100644 index 000000000..d5e7c8eaf --- /dev/null +++ b/crates/rune-alloc/src/alloc/btree/set/tests.rs @@ -0,0 +1,898 @@ +#![allow(clippy::bool_assert_comparison)] +#![allow(clippy::needless_borrow)] +#![allow(clippy::redundant_closure)] +#![allow(clippy::useless_vec)] + +use core::cmp::Ordering; +use core::fmt; +use core::hash::{Hash, Hasher}; +use core::ops::Bound::{Excluded, Included}; + +use rust_alloc::format; +use rust_std::panic::{catch_unwind, AssertUnwindSafe}; + +use super::*; + +use crate::testing::crash_test::{CrashTestDummy, Panic}; +use crate::testing::rng::DeterministicRng; +use crate::vec::Vec; + +#[test] +fn test_clone_eq() { + let mut m = BTreeSet::new(); + + m.insert(1); + m.insert(2); + + assert_eq!(m.clone(), m); +} + +#[test] +fn test_iter_min_max() { + let mut a = BTreeSet::new(); + assert_eq!(a.iter().min(), None); + assert_eq!(a.iter().max(), None); + assert_eq!(a.range(..).min(), None); + assert_eq!(a.range(..).max(), None); + assert_eq!(a.difference(&BTreeSet::new()).min(), None); + assert_eq!(a.difference(&BTreeSet::new()).max(), None); + assert_eq!(a.intersection(&a).min(), None); + assert_eq!(a.intersection(&a).max(), None); + assert_eq!(a.symmetric_difference(&BTreeSet::new()).min(), None); + assert_eq!(a.symmetric_difference(&BTreeSet::new()).max(), None); + assert_eq!(a.union(&a).min(), None); + assert_eq!(a.union(&a).max(), None); + a.insert(1); + a.insert(2); + assert_eq!(a.iter().min(), Some(&1)); + assert_eq!(a.iter().max(), Some(&2)); + assert_eq!(a.range(..).min(), Some(&1)); + assert_eq!(a.range(..).max(), Some(&2)); + assert_eq!(a.difference(&BTreeSet::new()).min(), Some(&1)); + assert_eq!(a.difference(&BTreeSet::new()).max(), Some(&2)); + assert_eq!(a.intersection(&a).min(), Some(&1)); + assert_eq!(a.intersection(&a).max(), Some(&2)); + assert_eq!(a.symmetric_difference(&BTreeSet::new()).min(), Some(&1)); + assert_eq!(a.symmetric_difference(&BTreeSet::new()).max(), Some(&2)); + assert_eq!(a.union(&a).min(), Some(&1)); + assert_eq!(a.union(&a).max(), Some(&2)); +} + +fn check(a: &[i32], b: &[i32], expected: &[i32], f: F) +where + F: FnOnce(&BTreeSet, &BTreeSet, &mut dyn FnMut(&i32) -> bool) -> bool, +{ + let mut set_a = BTreeSet::new(); + let mut set_b = BTreeSet::new(); + + for x in a { + assert!(set_a.insert(*x)) + } + for y in b { + assert!(set_b.insert(*y)) + } + + let mut i = 0; + f(&set_a, &set_b, &mut |&x| { + if i < expected.len() { + assert_eq!(x, expected[i]); + } + i += 1; + true + }); + assert_eq!(i, expected.len()); +} + +#[test] +fn test_intersection() { 
+ fn check_intersection(a: &[i32], b: &[i32], expected: &[i32]) { + check(a, b, expected, |x, y, f| x.intersection(y).all(f)) + } + + check_intersection(&[], &[], &[]); + check_intersection(&[1, 2, 3], &[], &[]); + check_intersection(&[], &[1, 2, 3], &[]); + check_intersection(&[2], &[1, 2, 3], &[2]); + check_intersection(&[1, 2, 3], &[2], &[2]); + check_intersection( + &[11, 1, 3, 77, 103, 5, -5], + &[2, 11, 77, -9, -42, 5, 3], + &[3, 5, 11, 77], + ); + + if cfg!(miri) { + // Miri is too slow + return; + } + + let large = Vec::from_iter(0..100); + check_intersection(&[], &large, &[]); + check_intersection(&large, &[], &[]); + check_intersection(&[-1], &large, &[]); + check_intersection(&large, &[-1], &[]); + check_intersection(&[0], &large, &[0]); + check_intersection(&large, &[0], &[0]); + check_intersection(&[99], &large, &[99]); + check_intersection(&large, &[99], &[99]); + check_intersection(&[100], &large, &[]); + check_intersection(&large, &[100], &[]); + check_intersection(&[11, 5000, 1, 3, 77, 8924], &large, &[1, 3, 11, 77]); +} + +#[test] +fn test_intersection_size_hint() { + let x = BTreeSet::from([3, 4]); + let y = BTreeSet::from([1, 2, 3]); + let mut iter = x.intersection(&y); + assert_eq!(iter.size_hint(), (1, Some(1))); + assert_eq!(iter.next(), Some(&3)); + assert_eq!(iter.size_hint(), (0, Some(0))); + assert_eq!(iter.next(), None); + + iter = y.intersection(&y); + assert_eq!(iter.size_hint(), (0, Some(3))); + assert_eq!(iter.next(), Some(&1)); + assert_eq!(iter.size_hint(), (0, Some(2))); +} + +#[test] +fn test_difference() { + fn check_difference(a: &[i32], b: &[i32], expected: &[i32]) { + check(a, b, expected, |x, y, f| x.difference(y).all(f)) + } + + check_difference(&[], &[], &[]); + check_difference(&[1, 12], &[], &[1, 12]); + check_difference(&[], &[1, 2, 3, 9], &[]); + check_difference(&[1, 3, 5, 9, 11], &[3, 9], &[1, 5, 11]); + check_difference(&[1, 3, 5, 9, 11], &[3, 6, 9], &[1, 5, 11]); + check_difference(&[1, 3, 5, 9, 11], &[0, 1], &[3, 5, 9, 11]); + check_difference(&[1, 3, 5, 9, 11], &[11, 12], &[1, 3, 5, 9]); + check_difference( + &[-5, 11, 22, 33, 40, 42], + &[-12, -5, 14, 23, 34, 38, 39, 50], + &[11, 22, 33, 40, 42], + ); + + if cfg!(miri) { + // Miri is too slow + return; + } + + let large = Vec::from_iter(0..100); + check_difference(&[], &large, &[]); + check_difference(&[-1], &large, &[-1]); + check_difference(&[0], &large, &[]); + check_difference(&[99], &large, &[]); + check_difference(&[100], &large, &[100]); + check_difference(&[11, 5000, 1, 3, 77, 8924], &large, &[5000, 8924]); + check_difference(&large, &[], &large); + check_difference(&large, &[-1], &large); + check_difference(&large, &[100], &large); +} + +#[test] +fn test_difference_size_hint() { + let s246 = BTreeSet::from([2, 4, 6]); + let s23456 = BTreeSet::from_iter(2..=6); + let mut iter = s246.difference(&s23456); + assert_eq!(iter.size_hint(), (0, Some(3))); + assert_eq!(iter.next(), None); + + let s12345 = BTreeSet::from_iter(1..=5); + iter = s246.difference(&s12345); + assert_eq!(iter.size_hint(), (0, Some(3))); + assert_eq!(iter.next(), Some(&6)); + assert_eq!(iter.size_hint(), (0, Some(0))); + assert_eq!(iter.next(), None); + + let s34567 = BTreeSet::from_iter(3..=7); + iter = s246.difference(&s34567); + assert_eq!(iter.size_hint(), (0, Some(3))); + assert_eq!(iter.next(), Some(&2)); + assert_eq!(iter.size_hint(), (0, Some(2))); + assert_eq!(iter.next(), None); + + let s1 = BTreeSet::from_iter(-9..=1); + iter = s246.difference(&s1); + assert_eq!(iter.size_hint(), (3, Some(3))); + + 
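+    // `s2` reaches up to s246's minimum, so one element of s246 is known
+    // to drop out of the difference and the hint tightens to an exact count.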
let s2 = BTreeSet::from_iter(-9..=2); + iter = s246.difference(&s2); + assert_eq!(iter.size_hint(), (2, Some(2))); + assert_eq!(iter.next(), Some(&4)); + assert_eq!(iter.size_hint(), (1, Some(1))); + + let s23 = BTreeSet::from([2, 3]); + iter = s246.difference(&s23); + assert_eq!(iter.size_hint(), (1, Some(3))); + assert_eq!(iter.next(), Some(&4)); + assert_eq!(iter.size_hint(), (1, Some(1))); + + let s4 = BTreeSet::from([4]); + iter = s246.difference(&s4); + assert_eq!(iter.size_hint(), (2, Some(3))); + assert_eq!(iter.next(), Some(&2)); + assert_eq!(iter.size_hint(), (1, Some(2))); + assert_eq!(iter.next(), Some(&6)); + assert_eq!(iter.size_hint(), (0, Some(0))); + assert_eq!(iter.next(), None); + + let s56 = BTreeSet::from([5, 6]); + iter = s246.difference(&s56); + assert_eq!(iter.size_hint(), (1, Some(3))); + assert_eq!(iter.next(), Some(&2)); + assert_eq!(iter.size_hint(), (0, Some(2))); + + let s6 = BTreeSet::from_iter(6..=19); + iter = s246.difference(&s6); + assert_eq!(iter.size_hint(), (2, Some(2))); + assert_eq!(iter.next(), Some(&2)); + assert_eq!(iter.size_hint(), (1, Some(1))); + + let s7 = BTreeSet::from_iter(7..=19); + iter = s246.difference(&s7); + assert_eq!(iter.size_hint(), (3, Some(3))); +} + +#[test] +fn test_symmetric_difference() { + fn check_symmetric_difference(a: &[i32], b: &[i32], expected: &[i32]) { + check(a, b, expected, |x, y, f| x.symmetric_difference(y).all(f)) + } + + check_symmetric_difference(&[], &[], &[]); + check_symmetric_difference(&[1, 2, 3], &[2], &[1, 3]); + check_symmetric_difference(&[2], &[1, 2, 3], &[1, 3]); + check_symmetric_difference( + &[1, 3, 5, 9, 11], + &[-2, 3, 9, 14, 22], + &[-2, 1, 5, 11, 14, 22], + ); +} + +#[test] +fn test_symmetric_difference_size_hint() { + let x = BTreeSet::from([2, 4]); + let y = BTreeSet::from([1, 2, 3]); + let mut iter = x.symmetric_difference(&y); + assert_eq!(iter.size_hint(), (0, Some(5))); + assert_eq!(iter.next(), Some(&1)); + assert_eq!(iter.size_hint(), (0, Some(4))); + assert_eq!(iter.next(), Some(&3)); + assert_eq!(iter.size_hint(), (0, Some(1))); +} + +#[test] +fn test_union() { + fn check_union(a: &[i32], b: &[i32], expected: &[i32]) { + check(a, b, expected, |x, y, f| x.union(y).all(f)) + } + + check_union(&[], &[], &[]); + check_union(&[1, 2, 3], &[2], &[1, 2, 3]); + check_union(&[2], &[1, 2, 3], &[1, 2, 3]); + check_union( + &[1, 3, 5, 9, 11, 16, 19, 24], + &[-2, 1, 5, 9, 13, 19], + &[-2, 1, 3, 5, 9, 11, 13, 16, 19, 24], + ); +} + +#[test] +fn test_union_size_hint() { + let x = BTreeSet::from([2, 4]); + let y = BTreeSet::from([1, 2, 3]); + let mut iter = x.union(&y); + assert_eq!(iter.size_hint(), (3, Some(5))); + assert_eq!(iter.next(), Some(&1)); + assert_eq!(iter.size_hint(), (2, Some(4))); + assert_eq!(iter.next(), Some(&2)); + assert_eq!(iter.size_hint(), (1, Some(2))); +} + +#[test] +// Only tests the simple function definition with respect to intersection +fn test_is_disjoint() { + let one = BTreeSet::from([1]); + let two = BTreeSet::from([2]); + assert!(one.is_disjoint(&two)); +} + +#[test] +// Also implicitly tests the trivial function definition of is_superset +fn test_is_subset() { + fn is_subset(a: &[i32], b: &[i32]) -> bool { + let set_a = BTreeSet::from_iter(a.iter()); + let set_b = BTreeSet::from_iter(b.iter()); + set_a.is_subset(&set_b) + } + + assert_eq!(is_subset(&[], &[]), true); + assert_eq!(is_subset(&[], &[1, 2]), true); + assert_eq!(is_subset(&[0], &[1, 2]), false); + assert_eq!(is_subset(&[1], &[1, 2]), true); + assert_eq!(is_subset(&[2], &[1, 2]), true); + 
assert_eq!(is_subset(&[3], &[1, 2]), false); + assert_eq!(is_subset(&[1, 2], &[1]), false); + assert_eq!(is_subset(&[1, 2], &[1, 2]), true); + assert_eq!(is_subset(&[1, 2], &[2, 3]), false); + assert_eq!( + is_subset( + &[-5, 11, 22, 33, 40, 42], + &[-12, -5, 11, 14, 22, 23, 33, 34, 38, 39, 40, 42] + ), + true + ); + assert_eq!( + is_subset( + &[-5, 11, 22, 33, 40, 42], + &[-12, -5, 11, 14, 22, 23, 34, 38] + ), + false + ); + + if cfg!(miri) { + // Miri is too slow + return; + } + + let large = Vec::from_iter(0..100); + assert_eq!(is_subset(&[], &large), true); + assert_eq!(is_subset(&large, &[]), false); + assert_eq!(is_subset(&[-1], &large), false); + assert_eq!(is_subset(&[0], &large), true); + assert_eq!(is_subset(&[1, 2], &large), true); + assert_eq!(is_subset(&[99, 100], &large), false); +} + +#[test] +fn test_is_superset() { + fn is_superset(a: &[i32], b: &[i32]) -> bool { + let set_a = BTreeSet::from_iter(a.iter()); + let set_b = BTreeSet::from_iter(b.iter()); + set_a.is_superset(&set_b) + } + + assert_eq!(is_superset(&[], &[]), true); + assert_eq!(is_superset(&[], &[1, 2]), false); + assert_eq!(is_superset(&[0], &[1, 2]), false); + assert_eq!(is_superset(&[1], &[1, 2]), false); + assert_eq!(is_superset(&[4], &[1, 2]), false); + assert_eq!(is_superset(&[1, 4], &[1, 2]), false); + assert_eq!(is_superset(&[1, 2], &[1, 2]), true); + assert_eq!(is_superset(&[1, 2, 3], &[1, 3]), true); + assert_eq!(is_superset(&[1, 2, 3], &[]), true); + assert_eq!(is_superset(&[-1, 1, 2, 3], &[-1, 3]), true); + + if cfg!(miri) { + // Miri is too slow + return; + } + + let large = Vec::from_iter(0..100); + assert_eq!(is_superset(&[], &large), false); + assert_eq!(is_superset(&large, &[]), true); + assert_eq!(is_superset(&large, &[1]), true); + assert_eq!(is_superset(&large, &[50, 99]), true); + assert_eq!(is_superset(&large, &[100]), false); + assert_eq!(is_superset(&large, &[0, 99]), true); + assert_eq!(is_superset(&[-1], &large), false); + assert_eq!(is_superset(&[0], &large), false); + assert_eq!(is_superset(&[99, 100], &large), false); +} + +#[test] +fn test_retain() { + let mut set = BTreeSet::from([1, 2, 3, 4, 5, 6]); + set.retain(|&k| k % 2 == 0); + assert_eq!(set.len(), 3); + assert!(set.contains(&2)); + assert!(set.contains(&4)); + assert!(set.contains(&6)); +} + +#[test] +fn test_extract_if() { + let mut x = BTreeSet::from([1]); + let mut y = BTreeSet::from([1]); + + x.extract_if(|_| true).for_each(drop); + y.extract_if(|_| false).for_each(drop); + assert_eq!(x.len(), 0); + assert_eq!(y.len(), 1); +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] +fn test_extract_if_drop_panic_leak() { + let a = CrashTestDummy::new(0); + let b = CrashTestDummy::new(1); + let c = CrashTestDummy::new(2); + let mut set = BTreeSet::new(); + set.insert(a.spawn(Panic::Never)); + set.insert(b.spawn(Panic::InDrop)); + set.insert(c.spawn(Panic::Never)); + + catch_unwind(move || set.extract_if(|dummy| dummy.query(true)).for_each(drop)).ok(); + + assert_eq!(a.queried(), 1); + assert_eq!(b.queried(), 1); + assert_eq!(c.queried(), 0); + assert_eq!(a.dropped(), 1); + assert_eq!(b.dropped(), 1); + assert_eq!(c.dropped(), 1); +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] +fn test_extract_if_pred_panic_leak() { + let a = CrashTestDummy::new(0); + let b = CrashTestDummy::new(1); + let c = CrashTestDummy::new(2); + let mut set = BTreeSet::new(); + set.insert(a.spawn(Panic::Never)); + set.insert(b.spawn(Panic::InQuery)); + 
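+    // The predicate panics when it queries `b`; `c` behind it is never
+    // queried, so both `b` and `c` must survive in the set afterwards.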
set.insert(c.spawn(Panic::InQuery)); + + catch_unwind(AssertUnwindSafe(|| { + set.extract_if(|dummy| dummy.query(true)).for_each(drop) + })) + .ok(); + + assert_eq!(a.queried(), 1); + assert_eq!(b.queried(), 1); + assert_eq!(c.queried(), 0); + assert_eq!(a.dropped(), 1); + assert_eq!(b.dropped(), 0); + assert_eq!(c.dropped(), 0); + assert_eq!(set.len(), 2); + assert_eq!(set.first().unwrap().id(), 1); + assert_eq!(set.last().unwrap().id(), 2); +} + +#[test] +fn test_clear() { + let mut x = BTreeSet::new(); + x.insert(1); + + x.clear(); + assert!(x.is_empty()); +} +#[test] +fn test_remove() { + let mut x = BTreeSet::new(); + assert!(x.is_empty()); + + x.insert(1); + x.insert(2); + x.insert(3); + x.insert(4); + + assert_eq!(x.remove(&2), true); + assert_eq!(x.remove(&0), false); + assert_eq!(x.remove(&5), false); + assert_eq!(x.remove(&1), true); + assert_eq!(x.remove(&2), false); + assert_eq!(x.remove(&3), true); + assert_eq!(x.remove(&4), true); + assert_eq!(x.remove(&4), false); + assert!(x.is_empty()); +} + +#[test] +fn test_zip() { + let mut x = BTreeSet::new(); + x.insert(5); + x.insert(12); + x.insert(11); + + let mut y = BTreeSet::new(); + y.insert("foo"); + y.insert("bar"); + + let x = x; + let y = y; + let mut z = x.iter().zip(&y); + + assert_eq!(z.next().unwrap(), (&5, &("bar"))); + assert_eq!(z.next().unwrap(), (&11, &("foo"))); + assert!(z.next().is_none()); +} + +#[test] +fn test_from_iter() { + let xs = [1, 2, 3, 4, 5, 6, 7, 8, 9]; + + let set = BTreeSet::from_iter(xs.iter()); + + for x in &xs { + assert!(set.contains(x)); + } +} + +#[test] +fn test_show() { + let mut set = BTreeSet::new(); + let empty = BTreeSet::::new(); + + set.insert(1); + set.insert(2); + + let set_str = format!("{set:?}"); + + assert_eq!(set_str, "{1, 2}"); + assert_eq!(format!("{empty:?}"), "{}"); +} + +#[test] +fn test_extend_ref() { + let mut a = BTreeSet::new(); + a.insert(1); + + a.extend(&[2, 3, 4]); + + assert_eq!(a.len(), 4); + assert!(a.contains(&1)); + assert!(a.contains(&2)); + assert!(a.contains(&3)); + assert!(a.contains(&4)); + + let mut b = BTreeSet::new(); + b.insert(5); + b.insert(6); + + a.extend(&b); + + assert_eq!(a.len(), 6); + assert!(a.contains(&1)); + assert!(a.contains(&2)); + assert!(a.contains(&3)); + assert!(a.contains(&4)); + assert!(a.contains(&5)); + assert!(a.contains(&6)); +} + +#[test] +fn test_recovery() { + #[derive(Debug)] + struct Foo(&'static str, i32); + + impl PartialEq for Foo { + fn eq(&self, other: &Self) -> bool { + self.0 == other.0 + } + } + + impl Eq for Foo {} + + impl PartialOrd for Foo { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } + } + + impl Ord for Foo { + fn cmp(&self, other: &Self) -> Ordering { + self.0.cmp(&other.0) + } + } + + let mut s = BTreeSet::new(); + assert_eq!(s.replace(Foo("a", 1)), None); + assert_eq!(s.len(), 1); + assert_eq!(s.replace(Foo("a", 2)), Some(Foo("a", 1))); + assert_eq!(s.len(), 1); + + { + let mut it = s.iter(); + assert_eq!(it.next(), Some(&Foo("a", 2))); + assert_eq!(it.next(), None); + } + + assert_eq!(s.get(&Foo("a", 1)), Some(&Foo("a", 2))); + assert_eq!(s.take(&Foo("a", 1)), Some(Foo("a", 2))); + assert_eq!(s.len(), 0); + + assert_eq!(s.get(&Foo("a", 1)), None); + assert_eq!(s.take(&Foo("a", 1)), None); + + assert_eq!(s.iter().next(), None); +} + +#[allow(dead_code)] +fn assert_covariance() { + fn set<'new>(v: BTreeSet<&'static str>) -> BTreeSet<&'new str> { + v + } + fn iter<'a, 'new>(v: Iter<'a, &'static str>) -> Iter<'a, &'new str> { + v + } + fn into_iter<'new>(v: 
IntoIter<&'static str>) -> IntoIter<&'new str> { + v + } + fn range<'a, 'new>(v: Range<'a, &'static str>) -> Range<'a, &'new str> { + v + } + // not applied to Difference, Intersection, SymmetricDifference, Union +} + +#[allow(dead_code)] +fn assert_sync() { + fn set(v: &BTreeSet) -> impl Sync + '_ { + v + } + + fn iter(v: &BTreeSet) -> impl Sync + '_ { + v.iter() + } + + fn into_iter(v: BTreeSet) -> impl Sync { + v.into_iter() + } + + fn range(v: &BTreeSet) -> impl Sync + '_ { + v.range(..) + } + + fn extract_if(v: &mut BTreeSet) -> impl Sync + '_ { + v.extract_if(|_| false) + } + + fn difference(v: &BTreeSet) -> impl Sync + '_ { + v.difference(&v) + } + + fn intersection(v: &BTreeSet) -> impl Sync + '_ { + v.intersection(&v) + } + + fn symmetric_difference(v: &BTreeSet) -> impl Sync + '_ { + v.symmetric_difference(&v) + } + + fn union(v: &BTreeSet) -> impl Sync + '_ { + v.union(&v) + } +} + +#[allow(dead_code)] +fn assert_send() { + fn set(v: BTreeSet) -> impl Send { + v + } + + fn iter(v: &BTreeSet) -> impl Send + '_ { + v.iter() + } + + fn into_iter(v: BTreeSet) -> impl Send { + v.into_iter() + } + + fn range(v: &BTreeSet) -> impl Send + '_ { + v.range(..) + } + + fn extract_if(v: &mut BTreeSet) -> impl Send + '_ { + v.extract_if(|_| false) + } + + fn difference(v: &BTreeSet) -> impl Send + '_ { + v.difference(&v) + } + + fn intersection(v: &BTreeSet) -> impl Send + '_ { + v.intersection(&v) + } + + fn symmetric_difference(v: &BTreeSet) -> impl Send + '_ { + v.symmetric_difference(&v) + } + + fn union(v: &BTreeSet) -> impl Send + '_ { + v.union(&v) + } +} + +#[allow(dead_code)] +// Check that the member-like functions conditionally provided by #[derive()] +// are not overridden by genuine member functions with a different signature. +fn assert_derives() { + fn hash(v: BTreeSet, state: &mut H) { + v.hash(state); + // Tested much more thoroughly outside the crate in btree_set_hash.rs + } + fn eq(v: BTreeSet) { + let _ = v.eq(&v); + } + fn ne(v: BTreeSet) { + let _ = v.ne(&v); + } + fn cmp(v: BTreeSet) { + let _ = v.cmp(&v); + } + fn min(v: BTreeSet, w: BTreeSet) { + let _ = v.min(w); + } + fn max(v: BTreeSet, w: BTreeSet) { + let _ = v.max(w); + } + fn clamp(v: BTreeSet, w: BTreeSet, x: BTreeSet) { + let _ = v.clamp(w, x); + } + fn partial_cmp(v: &BTreeSet) { + let _ = v.partial_cmp(&v); + } +} + +#[test] +fn test_ord_absence() { + fn set(mut set: BTreeSet) { + let _ = set.is_empty(); + let _ = set.len(); + set.clear(); + let _ = set.iter(); + let _ = set.into_iter(); + } + + fn set_debug(set: BTreeSet) { + format!("{set:?}"); + format!("{:?}", set.iter()); + format!("{:?}", set.into_iter()); + } + + fn set_clone(mut set: BTreeSet) { + set.clone_from(&set.clone()); + } + + #[derive(Debug, Clone)] + struct NonOrd; + impl TryClone for NonOrd { + fn try_clone(&self) -> Result { + Ok(self.clone()) + } + } + set(BTreeSet::::new()); + set_debug(BTreeSet::::new()); + set_clone(BTreeSet::::default()); +} + +#[test] +fn test_append() { + let mut a = BTreeSet::new(); + a.insert(1); + a.insert(2); + a.insert(3); + + let mut b = BTreeSet::new(); + b.insert(3); + b.insert(4); + b.insert(5); + + a.append(&mut b); + + assert_eq!(a.len(), 5); + assert_eq!(b.len(), 0); + + assert_eq!(a.contains(&1), true); + assert_eq!(a.contains(&2), true); + assert_eq!(a.contains(&3), true); + assert_eq!(a.contains(&4), true); + assert_eq!(a.contains(&5), true); +} + +#[test] +fn test_first_last() { + let mut a = BTreeSet::new(); + assert_eq!(a.first(), None); + assert_eq!(a.last(), None); + a.insert(1); + 
+    assert_eq!(a.first(), Some(&1));
+    assert_eq!(a.last(), Some(&1));
+    a.insert(2);
+    assert_eq!(a.first(), Some(&1));
+    assert_eq!(a.last(), Some(&2));
+    for i in 3..=12 {
+        a.insert(i);
+    }
+    assert_eq!(a.first(), Some(&1));
+    assert_eq!(a.last(), Some(&12));
+    assert_eq!(a.pop_first(), Some(1));
+    assert_eq!(a.pop_last(), Some(12));
+    assert_eq!(a.pop_first(), Some(2));
+    assert_eq!(a.pop_last(), Some(11));
+    assert_eq!(a.pop_first(), Some(3));
+    assert_eq!(a.pop_last(), Some(10));
+    assert_eq!(a.pop_first(), Some(4));
+    assert_eq!(a.pop_first(), Some(5));
+    assert_eq!(a.pop_first(), Some(6));
+    assert_eq!(a.pop_first(), Some(7));
+    assert_eq!(a.pop_first(), Some(8));
+    assert_eq!(a.clone().pop_last(), Some(9));
+    assert_eq!(a.pop_first(), Some(9));
+    assert_eq!(a.pop_first(), None);
+    assert_eq!(a.pop_last(), None);
+}
+
+// Unlike the function with the same name in map/tests, returns no values.
+// Which also means it returns different predetermined pseudo-random keys,
+// and the test cases using this function explore slightly different trees.
+fn rand_data(len: usize) -> Vec<u32> {
+    let mut rng = DeterministicRng::new();
+    Vec::from_iter((0..len).map(|_| rng.next()))
+}
+
+#[test]
+fn test_split_off_empty_right() {
+    let mut data = rand_data(173);
+
+    let mut set = BTreeSet::from_iter(data.clone());
+    let right = set.split_off(&(data.iter().max().unwrap() + 1));
+
+    data.sort();
+    assert!(set.into_iter().eq(data));
+    assert!(right.into_iter().eq(None));
+}
+
+#[test]
+fn test_split_off_empty_left() {
+    let mut data = rand_data(314);
+
+    let mut set = BTreeSet::from_iter(data.clone());
+    let right = set.split_off(data.iter().min().unwrap());
+
+    data.sort();
+    assert!(set.into_iter().eq(None));
+    assert!(right.into_iter().eq(data));
+}
+
+#[test]
+fn test_split_off_large_random_sorted() {
+    // Miri is too slow
+    let mut data = if cfg!(miri) {
+        rand_data(529)
+    } else {
+        rand_data(1529)
+    };
+    // special case with maximum height.
+    data.sort();
+
+    let mut set = BTreeSet::from_iter(data.clone());
+    let key = data[data.len() / 2];
+    let right = set.split_off(&key);
+
+    assert!(set
+        .into_iter()
+        .eq(data.clone().into_iter().filter(|x| *x < key)));
+    assert!(right.into_iter().eq(data.into_iter().filter(|x| *x >= key)));
+}
+
+#[test]
+fn from_array() {
+    let set = BTreeSet::from([1, 2, 3, 4]);
+    let unordered_duplicates = BTreeSet::from([4, 1, 4, 3, 2]);
+    assert_eq!(set, unordered_duplicates);
+}
+
+#[should_panic(expected = "range start is greater than range end in BTree")]
+#[test]
+fn test_range_panic_1() {
+    let mut set = BTreeSet::new();
+    set.insert(3);
+    set.insert(5);
+    set.insert(8);
+
+    let _invalid_range = set.range((Included(&8), Included(&3)));
+}
+
+#[should_panic(expected = "range start and end are equal and excluded in BTree")]
+#[test]
+fn test_range_panic_2() {
+    let mut set = BTreeSet::new();
+    set.insert(3);
+    set.insert(5);
+    set.insert(8);
+
+    let _invalid_range = set.range((Excluded(&5), Excluded(&5)));
+}
diff --git a/crates/rune-alloc/src/alloc/btree/set_val.rs b/crates/rune-alloc/src/alloc/btree/set_val.rs
new file mode 100644
index 000000000..4970eadcf
--- /dev/null
+++ b/crates/rune-alloc/src/alloc/btree/set_val.rs
@@ -0,0 +1,14 @@
+use crate::alloc::{Error, TryClone};
+
+/// Zero-Sized Type (ZST) for internal `BTreeSet` values.
+/// Used instead of `()` to differentiate between:
+/// * `BTreeMap<T, ()>` (possible user-defined map)
+/// * `BTreeMap<T, SetValZST>` (internal set representation)
+#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Clone, Default)]
+pub(crate) struct SetValZST;
+
+impl TryClone for SetValZST {
+    fn try_clone(&self) -> Result<Self, Error> {
+        Ok(Self)
+    }
+}
diff --git a/crates/rune-alloc/src/alloc/btree/split.rs b/crates/rune-alloc/src/alloc/btree/split.rs
new file mode 100644
index 000000000..a52a0a024
--- /dev/null
+++ b/crates/rune-alloc/src/alloc/btree/split.rs
@@ -0,0 +1,89 @@
+use core::borrow::Borrow;
+use core::cmp::Ordering;
+
+use super::node::{ForceResult::*, Root};
+use super::search::SearchResult::*;
+
+use crate::alloc::{AllocError, Allocator};
+
+impl<K, V> Root<K, V> {
+    /// Calculates the length of both trees that result from splitting up
+    /// a given number of distinct key-value pairs.
+    pub(crate) fn calc_split_length(
+        total_num: usize,
+        root_a: &Root<K, V>,
+        root_b: &Root<K, V>,
+    ) -> (usize, usize) {
+        let (length_a, length_b);
+        if root_a.height() < root_b.height() {
+            length_a = root_a.reborrow().calc_length();
+            length_b = total_num - length_a;
+            debug_assert_eq!(length_b, root_b.reborrow().calc_length());
+        } else {
+            length_b = root_b.reborrow().calc_length();
+            length_a = total_num - length_b;
+            debug_assert_eq!(length_a, root_a.reborrow().calc_length());
+        }
+        (length_a, length_b)
+    }
+
+    /// Split off a tree with key-value pairs at and after the given key.
+    /// The result is meaningful only if the tree is ordered by key,
+    /// and if the ordering of `Q` corresponds to that of `K`.
+    /// If `self` respects all `BTreeMap` tree invariants, then both
+    /// `self` and the returned tree will respect those invariants.
+    pub(crate) fn split_off<C: ?Sized, E, Q: ?Sized, A: Allocator>(
+        &mut self,
+        cx: &mut C,
+        key: &Q,
+        alloc: &A,
+        cmp: fn(&mut C, &Q, &Q) -> Result<Ordering, E>,
+    ) -> Result<Result<Self, AllocError>, E>
+    where
+        K: Borrow<Q>,
+    {
+        let left_root = self;
+
+        let mut right_root = match Root::new_pillar(left_root.height(), alloc) {
+            Ok(root) => root,
+            Err(e) => return Ok(Err(e)),
+        };
+
+        let mut left_node = left_root.borrow_mut();
+        let mut right_node = right_root.borrow_mut();
+
+        loop {
+            let mut split_edge = match left_node.search_node(cx, key, cmp)? {
+                // key is going to the right tree
+                Found(kv) => kv.left_edge(),
+                GoDown(edge) => edge,
+            };
+
+            split_edge.move_suffix(&mut right_node);
+
+            match (split_edge.force(), right_node.force()) {
+                (Internal(edge), Internal(node)) => {
+                    left_node = edge.descend();
+                    right_node = node.first_edge().descend();
+                }
+                (Leaf(_), Leaf(_)) => break,
+                _ => unreachable!(),
+            }
+        }
+
+        left_root.fix_right_border(alloc);
+        right_root.fix_left_border(alloc);
+        Ok(Ok(right_root))
+    }
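+
+    // In brief, the loop in `split_off` walks both trees in lockstep: at
+    // each level it searches the current left node for `key`, moves that
+    // node's suffix (everything at or after the key) into the
+    // corresponding right node with `move_suffix`, then descends one
+    // level on both sides. Once the leaves are handled, the two border
+    // fix-ups restore the B-tree invariants on either side of the cut.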
+
+    /// Creates a tree consisting of empty nodes.
+    fn new_pillar<A: Allocator>(height: usize, alloc: &A) -> Result<Self, AllocError> {
+        let mut root = Root::new(alloc)?;
+
+        for _ in 0..height {
+            root.push_internal_level(alloc)?;
+        }
+
+        Ok(root)
+    }
+}
diff --git a/crates/rune-alloc/src/alloc/hashbrown/map.rs b/crates/rune-alloc/src/alloc/hashbrown/map.rs
new file mode 100644
index 000000000..57ae648e6
--- /dev/null
+++ b/crates/rune-alloc/src/alloc/hashbrown/map.rs
@@ -0,0 +1,9304 @@
+use core::borrow::Borrow;
+use core::convert::Infallible;
+use core::fmt::{self, Debug};
+use core::hash::{BuildHasher, Hash};
+use core::iter::FusedIterator;
+use core::marker::PhantomData;
+use core::mem;
+use core::ops::Index;
+
+#[cfg(test)]
+use crate::alloc::testing::*;
+use crate::alloc::{into_ok, into_ok_try};
+use crate::alloc::{Allocator, CustomError, Error, Global, TryClone, TryExtend, TryFromIteratorIn};
+
+use super::raw::{Bucket, RawDrain, RawIntoIter, RawIter, RawTable};
+use super::{Equivalent, ErrorOrInsertSlot, HasherFn};
+
+/// Default hasher for `HashMap`.
+pub type DefaultHashBuilder = core::hash::BuildHasherDefault<Hasher>;
+
+/// Default source of random state.
+pub type RandomState = ahash::RandomState;
+
+/// Default hasher.
+pub type Hasher = ahash::AHasher;
+
+/// A hash map implemented with quadratic probing and SIMD lookup.
+///
+/// The default hashing algorithm is currently [`AHash`], though this is
+/// subject to change at any point in the future. This hash function is very
+/// fast for all types of keys, but this algorithm will typically *not* protect
+/// against attacks such as HashDoS.
+///
+/// The hashing algorithm can be replaced on a per-`HashMap` basis using the
+/// [`default`], [`with_hasher`], and [`with_capacity_and_hasher`] methods. Many
+/// alternative algorithms are available on crates.io, such as the [`fnv`] crate.
+///
+/// It is required that the keys implement the [`Eq`] and [`Hash`] traits, although
+/// this can frequently be achieved by using `#[derive(PartialEq, Eq, Hash)]`.
+/// If you implement these yourself, it is important that the following
+/// property holds:
+///
+/// ```text
+/// k1 == k2 -> hash(k1) == hash(k2)
+/// ```
+///
+/// In other words, if two keys are equal, their hashes must be equal.
+///
+/// It is a logic error for a key to be modified in such a way that the key's
+/// hash, as determined by the [`Hash`] trait, or its equality, as determined by
+/// the [`Eq`] trait, changes while it is in the map. This is normally only
+/// possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
+///
+/// It is also a logic error for the [`Hash`] implementation of a key to panic.
+/// This is generally only possible if the trait is implemented manually. If a
+/// panic does occur then the contents of the `HashMap` may become corrupted and
+/// some items may be dropped from the table.
+///
+/// # Examples
+///
+/// ```
+/// use rune_alloc::HashMap;
+///
+/// // Type inference lets us omit an explicit type signature (which
+/// // would be `HashMap<String, String>` in this example).
+/// let mut book_reviews = HashMap::new();
+///
+/// // Review some books.
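+/// // Unlike the infallible `insert` on std's HashMap, `try_insert`
+/// // reports allocation failure, hence the trailing `?` on each call.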
+/// book_reviews.try_insert( +/// "Adventures of Huckleberry Finn".to_string(), +/// "My favorite book.".to_string(), +/// )?; +/// book_reviews.try_insert( +/// "Grimms' Fairy Tales".to_string(), +/// "Masterpiece.".to_string(), +/// )?; +/// book_reviews.try_insert( +/// "Pride and Prejudice".to_string(), +/// "Very enjoyable.".to_string(), +/// )?; +/// book_reviews.try_insert( +/// "The Adventures of Sherlock Holmes".to_string(), +/// "Eye lyked it alot.".to_string(), +/// )?; +/// +/// // Check for a specific one. +/// // When collections store owned values (String), they can still be +/// // queried using references (&str). +/// if !book_reviews.contains_key("Les Misérables") { +/// println!("We've got {} reviews, but Les Misérables ain't one.", +/// book_reviews.len()); +/// } +/// +/// // oops, this review has a lot of spelling mistakes, let's delete it. +/// book_reviews.remove("The Adventures of Sherlock Holmes"); +/// +/// // Look up the values associated with some keys. +/// let to_find = ["Pride and Prejudice", "Alice's Adventure in Wonderland"]; +/// for &book in &to_find { +/// match book_reviews.get(book) { +/// Some(review) => println!("{}: {}", book, review), +/// None => println!("{} is unreviewed.", book) +/// } +/// } +/// +/// // Look up the value for a key (will panic if the key is not found). +/// println!("Review for Jane: {}", book_reviews["Pride and Prejudice"]); +/// +/// // Iterate over everything. +/// for (book, review) in &book_reviews { +/// println!("{}: \"{}\"", book, review); +/// } +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// `HashMap` also implements an [`Entry API`](#method.entry), which allows +/// for more complex methods of getting, setting, updating and removing keys and +/// their values: +/// +/// ``` +/// use rune_alloc::HashMap; +/// +/// // type inference lets us omit an explicit type signature (which +/// // would be `HashMap<&str, u8>` in this example). +/// let mut player_stats = HashMap::new(); +/// +/// fn random_stat_buff() -> u8 { +/// // could actually return some random value here - let's just return +/// // some fixed value for now +/// 42 +/// } +/// +/// // insert a key only if it doesn't already exist +/// player_stats.entry("health").or_try_insert(100)?; +/// +/// // insert a key using a function that provides a new value only if it +/// // doesn't already exist +/// player_stats.entry("defence").or_try_insert_with(random_stat_buff)?; +/// +/// // update a key, guarding against the key possibly not being set +/// let stat = player_stats.entry("attack").or_try_insert(100)?; +/// *stat += random_stat_buff(); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// The easiest way to use `HashMap` with a custom key type is to derive [`Eq`] and [`Hash`]. +/// We must also derive [`PartialEq`]. 
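+/// Doing so guarantees that the `k1 == k2 -> hash(k1) == hash(k2)`
+/// property above holds automatically.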
+/// +/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html +/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html +/// [`PartialEq`]: https://doc.rust-lang.org/std/cmp/trait.PartialEq.html +/// [`RefCell`]: https://doc.rust-lang.org/std/cell/struct.RefCell.html +/// [`Cell`]: https://doc.rust-lang.org/std/cell/struct.Cell.html +/// [`default`]: #method.default +/// [`with_hasher`]: #method.with_hasher +/// [`with_capacity_and_hasher`]: #method.with_capacity_and_hasher +/// [`fnv`]: https://crates.io/crates/fnv +/// [`AHash`]: https://crates.io/crates/ahash +/// +/// ``` +/// use rune_alloc::HashMap; +/// +/// #[derive(Hash, Eq, PartialEq, Debug)] +/// struct Viking { +/// name: String, +/// country: String, +/// } +/// +/// impl Viking { +/// /// Creates a new Viking. +/// fn new(name: &str, country: &str) -> Viking { +/// Viking { name: name.to_string(), country: country.to_string() } +/// } +/// } +/// +/// // Use a HashMap to store the vikings' health points. +/// let mut vikings = HashMap::new(); +/// +/// vikings.try_insert(Viking::new("Einar", "Norway"), 25)?; +/// vikings.try_insert(Viking::new("Olaf", "Denmark"), 24)?; +/// vikings.try_insert(Viking::new("Harald", "Iceland"), 12)?; +/// +/// // Use derived implementation to print the status of the vikings. +/// for (viking, health) in &vikings { +/// println!("{:?} has {} hp", viking, health); +/// } +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// A `HashMap` with fixed list of elements can be initialized from an array: +/// +/// ``` +/// use rune_alloc::{HashMap, IteratorExt}; +/// +/// let timber_resources: HashMap<&str, i32> = [("Norway", 100), ("Denmark", 50), ("Iceland", 10)] +/// .iter().cloned().try_collect()?; +/// // use the values stored in map +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +pub struct HashMap { + pub(crate) hash_builder: S, + pub(crate) table: RawTable<(K, V), A>, +} + +impl TryClone for HashMap +where + K: TryClone, + V: TryClone, +{ + fn try_clone(&self) -> Result { + Ok(HashMap { + hash_builder: self.hash_builder.clone(), + table: self.table.try_clone()?, + }) + } + + fn try_clone_from(&mut self, source: &Self) -> Result<(), Error> { + self.table.try_clone_from(&source.table)?; + + // Update hash_builder only if we successfully cloned all elements. 
+ self.hash_builder.clone_from(&source.hash_builder); + Ok(()) + } +} + +#[cfg(test)] +impl Clone for HashMap +where + K: TryClone, + V: TryClone, +{ + fn clone(&self) -> Self { + self.try_clone().abort() + } + + fn clone_from(&mut self, source: &Self) { + self.try_clone_from(source).abort() + } +} + +/// Ensures that a single closure type across uses of this which, in turn prevents multiple +/// instances of any functions like RawTable::reserve from being generated +#[cfg_attr(feature = "inline-more", inline)] +pub(crate) fn make_hasher(hash_builder: &S) -> impl HasherFn<(), T, Infallible> + '_ +where + T: Hash, + S: BuildHasher, +{ + move |_: &mut (), value: &T| Ok(make_hash::(hash_builder, value)) +} + +/// Ensures that a single closure type across uses of this which, in turn prevents multiple +/// instances of any functions like RawTable::reserve from being generated +#[cfg_attr(feature = "inline-more", inline)] +fn equivalent_key(k: &Q) -> impl Fn(&mut C, &(K, V)) -> Result + '_ +where + Q: ?Sized + Equivalent, +{ + move |_, x| Ok(k.equivalent(&x.0)) +} + +/// Ensures that a single closure type across uses of this which, in turn prevents multiple +/// instances of any functions like RawTable::reserve from being generated +#[cfg_attr(feature = "inline-more", inline)] +fn equivalent(k: &Q) -> impl Fn(&K) -> bool + '_ +where + Q: ?Sized + Equivalent, +{ + move |x| k.equivalent(x) +} + +#[cfg(not(rune_nightly))] +#[cfg_attr(feature = "inline-more", inline)] +pub(crate) fn make_hash(hash_builder: &S, val: &Q) -> u64 +where + Q: Hash, + S: BuildHasher, +{ + use core::hash::Hasher; + let mut state = hash_builder.build_hasher(); + val.hash(&mut state); + state.finish() +} + +#[cfg(rune_nightly)] +#[cfg_attr(feature = "inline-more", inline)] +pub(crate) fn make_hash(hash_builder: &S, val: &Q) -> u64 +where + Q: Hash + ?Sized, + S: BuildHasher, +{ + hash_builder.hash_one(val) +} + +impl HashMap { + /// Creates an empty `HashMap`. + /// + /// The hash map is initially created with a capacity of 0, so it will not allocate until it + /// is first inserted into. + /// + /// # HashDoS resistance + /// + /// The `hash_builder` normally use a fixed key by default and that does not + /// allow the `HashMap` to be protected against attacks such as [`HashDoS`]. + /// Users who require HashDoS resistance should explicitly use + /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`] as + /// the hasher when creating a [`HashMap`], for example with + /// [`with_hasher`](HashMap::with_hasher) method. + /// + /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack + /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// let mut map: HashMap<&str, i32> = HashMap::new(); + /// assert_eq!(map.len(), 0); + /// assert_eq!(map.capacity(), 0); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn new() -> Self { + Self::default() + } + + /// Creates an empty `HashMap` with the specified capacity. + /// + /// The hash map will be able to hold at least `capacity` elements without + /// reallocating. If `capacity` is 0, the hash map will not allocate. + /// + /// # HashDoS resistance + /// + /// The `hash_builder` normally use a fixed key by default and that does not + /// allow the `HashMap` to be protected against attacks such as [`HashDoS`]. 
+ /// Users who require HashDoS resistance should explicitly use + /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`] as + /// the hasher when creating a [`HashMap`], for example with + /// [`try_with_capacity_and_hasher`] method. + /// + /// [`try_with_capacity_and_hasher`]: HashMap::try_with_capacity_and_hasher + /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack + /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// let mut map: HashMap<&str, i32> = HashMap::try_with_capacity(10)?; + /// assert_eq!(map.len(), 0); + /// assert!(map.capacity() >= 10); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn try_with_capacity(capacity: usize) -> Result { + Self::try_with_capacity_and_hasher(capacity, DefaultHashBuilder::default()) + } + + #[cfg(test)] + pub(crate) fn with_capacity(capacity: usize) -> Self { + Self::try_with_capacity(capacity).abort() + } +} + +impl HashMap { + /// Creates an empty `HashMap` using the given allocator. + /// + /// The hash map is initially created with a capacity of 0, so it will not allocate until it + /// is first inserted into. + /// + /// # HashDoS resistance + /// + /// The `hash_builder` normally use a fixed key by default and that does + /// not allow the `HashMap` to be protected against attacks such as [`HashDoS`]. + /// Users who require HashDoS resistance should explicitly use + /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`] + /// as the hasher when creating a [`HashMap`], for example with + /// [`with_hasher_in`](HashMap::with_hasher_in) method. + /// + /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack + /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{HashMap, Global}; + /// + /// let mut map = HashMap::new_in(Global); + /// + /// // The created HashMap holds none elements + /// assert_eq!(map.len(), 0); + /// + /// // The created HashMap also doesn't allocate memory + /// assert_eq!(map.capacity(), 0); + /// + /// // Now we insert element inside created HashMap + /// map.try_insert("One", 1)?; + /// // We can see that the HashMap holds 1 element + /// assert_eq!(map.len(), 1); + /// // And it also allocates some capacity + /// assert!(map.capacity() > 1); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn new_in(alloc: A) -> Self { + Self::with_hasher_in(DefaultHashBuilder::default(), alloc) + } + + /// Creates an empty `HashMap` with the specified capacity using the given allocator. + /// + /// The hash map will be able to hold at least `capacity` elements without + /// reallocating. If `capacity` is 0, the hash map will not allocate. + /// + /// # HashDoS resistance + /// + /// The `hash_builder` normally use a fixed key by default and that does not + /// allow the `HashMap` to be protected against attacks such as [`HashDoS`]. + /// Users who require HashDoS resistance should explicitly use + /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`] as + /// the hasher when creating a [`HashMap`], for example with + /// [`try_with_capacity_and_hasher_in`] method. 
+ /// + /// [`try_with_capacity_and_hasher_in`]: + /// HashMap::try_with_capacity_and_hasher_in + /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack + /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{HashMap, Global}; + /// + /// let mut map = HashMap::try_with_capacity_in(5, Global)?; + /// + /// // The created HashMap holds none elements + /// assert_eq!(map.len(), 0); + /// // But it can hold at least 5 elements without reallocating + /// let empty_map_capacity = map.capacity(); + /// assert!(empty_map_capacity >= 5); + /// + /// // Now we insert some 5 elements inside created HashMap + /// map.try_insert("One", 1)?; + /// map.try_insert("Two", 2)?; + /// map.try_insert("Three", 3)?; + /// map.try_insert("Four", 4)?; + /// map.try_insert("Five", 5)?; + /// + /// // We can see that the HashMap holds 5 elements + /// assert_eq!(map.len(), 5); + /// // But its capacity isn't changed + /// assert_eq!(map.capacity(), empty_map_capacity); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn try_with_capacity_in(capacity: usize, alloc: A) -> Result { + Self::try_with_capacity_and_hasher_in(capacity, DefaultHashBuilder::default(), alloc) + } +} + +impl HashMap { + /// Creates an empty `HashMap` which will use the given hash builder to hash + /// keys. + /// + /// The hash map is initially created with a capacity of 0, so it will not + /// allocate until it is first inserted into. + /// + /// # HashDoS resistance + /// + /// The `hash_builder` normally use a fixed key by default and that does + /// not allow the `HashMap` to be protected against attacks such as [`HashDoS`]. + /// Users who require HashDoS resistance should explicitly use + /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`] + /// as the hasher when creating a [`HashMap`]. + /// + /// The `hash_builder` passed should implement the [`BuildHasher`] trait for + /// the HashMap to be useful, see its documentation for details. + /// + /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack + /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html + /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// use rune_alloc::hash_map::DefaultHashBuilder; + /// + /// let s = DefaultHashBuilder::default(); + /// let mut map = HashMap::with_hasher(s); + /// assert_eq!(map.len(), 0); + /// assert_eq!(map.capacity(), 0); + /// + /// map.try_insert(1, 2)?; + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub const fn with_hasher(hash_builder: S) -> Self { + Self { + hash_builder, + table: RawTable::new_in(Global), + } + } + + /// Creates an empty `HashMap` with the specified capacity, using `hash_builder` + /// to hash the keys. + /// + /// The hash map will be able to hold at least `capacity` elements without + /// reallocating. If `capacity` is 0, the hash map will not allocate. + /// + /// # HashDoS resistance + /// + /// The `hash_builder` normally use a fixed key by default and that does + /// not allow the `HashMap` to be protected against attacks such as [`HashDoS`]. 
+ /// Users who require HashDoS resistance should explicitly use + /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`] + /// as the hasher when creating a [`HashMap`]. + /// + /// The `hash_builder` passed should implement the [`BuildHasher`] trait for + /// the HashMap to be useful, see its documentation for details. + /// + /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack + /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html + /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// use rune_alloc::hash_map::DefaultHashBuilder; + /// + /// let s = DefaultHashBuilder::default(); + /// let mut map = HashMap::try_with_capacity_and_hasher(10, s)?; + /// assert_eq!(map.len(), 0); + /// assert!(map.capacity() >= 10); + /// + /// map.try_insert(1, 2)?; + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn try_with_capacity_and_hasher(capacity: usize, hash_builder: S) -> Result { + Ok(Self { + hash_builder, + table: RawTable::try_with_capacity_in(capacity, Global)?, + }) + } + + #[cfg(test)] + pub(crate) fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> Self { + Self::try_with_capacity_and_hasher(capacity, hash_builder).abort() + } +} + +impl HashMap { + /// Returns a reference to the underlying allocator. + #[inline] + pub fn allocator(&self) -> &A { + self.table.allocator() + } + + /// Creates an empty `HashMap` which will use the given hash builder to hash + /// keys. It will be allocated with the given allocator. + /// + /// The hash map is initially created with a capacity of 0, so it will not allocate until it + /// is first inserted into. + /// + /// # HashDoS resistance + /// + /// The `hash_builder` normally use a fixed key by default and that does + /// not allow the `HashMap` to be protected against attacks such as [`HashDoS`]. + /// Users who require HashDoS resistance should explicitly use + /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`] + /// as the hasher when creating a [`HashMap`]. + /// + /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack + /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// use rune_alloc::hash_map::DefaultHashBuilder; + /// + /// let s = DefaultHashBuilder::default(); + /// let mut map = HashMap::with_hasher(s); + /// map.try_insert(1, 2)?; + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub const fn with_hasher_in(hash_builder: S, alloc: A) -> Self { + Self { + hash_builder, + table: RawTable::new_in(alloc), + } + } + + /// Creates an empty `HashMap` with the specified capacity, using `hash_builder` + /// to hash the keys. It will be allocated with the given allocator. + /// + /// The hash map will be able to hold at least `capacity` elements without + /// reallocating. If `capacity` is 0, the hash map will not allocate. + /// + /// # HashDoS resistance + /// + /// The `hash_builder` normally use a fixed key by default and that does + /// not allow the `HashMap` to be protected against attacks such as [`HashDoS`]. 
+ /// Users who require HashDoS resistance should explicitly use + /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`] + /// as the hasher when creating a [`HashMap`]. + /// + /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack + /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{HashMap, Global}; + /// use rune_alloc::hash_map::DefaultHashBuilder; + /// + /// let s = DefaultHashBuilder::default(); + /// let mut map = HashMap::try_with_capacity_and_hasher_in(10, s, Global)?; + /// map.try_insert(1, 2)?; + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn try_with_capacity_and_hasher_in( + capacity: usize, + hash_builder: S, + alloc: A, + ) -> Result { + Ok(Self { + hash_builder, + table: RawTable::try_with_capacity_in(capacity, alloc)?, + }) + } + + /// Returns a reference to the map's [`BuildHasher`]. + /// + /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// use rune_alloc::hash_map::DefaultHashBuilder; + /// + /// let hasher = DefaultHashBuilder::default(); + /// let map: HashMap = HashMap::with_hasher(hasher); + /// let hasher: &DefaultHashBuilder = map.hasher(); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn hasher(&self) -> &S { + &self.hash_builder + } + + /// Returns the number of elements the map can hold without reallocating. + /// + /// This number is a lower bound; the `HashMap` might be able to hold + /// more, but is guaranteed to be able to hold at least this many. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// let map: HashMap = HashMap::try_with_capacity(100)?; + /// assert_eq!(map.len(), 0); + /// assert!(map.capacity() >= 100); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn capacity(&self) -> usize { + self.table.capacity() + } + + /// An iterator visiting all keys in arbitrary order. + /// The iterator element type is `&'a K`. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut map = HashMap::new(); + /// map.try_insert("a", 1)?; + /// map.try_insert("b", 2)?; + /// map.try_insert("c", 3)?; + /// assert_eq!(map.len(), 3); + /// let mut vec: Vec<&str> = Vec::new(); + /// + /// for key in map.keys() { + /// println!("{}", key); + /// vec.push(*key); + /// } + /// + /// // The `Keys` iterator produces keys in arbitrary order, so the + /// // keys must be sorted to test them against a sorted array. + /// vec.sort_unstable(); + /// assert_eq!(vec, ["a", "b", "c"]); + /// + /// assert_eq!(map.len(), 3); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn keys(&self) -> Keys<'_, K, V> { + Keys { inner: self.iter() } + } + + /// An iterator visiting all values in arbitrary order. + /// The iterator element type is `&'a V`. 
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut map = HashMap::new(); + /// map.try_insert("a", 1)?; + /// map.try_insert("b", 2)?; + /// map.try_insert("c", 3)?; + /// assert_eq!(map.len(), 3); + /// let mut vec: Vec = Vec::new(); + /// + /// for val in map.values() { + /// println!("{}", val); + /// vec.push(*val); + /// } + /// + /// // The `Values` iterator produces values in arbitrary order, so the + /// // values must be sorted to test them against a sorted array. + /// vec.sort_unstable(); + /// assert_eq!(vec, [1, 2, 3]); + /// + /// assert_eq!(map.len(), 3); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn values(&self) -> Values<'_, K, V> { + Values { inner: self.iter() } + } + + /// An iterator visiting all values mutably in arbitrary order. + /// The iterator element type is `&'a mut V`. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut map = HashMap::new(); + /// + /// map.try_insert("a", 1)?; + /// map.try_insert("b", 2)?; + /// map.try_insert("c", 3)?; + /// + /// for val in map.values_mut() { + /// *val = *val + 10; + /// } + /// + /// assert_eq!(map.len(), 3); + /// let mut vec: Vec = Vec::new(); + /// + /// for val in map.values() { + /// println!("{}", val); + /// vec.push(*val); + /// } + /// + /// // The `Values` iterator produces values in arbitrary order, so the + /// // values must be sorted to test them against a sorted array. + /// vec.sort_unstable(); + /// assert_eq!(vec, [11, 12, 13]); + /// + /// assert_eq!(map.len(), 3); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn values_mut(&mut self) -> ValuesMut<'_, K, V> { + ValuesMut { + inner: self.iter_mut(), + } + } + + /// An iterator visiting all key-value pairs in arbitrary order. + /// The iterator element type is `(&'a K, &'a V)`. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut map = HashMap::new(); + /// map.try_insert("a", 1)?; + /// map.try_insert("b", 2)?; + /// map.try_insert("c", 3)?; + /// assert_eq!(map.len(), 3); + /// let mut vec: Vec<(&str, i32)> = Vec::new(); + /// + /// for (key, val) in map.iter() { + /// println!("key: {} val: {}", key, val); + /// vec.push((*key, *val)); + /// } + /// + /// // The `Iter` iterator produces items in arbitrary order, so the + /// // items must be sorted to test them against a sorted array. + /// vec.sort_unstable(); + /// assert_eq!(vec, [("a", 1), ("b", 2), ("c", 3)]); + /// + /// assert_eq!(map.len(), 3); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn iter(&self) -> Iter<'_, K, V> { + // Here we tie the lifetime of self to the iter. + unsafe { + Iter { + inner: self.table.iter(), + marker: PhantomData, + } + } + } + + /// An iterator visiting all key-value pairs in arbitrary order, + /// with mutable references to the values. + /// The iterator element type is `(&'a K, &'a mut V)`. 
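+    ///
+    /// Note that keys are only ever handed out as shared references:
+    /// mutating a key in place could change its hash or equality, which
+    /// is the logic error described in the type-level documentation.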
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut map = HashMap::new(); + /// map.try_insert("a", 1)?; + /// map.try_insert("b", 2)?; + /// map.try_insert("c", 3)?; + /// + /// // Update all values + /// for (_, val) in map.iter_mut() { + /// *val *= 2; + /// } + /// + /// assert_eq!(map.len(), 3); + /// let mut vec: Vec<(&str, i32)> = Vec::new(); + /// + /// for (key, val) in &map { + /// println!("key: {} val: {}", key, val); + /// vec.push((*key, *val)); + /// } + /// + /// // The `Iter` iterator produces items in arbitrary order, so the + /// // items must be sorted to test them against a sorted array. + /// vec.sort_unstable(); + /// assert_eq!(vec, [("a", 2), ("b", 4), ("c", 6)]); + /// + /// assert_eq!(map.len(), 3); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn iter_mut(&mut self) -> IterMut<'_, K, V> { + // Here we tie the lifetime of self to the iter. + unsafe { + IterMut { + inner: self.table.iter(), + marker: PhantomData, + } + } + } + + #[cfg(test)] + #[cfg_attr(feature = "inline-more", inline)] + fn raw_capacity(&self) -> usize { + self.table.buckets() + } + + /// Returns the number of elements in the map. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut a = HashMap::new(); + /// assert_eq!(a.len(), 0); + /// a.try_insert(1, "a")?; + /// assert_eq!(a.len(), 1); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn len(&self) -> usize { + self.table.len() + } + + /// Returns `true` if the map contains no elements. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut a = HashMap::new(); + /// assert!(a.is_empty()); + /// a.try_insert(1, "a")?; + /// assert!(!a.is_empty()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Clears the map, returning all key-value pairs as an iterator. Keeps the + /// allocated memory for reuse. + /// + /// If the returned iterator is dropped before being fully consumed, it + /// drops the remaining key-value pairs. The returned iterator keeps a + /// mutable borrow on the vector to optimize its implementation. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut a = HashMap::new(); + /// a.try_insert(1, "a")?; + /// a.try_insert(2, "b")?; + /// let capacity_before_drain = a.capacity(); + /// + /// for (k, v) in a.drain().take(1) { + /// assert!(k == 1 || k == 2); + /// assert!(v == "a" || v == "b"); + /// } + /// + /// // As we can see, the map is empty and contains no element. + /// assert!(a.is_empty() && a.len() == 0); + /// // But map capacity is equal to old one. + /// assert_eq!(a.capacity(), capacity_before_drain); + /// + /// let mut a = HashMap::new(); + /// a.try_insert(1, "a")?; + /// a.try_insert(2, "b")?; + /// + /// { // Iterator is dropped without being consumed. + /// let d = a.drain(); + /// } + /// + /// // But the map is empty even if we do not use Drain iterator. + /// assert!(a.is_empty()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn drain(&mut self) -> Drain<'_, K, V, A> { + Drain { + inner: self.table.drain(), + } + } + + /// Retains only the elements specified by the predicate. Keeps the + /// allocated memory for reuse. 
+ /// + /// In other words, remove all pairs `(k, v)` such that `f(&k, &mut v)` returns `false`. + /// The elements are visited in unsorted (and unspecified) order. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{HashMap, Vec, IteratorExt}; + /// + /// let mut map: HashMap = (0..8).map(|x|(x, x*10)).try_collect()?; + /// assert_eq!(map.len(), 8); + /// + /// map.retain(|&k, _| k % 2 == 0); + /// + /// // We can see, that the number of elements inside map is changed. + /// assert_eq!(map.len(), 4); + /// + /// let mut vec: Vec<(i32, i32)> = map.iter().map(|(&k, &v)| (k, v)).try_collect()?; + /// vec.sort_unstable(); + /// assert_eq!(vec, [(0, 0), (2, 20), (4, 40), (6, 60)]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn retain(&mut self, mut f: F) + where + F: FnMut(&K, &mut V) -> bool, + { + // Here we only use `iter` as a temporary, preventing use-after-free + unsafe { + for item in self.table.iter() { + let &mut (ref key, ref mut value) = item.as_mut(); + if !f(key, value) { + self.table.erase(item); + } + } + } + } + + /// Drains elements which are true under the given predicate, + /// and returns an iterator over the removed items. + /// + /// In other words, move all pairs `(k, v)` such that `f(&k, &mut v)` returns `true` out + /// into another iterator. + /// + /// Note that `extract_if` lets you mutate every value in the filter closure, regardless of + /// whether you choose to keep or remove it. + /// + /// If the returned `ExtractIf` is not exhausted, e.g. because it is dropped without iterating + /// or the iteration short-circuits, then the remaining elements will be retained. + /// Use [`retain`] with a negated predicate if you do not need the returned iterator. + /// + /// Keeps the allocated memory for reuse. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{HashMap, Vec, IteratorExt}; + /// use rune_alloc::prelude::*; + /// + /// let mut map: HashMap = (0..8).map(|x| (x, x)).try_collect()?; + /// + /// let drained: HashMap = map.extract_if(|k, _v| k % 2 == 0).try_collect()?; + /// + /// let mut evens = drained.keys().cloned().try_collect::>()?; + /// let mut odds = map.keys().cloned().try_collect::>()?; + /// evens.sort(); + /// odds.sort(); + /// + /// assert_eq!(evens, rune_alloc::try_vec![0, 2, 4, 6]); + /// assert_eq!(odds, rune_alloc::try_vec![1, 3, 5, 7]); + /// + /// let mut map: HashMap = (0..8).map(|x| (x, x)).try_collect()?; + /// + /// { // Iterator is dropped without being consumed. + /// let d = map.extract_if(|k, _v| k % 2 != 0); + /// } + /// + /// // ExtractIf was not exhausted, therefore no elements were drained. + /// assert_eq!(map.len(), 8); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// [`retain`]: HashMap::retain + #[cfg_attr(feature = "inline-more", inline)] + pub fn extract_if(&mut self, f: F) -> ExtractIf<'_, K, V, F, A> + where + F: FnMut(&K, &mut V) -> bool, + { + ExtractIf { + f, + inner: ExtractIfInner { + iter: unsafe { self.table.iter() }, + table: &mut self.table, + }, + } + } + + /// Clears the map, removing all key-value pairs. Keeps the allocated memory + /// for reuse. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut a = HashMap::new(); + /// a.try_insert(1, "a")?; + /// let capacity_before_clear = a.capacity(); + /// + /// a.clear(); + /// + /// // Map is empty. + /// assert!(a.is_empty()); + /// // But map capacity is equal to old one. 
+ /// assert_eq!(a.capacity(), capacity_before_clear); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn clear(&mut self) { + self.table.clear(); + } + + /// Creates a consuming iterator visiting all the keys in arbitrary order. + /// The map cannot be used after calling this. + /// The iterator element type is `K`. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{HashMap, Vec, IteratorExt}; + /// + /// let mut map = HashMap::new(); + /// map.try_insert("a", 1)?; + /// map.try_insert("b", 2)?; + /// map.try_insert("c", 3)?; + /// + /// let mut vec: Vec<&str> = map.into_keys().try_collect()?; + /// + /// // The `IntoKeys` iterator produces keys in arbitrary order, so the + /// // keys must be sorted to test them against a sorted array. + /// vec.sort_unstable(); + /// assert_eq!(vec, ["a", "b", "c"]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn into_keys(self) -> IntoKeys { + IntoKeys { + inner: self.into_iter(), + } + } + + /// Creates a consuming iterator visiting all the values in arbitrary order. + /// The map cannot be used after calling this. + /// The iterator element type is `V`. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{HashMap, Vec, IteratorExt}; + /// + /// let mut map = HashMap::new(); + /// map.try_insert("a", 1)?; + /// map.try_insert("b", 2)?; + /// map.try_insert("c", 3)?; + /// + /// let mut vec: Vec = map.into_values().try_collect()?; + /// + /// // The `IntoValues` iterator produces values in arbitrary order, so + /// // the values must be sorted to test them against a sorted array. + /// vec.sort_unstable(); + /// assert_eq!(vec, [1, 2, 3]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn into_values(self) -> IntoValues { + IntoValues { + inner: self.into_iter(), + } + } +} + +impl HashMap +where + K: Eq + Hash, + S: BuildHasher, + A: Allocator, +{ + /// Tries to reserve capacity for at least `additional` more elements to be inserted + /// in the given `HashMap`. The collection may reserve more space to avoid + /// frequent reallocations. + /// + /// # Errors + /// + /// If the capacity overflows, or the allocator reports a failure, then an error + /// is returned. 
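+    ///
+    /// Unlike the infallible `reserve` on std's `HashMap`, capacity
+    /// overflow and allocation failure are reported as an [`Error`] value
+    /// here rather than as a panic or abort.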
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut map: HashMap<&str, isize> = HashMap::new(); + /// // Map is empty and doesn't allocate memory + /// assert_eq!(map.capacity(), 0); + /// + /// map.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?"); + /// + /// // And now map can hold at least 10 elements + /// assert!(map.capacity() >= 10); + /// ``` + /// If the capacity overflows, or the allocator reports a failure, then an error + /// is returned: + /// ``` + /// # fn test() { + /// use rune_alloc::{HashMap, Error}; + /// let mut map: HashMap = HashMap::new(); + /// + /// match map.try_reserve(usize::MAX) { + /// Err(error) => match error { + /// Error::CapacityOverflow => {} + /// _ => panic!("Error::AllocError ?"), + /// }, + /// _ => panic!(), + /// } + /// # } + /// # fn main() { + /// # #[cfg(not(miri))] + /// # test() + /// # } + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn try_reserve(&mut self, additional: usize) -> Result<(), Error> { + let hasher = make_hasher::(&self.hash_builder); + into_ok_try( + self.table + .try_reserve(&mut (), additional, hasher.into_tuple()), + ) + } + + #[cfg(test)] + pub fn reserve(&mut self, additional: usize) { + self.try_reserve(additional).abort() + } + + /// Shrinks the capacity of the map as much as possible. It will drop + /// down as much as possible while maintaining the internal rules + /// and possibly leaving some space in accordance with the resize policy. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut map: HashMap = HashMap::try_with_capacity(100)?; + /// map.try_insert(1, 2)?; + /// map.try_insert(3, 4)?; + /// assert!(map.capacity() >= 100); + /// map.try_shrink_to_fit()?; + /// assert!(map.capacity() >= 2); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn try_shrink_to_fit(&mut self) -> Result<(), Error> { + into_ok_try(self.table.shrink_to( + &mut (), + 0, + make_hasher::(&self.hash_builder).into_tuple(), + )) + } + + #[cfg(test)] + pub(crate) fn shrink_to_fit(&mut self) { + self.try_shrink_to_fit().abort() + } + + /// Shrinks the capacity of the map with a lower limit. It will drop + /// down no lower than the supplied limit while maintaining the internal rules + /// and possibly leaving some space in accordance with the resize policy. + /// + /// This function does nothing if the current capacity is smaller than the + /// supplied minimum capacity. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut map: HashMap = HashMap::try_with_capacity(100)?; + /// map.try_insert(1, 2)?; + /// map.try_insert(3, 4)?; + /// assert!(map.capacity() >= 100); + /// map.try_shrink_to(10)?; + /// assert!(map.capacity() >= 10); + /// map.try_shrink_to(0)?; + /// assert!(map.capacity() >= 2); + /// map.try_shrink_to(10)?; + /// assert!(map.capacity() >= 2); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn try_shrink_to(&mut self, min_capacity: usize) -> Result<(), Error> { + into_ok_try(self.table.shrink_to( + &mut (), + min_capacity, + make_hasher::(&self.hash_builder).into_tuple(), + )) + } + + /// Gets the given key's corresponding entry in the map for in-place manipulation. 
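+    ///
+    /// The lookup itself never allocates; any allocation is deferred to
+    /// the fallible insertion methods on the returned [`Entry`], which is
+    /// why those methods return `Result`.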
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut letters = HashMap::new(); + /// + /// for ch in "a short treatise on fungi".chars() { + /// let counter = letters.entry(ch).or_try_insert(0)?; + /// *counter += 1; + /// } + /// + /// assert_eq!(letters[&'s'], 2); + /// assert_eq!(letters[&'t'], 3); + /// assert_eq!(letters[&'u'], 1); + /// assert_eq!(letters.get(&'y'), None); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S, A> { + let hash = make_hash::(&self.hash_builder, &key); + if let Some(elem) = into_ok(self.table.find(&mut (), hash, equivalent_key(&key))) { + Entry::Occupied(OccupiedEntry { + hash, + key: Some(key), + elem, + table: self, + }) + } else { + Entry::Vacant(VacantEntry { + hash, + key, + table: self, + }) + } + } + + /// Gets the given key's corresponding entry by reference in the map for in-place manipulation. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut words: HashMap = HashMap::new(); + /// let source = ["poneyland", "horseyland", "poneyland", "poneyland"]; + /// for (i, &s) in source.iter().enumerate() { + /// let counter = words.entry_ref(s).or_try_insert(0)?; + /// *counter += 1; + /// } + /// + /// assert_eq!(words["poneyland"], 3); + /// assert_eq!(words["horseyland"], 1); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn entry_ref<'a, 'b, Q: ?Sized>(&'a mut self, key: &'b Q) -> EntryRef<'a, 'b, K, Q, V, S, A> + where + Q: Hash + Equivalent, + { + let hash = make_hash::(&self.hash_builder, key); + + if let Some(elem) = into_ok(self.table.find(&mut (), hash, equivalent_key(key))) { + EntryRef::Occupied(OccupiedEntryRef { + hash, + key: Some(KeyOrRef::Borrowed(key)), + elem, + table: self, + }) + } else { + EntryRef::Vacant(VacantEntryRef { + hash, + key: KeyOrRef::Borrowed(key), + table: self, + }) + } + } + + /// Returns a reference to the value corresponding to the key. + /// + /// The key may be any borrowed form of the map's key type, but + /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for + /// the key type. + /// + /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html + /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut map = HashMap::new(); + /// map.try_insert(1, "a")?; + /// assert_eq!(map.get(&1), Some(&"a")); + /// assert_eq!(map.get(&2), None); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn get(&self, k: &Q) -> Option<&V> + where + Q: Hash + Equivalent, + { + // Avoid `Option::map` because it bloats LLVM IR. + match self.get_inner(k) { + Some((_, v)) => Some(v), + None => None, + } + } + + /// Returns the key-value pair corresponding to the supplied key. + /// + /// The supplied key may be any borrowed form of the map's key type, but + /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for + /// the key type. 
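+    ///
+    /// For example, a map keyed by `String` can be queried with a plain
+    /// `&str`; a minimal sketch:
+    ///
+    /// ```
+    /// use rune_alloc::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.try_insert(String::from("answer"), 42)?;
+    ///
+    /// assert_eq!(
+    ///     map.get_key_value("answer"),
+    ///     Some((&String::from("answer"), &42)),
+    /// );
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```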
+ /// + /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html + /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut map = HashMap::new(); + /// map.try_insert(1, "a")?; + /// assert_eq!(map.get_key_value(&1), Some((&1, &"a"))); + /// assert_eq!(map.get_key_value(&2), None); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn get_key_value(&self, k: &Q) -> Option<(&K, &V)> + where + Q: Hash + Equivalent, + { + // Avoid `Option::map` because it bloats LLVM IR. + match self.get_inner(k) { + Some((key, value)) => Some((key, value)), + None => None, + } + } + + #[inline] + fn get_inner(&self, k: &Q) -> Option<&(K, V)> + where + Q: Hash + Equivalent, + { + if self.table.is_empty() { + None + } else { + let hash = make_hash::(&self.hash_builder, k); + into_ok(self.table.get(&mut (), hash, equivalent_key(k))) + } + } + + /// Returns the key-value pair corresponding to the supplied key, with a mutable reference to value. + /// + /// The supplied key may be any borrowed form of the map's key type, but + /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for + /// the key type. + /// + /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html + /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut map = HashMap::new(); + /// map.try_insert(1, "a")?; + /// let (k, v) = map.get_key_value_mut(&1).unwrap(); + /// assert_eq!(k, &1); + /// assert_eq!(v, &mut "a"); + /// *v = "b"; + /// assert_eq!(map.get_key_value_mut(&1), Some((&1, &mut "b"))); + /// assert_eq!(map.get_key_value_mut(&2), None); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn get_key_value_mut(&mut self, k: &Q) -> Option<(&K, &mut V)> + where + Q: Hash + Equivalent, + { + // Avoid `Option::map` because it bloats LLVM IR. + match self.get_inner_mut(k) { + Some(&mut (ref key, ref mut value)) => Some((key, value)), + None => None, + } + } + + /// Returns `true` if the map contains a value for the specified key. + /// + /// The key may be any borrowed form of the map's key type, but + /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for + /// the key type. + /// + /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html + /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut map = HashMap::new(); + /// map.try_insert(1, "a")?; + /// assert_eq!(map.contains_key(&1), true); + /// assert_eq!(map.contains_key(&2), false); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn contains_key(&self, k: &Q) -> bool + where + Q: Hash + Equivalent, + { + self.get_inner(k).is_some() + } + + /// Returns a mutable reference to the value corresponding to the key. + /// + /// The key may be any borrowed form of the map's key type, but + /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for + /// the key type. 
+ /// + /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html + /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut map = HashMap::new(); + /// map.try_insert(1, "a")?; + /// if let Some(x) = map.get_mut(&1) { + /// *x = "b"; + /// } + /// assert_eq!(map[&1], "b"); + /// + /// assert_eq!(map.get_mut(&2), None); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn get_mut(&mut self, k: &Q) -> Option<&mut V> + where + Q: Hash + Equivalent, + { + // Avoid `Option::map` because it bloats LLVM IR. + match self.get_inner_mut(k) { + Some(&mut (_, ref mut v)) => Some(v), + None => None, + } + } + + #[inline] + fn get_inner_mut(&mut self, k: &Q) -> Option<&mut (K, V)> + where + Q: Hash + Equivalent, + { + if self.table.is_empty() { + None + } else { + let hash = make_hash::(&self.hash_builder, k); + into_ok(self.table.get_mut(&mut (), hash, equivalent_key(k))) + } + } + + /// Attempts to get mutable references to `N` values in the map at once. + /// + /// Returns an array of length `N` with the results of each query. For soundness, at most one + /// mutable reference will be returned to any value. `None` will be returned if any of the + /// keys are duplicates or missing. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut libraries = HashMap::new(); + /// libraries.try_insert("Bodleian Library".to_string(), 1602)?; + /// libraries.try_insert("Athenæum".to_string(), 1807)?; + /// libraries.try_insert("Herzogin-Anna-Amalia-Bibliothek".to_string(), 1691)?; + /// libraries.try_insert("Library of Congress".to_string(), 1800)?; + /// + /// let got = libraries.get_many_mut([ + /// "Athenæum", + /// "Library of Congress", + /// ]); + /// assert_eq!( + /// got, + /// Some([ + /// &mut 1807, + /// &mut 1800, + /// ]), + /// ); + /// + /// // Missing keys result in None + /// let got = libraries.get_many_mut([ + /// "Athenæum", + /// "New York Public Library", + /// ]); + /// assert_eq!(got, None); + /// + /// // Duplicate keys result in None + /// let got = libraries.get_many_mut([ + /// "Athenæum", + /// "Athenæum", + /// ]); + /// assert_eq!(got, None); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn get_many_mut(&mut self, ks: [&Q; N]) -> Option<[&'_ mut V; N]> + where + Q: Hash + Equivalent, + { + self.get_many_mut_inner(ks).map(|res| res.map(|(_, v)| v)) + } + + /// Attempts to get mutable references to `N` values in the map at once, without validating that + /// the values are unique. + /// + /// Returns an array of length `N` with the results of each query. `None` will be returned if + /// any of the keys are missing. + /// + /// For a safe alternative see [`get_many_mut`](`HashMap::get_many_mut`). + /// + /// # Safety + /// + /// Calling this method with overlapping keys is *[undefined behavior]* even if the resulting + /// references are not used. 
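+    ///
+    /// In other words, the caller must guarantee that all `N` keys are
+    /// pairwise distinct; two equal keys would otherwise yield two `&mut`
+    /// references to the same value.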
+ /// + /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut libraries = HashMap::new(); + /// libraries.try_insert("Bodleian Library".to_string(), 1602)?; + /// libraries.try_insert("Athenæum".to_string(), 1807)?; + /// libraries.try_insert("Herzogin-Anna-Amalia-Bibliothek".to_string(), 1691)?; + /// libraries.try_insert("Library of Congress".to_string(), 1800)?; + /// + /// let got = libraries.get_many_mut([ + /// "Athenæum", + /// "Library of Congress", + /// ]); + /// assert_eq!( + /// got, + /// Some([ + /// &mut 1807, + /// &mut 1800, + /// ]), + /// ); + /// + /// // Missing keys result in None + /// let got = libraries.get_many_mut([ + /// "Athenæum", + /// "New York Public Library", + /// ]); + /// assert_eq!(got, None); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub unsafe fn get_many_unchecked_mut( + &mut self, + ks: [&Q; N], + ) -> Option<[&'_ mut V; N]> + where + Q: Hash + Equivalent, + { + self.get_many_unchecked_mut_inner(ks) + .map(|res| res.map(|(_, v)| v)) + } + + /// Attempts to get mutable references to `N` values in the map at once, with immutable + /// references to the corresponding keys. + /// + /// Returns an array of length `N` with the results of each query. For soundness, at most one + /// mutable reference will be returned to any value. `None` will be returned if any of the keys + /// are duplicates or missing. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut libraries = HashMap::new(); + /// libraries.try_insert("Bodleian Library".to_string(), 1602)?; + /// libraries.try_insert("Athenæum".to_string(), 1807)?; + /// libraries.try_insert("Herzogin-Anna-Amalia-Bibliothek".to_string(), 1691)?; + /// libraries.try_insert("Library of Congress".to_string(), 1800)?; + /// + /// let got = libraries.get_many_key_value_mut([ + /// "Bodleian Library", + /// "Herzogin-Anna-Amalia-Bibliothek", + /// ]); + /// assert_eq!( + /// got, + /// Some([ + /// (&"Bodleian Library".to_string(), &mut 1602), + /// (&"Herzogin-Anna-Amalia-Bibliothek".to_string(), &mut 1691), + /// ]), + /// ); + /// // Missing keys result in None + /// let got = libraries.get_many_key_value_mut([ + /// "Bodleian Library", + /// "Gewandhaus", + /// ]); + /// assert_eq!(got, None); + /// + /// // Duplicate keys result in None + /// let got = libraries.get_many_key_value_mut([ + /// "Bodleian Library", + /// "Herzogin-Anna-Amalia-Bibliothek", + /// "Herzogin-Anna-Amalia-Bibliothek", + /// ]); + /// assert_eq!(got, None); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn get_many_key_value_mut( + &mut self, + ks: [&Q; N], + ) -> Option<[(&'_ K, &'_ mut V); N]> + where + Q: Hash + Equivalent, + { + self.get_many_mut_inner(ks) + .map(|res| res.map(|(k, v)| (&*k, v))) + } + + /// Attempts to get mutable references to `N` values in the map at once, with immutable + /// references to the corresponding keys, without validating that the values are unique. + /// + /// Returns an array of length `N` with the results of each query. `None` will be returned if + /// any of the keys are missing. + /// + /// For a safe alternative see [`get_many_key_value_mut`](`HashMap::get_many_key_value_mut`). + /// + /// # Safety + /// + /// Calling this method with overlapping keys is *[undefined behavior]* even if the resulting + /// references are not used. 
+ ///
+ /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ ///
+ /// let mut libraries = HashMap::new();
+ /// libraries.try_insert("Bodleian Library".to_string(), 1602)?;
+ /// libraries.try_insert("Athenæum".to_string(), 1807)?;
+ /// libraries.try_insert("Herzogin-Anna-Amalia-Bibliothek".to_string(), 1691)?;
+ /// libraries.try_insert("Library of Congress".to_string(), 1800)?;
+ ///
+ /// let got = libraries.get_many_key_value_mut([
+ /// "Bodleian Library",
+ /// "Herzogin-Anna-Amalia-Bibliothek",
+ /// ]);
+ /// assert_eq!(
+ /// got,
+ /// Some([
+ /// (&"Bodleian Library".to_string(), &mut 1602),
+ /// (&"Herzogin-Anna-Amalia-Bibliothek".to_string(), &mut 1691),
+ /// ]),
+ /// );
+ /// // Missing keys result in None
+ /// let got = libraries.get_many_key_value_mut([
+ /// "Bodleian Library",
+ /// "Gewandhaus",
+ /// ]);
+ /// assert_eq!(got, None);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ pub unsafe fn get_many_key_value_unchecked_mut<Q: ?Sized, const N: usize>(
+ &mut self,
+ ks: [&Q; N],
+ ) -> Option<[(&'_ K, &'_ mut V); N]>
+ where
+ Q: Hash + Equivalent<K>,
+ {
+ self.get_many_unchecked_mut_inner(ks)
+ .map(|res| res.map(|(k, v)| (&*k, v)))
+ }
+
+ fn get_many_mut_inner<Q: ?Sized, const N: usize>(
+ &mut self,
+ ks: [&Q; N],
+ ) -> Option<[&'_ mut (K, V); N]>
+ where
+ Q: Hash + Equivalent<K>,
+ {
+ let hashes = self.build_hashes_inner(ks);
+ into_ok(
+ self.table
+ .get_many_mut(&mut (), hashes, |_, i, (k, _)| Ok(ks[i].equivalent(k))),
+ )
+ }
+
+ unsafe fn get_many_unchecked_mut_inner<Q: ?Sized, const N: usize>(
+ &mut self,
+ ks: [&Q; N],
+ ) -> Option<[&'_ mut (K, V); N]>
+ where
+ Q: Hash + Equivalent<K>,
+ {
+ let hashes = self.build_hashes_inner(ks);
+ into_ok(
+ self.table
+ .get_many_unchecked_mut(&mut (), hashes, |_, i, (k, _)| Ok(ks[i].equivalent(k))),
+ )
+ }
+
+ fn build_hashes_inner<Q: ?Sized, const N: usize>(&self, ks: [&Q; N]) -> [u64; N]
+ where
+ Q: Hash + Equivalent<K>,
+ {
+ let mut hashes = [0_u64; N];
+ for i in 0..N {
+ hashes[i] = make_hash::<Q, S>(&self.hash_builder, ks[i]);
+ }
+ hashes
+ }
+
+ /// Inserts a key-value pair into the map.
+ ///
+ /// If the map did not have this key present, [`None`] is returned.
+ ///
+ /// If the map did have this key present, the value is updated, and the old
+ /// value is returned. The key is not updated, though; this matters for
+ /// types that can be `==` without being identical. See the [`std::collections`]
+ /// [module-level documentation] for more.
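+ ///
+ /// For example (an illustrative sketch using a hypothetical key type whose
+ /// `Eq` and `Hash` only consider part of its data), the originally inserted
+ /// key is kept even when the value is replaced:
+ ///
+ /// ```
+ /// use core::hash::{Hash, Hasher};
+ /// use rune_alloc::HashMap;
+ ///
+ /// #[derive(Debug)]
+ /// struct Key { id: u32, label: &'static str }
+ ///
+ /// impl PartialEq for Key { fn eq(&self, other: &Self) -> bool { self.id == other.id } }
+ /// impl Eq for Key {}
+ /// impl Hash for Key { fn hash<H: Hasher>(&self, state: &mut H) { self.id.hash(state); } }
+ ///
+ /// let mut map = HashMap::new();
+ /// map.try_insert(Key { id: 1, label: "first" }, "a")?;
+ /// // The keys compare equal, so the value is replaced...
+ /// assert_eq!(map.try_insert(Key { id: 1, label: "second" }, "b")?, Some("a"));
+ /// // ...but the stored key keeps its original `label`.
+ /// let (k, v) = map.iter().next().unwrap();
+ /// assert_eq!((k.label, *v), ("first", "b"));
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```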
+ ///
+ /// [`None`]: https://doc.rust-lang.org/std/option/enum.Option.html#variant.None
+ /// [`std::collections`]: https://doc.rust-lang.org/std/collections/index.html
+ /// [module-level documentation]: https://doc.rust-lang.org/std/collections/index.html#insert-and-complex-keys
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ ///
+ /// let mut map = HashMap::new();
+ /// assert_eq!(map.try_insert(37, "a")?, None);
+ /// assert_eq!(map.is_empty(), false);
+ ///
+ /// map.try_insert(37, "b")?;
+ /// assert_eq!(map.try_insert(37, "c")?, Some("b"));
+ /// assert_eq!(map[&37], "c");
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn try_insert(&mut self, k: K, v: V) -> Result<Option<V>, Error> {
+ let hasher = make_hasher::<K, V, S>(&self.hash_builder);
+ let hash = into_ok(hasher.hash(&mut (), &k));
+
+ let result = self.table.find_or_find_insert_slot(
+ &mut (),
+ hash,
+ equivalent_key(&k),
+ hasher.into_tuple(),
+ );
+
+ Ok(match result {
+ Ok(bucket) => Some(mem::replace(unsafe { &mut bucket.as_mut().1 }, v)),
+ Err(ErrorOrInsertSlot::InsertSlot(slot)) => {
+ unsafe {
+ self.table.insert_in_slot(hash, slot, (k, v));
+ }
+ None
+ }
+ Err(ErrorOrInsertSlot::Error(error)) => match error {
+ CustomError::Custom(error) => match error {},
+ CustomError::Error(error) => return Err(error),
+ },
+ })
+ }
+
+ #[cfg(test)]
+ pub(crate) fn insert(&mut self, k: K, v: V) -> Option<V> {
+ self.try_insert(k, v).abort()
+ }
+
+ /// Insert a key-value pair into the map without checking
+ /// if the key already exists in the map.
+ ///
+ /// Returns a reference to the key and value just inserted.
+ ///
+ /// This operation is safe if the key does not already exist in the map.
+ ///
+ /// However, if the key already exists in the map, the behavior is
+ /// unspecified: this operation may panic, loop forever, and any subsequent
+ /// operation on the map may panic, loop forever, or return an arbitrary
+ /// result.
+ ///
+ /// That said, this operation (and any operations that follow it) is
+ /// guaranteed not to violate memory safety.
+ ///
+ /// This operation is faster than a regular insert, because it does not
+ /// perform a lookup before insertion.
+ ///
+ /// This operation is useful during initial population of the map.
+ /// For example, when constructing a map from another map, we know
+ /// that keys are unique.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ ///
+ /// let mut map1 = HashMap::new();
+ /// assert_eq!(map1.try_insert(1, "a")?, None);
+ /// assert_eq!(map1.try_insert(2, "b")?, None);
+ /// assert_eq!(map1.try_insert(3, "c")?, None);
+ /// assert_eq!(map1.len(), 3);
+ ///
+ /// let mut map2 = HashMap::new();
+ ///
+ /// for (key, value) in map1.into_iter() {
+ /// map2.try_insert_unique_unchecked(key, value)?;
+ /// }
+ ///
+ /// let (key, value) = map2.try_insert_unique_unchecked(4, "d")?;
+ /// assert_eq!(key, &4);
+ /// assert_eq!(value, &mut "d");
+ /// *value = "e";
+ ///
+ /// assert_eq!(map2[&1], "a");
+ /// assert_eq!(map2[&2], "b");
+ /// assert_eq!(map2[&3], "c");
+ /// assert_eq!(map2[&4], "e");
+ /// assert_eq!(map2.len(), 4);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn try_insert_unique_unchecked(&mut self, k: K, v: V) -> Result<(&K, &mut V), Error> {
+ let hasher = make_hasher::<K, V, S>(&self.hash_builder);
+ let hash = into_ok(hasher.hash(&mut (), &k));
+ let bucket = into_ok_try(
+ self.table
+ .insert(&mut (), hash, (k, v), hasher.into_tuple()),
+ )?;
+ let (k_ref, v_ref) = unsafe { bucket.as_mut() };
+ Ok((k_ref, v_ref))
+ }
+
+ #[cfg(test)]
+ pub(crate) fn insert_unique_unchecked(&mut self, k: K, v: V) -> (&K, &mut V) {
+ self.try_insert_unique_unchecked(k, v).abort()
+ }
+
+ /// Tries to insert a key-value pair into the map, and returns
+ /// a mutable reference to the value in the entry.
+ ///
+ /// # Errors
+ ///
+ /// If the map already had this key present, nothing is updated, and
+ /// an error containing the occupied entry and the value is returned.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use rune_alloc::{HashMap, CustomError};
+ /// use rune_alloc::hash_map::OccupiedError;
+ ///
+ /// let mut map = HashMap::new();
+ /// assert_eq!(map.try_insert_or(37, "a").unwrap(), &"a");
+ ///
+ /// match map.try_insert_or(37, "b") {
+ /// Err(CustomError::Custom(OccupiedError { entry, value })) => {
+ /// assert_eq!(entry.key(), &37);
+ /// assert_eq!(entry.get(), &"a");
+ /// assert_eq!(value, "b");
+ /// }
+ /// _ => panic!()
+ /// }
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn try_insert_or(
+ &mut self,
+ key: K,
+ value: V,
+ ) -> Result<&mut V, CustomError<OccupiedError<'_, K, V, S, A>>> {
+ match self.entry(key) {
+ Entry::Occupied(entry) => Err(CustomError::Custom(OccupiedError { entry, value })),
+ Entry::Vacant(entry) => Ok(entry.try_insert(value)?),
+ }
+ }
+
+ /// Removes a key from the map, returning the value at the key if the key
+ /// was previously in the map. Keeps the allocated memory for reuse.
+ ///
+ /// The key may be any borrowed form of the map's key type, but
+ /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+ /// the key type.
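+ ///
+ /// For example (an illustrative sketch), a map keyed by `String` can be
+ /// removed from with a plain `&str`:
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ ///
+ /// let mut map = HashMap::new();
+ /// map.try_insert(String::from("answer"), 42)?;
+ /// // `str` is a borrowed form of `String` with matching `Hash` and `Eq`.
+ /// assert_eq!(map.remove("answer"), Some(42));
+ /// assert_eq!(map.remove("answer"), None);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```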
+ ///
+ /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+ /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ ///
+ /// let mut map = HashMap::new();
+ /// // The map is empty
+ /// assert!(map.is_empty() && map.capacity() == 0);
+ ///
+ /// map.try_insert(1, "a")?;
+ ///
+ /// assert_eq!(map.remove(&1), Some("a"));
+ /// assert_eq!(map.remove(&1), None);
+ ///
+ /// // Now the map holds no elements
+ /// assert!(map.is_empty());
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn remove<Q: ?Sized>(&mut self, k: &Q) -> Option<V>
+ where
+ Q: Hash + Equivalent<K>,
+ {
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.remove_entry(k) {
+ Some((_, v)) => Some(v),
+ None => None,
+ }
+ }
+
+ /// Removes a key from the map, returning the stored key and value if the
+ /// key was previously in the map. Keeps the allocated memory for reuse.
+ ///
+ /// The key may be any borrowed form of the map's key type, but
+ /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+ /// the key type.
+ ///
+ /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+ /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ ///
+ /// let mut map = HashMap::new();
+ /// // The map is empty
+ /// assert!(map.is_empty() && map.capacity() == 0);
+ ///
+ /// map.try_insert(1, "a")?;
+ ///
+ /// assert_eq!(map.remove_entry(&1), Some((1, "a")));
+ /// assert_eq!(map.remove(&1), None);
+ ///
+ /// // Now the map holds no elements
+ /// assert!(map.is_empty());
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn remove_entry<Q: ?Sized>(&mut self, k: &Q) -> Option<(K, V)>
+ where
+ Q: Hash + Equivalent<K>,
+ {
+ let hash = make_hash::<Q, S>(&self.hash_builder, k);
+ into_ok(self.table.remove_entry(&mut (), hash, equivalent_key(k)))
+ }
+}
+
+impl<K, V, S, A: Allocator> HashMap<K, V, S, A> {
+ /// Creates a raw entry builder for the HashMap.
+ ///
+ /// Raw entries provide the lowest level of control for searching and
+ /// manipulating a map. They must be manually initialized with a hash and
+ /// then manually searched. After this, insertions into a vacant entry
+ /// still require an owned key to be provided.
+ ///
+ /// Raw entries are useful for such exotic situations as:
+ ///
+ /// * Hash memoization
+ /// * Deferring the creation of an owned key until it is known to be required
+ /// * Using a search key that doesn't work with the Borrow trait
+ /// * Using custom comparison logic without newtype wrappers
+ ///
+ /// Because raw entries provide much more low-level control, it's much easier
+ /// to put the HashMap into an inconsistent state which, while memory-safe,
+ /// will cause the map to produce seemingly random results. Higher-level and
+ /// more foolproof APIs like `entry` should be preferred when possible.
+ ///
+ /// In particular, the hash used to initialize the raw entry must still be
+ /// consistent with the hash of the key that is ultimately stored in the entry.
+ /// This is because implementations of HashMap may need to recompute hashes
+ /// when resizing, at which point only the keys are available.
+ ///
+ /// Raw entries give mutable access to the keys.
This must not be used + /// to modify how the key would compare or hash, as the map will not re-evaluate + /// where the key should go, meaning the keys may become "lost" if their + /// location does not reflect their state. For instance, if you change a key + /// so that the map now contains keys which compare equal, search may start + /// acting erratically, with two keys randomly masking each other. Implementations + /// are free to assume this doesn't happen (within the limits of memory-safety). + /// + /// # Examples + /// + /// ``` + /// use core::hash::{BuildHasher, Hash}; + /// use rune_alloc::hash_map::{HashMap, RawEntryMut}; + /// use rune_alloc::prelude::*; + /// + /// let mut map = HashMap::new(); + /// map.try_extend([("a", 100), ("b", 200), ("c", 300)])?; + /// + /// fn compute_hash(hash_builder: &S, key: &K) -> u64 { + /// use core::hash::Hasher; + /// let mut state = hash_builder.build_hasher(); + /// key.hash(&mut state); + /// state.finish() + /// } + /// + /// // Existing key (insert and update) + /// match map.raw_entry_mut().from_key(&"a") { + /// RawEntryMut::Vacant(_) => unreachable!(), + /// RawEntryMut::Occupied(mut view) => { + /// assert_eq!(view.get(), &100); + /// let v = view.get_mut(); + /// let new_v = (*v) * 10; + /// *v = new_v; + /// assert_eq!(view.insert(1111), 1000); + /// } + /// } + /// + /// assert_eq!(map[&"a"], 1111); + /// assert_eq!(map.len(), 3); + /// + /// // Existing key (take) + /// let hash = compute_hash(map.hasher(), &"c"); + /// match map.raw_entry_mut().from_key_hashed_nocheck(hash, &"c") { + /// RawEntryMut::Vacant(_) => unreachable!(), + /// RawEntryMut::Occupied(view) => { + /// assert_eq!(view.remove_entry(), ("c", 300)); + /// } + /// } + /// assert_eq!(map.raw_entry().from_key(&"c"), None); + /// assert_eq!(map.len(), 2); + /// + /// // Nonexistent key (insert and update) + /// let key = "d"; + /// let hash = compute_hash(map.hasher(), &key); + /// match map.raw_entry_mut().from_hash(hash, |q| *q == key) { + /// RawEntryMut::Occupied(_) => unreachable!(), + /// RawEntryMut::Vacant(view) => { + /// let (k, value) = view.try_insert("d", 4000)?; + /// assert_eq!((*k, *value), ("d", 4000)); + /// *value = 40000; + /// } + /// } + /// assert_eq!(map[&"d"], 40000); + /// assert_eq!(map.len(), 3); + /// + /// match map.raw_entry_mut().from_hash(hash, |q| *q == key) { + /// RawEntryMut::Vacant(_) => unreachable!(), + /// RawEntryMut::Occupied(view) => { + /// assert_eq!(view.remove_entry(), ("d", 40000)); + /// } + /// } + /// assert_eq!(map.get(&"d"), None); + /// assert_eq!(map.len(), 2); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<'_, K, V, S, A> { + RawEntryBuilderMut { map: self } + } + + /// Creates a raw immutable entry builder for the HashMap. + /// + /// Raw entries provide the lowest level of control for searching and + /// manipulating a map. They must be manually initialized with a hash and + /// then manually searched. + /// + /// This is useful for + /// * Hash memoization + /// * Using a search key that doesn't work with the Borrow trait + /// * Using custom comparison logic without newtype wrappers + /// + /// Unless you are in such a situation, higher-level and more foolproof APIs like + /// `get` should be preferred. + /// + /// Immutable raw entries have very limited use; you might instead want `raw_entry_mut`. 
+ /// + /// # Examples + /// + /// ``` + /// use core::hash::{BuildHasher, Hash}; + /// use rune_alloc::HashMap; + /// use rune_alloc::prelude::*; + /// + /// let mut map = HashMap::new(); + /// map.try_extend([("a", 100), ("b", 200), ("c", 300)])?; + /// + /// fn compute_hash(hash_builder: &S, key: &K) -> u64 { + /// use core::hash::Hasher; + /// let mut state = hash_builder.build_hasher(); + /// key.hash(&mut state); + /// state.finish() + /// } + /// + /// for k in ["a", "b", "c", "d", "e", "f"] { + /// let hash = compute_hash(map.hasher(), k); + /// let v = map.get(&k).cloned(); + /// let kv = v.as_ref().map(|v| (&k, v)); + /// + /// println!("Key: {} and value: {:?}", k, v); + /// + /// assert_eq!(map.raw_entry().from_key(&k), kv); + /// assert_eq!(map.raw_entry().from_hash(hash, |q| *q == k), kv); + /// assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &k), kv); + /// } + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn raw_entry(&self) -> RawEntryBuilder<'_, K, V, S, A> { + RawEntryBuilder { map: self } + } + + /// Returns a reference to the [`RawTable`] used underneath [`HashMap`]. + /// This function is only available if the `raw` feature of the crate is enabled. + /// + /// See [`raw_table_mut`] for more. + /// + /// [`raw_table_mut`]: Self::raw_table_mut + #[cfg_attr(feature = "inline-more", inline)] + pub fn raw_table(&self) -> &RawTable<(K, V), A> { + &self.table + } + + /// Returns a mutable reference to the [`RawTable`] used underneath [`HashMap`]. + /// This function is only available if the `raw` feature of the crate is enabled. + /// + /// # Note + /// + /// Calling this function is safe, but using the raw hash table API may require + /// unsafe functions or blocks. + /// + /// `RawTable` API gives the lowest level of control under the map that can be useful + /// for extending the HashMap's API, but may lead to *[undefined behavior]*. + /// + /// [`RawTable`]: crate::hashbrown::raw::RawTable + /// [undefined behavior]: + /// https://doc.rust-lang.org/reference/behavior-considered-undefined.html + /// + /// # Examples + /// + /// ``` + /// use core::hash::{BuildHasher, Hash}; + /// use core::convert::Infallible; + /// use rune_alloc::HashMap; + /// use rune_alloc::prelude::*; + /// + /// let mut map = HashMap::new(); + /// map.try_extend([("a", 10), ("b", 20), ("c", 30)])?; + /// assert_eq!(map.len(), 3); + /// + /// // Let's imagine that we have a value and a hash of the key, but not the key itself. 
+ ///
+ /// // However, if you want to remove the value from the map by hash and value, and you
+ /// // know exactly that the value is unique, then you can create a function like this:
+ /// fn remove_by_hash<K, V, S, F>(
+ /// map: &mut HashMap<K, V, S>,
+ /// hash: u64,
+ /// is_match: F,
+ /// ) -> Option<(K, V)>
+ /// where
+ /// F: Fn(&(K, V)) -> bool,
+ /// {
+ /// let raw_table = map.raw_table_mut();
+ /// match raw_table.find(&mut (), hash, |_: &mut (), k: &(K, V)| Ok::<_, Infallible>(is_match(k))).unwrap() {
+ /// Some(bucket) => Some(unsafe { raw_table.remove(bucket).0 }),
+ /// None => None,
+ /// }
+ /// }
+ ///
+ /// fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+ /// use core::hash::Hasher;
+ /// let mut state = hash_builder.build_hasher();
+ /// key.hash(&mut state);
+ /// state.finish()
+ /// }
+ ///
+ /// let hash = compute_hash(map.hasher(), "a");
+ /// assert_eq!(remove_by_hash(&mut map, hash, |(_, v)| *v == 10), Some(("a", 10)));
+ /// assert_eq!(map.get(&"a"), None);
+ /// assert_eq!(map.len(), 2);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn raw_table_mut(&mut self) -> &mut RawTable<(K, V), A> {
+ &mut self.table
+ }
+}
+
+impl<K, V, S, A> PartialEq for HashMap<K, V, S, A>
+where
+ K: Eq + Hash,
+ V: PartialEq,
+ S: BuildHasher,
+ A: Allocator,
+{
+ fn eq(&self, other: &Self) -> bool {
+ if self.len() != other.len() {
+ return false;
+ }
+
+ self.iter()
+ .all(|(key, value)| other.get(key).map_or(false, |v| *value == *v))
+ }
+}
+
+impl<K, V, S, A> Eq for HashMap<K, V, S, A>
+where
+ K: Eq + Hash,
+ V: Eq,
+ S: BuildHasher,
+ A: Allocator,
+{
+}
+
+impl<K, V, S, A> Debug for HashMap<K, V, S, A>
+where
+ K: Debug,
+ V: Debug,
+ A: Allocator,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_map().entries(self.iter()).finish()
+ }
+}
+
+impl<K, V, S, A> Default for HashMap<K, V, S, A>
+where
+ S: Default,
+ A: Default + Allocator,
+{
+ /// Creates an empty `HashMap<K, V, S, A>`, with the `Default` value for the hasher and allocator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ /// use std::collections::hash_map::RandomState;
+ ///
+ /// // You can specify all types of HashMap, including hasher and allocator.
+ /// // The created map is empty and doesn't allocate memory
+ /// let map: HashMap<u32, String> = Default::default();
+ /// assert_eq!(map.capacity(), 0);
+ /// let map: HashMap<u32, String, RandomState> = HashMap::default();
+ /// assert_eq!(map.capacity(), 0);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn default() -> Self {
+ Self::with_hasher_in(Default::default(), Default::default())
+ }
+}
+
+impl<K, Q: ?Sized, V, S, A> Index<&Q> for HashMap<K, V, S, A>
+where
+ K: Eq + Hash,
+ Q: Hash + Equivalent<K>,
+ S: BuildHasher,
+ A: Allocator,
+{
+ type Output = V;
+
+ /// Returns a reference to the value corresponding to the supplied key.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the key is not present in the `HashMap`.
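+ ///
+ /// For a lookup that does not panic on a missing key, use `get` instead
+ /// (an illustrative sketch):
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ ///
+ /// let map: HashMap<_, _> = [("a", 1)].try_into()?;
+ /// assert_eq!(map.get(&"b"), None); // `map[&"b"]` would panic here
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```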
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ ///
+ /// let map: HashMap<_, _> = [("a", "One"), ("b", "Two")].try_into()?;
+ ///
+ /// assert_eq!(map[&"a"], "One");
+ /// assert_eq!(map[&"b"], "Two");
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn index(&self, key: &Q) -> &V {
+ self.get(key).expect("no entry found for key")
+ }
+}
+
+// The default hasher is used to match the std implementation signature
+impl<K, V, A, const N: usize> TryFrom<[(K, V); N]> for HashMap<K, V, DefaultHashBuilder, A>
+where
+ K: Eq + Hash,
+ A: Default + Allocator,
+{
+ type Error = Error;
+
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ ///
+ /// let map1 = HashMap::try_from([(1, 2), (3, 4)])?;
+ /// let map2: HashMap<_, _> = [(1, 2), (3, 4)].try_into()?;
+ /// assert_eq!(map1, map2);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ fn try_from(arr: [(K, V); N]) -> Result<Self, Self::Error> {
+ HashMap::try_from_iter_in(arr, A::default())
+ }
+}
+
+/// An iterator over the entries of a `HashMap` in arbitrary order.
+/// The iterator element type is `(&'a K, &'a V)`.
+///
+/// This `struct` is created by the [`iter`] method on [`HashMap`]. See its
+/// documentation for more.
+///
+/// [`iter`]: struct.HashMap.html#method.iter
+/// [`HashMap`]: struct.HashMap.html
+///
+/// # Examples
+///
+/// ```
+/// use rune_alloc::HashMap;
+///
+/// let map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].try_into()?;
+///
+/// let mut iter = map.iter();
+/// let mut vec = vec![iter.next(), iter.next(), iter.next()];
+///
+/// // The `Iter` iterator produces items in arbitrary order, so the
+/// // items must be sorted to test them against a sorted array.
+/// vec.sort_unstable();
+/// assert_eq!(vec, [Some((&1, &"a")), Some((&2, &"b")), Some((&3, &"c"))]);
+///
+/// // It is a fused iterator
+/// assert_eq!(iter.next(), None);
+/// assert_eq!(iter.next(), None);
+/// # Ok::<_, rune_alloc::Error>(())
+/// ```
+pub struct Iter<'a, K, V> {
+ inner: RawIter<(K, V)>,
+ marker: PhantomData<(&'a K, &'a V)>,
+}
+
+// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
+impl<K, V> Clone for Iter<'_, K, V> {
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn clone(&self) -> Self {
+ Iter {
+ inner: self.inner.clone(),
+ marker: PhantomData,
+ }
+ }
+}
+
+impl<K: Debug, V: Debug> fmt::Debug for Iter<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+/// A mutable iterator over the entries of a `HashMap` in arbitrary order.
+/// The iterator element type is `(&'a K, &'a mut V)`.
+///
+/// This `struct` is created by the [`iter_mut`] method on [`HashMap`]. See its
+/// documentation for more.
+///
+/// [`iter_mut`]: struct.HashMap.html#method.iter_mut
+/// [`HashMap`]: struct.HashMap.html
+///
+/// # Examples
+///
+/// ```
+/// use rune_alloc::HashMap;
+///
+/// let mut map: HashMap<_, _> = [(1, "One".to_owned()), (2, "Two".into())].try_into()?;
+///
+/// let mut iter = map.iter_mut();
+/// iter.next().map(|(_, v)| v.push_str(" Mississippi"));
+/// iter.next().map(|(_, v)| v.push_str(" Mississippi"));
+///
+/// // It is a fused iterator
+/// assert_eq!(iter.next(), None);
+/// assert_eq!(iter.next(), None);
+///
+/// assert_eq!(map.get(&1).unwrap(), &"One Mississippi".to_owned());
+/// assert_eq!(map.get(&2).unwrap(), &"Two Mississippi".to_owned());
+/// # Ok::<_, rune_alloc::Error>(())
+/// ```
+pub struct IterMut<'a, K, V> {
+ inner: RawIter<(K, V)>,
+ // To ensure invariance with respect to V
+ marker: PhantomData<(&'a K, &'a mut V)>,
+}
+
+// We override the default Send impl which has K: Sync instead of K: Send. Both
+// are correct, but this one is more general since it allows keys which
+// implement Send but not Sync.
+unsafe impl<K: Send, V: Send> Send for IterMut<'_, K, V> {}
+
+impl<K, V> IterMut<'_, K, V> {
+ /// Returns an iterator of references over the remaining items.
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub(super) fn iter(&self) -> Iter<'_, K, V> {
+ Iter {
+ inner: self.inner.clone(),
+ marker: PhantomData,
+ }
+ }
+}
+
+/// An owning iterator over the entries of a `HashMap` in arbitrary order.
+/// The iterator element type is `(K, V)`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`HashMap`]
+/// (provided by the [`IntoIterator`] trait). See its documentation for more.
+/// The map cannot be used after calling that method.
+///
+/// [`into_iter`]: struct.HashMap.html#method.into_iter
+/// [`HashMap`]: struct.HashMap.html
+/// [`IntoIterator`]: https://doc.rust-lang.org/core/iter/trait.IntoIterator.html
+///
+/// # Examples
+///
+/// ```
+/// use rune_alloc::HashMap;
+///
+/// let map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].try_into()?;
+///
+/// let mut iter = map.into_iter();
+/// let mut vec = vec![iter.next(), iter.next(), iter.next()];
+///
+/// // The `IntoIter` iterator produces items in arbitrary order, so the
+/// // items must be sorted to test them against a sorted array.
+/// vec.sort_unstable();
+/// assert_eq!(vec, [Some((1, "a")), Some((2, "b")), Some((3, "c"))]);
+///
+/// // It is a fused iterator
+/// assert_eq!(iter.next(), None);
+/// assert_eq!(iter.next(), None);
+/// # Ok::<_, rune_alloc::Error>(())
+/// ```
+pub struct IntoIter<K, V, A: Allocator = Global> {
+ inner: RawIntoIter<(K, V), A>,
+}
+
+impl<K, V, A: Allocator> IntoIter<K, V, A> {
+ /// Returns an iterator of references over the remaining items.
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub(super) fn iter(&self) -> Iter<'_, K, V> {
+ Iter {
+ inner: self.inner.iter(),
+ marker: PhantomData,
+ }
+ }
+}
+
+/// An owning iterator over the keys of a `HashMap` in arbitrary order.
+/// The iterator element type is `K`.
+///
+/// This `struct` is created by the [`into_keys`] method on [`HashMap`].
+/// See its documentation for more.
+/// The map cannot be used after calling that method.
+///
+/// [`into_keys`]: struct.HashMap.html#method.into_keys
+/// [`HashMap`]: struct.HashMap.html
+///
+/// # Examples
+///
+/// ```
+/// use rune_alloc::HashMap;
+///
+/// let map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].try_into()?;
+///
+/// let mut keys = map.into_keys();
+/// let mut vec = vec![keys.next(), keys.next(), keys.next()];
+///
+/// // The `IntoKeys` iterator produces keys in arbitrary order, so the
+/// // keys must be sorted to test them against a sorted array.
+/// vec.sort_unstable();
+/// assert_eq!(vec, [Some(1), Some(2), Some(3)]);
+///
+/// // It is a fused iterator
+/// assert_eq!(keys.next(), None);
+/// assert_eq!(keys.next(), None);
+/// # Ok::<_, rune_alloc::Error>(())
+/// ```
+pub struct IntoKeys<K, V, A: Allocator = Global> {
+ inner: IntoIter<K, V, A>,
+}
+
+impl<K, V, A: Allocator> Iterator for IntoKeys<K, V, A> {
+ type Item = K;
+
+ #[inline]
+ fn next(&mut self) -> Option<K> {
+ self.inner.next().map(|(k, _)| k)
+ }
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+
+impl<K, V, A: Allocator> ExactSizeIterator for IntoKeys<K, V, A> {
+ #[inline]
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+impl<K, V, A: Allocator> FusedIterator for IntoKeys<K, V, A> {}
+
+impl<K: Debug, V: Debug, A: Allocator> fmt::Debug for IntoKeys<K, V, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list()
+ .entries(self.inner.iter().map(|(k, _)| k))
+ .finish()
+ }
+}
+
+/// An owning iterator over the values of a `HashMap` in arbitrary order.
+/// The iterator element type is `V`.
+///
+/// This `struct` is created by the [`into_values`] method on [`HashMap`].
+/// See its documentation for more. The map cannot be used after calling that method.
+///
+/// [`into_values`]: struct.HashMap.html#method.into_values
+/// [`HashMap`]: struct.HashMap.html
+///
+/// # Examples
+///
+/// ```
+/// use rune_alloc::HashMap;
+///
+/// let map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].try_into()?;
+///
+/// let mut values = map.into_values();
+/// let mut vec = vec![values.next(), values.next(), values.next()];
+///
+/// // The `IntoValues` iterator produces values in arbitrary order, so
+/// // the values must be sorted to test them against a sorted array.
+/// vec.sort_unstable();
+/// assert_eq!(vec, [Some("a"), Some("b"), Some("c")]);
+///
+/// // It is a fused iterator
+/// assert_eq!(values.next(), None);
+/// assert_eq!(values.next(), None);
+/// # Ok::<_, rune_alloc::Error>(())
+/// ```
+pub struct IntoValues<K, V, A: Allocator = Global> {
+ inner: IntoIter<K, V, A>,
+}
+
+impl<K, V, A: Allocator> Iterator for IntoValues<K, V, A> {
+ type Item = V;
+
+ #[inline]
+ fn next(&mut self) -> Option<V> {
+ self.inner.next().map(|(_, v)| v)
+ }
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+
+impl<K, V, A: Allocator> ExactSizeIterator for IntoValues<K, V, A> {
+ #[inline]
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+impl<K, V, A: Allocator> FusedIterator for IntoValues<K, V, A> {}
+
+impl<K: Debug, V: Debug, A: Allocator> fmt::Debug for IntoValues<K, V, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list()
+ .entries(self.inner.iter().map(|(_, v)| v))
+ .finish()
+ }
+}
+
+/// An iterator over the keys of a `HashMap` in arbitrary order.
+/// The iterator element type is `&'a K`.
+///
+/// This `struct` is created by the [`keys`] method on [`HashMap`]. See its
+/// documentation for more.
+/// +/// [`keys`]: struct.HashMap.html#method.keys +/// [`HashMap`]: struct.HashMap.html +/// +/// # Examples +/// +/// ``` +/// use rune_alloc::HashMap; +/// +/// let map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].try_into()?; +/// +/// let mut keys = map.keys(); +/// let mut vec = vec![keys.next(), keys.next(), keys.next()]; +/// +/// // The `Keys` iterator produces keys in arbitrary order, so the +/// // keys must be sorted to test them against a sorted array. +/// vec.sort_unstable(); +/// assert_eq!(vec, [Some(&1), Some(&2), Some(&3)]); +/// +/// // It is fused iterator +/// assert_eq!(keys.next(), None); +/// assert_eq!(keys.next(), None); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +pub struct Keys<'a, K, V> { + inner: Iter<'a, K, V>, +} + +// FIXME(#26925) Remove in favor of `#[derive(Clone)]` +impl Clone for Keys<'_, K, V> { + #[cfg_attr(feature = "inline-more", inline)] + fn clone(&self) -> Self { + Keys { + inner: self.inner.clone(), + } + } +} + +impl fmt::Debug for Keys<'_, K, V> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_list().entries(self.clone()).finish() + } +} + +/// An iterator over the values of a `HashMap` in arbitrary order. +/// The iterator element type is `&'a V`. +/// +/// This `struct` is created by the [`values`] method on [`HashMap`]. See its +/// documentation for more. +/// +/// [`values`]: struct.HashMap.html#method.values +/// [`HashMap`]: struct.HashMap.html +/// +/// # Examples +/// +/// ``` +/// use rune_alloc::HashMap; +/// +/// let map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].try_into()?; +/// +/// let mut values = map.values(); +/// let mut vec = vec![values.next(), values.next(), values.next()]; +/// +/// // The `Values` iterator produces values in arbitrary order, so the +/// // values must be sorted to test them against a sorted array. +/// vec.sort_unstable(); +/// assert_eq!(vec, [Some(&"a"), Some(&"b"), Some(&"c")]); +/// +/// // It is fused iterator +/// assert_eq!(values.next(), None); +/// assert_eq!(values.next(), None); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +pub struct Values<'a, K, V> { + inner: Iter<'a, K, V>, +} + +// FIXME(#26925) Remove in favor of `#[derive(Clone)]` +impl Clone for Values<'_, K, V> { + #[cfg_attr(feature = "inline-more", inline)] + fn clone(&self) -> Self { + Values { + inner: self.inner.clone(), + } + } +} + +impl fmt::Debug for Values<'_, K, V> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_list().entries(self.clone()).finish() + } +} + +/// A draining iterator over the entries of a `HashMap` in arbitrary +/// order. The iterator element type is `(K, V)`. +/// +/// This `struct` is created by the [`drain`] method on [`HashMap`]. See its +/// documentation for more. +/// +/// [`drain`]: struct.HashMap.html#method.drain +/// [`HashMap`]: struct.HashMap.html +/// +/// # Examples +/// +/// ``` +/// use rune_alloc::HashMap; +/// +/// let mut map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].try_into()?; +/// +/// let mut drain_iter = map.drain(); +/// let mut vec = vec![drain_iter.next(), drain_iter.next(), drain_iter.next()]; +/// +/// // The `Drain` iterator produces items in arbitrary order, so the +/// // items must be sorted to test them against a sorted array. 
+/// vec.sort_unstable(); +/// assert_eq!(vec, [Some((1, "a")), Some((2, "b")), Some((3, "c"))]); +/// +/// // It is fused iterator +/// assert_eq!(drain_iter.next(), None); +/// assert_eq!(drain_iter.next(), None); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +pub struct Drain<'a, K, V, A: Allocator = Global> { + inner: RawDrain<'a, (K, V), A>, +} + +impl Drain<'_, K, V, A> { + /// Returns a iterator of references over the remaining items. + #[cfg_attr(feature = "inline-more", inline)] + pub(super) fn iter(&self) -> Iter<'_, K, V> { + Iter { + inner: self.inner.iter(), + marker: PhantomData, + } + } +} + +/// A draining iterator over entries of a `HashMap` which don't satisfy the predicate +/// `f(&k, &mut v)` in arbitrary order. The iterator element type is `(K, V)`. +/// +/// This `struct` is created by the [`extract_if`] method on [`HashMap`]. See its +/// documentation for more. +/// +/// [`extract_if`]: struct.HashMap.html#method.extract_if +/// [`HashMap`]: struct.HashMap.html +/// +/// # Examples +/// +/// ``` +/// use rune_alloc::HashMap; +/// +/// let mut map: HashMap = [(1, "a"), (2, "b"), (3, "c")].try_into()?; +/// +/// let mut extract_if = map.extract_if(|k, _v| k % 2 != 0); +/// let mut vec = vec![extract_if.next(), extract_if.next()]; +/// +/// // The `ExtractIf` iterator produces items in arbitrary order, so the +/// // items must be sorted to test them against a sorted array. +/// vec.sort_unstable(); +/// assert_eq!(vec, [Some((1, "a")),Some((3, "c"))]); +/// +/// // It is fused iterator +/// assert_eq!(extract_if.next(), None); +/// assert_eq!(extract_if.next(), None); +/// drop(extract_if); +/// +/// assert_eq!(map.len(), 1); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +#[must_use = "Iterators are lazy unless consumed"] +pub struct ExtractIf<'a, K, V, F, A: Allocator = Global> +where + F: FnMut(&K, &mut V) -> bool, +{ + f: F, + inner: ExtractIfInner<'a, K, V, A>, +} + +impl Iterator for ExtractIf<'_, K, V, F, A> +where + F: FnMut(&K, &mut V) -> bool, + A: Allocator, +{ + type Item = (K, V); + + #[cfg_attr(feature = "inline-more", inline)] + fn next(&mut self) -> Option { + self.inner.next(&mut self.f) + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + (0, self.inner.iter.size_hint().1) + } +} + +impl FusedIterator for ExtractIf<'_, K, V, F> where F: FnMut(&K, &mut V) -> bool {} + +/// Portions of `ExtractIf` shared with `set::ExtractIf` +pub(super) struct ExtractIfInner<'a, K, V, A: Allocator> { + pub iter: RawIter<(K, V)>, + pub table: &'a mut RawTable<(K, V), A>, +} + +impl ExtractIfInner<'_, K, V, A> { + #[cfg_attr(feature = "inline-more", inline)] + pub(super) fn next(&mut self, f: &mut F) -> Option<(K, V)> + where + F: FnMut(&K, &mut V) -> bool, + { + unsafe { + for item in &mut self.iter { + let &mut (ref key, ref mut value) = item.as_mut(); + if f(key, value) { + return Some(self.table.remove(item).0); + } + } + } + None + } +} + +/// A mutable iterator over the values of a `HashMap` in arbitrary order. +/// The iterator element type is `&'a mut V`. +/// +/// This `struct` is created by the [`values_mut`] method on [`HashMap`]. See its +/// documentation for more. 
+/// +/// [`values_mut`]: struct.HashMap.html#method.values_mut +/// [`HashMap`]: struct.HashMap.html +/// +/// # Examples +/// +/// ``` +/// use rune_alloc::HashMap; +/// +/// let mut map: HashMap<_, _> = [(1, "One".to_owned()), (2, "Two".into())].try_into()?; +/// +/// let mut values = map.values_mut(); +/// values.next().map(|v| v.push_str(" Mississippi")); +/// values.next().map(|v| v.push_str(" Mississippi")); +/// +/// // It is fused iterator +/// assert_eq!(values.next(), None); +/// assert_eq!(values.next(), None); +/// +/// assert_eq!(map.get(&1).unwrap(), &"One Mississippi".to_owned()); +/// assert_eq!(map.get(&2).unwrap(), &"Two Mississippi".to_owned()); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +pub struct ValuesMut<'a, K, V> { + inner: IterMut<'a, K, V>, +} + +/// A builder for computing where in a [`HashMap`] a key-value pair would be stored. +/// +/// See the [`HashMap::raw_entry_mut`] docs for usage examples. +/// +/// [`HashMap::raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut +/// +/// # Examples +/// +/// ``` +/// use core::hash::{BuildHasher, Hash}; +/// use rune_alloc::hash_map::{RawEntryBuilderMut, RawEntryMut::Vacant, RawEntryMut::Occupied}; +/// use rune_alloc::HashMap; +/// use rune_alloc::prelude::*; +/// +/// let mut map = HashMap::new(); +/// map.try_extend([(1, 11), (2, 12), (3, 13), (4, 14), (5, 15), (6, 16)])?; +/// assert_eq!(map.len(), 6); +/// +/// fn compute_hash(hash_builder: &S, key: &K) -> u64 { +/// use core::hash::Hasher; +/// let mut state = hash_builder.build_hasher(); +/// key.hash(&mut state); +/// state.finish() +/// } +/// +/// let builder: RawEntryBuilderMut<_, _, _> = map.raw_entry_mut(); +/// +/// // Existing key +/// match builder.from_key(&6) { +/// Vacant(_) => unreachable!(), +/// Occupied(view) => assert_eq!(view.get(), &16), +/// } +/// +/// for key in 0..12 { +/// let hash = compute_hash(map.hasher(), &key); +/// let value = map.get(&key).cloned(); +/// let key_value = value.as_ref().map(|v| (&key, v)); +/// +/// println!("Key: {} and value: {:?}", key, value); +/// +/// match map.raw_entry_mut().from_key(&key) { +/// Occupied(mut o) => assert_eq!(Some(o.get_key_value()), key_value), +/// Vacant(_) => assert_eq!(value, None), +/// } +/// match map.raw_entry_mut().from_key_hashed_nocheck(hash, &key) { +/// Occupied(mut o) => assert_eq!(Some(o.get_key_value()), key_value), +/// Vacant(_) => assert_eq!(value, None), +/// } +/// match map.raw_entry_mut().from_hash(hash, |q| *q == key) { +/// Occupied(mut o) => assert_eq!(Some(o.get_key_value()), key_value), +/// Vacant(_) => assert_eq!(value, None), +/// } +/// } +/// +/// assert_eq!(map.len(), 6); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +pub struct RawEntryBuilderMut<'a, K, V, S, A: Allocator = Global> { + map: &'a mut HashMap, +} + +/// A view into a single entry in a map, which may either be vacant or occupied. +/// +/// This is a lower-level version of [`Entry`]. +/// +/// This `enum` is constructed through the [`raw_entry_mut`] method on [`HashMap`], +/// then calling one of the methods of that [`RawEntryBuilderMut`]. 
+/// +/// [`HashMap`]: struct.HashMap.html +/// [`Entry`]: enum.Entry.html +/// [`raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut +/// [`RawEntryBuilderMut`]: struct.RawEntryBuilderMut.html +/// +/// # Examples +/// +/// ``` +/// use core::hash::{BuildHasher, Hash}; +/// use rune_alloc::hash_map::{HashMap, RawEntryMut, RawOccupiedEntryMut}; +/// use rune_alloc::prelude::*; +/// +/// let mut map = HashMap::new(); +/// map.try_extend([('a', 1), ('b', 2), ('c', 3)])?; +/// assert_eq!(map.len(), 3); +/// +/// fn compute_hash(hash_builder: &S, key: &K) -> u64 { +/// use core::hash::Hasher; +/// let mut state = hash_builder.build_hasher(); +/// key.hash(&mut state); +/// state.finish() +/// } +/// +/// // Existing key (try_insert) +/// let raw: RawEntryMut<_, _, _> = map.raw_entry_mut().from_key(&'a'); +/// let _raw_o: RawOccupiedEntryMut<_, _, _> = raw.try_insert('a', 10)?; +/// assert_eq!(map.len(), 3); +/// +/// // Nonexistent key (try_insert) +/// map.raw_entry_mut().from_key(&'d').try_insert('d', 40)?; +/// assert_eq!(map.len(), 4); +/// +/// // Existing key (or_try_insert) +/// let hash = compute_hash(map.hasher(), &'b'); +/// let kv = map +/// .raw_entry_mut() +/// .from_key_hashed_nocheck(hash, &'b') +/// .or_try_insert('b', 20)?; +/// assert_eq!(kv, (&mut 'b', &mut 2)); +/// *kv.1 = 20; +/// assert_eq!(map.len(), 4); +/// +/// // Nonexistent key (or_try_insert) +/// let hash = compute_hash(map.hasher(), &'e'); +/// let kv = map +/// .raw_entry_mut() +/// .from_key_hashed_nocheck(hash, &'e') +/// .or_try_insert('e', 50)?; +/// assert_eq!(kv, (&mut 'e', &mut 50)); +/// assert_eq!(map.len(), 5); +/// +/// // Existing key (or_try_insert_with) +/// let hash = compute_hash(map.hasher(), &'c'); +/// let kv = map +/// .raw_entry_mut() +/// .from_hash(hash, |q| q == &'c') +/// .or_try_insert_with(|| ('c', 30))?; +/// assert_eq!(kv, (&mut 'c', &mut 3)); +/// *kv.1 = 30; +/// assert_eq!(map.len(), 5); +/// +/// // Nonexistent key (or_try_insert_with) +/// let hash = compute_hash(map.hasher(), &'f'); +/// let kv = map +/// .raw_entry_mut() +/// .from_hash(hash, |q| q == &'f') +/// .or_try_insert_with(|| ('f', 60))?; +/// assert_eq!(kv, (&mut 'f', &mut 60)); +/// assert_eq!(map.len(), 6); +/// +/// println!("Our HashMap: {:?}", map); +/// +/// let mut vec: Vec<_> = map.iter().map(|(&k, &v)| (k, v)).collect(); +/// // The `Iter` iterator produces items in arbitrary order, so the +/// // items must be sorted to test them against a sorted array. +/// vec.sort_unstable(); +/// assert_eq!(vec, [('a', 10), ('b', 20), ('c', 30), ('d', 40), ('e', 50), ('f', 60)]); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +pub enum RawEntryMut<'a, K, V, S, A: Allocator = Global> { + /// An occupied entry. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::hash_map::RawEntryMut; + /// use rune_alloc::HashMap; + /// + /// let mut map: HashMap<_, _> = [("a", 100), ("b", 200)].try_into()?; + /// + /// match map.raw_entry_mut().from_key(&"a") { + /// RawEntryMut::Vacant(_) => unreachable!(), + /// RawEntryMut::Occupied(_) => { } + /// } + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + Occupied(RawOccupiedEntryMut<'a, K, V, S, A>), + /// A vacant entry. 
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{hash_map::RawEntryMut, HashMap}; + /// let mut map: HashMap<&str, i32> = HashMap::new(); + /// + /// match map.raw_entry_mut().from_key("a") { + /// RawEntryMut::Occupied(_) => unreachable!(), + /// RawEntryMut::Vacant(_) => { } + /// } + /// ``` + Vacant(RawVacantEntryMut<'a, K, V, S, A>), +} + +/// A view into an occupied entry in a `HashMap`. +/// It is part of the [`RawEntryMut`] enum. +/// +/// [`RawEntryMut`]: enum.RawEntryMut.html +/// +/// # Examples +/// +/// ``` +/// use core::hash::{BuildHasher, Hash}; +/// use rune_alloc::hash_map::{RawEntryMut, RawOccupiedEntryMut}; +/// use rune_alloc::HashMap; +/// use rune_alloc::prelude::*; +/// +/// let mut map = HashMap::new(); +/// map.try_extend([("a", 10), ("b", 20), ("c", 30)])?; +/// +/// fn compute_hash(hash_builder: &S, key: &K) -> u64 { +/// use core::hash::Hasher; +/// let mut state = hash_builder.build_hasher(); +/// key.hash(&mut state); +/// state.finish() +/// } +/// +/// let _raw_o: RawOccupiedEntryMut<_, _, _> = map.raw_entry_mut().from_key(&"a").try_insert("a", 100)?; +/// assert_eq!(map.len(), 3); +/// +/// // Existing key (insert and update) +/// match map.raw_entry_mut().from_key(&"a") { +/// RawEntryMut::Vacant(_) => unreachable!(), +/// RawEntryMut::Occupied(mut view) => { +/// assert_eq!(view.get(), &100); +/// let v = view.get_mut(); +/// let new_v = (*v) * 10; +/// *v = new_v; +/// assert_eq!(view.insert(1111), 1000); +/// } +/// } +/// +/// assert_eq!(map[&"a"], 1111); +/// assert_eq!(map.len(), 3); +/// +/// // Existing key (take) +/// let hash = compute_hash(map.hasher(), &"c"); +/// match map.raw_entry_mut().from_key_hashed_nocheck(hash, &"c") { +/// RawEntryMut::Vacant(_) => unreachable!(), +/// RawEntryMut::Occupied(view) => { +/// assert_eq!(view.remove_entry(), ("c", 30)); +/// } +/// } +/// assert_eq!(map.raw_entry().from_key(&"c"), None); +/// assert_eq!(map.len(), 2); +/// +/// let hash = compute_hash(map.hasher(), &"b"); +/// match map.raw_entry_mut().from_hash(hash, |q| *q == "b") { +/// RawEntryMut::Vacant(_) => unreachable!(), +/// RawEntryMut::Occupied(view) => { +/// assert_eq!(view.remove_entry(), ("b", 20)); +/// } +/// } +/// assert_eq!(map.get(&"b"), None); +/// assert_eq!(map.len(), 1); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +pub struct RawOccupiedEntryMut<'a, K, V, S, A: Allocator = Global> { + elem: Bucket<(K, V)>, + table: &'a mut RawTable<(K, V), A>, + hash_builder: &'a S, +} + +unsafe impl Send for RawOccupiedEntryMut<'_, K, V, S, A> +where + K: Send, + V: Send, + S: Send, + A: Send + Allocator, +{ +} +unsafe impl Sync for RawOccupiedEntryMut<'_, K, V, S, A> +where + K: Sync, + V: Sync, + S: Sync, + A: Sync + Allocator, +{ +} + +/// A view into a vacant entry in a `HashMap`. +/// It is part of the [`RawEntryMut`] enum. 
+/// +/// [`RawEntryMut`]: enum.RawEntryMut.html +/// +/// # Examples +/// +/// ``` +/// use core::hash::{BuildHasher, Hash}; +/// use rune_alloc::hash_map::{RawEntryMut, RawVacantEntryMut}; +/// use rune_alloc::HashMap; +/// +/// let mut map = HashMap::<&str, i32>::new(); +/// +/// fn compute_hash(hash_builder: &S, key: &K) -> u64 { +/// use core::hash::Hasher; +/// let mut state = hash_builder.build_hasher(); +/// key.hash(&mut state); +/// state.finish() +/// } +/// +/// let raw_v: RawVacantEntryMut<_, _, _> = match map.raw_entry_mut().from_key(&"a") { +/// RawEntryMut::Vacant(view) => view, +/// RawEntryMut::Occupied(_) => unreachable!(), +/// }; +/// raw_v.try_insert("a", 10)?; +/// assert!(map[&"a"] == 10 && map.len() == 1); +/// +/// // Nonexistent key (insert and update) +/// let hash = compute_hash(map.hasher(), &"b"); +/// match map.raw_entry_mut().from_key_hashed_nocheck(hash, &"b") { +/// RawEntryMut::Occupied(_) => unreachable!(), +/// RawEntryMut::Vacant(view) => { +/// let (k, value) = view.try_insert("b", 2)?; +/// assert_eq!((*k, *value), ("b", 2)); +/// *value = 20; +/// } +/// } +/// assert!(map[&"b"] == 20 && map.len() == 2); +/// +/// let hash = compute_hash(map.hasher(), &"c"); +/// match map.raw_entry_mut().from_hash(hash, |q| *q == "c") { +/// RawEntryMut::Occupied(_) => unreachable!(), +/// RawEntryMut::Vacant(view) => { +/// assert_eq!(view.try_insert("c", 30)?, (&mut "c", &mut 30)); +/// } +/// } +/// assert!(map[&"c"] == 30 && map.len() == 3); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +pub struct RawVacantEntryMut<'a, K, V, S, A: Allocator = Global> { + table: &'a mut RawTable<(K, V), A>, + hash_builder: &'a S, +} + +/// A builder for computing where in a [`HashMap`] a key-value pair would be stored. +/// +/// See the [`HashMap::raw_entry`] docs for usage examples. +/// +/// [`HashMap::raw_entry`]: struct.HashMap.html#method.raw_entry +/// +/// # Examples +/// +/// ``` +/// use core::hash::{BuildHasher, Hash}; +/// use rune_alloc::hash_map::RawEntryBuilder; +/// use rune_alloc::HashMap; +/// use rune_alloc::prelude::*; +/// +/// let mut map = HashMap::new(); +/// map.try_extend([(1, 10), (2, 20), (3, 30)])?; +/// +/// fn compute_hash(hash_builder: &S, key: &K) -> u64 { +/// use core::hash::Hasher; +/// let mut state = hash_builder.build_hasher(); +/// key.hash(&mut state); +/// state.finish() +/// } +/// +/// for k in 0..6 { +/// let hash = compute_hash(map.hasher(), &k); +/// let v = map.get(&k).cloned(); +/// let kv = v.as_ref().map(|v| (&k, v)); +/// +/// println!("Key: {} and value: {:?}", k, v); +/// let builder: RawEntryBuilder<_, _, _> = map.raw_entry(); +/// assert_eq!(builder.from_key(&k), kv); +/// assert_eq!(map.raw_entry().from_hash(hash, |q| *q == k), kv); +/// assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &k), kv); +/// } +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +pub struct RawEntryBuilder<'a, K, V, S, A: Allocator = Global> { + map: &'a HashMap, +} + +impl<'a, K, V, S, A: Allocator> RawEntryBuilderMut<'a, K, V, S, A> { + /// Creates a `RawEntryMut` from the given key. 
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::hash_map::RawEntryMut; + /// use rune_alloc::HashMap; + /// + /// let mut map: HashMap<&str, u32> = HashMap::new(); + /// let key = "a"; + /// let entry: RawEntryMut<&str, u32, _> = map.raw_entry_mut().from_key(&key); + /// entry.try_insert(key, 100)?; + /// assert_eq!(map[&"a"], 100); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + #[allow(clippy::wrong_self_convention)] + pub fn from_key(self, k: &Q) -> RawEntryMut<'a, K, V, S, A> + where + S: BuildHasher, + Q: Hash + Equivalent, + { + let hash = make_hash::(&self.map.hash_builder, k); + self.from_key_hashed_nocheck(hash, k) + } + + /// Creates a `RawEntryMut` from the given key and its hash. + /// + /// # Examples + /// + /// ``` + /// use core::hash::{BuildHasher, Hash}; + /// use rune_alloc::hash_map::{HashMap, RawEntryMut}; + /// + /// fn compute_hash(hash_builder: &S, key: &K) -> u64 { + /// use core::hash::Hasher; + /// let mut state = hash_builder.build_hasher(); + /// key.hash(&mut state); + /// state.finish() + /// } + /// + /// let mut map: HashMap<&str, u32> = HashMap::new(); + /// let key = "a"; + /// let hash = compute_hash(map.hasher(), &key); + /// let entry: RawEntryMut<&str, u32, _> = map.raw_entry_mut().from_key_hashed_nocheck(hash, &key); + /// entry.try_insert(key, 100)?; + /// assert_eq!(map[&"a"], 100); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + #[allow(clippy::wrong_self_convention)] + pub fn from_key_hashed_nocheck(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S, A> + where + Q: Equivalent, + { + self.from_hash(hash, equivalent(k)) + } +} + +impl<'a, K, V, S, A: Allocator> RawEntryBuilderMut<'a, K, V, S, A> { + /// Creates a `RawEntryMut` from the given hash and matching function. + /// + /// # Examples + /// + /// ``` + /// use core::hash::{BuildHasher, Hash}; + /// use rune_alloc::hash_map::{HashMap, RawEntryMut}; + /// + /// fn compute_hash(hash_builder: &S, key: &K) -> u64 { + /// use core::hash::Hasher; + /// let mut state = hash_builder.build_hasher(); + /// key.hash(&mut state); + /// state.finish() + /// } + /// + /// let mut map: HashMap<&str, u32> = HashMap::new(); + /// let key = "a"; + /// let hash = compute_hash(map.hasher(), &key); + /// let entry: RawEntryMut<&str, u32, _> = map.raw_entry_mut().from_hash(hash, |k| k == &key); + /// entry.try_insert(key, 100)?; + /// assert_eq!(map[&"a"], 100); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + #[allow(clippy::wrong_self_convention)] + pub fn from_hash(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S, A> + where + F: FnMut(&K) -> bool, + { + self.search(hash, is_match) + } + + #[cfg_attr(feature = "inline-more", inline)] + fn search(self, hash: u64, mut is_match: F) -> RawEntryMut<'a, K, V, S, A> + where + F: FnMut(&K) -> bool, + { + match into_ok(self.map.table.find( + &mut is_match, + hash, + move |is_match: &mut F, (k, _): &(K, _)| Ok(is_match(k)), + )) { + Some(elem) => RawEntryMut::Occupied(RawOccupiedEntryMut { + elem, + table: &mut self.map.table, + hash_builder: &self.map.hash_builder, + }), + None => RawEntryMut::Vacant(RawVacantEntryMut { + table: &mut self.map.table, + hash_builder: &self.map.hash_builder, + }), + } + } +} + +impl<'a, K, V, S, A: Allocator> RawEntryBuilder<'a, K, V, S, A> { + /// Access an immutable entry by key. 
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let map: HashMap<&str, u32> = [("a", 100), ("b", 200)].try_into()?; + /// let key = "a"; + /// assert_eq!(map.raw_entry().from_key(&key), Some((&"a", &100))); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + #[allow(clippy::wrong_self_convention)] + pub fn from_key(self, k: &Q) -> Option<(&'a K, &'a V)> + where + S: BuildHasher, + Q: Hash + Equivalent, + { + let hash = make_hash::(&self.map.hash_builder, k); + self.from_key_hashed_nocheck(hash, k) + } + + /// Access an immutable entry by a key and its hash. + /// + /// # Examples + /// + /// ``` + /// use core::hash::{BuildHasher, Hash}; + /// use rune_alloc::HashMap; + /// + /// fn compute_hash(hash_builder: &S, key: &K) -> u64 { + /// use core::hash::Hasher; + /// let mut state = hash_builder.build_hasher(); + /// key.hash(&mut state); + /// state.finish() + /// } + /// + /// let map: HashMap<&str, u32> = [("a", 100), ("b", 200)].try_into()?; + /// let key = "a"; + /// let hash = compute_hash(map.hasher(), &key); + /// assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &key), Some((&"a", &100))); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + #[allow(clippy::wrong_self_convention)] + pub fn from_key_hashed_nocheck(self, hash: u64, k: &Q) -> Option<(&'a K, &'a V)> + where + Q: Equivalent, + { + self.from_hash(hash, equivalent(k)) + } + + #[cfg_attr(feature = "inline-more", inline)] + fn search(self, hash: u64, mut is_match: F) -> Option<(&'a K, &'a V)> + where + F: FnMut(&K) -> bool, + { + match into_ok(self.map.table.get( + &mut is_match, + hash, + |is_match: &mut F, (k, _): &(K, _)| Ok(is_match(k)), + )) { + Some((key, value)) => Some((key, value)), + None => None, + } + } + + /// Access an immutable entry by hash and matching function. + /// + /// # Examples + /// + /// ``` + /// use core::hash::{BuildHasher, Hash}; + /// use rune_alloc::HashMap; + /// + /// fn compute_hash(hash_builder: &S, key: &K) -> u64 { + /// use core::hash::Hasher; + /// let mut state = hash_builder.build_hasher(); + /// key.hash(&mut state); + /// state.finish() + /// } + /// + /// let map: HashMap<&str, u32> = [("a", 100), ("b", 200)].try_into()?; + /// let key = "a"; + /// let hash = compute_hash(map.hasher(), &key); + /// assert_eq!(map.raw_entry().from_hash(hash, |k| k == &key), Some((&"a", &100))); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + #[allow(clippy::wrong_self_convention)] + pub fn from_hash(self, hash: u64, is_match: F) -> Option<(&'a K, &'a V)> + where + F: FnMut(&K) -> bool, + { + self.search(hash, is_match) + } +} + +impl<'a, K, V, S, A: Allocator> RawEntryMut<'a, K, V, S, A> { + /// Sets the value of the entry, and returns a RawOccupiedEntryMut. 
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut map: HashMap<&str, u32> = HashMap::new(); + /// let entry = map.raw_entry_mut().from_key("horseyland").try_insert("horseyland", 37)?; + /// + /// assert_eq!(entry.remove_entry(), ("horseyland", 37)); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn try_insert(self, key: K, value: V) -> Result, Error> + where + K: Hash, + S: BuildHasher, + { + match self { + RawEntryMut::Occupied(mut entry) => { + entry.insert(value); + Ok(entry) + } + RawEntryMut::Vacant(entry) => { + let hasher = make_hasher::(entry.hash_builder); + into_ok_try(entry.insert_entry(&mut (), hasher, key, value)) + } + } + } + + #[cfg(test)] + pub(crate) fn insert(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V, S, A> + where + K: Hash, + S: BuildHasher, + { + self.try_insert(key, value).abort() + } + + /// Ensures a value is in the entry by inserting the default if empty, and returns + /// mutable references to the key and value in the entry. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut map: HashMap<&str, u32> = HashMap::new(); + /// + /// map.raw_entry_mut().from_key("poneyland").or_try_insert("poneyland", 3)?; + /// assert_eq!(map["poneyland"], 3); + /// + /// *map.raw_entry_mut().from_key("poneyland").or_try_insert("poneyland", 10)?.1 *= 2; + /// assert_eq!(map["poneyland"], 6); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn or_try_insert( + self, + default_key: K, + default_val: V, + ) -> Result<(&'a mut K, &'a mut V), Error> + where + K: Hash, + S: BuildHasher, + { + match self { + RawEntryMut::Occupied(entry) => Ok(entry.into_key_value()), + RawEntryMut::Vacant(entry) => entry.try_insert(default_key, default_val), + } + } + + /// Ensures a value is in the entry by inserting the result of the default function if empty, + /// and returns mutable references to the key and value in the entry. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut map: HashMap<&str, String> = HashMap::new(); + /// + /// map.raw_entry_mut().from_key("poneyland").or_try_insert_with(|| { + /// ("poneyland", "hoho".to_string()) + /// })?; + /// + /// assert_eq!(map["poneyland"], "hoho".to_string()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn or_try_insert_with(self, default: F) -> Result<(&'a mut K, &'a mut V), Error> + where + F: FnOnce() -> (K, V), + K: Hash, + S: BuildHasher, + { + match self { + RawEntryMut::Occupied(entry) => Ok(entry.into_key_value()), + RawEntryMut::Vacant(entry) => { + let (k, v) = default(); + entry.try_insert(k, v) + } + } + } + + /// Provides in-place mutable access to an occupied entry before any + /// potential inserts into the map. 
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// + /// let mut map: HashMap<&str, u32> = HashMap::new(); + /// + /// map.raw_entry_mut() + /// .from_key("poneyland") + /// .and_modify(|_k, v| { *v += 1 }) + /// .or_try_insert("poneyland", 42)?; + /// assert_eq!(map["poneyland"], 42); + /// + /// map.raw_entry_mut() + /// .from_key("poneyland") + /// .and_modify(|_k, v| { *v += 1 }) + /// .or_try_insert("poneyland", 0)?; + /// assert_eq!(map["poneyland"], 43); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn and_modify(self, f: F) -> Self + where + F: FnOnce(&mut K, &mut V), + { + match self { + RawEntryMut::Occupied(mut entry) => { + { + let (k, v) = entry.get_key_value_mut(); + f(k, v); + } + RawEntryMut::Occupied(entry) + } + RawEntryMut::Vacant(entry) => RawEntryMut::Vacant(entry), + } + } + + /// Provides shared access to the key and owned access to the value of + /// an occupied entry and allows to replace or remove it based on the + /// value of the returned option. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashMap; + /// use rune_alloc::hash_map::RawEntryMut; + /// + /// let mut map: HashMap<&str, u32> = HashMap::new(); + /// + /// let entry = map + /// .raw_entry_mut() + /// .from_key("poneyland") + /// .and_replace_entry_with(|_k, _v| panic!()); + /// + /// match entry { + /// RawEntryMut::Vacant(_) => {}, + /// RawEntryMut::Occupied(_) => panic!(), + /// } + /// + /// map.try_insert("poneyland", 42)?; + /// + /// let entry = map + /// .raw_entry_mut() + /// .from_key("poneyland") + /// .and_replace_entry_with(|k, v| { + /// assert_eq!(k, &"poneyland"); + /// assert_eq!(v, 42); + /// Some(v + 1) + /// }); + /// + /// match entry { + /// RawEntryMut::Occupied(e) => { + /// assert_eq!(e.key(), &"poneyland"); + /// assert_eq!(e.get(), &43); + /// }, + /// RawEntryMut::Vacant(_) => panic!(), + /// } + /// + /// assert_eq!(map["poneyland"], 43); + /// + /// let entry = map + /// .raw_entry_mut() + /// .from_key("poneyland") + /// .and_replace_entry_with(|_k, _v| None); + /// + /// match entry { + /// RawEntryMut::Vacant(_) => {}, + /// RawEntryMut::Occupied(_) => panic!(), + /// } + /// + /// assert!(!map.contains_key("poneyland")); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn and_replace_entry_with(self, f: F) -> Self + where + F: FnOnce(&K, V) -> Option, + { + match self { + RawEntryMut::Occupied(entry) => entry.replace_entry_with(f), + RawEntryMut::Vacant(_) => self, + } + } +} + +impl<'a, K, V, S, A: Allocator> RawOccupiedEntryMut<'a, K, V, S, A> { + /// Gets a reference to the key in the entry. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::hash_map::{HashMap, RawEntryMut}; + /// + /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].try_into()?; + /// + /// match map.raw_entry_mut().from_key(&"a") { + /// RawEntryMut::Vacant(_) => panic!(), + /// RawEntryMut::Occupied(o) => assert_eq!(o.key(), &"a") + /// } + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn key(&self) -> &K { + unsafe { &self.elem.as_ref().0 } + } + + /// Gets a mutable reference to the key in the entry. 
+impl<'a, K, V, S, A: Allocator> RawOccupiedEntryMut<'a, K, V, S, A> {
+ /// Gets a reference to the key in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::hash_map::{HashMap, RawEntryMut};
+ ///
+ /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].try_into()?;
+ ///
+ /// match map.raw_entry_mut().from_key(&"a") {
+ /// RawEntryMut::Vacant(_) => panic!(),
+ /// RawEntryMut::Occupied(o) => assert_eq!(o.key(), &"a")
+ /// }
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn key(&self) -> &K {
+ unsafe { &self.elem.as_ref().0 }
+ }
+
+ /// Gets a mutable reference to the key in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::hash_map::{HashMap, RawEntryMut};
+ /// use std::rc::Rc;
+ ///
+ /// let key_one = Rc::new("a");
+ /// let key_two = Rc::new("a");
+ ///
+ /// let mut map: HashMap<Rc<&str>, u32> = HashMap::new();
+ /// map.try_insert(key_one.clone(), 10)?;
+ ///
+ /// assert_eq!(map[&key_one], 10);
+ /// assert!(Rc::strong_count(&key_one) == 2 && Rc::strong_count(&key_two) == 1);
+ ///
+ /// match map.raw_entry_mut().from_key(&key_one) {
+ /// RawEntryMut::Vacant(_) => panic!(),
+ /// RawEntryMut::Occupied(mut o) => {
+ /// *o.key_mut() = key_two.clone();
+ /// }
+ /// }
+ /// assert_eq!(map[&key_two], 10);
+ /// assert!(Rc::strong_count(&key_one) == 1 && Rc::strong_count(&key_two) == 2);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn key_mut(&mut self) -> &mut K {
+ unsafe { &mut self.elem.as_mut().0 }
+ }
+
+ /// Converts the entry into a mutable reference to the key in the entry
+ /// with a lifetime bound to the map itself.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::hash_map::{HashMap, RawEntryMut};
+ /// use std::rc::Rc;
+ ///
+ /// let key_one = Rc::new("a");
+ /// let key_two = Rc::new("a");
+ ///
+ /// let mut map: HashMap<Rc<&str>, u32> = HashMap::new();
+ /// map.try_insert(key_one.clone(), 10)?;
+ ///
+ /// assert_eq!(map[&key_one], 10);
+ /// assert!(Rc::strong_count(&key_one) == 2 && Rc::strong_count(&key_two) == 1);
+ ///
+ /// let inside_key: &mut Rc<&str>;
+ ///
+ /// match map.raw_entry_mut().from_key(&key_one) {
+ /// RawEntryMut::Vacant(_) => panic!(),
+ /// RawEntryMut::Occupied(o) => inside_key = o.into_key(),
+ /// }
+ /// *inside_key = key_two.clone();
+ ///
+ /// assert_eq!(map[&key_two], 10);
+ /// assert!(Rc::strong_count(&key_one) == 1 && Rc::strong_count(&key_two) == 2);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn into_key(self) -> &'a mut K {
+ unsafe { &mut self.elem.as_mut().0 }
+ }
+
+ /// Gets a reference to the value in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::hash_map::{HashMap, RawEntryMut};
+ ///
+ /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].try_into()?;
+ ///
+ /// match map.raw_entry_mut().from_key(&"a") {
+ /// RawEntryMut::Vacant(_) => panic!(),
+ /// RawEntryMut::Occupied(o) => assert_eq!(o.get(), &100),
+ /// }
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn get(&self) -> &V {
+ unsafe { &self.elem.as_ref().1 }
+ }
+
+ /// Converts the OccupiedEntry into a mutable reference to the value in the entry
+ /// with a lifetime bound to the map itself.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::hash_map::{HashMap, RawEntryMut};
+ ///
+ /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].try_into()?;
+ ///
+ /// let value: &mut u32;
+ ///
+ /// match map.raw_entry_mut().from_key(&"a") {
+ /// RawEntryMut::Vacant(_) => panic!(),
+ /// RawEntryMut::Occupied(o) => value = o.into_mut(),
+ /// }
+ /// *value += 900;
+ ///
+ /// assert_eq!(map[&"a"], 1000);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn into_mut(self) -> &'a mut V {
+ unsafe { &mut self.elem.as_mut().1 }
+ }
+
+ /// Gets a mutable reference to the value in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::hash_map::{HashMap, RawEntryMut};
+ ///
+ /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].try_into()?;
+ ///
+ /// match map.raw_entry_mut().from_key(&"a") {
+ /// RawEntryMut::Vacant(_) => panic!(),
+ /// RawEntryMut::Occupied(mut o) => *o.get_mut() += 900,
+ /// }
+ ///
+ /// assert_eq!(map[&"a"], 1000);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn get_mut(&mut self) -> &mut V {
+ unsafe { &mut self.elem.as_mut().1 }
+ }
+
+ /// Gets a reference to the key and value in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::hash_map::{HashMap, RawEntryMut};
+ ///
+ /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].try_into()?;
+ ///
+ /// match map.raw_entry_mut().from_key(&"a") {
+ /// RawEntryMut::Vacant(_) => panic!(),
+ /// RawEntryMut::Occupied(o) => assert_eq!(o.get_key_value(), (&"a", &100)),
+ /// }
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn get_key_value(&self) -> (&K, &V) {
+ unsafe {
+ let (key, value) = self.elem.as_ref();
+ (key, value)
+ }
+ }
+
+ /// Gets a mutable reference to the key and value in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::hash_map::{HashMap, RawEntryMut};
+ /// use std::rc::Rc;
+ ///
+ /// let key_one = Rc::new("a");
+ /// let key_two = Rc::new("a");
+ ///
+ /// let mut map: HashMap<Rc<&str>, u32> = HashMap::new();
+ /// map.try_insert(key_one.clone(), 10)?;
+ ///
+ /// assert_eq!(map[&key_one], 10);
+ /// assert!(Rc::strong_count(&key_one) == 2 && Rc::strong_count(&key_two) == 1);
+ ///
+ /// match map.raw_entry_mut().from_key(&key_one) {
+ /// RawEntryMut::Vacant(_) => panic!(),
+ /// RawEntryMut::Occupied(mut o) => {
+ /// let (inside_key, inside_value) = o.get_key_value_mut();
+ /// *inside_key = key_two.clone();
+ /// *inside_value = 100;
+ /// }
+ /// }
+ /// assert_eq!(map[&key_two], 100);
+ /// assert!(Rc::strong_count(&key_one) == 1 && Rc::strong_count(&key_two) == 2);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn get_key_value_mut(&mut self) -> (&mut K, &mut V) {
+ unsafe {
+ let &mut (ref mut key, ref mut value) = self.elem.as_mut();
+ (key, value)
+ }
+ }
+
+ /// Converts the OccupiedEntry into a mutable reference to the key and value in the entry
+ /// with a lifetime bound to the map itself.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::hash_map::{HashMap, RawEntryMut};
+ /// use std::rc::Rc;
+ ///
+ /// let key_one = Rc::new("a");
+ /// let key_two = Rc::new("a");
+ ///
+ /// let mut map: HashMap<Rc<&str>, u32> = HashMap::new();
+ /// map.try_insert(key_one.clone(), 10)?;
+ ///
+ /// assert_eq!(map[&key_one], 10);
+ /// assert!(Rc::strong_count(&key_one) == 2 && Rc::strong_count(&key_two) == 1);
+ ///
+ /// let inside_key: &mut Rc<&str>;
+ /// let inside_value: &mut u32;
+ /// match map.raw_entry_mut().from_key(&key_one) {
+ /// RawEntryMut::Vacant(_) => panic!(),
+ /// RawEntryMut::Occupied(o) => {
+ /// let tuple = o.into_key_value();
+ /// inside_key = tuple.0;
+ /// inside_value = tuple.1;
+ /// }
+ /// }
+ /// *inside_key = key_two.clone();
+ /// *inside_value = 100;
+ /// assert_eq!(map[&key_two], 100);
+ /// assert!(Rc::strong_count(&key_one) == 1 && Rc::strong_count(&key_two) == 2);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn into_key_value(self) -> (&'a mut K, &'a mut V) {
+ unsafe {
+ let &mut (ref mut key, ref mut value) = self.elem.as_mut();
+ (key, value)
+ }
+ }
+
+ /// Sets the value of the entry, and returns the entry's old value.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::hash_map::{HashMap, RawEntryMut};
+ ///
+ /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].try_into()?;
+ ///
+ /// match map.raw_entry_mut().from_key(&"a") {
+ /// RawEntryMut::Vacant(_) => panic!(),
+ /// RawEntryMut::Occupied(mut o) => assert_eq!(o.insert(1000), 100),
+ /// }
+ ///
+ /// assert_eq!(map[&"a"], 1000);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn insert(&mut self, value: V) -> V {
+ mem::replace(self.get_mut(), value)
+ }
+
+ /// Sets the key of the entry, and returns the entry's old key.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::hash_map::{HashMap, RawEntryMut};
+ /// use std::rc::Rc;
+ ///
+ /// let key_one = Rc::new("a");
+ /// let key_two = Rc::new("a");
+ ///
+ /// let mut map: HashMap<Rc<&str>, u32> = HashMap::new();
+ /// map.try_insert(key_one.clone(), 10)?;
+ ///
+ /// assert_eq!(map[&key_one], 10);
+ /// assert!(Rc::strong_count(&key_one) == 2 && Rc::strong_count(&key_two) == 1);
+ ///
+ /// match map.raw_entry_mut().from_key(&key_one) {
+ /// RawEntryMut::Vacant(_) => panic!(),
+ /// RawEntryMut::Occupied(mut o) => {
+ /// let old_key = o.insert_key(key_two.clone());
+ /// assert!(Rc::ptr_eq(&old_key, &key_one));
+ /// }
+ /// }
+ /// assert_eq!(map[&key_two], 10);
+ /// assert!(Rc::strong_count(&key_one) == 1 && Rc::strong_count(&key_two) == 2);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn insert_key(&mut self, key: K) -> K {
+ mem::replace(self.key_mut(), key)
+ }
+
+ /// Takes the value out of the entry, and returns it.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::hash_map::{HashMap, RawEntryMut};
+ ///
+ /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].try_into()?;
+ ///
+ /// match map.raw_entry_mut().from_key(&"a") {
+ /// RawEntryMut::Vacant(_) => panic!(),
+ /// RawEntryMut::Occupied(o) => assert_eq!(o.remove(), 100),
+ /// }
+ /// assert_eq!(map.get(&"a"), None);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn remove(self) -> V {
+ self.remove_entry().1
+ }
+
+ /// Takes ownership of the key and value from the map.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::hash_map::{HashMap, RawEntryMut};
+ ///
+ /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].try_into()?;
+ ///
+ /// match map.raw_entry_mut().from_key(&"a") {
+ /// RawEntryMut::Vacant(_) => panic!(),
+ /// RawEntryMut::Occupied(o) => assert_eq!(o.remove_entry(), ("a", 100)),
+ /// }
+ /// assert_eq!(map.get(&"a"), None);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn remove_entry(self) -> (K, V) {
+ unsafe { self.table.remove(self.elem).0 }
+ }
+
+ /// Provides shared access to the key and owned access to the value of
+ /// the entry and allows replacing or removing it based on the
+ /// value of the returned option.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::hash_map::{HashMap, RawEntryMut};
+ ///
+ /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].try_into()?;
+ ///
+ /// let raw_entry = match map.raw_entry_mut().from_key(&"a") {
+ /// RawEntryMut::Vacant(_) => panic!(),
+ /// RawEntryMut::Occupied(o) => o.replace_entry_with(|k, v| {
+ /// assert_eq!(k, &"a");
+ /// assert_eq!(v, 100);
+ /// Some(v + 900)
+ /// }),
+ /// };
+ /// let raw_entry = match raw_entry {
+ /// RawEntryMut::Vacant(_) => panic!(),
+ /// RawEntryMut::Occupied(o) => o.replace_entry_with(|k, v| {
+ /// assert_eq!(k, &"a");
+ /// assert_eq!(v, 1000);
+ /// None
+ /// }),
+ /// };
+ /// match raw_entry {
+ /// RawEntryMut::Vacant(_) => { },
+ /// RawEntryMut::Occupied(_) => panic!(),
+ /// };
+ /// assert_eq!(map.get(&"a"), None);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn replace_entry_with<F>(self, f: F) -> RawEntryMut<'a, K, V, S, A>
+ where
+ F: FnOnce(&K, V) -> Option<V>,
+ {
+ unsafe {
+ let still_occupied = self
+ .table
+ .replace_bucket_with(self.elem.clone(), |(key, value)| {
+ f(&key, value).map(|new_value| (key, new_value))
+ });
+
+ if still_occupied {
+ RawEntryMut::Occupied(self)
+ } else {
+ RawEntryMut::Vacant(RawVacantEntryMut {
+ table: self.table,
+ hash_builder: self.hash_builder,
+ })
+ }
+ }
+ }
+}
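`replace_entry_with` is what lets an occupied raw entry delete itself in place, without a second lookup. A sketch of a refcount-style use (illustrative only, not part of this diff; `release` is a hypothetical helper):

```rust
use rune_alloc::hash_map::{HashMap, RawEntryMut};

/// Decrements a reference count, removing the entry once it reaches zero.
fn release(map: &mut HashMap<String, u32>, name: &str) {
    if let RawEntryMut::Occupied(entry) = map.raw_entry_mut().from_key(name) {
        // Returning `None` from the closure removes the entry in place.
        let _ = entry.replace_entry_with(|_key, refs| {
            refs.checked_sub(1).filter(|&refs| refs > 0)
        });
    }
}
```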
+impl<'a, K, V, S, A: Allocator> RawVacantEntryMut<'a, K, V, S, A> {
+ /// Sets the value of the entry with the VacantEntry's key,
+ /// and returns a mutable reference to it.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::hash_map::{HashMap, RawEntryMut};
+ ///
+ /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].try_into()?;
+ ///
+ /// match map.raw_entry_mut().from_key(&"c") {
+ /// RawEntryMut::Occupied(_) => panic!(),
+ /// RawEntryMut::Vacant(v) => assert_eq!(v.try_insert("c", 300)?, (&mut "c", &mut 300)),
+ /// }
+ ///
+ /// assert_eq!(map[&"c"], 300);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn try_insert(self, key: K, value: V) -> Result<(&'a mut K, &'a mut V), Error>
+ where
+ K: Hash,
+ S: BuildHasher,
+ {
+ let hasher = make_hasher(self.hash_builder);
+ let hash = into_ok(hasher.hash(&mut (), &key));
+
+ let &mut (ref mut k, ref mut v) =
+ into_ok_try(
+ self.table
+ .insert_entry(&mut (), hash, (key, value), hasher.into_tuple()),
+ )?;
+
+ Ok((k, v))
+ }
+
+ #[cfg(test)]
+ pub(crate) fn insert(self, key: K, value: V) -> (&'a mut K, &'a mut V)
+ where
+ K: Hash,
+ S: BuildHasher,
+ {
+ self.try_insert(key, value).abort()
+ }
+
+ /// Sets the value of the entry with the VacantEntry's key, and returns a
+ /// mutable reference to it.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use core::hash::{BuildHasher, Hash};
+ /// use rune_alloc::hash_map::{HashMap, RawEntryMut};
+ ///
+ /// fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+ /// use core::hash::Hasher;
+ /// let mut state = hash_builder.build_hasher();
+ /// key.hash(&mut state);
+ /// state.finish()
+ /// }
+ ///
+ /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].try_into()?;
+ /// let key = "c";
+ /// let hash = compute_hash(map.hasher(), &key);
+ ///
+ /// match map.raw_entry_mut().from_key_hashed_nocheck(hash, &key) {
+ /// RawEntryMut::Occupied(_) => panic!(),
+ /// RawEntryMut::Vacant(v) => assert_eq!(
+ /// v.try_insert_hashed_nocheck(hash, key, 300)?,
+ /// (&mut "c", &mut 300)
+ /// ),
+ /// }
+ ///
+ /// assert_eq!(map[&"c"], 300);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ #[allow(clippy::shadow_unrelated)]
+ pub fn try_insert_hashed_nocheck(
+ self,
+ hash: u64,
+ key: K,
+ value: V,
+ ) -> Result<(&'a mut K, &'a mut V), Error>
+ where
+ K: Hash,
+ S: BuildHasher,
+ {
+ let hasher = make_hasher::<K, V, S>(self.hash_builder);
+ let &mut (ref mut k, ref mut v) =
+ into_ok_try(
+ self.table
+ .insert_entry(&mut (), hash, (key, value), hasher.into_tuple()),
+ )?;
+ Ok((k, v))
+ }
+
+ /// Set the value of an entry with a custom hasher function.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use core::hash::{BuildHasher, Hash};
+ /// use rune_alloc::hash_map::{HashMap, RawEntryMut};
+ /// use rune_alloc::prelude::*;
+ ///
+ /// fn make_hasher<K, S>(hash_builder: &S) -> impl Fn(&K) -> u64 + '_
+ /// where
+ /// K: Hash + ?Sized,
+ /// S: BuildHasher,
+ /// {
+ /// move |key: &K| {
+ /// use core::hash::Hasher;
+ /// let mut state = hash_builder.build_hasher();
+ /// key.hash(&mut state);
+ /// state.finish()
+ /// }
+ /// }
+ ///
+ /// let mut map: HashMap<&str, u32> = HashMap::new();
+ /// let key = "a";
+ /// let hash_builder = map.hasher().clone();
+ /// let hash = make_hasher(&hash_builder)(&key);
+ ///
+ /// match map.raw_entry_mut().from_hash(hash, |q| q == &key) {
+ /// RawEntryMut::Occupied(_) => panic!(),
+ /// RawEntryMut::Vacant(v) => assert_eq!(
+ /// v.try_insert_with_hasher(hash, key, 100, make_hasher(&hash_builder))?,
+ /// (&mut "a", &mut 100)
+ /// ),
+ /// }
+ ///
+ /// map.try_extend([("b", 200), ("c", 300), ("d", 400), ("e", 500), ("f", 600)])?;
+ /// assert_eq!(map[&"a"], 100);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn try_insert_with_hasher<H>(
+ self,
+ hash: u64,
+ key: K,
+ value: V,
+ hasher: H,
+ ) -> Result<(&'a mut K, &'a mut V), Error>
+ where
+ H: Fn(&K) -> u64,
+ {
+ let &mut (ref mut k, ref mut v) = into_ok_try(self.table.insert_entry(
+ &mut (),
+ hash,
+ (key, value),
+ move |_: &mut (), x: &(K, V)| Ok(hasher(&x.0)),
+ ))?;
+
+ Ok((k, v))
+ }
+
+ #[cfg(test)]
+ pub(crate) fn insert_with_hasher<H>(
+ self,
+ hash: u64,
+ key: K,
+ value: V,
+ hasher: H,
+ ) -> (&'a mut K, &'a mut V)
+ where
+ H: Fn(&K) -> u64,
+ {
+ self.try_insert_with_hasher(hash, key, value, hasher)
+ .abort()
+ }
+
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn insert_entry<C: ?Sized, E>(
+ self,
+ cx: &mut C,
+ hasher: impl HasherFn<C, K, E>,
+ key: K,
+ value: V,
+ ) -> Result<RawOccupiedEntryMut<'a, K, V, S, A>, CustomError<E>>
+ where
+ K: Hash,
+ S: BuildHasher,
+ {
+ let hash = hasher.hash(cx, &key).map_err(CustomError::Custom)?;
+
+ let elem = self
+ .table
+ .insert(cx, hash, (key, value), hasher.into_tuple())?;
+
+ Ok(RawOccupiedEntryMut {
+ elem,
+ table: self.table,
+ hash_builder:
self.hash_builder,
+ })
+ }
+}
+
+impl<K, V, S, A: Allocator> Debug for RawEntryBuilderMut<'_, K, V, S, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("RawEntryBuilder").finish()
+ }
+}
+
+impl<K: Debug, V: Debug, S, A: Allocator> Debug for RawEntryMut<'_, K, V, S, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ RawEntryMut::Vacant(ref v) => f.debug_tuple("RawEntry").field(v).finish(),
+ RawEntryMut::Occupied(ref o) => f.debug_tuple("RawEntry").field(o).finish(),
+ }
+ }
+}
+
+impl<K: Debug, V: Debug, S, A: Allocator> Debug for RawOccupiedEntryMut<'_, K, V, S, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("RawOccupiedEntryMut")
+ .field("key", self.key())
+ .field("value", self.get())
+ .finish()
+ }
+}
+
+impl<K, V, S, A: Allocator> Debug for RawVacantEntryMut<'_, K, V, S, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("RawVacantEntryMut").finish()
+ }
+}
+
+impl<K, V, S, A: Allocator> Debug for RawEntryBuilder<'_, K, V, S, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("RawEntryBuilder").finish()
+ }
+}
+
+/// A view into a single entry in a map, which may either be vacant or occupied.
+///
+/// This `enum` is constructed from the [`entry`] method on [`HashMap`].
+///
+/// [`HashMap`]: struct.HashMap.html
+/// [`entry`]: struct.HashMap.html#method.entry
+///
+/// # Examples
+///
+/// ```
+/// use rune_alloc::hash_map::{Entry, HashMap, OccupiedEntry};
+/// use rune_alloc::prelude::*;
+///
+/// let mut map = HashMap::new();
+/// map.try_extend([("a", 10), ("b", 20), ("c", 30)])?;
+/// assert_eq!(map.len(), 3);
+///
+/// // Existing key (try_insert)
+/// let entry: Entry<_, _, _> = map.entry("a");
+/// let _raw_o: OccupiedEntry<_, _, _> = entry.try_insert(1)?;
+/// assert_eq!(map.len(), 3);
+/// // Nonexistent key (try_insert)
+/// map.entry("d").try_insert(4)?;
+///
+/// // Existing key (or_try_insert)
+/// let v = map.entry("b").or_try_insert(2)?;
+/// assert_eq!(std::mem::replace(v, 2), 20);
+/// // Nonexistent key (or_try_insert)
+/// map.entry("e").or_try_insert(5)?;
+///
+/// // Existing key (or_try_insert_with)
+/// let v = map.entry("c").or_try_insert_with(|| 3)?;
+/// assert_eq!(std::mem::replace(v, 3), 30);
+/// // Nonexistent key (or_try_insert_with)
+/// map.entry("f").or_try_insert_with(|| 6)?;
+///
+/// println!("Our HashMap: {:?}", map);
+///
+/// let mut vec: Vec<_> = map.iter().map(|(&k, &v)| (k, v)).collect();
+/// // The `Iter` iterator produces items in arbitrary order, so the
+/// // items must be sorted to test them against a sorted array.
+/// vec.sort_unstable();
+/// assert_eq!(vec, [("a", 1), ("b", 2), ("c", 3), ("d", 4), ("e", 5), ("f", 6)]);
+/// # Ok::<_, rune_alloc::Error>(())
+/// ```
+pub enum Entry<'a, K, V, S, A = Global>
+where
+ A: Allocator,
+{
+ /// An occupied entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::hash_map::{Entry, HashMap};
+ /// let mut map: HashMap<_, _> = [("a", 100), ("b", 200)].try_into()?;
+ ///
+ /// match map.entry("a") {
+ /// Entry::Vacant(_) => unreachable!(),
+ /// Entry::Occupied(_) => { }
+ /// }
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ Occupied(OccupiedEntry<'a, K, V, S, A>),
+
+ /// A vacant entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::hash_map::{Entry, HashMap};
+ /// let mut map: HashMap<&str, i32> = HashMap::new();
+ ///
+ /// match map.entry("a") {
+ /// Entry::Occupied(_) => unreachable!(),
+ /// Entry::Vacant(_) => { }
+ /// }
+ /// ```
+ Vacant(VacantEntry<'a, K, V, S, A>),
+}
+
+impl<K: Debug, V: Debug, S, A: Allocator> Debug for Entry<'_, K, V, S, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ Entry::Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
+ Entry::Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(),
+ }
+ }
+}
+
+/// A view into an occupied entry in a `HashMap`.
+/// It is part of the [`Entry`] enum.
+///
+/// [`Entry`]: enum.Entry.html
+///
+/// # Examples
+///
+/// ```
+/// use rune_alloc::hash_map::{Entry, HashMap, OccupiedEntry};
+/// use rune_alloc::prelude::*;
+///
+/// let mut map = HashMap::new();
+/// map.try_extend([("a", 10), ("b", 20), ("c", 30)])?;
+///
+/// let _entry_o: OccupiedEntry<_, _, _> = map.entry("a").try_insert(100)?;
+/// assert_eq!(map.len(), 3);
+///
+/// // Existing key (insert and update)
+/// match map.entry("a") {
+/// Entry::Vacant(_) => unreachable!(),
+/// Entry::Occupied(mut view) => {
+/// assert_eq!(view.get(), &100);
+/// let v = view.get_mut();
+/// *v *= 10;
+/// assert_eq!(view.insert(1111), 1000);
+/// }
+/// }
+///
+/// assert_eq!(map[&"a"], 1111);
+/// assert_eq!(map.len(), 3);
+///
+/// // Existing key (take)
+/// match map.entry("c") {
+/// Entry::Vacant(_) => unreachable!(),
+/// Entry::Occupied(view) => {
+/// assert_eq!(view.remove_entry(), ("c", 30));
+/// }
+/// }
+/// assert_eq!(map.get(&"c"), None);
+/// assert_eq!(map.len(), 2);
+/// # Ok::<_, rune_alloc::Error>(())
+/// ```
+pub struct OccupiedEntry<'a, K, V, S = DefaultHashBuilder, A: Allocator = Global> {
+ hash: u64,
+ key: Option<K>,
+ elem: Bucket<(K, V)>,
+ table: &'a mut HashMap<K, V, S, A>,
+}
+
+unsafe impl<K, V, S, A> Send for OccupiedEntry<'_, K, V, S, A>
+where
+ K: Send,
+ V: Send,
+ S: Send,
+ A: Send + Allocator,
+{
+}
+unsafe impl<K, V, S, A> Sync for OccupiedEntry<'_, K, V, S, A>
+where
+ K: Sync,
+ V: Sync,
+ S: Sync,
+ A: Sync + Allocator,
+{
+}
+
+impl<K: Debug, V: Debug, S, A: Allocator> Debug for OccupiedEntry<'_, K, V, S, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("OccupiedEntry")
+ .field("key", self.key())
+ .field("value", self.get())
+ .finish()
+ }
+}
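Matching on `Entry` is the usual way to branch on occupancy; in this crate only the vacant arm can fail, since it may need to grow the table. A sketch of the pattern (illustrative only, not part of this diff; `insert_if_absent` is a hypothetical helper):

```rust
use rune_alloc::hash_map::{Entry, HashMap};
use rune_alloc::Error;

/// Inserts `value` only when `key` is absent, reporting whether it did.
fn insert_if_absent(
    map: &mut HashMap<&'static str, u32>,
    key: &'static str,
    value: u32,
) -> Result<bool, Error> {
    match map.entry(key) {
        // The occupied arm never allocates, so it cannot fail.
        Entry::Occupied(_) => Ok(false),
        Entry::Vacant(vacant) => {
            vacant.try_insert(value)?;
            Ok(true)
        }
    }
}
```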
+/// +/// [`Entry`]: enum.Entry.html +/// +/// # Examples +/// +/// ``` +/// use rune_alloc::hash_map::{Entry, HashMap, VacantEntry}; +/// +/// let mut map = HashMap::<&str, i32>::new(); +/// +/// let entry_v: VacantEntry<_, _, _> = match map.entry("a") { +/// Entry::Vacant(view) => view, +/// Entry::Occupied(_) => unreachable!(), +/// }; +/// entry_v.try_insert(10)?; +/// assert!(map[&"a"] == 10 && map.len() == 1); +/// +/// // Nonexistent key (insert and update) +/// match map.entry("b") { +/// Entry::Occupied(_) => unreachable!(), +/// Entry::Vacant(view) => { +/// let value = view.try_insert(2)?; +/// assert_eq!(*value, 2); +/// *value = 20; +/// } +/// } +/// assert!(map[&"b"] == 20 && map.len() == 2); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +pub struct VacantEntry<'a, K, V, S = DefaultHashBuilder, A: Allocator = Global> { + hash: u64, + key: K, + table: &'a mut HashMap, +} + +impl Debug for VacantEntry<'_, K, V, S, A> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("VacantEntry").field(self.key()).finish() + } +} + +/// A view into a single entry in a map, which may either be vacant or occupied, +/// with any borrowed form of the map's key type. +/// +/// +/// This `enum` is constructed from the [`entry_ref`] method on [`HashMap`]. +/// +/// [`Hash`] and [`Eq`] on the borrowed form of the map's key type *must* match those +/// for the key type. It also require that key may be constructed from the borrowed +/// form through the [`From`] trait. +/// +/// [`HashMap`]: struct.HashMap.html +/// [`entry_ref`]: struct.HashMap.html#method.entry_ref +/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html +/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html +/// [`From`]: https://doc.rust-lang.org/std/convert/trait.From.html +/// +/// # Examples +/// +/// ``` +/// use rune_alloc::hash_map::{EntryRef, HashMap, OccupiedEntryRef}; +/// use rune_alloc::prelude::*; +/// +/// let mut map = HashMap::new(); +/// map.try_extend([("a".to_owned(), 10), ("b".into(), 20), ("c".into(), 30)])?; +/// assert_eq!(map.len(), 3); +/// +/// // Existing key (try_insert) +/// let key = String::from("a"); +/// let entry: EntryRef<_, _, _, _> = map.entry_ref(&key); +/// let _raw_o: OccupiedEntryRef<_, _, _, _> = entry.try_insert(1)?; +/// assert_eq!(map.len(), 3); +/// // Nonexistent key (try_insert) +/// map.entry_ref("d").try_insert(4)?; +/// +/// // Existing key (or_try_insert) +/// let v = map.entry_ref("b").or_try_insert(2)?; +/// assert_eq!(std::mem::replace(v, 2), 20); +/// // Nonexistent key (or_try_insert) +/// map.entry_ref("e").or_try_insert(5)?; +/// +/// // Existing key (or_try_insert_with) +/// let v = map.entry_ref("c").or_try_insert_with(|| 3)?; +/// assert_eq!(std::mem::replace(v, 3), 30); +/// // Nonexistent key (or_try_insert_with) +/// map.entry_ref("f").or_try_insert_with(|| 6)?; +/// +/// println!("Our HashMap: {:?}", map); +/// +/// for (key, value) in ["a", "b", "c", "d", "e", "f"].into_iter().zip(1..=6) { +/// assert_eq!(map[key], value) +/// } +/// assert_eq!(map.len(), 6); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +pub enum EntryRef<'a, 'b, K, Q: ?Sized, V, S, A = Global> +where + A: Allocator, +{ + /// An occupied entry. 
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::hash_map::{EntryRef, HashMap};
+ /// let mut map: HashMap<_, _> = [("a".to_owned(), 100), ("b".into(), 200)].try_into()?;
+ ///
+ /// match map.entry_ref("a") {
+ /// EntryRef::Vacant(_) => unreachable!(),
+ /// EntryRef::Occupied(_) => { }
+ /// }
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ Occupied(OccupiedEntryRef<'a, 'b, K, Q, V, S, A>),
+
+ /// A vacant entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::hash_map::{EntryRef, HashMap};
+ /// let mut map: HashMap<String, i32> = HashMap::new();
+ ///
+ /// match map.entry_ref("a") {
+ /// EntryRef::Occupied(_) => unreachable!(),
+ /// EntryRef::Vacant(_) => { }
+ /// }
+ /// ```
+ Vacant(VacantEntryRef<'a, 'b, K, Q, V, S, A>),
+}
+
+impl<K: Borrow<Q>, Q: ?Sized + Debug, V: Debug, S, A: Allocator> Debug
+ for EntryRef<'_, '_, K, Q, V, S, A>
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ EntryRef::Vacant(ref v) => f.debug_tuple("EntryRef").field(v).finish(),
+ EntryRef::Occupied(ref o) => f.debug_tuple("EntryRef").field(o).finish(),
+ }
+ }
+}
+
+enum KeyOrRef<'a, K, Q: ?Sized> {
+ Borrowed(&'a Q),
+ Owned(K),
+}
+
+impl<'a, K, Q: ?Sized> KeyOrRef<'a, K, Q> {
+ fn into_owned(self) -> K
+ where
+ K: From<&'a Q>,
+ {
+ match self {
+ Self::Borrowed(borrowed) => borrowed.into(),
+ Self::Owned(owned) => owned,
+ }
+ }
+}
+
+impl<'a, K: Borrow<Q>, Q: ?Sized> AsRef<Q> for KeyOrRef<'a, K, Q> {
+ fn as_ref(&self) -> &Q {
+ match self {
+ Self::Borrowed(borrowed) => borrowed,
+ Self::Owned(owned) => owned.borrow(),
+ }
+ }
+}
+
+/// A view into an occupied entry in a `HashMap`.
+/// It is part of the [`EntryRef`] enum.
+///
+/// [`EntryRef`]: enum.EntryRef.html
+///
+/// # Examples
+///
+/// ```
+/// use rune_alloc::hash_map::{EntryRef, HashMap, OccupiedEntryRef};
+/// use rune_alloc::prelude::*;
+///
+/// let mut map = HashMap::new();
+/// map.try_extend([("a".to_owned(), 10), ("b".into(), 20), ("c".into(), 30)])?;
+///
+/// let key = String::from("a");
+/// let _entry_o: OccupiedEntryRef<_, _, _, _> = map.entry_ref(&key).try_insert(100)?;
+/// assert_eq!(map.len(), 3);
+///
+/// // Existing key (insert and update)
+/// match map.entry_ref("a") {
+/// EntryRef::Vacant(_) => unreachable!(),
+/// EntryRef::Occupied(mut view) => {
+/// assert_eq!(view.get(), &100);
+/// let v = view.get_mut();
+/// *v *= 10;
+/// assert_eq!(view.insert(1111), 1000);
+/// }
+/// }
+///
+/// assert_eq!(map["a"], 1111);
+/// assert_eq!(map.len(), 3);
+///
+/// // Existing key (take)
+/// match map.entry_ref("c") {
+/// EntryRef::Vacant(_) => unreachable!(),
+/// EntryRef::Occupied(view) => {
+/// assert_eq!(view.remove_entry(), ("c".to_owned(), 30));
+/// }
+/// }
+/// assert_eq!(map.get("c"), None);
+/// assert_eq!(map.len(), 2);
+/// # Ok::<_, rune_alloc::Error>(())
+/// ```
+pub struct OccupiedEntryRef<'a, 'b, K, Q: ?Sized, V, S, A: Allocator = Global> {
+ hash: u64,
+ key: Option<KeyOrRef<'b, K, Q>>,
+ elem: Bucket<(K, V)>,
+ table: &'a mut HashMap<K, V, S, A>,
+}
+
+unsafe impl<'a, 'b, K, Q, V, S, A> Send for OccupiedEntryRef<'a, 'b, K, Q, V, S, A>
+where
+ K: Send,
+ Q: Sync + ?Sized,
+ V: Send,
+ S: Send,
+ A: Send + Allocator,
+{
+}
+unsafe impl<'a, 'b, K, Q, V, S, A> Sync for OccupiedEntryRef<'a, 'b, K, Q, V, S, A>
+where
+ K: Sync,
+ Q: Sync + ?Sized,
+ V: Sync,
+ S: Sync,
+ A: Sync + Allocator,
+{
+}
+
+impl<K: Borrow<Q>, Q: ?Sized + Debug, V: Debug, S, A: Allocator> Debug
+ for OccupiedEntryRef<'_, '_, K, Q, V, S, A>
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("OccupiedEntryRef")
+ .field("key", &self.key().borrow()) + .field("value", &self.get()) + .finish() + } +} + +/// A view into a vacant entry in a `HashMap`. +/// It is part of the [`EntryRef`] enum. +/// +/// [`EntryRef`]: enum.EntryRef.html +/// +/// # Examples +/// +/// ``` +/// use rune_alloc::hash_map::{EntryRef, HashMap, VacantEntryRef}; +/// +/// let mut map = HashMap::::new(); +/// +/// let entry_v: VacantEntryRef<_, _, _, _> = match map.entry_ref("a") { +/// EntryRef::Vacant(view) => view, +/// EntryRef::Occupied(_) => unreachable!(), +/// }; +/// entry_v.try_insert(10)?; +/// assert!(map["a"] == 10 && map.len() == 1); +/// +/// // Nonexistent key (insert and update) +/// match map.entry_ref("b") { +/// EntryRef::Occupied(_) => unreachable!(), +/// EntryRef::Vacant(view) => { +/// let value = view.try_insert(2)?; +/// assert_eq!(*value, 2); +/// *value = 20; +/// } +/// } +/// assert!(map["b"] == 20 && map.len() == 2); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +pub struct VacantEntryRef<'a, 'b, K, Q: ?Sized, V, S, A: Allocator = Global> { + hash: u64, + key: KeyOrRef<'b, K, Q>, + table: &'a mut HashMap, +} + +impl, Q: ?Sized + Debug, V, S, A: Allocator> Debug + for VacantEntryRef<'_, '_, K, Q, V, S, A> +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("VacantEntryRef").field(&self.key()).finish() + } +} + +/// The error returned by [`try_insert`](HashMap::try_insert) when the key already exists. +/// +/// Contains the occupied entry, and the value that was not inserted. +/// +/// # Examples +/// +/// ``` +/// use rune_alloc::hash_map::{HashMap, OccupiedError}; +/// use rune_alloc::CustomError; +/// +/// let mut map: HashMap<_, _> = [("a", 10), ("b", 20)].try_into()?; +/// +/// // try_insert method returns mutable reference to the value if keys are vacant, +/// // but if the map did have key present, nothing is updated, and the provided +/// // value is returned inside `Err(_)` variant +/// match map.try_insert_or("a", 100) { +/// Err(CustomError::Custom(OccupiedError { mut entry, value })) => { +/// assert_eq!(entry.key(), &"a"); +/// assert_eq!(value, 100); +/// assert_eq!(entry.insert(100), 10) +/// } +/// _ => unreachable!(), +/// } +/// assert_eq!(map[&"a"], 100); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +pub struct OccupiedError<'a, K, V, S, A: Allocator = Global> { + /// The entry in the map that was already occupied. + pub entry: OccupiedEntry<'a, K, V, S, A>, + /// The value which was not inserted, because the entry was already occupied. + pub value: V, +} + +impl Debug for OccupiedError<'_, K, V, S, A> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("OccupiedError") + .field("key", self.entry.key()) + .field("old_value", self.entry.get()) + .field("new_value", &self.value) + .finish() + } +} + +impl<'a, K: Debug, V: Debug, S, A: Allocator> fmt::Display for OccupiedError<'a, K, V, S, A> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "failed to insert {:?}, key {:?} already exists with value {:?}", + self.value, + self.entry.key(), + self.entry.get(), + ) + } +} + +impl<'a, K, V, S, A: Allocator> IntoIterator for &'a HashMap { + type Item = (&'a K, &'a V); + type IntoIter = Iter<'a, K, V>; + + /// Creates an iterator over the entries of a `HashMap` in arbitrary order. + /// The iterator element type is `(&'a K, &'a V)`. + /// + /// Return the same `Iter` struct as by the [`iter`] method on [`HashMap`]. 
+ ///
+ /// [`iter`]: struct.HashMap.html#method.iter
+ /// [`HashMap`]: struct.HashMap.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ /// let map_one: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].try_into()?;
+ /// let mut map_two = HashMap::new();
+ ///
+ /// for (key, value) in &map_one {
+ /// println!("Key: {}, Value: {}", key, value);
+ /// map_two.try_insert_unique_unchecked(*key, *value)?;
+ /// }
+ ///
+ /// assert_eq!(map_one, map_two);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn into_iter(self) -> Iter<'a, K, V> {
+ self.iter()
+ }
+}
+
+impl<'a, K, V, S, A: Allocator> IntoIterator for &'a mut HashMap<K, V, S, A> {
+ type Item = (&'a K, &'a mut V);
+ type IntoIter = IterMut<'a, K, V>;
+
+ /// Creates an iterator over the entries of a `HashMap` in arbitrary order
+ /// with mutable references to the values. The iterator element type is
+ /// `(&'a K, &'a mut V)`.
+ ///
+ /// Returns the same `IterMut` struct as the [`iter_mut`] method on
+ /// [`HashMap`].
+ ///
+ /// [`iter_mut`]: struct.HashMap.html#method.iter_mut
+ /// [`HashMap`]: struct.HashMap.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ /// let mut map: HashMap<_, _> = [("a", 1), ("b", 2), ("c", 3)].try_into()?;
+ ///
+ /// for (key, value) in &mut map {
+ /// println!("Key: {}, Value: {}", key, value);
+ /// *value *= 2;
+ /// }
+ ///
+ /// let mut vec = map.iter().collect::<Vec<_>>();
+ /// // The `Iter` iterator produces items in arbitrary order, so the
+ /// // items must be sorted to test them against a sorted array.
+ /// vec.sort_unstable();
+ /// assert_eq!(vec, [(&"a", &2), (&"b", &4), (&"c", &6)]);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn into_iter(self) -> IterMut<'a, K, V> {
+ self.iter_mut()
+ }
+}
+
+impl<K, V, S, A: Allocator> IntoIterator for HashMap<K, V, S, A> {
+ type Item = (K, V);
+ type IntoIter = IntoIter<K, V, A>;
+
+ /// Creates a consuming iterator, that is, one that moves each key-value
+ /// pair out of the map in arbitrary order. The map cannot be used after
+ /// calling this.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ ///
+ /// let map: HashMap<_, _> = [("a", 1), ("b", 2), ("c", 3)].try_into()?;
+ ///
+ /// // Not possible with .iter()
+ /// let mut vec: Vec<(&str, i32)> = map.into_iter().collect();
+ /// // The `IntoIter` iterator produces items in arbitrary order, so
+ /// // the items must be sorted to test them against a sorted array.
+ /// vec.sort_unstable();
+ /// assert_eq!(vec, [("a", 1), ("b", 2), ("c", 3)]);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn into_iter(self) -> IntoIter<K, V, A> {
+ IntoIter {
+ inner: self.table.into_iter(),
+ }
+ }
+}
+
+impl<'a, K, V> Iterator for Iter<'a, K, V> {
+ type Item = (&'a K, &'a V);
+
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn next(&mut self) -> Option<(&'a K, &'a V)> {
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.inner.next() {
+ Some(x) => unsafe {
+ let r = x.as_ref();
+ Some((&r.0, &r.1))
+ },
+ None => None,
+ }
+ }
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+impl<K, V> ExactSizeIterator for Iter<'_, K, V> {
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+impl<K, V> FusedIterator for Iter<'_, K, V> {}
+
+impl<'a, K, V> Iterator for IterMut<'a, K, V> {
+ type Item = (&'a K, &'a mut V);
+
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.inner.next() {
+ Some(x) => unsafe {
+ let r = x.as_mut();
+ Some((&r.0, &mut r.1))
+ },
+ None => None,
+ }
+ }
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+impl<K, V> ExactSizeIterator for IterMut<'_, K, V> {
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+impl<K, V> FusedIterator for IterMut<'_, K, V> {}
+
+impl<K, V> fmt::Debug for IterMut<'_, K, V>
+where
+ K: fmt::Debug,
+ V: fmt::Debug,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.iter()).finish()
+ }
+}
+
+impl<K, V, A: Allocator> Iterator for IntoIter<K, V, A> {
+ type Item = (K, V);
+
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn next(&mut self) -> Option<(K, V)> {
+ self.inner.next()
+ }
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+impl<K, V, A: Allocator> ExactSizeIterator for IntoIter<K, V, A> {
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+impl<K, V, A: Allocator> FusedIterator for IntoIter<K, V, A> {}
+
+impl<K: fmt::Debug, V: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<K, V, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.iter()).finish()
+ }
+}
+
+impl<'a, K, V> Iterator for Keys<'a, K, V> {
+ type Item = &'a K;
+
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn next(&mut self) -> Option<&'a K> {
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.inner.next() {
+ Some((k, _)) => Some(k),
+ None => None,
+ }
+ }
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+impl<K, V> ExactSizeIterator for Keys<'_, K, V> {
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+impl<K, V> FusedIterator for Keys<'_, K, V> {}
+
+impl<'a, K, V> Iterator for Values<'a, K, V> {
+ type Item = &'a V;
+
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn next(&mut self) -> Option<&'a V> {
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.inner.next() {
+ Some((_, v)) => Some(v),
+ None => None,
+ }
+ }
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+impl<K, V> ExactSizeIterator for Values<'_, K, V> {
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+impl<K, V> FusedIterator for Values<'_, K, V> {}
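The iterator impls above (`Iter`, `IterMut`, `IntoIter`, `Keys`, `Values`) back the three `IntoIterator` forms, mirroring std. A small usage sketch under those impls (illustrative only, not part of this diff):

```rust
use rune_alloc::HashMap;

fn main() -> Result<(), rune_alloc::Error> {
    let mut map: HashMap<&str, u32> = [("a", 1), ("b", 2)].try_into()?;

    // `&map` yields `(&K, &V)` in arbitrary order.
    for (key, value) in &map {
        println!("{key}: {value}");
    }

    // `&mut map` yields `(&K, &mut V)`.
    for (_, value) in &mut map {
        *value *= 10;
    }

    // `map` by value yields owned `(K, V)` pairs and consumes the map.
    let mut pairs: Vec<(&str, u32)> = map.into_iter().collect();
    pairs.sort_unstable();
    assert_eq!(pairs, [("a", 10), ("b", 20)]);
    Ok(())
}
```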
+
+impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
+ type Item = &'a mut V;
+
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn next(&mut self) -> Option<&'a mut V> {
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.inner.next() {
+ Some((_, v)) => Some(v),
+ None => None,
+ }
+ }
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V> {
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+impl<K, V> FusedIterator for ValuesMut<'_, K, V> {}
+
+impl<K, V: fmt::Debug> fmt::Debug for ValuesMut<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list()
+ .entries(self.inner.iter().map(|(_, val)| val))
+ .finish()
+ }
+}
+
+impl<'a, K, V, A: Allocator> Iterator for Drain<'a, K, V, A> {
+ type Item = (K, V);
+
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn next(&mut self) -> Option<(K, V)> {
+ self.inner.next()
+ }
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+impl<K, V, A: Allocator> ExactSizeIterator for Drain<'_, K, V, A> {
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+impl<K, V, A: Allocator> FusedIterator for Drain<'_, K, V, A> {}
+
+impl<K, V, A> fmt::Debug for Drain<'_, K, V, A>
+where
+ K: fmt::Debug,
+ V: fmt::Debug,
+ A: Allocator,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.iter()).finish()
+ }
+}
+
+impl<'a, K, V, S, A: Allocator> Entry<'a, K, V, S, A> {
+ /// Sets the value of the entry, and returns an OccupiedEntry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ ///
+ /// let mut map: HashMap<&str, u32> = HashMap::new();
+ /// let entry = map.entry("horseyland").try_insert(37)?;
+ ///
+ /// assert_eq!(entry.key(), &"horseyland");
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn try_insert(self, value: V) -> Result<OccupiedEntry<'a, K, V, S, A>, Error>
+ where
+ K: Hash,
+ S: BuildHasher,
+ {
+ match self {
+ Entry::Occupied(mut entry) => {
+ entry.insert(value);
+ Ok(entry)
+ }
+ Entry::Vacant(entry) => entry.try_insert_entry(value),
+ }
+ }
+
+ #[cfg(test)]
+ pub(crate) fn insert(self, value: V) -> OccupiedEntry<'a, K, V, S, A>
+ where
+ K: Hash,
+ S: BuildHasher,
+ {
+ self.try_insert(value).abort()
+ }
+
+ /// Ensures a value is in the entry by inserting the default if empty, and returns
+ /// a mutable reference to the value in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ ///
+ /// let mut map: HashMap<&str, u32> = HashMap::new();
+ ///
+ /// // nonexistent key
+ /// map.entry("poneyland").or_try_insert(3)?;
+ /// assert_eq!(map["poneyland"], 3);
+ ///
+ /// // existing key
+ /// *map.entry("poneyland").or_try_insert(10)? *= 2;
+ /// assert_eq!(map["poneyland"], 6);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn or_try_insert(self, default: V) -> Result<&'a mut V, Error>
+ where
+ K: Hash,
+ S: BuildHasher,
+ {
+ match self {
+ Entry::Occupied(entry) => Ok(entry.into_mut()),
+ Entry::Vacant(entry) => entry.try_insert(default),
+ }
+ }
+
+ #[cfg(test)]
+ pub(crate) fn or_insert(self, default: V) -> &'a mut V
+ where
+ K: Hash,
+ S: BuildHasher,
+ {
+ self.or_try_insert(default).abort()
+ }
+
+ /// Ensures a value is in the entry by inserting the result of the default function if empty,
+ /// and returns a mutable reference to the value in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ ///
+ /// let mut map: HashMap<&str, u32> = HashMap::new();
+ ///
+ /// // nonexistent key
+ /// map.entry("poneyland").or_try_insert_with(|| 3)?;
+ /// assert_eq!(map["poneyland"], 3);
+ ///
+ /// // existing key
+ /// *map.entry("poneyland").or_try_insert_with(|| 10)? *= 2;
+ /// assert_eq!(map["poneyland"], 6);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn or_try_insert_with<F>(self, default: F) -> Result<&'a mut V, Error>
+ where
+ K: Hash,
+ S: BuildHasher,
+ F: FnOnce() -> V,
+ {
+ match self {
+ Entry::Occupied(entry) => Ok(entry.into_mut()),
+ Entry::Vacant(entry) => entry.try_insert(default()),
+ }
+ }
+
+ /// Ensures a value is in the entry by inserting, if empty, the result of
+ /// the default function. This method allows for generating key-derived
+ /// values for insertion by providing the default function a reference to
+ /// the key that was moved during the `.entry(key)` method call.
+ ///
+ /// The reference to the moved key is provided so that cloning or copying
+ /// the key is unnecessary, unlike with `.or_try_insert_with(|| ... )`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ ///
+ /// let mut map: HashMap<&str, usize> = HashMap::new();
+ ///
+ /// // nonexistent key
+ /// map.entry("poneyland").or_try_insert_with_key(|key| key.chars().count())?;
+ /// assert_eq!(map["poneyland"], 9);
+ ///
+ /// // existing key
+ /// *map.entry("poneyland").or_try_insert_with_key(|key| key.chars().count() * 10)? *= 2;
+ /// assert_eq!(map["poneyland"], 18);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn or_try_insert_with_key<F>(self, default: F) -> Result<&'a mut V, Error>
+ where
+ K: Hash,
+ S: BuildHasher,
+ F: FnOnce(&K) -> V,
+ {
+ match self {
+ Entry::Occupied(entry) => Ok(entry.into_mut()),
+ Entry::Vacant(entry) => {
+ let value = default(entry.key());
+ entry.try_insert(value)
+ }
+ }
+ }
+
+ /// Returns a reference to this entry's key.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ ///
+ /// let mut map: HashMap<&str, u32> = HashMap::new();
+ /// map.entry("poneyland").or_try_insert(3)?;
+ /// // existing key
+ /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
+ /// // nonexistent key
+ /// assert_eq!(map.entry("horseland").key(), &"horseland");
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn key(&self) -> &K {
+ match *self {
+ Entry::Occupied(ref entry) => entry.key(),
+ Entry::Vacant(ref entry) => entry.key(),
+ }
+ }
+
+ /// Provides in-place mutable access to an occupied entry before any
+ /// potential inserts into the map.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ ///
+ /// let mut map: HashMap<&str, u32> = HashMap::new();
+ ///
+ /// map.entry("poneyland")
+ /// .and_modify(|e| { *e += 1 })
+ /// .or_try_insert(42)?;
+ /// assert_eq!(map["poneyland"], 42);
+ ///
+ /// map.entry("poneyland")
+ /// .and_modify(|e| { *e += 1 })
+ /// .or_try_insert(42)?;
+ /// assert_eq!(map["poneyland"], 43);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn and_modify<F>(self, f: F) -> Self
+ where
+ F: FnOnce(&mut V),
+ {
+ match self {
+ Entry::Occupied(mut entry) => {
+ f(entry.get_mut());
+ Entry::Occupied(entry)
+ }
+ Entry::Vacant(entry) => Entry::Vacant(entry),
+ }
+ }
+
+ /// Provides shared access to the key and owned access to the value of
+ /// an occupied entry and allows replacing or removing it based on the
+ /// value of the returned option.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ /// use rune_alloc::hash_map::Entry;
+ ///
+ /// let mut map: HashMap<&str, u32> = HashMap::new();
+ ///
+ /// let entry = map
+ /// .entry("poneyland")
+ /// .and_replace_entry_with(|_k, _v| panic!());
+ ///
+ /// match entry {
+ /// Entry::Vacant(e) => {
+ /// assert_eq!(e.key(), &"poneyland");
+ /// }
+ /// Entry::Occupied(_) => panic!(),
+ /// }
+ ///
+ /// map.try_insert("poneyland", 42)?;
+ ///
+ /// let entry = map
+ /// .entry("poneyland")
+ /// .and_replace_entry_with(|k, v| {
+ /// assert_eq!(k, &"poneyland");
+ /// assert_eq!(v, 42);
+ /// Some(v + 1)
+ /// });
+ ///
+ /// match entry {
+ /// Entry::Occupied(e) => {
+ /// assert_eq!(e.key(), &"poneyland");
+ /// assert_eq!(e.get(), &43);
+ /// }
+ /// Entry::Vacant(_) => panic!(),
+ /// }
+ ///
+ /// assert_eq!(map["poneyland"], 43);
+ ///
+ /// let entry = map
+ /// .entry("poneyland")
+ /// .and_replace_entry_with(|_k, _v| None);
+ ///
+ /// match entry {
+ /// Entry::Vacant(e) => assert_eq!(e.key(), &"poneyland"),
+ /// Entry::Occupied(_) => panic!(),
+ /// }
+ ///
+ /// assert!(!map.contains_key("poneyland"));
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn and_replace_entry_with<F>(self, f: F) -> Self
+ where
+ F: FnOnce(&K, V) -> Option<V>,
+ {
+ match self {
+ Entry::Occupied(entry) => entry.replace_entry_with(f),
+ Entry::Vacant(_) => self,
+ }
+ }
+}
+
+impl<'a, K, V: Default, S, A: Allocator> Entry<'a, K, V, S, A> {
+ /// Ensures a value is in the entry by inserting the default value if empty,
+ /// and returns a mutable reference to the value in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ ///
+ /// let mut map: HashMap<&str, Option<u32>> = HashMap::new();
+ ///
+ /// // nonexistent key
+ /// map.entry("poneyland").or_try_default()?;
+ /// assert_eq!(map["poneyland"], None);
+ ///
+ /// map.try_insert("horseland", Some(3))?;
+ ///
+ /// // existing key
+ /// assert_eq!(map.entry("horseland").or_try_default()?, &mut Some(3));
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn or_try_default(self) -> Result<&'a mut V, Error>
+ where
+ K: Hash,
+ S: BuildHasher,
+ {
+ match self {
+ Entry::Occupied(entry) => Ok(entry.into_mut()),
+ Entry::Vacant(entry) => entry.try_insert(Default::default()),
+ }
+ }
+}
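`or_try_insert_with_key` is what makes key-derived defaults cheap: the closure borrows the already-moved key, so nothing is cloned. A sketch of a small memo cache on top of it (illustrative only, not part of this diff; `cached_len` is a hypothetical helper):

```rust
use rune_alloc::{Error, HashMap};

/// Caches character counts; the default closure borrows the moved key,
/// so the caller never has to clone the `String`.
fn cached_len(cache: &mut HashMap<String, usize>, key: String) -> Result<usize, Error> {
    let len = cache.entry(key).or_try_insert_with_key(|k| k.chars().count())?;
    Ok(*len)
}
```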
+
+impl<'a, K, V, S, A: Allocator> OccupiedEntry<'a, K, V, S, A> {
+ /// Gets a reference to the key in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ /// use rune_alloc::hash_map::Entry;
+ ///
+ /// let mut map: HashMap<&str, u32> = HashMap::new();
+ /// map.entry("poneyland").or_try_insert(12)?;
+ ///
+ /// match map.entry("poneyland") {
+ /// Entry::Vacant(_) => panic!(),
+ /// Entry::Occupied(entry) => assert_eq!(entry.key(), &"poneyland"),
+ /// }
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn key(&self) -> &K {
+ unsafe { &self.elem.as_ref().0 }
+ }
+
+ /// Takes ownership of the key and value from the map.
+ /// Keeps the allocated memory for reuse.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ /// use rune_alloc::hash_map::Entry;
+ ///
+ /// let mut map: HashMap<&str, u32> = HashMap::new();
+ /// // The map is empty
+ /// assert!(map.is_empty() && map.capacity() == 0);
+ ///
+ /// map.entry("poneyland").or_try_insert(12)?;
+ ///
+ /// if let Entry::Occupied(o) = map.entry("poneyland") {
+ /// // We delete the entry from the map.
+ /// assert_eq!(o.remove_entry(), ("poneyland", 12));
+ /// }
+ ///
+ /// assert_eq!(map.contains_key("poneyland"), false);
+ /// // Now the map holds no elements
+ /// assert!(map.is_empty());
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn remove_entry(self) -> (K, V) {
+ unsafe { self.table.table.remove(self.elem).0 }
+ }
+
+ /// Gets a reference to the value in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ /// use rune_alloc::hash_map::Entry;
+ ///
+ /// let mut map: HashMap<&str, u32> = HashMap::new();
+ /// map.entry("poneyland").or_try_insert(12)?;
+ ///
+ /// match map.entry("poneyland") {
+ /// Entry::Vacant(_) => panic!(),
+ /// Entry::Occupied(entry) => assert_eq!(entry.get(), &12),
+ /// }
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn get(&self) -> &V {
+ unsafe { &self.elem.as_ref().1 }
+ }
+
+ /// Gets a mutable reference to the value in the entry.
+ ///
+ /// If you need a reference to the `OccupiedEntry` which may outlive the
+ /// destruction of the `Entry` value, see [`into_mut`].
+ ///
+ /// [`into_mut`]: #method.into_mut
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ /// use rune_alloc::hash_map::Entry;
+ ///
+ /// let mut map: HashMap<&str, u32> = HashMap::new();
+ /// map.entry("poneyland").or_try_insert(12)?;
+ ///
+ /// assert_eq!(map["poneyland"], 12);
+ /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
+ /// *o.get_mut() += 10;
+ /// assert_eq!(*o.get(), 22);
+ ///
+ /// // We can use the same Entry multiple times.
+ /// *o.get_mut() += 2;
+ /// }
+ ///
+ /// assert_eq!(map["poneyland"], 24);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn get_mut(&mut self) -> &mut V {
+ unsafe { &mut self.elem.as_mut().1 }
+ }
+
+ /// Converts the OccupiedEntry into a mutable reference to the value in the entry
+ /// with a lifetime bound to the map itself.
+ ///
+ /// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
+ ///
+ /// [`get_mut`]: #method.get_mut
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ /// use rune_alloc::hash_map::Entry;
+ ///
+ /// let mut map: HashMap<&str, u32> = HashMap::new();
+ /// map.entry("poneyland").or_try_insert(12)?;
+ ///
+ /// assert_eq!(map["poneyland"], 12);
+ ///
+ /// let value: &mut u32;
+ /// match map.entry("poneyland") {
+ /// Entry::Occupied(entry) => value = entry.into_mut(),
+ /// Entry::Vacant(_) => panic!(),
+ /// }
+ /// *value += 10;
+ ///
+ /// assert_eq!(map["poneyland"], 22);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn into_mut(self) -> &'a mut V {
+ unsafe { &mut self.elem.as_mut().1 }
+ }
+
+ /// Sets the value of the entry, and returns the entry's old value.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ /// use rune_alloc::hash_map::Entry;
+ ///
+ /// let mut map: HashMap<&str, u32> = HashMap::new();
+ /// map.entry("poneyland").or_try_insert(12)?;
+ ///
+ /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
+ /// assert_eq!(o.insert(15), 12);
+ /// }
+ ///
+ /// assert_eq!(map["poneyland"], 15);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn insert(&mut self, value: V) -> V {
+ mem::replace(self.get_mut(), value)
+ }
+
+ /// Takes the value out of the entry, and returns it.
+ /// Keeps the allocated memory for reuse.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ /// use rune_alloc::hash_map::Entry;
+ ///
+ /// let mut map: HashMap<&str, u32> = HashMap::new();
+ /// // The map is empty
+ /// assert!(map.is_empty() && map.capacity() == 0);
+ ///
+ /// map.entry("poneyland").or_try_insert(12)?;
+ ///
+ /// if let Entry::Occupied(o) = map.entry("poneyland") {
+ /// assert_eq!(o.remove(), 12);
+ /// }
+ ///
+ /// assert_eq!(map.contains_key("poneyland"), false);
+ /// // Now the map holds no elements
+ /// assert!(map.is_empty());
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn remove(self) -> V {
+ self.remove_entry().1
+ }
+
+ /// Replaces the entry, returning the old key and value. The new key in the hash map will be
+ /// the key used to create this entry.
+ ///
+ /// # Panics
+ ///
+ /// Will panic if this OccupiedEntry was created through [`Entry::try_insert`].
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::HashMap;
+ /// use rune_alloc::hash_map::Entry;
+ /// use std::rc::Rc;
+ ///
+ /// let mut map: HashMap<Rc<String>, u32> = HashMap::new();
+ /// let key_one = Rc::new("Stringthing".to_string());
+ /// let key_two = Rc::new("Stringthing".to_string());
+ ///
+ /// map.try_insert(key_one.clone(), 15)?;
+ /// assert!(Rc::strong_count(&key_one) == 2 && Rc::strong_count(&key_two) == 1);
+ ///
+ /// match map.entry(key_two.clone()) {
+ /// Entry::Occupied(entry) => {
+ /// let (old_key, old_value): (Rc<String>, u32) = entry.replace_entry(16);
+ /// assert!(Rc::ptr_eq(&key_one, &old_key) && old_value == 15);
+ /// }
+ /// Entry::Vacant(_) => panic!(),
+ /// }
+ ///
+ /// assert!(Rc::strong_count(&key_one) == 1 && Rc::strong_count(&key_two) == 2);
+ /// assert_eq!(map[&"Stringthing".to_owned()], 16);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn replace_entry(self, value: V) -> (K, V) {
+ let entry = unsafe { self.elem.as_mut() };
+
+ let old_key = mem::replace(&mut entry.0, self.key.unwrap());
+ let old_value = mem::replace(&mut entry.1, value);
+
+ (old_key, old_value)
+ }
+
+ /// Replaces the key in the hash map with the key used to create this entry.
+ ///
+ /// # Panics
+ ///
+ /// Will panic if this OccupiedEntry was created through [`Entry::try_insert`].
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::hash_map::{Entry, HashMap};
+ /// use std::rc::Rc;
+ ///
+ /// let mut map: HashMap<Rc<String>, usize> = HashMap::try_with_capacity(6)?;
+ /// let mut keys_one: Vec<Rc<String>> = Vec::with_capacity(6);
+ /// let mut keys_two: Vec<Rc<String>> = Vec::with_capacity(6);
+ ///
+ /// for (value, key) in ["a", "b", "c", "d", "e", "f"].into_iter().enumerate() {
+ /// let rc_key = Rc::new(key.to_owned());
+ /// keys_one.push(rc_key.clone());
+ /// map.try_insert(rc_key.clone(), value)?;
+ /// keys_two.push(Rc::new(key.to_owned()));
+ /// }
+ ///
+ /// assert!(
+ /// keys_one.iter().all(|key| Rc::strong_count(key) == 2)
+ /// && keys_two.iter().all(|key| Rc::strong_count(key) == 1)
+ /// );
+ ///
+ /// reclaim_memory(&mut map, &keys_two);
+ ///
+ /// assert!(
+ /// keys_one.iter().all(|key| Rc::strong_count(key) == 1)
+ /// && keys_two.iter().all(|key| Rc::strong_count(key) == 2)
+ /// );
+ ///
+ /// fn reclaim_memory(map: &mut HashMap<Rc<String>, usize>, keys: &[Rc<String>]) {
+ /// for key in keys {
+ /// if let Entry::Occupied(entry) = map.entry(key.clone()) {
+ /// // Replaces the entry's key with our version of it in `keys`.
+ /// entry.replace_key();
+ /// }
+ /// }
+ /// }
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[cfg_attr(feature = "inline-more", inline)]
+ pub fn replace_key(self) -> K {
+ let entry = unsafe { self.elem.as_mut() };
+ mem::replace(&mut entry.0, self.key.unwrap())
+ }
+
+ /// Provides shared access to the key and owned access to the value of
+ /// the entry and allows replacing or removing it based on the
+ /// value of the returned option.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashMap;
+    /// use rune_alloc::hash_map::Entry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.try_insert("poneyland", 42)?;
+    ///
+    /// let entry = match map.entry("poneyland") {
+    ///     Entry::Occupied(e) => {
+    ///         e.replace_entry_with(|k, v| {
+    ///             assert_eq!(k, &"poneyland");
+    ///             assert_eq!(v, 42);
+    ///             Some(v + 1)
+    ///         })
+    ///     }
+    ///     Entry::Vacant(_) => panic!(),
+    /// };
+    ///
+    /// match entry {
+    ///     Entry::Occupied(e) => {
+    ///         assert_eq!(e.key(), &"poneyland");
+    ///         assert_eq!(e.get(), &43);
+    ///     }
+    ///     Entry::Vacant(_) => panic!(),
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 43);
+    ///
+    /// let entry = match map.entry("poneyland") {
+    ///     Entry::Occupied(e) => e.replace_entry_with(|_k, _v| None),
+    ///     Entry::Vacant(_) => panic!(),
+    /// };
+    ///
+    /// match entry {
+    ///     Entry::Vacant(e) => {
+    ///         assert_eq!(e.key(), &"poneyland");
+    ///     }
+    ///     Entry::Occupied(_) => panic!(),
+    /// }
+    ///
+    /// assert!(!map.contains_key("poneyland"));
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn replace_entry_with<F>(self, f: F) -> Entry<'a, K, V, S, A>
+    where
+        F: FnOnce(&K, V) -> Option<V>,
+    {
+        unsafe {
+            let mut spare_key = None;
+
+            self.table
+                .table
+                .replace_bucket_with(self.elem.clone(), |(key, value)| {
+                    if let Some(new_value) = f(&key, value) {
+                        Some((key, new_value))
+                    } else {
+                        spare_key = Some(key);
+                        None
+                    }
+                });
+
+            if let Some(key) = spare_key {
+                Entry::Vacant(VacantEntry {
+                    hash: self.hash,
+                    key,
+                    table: self.table,
+                })
+            } else {
+                Entry::Occupied(self)
+            }
+        }
+    }
+}
+
+impl<'a, K, V, S, A: Allocator> VacantEntry<'a, K, V, S, A> {
+    /// Gets a reference to the key that would be used when inserting a value
+    /// through the `VacantEntry`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn key(&self) -> &K {
+        &self.key
+    }
+
+    /// Take ownership of the key.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::hash_map::{Entry, HashMap};
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// match map.entry("poneyland") {
+    ///     Entry::Occupied(_) => panic!(),
+    ///     Entry::Vacant(v) => assert_eq!(v.into_key(), "poneyland"),
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn into_key(self) -> K {
+        self.key
+    }
+
+    /// Sets the value of the entry with the VacantEntry's key,
+    /// and returns a mutable reference to it.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashMap;
+    /// use rune_alloc::hash_map::Entry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// if let Entry::Vacant(o) = map.entry("poneyland") {
+    ///     o.try_insert(37)?;
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 37);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn try_insert(self, value: V) -> Result<&'a mut V, Error>
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        let table = &mut self.table.table;
+        let hasher = make_hasher::<K, V, S>(&self.table.hash_builder);
+        let entry = into_ok_try(table.insert_entry(
+            &mut (),
+            self.hash,
+            (self.key, value),
+            hasher.into_tuple(),
+        ))?;
+        Ok(&mut entry.1)
+    }
+
+    #[cfg(test)]
+    pub(crate) fn insert(self, value: V) -> &'a mut V
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        self.try_insert(value).abort()
+    }
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub(crate) fn try_insert_entry(self, value: V) -> Result<OccupiedEntry<'a, K, V, S, A>, Error>
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        let hasher = make_hasher::<K, V, S>(&self.table.hash_builder);
+
+        let elem = into_ok_try(self.table.table.insert(
+            &mut (),
+            self.hash,
+            (self.key, value),
+            hasher.into_tuple(),
+        ))?;
+
+        Ok(OccupiedEntry {
+            hash: self.hash,
+            key: None,
+            elem,
+            table: self.table,
+        })
+    }
+}
+
+impl<'a, 'b, K, Q: ?Sized, V, S, A: Allocator> EntryRef<'a, 'b, K, Q, V, S, A> {
+    /// Sets the value of the entry, and returns an OccupiedEntryRef.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashMap;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// let entry = map.entry_ref("horseyland").try_insert(37)?;
+    ///
+    /// assert_eq!(entry.key(), "horseyland");
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn try_insert(self, value: V) -> Result<OccupiedEntryRef<'a, 'b, K, Q, V, S, A>, Error>
+    where
+        K: Hash + From<&'b Q>,
+        S: BuildHasher,
+    {
+        match self {
+            EntryRef::Occupied(mut entry) => {
+                entry.insert(value);
+                Ok(entry)
+            }
+            EntryRef::Vacant(entry) => entry.try_insert_entry(value),
+        }
+    }
+
+    #[cfg(test)]
+    pub(crate) fn insert(self, value: V) -> OccupiedEntryRef<'a, 'b, K, Q, V, S, A>
+    where
+        K: Hash + From<&'b Q>,
+        S: BuildHasher,
+    {
+        self.try_insert(value).abort()
+    }
+
+    /// Ensures a value is in the entry by inserting the default if empty, and returns
+    /// a mutable reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashMap;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    ///
+    /// // nonexistent key
+    /// map.entry_ref("poneyland").or_try_insert(3)?;
+    /// assert_eq!(map["poneyland"], 3);
+    ///
+    /// // existing key
+    /// *map.entry_ref("poneyland").or_try_insert(10)? *= 2;
+    /// assert_eq!(map["poneyland"], 6);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_try_insert(self, default: V) -> Result<&'a mut V, Error>
+    where
+        K: Hash + From<&'b Q>,
+        S: BuildHasher,
+    {
+        match self {
+            EntryRef::Occupied(entry) => Ok(entry.into_mut()),
+            EntryRef::Vacant(entry) => entry.try_insert(default),
+        }
+    }
+
+    #[cfg(test)]
+    pub(crate) fn or_insert(self, default: V) -> &'a mut V
+    where
+        K: Hash + From<&'b Q>,
+        S: BuildHasher,
+    {
+        self.or_try_insert(default).abort()
+    }
+
+    /// Ensures a value is in the entry by inserting the result of the default function if empty,
+    /// and returns a mutable reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashMap;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    ///
+    /// // nonexistent key
+    /// map.entry_ref("poneyland").or_try_insert_with(|| 3)?;
+    /// assert_eq!(map["poneyland"], 3);
+    ///
+    /// // existing key
+    /// *map.entry_ref("poneyland").or_try_insert_with(|| 10)? *= 2;
+    /// assert_eq!(map["poneyland"], 6);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_try_insert_with<F: FnOnce() -> V>(self, default: F) -> Result<&'a mut V, Error>
+    where
+        K: Hash + From<&'b Q>,
+        S: BuildHasher,
+    {
+        match self {
+            EntryRef::Occupied(entry) => Ok(entry.into_mut()),
+            EntryRef::Vacant(entry) => entry.try_insert(default()),
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting, if empty, the result of the default function.
+    /// This method allows for generating key-derived values for insertion by providing the default
+    /// function access to the borrowed form of the key.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashMap;
+    ///
+    /// let mut map: HashMap<String, usize> = HashMap::new();
+    ///
+    /// // nonexistent key
+    /// map.entry_ref("poneyland").or_try_insert_with_key(|key| key.chars().count())?;
+    /// assert_eq!(map["poneyland"], 9);
+    ///
+    /// // existing key
+    /// *map.entry_ref("poneyland").or_try_insert_with_key(|key| key.chars().count() * 10)? *= 2;
+    /// assert_eq!(map["poneyland"], 18);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_try_insert_with_key<F: FnOnce(&Q) -> V>(self, default: F) -> Result<&'a mut V, Error>
+    where
+        K: Hash + Borrow<Q> + From<&'b Q>,
+        S: BuildHasher,
+    {
+        match self {
+            EntryRef::Occupied(entry) => Ok(entry.into_mut()),
+            EntryRef::Vacant(entry) => {
+                let value = default(entry.key.as_ref());
+                entry.try_insert(value)
+            }
+        }
+    }
+
+    /// Returns a reference to this entry's key.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashMap;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// map.entry_ref("poneyland").or_try_insert(3)?;
+    /// // existing key
+    /// assert_eq!(map.entry_ref("poneyland").key(), "poneyland");
+    /// // nonexistent key
+    /// assert_eq!(map.entry_ref("horseland").key(), "horseland");
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn key(&self) -> &Q
+    where
+        K: Borrow<Q>,
+    {
+        match *self {
+            EntryRef::Occupied(ref entry) => entry.key().borrow(),
+            EntryRef::Vacant(ref entry) => entry.key(),
+        }
+    }
+
+    /// Provides in-place mutable access to an occupied entry before any
+    /// potential inserts into the map.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashMap;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    ///
+    /// map.entry_ref("poneyland")
+    ///     .and_modify(|e| { *e += 1 })
+    ///     .or_try_insert(42)?;
+    /// assert_eq!(map["poneyland"], 42);
+    ///
+    /// map.entry_ref("poneyland")
+    ///     .and_modify(|e| { *e += 1 })
+    ///     .or_try_insert(42)?;
+    /// assert_eq!(map["poneyland"], 43);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn and_modify<F>(self, f: F) -> Self
+    where
+        F: FnOnce(&mut V),
+    {
+        match self {
+            EntryRef::Occupied(mut entry) => {
+                f(entry.get_mut());
+                EntryRef::Occupied(entry)
+            }
+            EntryRef::Vacant(entry) => EntryRef::Vacant(entry),
+        }
+    }
+
+    /// Provides shared access to the key and owned access to the value of
+    /// an occupied entry and allows replacing or removing it based on the
+    /// value of the returned option.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashMap;
+    /// use rune_alloc::hash_map::EntryRef;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    ///
+    /// let entry = map
+    ///     .entry_ref("poneyland")
+    ///     .and_replace_entry_with(|_k, _v| panic!());
+    ///
+    /// match entry {
+    ///     EntryRef::Vacant(e) => {
+    ///         assert_eq!(e.key(), "poneyland");
+    ///     }
+    ///     EntryRef::Occupied(_) => panic!(),
+    /// }
+    ///
+    /// map.try_insert("poneyland".to_string(), 42)?;
+    ///
+    /// let entry = map
+    ///     .entry_ref("poneyland")
+    ///     .and_replace_entry_with(|k, v| {
+    ///         assert_eq!(k, "poneyland");
+    ///         assert_eq!(v, 42);
+    ///         Some(v + 1)
+    ///     });
+    ///
+    /// match entry {
+    ///     EntryRef::Occupied(e) => {
+    ///         assert_eq!(e.key(), "poneyland");
+    ///         assert_eq!(e.get(), &43);
+    ///     }
+    ///     EntryRef::Vacant(_) => panic!(),
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 43);
+    ///
+    /// let entry = map
+    ///     .entry_ref("poneyland")
+    ///     .and_replace_entry_with(|_k, _v| None);
+    ///
+    /// match entry {
+    ///     EntryRef::Vacant(e) => assert_eq!(e.key(), "poneyland"),
+    ///     EntryRef::Occupied(_) => panic!(),
+    /// }
+    ///
+    /// assert!(!map.contains_key("poneyland"));
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn and_replace_entry_with<F>(self, f: F) -> Self
+    where
+        F: FnOnce(&K, V) -> Option<V>,
+    {
+        match self {
+            EntryRef::Occupied(entry) => entry.replace_entry_with(f),
+            EntryRef::Vacant(_) => self,
+        }
+    }
+}
+
+impl<'a, 'b, K, Q: ?Sized, V: Default, S, A: Allocator> EntryRef<'a, 'b, K, Q, V, S, A> {
+    /// Ensures a value is in the entry by inserting the default value if empty,
+    /// and returns a mutable reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashMap;
+    ///
+    /// let mut map: HashMap<String, Option<u32>> = HashMap::new();
+    ///
+    /// // nonexistent key
+    /// map.entry_ref("poneyland").or_try_default()?;
+    /// assert_eq!(map["poneyland"], None);
+    ///
+    /// map.try_insert("horseland".to_string(), Some(3))?;
+    ///
+    /// // existing key
+    /// assert_eq!(map.entry_ref("horseland").or_try_default()?, &mut Some(3));
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_try_default(self) -> Result<&'a mut V, Error>
+    where
+        K: Hash + From<&'b Q>,
+        S: BuildHasher,
+    {
+        match self {
+            EntryRef::Occupied(entry) => Ok(entry.into_mut()),
+            EntryRef::Vacant(entry) => entry.try_insert(Default::default()),
+        }
+    }
+}
+
+impl<'a, 'b, K, Q: ?Sized, V, S, A: Allocator> OccupiedEntryRef<'a, 'b, K, Q, V, S, A> {
+    /// Gets a reference to the key in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::hash_map::{EntryRef, HashMap};
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// map.entry_ref("poneyland").or_try_insert(12)?;
+    ///
+    /// match map.entry_ref("poneyland") {
+    ///     EntryRef::Vacant(_) => panic!(),
+    ///     EntryRef::Occupied(entry) => assert_eq!(entry.key(), "poneyland"),
+    /// }
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn key(&self) -> &K {
+        unsafe { &self.elem.as_ref().0 }
+    }
+
+    /// Take the ownership of the key and value from the map.
+    /// Keeps the allocated memory for reuse.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashMap;
+    /// use rune_alloc::hash_map::EntryRef;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// // The map is empty
+    /// assert!(map.is_empty() && map.capacity() == 0);
+    ///
+    /// map.entry_ref("poneyland").or_try_insert(12)?;
+    ///
+    /// if let EntryRef::Occupied(o) = map.entry_ref("poneyland") {
+    ///     // We delete the entry from the map.
+    ///     assert_eq!(o.remove_entry(), ("poneyland".to_owned(), 12));
+    /// }
+    ///
+    /// assert_eq!(map.contains_key("poneyland"), false);
+    /// // Now the map holds no elements but its capacity is equal to the old one
+    /// assert!(map.is_empty());
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove_entry(self) -> (K, V) {
+        unsafe { self.table.table.remove(self.elem).0 }
+    }
+
+    /// Gets a reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashMap;
+    /// use rune_alloc::hash_map::EntryRef;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// map.entry_ref("poneyland").or_try_insert(12)?;
+    ///
+    /// match map.entry_ref("poneyland") {
+    ///     EntryRef::Vacant(_) => panic!(),
+    ///     EntryRef::Occupied(entry) => assert_eq!(entry.get(), &12),
+    /// }
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get(&self) -> &V {
+        unsafe { &self.elem.as_ref().1 }
+    }
+
+    /// Gets a mutable reference to the value in the entry.
+    ///
+    /// If you need a reference to the `OccupiedEntryRef` which may outlive the
+    /// destruction of the `EntryRef` value, see [`into_mut`].
+    ///
+    /// [`into_mut`]: #method.into_mut
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashMap;
+    /// use rune_alloc::hash_map::EntryRef;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// map.entry_ref("poneyland").or_try_insert(12)?;
+    ///
+    /// assert_eq!(map["poneyland"], 12);
+    /// if let EntryRef::Occupied(mut o) = map.entry_ref("poneyland") {
+    ///     *o.get_mut() += 10;
+    ///     assert_eq!(*o.get(), 22);
+    ///
+    ///     // We can use the same Entry multiple times.
+    ///     *o.get_mut() += 2;
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 24);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get_mut(&mut self) -> &mut V {
+        unsafe { &mut self.elem.as_mut().1 }
+    }
+
+    /// Converts the OccupiedEntryRef into a mutable reference to the value in the entry
+    /// with a lifetime bound to the map itself.
+    ///
+    /// If you need multiple references to the `OccupiedEntryRef`, see [`get_mut`].
+    ///
+    /// [`get_mut`]: #method.get_mut
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::hash_map::{EntryRef, HashMap};
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// map.entry_ref("poneyland").or_try_insert(12)?;
+    ///
+    /// let value: &mut u32;
+    /// match map.entry_ref("poneyland") {
+    ///     EntryRef::Occupied(entry) => value = entry.into_mut(),
+    ///     EntryRef::Vacant(_) => panic!(),
+    /// }
+    /// *value += 10;
+    ///
+    /// assert_eq!(map["poneyland"], 22);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn into_mut(self) -> &'a mut V {
+        unsafe { &mut self.elem.as_mut().1 }
+    }
+
+    /// Sets the value of the entry, and returns the entry's old value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashMap;
+    /// use rune_alloc::hash_map::EntryRef;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// map.entry_ref("poneyland").or_try_insert(12)?;
+    ///
+    /// if let EntryRef::Occupied(mut o) = map.entry_ref("poneyland") {
+    ///     assert_eq!(o.insert(15), 12);
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 15);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(&mut self, value: V) -> V {
+        mem::replace(self.get_mut(), value)
+    }
+
+    /// Takes the value out of the entry, and returns it.
+    /// Keeps the allocated memory for reuse.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashMap;
+    /// use rune_alloc::hash_map::EntryRef;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// // The map is empty
+    /// assert!(map.is_empty() && map.capacity() == 0);
+    ///
+    /// map.entry_ref("poneyland").or_try_insert(12)?;
+    ///
+    /// if let EntryRef::Occupied(o) = map.entry_ref("poneyland") {
+    ///     assert_eq!(o.remove(), 12);
+    /// }
+    ///
+    /// assert_eq!(map.contains_key("poneyland"), false);
+    /// // Now the map holds no elements but its capacity is equal to the old one
+    /// assert!(map.is_empty());
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove(self) -> V {
+        self.remove_entry().1
+    }
+
+    /// Replaces the entry, returning the old key and value. The new key in the hash map will be
+    /// the key used to create this entry.
+    ///
+    /// # Panics
+    ///
+    /// Will panic if this OccupiedEntryRef was created through [`EntryRef::try_insert`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::hash_map::{EntryRef, HashMap};
+    /// use std::rc::Rc;
+    ///
+    /// let mut map: HashMap<Rc<str>, u32> = HashMap::new();
+    /// let key: Rc<str> = Rc::from("Stringthing");
+    ///
+    /// map.try_insert(key.clone(), 15)?;
+    /// assert_eq!(Rc::strong_count(&key), 2);
+    ///
+    /// match map.entry_ref("Stringthing") {
+    ///     EntryRef::Occupied(entry) => {
+    ///         let (old_key, old_value): (Rc<str>, u32) = entry.replace_entry(16);
+    ///         assert!(Rc::ptr_eq(&key, &old_key) && old_value == 15);
+    ///     }
+    ///     EntryRef::Vacant(_) => panic!(),
+    /// }
+    ///
+    /// assert_eq!(Rc::strong_count(&key), 1);
+    /// assert_eq!(map["Stringthing"], 16);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn replace_entry(self, value: V) -> (K, V)
+    where
+        K: From<&'b Q>,
+    {
+        let entry = unsafe { self.elem.as_mut() };
+
+        let old_key = mem::replace(&mut entry.0, self.key.unwrap().into_owned());
+        let old_value = mem::replace(&mut entry.1, value);
+
+        (old_key, old_value)
+    }
+
+    /// Replaces the key in the hash map with the key used to create this entry.
+    ///
+    /// # Panics
+    ///
+    /// Will panic if this OccupiedEntryRef was created through
+    /// [`EntryRef::try_insert`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::hash_map::{EntryRef, HashMap};
+    /// use std::rc::Rc;
+    ///
+    /// let mut map: HashMap<Rc<str>, usize> = HashMap::try_with_capacity(6)?;
+    /// let mut keys: Vec<Rc<str>> = Vec::with_capacity(6);
+    ///
+    /// for (value, key) in ["a", "b", "c", "d", "e", "f"].into_iter().enumerate() {
+    ///     let rc_key: Rc<str> = Rc::from(key);
+    ///     keys.push(rc_key.clone());
+    ///     map.try_insert(rc_key.clone(), value)?;
+    /// }
+    ///
+    /// assert!(keys.iter().all(|key| Rc::strong_count(key) == 2));
+    ///
+    /// // It doesn't matter that we use a vector with the same keys,
+    /// // because all keys will be newly created from the references
+    /// reclaim_memory(&mut map, &keys);
+    ///
+    /// assert!(keys.iter().all(|key| Rc::strong_count(key) == 1));
+    ///
+    /// fn reclaim_memory(map: &mut HashMap<Rc<str>, usize>, keys: &[Rc<str>]) {
+    ///     for key in keys {
+    ///         if let EntryRef::Occupied(entry) = map.entry_ref(key.as_ref()) {
+    ///             // Replaces the entry's key with our version of it in `keys`.
+    ///             entry.replace_key();
+    ///         }
+    ///     }
+    /// }
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn replace_key(self) -> K
+    where
+        K: From<&'b Q>,
+    {
+        let entry = unsafe { self.elem.as_mut() };
+        mem::replace(&mut entry.0, self.key.unwrap().into_owned())
+    }
+
+    /// Provides shared access to the key and owned access to the value of
+    /// the entry and allows replacing or removing it based on the
+    /// value of the returned option.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashMap;
+    /// use rune_alloc::hash_map::EntryRef;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// map.try_insert("poneyland".to_string(), 42)?;
+    ///
+    /// let entry = match map.entry_ref("poneyland") {
+    ///     EntryRef::Occupied(e) => {
+    ///         e.replace_entry_with(|k, v| {
+    ///             assert_eq!(k, "poneyland");
+    ///             assert_eq!(v, 42);
+    ///             Some(v + 1)
+    ///         })
+    ///     }
+    ///     EntryRef::Vacant(_) => panic!(),
+    /// };
+    ///
+    /// match entry {
+    ///     EntryRef::Occupied(e) => {
+    ///         assert_eq!(e.key(), "poneyland");
+    ///         assert_eq!(e.get(), &43);
+    ///     }
+    ///     EntryRef::Vacant(_) => panic!(),
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 43);
+    ///
+    /// let entry = match map.entry_ref("poneyland") {
+    ///     EntryRef::Occupied(e) => e.replace_entry_with(|_k, _v| None),
+    ///     EntryRef::Vacant(_) => panic!(),
+    /// };
+    ///
+    /// match entry {
+    ///     EntryRef::Vacant(e) => {
+    ///         assert_eq!(e.key(), "poneyland");
+    ///     }
+    ///     EntryRef::Occupied(_) => panic!(),
+    /// }
+    ///
+    /// assert!(!map.contains_key("poneyland"));
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn replace_entry_with<F>(self, f: F) -> EntryRef<'a, 'b, K, Q, V, S, A>
+    where
+        F: FnOnce(&K, V) -> Option<V>,
+    {
+        unsafe {
+            let mut spare_key = None;
+
+            self.table
+                .table
+                .replace_bucket_with(self.elem.clone(), |(key, value)| {
+                    if let Some(new_value) = f(&key, value) {
+                        Some((key, new_value))
+                    } else {
+                        spare_key = Some(KeyOrRef::Owned(key));
+                        None
+                    }
+                });
+
+            if let Some(key) = spare_key {
+                EntryRef::Vacant(VacantEntryRef {
+                    hash: self.hash,
+                    key,
+                    table: self.table,
+                })
+            } else {
+                EntryRef::Occupied(self)
+            }
+        }
+    }
+}
+
+impl<'a, 'b, K, Q: ?Sized, V, S, A: Allocator> VacantEntryRef<'a, 'b, K, Q, V, S, A> {
+    /// Gets a reference to the key that would be used when inserting a value
+    /// through the `VacantEntryRef`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashMap;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// let key: &str = "poneyland";
+    /// assert_eq!(map.entry_ref(key).key(), "poneyland");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn key(&self) -> &Q
+    where
+        K: Borrow<Q>,
+    {
+        self.key.as_ref()
+    }
+
+    /// Take ownership of the key.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::hash_map::{EntryRef, HashMap};
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// let key: &str = "poneyland";
+    ///
+    /// if let EntryRef::Vacant(v) = map.entry_ref(key) {
+    ///     assert_eq!(v.into_key(), "poneyland");
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn into_key(self) -> K
+    where
+        K: From<&'b Q>,
+    {
+        self.key.into_owned()
+    }
+
+    /// Sets the value of the entry with the VacantEntryRef's key, and returns a
+    /// mutable reference to it.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashMap;
+    /// use rune_alloc::hash_map::EntryRef;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// let key: &str = "poneyland";
+    ///
+    /// if let EntryRef::Vacant(o) = map.entry_ref(key) {
+    ///     o.try_insert(37)?;
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 37);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn try_insert(self, value: V) -> Result<&'a mut V, Error>
+    where
+        K: Hash + From<&'b Q>,
+        S: BuildHasher,
+    {
+        let table = &mut self.table.table;
+        let hasher = make_hasher::<K, V, S>(&self.table.hash_builder);
+
+        let entry = into_ok_try(table.insert_entry(
+            &mut (),
+            self.hash,
+            (self.key.into_owned(), value),
+            hasher.into_tuple(),
+        ))?;
+
+        Ok(&mut entry.1)
+    }
+
+    #[cfg(test)]
+    pub(crate) fn insert(self, value: V) -> &'a mut V
+    where
+        K: Hash + From<&'b Q>,
+        S: BuildHasher,
+    {
+        self.try_insert(value).abort()
+    }
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn try_insert_entry(self, value: V) -> Result<OccupiedEntryRef<'a, 'b, K, Q, V, S, A>, Error>
+    where
+        S: BuildHasher,
+        K: Hash + From<&'b Q>,
+    {
+        let hasher = make_hasher::<K, V, S>(&self.table.hash_builder);
+
+        let elem = into_ok_try(self.table.table.insert(
+            &mut (),
+            self.hash,
+            (self.key.into_owned(), value),
+            hasher.into_tuple(),
+        ))?;
+
+        Ok(OccupiedEntryRef {
+            hash: self.hash,
+            key: None,
+            elem,
+            table: self.table,
+        })
+    }
+}
+
+impl<K, V, S, A: Allocator> TryFromIteratorIn<(K, V), A> for HashMap<K, V, S, A>
+where
+    K: Eq + Hash,
+    S: BuildHasher + Default,
+{
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn try_from_iter_in<T: IntoIterator<Item = (K, V)>>(iter: T, alloc: A) -> Result<Self, Error> {
+        let iter = iter.into_iter();
+
+        let mut map =
+            Self::try_with_capacity_and_hasher_in(iter.size_hint().0, S::default(), alloc)?;
+
+        for (k, v) in iter {
+            map.try_insert(k, v)?;
+        }
+
+        Ok(map)
+    }
+}
+
+#[cfg(test)]
+impl<K, V, S, A: Allocator + Default> FromIterator<(K, V)> for HashMap<K, V, S, A>
+where
+    K: Eq + Hash,
+    S: BuildHasher + Default,
+{
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
+        Self::try_from_iter_in(iter, A::default()).abort()
+    }
+}
+
+/// Inserts all new key-value pairs from the iterator and replaces the values of
+/// existing keys with the new values returned from the iterator.
+impl<K, V, S, A> TryExtend<(K, V)> for HashMap<K, V, S, A>
+where
+    K: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    /// Inserts all new key-value pairs from the iterator into the existing
+    /// `HashMap`, replacing the values of existing keys with the new values
+    /// returned from the iterator.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::{HashMap, Vec};
+    /// use rune_alloc::prelude::*;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.try_insert(1, 100)?;
+    ///
+    /// let some_iter = [(1, 1), (2, 2)].into_iter();
+    /// map.try_extend(some_iter)?;
+    /// // Replace values with existing keys with new values returned from the iterator.
+    /// // So that the map.get(&1) doesn't return Some(&100).
+    /// assert_eq!(map.get(&1), Some(&1));
+    ///
+    /// let some_vec: Vec<_> = rune_alloc::try_vec![(3, 3), (4, 4)];
+    /// map.try_extend(some_vec)?;
+    ///
+    /// let some_arr = [(5, 5), (6, 6)];
+    /// map.try_extend(some_arr)?;
+    /// let old_map_len = map.len();
+    ///
+    /// // You can also extend from another HashMap
+    /// let mut new_map = HashMap::new();
+    /// new_map.try_extend(map)?;
+    /// assert_eq!(new_map.len(), old_map_len);
+    ///
+    /// let mut vec: Vec<_> = new_map.into_iter().try_collect()?;
+    /// // The `IntoIter` iterator produces items in arbitrary order, so the
+    /// // items must be sorted to test them against a sorted array.
+    /// vec.sort_unstable();
+    /// assert_eq!(vec, [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn try_extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) -> Result<(), Error> {
+        // Keys may already be present, or may appear multiple times in the
+        // iterator. Reserve the entire hint lower bound if the map is empty.
+        // Otherwise reserve half the hint (rounded up), so the map
+        // will only resize twice in the worst case.
+        let iter = iter.into_iter();
+
+        let reserve = if self.is_empty() {
+            iter.size_hint().0
+        } else {
+            (iter.size_hint().0 + 1) / 2
+        };
+
+        self.try_reserve(reserve)?;
+
+        for (k, v) in iter {
+            self.try_insert(k, v)?;
+        }
+
+        Ok(())
+    }
+}
+
+#[cfg(test)]
+impl<K, V, S, A> Extend<(K, V)> for HashMap<K, V, S, A>
+where
+    K: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
+        self.try_extend(iter).abort()
+    }
+}
+
+/// Inserts all new key-value pairs from the iterator and replaces the values of
+/// existing keys with the new values returned from the iterator.
+impl<'a, K, V, S, A> TryExtend<(&'a K, &'a V)> for HashMap<K, V, S, A>
+where
+    K: Eq + Hash + Copy,
+    V: Copy,
+    S: BuildHasher,
+    A: Allocator,
+{
+    /// Inserts all new key-value pairs from the iterator into the existing
+    /// `HashMap`, replacing the values of existing keys with the new values
+    /// returned from the iterator. The keys and values must implement the
+    /// [`Copy`] trait.
+    ///
+    /// [`Copy`]: https://doc.rust-lang.org/core/marker/trait.Copy.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::hash_map::HashMap;
+    /// use rune_alloc::prelude::*;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.try_insert(1, 100)?;
+    ///
+    /// let arr = [(1, 1), (2, 2)];
+    /// let some_iter = arr.iter().map(|(k, v)| (k, v));
+    /// map.try_extend(some_iter)?;
+    /// // Replace values with existing keys with new values returned from the iterator.
+    /// // So that the map.get(&1) doesn't return Some(&100).
+    /// assert_eq!(map.get(&1), Some(&1));
+    ///
+    /// let some_vec: Vec<_> = vec![(3, 3), (4, 4)];
+    /// map.try_extend(some_vec.iter().map(|(k, v)| (k, v)))?;
+    ///
+    /// let some_arr = [(5, 5), (6, 6)];
+    /// map.try_extend(some_arr.iter().map(|(k, v)| (k, v)))?;
+    ///
+    /// // You can also extend from another HashMap
+    /// let mut new_map = HashMap::new();
+    /// new_map.try_extend(&map)?;
+    /// assert_eq!(new_map, map);
+    ///
+    /// let mut vec: Vec<_> = new_map.into_iter().collect();
+    /// // The `IntoIter` iterator produces items in arbitrary order, so the
+    /// // items must be sorted to test them against a sorted array.
+    /// vec.sort_unstable();
+    /// assert_eq!(vec, [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn try_extend<T: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: T) -> Result<(), Error> {
+        self.try_extend(iter.into_iter().map(|(&key, &value)| (key, value)))
+    }
+}
+
+#[cfg(test)]
+impl<'a, K, V, S, A> Extend<(&'a K, &'a V)> for HashMap<K, V, S, A>
+where
+    K: Eq + Hash + Copy,
+    V: Copy,
+    S: BuildHasher,
+    A: Allocator,
+{
+    fn extend<T: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: T) {
+        self.try_extend(iter).abort()
+    }
+}
+
+/// Inserts all new key-value pairs from the iterator and replaces the values of
+/// existing keys with the new values returned from the iterator.
+impl<'a, K, V, S, A> TryExtend<&'a (K, V)> for HashMap<K, V, S, A>
+where
+    K: Eq + Hash + Copy,
+    V: Copy,
+    S: BuildHasher,
+    A: Allocator,
+{
+    /// Inserts all new key-value pairs from the iterator into the existing `HashMap`.
+    /// Replaces the values of existing keys with the new values returned from the
+    /// iterator. The keys and values must implement the [`Copy`] trait.
+    ///
+    /// [`Copy`]: https://doc.rust-lang.org/core/marker/trait.Copy.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::hash_map::HashMap;
+    /// use rune_alloc::prelude::*;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.try_insert(1, 100)?;
+    ///
+    /// let arr = [(1, 1), (2, 2)];
+    /// let some_iter = arr.iter();
+    /// map.try_extend(some_iter)?;
+    /// // Replace values with existing keys with new values returned from the iterator.
+    /// // So that the map.get(&1) doesn't return Some(&100).
+    /// assert_eq!(map.get(&1), Some(&1));
+    ///
+    /// let some_vec: Vec<_> = vec![(3, 3), (4, 4)];
+    /// map.try_extend(&some_vec)?;
+    ///
+    /// let some_arr = [(5, 5), (6, 6)];
+    /// map.try_extend(&some_arr)?;
+    ///
+    /// let mut vec: Vec<_> = map.into_iter().collect();
+    /// // The `IntoIter` iterator produces items in arbitrary order, so the
+    /// // items must be sorted to test them against a sorted array.
+    /// vec.sort_unstable();
+    /// assert_eq!(vec, [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn try_extend<T: IntoIterator<Item = &'a (K, V)>>(&mut self, iter: T) -> Result<(), Error> {
+        self.try_extend(iter.into_iter().map(|&(key, value)| (key, value)))
+    }
+}
+
+#[allow(dead_code)]
+fn assert_covariance() {
+    fn map_key<'new>(v: HashMap<&'static str, u8>) -> HashMap<&'new str, u8> {
+        v
+    }
+    fn map_val<'new>(v: HashMap<u8, &'static str>) -> HashMap<u8, &'new str> {
+        v
+    }
+    fn iter_key<'a, 'new>(v: Iter<'a, &'static str, u8>) -> Iter<'a, &'new str, u8> {
+        v
+    }
+    fn iter_val<'a, 'new>(v: Iter<'a, u8, &'static str>) -> Iter<'a, u8, &'new str> {
+        v
+    }
+    fn into_iter_key<'new, A: Allocator>(
+        v: IntoIter<&'static str, u8, A>,
+    ) -> IntoIter<&'new str, u8, A> {
+        v
+    }
+    fn into_iter_val<'new, A: Allocator>(
+        v: IntoIter<u8, &'static str, A>,
+    ) -> IntoIter<u8, &'new str, A> {
+        v
+    }
+    fn keys_key<'a, 'new>(v: Keys<'a, &'static str, u8>) -> Keys<'a, &'new str, u8> {
+        v
+    }
+    fn keys_val<'a, 'new>(v: Keys<'a, u8, &'static str>) -> Keys<'a, u8, &'new str> {
+        v
+    }
+    fn values_key<'a, 'new>(v: Values<'a, &'static str, u8>) -> Values<'a, &'new str, u8> {
+        v
+    }
+    fn values_val<'a, 'new>(v: Values<'a, u8, &'static str>) -> Values<'a, u8, &'new str> {
+        v
+    }
+    fn drain<'new>(
+        d: Drain<'static, &'static str, &'static str>,
+    ) -> Drain<'new, &'new str, &'new str> {
+        d
+    }
+}
+
+#[cfg(test)]
+mod test_map {
+    use core::alloc::Layout;
+    use core::hash::BuildHasher;
+    use core::ptr::NonNull;
+    use core::sync::atomic::{AtomicI8, Ordering};
+
+    use ::rust_std::borrow::ToOwned;
+    use ::rust_std::cell::RefCell;
+    use ::rust_std::collections::hash_map::DefaultHasher;
+    use ::rust_std::ops::AddAssign;
+    use ::rust_std::thread;
+    use ::rust_std::usize;
+    use ::rust_std::vec::Vec;
+    use ::rust_std::{format, println};
+
+    use ::rust_alloc::string::{String, ToString};
+    use ::rust_alloc::sync::Arc;
+
+    use rand::{rngs::SmallRng, Rng, SeedableRng};
+
+    use super::DefaultHashBuilder;
+    use super::Entry::{Occupied, Vacant};
+    use super::{EntryRef, HashMap, RawEntryMut};
+
+    use crate::alloc::testing::*;
+    use crate::alloc::{into_ok, into_ok_try};
+    use crate::alloc::{AllocError, Allocator, Error, Global, TryClone, TryExtend};
+
+    ::rust_std::thread_local! { static DROP_VECTOR: RefCell<Vec<i32>> = RefCell::new(Vec::new()) }
+
+    #[test]
+    fn test_zero_capacities() {
+        type HM = HashMap<i32, i32>;
+
+        let m = HM::new();
+        assert_eq!(m.capacity(), 0);
+
+        let m = HM::default();
+        assert_eq!(m.capacity(), 0);
+
+        let m = HM::with_hasher(DefaultHashBuilder::default());
+        assert_eq!(m.capacity(), 0);
+
+        let m = HM::with_capacity(0);
+        assert_eq!(m.capacity(), 0);
+
+        let m = HM::with_capacity_and_hasher(0, DefaultHashBuilder::default());
+        assert_eq!(m.capacity(), 0);
+
+        let mut m = HM::new();
+        m.insert(1, 1);
+        m.insert(2, 2);
+        m.remove(&1);
+        m.remove(&2);
+        m.shrink_to_fit();
+        assert_eq!(m.capacity(), 0);
+
+        let mut m = HM::new();
+        m.reserve(0);
+        assert_eq!(m.capacity(), 0);
+    }
+
+    #[test]
+    fn test_create_capacity_zero() {
+        let mut m = HashMap::with_capacity(0);
+
+        assert!(m.insert(1, 1).is_none());
+
+        assert!(m.contains_key(&1));
+        assert!(!m.contains_key(&0));
+    }
+
+    #[test]
+    fn test_insert() {
+        let mut m = HashMap::new();
+        assert_eq!(m.len(), 0);
+        assert!(m.insert(1, 2).is_none());
+        assert_eq!(m.len(), 1);
+        assert!(m.insert(2, 4).is_none());
+        assert_eq!(m.len(), 2);
+        assert_eq!(*m.get(&1).unwrap(), 2);
+        assert_eq!(*m.get(&2).unwrap(), 4);
+    }
+
+    #[test]
+    fn test_clone() {
+        let mut m = HashMap::new();
+        assert_eq!(m.len(), 0);
+        assert!(m.insert(1, 2).is_none());
+        assert_eq!(m.len(), 1);
+        assert!(m.insert(2, 4).is_none());
+        assert_eq!(m.len(), 2);
+        #[allow(clippy::redundant_clone)]
+        let m2 = m.clone();
+        assert_eq!(*m2.get(&1).unwrap(), 2);
+        assert_eq!(*m2.get(&2).unwrap(), 4);
+        assert_eq!(m2.len(), 2);
+    }
+
+    #[test]
+    fn test_clone_from() {
+        let mut m = HashMap::new();
+        let mut m2 = HashMap::new();
+        assert_eq!(m.len(), 0);
+        assert!(m.insert(1, 2).is_none());
+        assert_eq!(m.len(), 1);
+        assert!(m.insert(2, 4).is_none());
+        assert_eq!(m.len(), 2);
+        m2.try_clone_from(&m).unwrap();
+        assert_eq!(*m2.get(&1).unwrap(), 2);
+        assert_eq!(*m2.get(&2).unwrap(), 4);
+        assert_eq!(m2.len(), 2);
+    }
+
+    #[derive(Hash, PartialEq, Eq)]
+    struct Droppable {
+        k: usize,
+    }
+
+    impl Droppable {
+        fn new(k: usize) -> Droppable {
+            DROP_VECTOR.with(|slot| {
+                slot.borrow_mut()[k] += 1;
+            });
+
+            Droppable { k }
+        }
+    }
+
+    impl Drop for Droppable {
+        fn drop(&mut self) {
+            DROP_VECTOR.with(|slot| {
+                slot.borrow_mut()[self.k] -= 1;
+            });
+        }
+    }
+
+    impl TryClone for Droppable {
+        fn try_clone(&self) -> Result<Self, Error> {
+            Ok(Droppable::new(self.k))
+        }
+    }
+
+    #[test]
+    fn test_drops() {
+        DROP_VECTOR.with(|slot| {
+            *slot.borrow_mut() = ::rust_alloc::vec![0; 200];
+        });
+
+        {
+            let mut m = HashMap::new();
+
+            DROP_VECTOR.with(|v| {
+                for i in 0..200 {
+                    assert_eq!(v.borrow()[i], 0);
+                }
+            });
+
+            for i in 0..100 {
+                let d1 = Droppable::new(i);
+                let d2 = Droppable::new(i + 100);
+                m.insert(d1, d2);
+            }
+
+            DROP_VECTOR.with(|v| {
+                for i in 0..200 {
+                    assert_eq!(v.borrow()[i], 1);
+                }
+            });
+
+            for i in 0..50 {
+                let k = Droppable::new(i);
+                let v = m.remove(&k);
+
+                assert!(v.is_some());
+
+                DROP_VECTOR.with(|v| {
+                    assert_eq!(v.borrow()[i], 1);
+                    assert_eq!(v.borrow()[i + 100], 1);
+                });
+            }
+
+            DROP_VECTOR.with(|v| {
+                for i in 0..50 {
+                    assert_eq!(v.borrow()[i], 0);
+                    assert_eq!(v.borrow()[i + 100], 0);
+                }
+
+                for i in 50..100 {
+                    assert_eq!(v.borrow()[i], 1);
+                    assert_eq!(v.borrow()[i + 100], 1);
+                }
+            });
+        }
+
+        DROP_VECTOR.with(|v| {
+            for i in 0..200 {
+                assert_eq!(v.borrow()[i], 0);
+            }
+        });
+    }
+
+    #[test]
+    fn test_into_iter_drops() {
+        DROP_VECTOR.with(|v| {
+            *v.borrow_mut() = ::rust_alloc::vec![0; 200];
+        });
+
+        let hm = {
+            let mut hm = HashMap::new();
+
+            DROP_VECTOR.with(|v| {
+                for i in 0..200 {
+                    assert_eq!(v.borrow()[i], 0);
+                }
+            });
+
+            for i in 0..100 {
+                let d1 = Droppable::new(i);
+                let d2 = Droppable::new(i + 100);
+                hm.insert(d1, d2);
+            }
+
+            DROP_VECTOR.with(|v| {
+                for i in 0..200 {
+                    assert_eq!(v.borrow()[i], 1);
+                }
+            });
+
+            hm
+        };
+
+        // By the way, ensure that cloning doesn't screw up the dropping.
+        drop(hm.clone());
+
+        {
+            let mut half = hm.into_iter().take(50);
+
+            DROP_VECTOR.with(|v| {
+                for i in 0..200 {
+                    assert_eq!(v.borrow()[i], 1);
+                }
+            });
+
+            for _ in half.by_ref() {}
+
+            DROP_VECTOR.with(|v| {
+                let nk = (0..100).filter(|&i| v.borrow()[i] == 1).count();
+
+                let nv = (0..100).filter(|&i| v.borrow()[i + 100] == 1).count();
+
+                assert_eq!(nk, 50);
+                assert_eq!(nv, 50);
+            });
+        };
+
+        DROP_VECTOR.with(|v| {
+            for i in 0..200 {
+                assert_eq!(v.borrow()[i], 0);
+            }
+        });
+    }
+
+    #[test]
+    fn test_empty_remove() {
+        let mut m: HashMap<i32, bool> = HashMap::new();
+        assert_eq!(m.remove(&0), None);
+    }
+
+    #[test]
+    fn test_empty_entry() {
+        let mut m: HashMap<i32, bool> = HashMap::new();
+        match m.entry(0) {
+            Occupied(_) => panic!(),
+            Vacant(_) => {}
+        }
+        assert!(*m.entry(0).or_insert(true));
+        assert_eq!(m.len(), 1);
+    }
+
+    #[test]
+    fn test_empty_entry_ref() {
+        let mut m: HashMap<String, bool> = HashMap::new();
+        match m.entry_ref("poneyland") {
+            EntryRef::Occupied(_) => panic!(),
+            EntryRef::Vacant(_) => {}
+        }
+        assert!(*m.entry_ref("poneyland").or_insert(true));
+        assert_eq!(m.len(), 1);
+    }
+
+    #[test]
+    fn test_empty_iter() {
+        let mut m: HashMap<i32, bool> = HashMap::new();
+        assert_eq!(m.drain().next(), None);
+        assert_eq!(m.keys().next(), None);
+        assert_eq!(m.values().next(), None);
+        assert_eq!(m.values_mut().next(), None);
+        assert_eq!(m.iter().next(), None);
+        assert_eq!(m.iter_mut().next(), None);
+        assert_eq!(m.len(), 0);
+        assert!(m.is_empty());
+        assert_eq!(m.into_iter().next(), None);
+    }
+
+    #[test]
+    #[cfg_attr(miri, ignore)] // FIXME: takes too long
+    fn test_lots_of_insertions() {
+        let mut m = HashMap::new();
+
+        // Try this a few times to make sure we never screw up the hashmap's
+        // internal state.
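As an aside for readers of this hunk: the test above goes through the `#[cfg(test)]`-only infallible shims (`insert`, `Extend`, `FromIterator`) that this patch layers over the fallible API. A minimal sketch of the same insertion loop against the fallible surface, assuming the `try_insert` and `Error` exports shown earlier in this diff (the `fill` helper is hypothetical, not part of the patch):

```rust
use rune_alloc::{Error, HashMap};

// Hypothetical helper (not in the patch): the fallible counterpart of the
// insertion loop in the test, propagating allocation failure to the caller
// instead of aborting.
fn fill(n: i32) -> Result<HashMap<i32, i32>, Error> {
    let mut m = HashMap::new();
    for i in 1..=n {
        // `try_insert` reports a failed table growth as an `Error`.
        m.try_insert(i, i)?;
    }
    Ok(m)
}
```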
+        for _ in 0..10 {
+            assert!(m.is_empty());
+
+            for i in 1..1001 {
+                assert!(m.insert(i, i).is_none());
+
+                for j in 1..=i {
+                    let r = m.get(&j);
+                    assert_eq!(r, Some(&j));
+                }
+
+                for j in i + 1..1001 {
+                    let r = m.get(&j);
+                    assert_eq!(r, None);
+                }
+            }
+
+            for i in 1001..2001 {
+                assert!(!m.contains_key(&i));
+            }
+
+            // remove forwards
+            for i in 1..1001 {
+                assert!(m.remove(&i).is_some());
+
+                for j in 1..=i {
+                    assert!(!m.contains_key(&j));
+                }
+
+                for j in i + 1..1001 {
+                    assert!(m.contains_key(&j));
+                }
+            }
+
+            for i in 1..1001 {
+                assert!(!m.contains_key(&i));
+            }
+
+            for i in 1..1001 {
+                assert!(m.insert(i, i).is_none());
+            }
+
+            // remove backwards
+            for i in (1..1001).rev() {
+                assert!(m.remove(&i).is_some());
+
+                for j in i..1001 {
+                    assert!(!m.contains_key(&j));
+                }
+
+                for j in 1..i {
+                    assert!(m.contains_key(&j));
+                }
+            }
+        }
+    }
+
+    #[test]
+    fn test_find_mut() {
+        let mut m = HashMap::new();
+        assert!(m.insert(1, 12).is_none());
+        assert!(m.insert(2, 8).is_none());
+        assert!(m.insert(5, 14).is_none());
+        let new = 100;
+        match m.get_mut(&5) {
+            None => panic!(),
+            Some(x) => *x = new,
+        }
+        assert_eq!(m.get(&5), Some(&new));
+    }
+
+    #[test]
+    fn test_insert_overwrite() {
+        let mut m = HashMap::new();
+        assert!(m.insert(1, 2).is_none());
+        assert_eq!(*m.get(&1).unwrap(), 2);
+        assert!(m.insert(1, 3).is_some());
+        assert_eq!(*m.get(&1).unwrap(), 3);
+    }
+
+    #[test]
+    fn test_insert_conflicts() {
+        let mut m = HashMap::with_capacity(4);
+        assert!(m.insert(1, 2).is_none());
+        assert!(m.insert(5, 3).is_none());
+        assert!(m.insert(9, 4).is_none());
+        assert_eq!(*m.get(&9).unwrap(), 4);
+        assert_eq!(*m.get(&5).unwrap(), 3);
+        assert_eq!(*m.get(&1).unwrap(), 2);
+    }
+
+    #[test]
+    fn test_conflict_remove() {
+        let mut m = HashMap::with_capacity(4);
+        assert!(m.insert(1, 2).is_none());
+        assert_eq!(*m.get(&1).unwrap(), 2);
+        assert!(m.insert(5, 3).is_none());
+        assert_eq!(*m.get(&1).unwrap(), 2);
+        assert_eq!(*m.get(&5).unwrap(), 3);
+        assert!(m.insert(9, 4).is_none());
+        assert_eq!(*m.get(&1).unwrap(), 2);
+        assert_eq!(*m.get(&5).unwrap(), 3);
+        assert_eq!(*m.get(&9).unwrap(), 4);
+        assert!(m.remove(&1).is_some());
+        assert_eq!(*m.get(&9).unwrap(), 4);
+        assert_eq!(*m.get(&5).unwrap(), 3);
+    }
+
+    #[test]
+    fn test_insert_unique_unchecked() {
+        let mut map = HashMap::new();
+        let (k1, v1) = map.insert_unique_unchecked(10, 11);
+        assert_eq!((&10, &mut 11), (k1, v1));
+        let (k2, v2) = map.insert_unique_unchecked(20, 21);
+        assert_eq!((&20, &mut 21), (k2, v2));
+        assert_eq!(Some(&11), map.get(&10));
+        assert_eq!(Some(&21), map.get(&20));
+        assert_eq!(None, map.get(&30));
+    }
+
+    #[test]
+    fn test_is_empty() {
+        let mut m = HashMap::with_capacity(4);
+        assert!(m.insert(1, 2).is_none());
+        assert!(!m.is_empty());
+        assert!(m.remove(&1).is_some());
+        assert!(m.is_empty());
+    }
+
+    #[test]
+    fn test_remove() {
+        let mut m = HashMap::new();
+        m.insert(1, 2);
+        assert_eq!(m.remove(&1), Some(2));
+        assert_eq!(m.remove(&1), None);
+    }
+
+    #[test]
+    fn test_remove_entry() {
+        let mut m = HashMap::new();
+        m.insert(1, 2);
+        assert_eq!(m.remove_entry(&1), Some((1, 2)));
+        assert_eq!(m.remove(&1), None);
+    }
+
+    #[test]
+    fn test_iterate() {
+        let mut m = HashMap::with_capacity(4);
+        for i in 0..32 {
+            assert!(m.insert(i, i * 2).is_none());
+        }
+        assert_eq!(m.len(), 32);
+
+        let mut observed: u32 = 0;
+
+        for (k, v) in &m {
+            assert_eq!(*v, *k * 2);
+            observed |= 1 << *k;
+        }
+        assert_eq!(observed, 0xFFFF_FFFF);
+    }
+
+    #[test]
+    fn test_keys() {
+        let vec = ::rust_alloc::vec![(1, 'a'), (2, 'b'), (3, 'c')];
+        let map: HashMap<_, _> = vec.into_iter().collect();
+        let keys: Vec<_> = map.keys().copied().collect();
+        assert_eq!(keys.len(), 3);
+        assert!(keys.contains(&1));
+        assert!(keys.contains(&2));
+        assert!(keys.contains(&3));
+    }
+
+    #[test]
+    fn test_values() {
+        let vec = ::rust_alloc::vec![(1, 'a'), (2, 'b'), (3, 'c')];
+        let map: HashMap<_, _> = vec.into_iter().collect();
+        let values: Vec<_> = map.values().copied().collect();
+        assert_eq!(values.len(), 3);
+        assert!(values.contains(&'a'));
+        assert!(values.contains(&'b'));
+        assert!(values.contains(&'c'));
+    }
+
+    #[test]
+    fn test_values_mut() {
+        let vec = ::rust_alloc::vec![(1, 1), (2, 2), (3, 3)];
+        let mut map: HashMap<_, _> = vec.into_iter().collect();
+        for value in map.values_mut() {
+            *value *= 2;
+        }
+        let values: Vec<_> = map.values().copied().collect();
+        assert_eq!(values.len(), 3);
+        assert!(values.contains(&2));
+        assert!(values.contains(&4));
+        assert!(values.contains(&6));
+    }
+
+    #[test]
+    fn test_into_keys() {
+        let vec = ::rust_alloc::vec![(1, 'a'), (2, 'b'), (3, 'c')];
+        let map: HashMap<_, _> = vec.into_iter().collect();
+        let keys: Vec<_> = map.into_keys().collect();
+
+        assert_eq!(keys.len(), 3);
+        assert!(keys.contains(&1));
+        assert!(keys.contains(&2));
+        assert!(keys.contains(&3));
+    }
+
+    #[test]
+    fn test_into_values() {
+        let vec = ::rust_alloc::vec![(1, 'a'), (2, 'b'), (3, 'c')];
+        let map: HashMap<_, _> = vec.into_iter().collect();
+        let values: Vec<_> = map.into_values().collect();
+
+        assert_eq!(values.len(), 3);
+        assert!(values.contains(&'a'));
+        assert!(values.contains(&'b'));
+        assert!(values.contains(&'c'));
+    }
+
+    #[test]
+    fn test_find() {
+        let mut m = HashMap::new();
+        assert!(m.get(&1).is_none());
+        m.insert(1, 2);
+        match m.get(&1) {
+            None => panic!(),
+            Some(v) => assert_eq!(*v, 2),
+        }
+    }
+
+    #[test]
+    fn test_eq() {
+        let mut m1 = HashMap::new();
+        m1.insert(1, 2);
+        m1.insert(2, 3);
+        m1.insert(3, 4);
+
+        let mut m2 = HashMap::new();
+        m2.insert(1, 2);
+        m2.insert(2, 3);
+
+        assert!(m1 != m2);
+
+        m2.insert(3, 4);
+
+        assert_eq!(m1, m2);
+    }
+
+    #[test]
+    fn test_show() {
+        let mut map = HashMap::new();
+        let empty: HashMap<i32, i32> = HashMap::new();
+
+        map.insert(1, 2);
+        map.insert(3, 4);
+
+        let map_str = format!("{map:?}");
+
+        assert!(map_str == "{1: 2, 3: 4}" || map_str == "{3: 4, 1: 2}");
+        assert_eq!(format!("{empty:?}"), "{}");
+    }
+
+    #[test]
+    fn test_expand() {
+        let mut m = HashMap::new();
+
+        assert_eq!(m.len(), 0);
+        assert!(m.is_empty());
+
+        let mut i = 0;
+        let old_raw_cap = m.raw_capacity();
+        while old_raw_cap == m.raw_capacity() {
+            m.insert(i, i);
+            i += 1;
+        }
+
+        assert_eq!(m.len(), i);
+        assert!(!m.is_empty());
+    }
+
+    #[test]
+    fn test_behavior_resize_policy() {
+        let mut m = HashMap::new();
+
+        assert_eq!(m.len(), 0);
+        assert_eq!(m.raw_capacity(), 1);
+        assert!(m.is_empty());
+
+        m.insert(0, 0);
+        m.remove(&0);
+        assert!(m.is_empty());
+        let initial_raw_cap = m.raw_capacity();
+        m.reserve(initial_raw_cap);
+        let raw_cap = m.raw_capacity();
+
+        assert_eq!(raw_cap, initial_raw_cap * 2);
+
+        let mut i = 0;
+        for _ in 0..raw_cap * 3 / 4 {
+            m.insert(i, i);
+            i += 1;
+        }
+        // three quarters full
+
+        assert_eq!(m.len(), i);
+        assert_eq!(m.raw_capacity(), raw_cap);
+
+        for _ in 0..raw_cap / 4 {
+            m.insert(i, i);
+            i += 1;
+        }
+        // half full
+
+        let new_raw_cap = m.raw_capacity();
+        assert_eq!(new_raw_cap, raw_cap * 2);
+
+        for _ in 0..raw_cap / 2 - 1 {
+            i -= 1;
+            m.remove(&i);
+            assert_eq!(m.raw_capacity(), new_raw_cap);
+        }
+        // A little more than one quarter full.
+        m.shrink_to_fit();
+        assert_eq!(m.raw_capacity(), raw_cap);
+        // again, a little more than half full
+        for _ in 0..raw_cap / 2 {
+            i -= 1;
+            m.remove(&i);
+        }
+        m.shrink_to_fit();
+
+        assert_eq!(m.len(), i);
+        assert!(!m.is_empty());
+        assert_eq!(m.raw_capacity(), initial_raw_cap);
+    }
+
+    #[test]
+    fn test_reserve_shrink_to_fit() {
+        let mut m = HashMap::new();
+        m.insert(0, 0);
+        m.remove(&0);
+        assert!(m.capacity() >= m.len());
+        for i in 0..128 {
+            m.insert(i, i);
+        }
+        m.reserve(256);
+
+        let usable_cap = m.capacity();
+        for i in 128..(128 + 256) {
+            m.insert(i, i);
+            assert_eq!(m.capacity(), usable_cap);
+        }
+
+        for i in 100..(128 + 256) {
+            assert_eq!(m.remove(&i), Some(i));
+        }
+        m.shrink_to_fit();
+
+        assert_eq!(m.len(), 100);
+        assert!(!m.is_empty());
+        assert!(m.capacity() >= m.len());
+
+        for i in 0..100 {
+            assert_eq!(m.remove(&i), Some(i));
+        }
+        m.shrink_to_fit();
+        m.insert(0, 0);
+
+        assert_eq!(m.len(), 1);
+        assert!(m.capacity() >= m.len());
+        assert_eq!(m.remove(&0), Some(0));
+    }
+
+    #[test]
+    fn test_from_iter() {
+        let xs = [(1, 1), (2, 2), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
+
+        let map: HashMap<_, _> = xs.iter().copied().collect();
+
+        for &(k, v) in &xs {
+            assert_eq!(map.get(&k), Some(&v));
+        }
+
+        assert_eq!(map.iter().len(), xs.len() - 1);
+    }
+
+    #[test]
+    fn test_size_hint() {
+        let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
+
+        let map: HashMap<_, _> = xs.iter().copied().collect();
+
+        let mut iter = map.iter();
+
+        for _ in iter.by_ref().take(3) {}
+
+        assert_eq!(iter.size_hint(), (3, Some(3)));
+    }
+
+    #[test]
+    fn test_iter_len() {
+        let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
+
+        let map: HashMap<_, _> = xs.iter().copied().collect();
+
+        let mut iter = map.iter();
+
+        for _ in iter.by_ref().take(3) {}
+
+        assert_eq!(iter.len(), 3);
+    }
+
+    #[test]
+    fn test_mut_size_hint() {
+        let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
+
+        let mut map: HashMap<_, _> = xs.iter().copied().collect();
+
+        let mut iter = map.iter_mut();
+
+        for _ in iter.by_ref().take(3) {}
+
+        assert_eq!(iter.size_hint(), (3, Some(3)));
+    }
+
+    #[test]
+    fn test_iter_mut_len() {
+        let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
+
+        let mut map: HashMap<_, _> = xs.iter().copied().collect();
+
+        let mut iter = map.iter_mut();
+
+        for _ in iter.by_ref().take(3) {}
+
+        assert_eq!(iter.len(), 3);
+    }
+
+    #[test]
+    fn test_index() {
+        let mut map = HashMap::new();
+
+        map.insert(1, 2);
+        map.insert(2, 1);
+        map.insert(3, 4);
+
+        assert_eq!(map[&2], 1);
+    }
+
+    #[test]
+    #[should_panic]
+    fn test_index_nonexistent() {
+        let mut map = HashMap::new();
+
+        map.insert(1, 2);
+        map.insert(2, 1);
+        map.insert(3, 4);
+
+        #[allow(clippy::no_effect)] // false positive lint
+        map[&4];
+    }
+
+    #[test]
+    fn test_entry() {
+        let xs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
+
+        let mut map: HashMap<_, _> = xs.iter().copied().collect();
+
+        // Existing key (insert)
+        match map.entry(1) {
+            Vacant(_) => unreachable!(),
+            Occupied(mut view) => {
+                assert_eq!(view.get(), &10);
+                assert_eq!(view.insert(100), 10);
+            }
+        }
+        assert_eq!(map.get(&1).unwrap(), &100);
+        assert_eq!(map.len(), 6);
+
+        // Existing key (update)
+        match map.entry(2) {
+            Vacant(_) => unreachable!(),
+            Occupied(mut view) => {
+                let v = view.get_mut();
+                let new_v = (*v) * 10;
+                *v = new_v;
+            }
+        }
+        assert_eq!(map.get(&2).unwrap(), &200);
+        assert_eq!(map.len(), 6);
+
+        // Existing key (take)
+        match map.entry(3) {
+            Vacant(_) => unreachable!(),
+            Occupied(view) => {
+                assert_eq!(view.remove(), 30);
+            }
+        }
+        assert_eq!(map.get(&3), None);
+        assert_eq!(map.len(), 5);
+
+        // Nonexistent key (insert)
+        match map.entry(10) {
+            Occupied(_) => unreachable!(),
+            Vacant(view) => {
+                assert_eq!(*view.insert(1000), 1000);
+            }
+        }
+        assert_eq!(map.get(&10).unwrap(), &1000);
+        assert_eq!(map.len(), 6);
+    }
+
+    #[test]
+    fn test_entry_ref() {
+        let xs = [
+            ("One".to_owned(), 10),
+            ("Two".to_owned(), 20),
+            ("Three".to_owned(), 30),
+            ("Four".to_owned(), 40),
+            ("Five".to_owned(), 50),
+            ("Six".to_owned(), 60),
+        ];
+
+        let mut map: HashMap<_, _> = xs.iter().cloned().collect();
+
+        // Existing key (insert)
+        match map.entry_ref("One") {
+            EntryRef::Vacant(_) => unreachable!(),
+            EntryRef::Occupied(mut view) => {
+                assert_eq!(view.get(), &10);
+                assert_eq!(view.insert(100), 10);
+            }
+        }
+        assert_eq!(map.get("One").unwrap(), &100);
+        assert_eq!(map.len(), 6);
+
+        // Existing key (update)
+        match map.entry_ref("Two") {
+            EntryRef::Vacant(_) => unreachable!(),
+            EntryRef::Occupied(mut view) => {
+                let v = view.get_mut();
+                let new_v = (*v) * 10;
+                *v = new_v;
+            }
+        }
+        assert_eq!(map.get("Two").unwrap(), &200);
+        assert_eq!(map.len(), 6);
+
+        // Existing key (take)
+        match map.entry_ref("Three") {
+            EntryRef::Vacant(_) => unreachable!(),
+            EntryRef::Occupied(view) => {
+                assert_eq!(view.remove(), 30);
+            }
+        }
+        assert_eq!(map.get("Three"), None);
+        assert_eq!(map.len(), 5);
+
+        // Nonexistent key (insert)
+        match map.entry_ref("Ten") {
+            EntryRef::Occupied(_) => unreachable!(),
+            EntryRef::Vacant(view) => {
+                assert_eq!(*view.insert(1000), 1000);
+            }
+        }
+        assert_eq!(map.get("Ten").unwrap(), &1000);
+        assert_eq!(map.len(), 6);
+    }
+
+    #[test]
+    fn test_entry_take_doesnt_corrupt() {
+        #![allow(deprecated)] //rand
+        // Test for #19292
+        fn check(m: &HashMap<i32, ()>) {
+            for k in m.keys() {
+                assert!(m.contains_key(k), "{k} is in keys() but not in the map?");
+            }
+        }
+
+        let mut m = HashMap::new();
+
+        let mut rng = {
+            let seed = u64::from_le_bytes(*b"testseed");
+            SmallRng::seed_from_u64(seed)
+        };
+
+        // Populate the map with some items.
+        for _ in 0..50 {
+            let x = rng.gen_range(-10..10);
+            m.insert(x, ());
+        }
+
+        for _ in 0..1000 {
+            let x = rng.gen_range(-10..10);
+            match m.entry(x) {
+                Vacant(_) => {}
+                Occupied(e) => {
+                    e.remove();
+                }
+            }
+
+            check(&m);
+        }
+    }
+
+    #[test]
+    fn test_entry_ref_take_doesnt_corrupt() {
+        #![allow(deprecated)] //rand
+        // Test for #19292
+        fn check(m: &HashMap<String, ()>) {
+            for k in m.keys() {
+                assert!(m.contains_key(k), "{k} is in keys() but not in the map?");
+            }
+        }
+
+        let mut m = HashMap::new();
+
+        let mut rng = {
+            let seed = u64::from_le_bytes(*b"testseed");
+            SmallRng::seed_from_u64(seed)
+        };
+
+        // Populate the map with some items.
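For readers skimming these `entry_ref` tests: the point of hashing `&str` directly is that the owned `String` key is only materialized on the vacant path, via the `K: From<&Q>` bound documented above. A minimal sketch of that pattern against the fallible API in this patch (the `bump` helper is hypothetical, not part of the patch):

```rust
use rune_alloc::hash_map::{EntryRef, HashMap};
use rune_alloc::Error;

// Hypothetical helper (not in the patch): look up by `&str` and only
// allocate an owned `String` key when the entry turns out to be vacant.
fn bump(map: &mut HashMap<String, u32>, name: &str) -> Result<u32, Error> {
    match map.entry_ref(name) {
        EntryRef::Occupied(mut o) => {
            *o.get_mut() += 1;
            Ok(*o.get())
        }
        // `try_insert` converts the borrowed key via `K: From<&Q>` here.
        EntryRef::Vacant(v) => Ok(*v.try_insert(1)?),
    }
}
```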
+        for _ in 0..50 {
+            let mut x = String::with_capacity(1);
+            x.push(rng.gen_range('a'..='z'));
+            m.insert(x, ());
+        }
+
+        for _ in 0..1000 {
+            let mut x = String::with_capacity(1);
+            x.push(rng.gen_range('a'..='z'));
+            match m.entry_ref(x.as_str()) {
+                EntryRef::Vacant(_) => {}
+                EntryRef::Occupied(e) => {
+                    e.remove();
+                }
+            }
+
+            check(&m);
+        }
+    }
+
+    #[test]
+    fn test_extend_ref_k_ref_v() {
+        let mut a = HashMap::new();
+        a.insert(1, "one");
+        let mut b = HashMap::new();
+        b.insert(2, "two");
+        b.insert(3, "three");
+
+        a.extend(&b);
+
+        assert_eq!(a.len(), 3);
+        assert_eq!(a[&1], "one");
+        assert_eq!(a[&2], "two");
+        assert_eq!(a[&3], "three");
+    }
+
+    #[test]
+    #[allow(clippy::needless_borrow)]
+    fn test_extend_ref_kv_tuple() {
+        let mut a = HashMap::new();
+        a.insert(0, 0);
+
+        fn create_arr<T: AddAssign<T> + Copy, const N: usize>(start: T, step: T) -> [(T, T); N] {
+            let mut outs: [(T, T); N] = [(start, start); N];
+            let mut element = step;
+            outs.iter_mut().skip(1).for_each(|(k, v)| {
+                *k += element;
+                *v += element;
+                element += step;
+            });
+            outs
+        }
+
+        let for_iter: Vec<_> = (0..100).map(|i| (i, i)).collect();
+        let iter = for_iter.iter();
+        let vec: Vec<_> = (100..200).map(|i| (i, i)).collect();
+        a.try_extend(iter).abort();
+        a.try_extend(&vec).abort();
+        a.try_extend(create_arr::<i32, 100>(200, 1)).abort();
+
+        assert_eq!(a.len(), 300);
+
+        for item in 0..300 {
+            assert_eq!(a[&item], item);
+        }
+    }
+
+    #[test]
+    fn test_capacity_not_less_than_len() {
+        let mut a = HashMap::new();
+        let mut item = 0;
+
+        for _ in 0..116 {
+            a.insert(item, 0);
+            item += 1;
+        }
+
+        assert!(a.capacity() > a.len());
+
+        let free = a.capacity() - a.len();
+        for _ in 0..free {
+            a.insert(item, 0);
+            item += 1;
+        }
+
+        assert_eq!(a.len(), a.capacity());
+
+        // Insert at capacity should cause allocation.
+ a.insert(item, 0); + assert!(a.capacity() > a.len()); + } + + #[test] + fn test_occupied_entry_key() { + let mut a = HashMap::new(); + let key = "hello there"; + let value = "value goes here"; + assert!(a.is_empty()); + a.insert(key, value); + assert_eq!(a.len(), 1); + assert_eq!(a[key], value); + + match a.entry(key) { + Vacant(_) => panic!(), + Occupied(e) => assert_eq!(key, *e.key()), + } + assert_eq!(a.len(), 1); + assert_eq!(a[key], value); + } + + #[test] + fn test_occupied_entry_ref_key() { + let mut a = HashMap::new(); + let key = "hello there"; + let value = "value goes here"; + assert!(a.is_empty()); + a.insert(key.to_owned(), value); + assert_eq!(a.len(), 1); + assert_eq!(a[key], value); + + match a.entry_ref(key) { + EntryRef::Vacant(_) => panic!(), + EntryRef::Occupied(e) => assert_eq!(key, e.key()), + } + assert_eq!(a.len(), 1); + assert_eq!(a[key], value); + } + + #[test] + fn test_vacant_entry_key() { + let mut a = HashMap::new(); + let key = "hello there"; + let value = "value goes here"; + + assert!(a.is_empty()); + match a.entry(key) { + Occupied(_) => panic!(), + Vacant(e) => { + assert_eq!(key, *e.key()); + e.insert(value); + } + } + assert_eq!(a.len(), 1); + assert_eq!(a[key], value); + } + + #[test] + fn test_vacant_entry_ref_key() { + let mut a: HashMap = HashMap::new(); + let key = "hello there"; + let value = "value goes here"; + + assert!(a.is_empty()); + match a.entry_ref(key) { + EntryRef::Occupied(_) => panic!(), + EntryRef::Vacant(e) => { + assert_eq!(key, e.key()); + e.insert(value); + } + } + assert_eq!(a.len(), 1); + assert_eq!(a[key], value); + } + + #[test] + fn test_occupied_entry_replace_entry_with() { + let mut a = HashMap::new(); + + let key = "a key"; + let value = "an initial value"; + let new_value = "a new value"; + + let entry = a.entry(key).insert(value).replace_entry_with(|k, v| { + assert_eq!(k, &key); + assert_eq!(v, value); + Some(new_value) + }); + + match entry { + Occupied(e) => { + assert_eq!(e.key(), &key); + assert_eq!(e.get(), &new_value); + } + Vacant(_) => panic!(), + } + + assert_eq!(a[key], new_value); + assert_eq!(a.len(), 1); + + let entry = match a.entry(key) { + Occupied(e) => e.replace_entry_with(|k, v| { + assert_eq!(k, &key); + assert_eq!(v, new_value); + None + }), + Vacant(_) => panic!(), + }; + + match entry { + Vacant(e) => assert_eq!(e.key(), &key), + Occupied(_) => panic!(), + } + + assert!(!a.contains_key(key)); + assert_eq!(a.len(), 0); + } + + #[test] + fn test_occupied_entry_ref_replace_entry_with() { + let mut a: HashMap = HashMap::new(); + + let key = "a key"; + let value = "an initial value"; + let new_value = "a new value"; + + let entry = a.entry_ref(key).insert(value).replace_entry_with(|k, v| { + assert_eq!(k, key); + assert_eq!(v, value); + Some(new_value) + }); + + match entry { + EntryRef::Occupied(e) => { + assert_eq!(e.key(), key); + assert_eq!(e.get(), &new_value); + } + EntryRef::Vacant(_) => panic!(), + } + + assert_eq!(a[key], new_value); + assert_eq!(a.len(), 1); + + let entry = match a.entry_ref(key) { + EntryRef::Occupied(e) => e.replace_entry_with(|k, v| { + assert_eq!(k, key); + assert_eq!(v, new_value); + None + }), + EntryRef::Vacant(_) => panic!(), + }; + + match entry { + EntryRef::Vacant(e) => assert_eq!(e.key(), key), + EntryRef::Occupied(_) => panic!(), + } + + assert!(!a.contains_key(key)); + assert_eq!(a.len(), 0); + } + + #[test] + fn test_entry_and_replace_entry_with() { + let mut a = HashMap::new(); + + let key = "a key"; + let value = "an initial value"; + let new_value = "a 
new value"; + + let entry = a.entry(key).and_replace_entry_with(|_, _| panic!()); + + match entry { + Vacant(e) => assert_eq!(e.key(), &key), + Occupied(_) => panic!(), + } + + a.insert(key, value); + + let entry = a.entry(key).and_replace_entry_with(|k, v| { + assert_eq!(k, &key); + assert_eq!(v, value); + Some(new_value) + }); + + match entry { + Occupied(e) => { + assert_eq!(e.key(), &key); + assert_eq!(e.get(), &new_value); + } + Vacant(_) => panic!(), + } + + assert_eq!(a[key], new_value); + assert_eq!(a.len(), 1); + + let entry = a.entry(key).and_replace_entry_with(|k, v| { + assert_eq!(k, &key); + assert_eq!(v, new_value); + None + }); + + match entry { + Vacant(e) => assert_eq!(e.key(), &key), + Occupied(_) => panic!(), + } + + assert!(!a.contains_key(key)); + assert_eq!(a.len(), 0); + } + + #[test] + fn test_entry_ref_and_replace_entry_with() { + let mut a = HashMap::new(); + + let key = "a key"; + let value = "an initial value"; + let new_value = "a new value"; + + let entry = a.entry_ref(key).and_replace_entry_with(|_, _| panic!()); + + match entry { + EntryRef::Vacant(e) => assert_eq!(e.key(), key), + EntryRef::Occupied(_) => panic!(), + } + + a.insert(key.to_owned(), value); + + let entry = a.entry_ref(key).and_replace_entry_with(|k, v| { + assert_eq!(k, key); + assert_eq!(v, value); + Some(new_value) + }); + + match entry { + EntryRef::Occupied(e) => { + assert_eq!(e.key(), key); + assert_eq!(e.get(), &new_value); + } + EntryRef::Vacant(_) => panic!(), + } + + assert_eq!(a[key], new_value); + assert_eq!(a.len(), 1); + + let entry = a.entry_ref(key).and_replace_entry_with(|k, v| { + assert_eq!(k, key); + assert_eq!(v, new_value); + None + }); + + match entry { + EntryRef::Vacant(e) => assert_eq!(e.key(), key), + EntryRef::Occupied(_) => panic!(), + } + + assert!(!a.contains_key(key)); + assert_eq!(a.len(), 0); + } + + #[test] + fn test_raw_occupied_entry_replace_entry_with() { + let mut a = HashMap::new(); + + let key = "a key"; + let value = "an initial value"; + let new_value = "a new value"; + + let entry = a + .raw_entry_mut() + .from_key(&key) + .insert(key, value) + .replace_entry_with(|k, v| { + assert_eq!(k, &key); + assert_eq!(v, value); + Some(new_value) + }); + + match entry { + RawEntryMut::Occupied(e) => { + assert_eq!(e.key(), &key); + assert_eq!(e.get(), &new_value); + } + RawEntryMut::Vacant(_) => panic!(), + } + + assert_eq!(a[key], new_value); + assert_eq!(a.len(), 1); + + let entry = match a.raw_entry_mut().from_key(&key) { + RawEntryMut::Occupied(e) => e.replace_entry_with(|k, v| { + assert_eq!(k, &key); + assert_eq!(v, new_value); + None + }), + RawEntryMut::Vacant(_) => panic!(), + }; + + match entry { + RawEntryMut::Vacant(_) => {} + RawEntryMut::Occupied(_) => panic!(), + } + + assert!(!a.contains_key(key)); + assert_eq!(a.len(), 0); + } + + #[test] + fn test_raw_entry_and_replace_entry_with() { + let mut a = HashMap::new(); + + let key = "a key"; + let value = "an initial value"; + let new_value = "a new value"; + + let entry = a + .raw_entry_mut() + .from_key(&key) + .and_replace_entry_with(|_, _| panic!()); + + match entry { + RawEntryMut::Vacant(_) => {} + RawEntryMut::Occupied(_) => panic!(), + } + + a.insert(key, value); + + let entry = a + .raw_entry_mut() + .from_key(&key) + .and_replace_entry_with(|k, v| { + assert_eq!(k, &key); + assert_eq!(v, value); + Some(new_value) + }); + + match entry { + RawEntryMut::Occupied(e) => { + assert_eq!(e.key(), &key); + assert_eq!(e.get(), &new_value); + } + RawEntryMut::Vacant(_) => panic!(), + } + + 
+        assert_eq!(a[key], new_value);
+        assert_eq!(a.len(), 1);
+
+        let entry = a
+            .raw_entry_mut()
+            .from_key(&key)
+            .and_replace_entry_with(|k, v| {
+                assert_eq!(k, &key);
+                assert_eq!(v, new_value);
+                None
+            });
+
+        match entry {
+            RawEntryMut::Vacant(_) => {}
+            RawEntryMut::Occupied(_) => panic!(),
+        }
+
+        assert!(!a.contains_key(key));
+        assert_eq!(a.len(), 0);
+    }
+
+    #[test]
+    fn test_replace_entry_with_doesnt_corrupt() {
+        #![allow(deprecated)] //rand
+        // Test for #19292
+        fn check(m: &HashMap<i32, ()>) {
+            for k in m.keys() {
+                assert!(m.contains_key(k), "{k} is in keys() but not in the map?");
+            }
+        }
+
+        let mut m = HashMap::new();
+
+        let mut rng = {
+            let seed = u64::from_le_bytes(*b"testseed");
+            SmallRng::seed_from_u64(seed)
+        };
+
+        // Populate the map with some items.
+        for _ in 0..50 {
+            let x = rng.gen_range(-10..10);
+            m.insert(x, ());
+        }
+
+        for _ in 0..1000 {
+            let x = rng.gen_range(-10..10);
+            m.entry(x).and_replace_entry_with(|_, _| None);
+            check(&m);
+        }
+    }
+
+    #[test]
+    fn test_replace_entry_ref_with_doesnt_corrupt() {
+        #![allow(deprecated)] //rand
+        // Test for #19292
+        fn check(m: &HashMap<String, ()>) {
+            for k in m.keys() {
+                assert!(m.contains_key(k), "{k} is in keys() but not in the map?");
+            }
+        }
+
+        let mut m = HashMap::new();
+
+        let mut rng = {
+            let seed = u64::from_le_bytes(*b"testseed");
+            SmallRng::seed_from_u64(seed)
+        };
+
+        // Populate the map with some items.
+        for _ in 0..50 {
+            let mut x = String::with_capacity(1);
+            x.push(rng.gen_range('a'..='z'));
+            m.insert(x, ());
+        }
+
+        for _ in 0..1000 {
+            let mut x = String::with_capacity(1);
+            x.push(rng.gen_range('a'..='z'));
+            m.entry_ref(x.as_str()).and_replace_entry_with(|_, _| None);
+            check(&m);
+        }
+    }
+
+    #[test]
+    fn test_retain() {
+        let mut map: HashMap<i32, i32> = (0..100).map(|x| (x, x * 10)).collect();
+
+        map.retain(|&k, _| k % 2 == 0);
+        assert_eq!(map.len(), 50);
+        assert_eq!(map[&2], 20);
+        assert_eq!(map[&4], 40);
+        assert_eq!(map[&6], 60);
+    }
+
+    #[test]
+    fn test_extract_if() {
+        {
+            let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x * 10)).collect();
+            let drained = map.extract_if(|&k, _| k % 2 == 0);
+            let mut out = drained.collect::<Vec<_>>();
+            out.sort_unstable();
+            assert_eq!(::rust_alloc::vec![(0, 0), (2, 20), (4, 40), (6, 60)], out);
+            assert_eq!(map.len(), 4);
+        }
+        {
+            let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x * 10)).collect();
+            map.extract_if(|&k, _| k % 2 == 0).for_each(drop);
+            assert_eq!(map.len(), 4);
+        }
+    }
+
+    #[test]
+    #[cfg_attr(miri, ignore)] // FIXME: no OOM signalling (https://github.com/rust-lang/miri/issues/613)
+    fn test_try_reserve() {
+        use crate::Error::{AllocError, CapacityOverflow};
+
+        const MAX_ISIZE: usize = isize::MAX as usize;
+
+        let mut empty_bytes: HashMap<u8, u8> = HashMap::new();
+
+        if let Err(CapacityOverflow) = empty_bytes.try_reserve(usize::MAX) {
+        } else {
+            panic!("usize::MAX should trigger an overflow!");
+        }
+
+        if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_ISIZE) {
+        } else {
+            panic!("isize::MAX should trigger an overflow!");
+        }
+
+        if let Err(AllocError { .. }) = empty_bytes.try_reserve(MAX_ISIZE / 5) {
+        } else {
+            // This may succeed if there is enough free memory. Attempt to
+            // allocate a few more hashmaps to ensure the allocation will fail.
+            let mut empty_bytes2: HashMap<u8, u8> = HashMap::new();
+            let _ = empty_bytes2.try_reserve(MAX_ISIZE / 5);
+            let mut empty_bytes3: HashMap<u8, u8> = HashMap::new();
+            let _ = empty_bytes3.try_reserve(MAX_ISIZE / 5);
+            let mut empty_bytes4: HashMap<u8, u8> = HashMap::new();
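+            // Several multi-exabyte reservations have now been attempted; a
+            // further identical request on `empty_bytes4` is expected to
+            // finally fail with an allocation error.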
+            if let Err(AllocError { .. }) = empty_bytes4.try_reserve(MAX_ISIZE / 5) {
+            } else {
+                panic!("isize::MAX / 5 should trigger an OOM!");
+            }
+        }
+    }
+
+    #[test]
+    fn test_raw_entry() {
+        use super::RawEntryMut::{Occupied, Vacant};
+
+        let xs = [(1_i32, 10_i32), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
+
+        let mut map: HashMap<_, _> = xs.iter().copied().collect();
+
+        let compute_hash = |map: &HashMap<i32, i32>, k: i32| -> u64 {
+            super::make_hash::<i32, _>(map.hasher(), &k)
+        };
+
+        // Existing key (insert)
+        match map.raw_entry_mut().from_key(&1) {
+            Vacant(_) => unreachable!(),
+            Occupied(mut view) => {
+                assert_eq!(view.get(), &10);
+                assert_eq!(view.insert(100), 10);
+            }
+        }
+        let hash1 = compute_hash(&map, 1);
+        assert_eq!(map.raw_entry().from_key(&1).unwrap(), (&1, &100));
+        assert_eq!(
+            map.raw_entry().from_hash(hash1, |k| *k == 1).unwrap(),
+            (&1, &100)
+        );
+        assert_eq!(
+            map.raw_entry().from_key_hashed_nocheck(hash1, &1).unwrap(),
+            (&1, &100)
+        );
+        assert_eq!(map.len(), 6);
+
+        // Existing key (update)
+        match map.raw_entry_mut().from_key(&2) {
+            Vacant(_) => unreachable!(),
+            Occupied(mut view) => {
+                let v = view.get_mut();
+                let new_v = (*v) * 10;
+                *v = new_v;
+            }
+        }
+        let hash2 = compute_hash(&map, 2);
+        assert_eq!(map.raw_entry().from_key(&2).unwrap(), (&2, &200));
+        assert_eq!(
+            map.raw_entry().from_hash(hash2, |k| *k == 2).unwrap(),
+            (&2, &200)
+        );
+        assert_eq!(
+            map.raw_entry().from_key_hashed_nocheck(hash2, &2).unwrap(),
+            (&2, &200)
+        );
+        assert_eq!(map.len(), 6);
+
+        // Existing key (take)
+        let hash3 = compute_hash(&map, 3);
+        match map.raw_entry_mut().from_key_hashed_nocheck(hash3, &3) {
+            Vacant(_) => unreachable!(),
+            Occupied(view) => {
+                assert_eq!(view.remove_entry(), (3, 30));
+            }
+        }
+        assert_eq!(map.raw_entry().from_key(&3), None);
+        assert_eq!(map.raw_entry().from_hash(hash3, |k| *k == 3), None);
+        assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash3, &3), None);
+        assert_eq!(map.len(), 5);
+
+        // Nonexistent key (insert)
+        match map.raw_entry_mut().from_key(&10) {
+            Occupied(_) => unreachable!(),
+            Vacant(view) => {
+                assert_eq!(view.insert(10, 1000), (&mut 10, &mut 1000));
+            }
+        }
+        assert_eq!(map.raw_entry().from_key(&10).unwrap(), (&10, &1000));
+        assert_eq!(map.len(), 6);
+
+        // Ensure all lookup methods produce equivalent results.
+        for k in 0..12 {
+            let hash = compute_hash(&map, k);
+            let v = map.get(&k).copied();
+            let kv = v.as_ref().map(|v| (&k, v));
+
+            assert_eq!(map.raw_entry().from_key(&k), kv);
+            assert_eq!(map.raw_entry().from_hash(hash, |q| *q == k), kv);
+            assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &k), kv);
+
+            match map.raw_entry_mut().from_key(&k) {
+                Occupied(o) => assert_eq!(Some(o.get_key_value()), kv),
+                Vacant(_) => assert_eq!(v, None),
+            }
+            match map.raw_entry_mut().from_key_hashed_nocheck(hash, &k) {
+                Occupied(o) => assert_eq!(Some(o.get_key_value()), kv),
+                Vacant(_) => assert_eq!(v, None),
+            }
+            match map.raw_entry_mut().from_hash(hash, |q| *q == k) {
+                Occupied(o) => assert_eq!(Some(o.get_key_value()), kv),
+                Vacant(_) => assert_eq!(v, None),
+            }
+        }
+    }
+
+    #[test]
+    fn test_key_without_hash_impl() {
+        #[derive(Debug)]
+        struct IntWrapper(u64);
+
+        let mut m: HashMap<IntWrapper, (), ()> = HashMap::default();
+        {
+            assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_none());
+        }
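+        // Each block below drives the raw-entry API with an externally
+        // supplied hash (the wrapped integer itself), since `IntWrapper`
+        // deliberately implements neither `Hash` nor `Eq`.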
+        {
+            let vacant_entry = match m.raw_entry_mut().from_hash(0, |k| k.0 == 0) {
+                RawEntryMut::Occupied(..) => panic!("Found entry for key 0"),
+                RawEntryMut::Vacant(e) => e,
+            };
+            vacant_entry.insert_with_hasher(0, IntWrapper(0), (), |k| k.0);
+        }
+        {
+            assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_some());
+            assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_none());
+            assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
+        }
+        {
+            let vacant_entry = match m.raw_entry_mut().from_hash(1, |k| k.0 == 1) {
+                RawEntryMut::Occupied(..) => panic!("Found entry for key 1"),
+                RawEntryMut::Vacant(e) => e,
+            };
+            vacant_entry.insert_with_hasher(1, IntWrapper(1), (), |k| k.0);
+        }
+        {
+            assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_some());
+            assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_some());
+            assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
+        }
+        {
+            let occupied_entry = match m.raw_entry_mut().from_hash(0, |k| k.0 == 0) {
+                RawEntryMut::Occupied(e) => e,
+                RawEntryMut::Vacant(..) => panic!("Couldn't find entry for key 0"),
+            };
+            occupied_entry.remove();
+        }
+        assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_none());
+        assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_some());
+        assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
+    }
+
+    #[test]
+    fn test_into_iter_refresh() {
+        #[cfg(miri)]
+        const N: usize = 32;
+        #[cfg(not(miri))]
+        const N: usize = 128;
+
+        let mut rng = rand::thread_rng();
+        for n in 0..N {
+            let mut map = HashMap::new();
+            for i in 0..n {
+                assert!(map.try_insert(i, 2 * i).unwrap().is_none());
+            }
+            let hash_builder = map.hasher().clone();
+
+            let mut it = unsafe { map.table.iter() };
+            assert_eq!(it.len(), n);
+
+            let mut i = 0;
+            let mut left = n;
+            let mut removed = Vec::new();
+            loop {
+                // occasionally remove some elements
+                if i < n && rng.gen_bool(0.1) {
+                    let hash_value = super::make_hash(&hash_builder, &i);
+
+                    unsafe {
+                        let e = into_ok(map.table.find(
+                            &mut (),
+                            hash_value,
+                            |_: &mut (), q: &(usize, _)| Ok(q.0.eq(&i)),
+                        ));
+                        if let Some(e) = e {
+                            it.reflect_remove(&e);
+                            let t = map.table.remove(e).0;
+                            removed.push(t);
+                            left -= 1;
+                        } else {
+                            assert!(removed.contains(&(i, 2 * i)), "{i} not in {removed:?}");
+                            let e = into_ok_try(map.table.insert(
+                                &mut (),
+                                hash_value,
+                                (i, 2 * i),
+                                super::make_hasher(&hash_builder),
+                            ))
+                            .unwrap();
+                            it.reflect_insert(&e);
+                            if let Some(p) = removed.iter().position(|e| e == &(i, 2 * i)) {
+                                removed.swap_remove(p);
+                            }
+                            left += 1;
+                        }
+                    }
+                }
+
+                let e = it.next();
+                if e.is_none() {
+                    break;
+                }
+                assert!(i < n);
+                let t = unsafe { e.unwrap().as_ref() };
+                assert!(!removed.contains(t));
+                let (key, value) = t;
+                assert_eq!(*value, 2 * key);
+                i += 1;
+            }
+            assert!(i <= n);
+
+            // just for safety:
+            assert_eq!(map.table.len(), left);
+        }
+    }
+
+    #[test]
+    fn test_const_with_hasher() {
+        #[derive(Clone)]
+        struct MyHasher;
+        impl BuildHasher for MyHasher {
+            type Hasher = DefaultHasher;
+
+            fn build_hasher(&self) -> DefaultHasher {
+                DefaultHasher::new()
+            }
+        }
+
+        const EMPTY_MAP: HashMap<u32, String, MyHasher> = HashMap::with_hasher(MyHasher);
+
+        let mut map = EMPTY_MAP;
+        map.try_insert(17, "seventeen".to_owned()).unwrap();
+        assert_eq!("seventeen", map[&17]);
+    }
+
+    #[test]
+    fn test_get_each_mut() {
+        let mut map = HashMap::new();
+        map.try_insert("foo".to_owned(), 0).unwrap();
+        map.try_insert("bar".to_owned(), 10).unwrap();
+        map.try_insert("baz".to_owned(), 20).unwrap();
+        map.try_insert("qux".to_owned(), 30).unwrap();
+
+        let xs = map.get_many_mut(["foo", "qux"]);
+        assert_eq!(xs, Some([&mut 0, &mut 30]));
+
+        let xs = map.get_many_mut(["foo", "dud"]);
+        assert_eq!(xs, None);
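+        // Passing the same key twice must fail: `get_many_mut` would
+        // otherwise hand out two `&mut` references to one value.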
+
+        let xs = map.get_many_mut(["foo", "foo"]);
+        assert_eq!(xs, None);
+
+        let ys = map.get_many_key_value_mut(["bar", "baz"]);
+        assert_eq!(
+            ys,
+            Some([(&"bar".to_owned(), &mut 10), (&"baz".to_owned(), &mut 20),]),
+        );
+
+        let ys = map.get_many_key_value_mut(["bar", "dip"]);
+        assert_eq!(ys, None);
+
+        let ys = map.get_many_key_value_mut(["baz", "baz"]);
+        assert_eq!(ys, None);
+    }
+
+    #[test]
+    #[should_panic = "panic in drop"]
+    fn test_clone_from_double_drop() {
+        struct CheckedDrop {
+            panic_in_drop: bool,
+            dropped: bool,
+        }
+        impl Drop for CheckedDrop {
+            fn drop(&mut self) {
+                if self.panic_in_drop {
+                    self.dropped = true;
+                    panic!("panic in drop");
+                }
+                if self.dropped {
+                    panic!("double drop");
+                }
+                self.dropped = true;
+            }
+        }
+        impl TryClone for CheckedDrop {
+            fn try_clone(&self) -> Result<Self, Error> {
+                Ok(Self {
+                    panic_in_drop: self.panic_in_drop,
+                    dropped: self.dropped,
+                })
+            }
+        }
+        const DISARMED: CheckedDrop = CheckedDrop {
+            panic_in_drop: false,
+            dropped: false,
+        };
+        const ARMED: CheckedDrop = CheckedDrop {
+            panic_in_drop: true,
+            dropped: false,
+        };
+
+        let mut map1 = HashMap::new();
+        map1.try_insert(1, DISARMED).unwrap();
+        map1.try_insert(2, DISARMED).unwrap();
+        map1.try_insert(3, DISARMED).unwrap();
+        map1.try_insert(4, DISARMED).unwrap();
+
+        let mut map2 = HashMap::new();
+        map2.try_insert(1, DISARMED).unwrap();
+        map2.try_insert(2, ARMED).unwrap();
+        map2.try_insert(3, DISARMED).unwrap();
+        map2.try_insert(4, DISARMED).unwrap();
+
+        map2.try_clone_from(&map1).unwrap();
+    }
+
+    #[test]
+    #[should_panic = "panic in clone"]
+    fn test_clone_from_memory_leaks() {
+        use ::rust_alloc::vec::Vec;
+
+        struct CheckedClone {
+            panic_in_clone: bool,
+            need_drop: Vec<i32>,
+        }
+        impl TryClone for CheckedClone {
+            fn try_clone(&self) -> Result<Self, Error> {
+                if self.panic_in_clone {
+                    panic!("panic in clone")
+                }
+
+                Ok(Self {
+                    panic_in_clone: self.panic_in_clone,
+                    need_drop: self.need_drop.clone(),
+                })
+            }
+        }
+        let mut map1 = HashMap::new();
+        map1.try_insert(
+            1,
+            CheckedClone {
+                panic_in_clone: false,
+                need_drop: ::rust_alloc::vec![0, 1, 2],
+            },
+        )
+        .unwrap();
+        map1.try_insert(
+            2,
+            CheckedClone {
+                panic_in_clone: false,
+                need_drop: ::rust_alloc::vec![3, 4, 5],
+            },
+        )
+        .unwrap();
+        map1.try_insert(
+            3,
+            CheckedClone {
+                panic_in_clone: true,
+                need_drop: ::rust_alloc::vec![6, 7, 8],
+            },
+        )
+        .unwrap();
+        let _map2 = map1.try_clone().unwrap();
+    }
+
+    struct MyAllocInner {
+        drop_count: Arc<AtomicI8>,
+    }
+
+    #[derive(Clone)]
+    struct MyAlloc {
+        _inner: Arc<MyAllocInner>,
+    }
+
+    impl MyAlloc {
+        fn new(drop_count: Arc<AtomicI8>) -> Self {
+            MyAlloc {
+                _inner: Arc::new(MyAllocInner { drop_count }),
+            }
+        }
+    }
+
+    impl Drop for MyAllocInner {
+        fn drop(&mut self) {
+            println!("MyAlloc freed.");
+            self.drop_count.fetch_sub(1, Ordering::SeqCst);
+        }
+    }
+
+    unsafe impl Allocator for MyAlloc {
+        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+            let g = Global;
+            g.allocate(layout)
+        }
+
+        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+            let g = Global;
+            g.deallocate(ptr, layout)
+        }
+    }
+
+    #[test]
+    fn test_hashmap_into_iter_bug() {
+        let dropped: Arc<AtomicI8> = Arc::new(AtomicI8::new(1));
+
+        {
+            let mut map = HashMap::try_with_capacity_in(10, MyAlloc::new(dropped.clone())).unwrap();
+            for i in 0..10 {
+                map.entry(i).or_try_insert_with(|| "i".to_string()).unwrap();
+            }
+
+            for (k, v) in map {
+                println!("{}, {}", k, v);
+            }
+        }
+
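+        // The scope above consumed the map, so every clone of the allocator
+        // is gone; `MyAllocInner::drop` decrements the shared counter back
+        // to zero.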
+        // All allocator clones should already be dropped.
+        assert_eq!(dropped.load(Ordering::SeqCst), 0);
+    }
+
+    #[derive(Debug)]
+    struct CheckedCloneDrop<T> {
+        panic_in_clone: bool,
+        panic_in_drop: bool,
+        dropped: bool,
+        data: T,
+    }
+
+    impl<T> CheckedCloneDrop<T> {
+        fn new(panic_in_clone: bool, panic_in_drop: bool, data: T) -> Self {
+            CheckedCloneDrop {
+                panic_in_clone,
+                panic_in_drop,
+                dropped: false,
+                data,
+            }
+        }
+    }
+
+    impl<T> TryClone for CheckedCloneDrop<T>
+    where
+        T: TryClone,
+    {
+        fn try_clone(&self) -> Result<Self, Error> {
+            if self.panic_in_clone {
+                panic!("panic in clone")
+            }
+
+            Ok(Self {
+                panic_in_clone: self.panic_in_clone,
+                panic_in_drop: self.panic_in_drop,
+                dropped: self.dropped,
+                data: self.data.try_clone()?,
+            })
+        }
+    }
+
+    impl<T> Drop for CheckedCloneDrop<T> {
+        fn drop(&mut self) {
+            if self.panic_in_drop {
+                self.dropped = true;
+                panic!("panic in drop");
+            }
+            if self.dropped {
+                panic!("double drop");
+            }
+            self.dropped = true;
+        }
+    }
+
+    /// Returns a hashmap with a predefined distribution of elements. All
+    /// elements will be located in the same order as they are returned by the
+    /// iterator.
+    ///
+    /// This function does not panic, but returns an error as a `String`
+    /// to distinguish between a test panic and an error in the input data.
+    fn get_test_map<I, T, A>(
+        iter: I,
+        mut fun: impl FnMut(u64) -> T,
+        alloc: A,
+    ) -> Result<HashMap<u64, CheckedCloneDrop<T>, DefaultHashBuilder, A>, String>
+    where
+        I: Iterator<Item = (bool, bool)> + Clone + ExactSizeIterator,
+        A: Allocator,
+        T: PartialEq + core::fmt::Debug,
+    {
+        use crate::hashbrown::scopeguard::guard;
+
+        let mut map: HashMap<u64, CheckedCloneDrop<T>, _, A> =
+            HashMap::try_with_capacity_in(iter.size_hint().0, alloc).unwrap();
+        {
+            let mut guard = guard(&mut map, |map| {
+                for (_, value) in map.iter_mut() {
+                    value.panic_in_drop = false
+                }
+            });
+
+            let mut count = 0;
+            // Hash and Key must be equal to each other for controlling the elements placement.
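+            // (The `u64` key doubles as its own hash below: the insert call
+            // passes `count` as the hash and a hasher that just returns the
+            // key, giving the test full control over element placement.)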
+            for (panic_in_clone, panic_in_drop) in iter.clone() {
+                if core::mem::needs_drop::<T>() && panic_in_drop {
+                    return Err(String::from(
+                        "panic_in_drop can be set with a type that doesn't need to be dropped",
+                    ));
+                }
+
+                into_ok_try(guard.table.insert(
+                    &mut (),
+                    count,
+                    (
+                        count,
+                        CheckedCloneDrop::new(panic_in_clone, panic_in_drop, fun(count)),
+                    ),
+                    |_: &mut (), (k, _): &(u64, _)| Ok(*k),
+                ))
+                .unwrap();
+
+                count += 1;
+            }
+
+            // Let's check that all elements are located as we wanted
+            let mut check_count = 0;
+            for ((key, value), (panic_in_clone, panic_in_drop)) in guard.iter().zip(iter) {
+                if *key != check_count {
+                    return Err(format!(
+                        "key != check_count,\nkey: `{}`,\ncheck_count: `{}`",
+                        key, check_count
+                    ));
+                }
+                if value.dropped
+                    || value.panic_in_clone != panic_in_clone
+                    || value.panic_in_drop != panic_in_drop
+                    || value.data != fun(check_count)
+                {
+                    return Err(format!(
+                        "Value is not equal to expected,\nvalue: `{:?}`,\nexpected: \
+                        `CheckedCloneDrop {{ panic_in_clone: {}, panic_in_drop: {}, dropped: {}, data: {:?} }}`",
+                        value, panic_in_clone, panic_in_drop, false, fun(check_count)
+                    ));
+                }
+                check_count += 1;
+            }
+
+            if guard.len() != check_count as usize {
+                return Err(format!(
+                    "map.len() != check_count,\nmap.len(): `{}`,\ncheck_count: `{}`",
+                    guard.len(),
+                    check_count
+                ));
+            }
+
+            if count != check_count {
+                return Err(format!(
+                    "count != check_count,\ncount: `{}`,\ncheck_count: `{}`",
+                    count, check_count
+                ));
+            }
+
+            core::mem::forget(guard);
+        }
+        Ok(map)
+    }
+
+    const DISARMED: bool = false;
+    const ARMED: bool = true;
+
+    const ARMED_FLAGS: [bool; 8] = [
+        DISARMED, DISARMED, DISARMED, ARMED, DISARMED, DISARMED, DISARMED, DISARMED,
+    ];
+
+    const DISARMED_FLAGS: [bool; 8] = [
+        DISARMED, DISARMED, DISARMED, DISARMED, DISARMED, DISARMED, DISARMED, DISARMED,
+    ];
+
+    #[test]
+    #[should_panic = "panic in clone"]
+    fn test_clone_memory_leaks_and_double_drop_one() {
+        let dropped: Arc<AtomicI8> = Arc::new(AtomicI8::new(2));
+
+        {
+            assert_eq!(ARMED_FLAGS.len(), DISARMED_FLAGS.len());
+
+            let map: HashMap<u64, CheckedCloneDrop<Vec<u64>>, DefaultHashBuilder, MyAlloc> =
+                match get_test_map(
+                    ARMED_FLAGS.into_iter().zip(DISARMED_FLAGS),
+                    |n| ::rust_alloc::vec![n],
+                    MyAlloc::new(dropped.clone()),
+                ) {
+                    Ok(map) => map,
+                    Err(msg) => panic!("{msg}"),
+                };
+
+            // Clone should normally clone a few elements, and then (when the
+            // clone function panics), deallocate both its own memory, memory
+            // of `dropped: Arc<AtomicI8>` and the memory of already cloned
+            // elements (Vec<u64> memory inside CheckedCloneDrop).
+            let _map2 = map.try_clone().unwrap();
+        }
+    }
+
+    #[test]
+    #[should_panic = "panic in drop"]
+    fn test_clone_memory_leaks_and_double_drop_two() {
+        let dropped: Arc<AtomicI8> = Arc::new(AtomicI8::new(2));
+
+        {
+            assert_eq!(ARMED_FLAGS.len(), DISARMED_FLAGS.len());
+
+            let map: HashMap<u64, CheckedCloneDrop<u64>, DefaultHashBuilder, _> = match get_test_map(
+                DISARMED_FLAGS.into_iter().zip(DISARMED_FLAGS),
+                |n| n,
+                MyAlloc::new(dropped.clone()),
+            ) {
+                Ok(map) => map,
+                Err(msg) => panic!("{msg}"),
+            };
+
+            let mut map2 = match get_test_map(
+                DISARMED_FLAGS.into_iter().zip(ARMED_FLAGS),
+                |n| n,
+                MyAlloc::new(dropped.clone()),
+            ) {
+                Ok(map) => map,
+                Err(msg) => panic!("{msg}"),
+            };
+
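+            // `map2`'s elements were built with ARMED drop flags, so the
+            // expected "panic in drop" surfaces from one of its destructors
+            // while `try_clone_from` replaces them below.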
+            // The `clone_from` should try to drop the elements of `map2` without
+            // double drop and leaking the allocator. Elements that have not been
+            // dropped leak their memory.
+            map2.try_clone_from(&map).unwrap();
+        }
+    }
+
+    /// We check that we have a working table if the clone operation from another
+    /// thread ended in a panic (when buckets of maps are equal to each other).
+    #[test]
+    fn test_catch_panic_clone_from_when_len_is_equal() {
+        let dropped: Arc<AtomicI8> = Arc::new(AtomicI8::new(2));
+
+        {
+            assert_eq!(ARMED_FLAGS.len(), DISARMED_FLAGS.len());
+
+            let mut map = match get_test_map(
+                DISARMED_FLAGS.into_iter().zip(DISARMED_FLAGS),
+                |n| ::rust_alloc::vec![n],
+                MyAlloc::new(dropped.clone()),
+            ) {
+                Ok(map) => map,
+                Err(msg) => panic!("{msg}"),
+            };
+
+            thread::scope(|s| {
+                let result: thread::ScopedJoinHandle<'_, String> = s.spawn(|| {
+                    let scope_map = match get_test_map(
+                        ARMED_FLAGS.into_iter().zip(DISARMED_FLAGS),
+                        |n| ::rust_alloc::vec![n * 2],
+                        MyAlloc::new(dropped.clone()),
+                    ) {
+                        Ok(map) => map,
+                        Err(msg) => return msg,
+                    };
+                    if map.table.buckets() != scope_map.table.buckets() {
+                        return format!(
+                            "map.table.buckets() != scope_map.table.buckets(),\nleft: `{}`,\nright: `{}`",
+                            map.table.buckets(),
+                            scope_map.table.buckets()
+                        );
+                    }
+                    map.try_clone_from(&scope_map).unwrap();
+                    "We must fail the cloning!!!".to_owned()
+                });
+                if let Ok(msg) = result.join() {
+                    panic!("{msg}")
+                }
+            });
+
+            // Let's check that all iterators work fine and do not return elements
+            // (especially `RawIterRange`, which does not depend on the number of
+            // elements in the table, but looks directly at the control bytes)
+            //
+            // SAFETY: We know for sure that `RawTable` will outlive
+            // the returned `RawIter / RawIterRange` iterator.
+            assert_eq!(map.len(), 0);
+            assert_eq!(map.iter().count(), 0);
+            assert_eq!(unsafe { map.table.iter().count() }, 0);
+            assert_eq!(unsafe { map.table.iter().iter.count() }, 0);
+
+            for idx in 0..map.table.buckets() {
+                let idx = idx as u64;
+                assert!(
+                    into_ok(
+                        map.table
+                            .find(&mut (), idx, |_: &mut (), (k, _): &(u64, _)| Ok(*k == idx))
+                    )
+                    .is_none(),
+                    "Index: {idx}"
+                );
+            }
+        }
+
+        // All allocator clones should already be dropped.
+        assert_eq!(dropped.load(Ordering::SeqCst), 0);
+    }
+
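+    // A minimal success-path sketch of the fallible-clone API exercised by
+    // the failure-injection tests around here; it only uses `try_insert` and
+    // `try_clone` as they are used elsewhere in this module.
+    #[test]
+    fn test_try_clone_happy_path_sketch() {
+        let mut a = HashMap::new();
+        a.try_insert(1_u64, 10_u64).unwrap();
+        a.try_insert(2, 20).unwrap();
+
+        // `try_clone` deep-copies the table through `TryClone`, surfacing
+        // allocation failure as an error instead of aborting.
+        let b = a.try_clone().unwrap();
+        assert_eq!(b.len(), 2);
+        assert_eq!(b[&1], 10);
+        assert_eq!(b[&2], 20);
+    }
+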
+    /// We check that we have a working table if the clone operation from another
+    /// thread ended in a panic (when buckets of maps are not equal to each other).
+    #[test]
+    fn test_catch_panic_clone_from_when_len_is_not_equal() {
+        let dropped: Arc<AtomicI8> = Arc::new(AtomicI8::new(2));
+
+        {
+            assert_eq!(ARMED_FLAGS.len(), DISARMED_FLAGS.len());
+
+            let mut map = match get_test_map(
+                [DISARMED].into_iter().zip([DISARMED]),
+                |n| ::rust_alloc::vec![n],
+                MyAlloc::new(dropped.clone()),
+            ) {
+                Ok(map) => map,
+                Err(msg) => panic!("{msg}"),
+            };
+
+            thread::scope(|s| {
+                let result: thread::ScopedJoinHandle<'_, String> = s.spawn(|| {
+                    let scope_map = match get_test_map(
+                        ARMED_FLAGS.into_iter().zip(DISARMED_FLAGS),
+                        |n| ::rust_alloc::vec![n * 2],
+                        MyAlloc::new(dropped.clone()),
+                    ) {
+                        Ok(map) => map,
+                        Err(msg) => return msg,
+                    };
+                    if map.table.buckets() == scope_map.table.buckets() {
+                        return format!(
+                            "map.table.buckets() == scope_map.table.buckets(): `{}`",
+                            map.table.buckets()
+                        );
+                    }
+                    map.try_clone_from(&scope_map).unwrap();
+                    "We must fail the cloning!!!".to_owned()
+                });
+                if let Ok(msg) = result.join() {
+                    panic!("{msg}")
+                }
+            });
+
+            // Let's check that all iterators work fine and do not return elements
+            // (especially `RawIterRange`, which does not depend on the number of
+            // elements in the table, but looks directly at the control bytes)
+            //
+            // SAFETY: We know for sure that `RawTable` will outlive
+            // the returned `RawIter / RawIterRange` iterator.
+            assert_eq!(map.len(), 0);
+            assert_eq!(map.iter().count(), 0);
+            assert_eq!(unsafe { map.table.iter().count() }, 0);
+            assert_eq!(unsafe { map.table.iter().iter.count() }, 0);
+
+            for idx in 0..map.table.buckets() {
+                let idx = idx as u64;
+                assert!(
+                    into_ok(
+                        map.table
+                            .find(&mut (), idx, |_: &mut (), (k, _): &(u64, _)| Ok(*k == idx))
+                    )
+                    .is_none(),
+                    "Index: {idx}"
+                );
+            }
+        }
+
+        // All allocator clones should already be dropped.
+        assert_eq!(dropped.load(Ordering::SeqCst), 0);
+    }
+}
diff --git a/crates/rune-alloc/src/alloc/hashbrown/mod.rs b/crates/rune-alloc/src/alloc/hashbrown/mod.rs
new file mode 100644
index 000000000..8c754e202
--- /dev/null
+++ b/crates/rune-alloc/src/alloc/hashbrown/mod.rs
@@ -0,0 +1,143 @@
+#![allow(clippy::missing_safety_doc)]
+
+pub mod raw;
+mod scopeguard;
+
+pub use self::map::HashMap;
+pub mod map;
+
+pub use self::set::HashSet;
+pub mod set;
+
+use super::CustomError;
+use core::marker::PhantomData;
+
+/// Trait used to implement custom equality implementations which are not solely
+/// based on traits.
+pub trait EqFn<C: ?Sized, T: ?Sized, E> {
+    fn eq(&self, cx: &mut C, key: &T) -> Result<bool, E>;
+
+    #[doc(hidden)]
+    fn into_tuple<V>(self) -> TupleFn<Self, V>
+    where
+        Self: Sized,
+    {
+        TupleFn {
+            this: self,
+            _marker: PhantomData,
+        }
+    }
+}
+
+impl<C: ?Sized, T: ?Sized, E, U> EqFn<C, T, E> for U
+where
+    U: Fn(&mut C, &T) -> Result<bool, E>,
+{
+    #[inline]
+    fn eq(&self, cx: &mut C, key: &T) -> Result<bool, E> {
+        self(cx, key)
+    }
+}
+
+/// Trait used to implement custom hash implementations which are not solely
+/// based on traits.
+pub trait HasherFn<C: ?Sized, T: ?Sized, E> {
+    fn hash(&self, cx: &mut C, key: &T) -> Result<u64, E>;
+
+    #[doc(hidden)]
+    fn into_tuple<V>(self) -> TupleFn<Self, V>
+    where
+        Self: Sized,
+    {
+        TupleFn {
+            this: self,
+            _marker: PhantomData,
+        }
+    }
+}
+
+impl<C: ?Sized, T: ?Sized, E, U> HasherFn<C, T, E> for U
+where
+    U: Fn(&mut C, &T) -> Result<u64, E>,
+{
+    #[inline]
+    fn hash(&self, cx: &mut C, key: &T) -> Result<u64, E> {
+        self(cx, key)
+    }
+}
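+
+// A minimal sketch of how the blanket impls above are meant to be used
+// (assumed usage, not part of the original patch): any closure of the
+// matching shape acts as an `EqFn`/`HasherFn`, with the context `C` threaded
+// through so equality and hashing can be fallible and stateful.
+#[cfg(test)]
+mod fn_trait_sketch {
+    use super::{EqFn, HasherFn};
+    use core::convert::Infallible;
+
+    #[test]
+    fn closures_as_eq_and_hasher() {
+        // An infallible equality check and hasher over `u32` keys,
+        // ignoring the context argument.
+        let eq = |_: &mut (), key: &u32| Ok::<_, Infallible>(*key == 42);
+        let hasher = |_: &mut (), key: &u32| Ok::<_, Infallible>(u64::from(*key));
+
+        assert!(matches!(eq.eq(&mut (), &42), Ok(true)));
+        assert!(matches!(hasher.hash(&mut (), &7), Ok(7)));
+    }
+}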
+
+/// Adapter for [`HasherFn`] for hashing tuples.
+pub struct TupleFn<T, V> {
+    this: T,
+    _marker: PhantomData<V>,
+}
+
+impl<T, C: ?Sized, K, V, E> EqFn<C, (K, V), E> for TupleFn<T, V>
+where
+    T: EqFn<C, K, E>,
+{
+    #[inline]
+    fn eq(&self, cx: &mut C, (key, _): &(K, V)) -> Result<bool, E> {
+        self.this.eq(cx, key)
+    }
+}
+
+impl<T, C: ?Sized, K, V, E> HasherFn<C, (K, V), E> for TupleFn<T, V>
+where
+    T: HasherFn<C, K, E>,
+{
+    #[inline]
+    fn hash(&self, cx: &mut C, (key, _): &(K, V)) -> Result<u64, E> {
+        self.this.hash(cx, key)
+    }
+}
+
+/// Error raised by [`RawTable::find_or_find_insert_slot`].
+///
+/// [`RawTable::find_or_find_insert_slot`]:
+/// crate::hashbrown::raw::RawTable::find_or_find_insert_slot
+pub enum ErrorOrInsertSlot<E> {
+    /// An error was returned.
+    Error(CustomError<E>),
+    /// An insert slot was returned.
+    InsertSlot(raw::InsertSlot),
+}
+
+impl<E> From<CustomError<E>> for ErrorOrInsertSlot<E> {
+    #[inline]
+    fn from(error: CustomError<E>) -> Self {
+        Self::Error(error)
+    }
+}
+
+/// Key equivalence trait.
+///
+/// This trait defines the function used to compare the input value with the map
+/// keys (or set values) during a lookup operation such as [`HashMap::get`] or
+/// [`HashSet::contains`]. It is provided with a blanket implementation based on
+/// the [`Borrow`](core::borrow::Borrow) trait.
+///
+/// # Correctness
+///
+/// Equivalent values must hash to the same value.
+pub trait Equivalent<K: ?Sized> {
+    /// Checks if this value is equivalent to the given key.
+    ///
+    /// Returns `true` if both values are equivalent, and `false` otherwise.
+    ///
+    /// # Correctness
+    ///
+    /// When this function returns `true`, both `self` and `key` must hash to
+    /// the same value.
+    fn equivalent(&self, key: &K) -> bool;
+}
+
+impl<Q: ?Sized, K: ?Sized> Equivalent<K> for Q
+where
+    Q: Eq,
+    K: core::borrow::Borrow<Q>,
+{
+    fn equivalent(&self, key: &K) -> bool {
+        self == key.borrow()
+    }
+}
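+
+// Sketch of the `Equivalent` contract (assumed usage, not part of the
+// original patch): the blanket impl lets a borrowed form stand in for the
+// owned key, e.g. `str` for `String`, because `String: Borrow<str>` and
+// both hash identically.
+#[cfg(test)]
+mod equivalent_sketch {
+    use super::Equivalent;
+
+    #[test]
+    fn str_is_equivalent_to_string() {
+        let owned = ::rust_alloc::string::String::from("key");
+        assert!("key".equivalent(&owned));
+        assert!(!"other".equivalent(&owned));
+    }
+}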
diff --git a/crates/rune/src/hashbrown/fork/raw/bitmask.rs b/crates/rune-alloc/src/alloc/hashbrown/raw/bitmask.rs
similarity index 99%
rename from crates/rune/src/hashbrown/fork/raw/bitmask.rs
rename to crates/rune-alloc/src/alloc/hashbrown/raw/bitmask.rs
index 6576b3c5c..0c4bc9e35 100644
--- a/crates/rune/src/hashbrown/fork/raw/bitmask.rs
+++ b/crates/rune-alloc/src/alloc/hashbrown/raw/bitmask.rs
@@ -111,7 +111,7 @@ impl BitMaskIter {
     /// Returns the bit's previous state.
     #[inline]
     #[allow(clippy::cast_ptr_alignment)]
-    #[cfg(feature = "raw")]
+
     pub(crate) unsafe fn flip(&mut self, index: usize) -> bool {
         // NOTE: The + BITMASK_STRIDE - 1 is to set the high bit.
         let mask = 1 << (index * BITMASK_STRIDE + BITMASK_STRIDE - 1);
diff --git a/crates/rune/src/hashbrown/fork/raw/generic.rs b/crates/rune-alloc/src/alloc/hashbrown/raw/generic.rs
similarity index 99%
rename from crates/rune/src/hashbrown/fork/raw/generic.rs
rename to crates/rune-alloc/src/alloc/hashbrown/raw/generic.rs
index c668b0642..5f20d40e2 100644
--- a/crates/rune/src/hashbrown/fork/raw/generic.rs
+++ b/crates/rune-alloc/src/alloc/hashbrown/raw/generic.rs
@@ -1,6 +1,8 @@
+use core::mem;
+use core::ptr;
+
 use super::bitmask::BitMask;
 use super::EMPTY;
-use core::{mem, ptr};
 
 // Use the native word size as the group size. Using a 64-bit group size on
 // a 32-bit architecture will just end up being more expensive because
diff --git a/crates/rune/src/hashbrown/fork/raw/mod.rs b/crates/rune-alloc/src/alloc/hashbrown/raw/mod.rs
similarity index 67%
rename from crates/rune/src/hashbrown/fork/raw/mod.rs
rename to crates/rune-alloc/src/alloc/hashbrown/raw/mod.rs
index 222a42e92..d1c808121 100644
--- a/crates/rune/src/hashbrown/fork/raw/mod.rs
+++ b/crates/rune-alloc/src/alloc/hashbrown/raw/mod.rs
@@ -1,15 +1,26 @@
-use crate::alloc::alloc::{handle_alloc_error, Layout};
-use crate::hashbrown::fork::scopeguard::guard;
-use crate::hashbrown::fork::TryReserveError;
-use core::convert::Infallible;
+use core::alloc::Layout;
 use core::iter::FusedIterator;
 use core::marker::PhantomData;
 use core::mem;
-use core::mem::ManuallyDrop;
 use core::mem::MaybeUninit;
 use core::ptr::NonNull;
 use core::{hint, ptr};
 
+use crate::hashbrown::scopeguard::{guard, ScopeGuard};
+
+#[cfg(rune_nightly)]
+use crate::alloc::TryCopy;
+use crate::alloc::{Allocator, CustomError, Error, Global, SizedTypeProperties, TryClone};
+// Branch prediction hint. This is currently only available on nightly but it
+// consistently improves performance by 10-15%.
+use crate::hint::{likely, unlikely};
+use crate::ptr::invalid_mut;
+
+#[cfg(test)]
+use crate::alloc::testing::*;
+
+use super::{EqFn, ErrorOrInsertSlot, HasherFn};
+
 cfg_if! {
     // Use the SSE2 implementation if possible: it allows us to scan 16 buckets
     // at once instead of 8. We don't bother with AVX since it would require
@@ -35,86 +46,16 @@ cfg_if! {
     }
 }
 
-mod alloc;
-pub(crate) use self::alloc::{do_alloc, Allocator, Global};
-
 mod bitmask;
 
 use self::bitmask::BitMaskIter;
 use self::imp::Group;
 
-// Branch prediction hint. This is currently only available on nightly but it
-// consistently improves performance by 10-15%.
-#[cfg(not(feature = "nightly"))]
-use core::convert::identity as likely;
-#[cfg(not(feature = "nightly"))]
-use core::convert::identity as unlikely;
-#[cfg(feature = "nightly")]
-use core::intrinsics::{likely, unlikely};
-
-// Use strict provenance functions if available.
-#[cfg(feature = "nightly")]
-use core::ptr::invalid_mut;
-// Implement it with a cast otherwise.
-#[cfg(not(feature = "nightly"))]
-#[inline(always)]
-fn invalid_mut<T>(addr: usize) -> *mut T {
-    addr as *mut T
-}
-
 #[inline]
 unsafe fn offset_from<T>(to: *const T, from: *const T) -> usize {
     to.offset_from(from) as usize
 }
 
-/// Helper for coercing an infallible result into `Ok`.
-#[inline(always)]
-fn into_ok<T>(result: Result<T, Infallible>) -> T {
-    match result {
-        Ok(value) => value,
-        Err(error) => match error {},
-    }
-}
-
-#[inline(always)]
-fn infallible_eq<T>(
-    mut f: impl FnMut(&T) -> bool,
-) -> impl FnMut(&mut (), &T) -> Result<bool, Infallible> {
-    move |_, value| Ok::<_, Infallible>(f(value))
-}
-
-#[inline(always)]
-fn infallible_hasher<T>(f: impl Fn(&T) -> u64) -> impl Fn(&mut (), &T) -> Result<u64, Infallible> {
-    move |_, value| Ok::<_, Infallible>(f(value))
-}
-
-/// Whether memory allocation errors should return an error or abort.
-#[derive(Copy, Clone)]
-enum Fallibility {
-    Fallible,
-    Infallible,
-}
-
-impl Fallibility {
-    /// Error to return on capacity overflow.
-    #[cfg_attr(feature = "inline-more", inline)]
-    fn capacity_overflow(self) -> TryReserveError {
-        match self {
-            Fallibility::Fallible => TryReserveError::CapacityOverflow,
-            Fallibility::Infallible => panic!("Hash table capacity overflow"),
-        }
-    }
-
-    /// Error to return on allocation error.
-    #[cfg_attr(feature = "inline-more", inline)]
-    fn alloc_err(self, layout: Layout) -> TryReserveError {
-        match self {
-            Fallibility::Fallible => TryReserveError::AllocError { layout },
-            Fallibility::Infallible => handle_alloc_error(layout),
-        }
-    }
-}
-
 /// Control byte value for an empty bucket.
 const EMPTY: u8 = 0b1111_1111;
@@ -316,8 +257,6 @@ impl<T> Clone for Bucket<T> {
 }
 
 impl<T> Bucket<T> {
-    const IS_ZERO_SIZED_TYPE: bool = mem::size_of::<T>() == 0;
-
     /// Creates a [`Bucket`] that contains a pointer to the data.
     /// The pointer calculation is performed by calculating the
     /// offset from given `base` pointer (convenience for
@@ -357,12 +296,12 @@ impl<T> Bucket<T> {
     /// must be no greater than the number returned by the function
     /// [`RawTable::buckets`] or [`RawTableInner::buckets`].
     ///
-    /// [`Bucket`]: crate::hashbrown::fork::raw::Bucket
+    /// [`Bucket`]: crate::raw::Bucket
     /// [`<*mut T>::sub`]: https://doc.rust-lang.org/core/primitive.pointer.html#method.sub-1
     /// [`NonNull::new_unchecked`]: https://doc.rust-lang.org/stable/std/ptr/struct.NonNull.html#method.new_unchecked
-    /// [`RawTable::data_end`]: crate::hashbrown::fork::raw::RawTable::data_end
+    /// [`RawTable::data_end`]: crate::raw::RawTable::data_end
     /// [`RawTableInner::data_end`]: RawTableInner::data_end
-    /// [`RawTable::buckets`]: crate::hashbrown::fork::raw::RawTable::buckets
+    /// [`RawTable::buckets`]: crate::raw::RawTable::buckets
     /// [`RawTableInner::buckets`]: RawTableInner::buckets
     #[inline]
     unsafe fn from_base_index(base: NonNull<T>, index: usize) -> Self {
@@ -386,7 +325,7 @@ impl<T> Bucket<T> {
         //
         // where: T0...Tlast - our stored data; C0...Clast - control bytes
         // or metadata for data.
-        let ptr = if Self::IS_ZERO_SIZED_TYPE {
+        let ptr = if T::IS_ZST {
             // won't overflow because index must be less than length (bucket_mask)
             // and bucket_mask is guaranteed to be less than `isize::MAX`
            // (see TableLayout::calculate_layout_for method)
@@ -430,11 +369,11 @@ impl<T> Bucket<T> {
     ///
     /// If `mem::size_of::<T>() == 0`, this function is always safe.
     ///
-    /// [`Bucket`]: crate::hashbrown::fork::raw::Bucket
-    /// [`from_base_index`]: crate::hashbrown::fork::raw::Bucket::from_base_index
-    /// [`RawTable::data_end`]: crate::hashbrown::fork::raw::RawTable::data_end
+    /// [`Bucket`]: crate::raw::Bucket
+    /// [`from_base_index`]: crate::raw::Bucket::from_base_index
+    /// [`RawTable::data_end`]: crate::raw::RawTable::data_end
     /// [`RawTableInner::data_end`]: RawTableInner::data_end
-    /// [`RawTable`]: crate::hashbrown::fork::raw::RawTable
+    /// [`RawTable`]: crate::raw::RawTable
     /// [`RawTableInner`]: RawTableInner
     /// [`<*const T>::offset_from`]: https://doc.rust-lang.org/nightly/core/primitive.pointer.html#method.offset_from
     #[inline]
@@ -460,7 +399,7 @@ impl<T> Bucket<T> {
         // (base.as_ptr() as usize - self.ptr.as_ptr() as usize) / mem::size_of::<T>()
         //
         // where: T0...Tlast - our stored data; C0...Clast - control bytes or metadata for data.
-        if Self::IS_ZERO_SIZED_TYPE {
+        if T::IS_ZST {
             // this can not be UB
             self.ptr.as_ptr() as usize - 1
         } else {
@@ -483,7 +422,7 @@ impl<T> Bucket<T> {
     /// will not re-evaluate where the new value should go, meaning the value may become
/// - /// [`RawTable`]: crate::hashbrown::fork::raw::RawTable + /// [`RawTable`]: crate::hashbrown::raw::RawTable /// [`<*mut T>::drop_in_place`]: https://doc.rust-lang.org/core/primitive.pointer.html#method.drop_in_place /// [`Hash`]: https://doc.rust-lang.org/core/hash/trait.Hash.html /// [`Eq`]: https://doc.rust-lang.org/core/cmp/trait.Eq.html @@ -491,10 +430,9 @@ impl Bucket { /// # Examples /// /// ``` - /// # #[cfg(feature = "raw")] - /// # fn test() { /// use core::hash::{BuildHasher, Hash}; - /// use hashbrown::raw::{Bucket, RawTable}; + /// use core::convert::Infallible; + /// use rune_alloc::hashbrown::raw::{Bucket, RawTable}; /// /// type NewHashBuilder = core::hash::BuildHasherDefault; /// @@ -511,20 +449,15 @@ impl Bucket { /// let value = ("a", 100); /// let hash = make_hash(&hash_builder, &value.0); /// - /// table.insert(hash, value.clone(), |val| make_hash(&hash_builder, &val.0)); + /// table.insert(&mut (), hash, value.clone(), |_: &mut (), val: &(&str, _)| Ok::<_, Infallible>(make_hash(&hash_builder, &val.0))); /// - /// let bucket: Bucket<(&str, i32)> = table.find(hash, |(k1, _)| k1 == &value.0).unwrap(); + /// let bucket: Bucket<(&str, i32)> = table.find(&mut (), hash, |_: &mut (), (k1, _): &(&str, _)| Ok::<_, Infallible>(k1 == &value.0)).unwrap().unwrap(); /// /// assert_eq!(unsafe { &*bucket.as_ptr() }, &("a", 100)); - /// # } - /// # fn main() { - /// # #[cfg(feature = "raw")] - /// # test() - /// # } /// ``` #[inline] pub fn as_ptr(&self) -> *mut T { - if Self::IS_ZERO_SIZED_TYPE { + if T::IS_ZST { // Just return an arbitrary ZST pointer which is properly aligned // invalid pointer is good enough for ZST invalid_mut(mem::align_of::()) @@ -565,14 +498,14 @@ impl Bucket { /// `self.to_base_index() + ofset + 1` must be no greater than the number returned by the /// function [`RawTable::buckets`] or [`RawTableInner::buckets`]. /// - /// [`Bucket`]: crate::hashbrown::fork::raw::Bucket + /// [`Bucket`]: crate::raw::Bucket /// [`<*mut T>::sub`]: https://doc.rust-lang.org/core/primitive.pointer.html#method.sub-1 /// [`NonNull::new_unchecked`]: https://doc.rust-lang.org/stable/std/ptr/struct.NonNull.html#method.new_unchecked - /// [`RawTable::buckets`]: crate::hashbrown::fork::raw::RawTable::buckets + /// [`RawTable::buckets`]: crate::raw::RawTable::buckets /// [`RawTableInner::buckets`]: RawTableInner::buckets #[inline] unsafe fn next_n(&self, offset: usize) -> Self { - let ptr = if Self::IS_ZERO_SIZED_TYPE { + let ptr = if T::IS_ZST { // invalid pointer is good enough for ZST invalid_mut(self.ptr.as_ptr() as usize + offset) } else { @@ -596,8 +529,8 @@ impl Bucket { /// double drop when [`RawTable`] goes out of scope. /// /// [`ptr::drop_in_place`]: https://doc.rust-lang.org/core/ptr/fn.drop_in_place.html - /// [`RawTable`]: crate::hashbrown::fork::raw::RawTable - /// [`RawTable::erase`]: crate::hashbrown::fork::raw::RawTable::erase + /// [`RawTable`]: crate::raw::RawTable + /// [`RawTable::erase`]: crate::raw::RawTable::erase #[cfg_attr(feature = "inline-more", inline)] pub(crate) unsafe fn drop(&self) { self.as_ptr().drop_in_place(); @@ -617,8 +550,8 @@ impl Bucket { /// because of not erased `data control byte`. 
     ///
     /// [`ptr::read`]: https://doc.rust-lang.org/core/ptr/fn.read.html
-    /// [`RawTable`]: crate::hashbrown::fork::raw::RawTable
-    /// [`RawTable::remove`]: crate::hashbrown::fork::raw::RawTable::remove
+    /// [`RawTable`]: crate::raw::RawTable
+    /// [`RawTable::remove`]: crate::raw::RawTable::remove
     #[inline]
     pub(crate) unsafe fn read(&self) -> T {
         self.as_ptr().read()
@@ -657,10 +590,9 @@ impl<T> Bucket<T> {
     /// # Examples
     ///
     /// ```
-    /// # #[cfg(feature = "raw")]
-    /// # fn test() {
     /// use core::hash::{BuildHasher, Hash};
-    /// use hashbrown::raw::{Bucket, RawTable};
+    /// use core::convert::Infallible;
+    /// use rune_alloc::hashbrown::raw::{Bucket, RawTable};
     ///
     /// type NewHashBuilder = core::hash::BuildHasherDefault<ahash::AHasher>;
     ///
@@ -677,19 +609,14 @@ impl<T> Bucket<T> {
     /// let value: (&str, String) = ("A pony", "is a small horse".to_owned());
     /// let hash = make_hash(&hash_builder, &value.0);
     ///
-    /// table.insert(hash, value.clone(), |val| make_hash(&hash_builder, &val.0));
+    /// table.insert(&mut (), hash, value.clone(), |_: &mut (), (val, _): &(&str, _)| Ok::<_, Infallible>(make_hash(&hash_builder, val))).unwrap();
     ///
-    /// let bucket: Bucket<(&str, String)> = table.find(hash, |(k, _)| k == &value.0).unwrap();
+    /// let bucket: Bucket<(&str, String)> = table.find(&mut (), hash, |_: &mut (), (k, _): &(&str, _)| Ok::<_, Infallible>(k == &value.0)).unwrap().unwrap();
     ///
     /// assert_eq!(
     ///     unsafe { bucket.as_ref() },
     ///     &("A pony", "is a small horse".to_owned())
     /// );
-    /// # }
-    /// # fn main() {
-    /// #     #[cfg(feature = "raw")]
-    /// #     test()
-    /// # }
     /// ```
     #[inline]
     pub unsafe fn as_ref<'a>(&self) -> &'a T {
@@ -716,10 +643,9 @@ impl<T> Bucket<T> {
     /// # Examples
     ///
     /// ```
-    /// # #[cfg(feature = "raw")]
-    /// # fn test() {
     /// use core::hash::{BuildHasher, Hash};
-    /// use hashbrown::raw::{Bucket, RawTable};
+    /// use core::convert::Infallible;
+    /// use rune_alloc::hashbrown::raw::{Bucket, RawTable};
     ///
     /// type NewHashBuilder = core::hash::BuildHasherDefault<ahash::AHasher>;
     ///
@@ -736,9 +662,9 @@ impl<T> Bucket<T> {
     /// let value: (&str, String) = ("A pony", "is a small horse".to_owned());
     /// let hash = make_hash(&hash_builder, &value.0);
     ///
-    /// table.insert(hash, value.clone(), |val| make_hash(&hash_builder, &val.0));
+    /// table.insert(&mut (), hash, value.clone(), |_: &mut (), (k, _): &(&str, _)| Ok::<_, Infallible>(make_hash(&hash_builder, k))).unwrap();
     ///
-    /// let bucket: Bucket<(&str, String)> = table.find(hash, |(k, _)| k == &value.0).unwrap();
+    /// let bucket: Bucket<(&str, String)> = table.find(&mut (), hash, |_: &mut (), (k, _): &(&str, _)| Ok::<_, Infallible>(k == &value.0)).unwrap().unwrap();
     ///
     /// unsafe {
     ///     bucket
@@ -753,11 +679,7 @@ impl<T> Bucket<T> {
     ///             "is a small horse less than 147 cm at the withers".to_owned()
     ///         )
     ///     );
-    /// # }
-    /// # fn main() {
-    /// #     #[cfg(feature = "raw")]
-    /// #     test()
-    /// # }
+    /// # Ok::<_, rune_alloc::Error>(())
     /// ```
     #[inline]
     pub unsafe fn as_mut<'a>(&self) -> &'a mut T {
@@ -788,7 +710,6 @@ impl<T> Bucket<T> {
     /// [violate memory safety]: https://doc.rust-lang.org/std/ptr/fn.read.html#ownership-of-the-returned-value
     /// [`Hash`]: https://doc.rust-lang.org/core/hash/trait.Hash.html
     /// [`Eq`]: https://doc.rust-lang.org/core/cmp/trait.Eq.html
-    #[cfg(feature = "raw")]
     #[inline]
     pub unsafe fn copy_from_nonoverlapping(&self, other: &Self) {
         self.as_ptr().copy_from_nonoverlapping(other.as_ptr(), 1);
     }
 }
 
 /// A raw hash table with an unsafe API.
-pub struct RawTable<T, A: Allocator + Clone = Global> {
-    table: RawTableInner<A>,
+pub struct RawTable<T, A: Allocator = Global> {
+    table: RawTableInner,
+    alloc: A,
     // Tell dropck that we own instances of T.
     marker: PhantomData<T>,
 }
 
 /// Non-generic part of `RawTable` which allows functions to be instantiated only once regardless
 /// of how many different key-value types are used.
-struct RawTableInner<A> {
+struct RawTableInner {
     // Mask to get an index from a hash value. The value is one less than the
     // number of buckets in the table.
     bucket_mask: usize,
@@ -818,8 +740,6 @@ struct RawTableInner {
     // Number of elements in the table, only really used by len()
     items: usize,
-
-    alloc: A,
 }
 
 impl<T> RawTable<T, Global> {
@@ -831,28 +751,21 @@ impl<T> RawTable<T, Global> {
     #[inline]
     pub const fn new() -> Self {
         Self {
-            table: RawTableInner::new_in(Global),
+            table: RawTableInner::NEW,
+            alloc: Global,
             marker: PhantomData,
         }
     }
 
     /// Attempts to allocate a new hash table with at least enough capacity
     /// for inserting the given number of elements without reallocating.
-    #[cfg(feature = "raw")]
-    pub fn try_with_capacity(capacity: usize) -> Result<Self, TryReserveError> {
+    pub fn try_with_capacity(capacity: usize) -> Result<Self, Error> {
         Self::try_with_capacity_in(capacity, Global)
     }
-
-    /// Allocates a new hash table with at least enough capacity for inserting
-    /// the given number of elements without reallocating.
-    pub fn with_capacity(capacity: usize) -> Self {
-        Self::with_capacity_in(capacity, Global)
-    }
 }
 
-impl<T, A: Allocator + Clone> RawTable<T, A> {
+impl<T, A: Allocator> RawTable<T, A> {
     const TABLE_LAYOUT: TableLayout = TableLayout::new::<T>();
-    const DATA_NEEDS_DROP: bool = mem::needs_drop::<T>();
 
     /// Creates a new empty hash table without allocating any memory, using the
     /// given allocator.
@@ -863,7 +776,8 @@ impl<T, A: Allocator> RawTable<T, A> {
     #[inline]
     pub const fn new_in(alloc: A) -> Self {
         Self {
-            table: RawTableInner::new_in(alloc),
+            table: RawTableInner::NEW,
+            alloc,
             marker: PhantomData,
         }
     }
@@ -872,69 +786,30 @@ impl<T, A: Allocator> RawTable<T, A> {
     ///
     /// The control bytes are left uninitialized.
     #[cfg_attr(feature = "inline-more", inline)]
-    unsafe fn new_uninitialized(
-        alloc: A,
-        buckets: usize,
-        fallibility: Fallibility,
-    ) -> Result<Self, TryReserveError> {
+    unsafe fn new_uninitialized(alloc: A, buckets: usize) -> Result<Self, Error> {
         debug_assert!(buckets.is_power_of_two());
 
         Ok(Self {
-            table: RawTableInner::new_uninitialized(
-                alloc,
-                Self::TABLE_LAYOUT,
-                buckets,
-                fallibility,
-            )?,
+            table: RawTableInner::new_uninitialized(&alloc, Self::TABLE_LAYOUT, buckets)?,
+            alloc,
             marker: PhantomData,
         })
     }
 
-    /// Attempts to allocate a new hash table with at least enough capacity
-    /// for inserting the given number of elements without reallocating.
-    fn fallible_with_capacity(
-        alloc: A,
-        capacity: usize,
-        fallibility: Fallibility,
-    ) -> Result<Self, TryReserveError> {
+    /// Allocates a new hash table using the given allocator, with at least enough capacity for
+    /// inserting the given number of elements without reallocating.
+    pub fn try_with_capacity_in(capacity: usize, alloc: A) -> Result<Self, Error> {
         Ok(Self {
-            table: RawTableInner::fallible_with_capacity(
-                alloc,
-                Self::TABLE_LAYOUT,
-                capacity,
-                fallibility,
-            )?,
+            table: RawTableInner::try_with_capacity(&alloc, Self::TABLE_LAYOUT, capacity)?,
+            alloc,
             marker: PhantomData,
         })
     }
 
-    /// Attempts to allocate a new hash table using the given allocator, with at least enough
-    /// capacity for inserting the given number of elements without reallocating.
-    #[cfg(feature = "raw")]
-    pub fn try_with_capacity_in(capacity: usize, alloc: A) -> Result<Self, TryReserveError> {
-        Self::fallible_with_capacity(alloc, capacity, Fallibility::Fallible)
-    }
-
-    /// Allocates a new hash table using the given allocator, with at least enough capacity for
-    /// inserting the given number of elements without reallocating.
-    pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
-        // Avoid `Result::unwrap_or_else` because it bloats LLVM IR.
-        match Self::fallible_with_capacity(alloc, capacity, Fallibility::Infallible) {
-            Ok(capacity) => capacity,
-            Err(_) => unsafe { hint::unreachable_unchecked() },
-        }
-    }
-
     /// Returns a reference to the underlying allocator.
     #[inline]
     pub fn allocator(&self) -> &A {
-        &self.table.alloc
-    }
-
-    /// Deallocates the table without dropping any entries.
-    #[cfg_attr(feature = "inline-more", inline)]
-    unsafe fn free_buckets(&mut self) {
-        self.table.free_buckets(Self::TABLE_LAYOUT);
+        &self.alloc
     }
 
     /// Returns pointer to one past last element of data table.
@@ -945,7 +820,7 @@ impl<T, A: Allocator> RawTable<T, A> {
 
     /// Returns pointer to start of data table.
     #[inline]
-    #[cfg(any(feature = "raw", feature = "nightly"))]
+    #[cfg(any(feature = "raw", rune_nightly))]
     pub unsafe fn data_start(&self) -> NonNull<T> {
         NonNull::new_unchecked(self.data_end().as_ptr().wrapping_sub(self.buckets()))
     }
@@ -958,9 +833,10 @@ impl<T, A: Allocator> RawTable<T, A> {
     ///
     /// This function might be useful for memory profiling.
     #[inline]
-    #[cfg(feature = "raw")]
     pub fn allocation_info(&self) -> (NonNull<u8>, Layout) {
-        self.table.allocation_info_or_zero(Self::TABLE_LAYOUT)
+        // SAFETY: We use the same `table_layout` that was used to allocate
+        // this table.
+        unsafe { self.table.allocation_info_or_zero(Self::TABLE_LAYOUT) }
    }
 
     /// Returns the index of a bucket from a `Bucket`.
@@ -995,26 +871,15 @@ impl<T, A: Allocator> RawTable<T, A> {
 
     /// Finds and erases an element from the table, dropping it in place.
     /// Returns true if an element was found.
-    #[cfg(feature = "raw")]
     #[cfg_attr(feature = "inline-more", inline)]
-    pub fn erase_entry(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> bool {
-        into_ok(self.erase_entry_with(&mut (), hash, infallible_eq(eq)))
-    }
-
-    /// Finds and erases an element from the table, dropping it in place.
-    /// Returns true if an element was found.
-    ///
-    /// This variant supports a fallible hasher with a passed around context.
-    #[cfg(feature = "raw")]
-    #[cfg_attr(feature = "inline-more", inline)]
-    pub fn erase_entry_with(
+    pub fn erase_entry<C: ?Sized, E>(
         &mut self,
         cx: &mut C,
         hash: u64,
-        eq: impl FnMut(&mut C, &T) -> Result<bool, E>,
+        eq: impl EqFn<C, T, E>,
     ) -> Result<bool, E> {
         // Avoid `Option::map` because it bloats LLVM IR.
-        if let Some(bucket) = self.find_with(cx, hash, eq)? {
+        if let Some(bucket) = self.find(cx, hash, eq)? {
             unsafe {
                 self.erase(bucket);
             }
@@ -1041,22 +906,14 @@ impl<T, A: Allocator> RawTable<T, A> {
 
     /// Finds and removes an element from the table, returning it.
     #[cfg_attr(feature = "inline-more", inline)]
-    pub fn remove_entry(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<T> {
-        into_ok(self.remove_entry_with(&mut (), hash, infallible_eq(eq)))
-    }
-
-    /// Finds and removes an element from the table, returning it.
-    ///
-    /// This variant supports a fallible hasher with a passed around context.
-    #[cfg_attr(feature = "inline-more", inline)]
-    pub fn remove_entry_with(
+    pub fn remove_entry<C: ?Sized, E>(
         &mut self,
         cx: &mut C,
         hash: u64,
-        eq: impl FnMut(&mut C, &T) -> Result<bool, E>,
+        eq: impl EqFn<C, T, E>,
     ) -> Result<Option<T>, E> {
         // Avoid `Option::map` because it bloats LLVM IR.
-        Ok(match self.find_with(cx, hash, eq)? {
+        Ok(match self.find(cx, hash, eq)? {
             Some(bucket) => Some(unsafe { self.remove(bucket).0 }),
             None => None,
         })
@@ -1078,39 +935,35 @@ impl<T, A: Allocator> RawTable<T, A> {
         // Ensure that the table is reset even if one of the drops panic
         let mut self_ = guard(self, |self_| self_.clear_no_drop());
         unsafe {
-            self_.drop_elements();
-        }
-    }
-
-    unsafe fn drop_elements(&mut self) {
-        if Self::DATA_NEEDS_DROP && !self.is_empty() {
-            for item in self.iter() {
-                item.drop();
-            }
+            // SAFETY: ScopeGuard sets to zero the `items` field of the table
+            // even in case of panic during the dropping of the elements so
+            // that there will be no double drop of the elements.
+            self_.table.drop_elements::<T>();
         }
     }
 
     /// Shrinks the table to fit `max(self.len(), min_size)` elements.
     #[cfg_attr(feature = "inline-more", inline)]
-    pub fn shrink_to(&mut self, min_size: usize, hasher: impl Fn(&T) -> u64) {
-        into_ok(self.shrink_to_with(&mut (), min_size, infallible_hasher(hasher)));
-    }
-
-    /// Shrinks the table to fit `max(self.len(), min_size)` elements.
-    ///
-    /// This variant supports a fallible hasher with a passed around context.
-    #[cfg_attr(feature = "inline-more", inline)]
-    pub fn shrink_to_with(
+    pub fn shrink_to<C: ?Sized, E>(
         &mut self,
         cx: &mut C,
         min_size: usize,
-        hasher: impl Fn(&mut C, &T) -> Result<u64, E>,
-    ) -> Result<(), E> {
+        hasher: impl HasherFn<C, T, E>,
+    ) -> Result<(), CustomError<E>> {
         // Calculate the minimal number of elements that we need to reserve
         // space for.
         let min_size = usize::max(self.table.items, min_size);
 
         if min_size == 0 {
-            *self = Self::new_in(self.table.alloc.clone());
+            let mut old_inner = mem::replace(&mut self.table, RawTableInner::NEW);
+            unsafe {
+                // SAFETY:
+                // 1. We call the function only once;
+                // 2. We know for sure that `alloc` and `table_layout` matches the [`Allocator`]
+                //    and [`TableLayout`] that were used to allocate this table.
+                // 3. If any elements' drop function panics, then there will only be a memory leak,
+                //    because we have replaced the inner table with a new one.
+                old_inner.drop_inner_table::<T, _>(&self.alloc, Self::TABLE_LAYOUT);
+            }
             return Ok(());
         }
@@ -1127,14 +980,26 @@ impl<T, A: Allocator> RawTable<T, A> {
         if min_buckets < self.buckets() {
             // Fast path if the table is empty
             if self.table.items == 0 {
-                *self = Self::with_capacity_in(min_size, self.table.alloc.clone());
+                let new_inner =
+                    RawTableInner::try_with_capacity(&self.alloc, Self::TABLE_LAYOUT, min_size)?;
+                let mut old_inner = mem::replace(&mut self.table, new_inner);
+                unsafe {
+                    // SAFETY:
+                    // 1. We call the function only once;
+                    // 2. We know for sure that `alloc` and `table_layout` matches the [`Allocator`]
+                    //    and [`TableLayout`] that were used to allocate this table.
+                    // 3. If any elements' drop function panics, then there will only be a memory leak,
+                    //    because we have replaced the inner table with a new one.
+                    old_inner.drop_inner_table::<T, _>(&self.alloc, Self::TABLE_LAYOUT);
+                }
             } else {
                 // Avoid `Result::unwrap_or_else` because it bloats LLVM IR.
-                if self
-                    .resize(cx, min_size, hasher, Fallibility::Infallible)?
-                    .is_err()
-                {
-                    unsafe { hint::unreachable_unchecked() }
+                unsafe {
+                    // SAFETY:
+                    // 1. We know for sure that `min_size >= self.table.items`.
+                    // 2. The [`RawTableInner`] must already have properly initialized control bytes since
+                    //    we never exposed RawTable::new_uninitialized in a public API.
                    self.resize(cx, min_size, hasher)?;
                 }
             }
         }
 
         Ok(())
     }
 
     /// Ensures that at least `additional` items can be inserted into the table
     /// without reallocation.
     #[cfg_attr(feature = "inline-more", inline)]
-    pub fn reserve(&mut self, additional: usize, hasher: impl Fn(&T) -> u64) {
-        into_ok(self.reserve_with(&mut (), additional, infallible_hasher(hasher)))
-    }
-
-    /// Ensures that at least `additional` items can be inserted into the table
-    /// without reallocation.
-    ///
-    /// This variant supports a fallible hasher with a passed around context.
-    #[cfg_attr(feature = "inline-more", inline)]
-    pub fn reserve_with(
+    pub fn reserve<C: ?Sized, E>(
         &mut self,
         cx: &mut C,
         additional: usize,
-        hasher: impl Fn(&mut C, &T) -> Result<u64, E>,
-    ) -> Result<(), E> {
+        hasher: impl HasherFn<C, T, E>,
+    ) -> Result<(), CustomError<E>> {
         if unlikely(additional > self.table.growth_left) {
             // Avoid `Result::unwrap_or_else` because it bloats LLVM IR.
-            if self
-                .reserve_rehash(cx, additional, hasher, Fallibility::Infallible)?
-                .is_err()
-            {
-                unsafe { hint::unreachable_unchecked() }
+            unsafe {
+                // SAFETY: The [`RawTableInner`] must already have properly initialized control
+                // bytes since we never exposed RawTable::new_uninitialized in a public API.
+                self.reserve_rehash(cx, additional, hasher)?;
             }
         }
 
@@ -1176,50 +1031,52 @@ impl<T, A: Allocator> RawTable<T, A> {
     /// Tries to ensure that at least `additional` items can be inserted into
     /// the table without reallocation.
     #[cfg_attr(feature = "inline-more", inline)]
-    pub fn try_reserve(
-        &mut self,
-        additional: usize,
-        hasher: impl Fn(&T) -> u64,
-    ) -> Result<(), TryReserveError> {
-        into_ok(self.try_reserve_with(&mut (), additional, infallible_hasher(hasher)))
-    }
-
-    /// Tries to ensure that at least `additional` items can be inserted into
-    /// the table without reallocation.
-    ///
-    /// This variant supports a fallible hasher with a passed around context.
-    #[cfg_attr(feature = "inline-more", inline)]
-    pub fn try_reserve_with(
+    pub fn try_reserve<C: ?Sized, E>(
         &mut self,
         cx: &mut C,
         additional: usize,
-        hasher: impl Fn(&mut C, &T) -> Result<u64, E>,
-    ) -> Result<Result<(), TryReserveError>, E> {
+        hasher: impl HasherFn<C, T, E>,
+    ) -> Result<(), CustomError<E>> {
         if additional > self.table.growth_left {
-            self.reserve_rehash(cx, additional, hasher, Fallibility::Fallible)
+            // SAFETY: The [`RawTableInner`] must already have properly initialized control
+            // bytes since we never exposed RawTable::new_uninitialized in a public API.
+            unsafe { self.reserve_rehash(cx, additional, hasher) }
         } else {
-            Ok(Ok(()))
+            Ok(())
         }
     }
 
     /// Out-of-line slow path for `reserve` and `try_reserve`.
+    ///
+    /// # Safety
+    ///
+    /// The [`RawTableInner`] must have properly initialized control bytes,
+    /// otherwise calling this function results in [`undefined behavior`]
+    ///
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
     #[cold]
     #[inline(never)]
-    fn reserve_rehash(
+    unsafe fn reserve_rehash<C: ?Sized, E>(
         &mut self,
         cx: &mut C,
         additional: usize,
-        hasher: impl Fn(&mut C, &T) -> Result<u64, E>,
-        fallibility: Fallibility,
-    ) -> Result<Result<(), TryReserveError>, E> {
+        hasher: impl HasherFn<C, T, E>,
+    ) -> Result<(), CustomError<E>> {
         unsafe {
+            // SAFETY:
+            // 1. We know for sure that `alloc` and `layout` matches the [`Allocator`] and
+            //    [`TableLayout`] that were used to allocate this table.
+            // 2. The `drop` function is the actual drop function of the elements stored in
+            //    the table.
+            // 3. The caller ensures that the control bytes of the `RawTableInner`
+            //    are already initialized.
self.table.reserve_rehash_inner( cx, + &self.alloc, additional, - &|table, cx, index| hasher(cx, table.bucket::(index).as_ref()), - fallibility, + &|cx, table, index| hasher.hash(cx, table.bucket::(index).as_ref()), Self::TABLE_LAYOUT, - if Self::DATA_NEEDS_DROP { + if T::NEEDS_DROP { Some(mem::transmute(ptr::drop_in_place:: as unsafe fn(*mut T))) } else { None @@ -1230,29 +1087,63 @@ impl RawTable { /// Allocates a new table of a different size and moves the contents of the /// current table into it. - fn resize( + /// + /// # Safety + /// + /// The [`RawTableInner`] must have properly initialized control bytes, + /// otherwise calling this function results in [`undefined behavior`] + /// + /// The caller of this function must ensure that `capacity >= self.table.items` + /// otherwise: + /// + /// * If `self.table.items != 0`, calling this function with `capacity` + /// equal to 0 (`capacity == 0`) results in [`undefined behavior`]. + /// + /// * If `capacity_to_buckets(capacity) < Group::WIDTH` and + /// `self.table.items > capacity_to_buckets(capacity)` + /// calling this function results in [`undefined behavior`]. + /// + /// * If `capacity_to_buckets(capacity) >= Group::WIDTH` and + /// `self.table.items > capacity_to_buckets(capacity)` + /// calling this function will never return (it will go into an + /// infinite loop). + /// + /// See [`RawTableInner::find_insert_slot`] for more information. + /// + /// [`RawTableInner::find_insert_slot`]: RawTableInner::find_insert_slot + /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html + unsafe fn resize( &mut self, cx: &mut C, capacity: usize, - hasher: impl Fn(&mut C, &T) -> Result, - fallibility: Fallibility, - ) -> Result, E> { - unsafe { - self.table.resize_inner( - cx, - capacity, - &|table, cx, index| hasher(cx, table.bucket::(index).as_ref()), - fallibility, - Self::TABLE_LAYOUT, - ) - } + hasher: impl HasherFn, + ) -> Result<(), CustomError> { + // SAFETY: + // 1. The caller of this function guarantees that `capacity >= self.table.items`. + // 2. We know for sure that `alloc` and `layout` matches the [`Allocator`] and + // [`TableLayout`] that were used to allocate this table. + // 3. The caller ensures that the control bytes of the `RawTableInner` + // are already initialized. + self.table.resize_inner( + cx, + &self.alloc, + capacity, + &move |cx, table, index| hasher.hash(cx, table.bucket::(index).as_ref()), + Self::TABLE_LAYOUT, + ) } /// Inserts a new element into the table, and returns its raw bucket. /// /// This does not check if the given element already exists in the table. #[cfg_attr(feature = "inline-more", inline)] - pub fn insert(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> Bucket { + pub fn insert( + &mut self, + cx: &mut C, + hash: u64, + value: T, + hasher: impl HasherFn, + ) -> Result, CustomError> { unsafe { let mut slot = self.table.find_insert_slot(hash); @@ -1261,11 +1152,11 @@ impl RawTable { // number of EMPTY slots does not change in this case. let old_ctrl = *self.table.ctrl(slot.index); if unlikely(self.table.growth_left == 0 && special_is_empty(old_ctrl)) { - self.reserve(1, hasher); + self.reserve(cx, 1, hasher)?; slot = self.table.find_insert_slot(hash); } - self.insert_in_slot(hash, slot, value) + Ok(self.insert_in_slot(hash, slot, value)) } } @@ -1275,7 +1166,6 @@ impl RawTable { /// table. /// /// This does not check if the given element already exists in the table.
- #[cfg(feature = "raw")] #[cfg_attr(feature = "inline-more", inline)] pub fn try_insert_no_grow(&mut self, hash: u64, value: T) -> Result, T> { unsafe { @@ -1294,8 +1184,14 @@ impl RawTable { /// /// This does not check if the given element already exists in the table. #[cfg_attr(feature = "inline-more", inline)] - pub fn insert_entry(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> &mut T { - unsafe { self.insert(hash, value, hasher).as_mut() } + pub fn insert_entry( + &mut self, + cx: &mut C, + hash: u64, + value: T, + hasher: impl HasherFn, + ) -> Result<&mut T, CustomError> { + Ok(unsafe { self.insert(cx, hash, value, hasher)?.as_mut() }) } /// Inserts a new element into the table, without growing the table. @@ -1352,46 +1248,22 @@ impl RawTable { /// This function may resize the table if additional space is required for /// inserting an element. #[inline] - pub fn find_or_find_insert_slot( - &mut self, - hash: u64, - eq: impl FnMut(&T) -> bool, - hasher: impl Fn(&T) -> u64, - ) -> Result, InsertSlot> { - into_ok(self.find_or_find_insert_slot_with( - &mut (), - hash, - infallible_eq(eq), - infallible_hasher(hasher), - )) - } - - /// Searches for an element in the table. If the element is not found, - /// returns `Err` with the position of a slot where an element with the - /// same hash could be inserted. - /// - /// This function may resize the table if additional space is required for - /// inserting an element. - /// - /// This variant supports a fallible hasher with a passed around context. - #[inline] - pub fn find_or_find_insert_slot_with( + pub fn find_or_find_insert_slot( &mut self, cx: &mut C, hash: u64, - mut eq: impl FnMut(&mut C, &T) -> Result, - hasher: impl Fn(&mut C, &T) -> Result, - ) -> Result, InsertSlot>, E> { - self.reserve_with(cx, 1, hasher)?; + eq: impl EqFn, + hasher: impl HasherFn, + ) -> Result, ErrorOrInsertSlot> { + self.reserve(cx, 1, hasher)?; - match self + let index = self .table - .find_or_find_insert_slot_inner(cx, hash, &mut |cx, index| unsafe { - eq(cx, self.bucket(index).as_ref()) - })? { - Ok(index) => Ok(Ok(unsafe { self.bucket(index) })), - Err(slot) => Ok(Err(slot)), - } + .find_or_find_insert_slot_inner(cx, hash, &|cx, index| unsafe { + eq.eq(cx, self.bucket(index).as_ref()) + })?; + + Ok(unsafe { self.bucket(index) }) } /// Inserts a new element into the table in the given slot, and returns its @@ -1414,22 +1286,14 @@ impl RawTable { /// Searches for an element in the table. #[inline] - pub fn find(&self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option> { - into_ok(self.find_with(&mut (), hash, infallible_eq(eq))) - } - - /// Searches for an element in the table. - /// - /// This variant supports a fallible hasher with a passed around context. - #[inline] - pub fn find_with( + pub fn find( &self, cx: &mut C, hash: u64, - mut eq: impl FnMut(&mut C, &T) -> Result, + eq: impl EqFn, ) -> Result>, E> { - let result = self.table.find_inner(cx, hash, &mut |cx, index| unsafe { - eq(cx, self.bucket(index).as_ref()) + let result = self.table.find_inner(cx, hash, &|cx, index| unsafe { + eq.eq(cx, self.bucket(index).as_ref()) })?; // Avoid `Option::map` because it bloats LLVM IR. @@ -1441,22 +1305,14 @@ impl RawTable { /// Gets a reference to an element in the table. #[inline] - pub fn get(&self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&T> { - into_ok(self.get_with(&mut (), hash, infallible_eq(eq))) - } - - /// Gets a reference to an element in the table. 
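A toy model of the `find_or_find_insert_slot` reshaping above: the nested `Result<Result<Bucket, InsertSlot>, E>` becomes a single result whose error channel, `ErrorOrInsertSlot`, carries either a real error or the slot where the missing element should be inserted. The probe below is a plain linear scan over a toy table, not the real hash-directed probe sequence:

#[derive(Debug, PartialEq)]
struct InsertSlot(usize);

#[derive(Debug, PartialEq)]
enum ErrorOrInsertSlot<E> {
    Error(E),
    InsertSlot(InsertSlot),
}

fn find_or_find_insert_slot<E>(
    slots: &[Option<u64>],
    key: u64,
) -> Result<usize, ErrorOrInsertSlot<E>> {
    for (index, slot) in slots.iter().enumerate() {
        match slot {
            Some(existing) if *existing == key => return Ok(index),
            // First empty slot ends the probe: report it as the insert position.
            None => return Err(ErrorOrInsertSlot::InsertSlot(InsertSlot(index))),
            Some(_) => continue,
        }
    }
    unreachable!("the load factor guarantees at least one empty slot");
}

fn main() {
    let slots = [Some(1), Some(2), None, None];
    assert_eq!(find_or_find_insert_slot::<()>(&slots, 2), Ok(1));
    assert_eq!(
        find_or_find_insert_slot::<()>(&slots, 9),
        Err(ErrorOrInsertSlot::InsertSlot(InsertSlot(2)))
    );
}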
- /// - /// This variant supports a fallible hasher with a passed around context. - #[inline] - pub fn get_with( + pub fn get( &self, cx: &mut C, hash: u64, - eq: impl FnMut(&mut C, &T) -> Result, + eq: impl EqFn, ) -> Result, E> { // Avoid `Option::map` because it bloats LLVM IR. - Ok(match self.find_with(cx, hash, eq)? { + Ok(match self.find(cx, hash, eq)? { Some(bucket) => Some(unsafe { bucket.as_ref() }), None => None, }) @@ -1464,22 +1320,14 @@ impl RawTable { /// Gets a mutable reference to an element in the table. #[inline] - pub fn get_mut(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&mut T> { - into_ok(self.get_mut_with(&mut (), hash, infallible_eq(eq))) - } - - /// Gets a mutable reference to an element in the table. - /// - /// This variant supports a fallible hasher with a passed around context. - #[inline] - pub fn get_mut_with( + pub fn get_mut( &mut self, cx: &mut C, hash: u64, - eq: impl FnMut(&mut C, &T) -> Result, + eq: impl EqFn, ) -> Result, E> { // Avoid `Option::map` because it bloats LLVM IR. - Ok(match self.find_with(cx, hash, eq)? { + Ok(match self.find(cx, hash, eq)? { Some(bucket) => Some(unsafe { bucket.as_mut() }), None => None, }) @@ -1494,52 +1342,65 @@ impl RawTable { /// /// The `eq` argument should be a closure such that `eq(i, k)` returns true if `k` is equal to /// the `i`th key to be looked up. - pub fn get_many_mut( + pub fn get_many_mut( &mut self, + cx: &mut C, hashes: [u64; N], - eq: impl FnMut(usize, &T) -> bool, - ) -> Option<[&'_ mut T; N]> { + eq: impl Fn(&mut C, usize, &T) -> Result, + ) -> Result, E> { unsafe { - let ptrs = self.get_many_mut_pointers(hashes, eq)?; + let ptrs = match self.get_many_mut_pointers(cx, hashes, eq)? { + Some(ptrs) => ptrs, + None => return Ok(None), + }; for (i, &cur) in ptrs.iter().enumerate() { if ptrs[..i].iter().any(|&prev| ptr::eq::(prev, cur)) { - return None; + return Ok(None); } } // All bucket are distinct from all previous buckets so we're clear to return the result // of the lookup. // TODO use `MaybeUninit::array_assume_init` here instead once that's stable. - Some(mem::transmute_copy(&ptrs)) + Ok(Some(mem::transmute_copy(&ptrs))) } } - pub unsafe fn get_many_unchecked_mut( + pub unsafe fn get_many_unchecked_mut( &mut self, + cx: &mut C, hashes: [u64; N], - eq: impl FnMut(usize, &T) -> bool, - ) -> Option<[&'_ mut T; N]> { - let ptrs = self.get_many_mut_pointers(hashes, eq)?; - Some(mem::transmute_copy(&ptrs)) + eq: impl Fn(&mut C, usize, &T) -> Result, + ) -> Result, E> { + let ptrs = match self.get_many_mut_pointers(cx, hashes, eq)? { + Some(ptrs) => ptrs, + None => return Ok(None), + }; + + Ok(Some(mem::transmute_copy(&ptrs))) } - unsafe fn get_many_mut_pointers( + unsafe fn get_many_mut_pointers( &mut self, + cx: &mut C, hashes: [u64; N], - mut eq: impl FnMut(usize, &T) -> bool, - ) -> Option<[*mut T; N]> { + eq: impl Fn(&mut C, usize, &T) -> Result, + ) -> Result, E> { // TODO use `MaybeUninit::uninit_array` here instead once that's stable. let mut outs: MaybeUninit<[*mut T; N]> = MaybeUninit::uninit(); let outs_ptr = outs.as_mut_ptr(); for (i, &hash) in hashes.iter().enumerate() { - let cur = self.find(hash, |k| eq(i, k))?; + let cur = match self.find(cx, hash, |cx: &mut C, k: &T| eq(cx, i, k))? { + Some(cur) => cur, + None => return Ok(None), + }; *(*outs_ptr).get_unchecked_mut(i) = cur.as_mut(); } // TODO use `MaybeUninit::array_assume_init` here instead once that's stable. 
- Some(outs.assume_init()) + Ok(Some(outs.assume_init())) } /// Returns the number of elements the map can hold without reallocating. @@ -1583,13 +1444,17 @@ impl RawTable { /// the caller to ensure that the `RawTable` outlives the `RawIter`. /// Because we cannot make the `next` method unsafe on the `RawIter` /// struct, we have to make the `iter` method unsafe. + /// + /// # Safety + /// + /// Caller must ensure that the raw iterator doesn't outlive `self`. #[inline] pub unsafe fn iter(&self) -> RawIter { - let data = Bucket::from_base_index(self.data_end(), 0); - RawIter { - iter: RawIterRange::new(self.table.ctrl.as_ptr(), data, self.table.buckets()), - items: self.table.items, - } + // SAFETY: + // 1. The caller must uphold the safety contract for `iter` method. + // 2. The [`RawTableInner`] must already have properly initialized control bytes since + // we never exposed RawTable::new_uninitialized in a public API. + self.table.iter() } /// Returns an iterator over occupied buckets that could match a given hash. @@ -1602,7 +1467,6 @@ impl RawTable { /// `RawIterHash`. Because we cannot make the `next` method unsafe on the /// `RawIterHash` struct, we have to make the `iter_hash` method unsafe. #[cfg_attr(feature = "inline-more", inline)] - #[cfg(feature = "raw")] pub unsafe fn iter_hash(&self, hash: u64) -> RawIterHash { RawIterHash::new(self, hash) } @@ -1629,8 +1493,8 @@ impl RawTable { debug_assert_eq!(iter.len(), self.len()); RawDrain { iter, - table: ManuallyDrop::new(mem::replace(self, Self::new_in(self.table.alloc.clone()))), - orig_table: NonNull::from(self), + table: mem::replace(&mut self.table, RawTableInner::NEW), + orig_table: NonNull::from(&mut self.table), marker: PhantomData, } } @@ -1668,7 +1532,7 @@ impl RawTable { Some(( unsafe { NonNull::new_unchecked(self.table.ctrl.as_ptr().sub(ctrl_offset)) }, layout, - unsafe { ptr::read(&self.table.alloc) }, + unsafe { ptr::read(&self.alloc) }, )) }; mem::forget(self); @@ -1676,39 +1540,40 @@ impl RawTable { } } -unsafe impl Send for RawTable +unsafe impl Send for RawTable where T: Send, A: Send, { } -unsafe impl Sync for RawTable +unsafe impl Sync for RawTable where T: Sync, A: Sync, { } -impl RawTableInner { +impl RawTableInner { + const NEW: Self = RawTableInner::new(); + /// Creates a new empty hash table without allocating any memory. /// /// In effect this returns a table with exactly 1 bucket. However we can /// leave the data pointer dangling since that bucket is never accessed /// due to our load factor forcing us to always have at least 1 free bucket. #[inline] - const fn new_in(alloc: A) -> Self { + const fn new() -> Self { Self { // Be careful to cast the entire slice to a raw pointer. ctrl: unsafe { NonNull::new_unchecked(Group::static_empty() as *const _ as *mut u8) }, bucket_mask: 0, items: 0, growth_left: 0, - alloc, } } } -impl RawTableInner { +impl RawTableInner { /// Allocates a new [`RawTableInner`] with the given number of buckets. /// The control bytes and buckets are left uninitialized. /// @@ -1722,24 +1587,23 @@ impl RawTableInner { /// /// [`Allocator`]: https://doc.rust-lang.org/alloc/alloc/trait.Allocator.html #[cfg_attr(feature = "inline-more", inline)] - unsafe fn new_uninitialized( - alloc: A, + unsafe fn new_uninitialized( + alloc: &A, table_layout: TableLayout, buckets: usize, - fallibility: Fallibility, - ) -> Result { + ) -> Result + where + A: Allocator, + { debug_assert!(buckets.is_power_of_two()); // Avoid `Option::ok_or_else` because it bloats LLVM IR. 
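The `get_many_mut` plumbing above rests on one invariant: collect raw pointers first, reject any aliasing pair, and only then materialize multiple `&mut` references. A self-contained two-element version of that check:

fn get_two_mut<T>(slice: &mut [T], a: usize, b: usize) -> Option<(&mut T, &mut T)> {
    if a == b || a >= slice.len() || b >= slice.len() {
        return None; // overlapping or out-of-bounds requests are refused
    }
    let base = slice.as_mut_ptr();
    unsafe {
        // SAFETY: `a != b` and both are in bounds, so the two pointers never
        // alias and each stays within the same allocation.
        Some((&mut *base.add(a), &mut *base.add(b)))
    }
}

fn main() {
    let mut xs = [1, 2, 3];
    let (x, y) = get_two_mut(&mut xs, 0, 2).unwrap();
    core::mem::swap(x, y);
    assert_eq!(xs, [3, 2, 1]);
    assert!(get_two_mut(&mut xs, 1, 1).is_none());
}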
let (layout, ctrl_offset) = match table_layout.calculate_layout_for(buckets) { Some(lco) => lco, - None => return Err(fallibility.capacity_overflow()), + None => return Err(Error::CapacityOverflow), }; - let ptr: NonNull = match do_alloc(&alloc, layout) { - Ok(block) => block.cast(), - Err(_) => return Err(fallibility.alloc_err(layout)), - }; + let ptr: NonNull = alloc.allocate(layout)?.cast(); // SAFETY: null pointer will be caught in above check let ctrl = NonNull::new_unchecked(ptr.as_ptr().add(ctrl_offset)); @@ -1748,7 +1612,6 @@ impl RawTableInner { bucket_mask: buckets - 1, items: 0, growth_left: bucket_mask_to_capacity(buckets - 1), - alloc, }) } @@ -1757,22 +1620,23 @@ impl RawTableInner { /// /// All the control bytes are initialized with the [`EMPTY`] bytes. #[inline] - fn fallible_with_capacity( - alloc: A, + fn fallible_with_capacity( + alloc: &A, table_layout: TableLayout, capacity: usize, - fallibility: Fallibility, - ) -> Result { + ) -> Result + where + A: Allocator, + { if capacity == 0 { - Ok(Self::new_in(alloc)) + Ok(Self::NEW) } else { // SAFETY: We checked that we could successfully allocate the new table, and then // initialized all control bytes with the constant `EMPTY` byte. unsafe { - let buckets = - capacity_to_buckets(capacity).ok_or_else(|| fallibility.capacity_overflow())?; + let buckets = capacity_to_buckets(capacity).ok_or(Error::CapacityOverflow)?; - let result = Self::new_uninitialized(alloc, table_layout, buckets, fallibility)?; + let result = Self::new_uninitialized(alloc, table_layout, buckets)?; // SAFETY: We checked that the table is allocated and therefore the table already has // `self.bucket_mask + 1 + Group::WIDTH` number of control bytes (see TableLayout::calculate_layout_for) // so writing `self.num_ctrl_bytes() == bucket_mask + 1 + Group::WIDTH` bytes is safe. @@ -1783,6 +1647,28 @@ impl RawTableInner { } } + /// Allocates a new [`RawTableInner`] with at least enough capacity for inserting + /// the given number of elements without reallocating. + /// + /// Panics if the new capacity exceeds [`isize::MAX`] bytes, and [`abort`]s the program + /// in case of allocation error. Use [`fallible_with_capacity`] instead if you want to + /// handle memory allocation failure. + /// + /// All the control bytes are initialized with the [`EMPTY`] bytes. + /// + /// [`fallible_with_capacity`]: RawTableInner::fallible_with_capacity + fn try_with_capacity( + alloc: &A, + table_layout: TableLayout, + capacity: usize, + ) -> Result + where + A: Allocator, + { + // Avoid `Result::unwrap_or_else` because it bloats LLVM IR. + Self::fallible_with_capacity(alloc, table_layout, capacity) + } + /// Fixes up an insertion slot due to false positives for groups smaller than the group width. /// This must only be used on insertion slots found by `find_insert_slot_in_group`. #[inline] @@ -1834,18 +1720,18 @@ impl RawTableInner { } } - /// Searches for an element in the table, or a potential slot where that - /// element could be inserted. + /// Searches for an element in the table, or a potential slot where that element could be + /// inserted. /// - /// This uses dynamic dispatch to reduce the amount of code generated for - /// the `eq` argument, but that is eliminated by LLVM optimizations. + /// This uses dynamic dispatch to reduce the amount of code generated, but that is + /// eliminated by LLVM optimizations.
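The `fallible_with_capacity` path above reports `Error::CapacityOverflow` instead of panicking. A hedged sketch of the capacity-to-buckets math it builds on (7/8 load factor as in SwissTable-style tables; the real function also debug-asserts `cap != 0`):

#[derive(Debug, PartialEq)]
enum Error {
    CapacityOverflow,
}

fn capacity_to_buckets(cap: usize) -> Option<usize> {
    if cap < 8 {
        // Small tables round up to 4 or 8 buckets.
        return Some(if cap < 4 { 4 } else { 8 });
    }
    // We need the smallest power of two with cap <= buckets * 7 / 8.
    let adjusted = cap.checked_mul(8)? / 7;
    Some(adjusted.next_power_of_two())
}

fn try_buckets(cap: usize) -> Result<usize, Error> {
    capacity_to_buckets(cap).ok_or(Error::CapacityOverflow)
}

fn main() {
    assert_eq!(try_buckets(28), Ok(32));
    assert_eq!(try_buckets(usize::MAX), Err(Error::CapacityOverflow));
}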
#[inline] fn find_or_find_insert_slot_inner( &self, cx: &mut C, hash: u64, - eq: &mut dyn FnMut(&mut C, usize) -> Result, - ) -> Result, E> { + eq: &dyn Fn(&mut C, usize) -> Result, + ) -> Result> { let mut insert_slot = None; let h2_hash = h2(hash); @@ -1857,8 +1743,8 @@ impl RawTableInner { for bit in group.match_byte(h2_hash) { let index = (probe_seq.pos + bit) & self.bucket_mask; - if likely(eq(cx, index)?) { - return Ok(Ok(index)); + if likely(eq(cx, index).map_err(CustomError::Custom)?) { + return Ok(index); } } @@ -1875,7 +1761,9 @@ impl RawTableInner { // least one. For tables smaller than the group width, there will still be an // empty element in the current (and only) group due to the load factor. unsafe { - return Ok(Err(self.fix_insert_slot(insert_slot.unwrap_unchecked()))); + return Err(ErrorOrInsertSlot::InsertSlot( + self.fix_insert_slot(insert_slot.unwrap_unchecked()), + )); } } @@ -1883,13 +1771,55 @@ impl RawTableInner { } } - /// Searches for an empty or deleted bucket which is suitable for inserting - /// a new element and sets the hash for that slot. + /// Searches for an empty or deleted bucket which is suitable for inserting a new + /// element and sets the hash for that slot. Returns the index of that slot and the + /// old control byte stored in the found index. + /// + /// This function does not check if the given element exists in the table. Also, + /// this function does not check if there is enough space in the table to insert + /// a new element, so the caller must ensure that the table has at least 1 + /// empty or deleted `bucket`, or this function will never return (it will go into + /// an infinite loop). + /// + /// This function does not make any changes to the `data` parts of the table, + /// or any changes to the `items` or `growth_left` fields of the table. /// - /// There must be at least 1 empty bucket in the table. + /// # Safety + /// + /// The safety rules are directly derived from the safety rules for the + /// [`RawTableInner::set_ctrl_h2`] method. Thus, in order to uphold the safety + /// contracts for that method, as well as for the correct logic of this + /// crate, you must observe the following rules when calling this function: + /// + /// * The [`RawTableInner`] has already been allocated; + /// + /// * The caller of this function must ensure that the "data" parts of the table + /// will have an entry in the returned index (matching the given hash) right + /// after calling this function. + /// + /// Calling this function on a table that has not been allocated results in + /// [`undefined behavior`]. + /// + /// The caller must independently increase the `items` field of the table, and also, + /// if the old control byte was [`EMPTY`], then decrease the table's `growth_left` + /// field, and do not change it if the old control byte was [`DELETED`]. + /// + /// See also the [`Bucket::as_ptr`] method for more information about properly removing + /// or saving an `element` from / into the [`RawTable`] / [`RawTableInner`].
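A toy model of the bookkeeping contract spelled out above for `prepare_insert_slot`: the caller bumps `items`, and `growth_left` shrinks only when an `EMPTY` slot is consumed, not when a `DELETED` tombstone is reused (constants follow SwissTable conventions; everything else is illustrative):

const EMPTY: u8 = 0b1111_1111;
const DELETED: u8 = 0b1000_0000;

struct Counters {
    items: usize,
    growth_left: usize,
}

fn record_insert(ctrl: &mut [u8], counters: &mut Counters, index: usize, h2: u8) {
    let old = ctrl[index];
    ctrl[index] = h2; // mark the slot FULL with the short hash
    counters.items += 1;
    // A reused tombstone was already counted against the load factor when
    // it was first filled, so it does not consume growth budget again.
    if old == EMPTY {
        counters.growth_left -= 1;
    } else {
        debug_assert_eq!(old, DELETED);
    }
}

fn main() {
    let mut ctrl = [EMPTY, DELETED, EMPTY, EMPTY];
    let mut c = Counters { items: 0, growth_left: 3 };
    record_insert(&mut ctrl, &mut c, 1, 0x2a); // reuse a tombstone
    record_insert(&mut ctrl, &mut c, 0, 0x17); // consume an EMPTY slot
    assert_eq!((c.items, c.growth_left), (2, 2));
}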
+ /// + /// [`Bucket::as_ptr`]: Bucket::as_ptr + /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html + /// [`RawTableInner::ctrl`]: RawTableInner::ctrl + /// [`RawTableInner::set_ctrl_h2`]: RawTableInner::set_ctrl_h2 #[inline] - unsafe fn prepare_insert_slot(&self, hash: u64) -> (usize, u8) { - let index = self.find_insert_slot(hash).index; + unsafe fn prepare_insert_slot(&mut self, hash: u64) -> (usize, u8) { + let index: usize = self.find_insert_slot(hash).index; + // SAFETY: + // 1. The `find_insert_slot` function either returns an `index` less than or + // equal to `self.bucket_mask = self.buckets() - 1` of the table, or never + // returns if it cannot find an empty or deleted slot. + // 2. The caller of this function guarantees that the table has already been + // allocated let old_ctrl = *self.ctrl(index); self.set_ctrl_h2(index, hash); (index, old_ctrl) @@ -1960,7 +1890,7 @@ impl RawTableInner { &self, cx: &mut C, hash: u64, - eq: &mut dyn FnMut(&mut C, usize) -> Result, + eq: &dyn Fn(&mut C, usize) -> Result, ) -> Result, E> { let h2_hash = h2(hash); let mut probe_seq = self.probe_seq(hash); @@ -2020,6 +1950,9 @@ impl RawTableInner { /// to do during the first insert due to tombstones). If the caller does not do /// this, then calling this function may result in a memory leak. /// + /// * The [`RawTableInner`] must have properly initialized control bytes otherwise + /// calling this function results in [`undefined behavior`]. + /// /// Calling this function on a table that has not been allocated results in /// [`undefined behavior`]. /// @@ -2067,25 +2000,174 @@ impl RawTableInner { } } + /// Returns an iterator over every element in the table. + /// + /// # Safety + /// + /// If any of the following conditions are violated, the result + /// is [`undefined behavior`]: + /// + /// * The caller has to ensure that the `RawTableInner` outlives the + /// `RawIter`. Because we cannot make the `next` method unsafe on + /// the `RawIter` struct, we have to make the `iter` method unsafe. + /// + /// * The [`RawTableInner`] must have properly initialized control bytes. + /// + /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html #[inline] - unsafe fn bucket(&self, index: usize) -> Bucket { - debug_assert_ne!(self.bucket_mask, 0); - debug_assert!(index < self.buckets()); - Bucket::from_base_index(self.data_end(), index) - } - - #[inline] - unsafe fn bucket_ptr(&self, index: usize, size_of: usize) -> *mut u8 { - debug_assert_ne!(self.bucket_mask, 0); - debug_assert!(index < self.buckets()); - let base: *mut u8 = self.data_end().as_ptr(); - base.sub((index + 1) * size_of) + unsafe fn iter(&self) -> RawIter { + // SAFETY: + // 1. Since the caller of this function ensures that the control bytes + // are properly initialized and `self.data_end()` points to the start + // of the array of control bytes, therefore: `ctrl` is valid for reads, + // properly aligned to `Group::WIDTH` and points to the properly initialized + // control bytes. + // 2. `data` bucket index in the table is equal to the `ctrl` index (i.e. + // equal to zero). + // 3. We pass the exact value of buckets of the table to the function. + // + // `ctrl` points here (to the start + // of the first control byte `CT0`) + // ∨ + // [Pad], T_n, ..., T1, T0, |CT0, CT1, ..., CT_n|, Group::WIDTH + // \________ ________/ + // \/ + // `n = buckets - 1`, i.e. 
`RawIndexTableInner::buckets() - 1` + // + // where: T0...T_n - our stored data; + // CT0...CT_n - control bytes or metadata for `data`. + let data = Bucket::from_base_index(self.data_end(), 0); + RawIter { + // SAFETY: See explanation above + iter: RawIterRange::new(self.ctrl.as_ptr(), data, self.buckets()), + items: self.items, + } } - #[inline] - unsafe fn data_end(&self) -> NonNull { - NonNull::new_unchecked(self.ctrl.as_ptr().cast()) - } + /// Executes the destructors (if any) of the values stored in the table. + /// + /// # Note + /// + /// This function does not erase the control bytes of the table and does + /// not make any changes to the `items` or `growth_left` fields of the + /// table. If necessary, the caller of this function must manually set + /// up these table fields, for example using the [`clear_no_drop`] function. + /// + /// Be careful when calling this function, because the drop function of + /// the elements can panic, and this can leave the table in an inconsistent + /// state. + /// + /// # Safety + /// + /// If `T` is a type that should be dropped and **the table is not empty**, + /// calling this function more than once results in [`undefined behavior`]. + /// + /// If `T` is not [`Copy`], attempting to use values stored in the table after + /// calling this function may result in [`undefined behavior`]. + /// + /// It is safe to call this function on a table that has not been allocated, + /// on a table with uninitialized control bytes, and on a table with no actual + /// data but with `Full` control bytes if `self.items == 0`. + /// + /// See also the [`Bucket::drop`] / [`Bucket::as_ptr`] methods for more information + /// about properly removing or saving an `element` from / into the [`RawTable`] / + /// [`RawTableInner`]. + /// + /// [`Bucket::drop`]: Bucket::drop + /// [`Bucket::as_ptr`]: Bucket::as_ptr + /// [`clear_no_drop`]: RawTableInner::clear_no_drop + /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html + unsafe fn drop_elements(&mut self) { + // Check that `self.items != 0`. Protects against the possibility + // of creating an iterator on a table with uninitialized control bytes. + if T::NEEDS_DROP && self.items != 0 { + // SAFETY: We know for sure that RawTableInner will outlive the + // returned `RawIter` iterator, and the caller of this function + // must uphold the safety contract for `drop_elements` method. + for item in self.iter::() { + // SAFETY: The caller must uphold the safety contract for + // `drop_elements` method. + item.drop(); + } + } + } + + /// Executes the destructors (if any) of the values stored in the table and then + /// deallocates the table. + /// + /// # Note + /// + /// Calling this function invalidates (leaves dangling) all instances of + /// buckets ([`Bucket`]) as well as the `ctrl` field of the table. + /// + /// This function does not make any changes to the `bucket_mask`, `items` or `growth_left` + /// fields of the table. If necessary, the caller of this function must manually set + /// up these table fields. + /// + /// # Safety + /// + /// If any of the following conditions are violated, the result is [`undefined behavior`]: + /// + /// * Calling this function more than once; + /// + /// * The `alloc` must be the same [`Allocator`] as the `Allocator` that was used + /// to allocate this table. + /// + /// * The `table_layout` must be the same [`TableLayout`] as the `TableLayout` that + /// was used to allocate this table.
+ /// + /// The caller of this function should pay attention to the possibility of the + /// elements' drop function panicking, because this: + /// + /// * May leave the table in an inconsistent state; + /// + /// * May leak memory, since the memory is never deallocated. + /// + /// Attempting to use the `ctrl` field of the table (dereferencing it) after calling this + /// function results in [`undefined behavior`]. + /// + /// It is safe to call this function on a table that has not been allocated, + /// on a table with uninitialized control bytes, and on a table with no actual + /// data but with `Full` control bytes if `self.items == 0`. + /// + /// See also [`RawTableInner::drop_elements`] or [`RawTableInner::free_buckets`] + /// for more information. + /// + /// [`RawTableInner::drop_elements`]: RawTableInner::drop_elements + /// [`RawTableInner::free_buckets`]: RawTableInner::free_buckets + /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html + unsafe fn drop_inner_table(&mut self, alloc: &A, table_layout: TableLayout) { + if !self.is_empty_singleton() { + unsafe { + // SAFETY: The caller must uphold the safety contract for `drop_inner_table` method. + self.drop_elements::(); + // SAFETY: + // 1. We have checked that our table is allocated. + // 2. The caller must uphold the safety contract for `drop_inner_table` method. + self.free_buckets(alloc, table_layout); + } + } + } + + #[inline] + unsafe fn bucket(&self, index: usize) -> Bucket { + debug_assert_ne!(self.bucket_mask, 0); + debug_assert!(index < self.buckets()); + Bucket::from_base_index(self.data_end(), index) + } + + #[inline] + unsafe fn bucket_ptr(&self, index: usize, size_of: usize) -> *mut u8 { + debug_assert_ne!(self.bucket_mask, 0); + debug_assert!(index < self.buckets()); + let base: *mut u8 = self.data_end().as_ptr(); + base.sub((index + 1) * size_of) + } + + #[inline] + unsafe fn data_end(&self) -> NonNull { + NonNull::new_unchecked(self.ctrl.as_ptr().cast()) + } /// Returns an iterator-like object for a probe sequence on the table.
/// @@ -2102,7 +2184,6 @@ impl RawTableInner { /// Returns the index of a bucket for which a value must be inserted if there is enough rooom /// in the table, otherwise returns error - #[cfg(feature = "raw")] #[inline] unsafe fn prepare_insert_no_grow(&mut self, hash: u64) -> Result { let index = self.find_insert_slot(hash).index; @@ -2158,7 +2239,7 @@ impl RawTableInner { /// [`Bucket::as_ptr`]: Bucket::as_ptr /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html #[inline] - unsafe fn set_ctrl_h2(&self, index: usize, hash: u64) { + unsafe fn set_ctrl_h2(&mut self, index: usize, hash: u64) { // SAFETY: The caller must uphold the safety rules for the [`RawTableInner::set_ctrl_h2`] self.set_ctrl(index, h2(hash)); } @@ -2192,7 +2273,7 @@ impl RawTableInner { /// [`Bucket::as_ptr`]: Bucket::as_ptr /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html #[inline] - unsafe fn replace_ctrl_h2(&self, index: usize, hash: u64) -> u8 { + unsafe fn replace_ctrl_h2(&mut self, index: usize, hash: u64) -> u8 { // SAFETY: The caller must uphold the safety rules for the [`RawTableInner::replace_ctrl_h2`] let prev_ctrl = *self.ctrl(index); self.set_ctrl_h2(index, hash); @@ -2224,9 +2305,12 @@ impl RawTableInner { /// [`Bucket::as_ptr`]: Bucket::as_ptr /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html #[inline] - unsafe fn set_ctrl(&self, index: usize, ctrl: u8) { + unsafe fn set_ctrl(&mut self, index: usize, ctrl: u8) { // Replicate the first Group::WIDTH control bytes at the end of - // the array without using a branch: + // the array without using a branch. If the table is smaller than + // the group width (self.buckets() < Group::WIDTH), + // `index2 = Group::WIDTH + index`, otherwise `index2` is: + // // - If index >= Group::WIDTH then index == index2. // - Otherwise index2 == self.bucket_mask + 1 + index. // @@ -2309,28 +2393,43 @@ impl RawTableInner { self.bucket_mask == 0 } + /// Attempts to allocate a new hash table with at least enough capacity + /// for inserting the given number of elements without reallocating, + /// and returns it inside a ScopeGuard to protect against panics in the hash + /// function. + /// + /// # Note + /// + /// It is recommended (but not required): + /// + /// * That the new table's `capacity` be greater than or equal to `self.items`. + /// + /// * That the `alloc` is the same [`Allocator`] as the `Allocator` used + /// to allocate this table. + /// + /// * That the `table_layout` is the same [`TableLayout`] as the `TableLayout` used + /// to allocate this table. + /// + /// If `table_layout` does not match the `TableLayout` that was used to allocate + /// this table, then using `mem::swap` with `self` and the new table returned + /// by this function results in [`undefined behavior`]. + /// + /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html #[allow(clippy::mut_mut)] #[inline] - unsafe fn prepare_resize( + fn prepare_resize<'a, A>( &self, + alloc: &'a A, table_layout: TableLayout, capacity: usize, - fallibility: Fallibility, - ) -> Result< - crate::hashbrown::fork::scopeguard::ScopeGuard, - TryReserveError, - > { + ) -> Result, Error> + where + A: Allocator, + { debug_assert!(self.items <= capacity); // Allocate and initialize the new table.
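The rewritten `set_ctrl` comment above describes the mirrored control-byte write. A standalone sketch of the branch-free index math (the wrapping form below is how hashbrown-style tables compute it; it is assumed here, not quoted from this diff):

const GROUP_WIDTH: usize = 16;

fn set_ctrl(ctrl: &mut [u8], bucket_mask: usize, index: usize, byte: u8) {
    // index2 == index when index >= GROUP_WIDTH,
    // index2 == bucket_mask + 1 + index otherwise.
    let index2 = ((index.wrapping_sub(GROUP_WIDTH)) & bucket_mask) + GROUP_WIDTH;
    ctrl[index] = byte;
    ctrl[index2] = byte;
}

fn main() {
    let buckets = 32usize;
    // `GROUP_WIDTH` trailing bytes mirror the first group.
    let mut ctrl = vec![0xffu8; buckets + GROUP_WIDTH];
    set_ctrl(&mut ctrl, buckets - 1, 3, 0x2a);
    assert_eq!(ctrl[3], 0x2a);
    assert_eq!(ctrl[buckets + 3], 0x2a); // mirrored copy past the end
    set_ctrl(&mut ctrl, buckets - 1, 20, 0x11);
    assert_eq!(ctrl[20], 0x11); // index >= GROUP_WIDTH: index2 == index
}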
- let mut new_table = RawTableInner::fallible_with_capacity( - self.alloc.clone(), - table_layout, - capacity, - fallibility, - )?; - new_table.growth_left -= self.items; - new_table.items = self.items; + let new_table = RawTableInner::fallible_with_capacity(alloc, table_layout, capacity)?; // The hash function may panic, in which case we simply free the new // table without dropping any elements that may have been copied into @@ -2340,7 +2439,11 @@ impl RawTableInner { // the comment at the bottom of this function. Ok(guard(new_table, move |self_| { if !self_.is_empty_singleton() { - self_.free_buckets(table_layout); + // SAFETY: + // 1. We have checked that our table is allocated. + // 2. We know for sure that the `alloc` and `table_layout` matches the + // [`Allocator`] and [`TableLayout`] used to allocate this table. + unsafe { self_.free_buckets(alloc, table_layout) }; } })) } @@ -2349,90 +2452,238 @@ impl RawTableInner { /// /// This uses dynamic dispatch to reduce the amount of /// code generated, but it is eliminated by LLVM optimizations when inlined. + /// + /// # Safety + /// + /// If any of the following conditions are violated, the result is + /// [`undefined behavior`]: + /// + /// * The `alloc` must be the same [`Allocator`] as the `Allocator` used + /// to allocate this table. + /// + /// * The `layout` must be the same [`TableLayout`] as the `TableLayout` + /// used to allocate this table. + /// + /// * The `drop` function (`fn(*mut u8)`) must be the actual drop function of + /// the elements stored in the table. + /// + /// * The [`RawTableInner`] must have properly initialized control bytes. + /// + /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html #[allow(clippy::inline_always)] #[inline(always)] - unsafe fn reserve_rehash_inner( + unsafe fn reserve_rehash_inner( &mut self, cx: &mut C, + alloc: &A, additional: usize, - hasher: &dyn Fn(&mut Self, &mut C, usize) -> Result, - fallibility: Fallibility, + hasher: &dyn Fn(&mut C, &mut Self, usize) -> Result, layout: TableLayout, drop: Option, - ) -> Result, E> { + ) -> Result<(), CustomError> + where + A: Allocator, + { // Avoid `Option::ok_or_else` because it bloats LLVM IR. let new_items = match self.items.checked_add(additional) { Some(new_items) => new_items, - None => return Ok(Err(fallibility.capacity_overflow())), + None => return Err(CustomError::from(Error::CapacityOverflow)), }; let full_capacity = bucket_mask_to_capacity(self.bucket_mask); if new_items <= full_capacity / 2 { // Rehash in-place without re-allocating if we have plenty of spare // capacity that is locked up due to DELETED entries. - self.rehash_in_place(cx, hasher, layout.size, drop)?; - Ok(Ok(())) + + // SAFETY: + // 1. We know for sure that `[`RawTableInner`]` has already been allocated + // (since new_items <= full_capacity / 2); + // 2. The caller ensures that `drop` function is the actual drop function of + // the elements stored in the table. + // 3. The caller ensures that `layout` matches the [`TableLayout`] that was + // used to allocate this table. + // 4. The caller ensures that the control bytes of the `RawTableInner` + // are already initialized. + self.rehash_in_place(cx, hasher, layout.size, drop) + .map_err(CustomError::Custom)?; + Ok(()) } else { // Otherwise, conservatively resize to at least the next size up // to avoid churning deletes into frequent rehashes. + // + // SAFETY: + // 1. We know for sure that `capacity >= self.items`. + // 2. 
The caller ensures that `alloc` and `layout` matches the [`Allocator`] and + // [`TableLayout`] that were used to allocate this table. + // 3. The caller ensures that the control bytes of the `RawTableInner` + // are already initialized. self.resize_inner( cx, + alloc, usize::max(new_items, full_capacity + 1), hasher, - fallibility, layout, ) } } + /// Returns an iterator over the indices of full buckets in the table. + /// + /// # Safety + /// + /// Behavior is undefined if any of the following conditions are violated: + /// + /// * The caller has to ensure that the `RawTableInner` outlives the + /// `FullBucketsIndices`. Because we cannot make the `next` method + /// unsafe on the `FullBucketsIndices` struct, we have to make the + /// `full_buckets_indices` method unsafe. + /// + /// * The [`RawTableInner`] must have properly initialized control bytes. + #[inline(always)] + unsafe fn full_buckets_indices(&self) -> FullBucketsIndices { + // SAFETY: + // 1. Since the caller of this function ensures that the control bytes + // are properly initialized and `self.ctrl(0)` points to the start + // of the array of control bytes, therefore: `ctrl` is valid for reads, + // properly aligned to `Group::WIDTH` and points to the properly initialized + // control bytes. + // 2. The value of `items` is equal to the amount of data (values) added + // to the table. + // + // `ctrl` points here (to the start + // of the first control byte `CT0`) + // ∨ + // [Pad], T_n, ..., T1, T0, |CT0, CT1, ..., CT_n|, Group::WIDTH + // \________ ________/ + // \/ + // `n = buckets - 1`, i.e. `RawIndexTableInner::buckets() - 1` + // + // where: T0...T_n - our stored data; + // CT0...CT_n - control bytes or metadata for `data`. + let ctrl = NonNull::new_unchecked(self.ctrl(0)); + + FullBucketsIndices { + // Load the first group + // SAFETY: See explanation above. + current_group: Group::load_aligned(ctrl.as_ptr()).match_full().into_iter(), + group_first_index: 0, + ctrl, + items: self.items, + } + } + /// Allocates a new table of a different size and moves the contents of the /// current table into it. /// /// This uses dynamic dispatch to reduce the amount of /// code generated, but it is eliminated by LLVM optimizations when inlined. + /// + /// # Safety + /// + /// If any of the following conditions are violated, the result is + /// [`undefined behavior`]: + /// + /// * The `alloc` must be the same [`Allocator`] as the `Allocator` used + /// to allocate this table; + /// + /// * The `layout` must be the same [`TableLayout`] as the `TableLayout` + /// used to allocate this table; + /// + /// * The [`RawTableInner`] must have properly initialized control bytes. + /// + /// The caller of this function must ensure that `capacity >= self.items` + /// otherwise: + /// + /// * If `self.items != 0`, calling this function with `capacity == 0` + /// results in [`undefined behavior`]. + /// + /// * If `capacity_to_buckets(capacity) < Group::WIDTH` and + /// `self.items > capacity_to_buckets(capacity)` calling this function + /// results in [`undefined behavior`]. + /// + /// * If `capacity_to_buckets(capacity) >= Group::WIDTH` and + /// `self.items > capacity_to_buckets(capacity)` calling this function + /// will never return (it will go into an infinite loop). + /// + /// Note: It is recommended (but not required) that the new table's `capacity` + /// be greater than or equal to `self.items`. If `capacity <= self.items`, + /// this function can never return. See [`RawTableInner::find_insert_slot`] for + /// more information.
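A simplified scalar model of what `full_buckets_indices` iterates: each group of control bytes becomes a bitmask of FULL slots (FULL bytes have the high bit clear, since they store an `h2` hash), and indices are popped bit by bit. The real code performs the `match_full` step with SIMD over `Group::WIDTH` bytes:

const EMPTY: u8 = 0b1111_1111;
const DELETED: u8 = 0b1000_0000;

fn match_full(group: &[u8]) -> u32 {
    let mut mask = 0u32;
    for (i, &b) in group.iter().enumerate() {
        if b & 0x80 == 0 {
            mask |= 1 << i;
        }
    }
    mask
}

fn full_indices(ctrl: &[u8], width: usize) -> Vec<usize> {
    let mut out = Vec::new();
    for (g, group) in ctrl.chunks(width).enumerate() {
        let mut mask = match_full(group);
        while mask != 0 {
            let bit = mask.trailing_zeros() as usize;
            out.push(g * width + bit);
            mask &= mask - 1; // clear the lowest set bit
        }
    }
    out
}

fn main() {
    let ctrl = [0x11, EMPTY, DELETED, 0x05, EMPTY, EMPTY, 0x7f, EMPTY];
    assert_eq!(full_indices(&ctrl, 4), vec![0, 3, 6]);
}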
+ /// + /// [`RawTableInner::find_insert_slot`]: RawTableInner::find_insert_slot + /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html #[allow(clippy::inline_always)] #[inline(always)] - unsafe fn resize_inner( + unsafe fn resize_inner( &mut self, cx: &mut C, + alloc: &A, capacity: usize, - hasher: &dyn Fn(&mut Self, &mut C, usize) -> Result, - fallibility: Fallibility, + hasher: &dyn Fn(&mut C, &mut Self, usize) -> Result, layout: TableLayout, - ) -> Result, E> { - let mut new_table = match self.prepare_resize(layout, capacity, fallibility) { - Ok(new_table) => new_table, - Err(error) => return Ok(Err(error)), - }; - - // Copy all elements to the new table. - for i in 0..self.buckets() { - if !self.is_bucket_full(i) { - continue; - } - + ) -> Result<(), CustomError> + where + A: Allocator, + { + // SAFETY: We know for sure that `alloc` and `layout` matches the [`Allocator`] and [`TableLayout`] + // that were used to allocate this table. + let mut new_table = self.prepare_resize(alloc, layout, capacity)?; + + // SAFETY: We know for sure that RawTableInner will outlive the + // returned `FullBucketsIndices` iterator, and the caller of this + // function ensures that the control bytes are properly initialized. + for full_byte_index in self.full_buckets_indices() { // This may panic. - let hash = hasher(self, cx, i)?; + let hash = hasher(cx, self, full_byte_index).map_err(CustomError::Custom)?; // We can use a simpler version of insert() here since: // - there are no DELETED entries. // - we know there is enough space in the table. // - all elements are unique. - let (index, _) = new_table.prepare_insert_slot(hash); + // + // SAFETY: + // 1. The caller of this function guarantees that `capacity > 0` + // so `new_table` must already have some allocated memory. + // 2. We set `growth_left` and `items` fields of the new table + // after the loop. + // 3. We insert into the table, at the returned index, the data + // matching the given hash immediately after calling this function. + let (new_index, _) = new_table.prepare_insert_slot(hash); + // SAFETY: + // + // * `src` is valid for reads of `layout.size` bytes, since the + // table is alive and the `full_byte_index` is guaranteed to be + // within bounds (see `FullBucketsIndices::next_impl`); + // + // * `dst` is valid for writes of `layout.size` bytes, since the + // caller ensures that `table_layout` matches the [`TableLayout`] + // that was used to allocate the old table and we have the `new_index` + // returned by `prepare_insert_slot`. + // + // * Both `src` and `dst` are properly aligned. + // + // * Both `src` and `dst` point to different regions of memory. ptr::copy_nonoverlapping( - self.bucket_ptr(i, layout.size), - new_table.bucket_ptr(index, layout.size), + self.bucket_ptr(full_byte_index, layout.size), + new_table.bucket_ptr(new_index, layout.size), layout.size, ); } + // The hash function didn't panic, so we can safely set the + // `growth_left` and `items` fields of the new table. + new_table.growth_left -= self.items; + new_table.items = self.items; + // We successfully copied all elements without panicking. Now replace // self with the new table. The old table will have its memory freed but // the items will not be dropped (since they have been moved into the // new table). + // SAFETY: The caller ensures that `table_layout` matches the [`TableLayout`] + // that was used to allocate this table.
mem::swap(self, &mut new_table); - Ok(Ok(())) + Ok(()) } /// Rehashes the contents of the table in place (i.e. without changing the @@ -2442,13 +2693,28 @@ impl RawTableInner { /// /// This uses dynamic dispatch to reduce the amount of /// code generated, but it is eliminated by LLVM optimizations when inlined. + /// + /// # Safety + /// + /// If any of the following conditions are violated, the result is [`undefined behavior`]: + /// + /// * The `size_of` must be equal to the size of the elements stored in the table; + /// + /// * The `drop` function (`fn(*mut u8)`) must be the actual drop function of + /// the elements stored in the table. + /// + /// * The [`RawTableInner`] has already been allocated; + /// + /// * The [`RawTableInner`] must have properly initialized control bytes. + /// + /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html #[allow(clippy::inline_always)] #[cfg_attr(feature = "inline-more", inline(always))] #[cfg_attr(not(feature = "inline-more"), inline)] unsafe fn rehash_in_place( &mut self, cx: &mut C, - hasher: &dyn Fn(&mut Self, &mut C, usize) -> Result, + hasher: &dyn Fn(&mut C, &mut Self, usize) -> Result, size_of: usize, drop: Option, ) -> Result<(), E> { @@ -2483,7 +2749,7 @@ impl RawTableInner { 'inner: loop { // Hash the current item - let hash = hasher(*guard, cx, i)?; + let hash = hasher(cx, *guard, i)?; // Search for a suitable place to put it let new_i = guard.find_insert_slot(hash).index; @@ -2527,14 +2793,64 @@ impl RawTableInner { Ok(()) } + /// Deallocates the table without dropping any entries. + /// + /// # Note + /// + /// This function must be called only after [`drop_elements`](RawTable::drop_elements), + /// otherwise it can lead to a memory leak. Calling this function also invalidates + /// (leaves dangling) all instances of buckets ([`Bucket`]) as well as + /// the `ctrl` field of the table. + /// + /// # Safety + /// + /// If any of the following conditions are violated, the result is [`Undefined Behavior`]: + /// + /// * The [`RawTableInner`] has already been allocated; + /// + /// * The `alloc` must be the same [`Allocator`] as the `Allocator` that was used + /// to allocate this table. + /// + /// * The `table_layout` must be the same [`TableLayout`] as the `TableLayout` that was used + /// to allocate this table. + /// + /// See also [`GlobalAlloc::dealloc`] or [`Allocator::deallocate`] for more information. + /// + /// [`Undefined Behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html + /// [`GlobalAlloc::dealloc`]: https://doc.rust-lang.org/alloc/alloc/trait.GlobalAlloc.html#tymethod.dealloc + /// [`Allocator::deallocate`]: https://doc.rust-lang.org/alloc/alloc/trait.Allocator.html#tymethod.deallocate #[inline] - unsafe fn free_buckets(&mut self, table_layout: TableLayout) { + unsafe fn free_buckets(&mut self, alloc: &A, table_layout: TableLayout) + where + A: Allocator, + { + // SAFETY: The caller must uphold the safety contract for `free_buckets` + // method. let (ptr, layout) = self.allocation_info(table_layout); - self.alloc.deallocate(ptr, layout); + alloc.deallocate(ptr, layout); } + /// Returns a pointer to the allocated memory and the layout that was used to + /// allocate the table.
+ /// + /// # Safety + /// + /// The caller of this function must observe the following safety rules: + /// + /// * The [`RawTableInner`] has already been allocated, otherwise + /// calling this function results in [`undefined behavior`] + /// + /// * The `table_layout` must be the same [`TableLayout`] as the `TableLayout` + /// that was used to allocate this table. Failure to comply with this condition + /// may result in [`undefined behavior`]. + /// + /// See also [`GlobalAlloc::dealloc`] or [`Allocator::deallocate`] for more information. + /// + /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html + /// [`GlobalAlloc::dealloc`]: https://doc.rust-lang.org/alloc/alloc/trait.GlobalAlloc.html#tymethod.dealloc + /// [`Allocator::deallocate`]: https://doc.rust-lang.org/alloc/alloc/trait.Allocator.html#tymethod.deallocate #[inline] - fn allocation_info(&self, table_layout: TableLayout) -> (NonNull, Layout) { + unsafe fn allocation_info(&self, table_layout: TableLayout) -> (NonNull, Layout) { debug_assert!( !self.is_empty_singleton(), "this function can only be called on non-empty tables" @@ -2546,17 +2862,36 @@ impl RawTableInner { None => unsafe { hint::unreachable_unchecked() }, }; ( + // SAFETY: The caller must uphold the safety contract for `allocation_info` method. unsafe { NonNull::new_unchecked(self.ctrl.as_ptr().sub(ctrl_offset)) }, layout, ) } - #[cfg(feature = "raw")] - fn allocation_info_or_zero(&self, table_layout: TableLayout) -> (NonNull, Layout) { + /// Returns a pointer to the allocated memory and the layout that was used to + /// allocate the table. If [`RawTableInner`] has not been allocated, this + /// function returns a dangling pointer and a `()` (unit) layout. + /// + /// # Safety + /// + /// The `table_layout` must be the same [`TableLayout`] as the `TableLayout` + /// that was used to allocate this table. Failure to comply with this condition + /// may result in [`undefined behavior`]. + /// + /// See also [`GlobalAlloc::dealloc`] or [`Allocator::deallocate`] for more information. + /// + /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html + /// [`GlobalAlloc::dealloc`]: https://doc.rust-lang.org/alloc/alloc/trait.GlobalAlloc.html#tymethod.dealloc + /// [`Allocator::deallocate`]: https://doc.rust-lang.org/alloc/alloc/trait.Allocator.html#tymethod.deallocate + unsafe fn allocation_info_or_zero(&self, table_layout: TableLayout) -> (NonNull, Layout) { if self.is_empty_singleton() { (NonNull::dangling(), Layout::new::<()>()) } else { - self.allocation_info(table_layout) + // SAFETY: + // 1. We have checked that our table is allocated. + // 2. The caller ensures that `table_layout` matches the [`TableLayout`] + // that was used to allocate this table. + unsafe { self.allocation_info(table_layout) } } } @@ -2666,10 +3001,13 @@ impl RawTableInner { } } -impl Clone for RawTable { - fn clone(&self) -> Self { +impl TryClone for RawTable +where + T: TryClone, +{ + fn try_clone(&self) -> Result { if self.table.is_empty_singleton() { - Self::new_in(self.table.alloc.clone()) + Ok(Self::new_in(self.alloc.clone())) } else { unsafe { // Avoid `Result::ok_or_else` because it bloats LLVM IR. @@ -2677,14 +3015,8 @@ impl Clone for RawTable { // SAFETY: This is safe as we are taking the size of an already allocated table // and therefore сapacity overflow cannot occur, `self.table.buckets()` is power // of two and all allocator errors will be caught inside `RawTableInner::new_uninitialized`.
- let mut new_table = match Self::new_uninitialized( - self.table.alloc.clone(), - self.table.buckets(), - Fallibility::Infallible, - ) { - Ok(table) => table, - Err(_) => hint::unreachable_unchecked(), - }; + let mut new_table = + Self::new_uninitialized(self.alloc.clone(), self.table.buckets())?; // Cloning elements may fail (the clone function may panic). But we don't // need to worry about uninitialized control bits, since: @@ -2694,21 +3026,29 @@ impl Clone for RawTable { // `self` (thus initializing them). But this will not affect the `Drop` // function, since the `clone_from_spec` function sets `items` only after // successfully clonning all elements. - new_table.clone_from_spec(self); - new_table + new_table.clone_from_spec(self)?; + Ok(new_table) } } } - fn clone_from(&mut self, source: &Self) { + fn try_clone_from(&mut self, source: &Self) -> Result<(), Error> { if source.table.is_empty_singleton() { - // Dereference drops old `self` table - *self = Self::new_in(self.table.alloc.clone()); + let mut old_inner = mem::replace(&mut self.table, RawTableInner::NEW); + unsafe { + // SAFETY: + // 1. We call the function only once; + // 2. We know for sure that `alloc` and `table_layout` matches the [`Allocator`] + // and [`TableLayout`] that were used to allocate this table. + // 3. If any elements' drop function panics, then there will only be a memory leak, + // because we have replaced the inner table with a new one. + old_inner.drop_inner_table::(&self.alloc, Self::TABLE_LAYOUT); + } } else { unsafe { // Make sure that if any panics occurs, we clear the table and // leave it in an empty state. - let mut guard = guard(&mut *self, |self_| { + let mut self_ = guard(self, |self_| { self_.clear_no_drop(); }); @@ -2719,66 +3059,74 @@ impl Clone for RawTable { // This leak is unavoidable: we can't try dropping more elements // since this could lead to another panic and abort the process. // - // SAFETY: We clear our table right after dropping the elements, - // so there is no double drop, since `items` will be equal to zero. - guard.drop_elements(); - - // Okay, we've successfully dropped all elements, so we'll just set - // `items` to zero (so that the `Drop` of `RawTable` doesn't try to - // drop all elements twice) and just forget about the guard. - guard.table.items = 0; - mem::forget(guard); + // SAFETY: If something goes wrong, we clear our table right after + // dropping the elements, so there is no double drop, since `items` + // will be equal to zero. + self_.table.drop_elements::(); // If necessary, resize our table to match the source. - if self.buckets() != source.buckets() { - // Skip our drop by using ptr::write. - if !self.table.is_empty_singleton() { - // SAFETY: We have verified that the table is allocated. - self.free_buckets(); + if self_.buckets() != source.buckets() { + let new_inner = RawTableInner::new_uninitialized( + &self_.alloc, + Self::TABLE_LAYOUT, + source.buckets(), + )?; + // Replace the old inner with the new uninitialized one. It's ok, since if something + // goes wrong `ScopeGuard` will initialize all control bytes and leave an empty table. + let mut old_inner = mem::replace(&mut self_.table, new_inner); + if !old_inner.is_empty_singleton() { + // SAFETY: + // 1. We have checked that our table is allocated. + // 2. We know for sure that `alloc` and `table_layout` matches + // the [`Allocator`] and [`TableLayout`] that were used to allocate this table.
+ old_inner.free_buckets(&self_.alloc, Self::TABLE_LAYOUT); } - (self as *mut Self).write( - // Avoid `Result::unwrap_or_else` because it bloats LLVM IR. - // - // SAFETY: This is safe as we are taking the size of an already allocated table - // and therefore сapacity overflow cannot occur, `self.table.buckets()` is power - // of two and all allocator errors will be caught inside `RawTableInner::new_uninitialized`. - match Self::new_uninitialized( - self.table.alloc.clone(), - source.buckets(), - Fallibility::Infallible, - ) { - Ok(table) => table, - Err(_) => hint::unreachable_unchecked(), - }, - ); } // Cloning elements may fail (the clone function may panic), but the `ScopeGuard` // inside the `clone_from_impl` function will take care of that, dropping all - // cloned elements if necessary. The `Drop` of `RawTable` takes care of the rest - // by freeing up the allocated memory. - self.clone_from_spec(source); + // cloned elements if necessary. Our `ScopeGuard` will clear the table. + self_.clone_from_spec(source)?; + + // Disarm the scope guard if cloning was successful. + ScopeGuard::into_inner(self_); } } + + Ok(()) + } +} + +#[cfg(test)] +impl Clone for RawTable +where + T: TryClone, +{ + fn clone(&self) -> Self { + self.try_clone().abort() + } + + fn clone_from(&mut self, source: &Self) { + self.try_clone_from(source).abort() } } /// Specialization of `clone_from` for `Copy` types trait RawTableClone { - unsafe fn clone_from_spec(&mut self, source: &Self); + unsafe fn clone_from_spec(&mut self, source: &Self) -> Result<(), Error>; } -impl RawTableClone for RawTable { +impl RawTableClone for RawTable { default_fn! { #[cfg_attr(feature = "inline-more", inline)] - unsafe fn clone_from_spec(&mut self, source: &Self) { - self.clone_from_impl(source); + unsafe fn clone_from_spec(&mut self, source: &Self) -> Result<(), Error> { + self.clone_from_impl(source) } } } -#[cfg(feature = "nightly")] -impl RawTableClone for RawTable { +#[cfg(rune_nightly)] +impl RawTableClone for RawTable { #[cfg_attr(feature = "inline-more", inline)] - unsafe fn clone_from_spec(&mut self, source: &Self) { + unsafe fn clone_from_spec(&mut self, source: &Self) -> Result<(), Error> { source .table .ctrl(0) @@ -2790,16 +3138,17 @@ impl RawTableClone for RawTable { self.table.items = source.table.items; self.table.growth_left = source.table.growth_left; + Ok(()) } } -impl RawTable { +impl RawTable { /// Common code for clone and clone_from. Assumes: /// - `self.buckets() == source.buckets()`. /// - Any existing elements have been dropped. /// - The control bytes are not initialized yet. #[cfg_attr(feature = "inline-more", inline)] - unsafe fn clone_from_impl(&mut self, source: &Self) { + unsafe fn clone_from_impl(&mut self, source: &Self) -> Result<(), Error> { // Copy the control bytes unchanged. We do this in a single pass source .table @@ -2810,7 +3159,7 @@ impl RawTable { // to make sure we drop only the elements that have been // cloned so far. let mut guard = guard((0, &mut *self), |(index, self_)| { - if Self::DATA_NEEDS_DROP { + if T::NEEDS_DROP { for i in 0..=*index { if self_.is_bucket_full(i) { self_.bucket(i).drop(); @@ -2822,7 +3171,7 @@ impl RawTable { for from in source.iter() { let index = source.bucket_index(&from); let to = guard.1.bucket(index); - to.write(from.as_ref().clone()); + to.write(from.as_ref().try_clone()?); // Update the index in case we need to unwind. 
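The `Clone` to `TryClone` conversion in the hunks above is the heart of this patch: cloning allocates, allocation is fallible, so `Clone` survives only as a test-time wrapper that aborts on error. A minimal standalone sketch of the trait shape (simplified; not rune-alloc's exact definitions):

#[derive(Debug)]
struct Error; // stand-in for the crate's allocation error

trait TryClone: Sized {
    fn try_clone(&self) -> Result<Self, Error>;

    fn try_clone_from(&mut self, source: &Self) -> Result<(), Error> {
        *self = source.try_clone()?;
        Ok(())
    }
}

impl TryClone for Vec<u32> {
    fn try_clone(&self) -> Result<Self, Error> {
        let mut out = Vec::new();
        // try_reserve_exact surfaces allocation failure instead of aborting.
        out.try_reserve_exact(self.len()).map_err(|_| Error)?;
        out.extend_from_slice(self);
        Ok(out)
    }
}

fn main() {
    let a = vec![1u32, 2, 3];
    let mut b = Vec::new();
    b.try_clone_from(&a).expect("allocation failed");
    assert_eq!(a, b);
}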
guard.0 = index; @@ -2833,11 +3182,16 @@ impl RawTable { self.table.items = source.table.items; self.table.growth_left = source.table.growth_left; + Ok(()) } /// Variant of `clone_from` to use when a hasher is available. - #[cfg(feature = "raw")] - pub fn clone_from_with_hasher(&mut self, source: &Self, hasher: impl Fn(&T) -> u64) { + pub fn clone_from_with_hasher( + &mut self, + cx: &mut C, + source: &Self, + hasher: impl HasherFn, + ) -> Result<(), CustomError> { // If we have enough capacity in the table, just clear it and insert // elements one by one. We don't do this if we have the same number of // buckets as the source since we can just copy the contents directly @@ -2847,7 +3201,7 @@ impl RawTable { { self.clear(); - let guard_self = guard(&mut *self, |self_| { + let mut guard_self = guard(&mut *self, |self_| { // Clear the partially copied table if a panic occurs, otherwise // items and growth_left will be out of sync with the contents // of the table. @@ -2857,8 +3211,8 @@ impl RawTable { unsafe { for item in source.iter() { // This may panic. - let item = item.as_ref().clone(); - let hash = hasher(&item); + let item = item.as_ref().try_clone()?; + let hash = hasher.hash(cx, &item).map_err(CustomError::Custom)?; // We can use a simpler version of insert() here since: // - there are no DELETED entries. @@ -2875,44 +3229,56 @@ impl RawTable { self.table.items = source.table.items; self.table.growth_left -= source.table.items; } else { - self.clone_from(source); + self.try_clone_from(source)?; } + + Ok(()) } } -impl Default for RawTable { +impl Default for RawTable { #[inline] fn default() -> Self { Self::new_in(Default::default()) } } -#[cfg(feature = "nightly")] -unsafe impl<#[may_dangle] T, A: Allocator + Clone> Drop for RawTable { +#[cfg(rune_nightly)] +unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawTable { #[cfg_attr(feature = "inline-more", inline)] fn drop(&mut self) { - if !self.table.is_empty_singleton() { - unsafe { - self.drop_elements(); - self.free_buckets(); - } + unsafe { + // SAFETY: + // 1. We call the function only once; + // 2. We know for sure that `alloc` and `table_layout` matches the [`Allocator`] + // and [`TableLayout`] that were used to allocate this table. + // 3. If the drop function of any elements fails, then only a memory leak will occur, + // and we don't care because we are inside the `Drop` function of the `RawTable`, + // so there won't be any table left in an inconsistent state. + self.table + .drop_inner_table::(&self.alloc, Self::TABLE_LAYOUT); } } } -#[cfg(not(feature = "nightly"))] -impl Drop for RawTable { +#[cfg(not(rune_nightly))] +impl Drop for RawTable { #[cfg_attr(feature = "inline-more", inline)] fn drop(&mut self) { - if !self.table.is_empty_singleton() { - unsafe { - self.drop_elements(); - self.free_buckets(); - } + unsafe { + // SAFETY: + // 1. We call the function only once; + // 2. We know for sure that `alloc` and `table_layout` matches the [`Allocator`] + // and [`TableLayout`] that were used to allocate this table. + // 3. If the drop function of any elements fails, then only a memory leak will occur, + // and we don't care because we are inside the `Drop` function of the `RawTable`, + // so there won't be any table left in an inconsistent state. 
+ self.table + .drop_inner_table::(&self.alloc, Self::TABLE_LAYOUT); } } } -impl IntoIterator for RawTable { +impl IntoIterator for RawTable { type Item = T; type IntoIter = RawIntoIter; @@ -2946,14 +3312,38 @@ pub(crate) struct RawIterRange { impl RawIterRange { /// Returns a `RawIterRange` covering a subset of a table. /// - /// The control byte address must be aligned to the group size. + /// # Safety + /// + /// If any of the following conditions are violated, the result is + /// [`undefined behavior`]: + /// + /// * `ctrl` must be [valid] for reads, i.e. table outlives the `RawIterRange`; + /// + /// * `ctrl` must be properly aligned to the group size (Group::WIDTH); + /// + /// * `ctrl` must point to the array of properly initialized control bytes; + /// + /// * `data` must be the [`Bucket`] at the `ctrl` index in the table; + /// + /// * the value of `len` must be less than or equal to the number of table buckets, + /// and the returned value of `ctrl.as_ptr().add(len).offset_from(ctrl.as_ptr())` + /// must be positive. + /// + /// * The `ctrl.add(len)` pointer must be either in bounds or one + /// byte past the end of the same [allocated table]. + /// + /// * The `len` must be a power of two. + /// + /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html #[cfg_attr(feature = "inline-more", inline)] unsafe fn new(ctrl: *const u8, data: Bucket, len: usize) -> Self { debug_assert_ne!(len, 0); debug_assert_eq!(ctrl as usize % Group::WIDTH, 0); + // SAFETY: The caller must uphold the safety rules for the [`RawIterRange::new`] let end = ctrl.add(len); // Load the first group and advance ctrl to point to the next group + // SAFETY: The caller must uphold the safety rules for the [`RawIterRange::new`] let current_group = Group::load_aligned(ctrl).match_full(); let next_ctrl = ctrl.add(Group::WIDTH); @@ -3097,8 +3487,6 @@ pub struct RawIter { } impl RawIter { - const DATA_NEEDS_DROP: bool = mem::needs_drop::(); - /// Refresh the iterator so that it reflects a removal from the given bucket. /// /// For the iterator to remain valid, this method must be called once @@ -3106,7 +3494,6 @@ impl RawIter { /// /// This method should be called _before_ the removal is made. It is not necessary to call this /// method if you are removing an item that this iterator yielded in the past. - #[cfg(feature = "raw")] pub unsafe fn reflect_remove(&mut self, b: &Bucket) { self.reflect_toggle_full(b, false); } @@ -3120,13 +3507,11 @@ impl RawIter { /// index than the last one yielded will be reflected in the iterator. /// /// This method should be called _after_ the given insert is made. - #[cfg(feature = "raw")] pub unsafe fn reflect_insert(&mut self, b: &Bucket) { self.reflect_toggle_full(b, true); } /// Refresh the iterator so that it reflects a change to the state of the given bucket. - #[cfg(feature = "raw")] unsafe fn reflect_toggle_full(&mut self, b: &Bucket, is_insert: bool) { if b.as_ptr() > self.iter.data.as_ptr() { // The iterator has already passed the bucket's group. @@ -3214,7 +3599,7 @@ impl RawIter { } unsafe fn drop_elements(&mut self) { - if Self::DATA_NEEDS_DROP && self.len() != 0 { + if T::NEEDS_DROP && self.items != 0 { for item in self { item.drop(); } @@ -3263,35 +3648,154 @@ impl Iterator for RawIter { impl ExactSizeIterator for RawIter {} impl FusedIterator for RawIter {} +/// Iterator which returns an index of every full bucket in the table. 
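+/// The iterator yields plain `usize` indices rather than bucket references.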
+///
+/// For maximum flexibility this iterator is not bound by a lifetime, but you
+/// must observe several rules when using it:
+/// - You must not free the hash table while iterating (including via growing/shrinking).
+/// - It is fine to erase a bucket that has been yielded by the iterator.
+/// - Erasing a bucket that has not yet been yielded by the iterator may still
+///   result in the iterator yielding the index of that bucket.
+/// - It is unspecified whether an element inserted after the iterator was
+///   created will be yielded by that iterator.
+/// - The order in which the iterator yields indices of the buckets is unspecified
+///   and may change in the future.
+pub(crate) struct FullBucketsIndices {
+    // Mask of full buckets in the current group. Bits are cleared from this
+    // mask as each element is processed.
+    current_group: BitMaskIter,
+
+    // Initial value of the bytes' indices of the current group (relative
+    // to the start of the control bytes).
+    group_first_index: usize,
+
+    // Pointer to the current group of control bytes. Must be aligned to
+    // the group size (Group::WIDTH).
+    ctrl: NonNull<u8>,
+
+    // Number of elements in the table.
+    items: usize,
+}
+
+impl FullBucketsIndices {
+    /// Advances the iterator and returns the next value.
+    ///
+    /// # Safety
+    ///
+    /// If any of the following conditions are violated, the result is
+    /// [`Undefined Behavior`]:
+    ///
+    /// * The [`RawTableInner`] / [`RawTable`] must be alive and not moved,
+    ///   i.e. the table outlives the `FullBucketsIndices`;
+    ///
+    /// * The caller never tries to iterate after all the elements have been
+    ///   yielded.
+    ///
+    /// [`Undefined Behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[inline(always)]
+    unsafe fn next_impl(&mut self) -> Option<usize> {
+        loop {
+            if let Some(index) = self.current_group.next() {
+                // The returned `self.group_first_index + index` will always
+                // be in the range `0..self.buckets()`. See explanation below.
+                return Some(self.group_first_index + index);
+            }
+
+            // SAFETY: The caller of this function ensures that:
+            //
+            // 1. It never tries to iterate after getting all the elements;
+            // 2. The table is alive and has not moved;
+            // 3. The first `self.ctrl` pointed to the start of the array of control bytes.
+            //
+            // Taking the above into account, we always stay within the bounds, because:
+            //
+            // 1. For tables smaller than the group width (self.buckets() <= Group::WIDTH),
+            //    we will never end up in the given branch, since we should have already
+            //    yielded all the elements of the table.
+            //
+            // 2. For tables larger than the group width: the number of buckets is a
+            //    power of two (2 ^ n) and Group::WIDTH is also a power of two (2 ^ k).
+            //    Since `(2 ^ n) > (2 ^ k)`, it follows that `(2 ^ n) % (2 ^ k) = 0`.
+            //    As we start from the start of the array of control bytes, and never
+            //    try to iterate after getting all the elements, the last `self.ctrl`
+            //    will be equal to `self.buckets() - Group::WIDTH`, so
+            //    `self.current_group.next()` will always contain indices within the
+            //    range `0..Group::WIDTH`, and the subsequent
+            //    `self.group_first_index + index` will always return a number less
+            //    than `self.buckets()`.
+            self.ctrl = NonNull::new_unchecked(self.ctrl.as_ptr().add(Group::WIDTH));
+
+            // SAFETY: See explanation above.
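+            // Load the next group of control bytes and cache its mask of full buckets.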
+ self.current_group = Group::load_aligned(self.ctrl.as_ptr()) + .match_full() + .into_iter(); + self.group_first_index += Group::WIDTH; + } + } +} + +impl Iterator for FullBucketsIndices { + type Item = usize; + + /// Advances the iterator and returns the next value. It is up to + /// the caller to ensure that the `RawTable` outlives the `FullBucketsIndices`, + /// because we cannot make the `next` method unsafe. + #[inline(always)] + fn next(&mut self) -> Option { + // Return if we already yielded all items. + if self.items == 0 { + return None; + } + + let nxt = unsafe { + // SAFETY: + // 1. We check number of items to yield using `items` field. + // 2. The caller ensures that the table is alive and has not moved. + self.next_impl() + }; + + debug_assert!(nxt.is_some()); + self.items -= 1; + + nxt + } + + #[inline(always)] + fn size_hint(&self) -> (usize, Option) { + (self.items, Some(self.items)) + } +} + +impl ExactSizeIterator for FullBucketsIndices {} +impl FusedIterator for FullBucketsIndices {} + /// Iterator which consumes a table and returns elements. -pub struct RawIntoIter { +pub struct RawIntoIter { iter: RawIter, allocation: Option<(NonNull, Layout, A)>, marker: PhantomData, } -impl RawIntoIter { +impl RawIntoIter { #[cfg_attr(feature = "inline-more", inline)] pub fn iter(&self) -> RawIter { self.iter.clone() } } -unsafe impl Send for RawIntoIter +unsafe impl Send for RawIntoIter where T: Send, A: Send, { } -unsafe impl Sync for RawIntoIter +unsafe impl Sync for RawIntoIter where T: Sync, A: Sync, { } -#[cfg(feature = "nightly")] -unsafe impl<#[may_dangle] T, A: Allocator + Clone> Drop for RawIntoIter { +#[cfg(rune_nightly)] +unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawIntoIter { #[cfg_attr(feature = "inline-more", inline)] fn drop(&mut self) { unsafe { @@ -3305,8 +3809,8 @@ unsafe impl<#[may_dangle] T, A: Allocator + Clone> Drop for RawIntoIter { } } } -#[cfg(not(feature = "nightly"))] -impl Drop for RawIntoIter { +#[cfg(not(rune_nightly))] +impl Drop for RawIntoIter { #[cfg_attr(feature = "inline-more", inline)] fn drop(&mut self) { unsafe { @@ -3321,7 +3825,7 @@ impl Drop for RawIntoIter { } } -impl Iterator for RawIntoIter { +impl Iterator for RawIntoIter { type Item = T; #[cfg_attr(feature = "inline-more", inline)] @@ -3335,45 +3839,45 @@ impl Iterator for RawIntoIter { } } -impl ExactSizeIterator for RawIntoIter {} -impl FusedIterator for RawIntoIter {} +impl ExactSizeIterator for RawIntoIter {} +impl FusedIterator for RawIntoIter {} /// Iterator which consumes elements without freeing the table storage. -pub struct RawDrain<'a, T, A: Allocator + Clone = Global> { +pub struct RawDrain<'a, T, A: Allocator = Global> { iter: RawIter, // The table is moved into the iterator for the duration of the drain. This // ensures that an empty table is left if the drain iterator is leaked // without dropping. - table: ManuallyDrop>, - orig_table: NonNull>, + table: RawTableInner, + orig_table: NonNull, // We don't use a &'a mut RawTable because we want RawDrain to be // covariant over T. 
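    // (A `PhantomData` of a shared reference keeps the `'a` lifetime without the
    // invariance over `T` that a `&'a mut RawTable` would impose.)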
marker: PhantomData<&'a RawTable>, } -impl RawDrain<'_, T, A> { +impl RawDrain<'_, T, A> { #[cfg_attr(feature = "inline-more", inline)] pub fn iter(&self) -> RawIter { self.iter.clone() } } -unsafe impl Send for RawDrain<'_, T, A> +unsafe impl Send for RawDrain<'_, T, A> where T: Send, A: Send, { } -unsafe impl Sync for RawDrain<'_, T, A> +unsafe impl Sync for RawDrain<'_, T, A> where T: Sync, A: Sync, { } -impl Drop for RawDrain<'_, T, A> { +impl Drop for RawDrain<'_, T, A> { #[cfg_attr(feature = "inline-more", inline)] fn drop(&mut self) { unsafe { @@ -3387,12 +3891,12 @@ impl Drop for RawDrain<'_, T, A> { // Move the now empty table back to its original location. self.orig_table .as_ptr() - .copy_from_nonoverlapping(&*self.table, 1); + .copy_from_nonoverlapping(&self.table, 1); } } } -impl Iterator for RawDrain<'_, T, A> { +impl Iterator for RawDrain<'_, T, A> { type Item = T; #[cfg_attr(feature = "inline-more", inline)] @@ -3409,8 +3913,8 @@ impl Iterator for RawDrain<'_, T, A> { } } -impl ExactSizeIterator for RawDrain<'_, T, A> {} -impl FusedIterator for RawDrain<'_, T, A> {} +impl ExactSizeIterator for RawDrain<'_, T, A> {} +impl FusedIterator for RawDrain<'_, T, A> {} /// Iterator over occupied buckets that could match a given hash. /// @@ -3454,8 +3958,7 @@ struct RawIterHashInner { impl RawIterHash { #[cfg_attr(feature = "inline-more", inline)] - #[cfg(feature = "raw")] - unsafe fn new(table: &RawTable, hash: u64) -> Self { + unsafe fn new(table: &RawTable, hash: u64) -> Self { RawIterHash { inner: RawIterHashInner::new(&table.table, hash), _marker: PhantomData, @@ -3464,8 +3967,7 @@ impl RawIterHash { } impl RawIterHashInner { #[cfg_attr(feature = "inline-more", inline)] - #[cfg(feature = "raw")] - unsafe fn new(table: &RawTableInner, hash: u64) -> Self { + unsafe fn new(table: &RawTableInner, hash: u64) -> Self { let h2_hash = h2(hash); let probe_seq = table.probe_seq(hash); let group = Group::load(table.ctrl(probe_seq.pos)); @@ -3533,13 +4035,18 @@ impl Iterator for RawIterHashInner { mod test_map { use super::*; - fn rehash_in_place(table: &mut RawTable, hasher: impl Fn(&T) -> u64) { - let hasher = infallible_hasher(hasher); + use crate::alloc::into_ok; + use crate::alloc::Global; + use core::convert::Infallible; + fn rehash_in_place( + table: &mut RawTable, + hasher: impl Fn(&mut (), &T) -> Result, + ) { unsafe { into_ok(table.table.rehash_in_place( &mut (), - &|table, cx, index| hasher(cx, table.bucket::(index).as_ref()), + &move |cx, table, index| hasher(cx, table.bucket::(index).as_ref()), mem::size_of::(), if mem::needs_drop::() { Some(mem::transmute(ptr::drop_in_place:: as unsafe fn(*mut T))) @@ -3553,25 +4060,39 @@ mod test_map { #[test] fn rehash() { let mut table = RawTable::new(); - let hasher = |i: &u64| *i; + let hasher = |_: &mut (), i: &u64| Ok(*i); for i in 0..100 { - table.insert(i, i, hasher); + table.insert(&mut (), i, i, hasher).abort(); } for i in 0..100 { unsafe { - assert_eq!(table.find(i, |x| *x == i).map(|b| b.read()), Some(i)); + assert_eq!( + into_ok(table.find(&mut (), i, |_: &mut (), x: &u64| Ok(*x == i))) + .map(|b| b.read()), + Some(i) + ); } - assert!(table.find(i + 100, |x| *x == i + 100).is_none()); + assert!( + into_ok(table.find(&mut (), i + 100, |_: &mut (), x: &u64| Ok(*x == i + 100))) + .is_none() + ); } rehash_in_place(&mut table, hasher); for i in 0..100 { unsafe { - assert_eq!(table.find(i, |x| *x == i).map(|b| b.read()), Some(i)); + assert_eq!( + into_ok(table.find(&mut (), i, |_: &mut (), x: &u64| Ok(*x == i))) + .map(|b| b.read()), 
+ Some(i) + ); } - assert!(table.find(i + 100, |x| *x == i + 100).is_none()); + assert!( + into_ok(table.find(&mut (), i + 100, |_: &mut (), x: &u64| Ok(*x == i + 100))) + .is_none() + ); } } @@ -3579,13 +4100,12 @@ mod test_map { /// AN UNINITIALIZED TABLE DURING THE DROP #[test] fn test_drop_uninitialized() { - use ::alloc::vec::Vec; + use ::rust_alloc::vec::Vec; let table = unsafe { // SAFETY: The `buckets` is power of two and we're not // trying to actually use the returned RawTable. - RawTable::<(u64, Vec)>::new_uninitialized(Global, 8, Fallibility::Infallible) - .unwrap() + RawTable::<(u64, Vec)>::new_uninitialized(Global, 8).unwrap() }; drop(table); } @@ -3594,13 +4114,11 @@ mod test_map { /// ARE ZERO, EVEN IF WE HAVE `FULL` CONTROL BYTES. #[test] fn test_drop_zero_items() { - use ::alloc::vec::Vec; + use ::rust_alloc::vec::Vec; unsafe { // SAFETY: The `buckets` is power of two and we're not // trying to actually use the returned RawTable. - let table = - RawTable::<(u64, Vec)>::new_uninitialized(Global, 8, Fallibility::Infallible) - .unwrap(); + let table = RawTable::<(u64, Vec)>::new_uninitialized(Global, 8).unwrap(); // WE SIMULATE, AS IT WERE, A FULL TABLE. @@ -3637,4 +4155,158 @@ mod test_map { drop(table); } } + + /// CHECKING THAT WE DON'T TRY TO DROP DATA IF THE `ITEMS` + /// ARE ZERO, EVEN IF WE HAVE `FULL` CONTROL BYTES. + #[test] + fn test_catch_panic_clone_from() { + use crate::alloc::{AllocError, Allocator}; + use ::rust_alloc::sync::Arc; + use ::rust_alloc::vec::Vec; + use ::rust_std::thread; + use core::sync::atomic::{AtomicI8, Ordering}; + + struct MyAllocInner { + drop_count: Arc, + } + + #[derive(Clone)] + struct MyAlloc { + _inner: Arc, + } + + impl Drop for MyAllocInner { + fn drop(&mut self) { + ::rust_std::println!("MyAlloc freed."); + self.drop_count.fetch_sub(1, Ordering::SeqCst); + } + } + + unsafe impl Allocator for MyAlloc { + fn allocate(&self, layout: Layout) -> Result, AllocError> { + let g = Global; + g.allocate(layout) + } + + unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { + let g = Global; + g.deallocate(ptr, layout) + } + } + + const DISARMED: bool = false; + const ARMED: bool = true; + + struct CheckedCloneDrop { + panic_in_clone: bool, + dropped: bool, + need_drop: Vec, + } + + impl TryClone for CheckedCloneDrop { + fn try_clone(&self) -> Result { + if self.panic_in_clone { + panic!("panic in clone") + } + Ok(Self { + panic_in_clone: self.panic_in_clone, + dropped: self.dropped, + need_drop: self.need_drop.clone(), + }) + } + } + + impl Drop for CheckedCloneDrop { + fn drop(&mut self) { + if self.dropped { + panic!("double drop"); + } + self.dropped = true; + } + } + + let dropped: Arc = Arc::new(AtomicI8::new(2)); + + let mut table = RawTable::new_in(MyAlloc { + _inner: Arc::new(MyAllocInner { + drop_count: dropped.clone(), + }), + }); + + for (idx, panic_in_clone) in core::iter::repeat(DISARMED).take(7).enumerate() { + let idx = idx as u64; + table + .insert( + &mut (), + idx, + ( + idx, + CheckedCloneDrop { + panic_in_clone, + dropped: false, + need_drop: ::rust_alloc::vec![idx], + }, + ), + |_: &mut (), (k, _): &(u64, _)| Ok::<_, Infallible>(*k), + ) + .abort(); + } + + assert_eq!(table.len(), 7); + + thread::scope(|s| { + let result = s.spawn(|| { + let armed_flags = [ + DISARMED, DISARMED, ARMED, DISARMED, DISARMED, DISARMED, DISARMED, + ]; + let mut scope_table = RawTable::new_in(MyAlloc { + _inner: Arc::new(MyAllocInner { + drop_count: dropped.clone(), + }), + }); + for (idx, &panic_in_clone) in armed_flags.iter().enumerate() 
{ + let idx = idx as u64; + scope_table + .insert( + &mut (), + idx, + ( + idx, + CheckedCloneDrop { + panic_in_clone, + dropped: false, + need_drop: ::rust_alloc::vec![idx + 100], + }, + ), + |_: &mut (), (k, _): &(u64, _)| Ok::<_, Infallible>(*k), + ) + .abort(); + } + table.clone_from(&scope_table); + }); + assert!(result.join().is_err()); + }); + + // Let's check that all iterators work fine and do not return elements + // (especially `RawIterRange`, which does not depend on the number of + // elements in the table, but looks directly at the control bytes) + // + // SAFETY: We know for sure that `RawTable` will outlive + // the returned `RawIter / RawIterRange` iterator. + assert_eq!(table.len(), 0); + assert_eq!(unsafe { table.iter().count() }, 0); + assert_eq!(unsafe { table.iter().iter.count() }, 0); + + for idx in 0..table.buckets() { + let idx = idx as u64; + assert!( + into_ok(table.find(&mut (), idx, |_: &mut (), (k, _): &(u64, _)| Ok(*k == idx))) + .is_none(), + "Index: {idx}" + ); + } + + // All allocator clones should already be dropped. + assert_eq!(dropped.load(Ordering::SeqCst), 1); + } } diff --git a/crates/rune/src/hashbrown/fork/raw/neon.rs b/crates/rune-alloc/src/alloc/hashbrown/raw/neon.rs similarity index 100% rename from crates/rune/src/hashbrown/fork/raw/neon.rs rename to crates/rune-alloc/src/alloc/hashbrown/raw/neon.rs diff --git a/crates/rune/src/hashbrown/fork/raw/sse2.rs b/crates/rune-alloc/src/alloc/hashbrown/raw/sse2.rs similarity index 100% rename from crates/rune/src/hashbrown/fork/raw/sse2.rs rename to crates/rune-alloc/src/alloc/hashbrown/raw/sse2.rs diff --git a/crates/rune/src/hashbrown/fork/scopeguard.rs b/crates/rune-alloc/src/alloc/hashbrown/scopeguard.rs similarity index 92% rename from crates/rune/src/hashbrown/fork/scopeguard.rs rename to crates/rune-alloc/src/alloc/hashbrown/scopeguard.rs index 47965a845..12e80faca 100644 --- a/crates/rune/src/hashbrown/fork/scopeguard.rs +++ b/crates/rune-alloc/src/alloc/hashbrown/scopeguard.rs @@ -1,9 +1,7 @@ // Extracted from the scopeguard crate -use core::{ - mem::ManuallyDrop, - ops::{Deref, DerefMut}, - ptr, -}; +use core::mem::ManuallyDrop; +use core::ops::{Deref, DerefMut}; +use core::ptr; pub struct ScopeGuard where @@ -25,7 +23,6 @@ impl ScopeGuard where F: FnMut(&mut T), { - #[allow(dead_code)] #[inline] pub fn into_inner(guard: Self) -> T { // Cannot move out of Drop-implementing types, so diff --git a/crates/rune-alloc/src/alloc/hashbrown/set.rs b/crates/rune-alloc/src/alloc/hashbrown/set.rs new file mode 100644 index 000000000..91d6e23b6 --- /dev/null +++ b/crates/rune-alloc/src/alloc/hashbrown/set.rs @@ -0,0 +1,2892 @@ +use core::fmt; +use core::hash::{BuildHasher, Hash}; +use core::iter::{Chain, FusedIterator}; + +use super::Equivalent; + +#[cfg(test)] +use crate::alloc::testing::*; +use crate::alloc::{Allocator, Error, Global, TryClone, TryExtend, TryFromIteratorIn, TryToOwned}; + +use super::map::{self, DefaultHashBuilder, ExtractIfInner, HashMap, Keys}; +use super::raw::RawTable; + +// Future Optimization (FIXME!) +// ============================= +// +// Iteration over zero sized values is a noop. There is no need +// for `bucket.val` in the case of HashSet. I suppose we would need HKT +// to get rid of it properly. + +/// A hash set implemented as a `HashMap` where the value is `()`. +/// +/// As with the [`HashMap`] type, a `HashSet` requires that the elements +/// implement the [`Eq`] and [`Hash`] traits. This can frequently be achieved by +/// using `#[derive(PartialEq, Eq, Hash)]`. 
If you implement these yourself, +/// it is important that the following property holds: +/// +/// ```text +/// k1 == k2 -> hash(k1) == hash(k2) +/// ``` +/// +/// In other words, if two keys are equal, their hashes must be equal. +/// +/// +/// It is a logic error for an item to be modified in such a way that the +/// item's hash, as determined by the [`Hash`] trait, or its equality, as +/// determined by the [`Eq`] trait, changes while it is in the set. This is +/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or +/// unsafe code. +/// +/// It is also a logic error for the [`Hash`] implementation of a key to panic. +/// This is generally only possible if the trait is implemented manually. If a +/// panic does occur then the contents of the `HashSet` may become corrupted and +/// some items may be dropped from the table. +/// +/// # Examples +/// +/// ``` +/// use rune_alloc::HashSet; +/// // Type inference lets us omit an explicit type signature (which +/// // would be `HashSet` in this example). +/// let mut books = HashSet::new(); +/// +/// // Add some books. +/// books.try_insert("A Dance With Dragons".to_string())?; +/// books.try_insert("To Kill a Mockingbird".to_string())?; +/// books.try_insert("The Odyssey".to_string())?; +/// books.try_insert("The Great Gatsby".to_string())?; +/// +/// // Check for a specific one. +/// if !books.contains("The Winds of Winter") { +/// println!("We have {} books, but The Winds of Winter ain't one.", +/// books.len()); +/// } +/// +/// // Remove a book. +/// books.remove("The Odyssey"); +/// +/// // Iterate over everything. +/// for book in &books { +/// println!("{}", book); +/// } +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// The easiest way to use `HashSet` with a custom type is to derive +/// [`Eq`] and [`Hash`]. We must also derive [`PartialEq`]. This will in the +/// future be implied by [`Eq`]. +/// +/// ``` +/// use rune_alloc::HashSet; +/// +/// #[derive(Hash, Eq, PartialEq, Debug)] +/// struct Viking { +/// name: String, +/// power: usize, +/// } +/// +/// let mut vikings = HashSet::new(); +/// +/// vikings.try_insert(Viking { name: "Einar".to_string(), power: 9 })?; +/// vikings.try_insert(Viking { name: "Einar".to_string(), power: 9 })?; +/// vikings.try_insert(Viking { name: "Olaf".to_string(), power: 4 })?; +/// vikings.try_insert(Viking { name: "Harald".to_string(), power: 8 })?; +/// +/// // Use derived implementation to print the vikings. 
+/// for x in &vikings {
+///     println!("{:?}", x);
+/// }
+/// # Ok::<_, rune_alloc::Error>(())
+/// ```
+///
+/// A `HashSet` with a fixed list of elements can be initialized from an array:
+///
+/// ```
+/// use rune_alloc::HashSet;
+/// use rune_alloc::prelude::*;
+///
+/// let viking_names: HashSet<&'static str> =
+///     [ "Einar", "Olaf", "Harald" ].iter().copied().try_collect()?;
+/// // use the values stored in the set
+/// # Ok::<_, rune_alloc::Error>(())
+/// ```
+///
+/// [`Cell`]: https://doc.rust-lang.org/std/cell/struct.Cell.html
+/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+/// [`HashMap`]: struct.HashMap.html
+/// [`PartialEq`]: https://doc.rust-lang.org/std/cmp/trait.PartialEq.html
+/// [`RefCell`]: https://doc.rust-lang.org/std/cell/struct.RefCell.html
+pub struct HashSet<T, S = DefaultHashBuilder, A: Allocator = Global> {
+    pub(crate) map: HashMap<T, (), S, A>,
+}
+
+impl<T, S, A: Allocator + Clone> TryClone for HashSet<T, S, A>
+where
+    T: TryClone,
+    S: Clone,
+{
+    fn try_clone(&self) -> Result<Self, Error> {
+        Ok(HashSet {
+            map: self.map.try_clone()?,
+        })
+    }
+
+    fn try_clone_from(&mut self, source: &Self) -> Result<(), Error> {
+        self.map.try_clone_from(&source.map)?;
+        Ok(())
+    }
+}
+
+#[cfg(test)]
+impl<T, S, A: Allocator + Clone> Clone for HashSet<T, S, A>
+where
+    T: TryClone,
+    S: Clone,
+{
+    fn clone(&self) -> Self {
+        self.try_clone().abort()
+    }
+
+    fn clone_from(&mut self, source: &Self) {
+        self.map.try_clone_from(&source.map).abort();
+    }
+}
+
+impl<T> HashSet<T, DefaultHashBuilder, Global> {
+    /// Creates an empty `HashSet`.
+    ///
+    /// The hash set is initially created with a capacity of 0, so it will not allocate until it
+    /// is first inserted into.
+    ///
+    /// # HashDoS resistance
+    ///
+    /// The `hash_builder` normally uses a fixed key by default and that does
+    /// not allow the `HashSet` to be protected against attacks such as [`HashDoS`].
+    /// Users who require HashDoS resistance should explicitly use
+    /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`]
+    /// as the hasher when creating a [`HashSet`], for example with the
+    /// [`with_hasher`](HashSet::with_hasher) method.
+    ///
+    /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack
+    /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashSet;
+    /// let set: HashSet<i32> = HashSet::new();
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn new() -> Self {
+        Self {
+            map: HashMap::new(),
+        }
+    }
+
+    /// Creates an empty `HashSet` with the specified capacity.
+    ///
+    /// The hash set will be able to hold at least `capacity` elements without
+    /// reallocating. If `capacity` is 0, the hash set will not allocate.
+    ///
+    /// # HashDoS resistance
+    ///
+    /// The `hash_builder` normally uses a fixed key by default and that does not
+    /// allow the `HashSet` to be protected against attacks such as [`HashDoS`].
+    /// Users who require HashDoS resistance should explicitly use
+    /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`] as
+    /// the hasher when creating a [`HashSet`], for example with the
+    /// [`try_with_capacity_and_hasher`] method.
+ /// + /// [`try_with_capacity_and_hasher`]: HashSet::try_with_capacity_and_hasher + /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack + /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// let set: HashSet = HashSet::try_with_capacity(10)?; + /// assert!(set.capacity() >= 10); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn try_with_capacity(capacity: usize) -> Result { + Ok(Self { + map: HashMap::try_with_capacity(capacity)?, + }) + } + + #[cfg(test)] + pub(crate) fn with_capacity(capacity: usize) -> Self { + Self::try_with_capacity(capacity).abort() + } +} + +impl HashSet { + /// Creates an empty `HashSet`. + /// + /// The hash set is initially created with a capacity of 0, so it will not allocate until it + /// is first inserted into. + /// + /// # HashDoS resistance + /// + /// The `hash_builder` normally use a fixed key by default and that does + /// not allow the `HashSet` to be protected against attacks such as [`HashDoS`]. + /// Users who require HashDoS resistance should explicitly use + /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`] + /// as the hasher when creating a [`HashSet`], for example with + /// [`with_hasher_in`](HashSet::with_hasher_in) method. + /// + /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack + /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{HashSet, Global}; + /// let set: HashSet = HashSet::new_in(Global); + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn new_in(alloc: A) -> Self { + Self { + map: HashMap::new_in(alloc), + } + } + + /// Creates an empty `HashSet` with the specified capacity. + /// + /// The hash set will be able to hold at least `capacity` elements without + /// reallocating. If `capacity` is 0, the hash set will not allocate. + /// + /// # HashDoS resistance + /// + /// The `hash_builder` normally use a fixed key by default and that does not + /// allow the `HashSet` to be protected against attacks such as [`HashDoS`]. + /// Users who require HashDoS resistance should explicitly use + /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`] as + /// the hasher when creating a [`HashSet`], for example with + /// [`try_with_capacity_and_hasher_in`] method. + /// + /// [`try_with_capacity_and_hasher_in`]: + /// HashSet::try_with_capacity_and_hasher_in + /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack + /// [`std::collections::hash_map::RandomState`]: + /// https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// let set: HashSet = HashSet::try_with_capacity(10)?; + /// assert!(set.capacity() >= 10); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn try_with_capacity_in(capacity: usize, alloc: A) -> Result { + Ok(Self { + map: HashMap::try_with_capacity_in(capacity, alloc)?, + }) + } +} + +impl HashSet { + /// Returns the number of elements the set can hold without reallocating. 
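+    ///
+    /// This number is a lower bound; the set might be able to hold more, but
+    /// is guaranteed to be able to hold at least this many.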
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// + /// let set: HashSet = HashSet::try_with_capacity(100)?; + /// assert!(set.capacity() >= 100); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn capacity(&self) -> usize { + self.map.capacity() + } + + /// An iterator visiting all elements in arbitrary order. + /// The iterator element type is `&'a T`. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// + /// let mut set = HashSet::new(); + /// set.try_insert("a")?; + /// set.try_insert("b")?; + /// + /// // Will print in an arbitrary order. + /// for x in set.iter() { + /// println!("{}", x); + /// } + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn iter(&self) -> Iter<'_, T> { + Iter { + iter: self.map.keys(), + } + } + + /// Returns the number of elements in the set. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// + /// let mut v = HashSet::new(); + /// assert_eq!(v.len(), 0); + /// v.try_insert(1)?; + /// assert_eq!(v.len(), 1); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn len(&self) -> usize { + self.map.len() + } + + /// Returns `true` if the set contains no elements. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// + /// let mut v = HashSet::new(); + /// assert!(v.is_empty()); + /// v.try_insert(1)?; + /// assert!(!v.is_empty()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn is_empty(&self) -> bool { + self.map.is_empty() + } + + /// Clears the set, returning all elements in an iterator. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// + /// let mut set: HashSet<_> = HashSet::try_from([1, 2, 3])?; + /// assert!(!set.is_empty()); + /// + /// // print 1, 2, 3 in an arbitrary order + /// for i in set.drain() { + /// println!("{}", i); + /// } + /// + /// assert!(set.is_empty()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn drain(&mut self) -> Drain<'_, T, A> { + Drain { + iter: self.map.drain(), + } + } + + /// Retains only the elements specified by the predicate. + /// + /// In other words, remove all elements `e` such that `f(&e)` returns `false`. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// + /// let mut set: HashSet = HashSet::try_from([1, 2, 3, 4, 5, 6])?; + /// set.retain(|&k| k % 2 == 0); + /// assert_eq!(set.len(), 3); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn retain(&mut self, mut f: F) + where + F: FnMut(&T) -> bool, + { + self.map.retain(|k, _| f(k)); + } + + /// Drains elements which are true under the given predicate, and returns an + /// iterator over the removed items. + /// + /// In other words, move all elements `e` such that `f(&e)` returns `true` + /// out into another iterator. + /// + /// If the returned `ExtractIf` is not exhausted, e.g. because it is dropped + /// without iterating or the iteration short-circuits, then the remaining + /// elements will be retained. Use [`retain`] with a negated predicate if + /// you do not need the returned iterator. 
+ /// + /// [`retain`]: Self::retain + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{HashSet, Vec}; + /// use rune_alloc::prelude::*; + /// + /// let mut set: HashSet = (0..8).try_collect()?; + /// let drained: HashSet = set.extract_if(|v| v % 2 == 0).try_collect()?; + /// + /// let mut evens = drained.into_iter().try_collect::>()?; + /// let mut odds = set.into_iter().try_collect::>()?; + /// evens.sort(); + /// odds.sort(); + /// + /// assert_eq!(evens, rune_alloc::try_vec![0, 2, 4, 6]); + /// assert_eq!(odds, rune_alloc::try_vec![1, 3, 5, 7]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn extract_if(&mut self, f: F) -> ExtractIf<'_, T, F, A> + where + F: FnMut(&T) -> bool, + { + ExtractIf { + f, + inner: ExtractIfInner { + iter: unsafe { self.map.table.iter() }, + table: &mut self.map.table, + }, + } + } + + /// Clears the set, removing all values. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// + /// let mut v = HashSet::new(); + /// v.try_insert(1)?; + /// v.clear(); + /// assert!(v.is_empty()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn clear(&mut self) { + self.map.clear(); + } +} + +impl HashSet { + /// Creates a new empty hash set which will use the given hasher to hash + /// keys. + /// + /// The hash set is initially created with a capacity of 0, so it will not + /// allocate until it is first inserted into. + /// + /// # HashDoS resistance + /// + /// The `hash_builder` normally use a fixed key by default and that does + /// not allow the `HashSet` to be protected against attacks such as [`HashDoS`]. + /// Users who require HashDoS resistance should explicitly use + /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`] + /// as the hasher when creating a [`HashSet`]. + /// + /// The `hash_builder` passed should implement the [`BuildHasher`] trait for + /// the HashSet to be useful, see its documentation for details. + /// + /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack + /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html + /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// use rune_alloc::hash_map::DefaultHashBuilder; + /// + /// let s = DefaultHashBuilder::default(); + /// let mut set = HashSet::with_hasher(s); + /// set.try_insert(2)?; + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub const fn with_hasher(hasher: S) -> Self { + Self { + map: HashMap::with_hasher(hasher), + } + } + + /// Creates an empty `HashSet` with the specified capacity, using + /// `hasher` to hash the keys. + /// + /// The hash set will be able to hold at least `capacity` elements without + /// reallocating. If `capacity` is 0, the hash set will not allocate. + /// + /// # HashDoS resistance + /// + /// The `hash_builder` normally use a fixed key by default and that does + /// not allow the `HashSet` to be protected against attacks such as [`HashDoS`]. + /// Users who require HashDoS resistance should explicitly use + /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`] + /// as the hasher when creating a [`HashSet`]. 
+ /// + /// The `hash_builder` passed should implement the [`BuildHasher`] trait for + /// the HashSet to be useful, see its documentation for details. + /// + /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack + /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html + /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// use rune_alloc::hash_map::DefaultHashBuilder; + /// + /// let s = DefaultHashBuilder::default(); + /// let mut set = HashSet::try_with_capacity_and_hasher(10, s)?; + /// set.try_insert(1)?; + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn try_with_capacity_and_hasher(capacity: usize, hasher: S) -> Result { + Ok(Self { + map: HashMap::try_with_capacity_and_hasher(capacity, hasher)?, + }) + } + + #[cfg(test)] + pub(crate) fn with_capacity_and_hasher(capacity: usize, hasher: S) -> Self { + Self::try_with_capacity_and_hasher(capacity, hasher).abort() + } +} + +impl HashSet +where + A: Allocator, +{ + /// Returns a reference to the underlying allocator. + #[inline] + pub fn allocator(&self) -> &A { + self.map.allocator() + } + + /// Creates a new empty hash set which will use the given hasher to hash + /// keys. + /// + /// The hash set is initially created with a capacity of 0, so it will not + /// allocate until it is first inserted into. + /// + /// # HashDoS resistance + /// + /// The `hash_builder` normally use a fixed key by default and that does + /// not allow the `HashSet` to be protected against attacks such as [`HashDoS`]. + /// Users who require HashDoS resistance should explicitly use + /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`] + /// as the hasher when creating a [`HashSet`]. + /// + /// The `hash_builder` passed should implement the [`BuildHasher`] trait for + /// the HashSet to be useful, see its documentation for details. + /// + /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack + /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html + /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// use rune_alloc::hash_map::DefaultHashBuilder; + /// + /// let s = DefaultHashBuilder::default(); + /// let mut set = HashSet::with_hasher(s); + /// set.try_insert(2)?; + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub const fn with_hasher_in(hasher: S, alloc: A) -> Self { + Self { + map: HashMap::with_hasher_in(hasher, alloc), + } + } + + /// Creates an empty `HashSet` with the specified capacity, using + /// `hasher` to hash the keys. + /// + /// The hash set will be able to hold at least `capacity` elements without + /// reallocating. If `capacity` is 0, the hash set will not allocate. + /// + /// # HashDoS resistance + /// + /// The `hash_builder` normally use a fixed key by default and that does + /// not allow the `HashSet` to be protected against attacks such as [`HashDoS`]. + /// Users who require HashDoS resistance should explicitly use + /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`] + /// as the hasher when creating a [`HashSet`]. 
+ /// + /// The `hash_builder` passed should implement the [`BuildHasher`] trait for + /// the HashSet to be useful, see its documentation for details. + /// + /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack + /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html + /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{HashSet, Global}; + /// use rune_alloc::hash_map::DefaultHashBuilder; + /// + /// let s = DefaultHashBuilder::default(); + /// let mut set = HashSet::try_with_capacity_and_hasher_in(10, s, Global)?; + /// set.try_insert(1)?; + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn try_with_capacity_and_hasher_in( + capacity: usize, + hasher: S, + alloc: A, + ) -> Result { + Ok(Self { + map: HashMap::try_with_capacity_and_hasher_in(capacity, hasher, alloc)?, + }) + } + + /// Returns a reference to the set's [`BuildHasher`]. + /// + /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// use rune_alloc::hash_map::DefaultHashBuilder; + /// + /// let hasher = DefaultHashBuilder::default(); + /// let set: HashSet = HashSet::with_hasher(hasher); + /// let hasher: &DefaultHashBuilder = set.hasher(); + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn hasher(&self) -> &S { + self.map.hasher() + } +} + +impl HashSet +where + T: Eq + Hash, + S: BuildHasher, + A: Allocator, +{ + /// Tries to reserve capacity for at least `additional` more elements to be inserted + /// in the given `HashSet`. The collection may reserve more space to avoid + /// frequent reallocations. + /// + /// # Errors + /// + /// If the capacity overflows, or the allocator reports a failure, then an error + /// is returned. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// let mut set: HashSet = HashSet::new(); + /// set.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?"); + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn try_reserve(&mut self, additional: usize) -> Result<(), Error> { + self.map.try_reserve(additional) + } + + #[cfg(test)] + pub(crate) fn reserve(&mut self, additional: usize) { + self.try_reserve(additional).abort() + } + + /// Shrinks the capacity of the set as much as possible. It will drop + /// down as much as possible while maintaining the internal rules + /// and possibly leaving some space in accordance with the resize policy. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// + /// let mut set = HashSet::try_with_capacity(100)?; + /// set.try_insert(1)?; + /// set.try_insert(2)?; + /// assert!(set.capacity() >= 100); + /// set.try_shrink_to_fit()?; + /// assert!(set.capacity() >= 2); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn try_shrink_to_fit(&mut self) -> Result<(), Error> { + self.map.try_shrink_to_fit() + } + + #[cfg(test)] + pub(crate) fn shrink_to_fit(&mut self) { + self.try_shrink_to_fit().abort(); + } + + /// Shrinks the capacity of the set with a lower limit. It will drop + /// down no lower than the supplied limit while maintaining the internal rules + /// and possibly leaving some space in accordance with the resize policy. 
+ /// + /// Panics if the current capacity is smaller than the supplied + /// minimum capacity. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// + /// let mut set = HashSet::try_with_capacity(100)?; + /// set.try_insert(1)?; + /// set.try_insert(2)?; + /// assert!(set.capacity() >= 100); + /// set.try_shrink_to(10)?; + /// assert!(set.capacity() >= 10); + /// set.try_shrink_to(0)?; + /// assert!(set.capacity() >= 2); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn try_shrink_to(&mut self, min_capacity: usize) -> Result<(), Error> { + self.map.try_shrink_to(min_capacity) + } + + /// Visits the values representing the difference, + /// i.e., the values that are in `self` but not in `other`. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// use rune_alloc::prelude::*; + /// + /// let a: HashSet<_> = HashSet::try_from([1, 2, 3])?; + /// let b: HashSet<_> = HashSet::try_from([4, 2, 3, 4])?; + /// + /// // Can be seen as `a - b`. + /// for x in a.difference(&b) { + /// println!("{}", x); // Print 1 + /// } + /// + /// let diff: HashSet<_> = a.difference(&b).copied().try_collect()?; + /// assert_eq!(diff, HashSet::try_from([1])?); + /// + /// // Note that difference is not symmetric, + /// // and `b - a` means something else: + /// let diff: HashSet<_> = b.difference(&a).copied().try_collect()?; + /// assert_eq!(diff, HashSet::try_from([4])?); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn difference<'a>(&'a self, other: &'a Self) -> Difference<'a, T, S, A> { + Difference { + iter: self.iter(), + other, + } + } + + /// Visits the values representing the symmetric difference, + /// i.e., the values that are in `self` or in `other` but not in both. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// use rune_alloc::prelude::*; + /// + /// let a: HashSet<_> = HashSet::try_from([1, 2, 3])?; + /// let b: HashSet<_> = HashSet::try_from([4, 2, 3, 4])?; + /// + /// // Print 1, 4 in arbitrary order. + /// for x in a.symmetric_difference(&b) { + /// println!("{}", x); + /// } + /// + /// let diff1: HashSet<_> = a.symmetric_difference(&b).copied().try_collect()?; + /// let diff2: HashSet<_> = b.symmetric_difference(&a).copied().try_collect()?; + /// + /// assert_eq!(diff1, diff2); + /// assert_eq!(diff1, HashSet::try_from([1, 4])?); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn symmetric_difference<'a>(&'a self, other: &'a Self) -> SymmetricDifference<'a, T, S, A> { + SymmetricDifference { + iter: self.difference(other).chain(other.difference(self)), + } + } + + /// Visits the values representing the intersection, + /// i.e., the values that are both in `self` and `other`. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// use rune_alloc::prelude::*; + /// + /// let a: HashSet<_> = HashSet::try_from([1, 2, 3])?; + /// let b: HashSet<_> = HashSet::try_from([4, 2, 3, 4])?; + /// + /// // Print 2, 3 in arbitrary order. 
+ /// for x in a.intersection(&b) { + /// println!("{}", x); + /// } + /// + /// let intersection: HashSet<_> = a.intersection(&b).copied().try_collect()?; + /// assert_eq!(intersection, HashSet::try_from([2, 3])?); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn intersection<'a>(&'a self, other: &'a Self) -> Intersection<'a, T, S, A> { + let (smaller, larger) = if self.len() <= other.len() { + (self, other) + } else { + (other, self) + }; + Intersection { + iter: smaller.iter(), + other: larger, + } + } + + /// Visits the values representing the union, + /// i.e., all the values in `self` or `other`, without duplicates. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// use rune_alloc::prelude::*; + /// + /// let a: HashSet<_> = HashSet::try_from([1, 2, 3])?; + /// let b: HashSet<_> = HashSet::try_from([4, 2, 3, 4])?; + /// + /// // Print 1, 2, 3, 4 in arbitrary order. + /// for x in a.union(&b) { + /// println!("{}", x); + /// } + /// + /// let union: HashSet<_> = a.union(&b).copied().try_collect()?; + /// assert_eq!(union, HashSet::try_from([1, 2, 3, 4])?); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn union<'a>(&'a self, other: &'a Self) -> Union<'a, T, S, A> { + // We'll iterate one set in full, and only the remaining difference from the other. + // Use the smaller set for the difference in order to reduce hash lookups. + let (smaller, larger) = if self.len() <= other.len() { + (self, other) + } else { + (other, self) + }; + Union { + iter: larger.iter().chain(smaller.difference(larger)), + } + } + + /// Returns `true` if the set contains a value. + /// + /// The value may be any borrowed form of the set's value type, but + /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for + /// the value type. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// + /// let set: HashSet<_> = HashSet::try_from([1, 2, 3])?; + /// assert_eq!(set.contains(&1), true); + /// assert_eq!(set.contains(&4), false); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html + /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html + #[cfg_attr(feature = "inline-more", inline)] + pub fn contains(&self, value: &Q) -> bool + where + Q: Hash + Equivalent, + { + self.map.contains_key(value) + } + + /// Returns a reference to the value in the set, if any, that is equal to the given value. + /// + /// The value may be any borrowed form of the set's value type, but + /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for + /// the value type. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// + /// let set: HashSet<_> = HashSet::try_from([1, 2, 3])?; + /// assert_eq!(set.get(&2), Some(&2)); + /// assert_eq!(set.get(&4), None); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html + /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html + #[cfg_attr(feature = "inline-more", inline)] + pub fn get(&self, value: &Q) -> Option<&T> + where + Q: Hash + Equivalent, + { + // Avoid `Option::map` because it bloats LLVM IR. + match self.map.get_key_value(value) { + Some((k, _)) => Some(k), + None => None, + } + } + + /// Inserts the given `value` into the set if it is not present, then + /// returns a reference to the value in the set. 
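+    ///
+    /// Unlike [`try_insert`], this returns a reference whether or not the value
+    /// was newly inserted.
+    ///
+    /// [`try_insert`]: HashSet::try_insert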
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// + /// let mut set: HashSet<_> = HashSet::try_from([1, 2, 3])?; + /// assert_eq!(set.len(), 3); + /// assert_eq!(set.get_or_try_insert(2)?, &2); + /// assert_eq!(set.get_or_try_insert(100)?, &100); + /// assert_eq!(set.len(), 4); // 100 was inserted + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn get_or_try_insert(&mut self, value: T) -> Result<&T, Error> { + // Although the raw entry gives us `&mut T`, we only return `&T` to be consistent with + // `get`. Key mutation is "raw" because you're not supposed to affect `Eq` or `Hash`. + Ok(self + .map + .raw_entry_mut() + .from_key(&value) + .or_try_insert(value, ())? + .0) + } + + /// Inserts an owned copy of the given `value` into the set if it is not + /// present, then returns a reference to the value in the set. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{HashSet, String, Error}; + /// use rune_alloc::prelude::*; + /// + /// let mut set: HashSet = ["cat", "dog", "horse"] + /// .iter().map(|&pet| pet.try_to_owned()).try_collect::>()??; + /// + /// assert_eq!(set.len(), 3); + /// for &pet in &["cat", "dog", "fish"] { + /// let value = set.get_or_try_insert_owned(pet)?; + /// assert_eq!(value, pet); + /// } + /// assert_eq!(set.len(), 4); // a new "fish" was inserted + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn get_or_try_insert_owned(&mut self, value: &Q) -> Result<&T, Error> + where + Q: Hash + Equivalent + TryToOwned, + { + // Although the raw entry gives us `&mut T`, we only return `&T` to be consistent with + // `get`. Key mutation is "raw" because you're not supposed to affect `Eq` or `Hash`. + let (key, _) = match self.map.raw_entry_mut().from_key(value) { + map::RawEntryMut::Occupied(entry) => entry.into_key_value(), + map::RawEntryMut::Vacant(entry) => entry.try_insert(value.try_to_owned()?, ())?, + }; + + Ok(key) + } + + /// Inserts a value computed from `f` into the set if the given `value` is + /// not present, then returns a reference to the value in the set. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{HashSet, String, Error}; + /// use rune_alloc::prelude::*; + /// + /// let mut set: HashSet = ["cat", "dog", "horse"] + /// .iter().map(|&pet| pet.try_to_owned()).try_collect::>()??; + /// + /// assert_eq!(set.len(), 3); + /// for &pet in &["cat", "dog", "fish"] { + /// let value = set.get_or_try_insert_with(pet, str::try_to_owned)?; + /// assert_eq!(value, pet); + /// } + /// assert_eq!(set.len(), 4); // a new "fish" was inserted + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn get_or_try_insert_with(&mut self, value: &Q, f: F) -> Result<&T, Error> + where + Q: Hash + Equivalent, + F: FnOnce(&Q) -> Result, + { + // Although the raw entry gives us `&mut T`, we only return `&T` to be consistent with + // `get`. Key mutation is "raw" because you're not supposed to affect `Eq` or `Hash`. + let (key, _) = match self.map.raw_entry_mut().from_key(value) { + map::RawEntryMut::Occupied(entry) => entry.into_key_value(), + map::RawEntryMut::Vacant(entry) => entry.try_insert(f(value)?, ())?, + }; + + Ok(key) + } + + /// Gets the given value's corresponding entry in the set for in-place manipulation. 
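+    ///
+    /// Since a `HashSet` is a map with `()` values, this is a thin wrapper
+    /// around the underlying map's entry API.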
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// use rune_alloc::hash_set::Entry::*; + /// + /// let mut singles = HashSet::new(); + /// let mut dupes = HashSet::new(); + /// + /// for ch in "a short treatise on fungi".chars() { + /// if let Vacant(dupe_entry) = dupes.entry(ch) { + /// // We haven't already seen a duplicate, so + /// // check if we've at least seen it once. + /// match singles.entry(ch) { + /// Vacant(single_entry) => { + /// // We found a new character for the first time. + /// single_entry.try_insert()?; + /// } + /// Occupied(single_entry) => { + /// // We've already seen this once, "move" it to dupes. + /// single_entry.remove(); + /// dupe_entry.try_insert()?; + /// } + /// } + /// } + /// } + /// + /// assert!(!singles.contains(&'t') && dupes.contains(&'t')); + /// assert!(singles.contains(&'u') && !dupes.contains(&'u')); + /// assert!(!singles.contains(&'v') && !dupes.contains(&'v')); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn entry(&mut self, value: T) -> Entry<'_, T, S, A> { + match self.map.entry(value) { + map::Entry::Occupied(entry) => Entry::Occupied(OccupiedEntry { inner: entry }), + map::Entry::Vacant(entry) => Entry::Vacant(VacantEntry { inner: entry }), + } + } + + /// Returns `true` if `self` has no elements in common with `other`. + /// This is equivalent to checking for an empty intersection. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// + /// let a = HashSet::try_from([1, 2, 3])?; + /// let mut b = HashSet::new(); + /// + /// assert_eq!(a.is_disjoint(&b), true); + /// b.try_insert(4)?; + /// assert_eq!(a.is_disjoint(&b), true); + /// b.try_insert(1)?; + /// assert_eq!(a.is_disjoint(&b), false); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn is_disjoint(&self, other: &Self) -> bool { + self.iter().all(|v| !other.contains(v)) + } + + /// Returns `true` if the set is a subset of another, + /// i.e., `other` contains at least all the values in `self`. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// + /// let sup = HashSet::try_from([1, 2, 3])?; + /// let mut set = HashSet::new(); + /// + /// assert_eq!(set.is_subset(&sup), true); + /// set.try_insert(2)?; + /// assert_eq!(set.is_subset(&sup), true); + /// set.try_insert(4)?; + /// assert_eq!(set.is_subset(&sup), false); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn is_subset(&self, other: &Self) -> bool { + self.len() <= other.len() && self.iter().all(|v| other.contains(v)) + } + + /// Returns `true` if the set is a superset of another, + /// i.e., `self` contains at least all the values in `other`. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::HashSet; + /// + /// let sub = HashSet::try_from([1, 2])?; + /// let mut set = HashSet::new(); + /// + /// assert_eq!(set.is_superset(&sub), false); + /// + /// set.try_insert(0)?; + /// set.try_insert(1)?; + /// assert_eq!(set.is_superset(&sub), false); + /// + /// set.try_insert(2)?; + /// assert_eq!(set.is_superset(&sub), true); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn is_superset(&self, other: &Self) -> bool { + other.is_subset(self) + } + + /// Adds a value to the set. + /// + /// If the set did not have this value present, `true` is returned. + /// + /// If the set did have this value present, `false` is returned. 
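+    ///
+    /// In that case the stored value is left untouched; use [`try_replace`] to
+    /// overwrite a value that compares equal.
+    ///
+    /// [`try_replace`]: HashSet::try_replace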
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashSet;
+    ///
+    /// let mut set = HashSet::new();
+    ///
+    /// assert_eq!(set.try_insert(2)?, true);
+    /// assert_eq!(set.try_insert(2)?, false);
+    /// assert_eq!(set.len(), 1);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn try_insert(&mut self, value: T) -> Result<bool, Error> {
+        Ok(self.map.try_insert(value, ())?.is_none())
+    }
+
+    #[cfg(test)]
+    pub(crate) fn insert(&mut self, value: T) -> bool {
+        self.try_insert(value).abort()
+    }
+
+    /// Inserts a value into the set without checking whether the value already
+    /// exists in the set.
+    ///
+    /// Returns a reference to the value just inserted.
+    ///
+    /// This operation is safe if the value does not already exist in the set.
+    ///
+    /// However, if the value already exists in the set, the behavior is
+    /// unspecified: this operation may panic, loop forever, or any following
+    /// operation with the set may panic, loop forever or return an arbitrary
+    /// result.
+    ///
+    /// That said, this operation (and following operations) are guaranteed to
+    /// not violate memory safety.
+    ///
+    /// This operation is faster than a regular insert, because it does not
+    /// perform a lookup before insertion.
+    ///
+    /// This operation is useful during the initial population of the set.
+    /// For example, when constructing a set from another set, we know
+    /// that values are unique.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn try_insert_unique_unchecked(&mut self, value: T) -> Result<&T, Error> {
+        Ok(self.map.try_insert_unique_unchecked(value, ())?.0)
+    }
+
+    /// Adds a value to the set, replacing the existing value, if any, that is equal to the given
+    /// one. Returns the replaced value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashSet;
+    ///
+    /// let mut set = HashSet::new();
+    /// set.try_insert(Vec::<i32>::new())?;
+    ///
+    /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 0);
+    /// set.try_replace(Vec::with_capacity(10))?;
+    /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 10);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn try_replace(&mut self, value: T) -> Result<Option<T>, Error> {
+        match self.map.entry(value) {
+            map::Entry::Occupied(occupied) => Ok(Some(occupied.replace_key())),
+            map::Entry::Vacant(vacant) => {
+                vacant.try_insert(())?;
+                Ok(None)
+            }
+        }
+    }
+
+    #[cfg(test)]
+    pub(crate) fn replace(&mut self, value: T) -> Option<T> {
+        self.try_replace(value).abort()
+    }
+
+    /// Removes a value from the set. Returns whether the value was
+    /// present in the set.
+    ///
+    /// The value may be any borrowed form of the set's value type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the value type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashSet;
+    ///
+    /// let mut set = HashSet::new();
+    ///
+    /// set.try_insert(2)?;
+    /// assert_eq!(set.remove(&2), true);
+    /// assert_eq!(set.remove(&2), false);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove<Q>(&mut self, value: &Q) -> bool
+    where
+        Q: ?Sized + Hash + Equivalent<T>,
+    {
+        self.map.remove(value).is_some()
+    }
+
+    /// Removes and returns the value in the set, if any, that is equal to the given one.
+    ///
+    /// The value may be any borrowed form of the set's value type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the value type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashSet;
+    ///
+    /// let mut set: HashSet<_> = HashSet::try_from([1, 2, 3])?;
+    /// assert_eq!(set.take(&2), Some(2));
+    /// assert_eq!(set.take(&2), None);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn take<Q>(&mut self, value: &Q) -> Option<T>
+    where
+        Q: ?Sized + Hash + Equivalent<T>,
+    {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.map.remove_entry(value) {
+            Some((k, _)) => Some(k),
+            None => None,
+        }
+    }
+}
+
+impl<T, S, A: Allocator> HashSet<T, S, A> {
+    /// Returns a reference to the [`RawTable`] used underneath [`HashSet`].
+    /// This function is only available if the `raw` feature of the crate is enabled.
+    ///
+    /// # Note
+    ///
+    /// Calling this function is safe, but using the raw hash table API may require
+    /// unsafe functions or blocks.
+    ///
+    /// The `RawTable` API gives the lowest level of control over the set, which can be useful
+    /// for extending the HashSet's API, but may lead to *[undefined behavior]*.
+    ///
+    /// [`RawTable`]: crate::hashbrown::raw::RawTable
+    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn raw_table(&self) -> &RawTable<(T, ()), A> {
+        self.map.raw_table()
+    }
+
+    /// Returns a mutable reference to the [`RawTable`] used underneath
+    /// [`HashSet`]. This function is only available if the `raw` feature of the
+    /// crate is enabled.
+    ///
+    /// # Note
+    ///
+    /// Calling this function is safe, but using the raw hash table API may
+    /// require unsafe functions or blocks.
+    ///
+    /// The `RawTable` API gives the lowest level of control over the set, which
+    /// can be useful for extending the HashSet's API, but may lead to
+    /// *[undefined behavior]*.
+    ///
+    /// [`RawTable`]: crate::hashbrown::raw::RawTable
+    /// [undefined behavior]:
+    /// https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn raw_table_mut(&mut self) -> &mut RawTable<(T, ()), A> {
+        self.map.raw_table_mut()
+    }
+}
+
+impl<T, S, A> PartialEq for HashSet<T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    fn eq(&self, other: &Self) -> bool {
+        if self.len() != other.len() {
+            return false;
+        }
+
+        self.iter().all(|key| other.contains(key))
+    }
+}
+
+impl<T, S, A> Eq for HashSet<T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+}
+
+impl<T, S, A> fmt::Debug for HashSet<T, S, A>
+where
+    T: fmt::Debug,
+    A: Allocator,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_set().entries(self.iter()).finish()
+    }
+}
+
+impl<T, S, A> From<HashMap<T, (), S, A>> for HashSet<T, S, A>
+where
+    A: Allocator,
+{
+    fn from(map: HashMap<T, (), S, A>) -> Self {
+        Self { map }
+    }
+}
+
+impl<T, S, A: Allocator> TryFromIteratorIn<T, A> for HashSet<T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher + Default,
+{
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn try_from_iter_in<I: IntoIterator<Item = T>>(iter: I, alloc: A) -> Result<Self, Error> {
+        let mut set = Self::with_hasher_in(S::default(), alloc);
+        set.try_extend(iter)?;
+        Ok(set)
+    }
+}
+
+#[cfg(test)]
+impl<T, S, A: Default + Allocator> FromIterator<T> for HashSet<T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher + Default,
+{
+    #[inline]
+    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
+        Self::try_from_iter_in(iter, A::default()).abort()
+    }
+}
+
+// The default hasher is used to match the std implementation signature
+impl<T, A: Default + Allocator, const N: usize> TryFrom<[T; N]>
+    for HashSet<T, DefaultHashBuilder, A>
+where
+    T: Eq + Hash,
+{
+    type Error = Error;
+
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashSet;
+    ///
+    /// let set1: HashSet<_> = HashSet::try_from([1, 2, 3, 4])?;
+    /// let set2: HashSet<_> = [1, 2, 3, 4].try_into()?;
+    /// assert_eq!(set1, set2);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    fn try_from(arr: [T; N]) -> Result<Self, Self::Error> {
+        Self::try_from_iter_in(arr, A::default())
+    }
+}
+
+impl<T, S, A> TryExtend<T> for HashSet<T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn try_extend<I: IntoIterator<Item = T>>(&mut self, iter: I) -> Result<(), Error> {
+        self.map.try_extend(iter.into_iter().map(|k| (k, ())))
+    }
+}
+
+#[cfg(test)]
+impl<T, S, A> Extend<T> for HashSet<T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
+        self.try_extend(iter).abort();
+    }
+}
+
+impl<'a, T, S, A> TryExtend<&'a T> for HashSet<T, S, A>
+where
+    T: 'a + Eq + Hash + Copy,
+    S: BuildHasher,
+    A: Allocator,
+{
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn try_extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) -> Result<(), Error> {
+        self.try_extend(iter.into_iter().copied())
+    }
+}
+
+#[cfg(test)]
+impl<'a, T, S, A> Extend<&'a T> for HashSet<T, S, A>
+where
+    T: 'a + Eq + Hash + Copy,
+    S: BuildHasher,
+    A: Allocator,
+{
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
+        self.try_extend(iter).abort()
+    }
+}
+
+impl<T, S, A> Default for HashSet<T, S, A>
+where
+    S: Default,
+    A: Default + Allocator,
+{
+    /// Creates an empty `HashSet<T, S>` with the `Default` value for the hasher.
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn default() -> Self {
+        Self {
+            map: HashMap::default(),
+        }
+    }
+}
+
+/// An iterator over the items of a `HashSet`.
+///
+/// This `struct` is created by the [`iter`] method on [`HashSet`].
+/// See its documentation for more.
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`iter`]: struct.HashSet.html#method.iter
+pub struct Iter<'a, K> {
+    iter: Keys<'a, K, ()>,
+}
+
+/// An owning iterator over the items of a `HashSet`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`HashSet`]
+/// (provided by the `IntoIterator` trait). See its documentation for more.
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`into_iter`]: struct.HashSet.html#method.into_iter
+pub struct IntoIter<K, A: Allocator = Global> {
+    iter: map::IntoIter<K, (), A>,
+}
+
+/// A draining iterator over the items of a `HashSet`.
+///
+/// This `struct` is created by the [`drain`] method on [`HashSet`].
+/// See its documentation for more.
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`drain`]: struct.HashSet.html#method.drain
+pub struct Drain<'a, K, A: Allocator = Global> {
+    iter: map::Drain<'a, K, (), A>,
+}
+
+/// A draining iterator over entries of a `HashSet` which don't satisfy the predicate `f`.
+///
+/// This `struct` is created by the [`extract_if`] method on [`HashSet`]. See its
+/// documentation for more.
+///
+/// [`extract_if`]: struct.HashSet.html#method.extract_if
+/// [`HashSet`]: struct.HashSet.html
+#[must_use = "Iterators are lazy unless consumed"]
+pub struct ExtractIf<'a, K, F, A: Allocator = Global>
+where
+    F: FnMut(&K) -> bool,
+{
+    f: F,
+    inner: ExtractIfInner<'a, K, (), A>,
+}
+
+/// A lazy iterator producing elements in the intersection of `HashSet`s.
+///
+/// This `struct` is created by the [`intersection`] method on [`HashSet`].
+/// See its documentation for more.
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`intersection`]: struct.HashSet.html#method.intersection
+pub struct Intersection<'a, T, S, A: Allocator = Global> {
+    // iterator of the first set
+    iter: Iter<'a, T>,
+    // the second set
+    other: &'a HashSet<T, S, A>,
+}
+
+/// A lazy iterator producing elements in the difference of `HashSet`s.
+///
+/// This `struct` is created by the [`difference`] method on [`HashSet`].
+/// See its documentation for more.
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`difference`]: struct.HashSet.html#method.difference
+pub struct Difference<'a, T, S, A: Allocator = Global> {
+    // iterator of the first set
+    iter: Iter<'a, T>,
+    // the second set
+    other: &'a HashSet<T, S, A>,
+}
+
+/// A lazy iterator producing elements in the symmetric difference of `HashSet`s.
+///
+/// This `struct` is created by the [`symmetric_difference`] method on
+/// [`HashSet`]. See its documentation for more.
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`symmetric_difference`]: struct.HashSet.html#method.symmetric_difference
+pub struct SymmetricDifference<'a, T, S, A: Allocator = Global> {
+    iter: Chain<Difference<'a, T, S, A>, Difference<'a, T, S, A>>,
+}
+
+/// A lazy iterator producing elements in the union of `HashSet`s.
+///
+/// This `struct` is created by the [`union`] method on [`HashSet`].
+/// See its documentation for more.
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`union`]: struct.HashSet.html#method.union
+pub struct Union<'a, T, S, A: Allocator = Global> {
+    iter: Chain<Iter<'a, T>, Difference<'a, T, S, A>>,
+}
+
+impl<'a, T, S, A: Allocator> IntoIterator for &'a HashSet<T, S, A> {
+    type Item = &'a T;
+    type IntoIter = Iter<'a, T>;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn into_iter(self) -> Iter<'a, T> {
+        self.iter()
+    }
+}
+
+impl<T, S, A: Allocator> IntoIterator for HashSet<T, S, A> {
+    type Item = T;
+    type IntoIter = IntoIter<T, A>;
+
+    /// Creates a consuming iterator, that is, one that moves each value out
+    /// of the set in arbitrary order. The set cannot be used after calling
+    /// this.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::{HashSet, Vec};
+    /// use rune_alloc::prelude::*;
+    ///
+    /// let mut set = HashSet::new();
+    /// set.try_insert("a".to_string())?;
+    /// set.try_insert("b".to_string())?;
+    ///
+    /// // Not possible to collect to a Vec<String> with a regular `.iter()`.
+    /// let v: Vec<String> = set.into_iter().try_collect()?;
+    ///
+    /// // Will print in an arbitrary order.
+    /// for x in &v {
+    ///     println!("{}", x);
+    /// }
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn into_iter(self) -> IntoIter<T, A> {
+        IntoIter {
+            iter: self.map.into_iter(),
+        }
+    }
+}
+
+impl<K> Clone for Iter<'_, K> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Iter {
+            iter: self.iter.clone(),
+        }
+    }
+}
+impl<'a, K> Iterator for Iter<'a, K> {
+    type Item = &'a K;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<&'a K> {
+        self.iter.next()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+impl<'a, K> ExactSizeIterator for Iter<'a, K> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.iter.len()
+    }
+}
+impl<K> FusedIterator for Iter<'_, K> {}
+
+impl<K: fmt::Debug> fmt::Debug for Iter<'_, K> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+impl<K, A: Allocator> Iterator for IntoIter<K, A> {
+    type Item = K;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<K> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.iter.next() {
+            Some((k, _)) => Some(k),
+            None => None,
+        }
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+impl<K, A: Allocator> ExactSizeIterator for IntoIter<K, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.iter.len()
+    }
+}
+impl<K, A: Allocator> FusedIterator for IntoIter<K, A> {}
+
+impl<K: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<K, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let entries_iter = self.iter.iter().map(|(k, _)| k);
+        f.debug_list().entries(entries_iter).finish()
+    }
+}
+
+impl<K, A: Allocator> Iterator for Drain<'_, K, A> {
+    type Item = K;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<K> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.iter.next() {
+            Some((k, _)) => Some(k),
+            None => None,
+        }
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+impl<K, A: Allocator> ExactSizeIterator for Drain<'_, K, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.iter.len()
+    }
+}
+impl<K, A: Allocator> FusedIterator for Drain<'_, K, A> {}
+
+impl<K: fmt::Debug, A: Allocator> fmt::Debug for Drain<'_, K, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let entries_iter = self.iter.iter().map(|(k, _)| k);
+        f.debug_list().entries(entries_iter).finish()
+    }
+}
+
+impl<K, F, A: Allocator> Iterator for ExtractIf<'_, K, F, A>
+where
+    F: FnMut(&K) -> bool,
+{
+    type Item = K;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<Self::Item> {
+        let f = &mut self.f;
+        let (k, _) = self.inner.next(&mut |k, _| f(k))?;
+        Some(k)
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (0, self.inner.iter.size_hint().1)
+    }
+}
+
+impl<K, F, A: Allocator> FusedIterator for ExtractIf<'_, K, F, A> where F: FnMut(&K) -> bool {}
+
+impl<T, S, A: Allocator> Clone for Intersection<'_, T, S, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Intersection {
+            iter: self.iter.clone(),
+            ..*self
+        }
+    }
+}
+
+impl<'a, T, S, A> Iterator for Intersection<'a, T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    type Item = &'a T;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<&'a T> {
+        loop {
+            let elt = self.iter.next()?;
+            if self.other.contains(elt) {
+                return Some(elt);
+            }
+        }
+    }
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let (_, upper) = self.iter.size_hint();
+        (0, upper)
+    }
+}
+
+impl<T, S, A> fmt::Debug for Intersection<'_, T, S, A>
+where
+    T: fmt::Debug + Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+impl<T, S, A> FusedIterator for Intersection<'_, T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+}
+
+impl<T, S, A: Allocator> Clone for Difference<'_, T, S, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Difference {
+            iter: self.iter.clone(),
+            ..*self
+        }
+    }
+}
+
+impl<'a, T, S, A> Iterator for Difference<'a, T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    type Item = &'a T;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<&'a T> {
+        loop {
+            let elt = self.iter.next()?;
+            if !self.other.contains(elt) {
+                return Some(elt);
+            }
+        }
+    }
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let (_, upper) = self.iter.size_hint();
+        (0, upper)
+    }
+}
+
+impl<T, S, A> FusedIterator for Difference<'_, T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+}
+
+impl<T, S, A> fmt::Debug for Difference<'_, T, S, A>
+where
+    T: fmt::Debug + Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+impl<T, S, A: Allocator> Clone for SymmetricDifference<'_, T, S, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        SymmetricDifference {
+            iter: self.iter.clone(),
+        }
+    }
+}
+
+impl<'a, T, S, A> Iterator for SymmetricDifference<'a, T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    type Item = &'a T;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<&'a T> {
+        self.iter.next()
+    }
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+
+impl<T, S, A> FusedIterator for SymmetricDifference<'_, T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+}
+
+impl<T, S, A> fmt::Debug for SymmetricDifference<'_, T, S, A>
+where
+    T: fmt::Debug + Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+impl<T, S, A: Allocator> Clone for Union<'_, T, S, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Union {
+            iter: self.iter.clone(),
+        }
+    }
+}
+
+impl<T, S, A> FusedIterator for Union<'_, T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+}
+
+impl<T, S, A> fmt::Debug for Union<'_, T, S, A>
+where
+    T: fmt::Debug + Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+impl<'a, T, S, A> Iterator for Union<'a, T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    type Item = &'a T;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<&'a T> {
+        self.iter.next()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+
+/// A view into a single entry in a set, which may either be vacant or occupied.
+///
+/// This `enum` is constructed from the [`entry`] method on [`HashSet`].
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`entry`]: struct.HashSet.html#method.entry
+///
+/// # Examples
+///
+/// ```
+/// use rune_alloc::{HashSet, Vec};
+/// use rune_alloc::hash_set::{Entry, OccupiedEntry};
+/// use rune_alloc::prelude::*;
+///
+/// let mut set = HashSet::new();
+/// set.try_extend(["a", "b", "c"])?;
+/// assert_eq!(set.len(), 3);
+///
+/// // Existing value (insert)
+/// let entry: Entry<_, _> = set.entry("a");
+/// let _raw_o: OccupiedEntry<_, _> = entry.try_insert()?;
+/// assert_eq!(set.len(), 3);
+/// // Nonexistent value (insert)
+/// set.entry("d").try_insert()?;
+///
+/// // Existing value (or_try_insert)
+/// set.entry("b").or_try_insert()?;
+/// // Nonexistent value (or_try_insert)
+/// set.entry("e").or_try_insert()?;
+///
+/// println!("Our HashSet: {:?}", set);
+///
+/// let mut vec: Vec<_> = set.iter().copied().try_collect()?;
+/// // The `Iter` iterator produces items in arbitrary order, so the
+/// // items must be sorted to test them against a sorted array.
+/// vec.sort_unstable();
+/// assert_eq!(vec, ["a", "b", "c", "d", "e"]);
+/// # Ok::<_, rune_alloc::Error>(())
+/// ```
+pub enum Entry<'a, T, S, A = Global>
+where
+    A: Allocator,
+{
+    /// An occupied entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::hash_set::Entry;
+    /// use rune_alloc::HashSet;
+    ///
+    /// let mut set: HashSet<_> = HashSet::try_from(["a", "b"])?;
+    ///
+    /// match set.entry("a") {
+    ///     Entry::Vacant(_) => unreachable!(),
+    ///     Entry::Occupied(_) => { }
+    /// }
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    Occupied(OccupiedEntry<'a, T, S, A>),
+
+    /// A vacant entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::hash_set::{Entry, HashSet};
+    /// let mut set = HashSet::new();
+    ///
+    /// match set.entry("a") {
+    ///     Entry::Occupied(_) => unreachable!(),
+    ///     Entry::Vacant(_) => { }
+    /// }
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    Vacant(VacantEntry<'a, T, S, A>),
+}
+
+impl<T: fmt::Debug, S, A: Allocator> fmt::Debug for Entry<'_, T, S, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match *self {
+            Entry::Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
+            Entry::Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(),
+        }
+    }
+}
+
+/// A view into an occupied entry in a `HashSet`.
+/// It is part of the [`Entry`] enum.
+///
+/// [`Entry`]: enum.Entry.html
+///
+/// # Examples
+///
+/// ```
+/// use rune_alloc::hash_set::{Entry, HashSet, OccupiedEntry};
+/// use rune_alloc::prelude::*;
+///
+/// let mut set = HashSet::new();
+/// set.try_extend(["a", "b", "c"])?;
+///
+/// let _entry_o: OccupiedEntry<_, _> = set.entry("a").try_insert()?;
+/// assert_eq!(set.len(), 3);
+///
+/// // Existing key
+/// match set.entry("a") {
+///     Entry::Vacant(_) => unreachable!(),
+///     Entry::Occupied(view) => {
+///         assert_eq!(view.get(), &"a");
+///     }
+/// }
+///
+/// assert_eq!(set.len(), 3);
+///
+/// // Existing key (take)
+/// match set.entry("c") {
+///     Entry::Vacant(_) => unreachable!(),
+///     Entry::Occupied(view) => {
+///         assert_eq!(view.remove(), "c");
+///     }
+/// }
+/// assert_eq!(set.get(&"c"), None);
+/// assert_eq!(set.len(), 2);
+/// # Ok::<_, rune_alloc::Error>(())
+/// ```
+pub struct OccupiedEntry<'a, T, S, A: Allocator = Global> {
+    inner: map::OccupiedEntry<'a, T, (), S, A>,
+}
+
+impl<T: fmt::Debug, S, A: Allocator> fmt::Debug for OccupiedEntry<'_, T, S, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("OccupiedEntry")
+            .field("value", self.get())
+            .finish()
+    }
+}
+
+/// A view into a vacant entry in a `HashSet`.
+/// It is part of the [`Entry`] enum.
+///
+/// [`Entry`]: enum.Entry.html
+///
+/// # Examples
+///
+/// ```
+/// use rune_alloc::hash_set::{Entry, HashSet, VacantEntry};
+///
+/// let mut set = HashSet::<&str>::new();
+///
+/// let entry_v: VacantEntry<_, _> = match set.entry("a") {
+///     Entry::Vacant(view) => view,
+///     Entry::Occupied(_) => unreachable!(),
+/// };
+/// entry_v.try_insert()?;
+/// assert!(set.contains("a") && set.len() == 1);
+///
+/// // Nonexistent key (try_insert)
+/// match set.entry("b") {
+///     Entry::Vacant(view) => view.try_insert()?,
+///     Entry::Occupied(_) => unreachable!(),
+/// }
+/// assert!(set.contains("b") && set.len() == 2);
+/// # Ok::<_, rune_alloc::Error>(())
+/// ```
+pub struct VacantEntry<'a, T, S, A: Allocator = Global> {
+    inner: map::VacantEntry<'a, T, (), S, A>,
+}
+
+impl<T: fmt::Debug, S, A: Allocator> fmt::Debug for VacantEntry<'_, T, S, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("VacantEntry").field(self.get()).finish()
+    }
+}
+
+impl<'a, T, S, A: Allocator> Entry<'a, T, S, A> {
+    /// Sets the value of the entry, and returns an `OccupiedEntry`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashSet;
+    ///
+    /// let mut set: HashSet<&str> = HashSet::new();
+    /// let entry = set.entry("horseyland").try_insert()?;
+    ///
+    /// assert_eq!(entry.get(), &"horseyland");
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn try_insert(self) -> Result<OccupiedEntry<'a, T, S, A>, Error>
+    where
+        T: Hash,
+        S: BuildHasher,
+    {
+        match self {
+            Entry::Occupied(entry) => Ok(entry),
+            Entry::Vacant(entry) => entry.try_insert_entry(),
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting if it was vacant.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashSet;
+    ///
+    /// let mut set: HashSet<&str> = HashSet::new();
+    ///
+    /// // nonexistent key
+    /// set.entry("poneyland").or_try_insert()?;
+    /// assert!(set.contains("poneyland"));
+    ///
+    /// // existing key
+    /// set.entry("poneyland").or_try_insert()?;
+    /// assert!(set.contains("poneyland"));
+    /// assert_eq!(set.len(), 1);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_try_insert(self) -> Result<(), Error>
+    where
+        T: Hash,
+        S: BuildHasher,
+    {
+        if let Entry::Vacant(entry) = self {
+            entry.try_insert()?;
+        }
+
+        Ok(())
+    }
+
+    /// Returns a reference to this entry's value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashSet;
+    ///
+    /// let mut set: HashSet<&str> = HashSet::new();
+    /// set.entry("poneyland").or_try_insert()?;
+    /// // existing key
+    /// assert_eq!(set.entry("poneyland").get(), &"poneyland");
+    /// // nonexistent key
+    /// assert_eq!(set.entry("horseland").get(), &"horseland");
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get(&self) -> &T {
+        match *self {
+            Entry::Occupied(ref entry) => entry.get(),
+            Entry::Vacant(ref entry) => entry.get(),
+        }
+    }
+}
+
+impl<T, S, A: Allocator> OccupiedEntry<'_, T, S, A> {
+    /// Gets a reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::hash_set::{Entry, HashSet};
+    ///
+    /// let mut set: HashSet<&str> = HashSet::new();
+    /// set.entry("poneyland").or_try_insert()?;
+    ///
+    /// match set.entry("poneyland") {
+    ///     Entry::Vacant(_) => panic!(),
+    ///     Entry::Occupied(entry) => assert_eq!(entry.get(), &"poneyland"),
+    /// }
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get(&self) -> &T {
+        self.inner.key()
+    }
+
+    /// Takes the value out of the entry, and returns it.
+    /// Keeps the allocated memory for reuse.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashSet;
+    /// use rune_alloc::hash_set::Entry;
+    ///
+    /// let mut set: HashSet<&str> = HashSet::new();
+    /// // The set is empty
+    /// assert!(set.is_empty() && set.capacity() == 0);
+    ///
+    /// set.entry("poneyland").or_try_insert()?;
+    /// let capacity_before_remove = set.capacity();
+    ///
+    /// if let Entry::Occupied(o) = set.entry("poneyland") {
+    ///     assert_eq!(o.remove(), "poneyland");
+    /// }
+    ///
+    /// assert_eq!(set.contains("poneyland"), false);
+    /// // Now the set holds no elements, but its capacity is equal to the old one
+    /// assert!(set.len() == 0 && set.capacity() == capacity_before_remove);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove(self) -> T {
+        self.inner.remove_entry().0
+    }
+
+    /// Replaces the entry, returning the old value. The new value in the set
+    /// will be the value used to create this entry.
+    ///
+    /// # Panics
+    ///
+    /// Will panic if this OccupiedEntry was created through
+    /// [`Entry::try_insert`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::hash_set::{Entry, HashSet};
+    /// use std::rc::Rc;
+    ///
+    /// let mut set: HashSet<Rc<String>> = HashSet::new();
+    /// let key_one = Rc::new("Stringthing".to_string());
+    /// let key_two = Rc::new("Stringthing".to_string());
+    ///
+    /// set.try_insert(key_one.clone())?;
+    /// assert!(Rc::strong_count(&key_one) == 2 && Rc::strong_count(&key_two) == 1);
+    ///
+    /// match set.entry(key_two.clone()) {
+    ///     Entry::Occupied(entry) => {
+    ///         let old_key: Rc<String> = entry.replace();
+    ///         assert!(Rc::ptr_eq(&key_one, &old_key));
+    ///     }
+    ///     Entry::Vacant(_) => panic!(),
+    /// }
+    ///
+    /// assert!(Rc::strong_count(&key_one) == 1 && Rc::strong_count(&key_two) == 2);
+    /// assert!(set.contains(&"Stringthing".to_owned()));
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn replace(self) -> T {
+        self.inner.replace_key()
+    }
+}
+
+impl<'a, T, S, A: Allocator> VacantEntry<'a, T, S, A> {
+    /// Gets a reference to the value that would be used when inserting
+    /// through the `VacantEntry`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashSet;
+    ///
+    /// let mut set: HashSet<&str> = HashSet::new();
+    /// assert_eq!(set.entry("poneyland").get(), &"poneyland");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get(&self) -> &T {
+        self.inner.key()
+    }
+
+    /// Take ownership of the value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::hash_set::{Entry, HashSet};
+    ///
+    /// let mut set = HashSet::new();
+    ///
+    /// match set.entry("poneyland") {
+    ///     Entry::Occupied(_) => panic!(),
+    ///     Entry::Vacant(v) => assert_eq!(v.into_value(), "poneyland"),
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn into_value(self) -> T {
+        self.inner.into_key()
+    }
+
+    /// Sets the value of the entry with the `VacantEntry`'s value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::HashSet;
+    /// use rune_alloc::hash_set::Entry;
+    ///
+    /// let mut set: HashSet<&str> = HashSet::new();
+    ///
+    /// if let Entry::Vacant(o) = set.entry("poneyland") {
+    ///     o.try_insert()?;
+    /// }
+    /// assert!(set.contains("poneyland"));
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn try_insert(self) -> Result<(), Error>
+    where
+        T: Hash,
+        S: BuildHasher,
+    {
+        self.inner.try_insert(())?;
+        Ok(())
+    }
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn try_insert_entry(self) -> Result<OccupiedEntry<'a, T, S, A>, Error>
+    where
+        T: Hash,
+        S: BuildHasher,
+    {
+        Ok(OccupiedEntry {
+            inner: self.inner.try_insert_entry(())?,
+        })
+    }
+}
+
+#[allow(dead_code)]
+fn assert_covariance() {
+    fn set<'new>(v: HashSet<&'static str>) -> HashSet<&'new str> {
+        v
+    }
+    fn iter<'a, 'new>(v: Iter<'a, &'static str>) -> Iter<'a, &'new str> {
+        v
+    }
+    fn into_iter<'new, A: Allocator>(v: IntoIter<&'static str, A>) -> IntoIter<&'new str, A> {
+        v
+    }
+    fn difference<'a, 'new, A: Allocator>(
+        v: Difference<'a, &'static str, DefaultHashBuilder, A>,
+    ) -> Difference<'a, &'new str, DefaultHashBuilder, A> {
+        v
+    }
+    fn symmetric_difference<'a, 'new, A: Allocator>(
+        v: SymmetricDifference<'a, &'static str, DefaultHashBuilder, A>,
+    ) -> SymmetricDifference<'a, &'new str, DefaultHashBuilder, A> {
+        v
+    }
+    fn intersection<'a, 'new, A: Allocator>(
+        v: Intersection<'a, &'static str, DefaultHashBuilder, A>,
+    ) -> Intersection<'a, &'new str, DefaultHashBuilder, A> {
+        v
+    }
+    fn union<'a, 'new, A: Allocator>(
+        v: Union<'a, &'static str, DefaultHashBuilder, A>,
+    ) -> Union<'a, &'new str, DefaultHashBuilder, A> {
+        v
+    }
+    fn drain<'new, A: Allocator>(d: Drain<'static, &'static str, A>) -> Drain<'new, &'new str, A> {
+        d
+    }
+}
+
+#[cfg(test)]
+mod test_set {
+    use super::super::map::DefaultHashBuilder;
+    use super::HashSet;
+    use rust_alloc::vec::Vec;
+    use rust_alloc::{format, vec};
+
+    #[test]
+    fn test_zero_capacities() {
+        type HS = HashSet<i32>;
+
+        let s = HS::new();
+        assert_eq!(s.capacity(), 0);
+
+        let s = HS::default();
+        assert_eq!(s.capacity(), 0);
+
+        let s = HS::with_hasher(DefaultHashBuilder::default());
+        assert_eq!(s.capacity(), 0);
+
+        let s = HS::with_capacity(0);
+        assert_eq!(s.capacity(), 0);
+
+        let s = HS::with_capacity_and_hasher(0, DefaultHashBuilder::default());
+        assert_eq!(s.capacity(), 0);
+
+        let mut s = HS::new();
+        s.insert(1);
+        s.insert(2);
+        s.remove(&1);
+        s.remove(&2);
+        s.shrink_to_fit();
+        assert_eq!(s.capacity(), 0);
+
+        let mut s = HS::new();
+        s.reserve(0);
+        assert_eq!(s.capacity(), 0);
+    }
+
+    #[test]
+    fn test_disjoint() {
+        let mut xs = HashSet::new();
+        let mut ys = HashSet::new();
+        assert!(xs.is_disjoint(&ys));
+        assert!(ys.is_disjoint(&xs));
+        assert!(xs.insert(5));
+        assert!(ys.insert(11));
+        assert!(xs.is_disjoint(&ys));
+        assert!(ys.is_disjoint(&xs));
+        assert!(xs.insert(7));
+        assert!(xs.insert(19));
+        assert!(xs.insert(4));
+        assert!(ys.insert(2));
+        assert!(ys.insert(-11));
+        assert!(xs.is_disjoint(&ys));
+        assert!(ys.is_disjoint(&xs));
+        assert!(ys.insert(7));
+        assert!(!xs.is_disjoint(&ys));
+        assert!(!ys.is_disjoint(&xs));
+    }
+
+    #[test]
+    fn test_subset_and_superset() {
+        let mut a = HashSet::new();
+        assert!(a.insert(0));
+        assert!(a.insert(5));
+        assert!(a.insert(11));
+        assert!(a.insert(7));
+
+        let mut b = HashSet::new();
+        assert!(b.insert(0));
+        assert!(b.insert(7));
+        assert!(b.insert(19));
+        assert!(b.insert(250));
+        assert!(b.insert(11));
+        assert!(b.insert(200));
+
+        assert!(!a.is_subset(&b));
+        assert!(!a.is_superset(&b));
+        assert!(!b.is_subset(&a));
+        assert!(!b.is_superset(&a));
+
+        assert!(b.insert(5));
+
+        assert!(a.is_subset(&b));
+        assert!(!a.is_superset(&b));
+        assert!(!b.is_subset(&a));
+        assert!(b.is_superset(&a));
+    }
+
+    #[test]
+    fn test_iterate() {
+        let mut a = HashSet::new();
+        for i in 0..32 {
+            assert!(a.insert(i));
+        }
+        let mut observed: u32 = 0;
+        for k in &a {
+            observed |= 1 << *k;
+        }
+        assert_eq!(observed, 0xFFFF_FFFF);
+    }
+
+    #[test]
+    fn test_intersection() {
+        let mut a = HashSet::new();
+        let mut b = HashSet::new();
+
+        assert!(a.insert(11));
+        assert!(a.insert(1));
+        assert!(a.insert(3));
+        assert!(a.insert(77));
+        assert!(a.insert(103));
+        assert!(a.insert(5));
+        assert!(a.insert(-5));
+
+        assert!(b.insert(2));
+        assert!(b.insert(11));
+        assert!(b.insert(77));
+        assert!(b.insert(-9));
+        assert!(b.insert(-42));
+        assert!(b.insert(5));
+        assert!(b.insert(3));
+
+        let mut i = 0;
+        let expected = [3, 5, 11, 77];
+        for x in a.intersection(&b) {
+            assert!(expected.contains(x));
+            i += 1;
+        }
+        assert_eq!(i, expected.len());
+    }
+
+    #[test]
+    fn test_difference() {
+        let mut a = HashSet::new();
+        let mut b = HashSet::new();
+
+        assert!(a.insert(1));
+        assert!(a.insert(3));
+        assert!(a.insert(5));
+        assert!(a.insert(9));
+        assert!(a.insert(11));
+
+        assert!(b.insert(3));
+        assert!(b.insert(9));
+
+        let mut i = 0;
+        let expected = [1, 5, 11];
+        for x in a.difference(&b) {
+            assert!(expected.contains(x));
+            i += 1;
+        }
+        assert_eq!(i, expected.len());
+    }
+
+    #[test]
+    fn test_symmetric_difference() {
+        let mut a = HashSet::new();
+        let mut b = HashSet::new();
+
+        assert!(a.insert(1));
+        assert!(a.insert(3));
+        assert!(a.insert(5));
+        assert!(a.insert(9));
+        assert!(a.insert(11));
+
+        assert!(b.insert(-2));
+        assert!(b.insert(3));
+        assert!(b.insert(9));
+        assert!(b.insert(14));
+        assert!(b.insert(22));
+
+        let mut i = 0;
+        let expected = [-2, 1, 5, 11, 14, 22];
+        for x in a.symmetric_difference(&b) {
+            assert!(expected.contains(x));
+            i += 1;
+        }
+        assert_eq!(i, expected.len());
+    }
+
+    #[test]
+    fn test_union() {
+        let mut a = HashSet::new();
+        let mut b = HashSet::new();
+
+        assert!(a.insert(1));
+        assert!(a.insert(3));
+        assert!(a.insert(5));
+        assert!(a.insert(9));
+        assert!(a.insert(11));
+        assert!(a.insert(16));
+        assert!(a.insert(19));
+        assert!(a.insert(24));
+
+        assert!(b.insert(-2));
+        assert!(b.insert(1));
+        assert!(b.insert(5));
+        assert!(b.insert(9));
+        assert!(b.insert(13));
+        assert!(b.insert(19));
+
+        let mut i = 0;
+        let expected = [-2, 1, 3, 5, 9, 11, 13, 16, 19, 24];
+        for x in a.union(&b) {
+            assert!(expected.contains(x));
+            i += 1;
+        }
+        assert_eq!(i, expected.len());
+    }
+
+    #[test]
+    fn test_from_map() {
+        let mut a = crate::HashMap::new();
+        a.insert(1, ());
+        a.insert(2, ());
+        a.insert(3, ());
+        a.insert(4, ());
+
+        let a: HashSet<_> = a.into();
+
+        assert_eq!(a.len(), 4);
+        assert!(a.contains(&1));
+        assert!(a.contains(&2));
+        assert!(a.contains(&3));
+        assert!(a.contains(&4));
+    }
+
+    #[test]
+    fn test_from_iter() {
+        let xs = [1, 2, 2, 3, 4, 5, 6, 7, 8, 9];
+
+        let set: HashSet<_> = xs.iter().copied().collect();
+
+        for x in &xs {
+            assert!(set.contains(x));
+        }
+
+        assert_eq!(set.iter().len(), xs.len() - 1);
+    }
+
+    #[test]
+    fn test_move_iter() {
+        let hs = {
+            let mut hs = HashSet::new();
+
+            hs.insert('a');
+            hs.insert('b');
+
+            hs
+        };
+
+        let v = hs.into_iter().collect::<Vec<char>>();
+        assert!(v == ['a', 'b'] || v == ['b', 'a']);
+    }
+
+    #[test]
+    fn test_eq() {
+        // These constants once happened to expose a bug in insert().
+        // I'm keeping them around to prevent a regression.
+        let mut s1 = HashSet::new();
+
+        s1.insert(1);
+        s1.insert(2);
+        s1.insert(3);
+
+        let mut s2 = HashSet::new();
+
+        s2.insert(1);
+        s2.insert(2);
+
+        assert!(s1 != s2);
+
+        s2.insert(3);
+
+        assert_eq!(s1, s2);
+    }
+
+    #[test]
+    fn test_show() {
+        let mut set = HashSet::new();
+        let empty = HashSet::<i32>::new();
+
+        set.insert(1);
+        set.insert(2);
+
+        let set_str = format!("{set:?}");
+
+        assert!(set_str == "{1, 2}" || set_str == "{2, 1}");
+        assert_eq!(format!("{empty:?}"), "{}");
+    }
+
+    #[test]
+    fn test_trivial_drain() {
+        let mut s = HashSet::<i32>::new();
+        for _ in s.drain() {}
+        assert!(s.is_empty());
+        drop(s);
+
+        let mut s = HashSet::<i32>::new();
+        drop(s.drain());
+        assert!(s.is_empty());
+    }
+
+    #[test]
+    fn test_drain() {
+        let mut s: HashSet<_> = (1..100).collect();
+
+        // try this a bunch of times to make sure we don't screw up internal state.
+        for _ in 0..20 {
+            assert_eq!(s.len(), 99);
+
+            {
+                let mut last_i = 0;
+                let mut d = s.drain();
+                for (i, x) in d.by_ref().take(50).enumerate() {
+                    last_i = i;
+                    assert!(x != 0);
+                }
+                assert_eq!(last_i, 49);
+            }
+
+            assert!(s.is_empty(), "s should be empty!");
+            // reset to try again.
+            s.extend(1..100);
+        }
+    }
+
+    #[test]
+    fn test_replace() {
+        use core::hash;
+
+        #[derive(Debug)]
+        struct Foo(&'static str, i32);
+
+        impl PartialEq for Foo {
+            fn eq(&self, other: &Self) -> bool {
+                self.0 == other.0
+            }
+        }
+
+        impl Eq for Foo {}
+
+        impl hash::Hash for Foo {
+            fn hash<H: hash::Hasher>(&self, h: &mut H) {
+                self.0.hash(h);
+            }
+        }
+
+        let mut s = HashSet::new();
+        assert_eq!(s.replace(Foo("a", 1)), None);
+        assert_eq!(s.len(), 1);
+        assert_eq!(s.replace(Foo("a", 2)), Some(Foo("a", 1)));
+        assert_eq!(s.len(), 1);
+
+        let mut it = s.iter();
+        assert_eq!(it.next(), Some(&Foo("a", 2)));
+        assert_eq!(it.next(), None);
+    }
+
+    #[test]
+    #[allow(clippy::needless_borrow)]
+    fn test_extend_ref() {
+        let mut a = HashSet::new();
+        a.insert(1);
+
+        a.extend([2, 3, 4]);
+
+        assert_eq!(a.len(), 4);
+        assert!(a.contains(&1));
+        assert!(a.contains(&2));
+        assert!(a.contains(&3));
+        assert!(a.contains(&4));
+
+        let mut b = HashSet::new();
+        b.insert(5);
+        b.insert(6);
+
+        a.extend(&b);
+
+        assert_eq!(a.len(), 6);
+        assert!(a.contains(&1));
+        assert!(a.contains(&2));
+        assert!(a.contains(&3));
+        assert!(a.contains(&4));
+        assert!(a.contains(&5));
+        assert!(a.contains(&6));
+    }
+
+    #[test]
+    fn test_retain() {
+        let xs = [1, 2, 3, 4, 5, 6];
+        let mut set: HashSet<i32> = xs.iter().copied().collect();
+        set.retain(|&k| k % 2 == 0);
+        assert_eq!(set.len(), 3);
+        assert!(set.contains(&2));
+        assert!(set.contains(&4));
+        assert!(set.contains(&6));
+    }
+
+    #[test]
+    fn test_extract_if() {
+        {
+            let mut set: HashSet<i32> = (0..8).collect();
+            let drained = set.extract_if(|&k| k % 2 == 0);
+            let mut out = drained.collect::<Vec<_>>();
+            out.sort_unstable();
+            assert_eq!(vec![0, 2, 4, 6], out);
+            assert_eq!(set.len(), 4);
+        }
+        {
+            let mut set: HashSet<i32> = (0..8).collect();
+            set.extract_if(|&k| k % 2 == 0).for_each(drop);
+            assert_eq!(set.len(), 4, "Removes non-matching items on drop");
+        }
+    }
+
+    #[test]
+    fn test_const_with_hasher() {
+        use core::hash::BuildHasher;
+        use rust_std::collections::hash_map::DefaultHasher;
+
+        #[derive(Clone)]
+        struct MyHasher;
+        impl BuildHasher for MyHasher {
+            type Hasher = DefaultHasher;
+
+            fn build_hasher(&self) -> DefaultHasher {
+                DefaultHasher::new()
+            }
+        }
+
+        const EMPTY_SET: HashSet<u32, MyHasher> = HashSet::with_hasher(MyHasher);
+
+        let mut set = EMPTY_SET;
+        set.insert(19);
+        assert!(set.contains(&19));
+    }
+
+    #[test]
+    fn rehash_in_place() {
+        let mut set = HashSet::new();
+
+        for i in 0..224 {
+            set.insert(i);
+        }
+
+        assert_eq!(
+            set.capacity(),
+            224,
+            "The set must be at or close to capacity to trigger a rehash"
+        );
+
+        for i in 100..1400 {
+            set.remove(&(i - 100));
+            set.insert(i);
+        }
+    }
+
+    #[test]
+    fn collect() {
+        // At the time of writing, this hits the ZST case in from_base_index
+        // (and without the `map`, it does not).
+        let mut _set: HashSet<_> = (0..3).map(|_| ()).collect();
+    }
+}
diff --git a/crates/rune-alloc/src/alloc/mod.rs b/crates/rune-alloc/src/alloc/mod.rs
new file mode 100644
index 000000000..c52f6e6fb
--- /dev/null
+++ b/crates/rune-alloc/src/alloc/mod.rs
@@ -0,0 +1,139 @@
+//! Allocated types.
+
+pub(crate) mod raw_vec;
+
+pub use self::borrow::TryToOwned;
+pub(crate) mod borrow;
+
+pub use self::allocator::{AllocError, Allocator, Global};
+pub(crate) mod allocator;
+
+pub use self::boxed::Box;
+pub mod boxed;
+
+pub(crate) mod btree;
+
+pub use self::hashbrown::HashMap;
+pub mod hashbrown;
+
+pub use self::vec::Vec;
+pub mod vec;
+
+pub use self::vec_deque::VecDeque;
+pub mod vec_deque;
+
+pub use self::try_clone::{TryClone, TryCopy};
+mod try_clone;
+
+pub use self::try_extend::TryExtend;
+mod try_extend;
+
+pub use self::try_from_iterator::{TryFromIterator, TryFromIteratorIn};
+mod try_from_iterator;
+
+pub use self::string::String;
+pub mod string;
+
+mod slice;
+pub mod str;
+
+#[cfg(test)]
+pub(crate) mod testing;
+
+use core::convert::Infallible;
+use core::fmt;
+
+/// An error type returned when a custom error is available alongside an allocation error.
+#[derive(Debug)]
+pub enum CustomError<E> {
+    /// Custom error being returned.
+    Custom(E),
+    /// Try reserve error being returned.
+    Error(Error),
+}
+
+impl<E> From<Error> for CustomError<E> {
+    fn from(error: Error) -> Self {
+        CustomError::Error(error)
+    }
+}
+
+impl<E> From<AllocError> for CustomError<E> {
+    fn from(error: AllocError) -> Self {
+        CustomError::Error(Error::from(error))
+    }
+}
+
+/// The error type for methods which allocate or reserve.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+#[non_exhaustive]
+pub enum Error {
+    /// Error due to the computed capacity exceeding the collection's maximum
+    /// (usually `isize::MAX` bytes).
+    #[doc(hidden)]
+    CapacityOverflow,
+
+    /// Error when computing layout.
+    #[doc(hidden)]
+    LayoutError,
+
+    /// The memory allocator returned an error.
+    #[doc(hidden)]
+    AllocError {
+        /// The layout of the allocation request that failed.
+        error: AllocError,
+    },
+}
+
+impl From<AllocError> for Error {
+    #[inline]
+    fn from(error: AllocError) -> Self {
+        Error::AllocError { error }
+    }
+}
+
+impl fmt::Display for Error {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            Error::CapacityOverflow => write!(f, "Capacity overflow"),
+            Error::LayoutError => write!(f, "Layout error"),
+            Error::AllocError { error } => error.fmt(f),
+        }
+    }
+}
+
+#[cfg(feature = "std")]
+impl ::rust_std::error::Error for Error {
+    fn source(&self) -> Option<&(dyn ::rust_std::error::Error + 'static)> {
+        match self {
+            Error::AllocError { error } => Some(error),
+            _ => None,
+        }
+    }
+}
+
+pub(crate) trait SizedTypeProperties: Sized {
+    const IS_ZST: bool = core::mem::size_of::<Self>() == 0;
+    const NEEDS_DROP: bool = core::mem::needs_drop::<Self>();
+}
+
+impl<T> SizedTypeProperties for T {}
+
+#[inline(always)]
+pub(crate) fn into_ok<T>(result: Result<T, Infallible>) -> T {
+    match result {
+        Ok(value) => value,
+        Err(error) => match error {},
+    }
+}
+
+#[inline(always)]
+pub(crate) fn into_ok_try<T>(result: Result<T, CustomError<Infallible>>) -> Result<T, Error> {
+    match result {
+        Ok(value) => Ok(value),
+        Err(error) => match error {
+            CustomError::Error(error) => Err(error),
+            CustomError::Custom(error) => match error {},
+        },
+    }
+}
diff --git a/crates/rune-alloc/src/alloc/raw_vec.rs b/crates/rune-alloc/src/alloc/raw_vec.rs
new file mode 100644
index 000000000..405f103c9
--- /dev/null
+++ b/crates/rune-alloc/src/alloc/raw_vec.rs
@@ -0,0 +1,450 @@
+use core::alloc::{Layout, LayoutError};
+use core::cmp;
+use core::mem::{self, ManuallyDrop, MaybeUninit};
+use core::slice;
+
+use crate::alloc::boxed::Box;
+use crate::alloc::{AllocError, Allocator, Error, Global, SizedTypeProperties};
+use crate::ptr::Unique;
+use crate::ptr::{self, NonNull};
+
+enum AllocInit {
+    /// The contents of the new memory are uninitialized.
+    Uninitialized,
+    /// The new memory is guaranteed to be zeroed.
+    Zeroed,
+}
+
+/// A low-level utility for more ergonomically allocating, reallocating, and deallocating
+/// a buffer of memory on the heap without having to worry about all the corner cases
+/// involved. This type is excellent for building your own data structures like Vec and VecDeque.
+/// In particular:
+///
+/// * Produces `Unique::dangling()` on zero-sized types.
+/// * Produces `Unique::dangling()` on zero-length allocations.
+/// * Avoids freeing `Unique::dangling()`.
+/// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics).
+/// * Guards against 32-bit systems allocating more than `isize::MAX` bytes.
+/// * Guards against overflowing your length.
+/// * Calls `handle_alloc_error` for fallible allocations.
+/// * Contains a `ptr::Unique` and thus endows the user with all related benefits.
+/// * Uses the excess returned from the allocator to use the largest available capacity.
+///
+/// This type does not in any way inspect the memory that it manages. When dropped it *will*
+/// free its memory, but it *won't* try to drop its contents. It is up to the user of `RawVec`
+/// to handle the actual things *stored* inside of a `RawVec`.
+///
+/// Note that the excess of a zero-sized type is always infinite, so `capacity()` always returns
+/// `usize::MAX`. This means that you need to be careful when round-tripping this type with a
+/// `Box<[T]>`, since `capacity()` won't yield the length.
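+///
+/// As an illustration of the zero-sized-type behavior described above (a
+/// sketch only, not a doctest, since this type is crate-private):
+///
+/// ```text
+/// let zst: RawVec<()> = RawVec::new();    // never allocates
+/// assert_eq!(zst.capacity(), usize::MAX); // "infinite" excess for ZSTs
+///
+/// let ints: RawVec<u32> = RawVec::new();  // dangling pointer, no allocation
+/// assert_eq!(ints.capacity(), 0);         // allocation is delayed
+/// ```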
+#[allow(missing_debug_implementations)]
+pub(crate) struct RawVec<T, A: Allocator = Global> {
+    ptr: Unique<T>,
+    cap: usize,
+    alloc: A,
+}
+
+impl<T> RawVec<T, Global> {
+    /// HACK(Centril): This exists because stable `const fn` can only call
+    /// stable `const fn`, so they cannot call `Self::new()`.
+    ///
+    /// If you change `RawVec<T>::new` or dependencies, please take care to not
+    /// introduce anything that would truly const-call something unstable.
+    pub const NEW: Self = Self::new();
+
+    /// Creates the biggest possible `RawVec` (on the system heap)
+    /// without allocating. If `T` has positive size, then this makes a
+    /// `RawVec` with capacity `0`. If `T` is zero-sized, then it makes a
+    /// `RawVec` with capacity `usize::MAX`. Useful for implementing
+    /// delayed allocation.
+    #[must_use]
+    pub const fn new() -> Self {
+        Self::new_in(Global)
+    }
+}
+
+impl<T, A: Allocator> RawVec<T, A> {
+    // Tiny Vecs are dumb. Skip to:
+    // - 8 if the element size is 1, because any heap allocator is likely
+    //   to round up a request of less than 8 bytes to at least 8 bytes.
+    // - 4 if elements are moderate-sized (<= 1 KiB).
+    // - 1 otherwise, to avoid wasting too much space for very short Vecs.
+    pub(crate) const MIN_NON_ZERO_CAP: usize = if mem::size_of::<T>() == 1 {
+        8
+    } else if mem::size_of::<T>() <= 1024 {
+        4
+    } else {
+        1
+    };
+
+    /// Like `new`, but parameterized over the choice of allocator for
+    /// the returned `RawVec`.
+    pub const fn new_in(alloc: A) -> Self {
+        // `cap: 0` means "unallocated". zero-sized types are ignored.
+        Self {
+            ptr: Unique::dangling(),
+            cap: 0,
+            alloc,
+        }
+    }
+
+    /// Like `with_capacity`, but parameterized over the choice of
+    /// allocator for the returned `RawVec`.
+    #[inline]
+    pub(crate) fn try_with_capacity_in(capacity: usize, alloc: A) -> Result<Self, Error> {
+        Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc)
+    }
+
+    /// Like `with_capacity_zeroed`, but parameterized over the choice
+    /// of allocator for the returned `RawVec`.
+    #[inline]
+    pub(crate) fn try_with_capacity_zeroed_in(capacity: usize, alloc: A) -> Result<Self, Error> {
+        Self::try_allocate_in(capacity, AllocInit::Zeroed, alloc)
+    }
+
+    /// Converts the entire buffer into `Box<[MaybeUninit<T>]>` with the specified `len`.
+    ///
+    /// Note that this will correctly reconstitute any `cap` changes
+    /// that may have been performed. (See description of type for details.)
+    ///
+    /// # Safety
+    ///
+    /// * `len` must be greater than or equal to the most recently requested capacity, and
+    /// * `len` must be less than or equal to `self.capacity()`.
+    ///
+    /// Note, that the requested capacity and `self.capacity()` could differ, as
+    /// an allocator could overallocate and return a greater memory block than requested.
+    pub unsafe fn into_box(self, len: usize) -> Box<[MaybeUninit<T>], A> {
+        // Sanity-check one half of the safety requirement (we cannot check the other half).
+        debug_assert!(
+            len <= self.capacity(),
+            "`len` must be smaller than or equal to `self.capacity()`"
+        );
+
+        let me = ManuallyDrop::new(self);
+        unsafe {
+            let slice = slice::from_raw_parts_mut(me.ptr() as *mut MaybeUninit<T>, len);
+            Box::from_raw_in(slice, ptr::read(&me.alloc))
+        }
+    }
+
+    fn try_allocate_in(capacity: usize, init: AllocInit, alloc: A) -> Result<Self, Error> {
+        // Don't allocate here because `Drop` will not deallocate when `capacity` is 0.
+        if T::IS_ZST || capacity == 0 {
+            Ok(Self::new_in(alloc))
+        } else {
+            // We avoid `unwrap_or_else` here because it bloats the amount of
+            // LLVM IR generated.
+            let layout = match Layout::array::<T>(capacity) {
+                Ok(layout) => layout,
+                Err(_) => return Err(Error::CapacityOverflow),
+            };
+            match alloc_guard(layout.size()) {
+                Ok(_) => {}
+                Err(_) => return Err(Error::CapacityOverflow),
+            }
+            let ptr = match init {
+                AllocInit::Uninitialized => alloc.allocate(layout)?,
+                AllocInit::Zeroed => alloc.allocate_zeroed(layout)?,
+            };
+
+            // Allocators currently return a `NonNull<[u8]>` whose length
+            // matches the size requested. If that ever changes, the capacity
+            // here should change to `ptr.len() / mem::size_of::<T>()`.
+            Ok(Self {
+                ptr: unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) },
+                cap: capacity,
+                alloc,
+            })
+        }
+    }
+
+    /// Reconstitutes a `RawVec` from a pointer, capacity, and allocator.
+    ///
+    /// # Safety
+    ///
+    /// The `ptr` must be allocated (via the given allocator `alloc`), and with the given
+    /// `capacity`.
+    /// The `capacity` cannot exceed `isize::MAX` for sized types (only a concern on 32-bit
+    /// systems). ZST vectors may have a capacity up to `usize::MAX`.
+    /// If the `ptr` and `capacity` come from a `RawVec` created via `alloc`, then this is
+    /// guaranteed.
+    #[inline]
+    pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, alloc: A) -> Self {
+        Self {
+            ptr: unsafe { Unique::new_unchecked(ptr) },
+            cap: capacity,
+            alloc,
+        }
+    }
+
+    /// Gets a raw pointer to the start of the allocation. Note that this is
+    /// `Unique::dangling()` if `capacity == 0` or `T` is zero-sized. In the former case, you must
+    /// be careful.
+    #[inline]
+    pub(crate) fn ptr(&self) -> *mut T {
+        self.ptr.as_ptr()
+    }
+
+    /// Gets the capacity of the allocation.
+    ///
+    /// This will always be `usize::MAX` if `T` is zero-sized.
+    #[inline(always)]
+    pub(crate) fn capacity(&self) -> usize {
+        if T::IS_ZST {
+            usize::MAX
+        } else {
+            self.cap
+        }
+    }
+
+    /// Returns a shared reference to the allocator backing this `RawVec`.
+    pub(crate) fn allocator(&self) -> &A {
+        &self.alloc
+    }
+
+    fn current_memory(&self) -> Option<(NonNull<u8>, Layout)> {
+        if T::IS_ZST || self.cap == 0 {
+            None
+        } else {
+            // We could use Layout::array here which ensures the absence of isize and usize overflows
+            // and could hypothetically handle differences between stride and size, but this memory
+            // has already been allocated so we know it can't overflow and currently rust does not
+            // support such types. So we can do better by skipping some checks and avoid an unwrap.
+            assert!(mem::size_of::<T>() % mem::align_of::<T>() == 0);
+
+            unsafe {
+                let align = mem::align_of::<T>();
+                let size = mem::size_of::<T>().wrapping_mul(self.cap);
+                let layout = Layout::from_size_align_unchecked(size, align);
+                Some((self.ptr.cast().into(), layout))
+            }
+        }
+    }
+
+    /// Ensures that the buffer contains at least enough space to hold `len +
+    /// additional` elements. If it doesn't already have enough capacity, will
+    /// reallocate enough space plus comfortable slack space to get amortized
+    /// *O*(1) behavior.
+    ///
+    /// If `len` exceeds `self.capacity()`, this may fail to actually allocate
+    /// the requested space. This is not really unsafe, but the unsafe
+    /// code *you* write that relies on the behavior of this function may break.
+    ///
+    /// This is ideal for implementing a bulk-push operation like `extend`.
+    ///
+    /// # Errors
+    ///
+    /// Returns an error if the new capacity exceeds `isize::MAX` bytes, or if
+    /// the allocator reports a failure.
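+    ///
+    /// Illustrative use (a sketch, not a doctest, since this type is
+    /// crate-private): a bulk-push routine makes room once, then writes:
+    ///
+    /// ```text
+    /// buf.try_reserve(len, additional)?;
+    /// // There is now space for `additional` more elements past `len`,
+    /// // starting at `buf.ptr().add(len)`.
+    /// ```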
+    pub(crate) fn try_reserve(&mut self, len: usize, additional: usize) -> Result<(), Error> {
+        if self.needs_to_grow(len, additional) {
+            self.grow_amortized(len, additional)?;
+        }
+
+        Ok(())
+    }
+
+    /// A specialized version of `reserve()` used only by the hot and
+    /// oft-instantiated `Vec::push()`, which does its own capacity check.
+    pub(crate) fn try_reserve_for_push(&mut self, len: usize) -> Result<(), Error> {
+        self.grow_amortized(len, 1)
+    }
+
+    /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting.
+    pub(crate) fn try_reserve_exact(&mut self, len: usize, additional: usize) -> Result<(), Error> {
+        if self.needs_to_grow(len, additional) {
+            self.grow_exact(len, additional)
+        } else {
+            Ok(())
+        }
+    }
+
+    /// Shrinks the buffer down to the specified capacity. If the given
+    /// capacity is 0, the buffer is completely deallocated.
+    ///
+    /// # Errors
+    ///
+    /// Returns an error if the allocator fails to shrink the allocation.
+    pub(crate) fn try_shrink_to_fit(&mut self, cap: usize) -> Result<(), Error> {
+        self.shrink(cap)
+    }
+}
+
+impl<T, A: Allocator> RawVec<T, A> {
+    /// Returns `true` if the buffer needs to grow to fulfill the needed extra capacity.
+    /// Mainly used to make inlining reserve-calls possible without inlining `grow`.
+    fn needs_to_grow(&self, len: usize, additional: usize) -> bool {
+        additional > self.capacity().wrapping_sub(len)
+    }
+
+    fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) {
+        // Allocators currently return a `NonNull<[u8]>` whose length matches
+        // the size requested. If that ever changes, the capacity here should
+        // change to `ptr.len() / mem::size_of::<T>()`.
+        self.ptr = unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) };
+        self.cap = cap;
+    }
+
+    // This method is usually instantiated many times. So we want it to be as
+    // small as possible, to improve compile times. But we also want as much of
+    // its contents to be statically computable as possible, to make the
+    // generated code run faster. Therefore, this method is carefully written
+    // so that all of the code that depends on `T` is within it, while as much
+    // of the code that doesn't depend on `T` as possible is in functions that
+    // are non-generic over `T`.
+    fn grow_amortized(&mut self, len: usize, additional: usize) -> Result<(), Error> {
+        // This is ensured by the calling contexts.
+        debug_assert!(additional > 0);
+
+        if T::IS_ZST {
+            // Since we return a capacity of `usize::MAX` when `elem_size` is
+            // 0, getting to here necessarily means the `RawVec` is overfull.
+            return Err(Error::CapacityOverflow);
+        }
+
+        // Nothing we can really do about these checks, sadly.
+        let required_cap = len.checked_add(additional).ok_or(Error::CapacityOverflow)?;
+
+        // This guarantees exponential growth. The doubling cannot overflow
+        // because `cap <= isize::MAX` and the type of `cap` is `usize`.
+        let cap = cmp::max(self.cap * 2, required_cap);
+        let cap = cmp::max(Self::MIN_NON_ZERO_CAP, cap);
+
+        let new_layout = Layout::array::<T>(cap);
+
+        // `finish_grow` is non-generic over `T`.
+        let ptr = finish_grow(new_layout, self.current_memory(), &self.alloc)?;
+        self.set_ptr_and_cap(ptr, cap);
+        Ok(())
+    }
+
+    // The constraints on this method are much the same as those on
+    // `grow_amortized`, but this method is usually instantiated less often so
+    // it's less critical.
+    fn grow_exact(&mut self, len: usize, additional: usize) -> Result<(), Error> {
+        if T::IS_ZST {
+            // Since we return a capacity of `usize::MAX` when the type size is
+            // 0, getting to here necessarily means the `RawVec` is overfull.
+    // The constraints on this method are much the same as those on
+    // `grow_amortized`, but this method is usually instantiated less often so
+    // it's less critical.
+    fn grow_exact(&mut self, len: usize, additional: usize) -> Result<(), Error> {
+        if T::IS_ZST {
+            // Since we return a capacity of `usize::MAX` when the type size is
+            // 0, getting to here necessarily means the `RawVec` is overfull.
+            return Err(Error::CapacityOverflow);
+        }
+
+        let cap = len.checked_add(additional).ok_or(Error::CapacityOverflow)?;
+        let new_layout = Layout::array::<T>(cap);
+
+        // `finish_grow` is non-generic over `T`.
+        let ptr = finish_grow(new_layout, self.current_memory(), &self.alloc)?;
+        self.set_ptr_and_cap(ptr, cap);
+        Ok(())
+    }
+
+    fn shrink(&mut self, cap: usize) -> Result<(), Error> {
+        // See `current_memory()` for why this assert is here.
+        assert!(mem::size_of::<T>() % mem::align_of::<T>() == 0);
+        assert!(
+            cap <= self.capacity(),
+            "Tried to shrink to a larger capacity"
+        );
+
+        let (ptr, layout) = if let Some(mem) = self.current_memory() {
+            mem
+        } else {
+            return Ok(());
+        };
+
+        // If shrinking to 0, deallocate the buffer. We don't reach this point
+        // for the `T::IS_ZST` case since `current_memory()` will have returned
+        // `None`.
+        if cap == 0 {
+            unsafe { self.alloc.deallocate(ptr, layout) };
+            self.ptr = Unique::dangling();
+            self.cap = 0;
+        } else {
+            let ptr = unsafe {
+                // `Layout::array` cannot overflow here because it would have
+                // overflowed earlier when capacity was larger.
+                let new_size = mem::size_of::<T>().wrapping_mul(cap);
+                let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
+                self.alloc
+                    .shrink(ptr, layout, new_layout)
+                    .map_err(|_| AllocError { layout: new_layout })?
+            };
+            self.set_ptr_and_cap(ptr, cap);
+        }
+        Ok(())
+    }
+}
+
+// This function is outside `RawVec` to minimize compile times. See the comment
+// above `RawVec::grow_amortized` for details. (The `A` parameter isn't
+// significant, because the number of different `A` types seen in practice is
+// much smaller than the number of `T` types.)
+#[inline(never)]
+fn finish_grow<A>(
+    new_layout: Result<Layout, LayoutError>,
+    current_memory: Option<(NonNull<u8>, Layout)>,
+    alloc: &A,
+) -> Result<NonNull<[u8]>, Error>
+where
+    A: Allocator,
+{
+    // Check for the error here to minimize the size of `RawVec::grow_*`.
+    let new_layout = new_layout.map_err(|_| Error::CapacityOverflow)?;
+
+    alloc_guard(new_layout.size())?;
+
+    let memory = if let Some((ptr, old_layout)) = current_memory {
+        // The allocator checks for alignment equality.
+        debug_assert_eq!(old_layout.align(), new_layout.align());
+        unsafe { alloc.grow(ptr, old_layout, new_layout) }
+    } else {
+        alloc.allocate(new_layout)
+    };
+
+    memory.map_err(|_| AllocError { layout: new_layout }.into())
+}
+
+#[cfg(not(rune_nightly))]
+impl<T, A: Allocator> Drop for RawVec<T, A> {
+    /// Frees the memory owned by the `RawVec` *without* trying to drop its contents.
+    fn drop(&mut self) {
+        if let Some((ptr, layout)) = self.current_memory() {
+            unsafe { self.alloc.deallocate(ptr, layout) }
+        }
+    }
+}
+
+#[cfg(rune_nightly)]
+unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec<T, A> {
+    /// Frees the memory owned by the `RawVec` *without* trying to drop its contents.
+    fn drop(&mut self) {
+        if let Some((ptr, layout)) = self.current_memory() {
+            unsafe { self.alloc.deallocate(ptr, layout) }
+        }
+    }
+}
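Both `Drop` impls free the buffer with whatever `Layout` `current_memory` reconstructs, so that layout must match the one the allocation was originally made with; handing the allocator a different size or alignment on deallocation is undefined behavior. A minimal sketch of that contract against `std`'s global allocator:

```rust
use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};

fn main() {
    // Allocate room for 16 u32s.
    let layout = Layout::array::<u32>(16).unwrap();

    unsafe {
        let ptr = alloc(layout);

        if ptr.is_null() {
            handle_alloc_error(layout);
        }

        // Freeing must use the same layout the allocation was made with;
        // `current_memory` rebuilds it from the stored capacity for
        // exactly this reason.
        dealloc(ptr, layout);
    }
}
```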
+// We need to guarantee the following:
+// * We don't ever allocate `> isize::MAX` byte-size objects.
+// * We don't overflow `usize::MAX` and actually allocate too little.
+//
+// On 64-bit we just need to check for overflow since trying to allocate
+// `> isize::MAX` bytes will surely fail. On 32-bit and 16-bit we need to add
+// an extra guard for this in case we're running on a platform which can use
+// all 4GB in user-space, e.g., PAE or x32.
+#[inline]
+fn alloc_guard(alloc_size: usize) -> Result<(), Error> {
+    if usize::BITS < 64 && alloc_size > isize::MAX as usize {
+        Err(Error::CapacityOverflow)
+    } else {
+        Ok(())
+    }
+}
diff --git a/crates/rune-alloc/src/alloc/slice.rs b/crates/rune-alloc/src/alloc/slice.rs
new file mode 100644
index 000000000..ab18e8dd2
--- /dev/null
+++ b/crates/rune-alloc/src/alloc/slice.rs
@@ -0,0 +1,133 @@
+use crate::alloc::{Allocator, Box, Error, Global, TryClone, TryToOwned, Vec};
+
+/// Converts `this` into a vector without clones or allocation.
+///
+/// The resulting vector can be converted back into a box via `Vec<T>`'s
+/// `try_into_boxed_slice` method.
+///
+/// # Examples
+///
+/// ```
+/// use rune_alloc::Box;
+///
+/// let s: Box<[i32]> = Box::try_from([10, 40, 30])?;
+/// let x = rune_alloc::slice::into_vec(s);
+/// // `s` cannot be used anymore because it has been converted into `x`.
+///
+/// assert_eq!(&x[..], &[10, 40, 30][..]);
+/// # Ok::<_, rune_alloc::Error>(())
+/// ```
+#[inline]
+pub fn into_vec<T, A: Allocator>(this: Box<[T], A>) -> Vec<T, A> {
+    // N.B., see the `hack` module in this file for more details.
+    hack::into_vec(this)
+}
+
+#[inline]
+pub(crate) fn to_vec<T, A: Allocator>(s: &[T], alloc: A) -> Result<Vec<T, A>, Error>
+where
+    T: TryClone,
+{
+    hack::to_vec(s, alloc)
+}
+
+impl<T> TryToOwned for [T]
+where
+    T: TryClone,
+{
+    type Owned = Vec<T>;
+
+    #[inline]
+    fn try_to_owned(&self) -> Result<Self::Owned, Error> {
+        hack::to_vec(self, Global)
+    }
+}
+
+// HACK(japaric): With cfg(test) `impl [T]` is not available, these three
+// functions are actually methods that are in `impl [T]` but not in
+// `core::slice::SliceExt` - we need to supply these functions for the
+// `test_permutations` test
+pub(crate) mod hack {
+    use crate::alloc::{Allocator, Box, Error, TryClone, Vec};
+
+    // We shouldn't add an inline attribute to this since this is used in the
+    // `vec!` macro mostly and causes a perf regression. See #71204 for
+    // discussion and perf results.
+    pub(crate) fn into_vec<T, A: Allocator>(b: Box<[T], A>) -> Vec<T, A> {
+        unsafe {
+            let len = b.len();
+            let (b, alloc) = Box::into_raw_with_allocator(b);
+            Vec::from_raw_parts_in(b as *mut T, len, len, alloc)
+        }
+    }
+
+    #[inline]
+    pub(crate) fn to_vec<T: ConvertVec, A: Allocator>(
+        s: &[T],
+        alloc: A,
+    ) -> Result<Vec<T, A>, Error> {
+        T::to_vec(s, alloc)
+    }
+
+    pub(crate) trait ConvertVec {
+        fn to_vec<A: Allocator>(s: &[Self], alloc: A) -> Result<Vec<Self, A>, Error>
+        where
+            Self: Sized;
+    }
+
+    impl<T> ConvertVec for T
+    where
+        T: TryClone,
+    {
+        #[inline]
+        fn to_vec<A: Allocator>(s: &[Self], alloc: A) -> Result<Vec<Self, A>, Error> {
+            struct DropGuard<'a, T, A: Allocator> {
+                vec: &'a mut Vec<T, A>,
+                num_init: usize,
+            }
+            impl<'a, T, A: Allocator> Drop for DropGuard<'a, T, A> {
+                #[inline]
+                fn drop(&mut self) {
+                    // SAFETY:
+                    // items were marked initialized in the loop below
+                    unsafe {
+                        self.vec.set_len(self.num_init);
+                    }
+                }
+            }
+            let mut vec = Vec::try_with_capacity_in(s.len(), alloc)?;
+            let mut guard = DropGuard {
+                vec: &mut vec,
+                num_init: 0,
+            };
+            let slots = guard.vec.spare_capacity_mut();
+            // .take(slots.len()) is necessary for LLVM to remove bounds checks
+            // and has better codegen than zip.
+            for (i, b) in s.iter().enumerate().take(slots.len()) {
+                guard.num_init = i;
+                slots[i].write(b.try_clone()?);
+            }
+            core::mem::forget(guard);
+            // SAFETY:
+            // the vec was allocated and initialized above to at least this length.
+            unsafe {
+                vec.set_len(s.len());
+            }
+            Ok(vec)
+        }
+    }
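The `DropGuard` in the fallible clone loop above is what keeps `to_vec` leak-free and panic-safe: if `try_clone` returns `Err` (or panics) partway through, the guard's `Drop` sets the length back to the initialized prefix, so those elements are dropped and the uninitialized tail is simply abandoned. A reduced, runnable model of the same pattern on `std::vec::Vec`, with the fallible allocation and cloning swapped for their infallible `std` counterparts:

```rust
// Reduced model of `to_vec`'s drop guard using std's Vec. If the loop
// exits early, Guard::drop truncates to the initialized prefix.
fn clone_all(s: &[String]) -> Vec<String> {
    struct Guard<'a> {
        vec: &'a mut Vec<String>,
        num_init: usize,
    }

    impl Drop for Guard<'_> {
        fn drop(&mut self) {
            // SAFETY: the first `num_init` slots were written below.
            unsafe { self.vec.set_len(self.num_init) };
        }
    }

    let mut vec = Vec::with_capacity(s.len());
    let mut guard = Guard { vec: &mut vec, num_init: 0 };
    let slots = guard.vec.spare_capacity_mut();

    for (i, b) in s.iter().enumerate().take(slots.len()) {
        guard.num_init = i;
        slots[i].write(b.clone()); // a panic here drops only slots[..i]
    }

    core::mem::forget(guard);
    // SAFETY: all `s.len()` slots are now initialized.
    unsafe { vec.set_len(s.len()) };
    vec
}

fn main() {
    let v = clone_all(&["a".to_string(), "b".to_string()]);
    assert_eq!(v, ["a", "b"]);
}
```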
+
+    #[cfg(rune_nightly)]
+    impl<T: TryCopy> ConvertVec for T {
+        #[inline]
+        fn to_vec<A: Allocator>(s: &[Self], alloc: A) -> Result<Vec<Self, A>, Error> {
+            let mut v = Vec::try_with_capacity_in(s.len(), alloc)?;
+
+            // SAFETY:
+            // allocated above with the capacity of `s`, and initialized to
+            // `s.len()` in copy_to_nonoverlapping below.
+            unsafe {
+                s.as_ptr().copy_to_nonoverlapping(v.as_mut_ptr(), s.len());
+                v.set_len(s.len());
+            }
+            Ok(v)
+        }
+    }
+}
diff --git a/crates/rune-alloc/src/alloc/str.rs b/crates/rune-alloc/src/alloc/str.rs
new file mode 100644
index 000000000..a26abae1a
--- /dev/null
+++ b/crates/rune-alloc/src/alloc/str.rs
@@ -0,0 +1,60 @@
+use crate::alloc::{Allocator, Box, Error, Global, String, TryToOwned};
+
+/// Converts a boxed slice of bytes to a boxed string slice without checking
+/// that the string contains valid UTF-8.
+///
+/// # Examples
+///
+/// ```
+/// use rune_alloc::Box;
+/// use rune_alloc::str;
+///
+/// let smile_utf8 = Box::try_from([226, 152, 186])?;
+/// let smile = unsafe { str::from_boxed_utf8_unchecked(smile_utf8) };
+///
+/// assert_eq!("☺", &*smile);
+/// # Ok::<_, rune_alloc::Error>(())
+/// ```
+///
+/// # Safety
+///
+/// The provided buffer must be valid UTF-8.
+#[must_use]
+#[inline]
+pub unsafe fn from_boxed_utf8_unchecked<A: Allocator>(v: Box<[u8], A>) -> Box<str, A> {
+    let (ptr, alloc) = Box::into_raw_with_allocator(v);
+    unsafe { Box::from_raw_in(ptr as *mut str, alloc) }
+}
+
+/// Converts a [`Box<str>`] into a [`String`] without copying or allocating.
+///
+/// # Examples
+///
+/// Basic usage:
+///
+/// ```
+/// use rune_alloc::{String, TryClone};
+/// use rune_alloc::str;
+///
+/// let string = String::try_from("birthday gift")?;
+/// let boxed_str = string.try_clone()?.try_into_boxed_str()?;
+///
+/// assert_eq!(str::into_string(boxed_str), string);
+/// # Ok::<_, rune_alloc::Error>(())
+/// ```
+#[must_use = "`self` will be dropped if the result is not used"]
+#[inline]
+pub fn into_string<A: Allocator>(this: Box<str, A>) -> String<A> {
+    let slice = Box::<[u8], A>::from(this);
+    let vec = crate::alloc::slice::into_vec(slice);
+    unsafe { String::<A>::from_utf8_unchecked(vec) }
+}
+
+impl TryToOwned for str {
+    type Owned = String<Global>;
+
+    #[inline]
+    fn try_to_owned(&self) -> Result<String<Global>, Error> {
+        Ok(unsafe { String::from_utf8_unchecked(self.as_bytes().try_to_owned()?) })
+    }
+}
diff --git a/crates/rune-alloc/src/alloc/string.rs b/crates/rune-alloc/src/alloc/string.rs
new file mode 100644
index 000000000..5b559620a
--- /dev/null
+++ b/crates/rune-alloc/src/alloc/string.rs
@@ -0,0 +1,2255 @@
+//! A UTF-8–encoded, growable string.
+//!
+//! This module contains the [`String`] type, the [`TryToString`] trait for
+//! converting to strings, and several error types that may result from working
+//! with [`String`]s.
+//!
+//! # Examples
+//!
+//! There are multiple ways to create a new [`String`] from a string literal:
+//!
+//! ```
+//! use rune_alloc::String;
+//! use rune_alloc::prelude::*;
+//!
+//! let s = "Hello".try_to_string()?;
+//!
+//! let s = String::try_from("world")?;
+//! let s: String = "also this".try_into()?;
+//! # Ok::<_, rune_alloc::Error>(())
+//! ```
+//!
+//! If you have a vector of valid UTF-8 bytes, you can make a [`String`] out of
+//! it. You can do the reverse too.
+//!
+//! ```
+//! use rune_alloc::String;
+//! use rune_alloc::prelude::*;
+//!
+//! let sparkle_heart = rune_alloc::try_vec![240, 159, 146, 150];
+//!
+//! // We know these bytes are valid, so we'll use `unwrap()`.
+//! let sparkle_heart = String::from_utf8(sparkle_heart).unwrap();
+//!
+//! assert_eq!("💖", sparkle_heart);
+//!
+//! let bytes = sparkle_heart.into_bytes();
+//!
+//! assert_eq!(bytes, [240, 159, 146, 150]);
+//! # Ok::<_, rune_alloc::Error>(())
+//!
``` + +#[cfg(feature = "serde")] +mod serde; + +pub use self::try_to_string::TryToString; +pub(crate) mod try_to_string; + +use core::borrow::Borrow; +use core::cmp::Ordering; +use core::fmt; +use core::hash; +use core::iter::FusedIterator; +use core::ops::Bound::{Excluded, Included, Unbounded}; +use core::ops::{self, Index, IndexMut, Range, RangeBounds}; +use core::ptr; +use core::slice; +use core::str::{from_utf8, from_utf8_unchecked, from_utf8_unchecked_mut}; +use core::str::{Chars, Utf8Error}; + +use super::{Allocator, Box, Error, Global, TryClone, TryExtend, TryFromIteratorIn, Vec}; + +#[cfg(test)] +use crate::alloc::testing::*; +use crate::fmt::TryWrite; +use crate::slice::range as slice_range; + +/// A UTF-8–encoded, growable string. +/// +/// The `String` type is the most common string type that has ownership over the +/// contents of the string. It has a close relationship with its borrowed +/// counterpart, the primitive [`str`]. +/// +/// # Examples +/// +/// You can create a `String` from [a literal string][`&str`] with +/// [`String::try_from`]: +/// +/// [`String::try_from`]: TryFrom::try_from +/// +/// ``` +/// use rune_alloc::String; +/// +/// let hello = String::try_from("Hello, world!")?; +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// You can append a [`char`] to a `String` with the [`try_push`] method, and +/// append a [`&str`] with the [`try_push_str`] method: +/// +/// ``` +/// use rune_alloc::String; +/// +/// let mut hello = String::try_from("Hello, ")?; +/// +/// hello.try_push('w')?; +/// hello.try_push_str("orld!")?; +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// [`try_push`]: String::try_push +/// [`try_push_str`]: String::try_push_str +/// +/// If you have a vector of UTF-8 bytes, you can create a `String` from it with +/// the [`from_utf8`] method: +/// +/// ``` +/// use rune_alloc::String; +/// +/// // some bytes, in a vector +/// let sparkle_heart = rune_alloc::try_vec![240, 159, 146, 150]; +/// +/// // We know these bytes are valid, so we'll use `unwrap()`. +/// let sparkle_heart = String::from_utf8(sparkle_heart).unwrap(); +/// +/// assert_eq!("💖", sparkle_heart); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// [`from_utf8`]: String::from_utf8 +/// +/// # UTF-8 +/// +/// `String`s are always valid UTF-8. If you need a non-UTF-8 string, consider +/// [`OsString`]. It is similar, but without the UTF-8 constraint. Because UTF-8 +/// is a variable width encoding, `String`s are typically smaller than an array of +/// the same `chars`: +/// +/// ``` +/// use core::mem; +/// +/// // `s` is ASCII which represents each `char` as one byte +/// let s = "hello"; +/// assert_eq!(s.len(), 5); +/// +/// // A `char` array with the same contents would be longer because +/// // every `char` is four bytes +/// let s = ['h', 'e', 'l', 'l', 'o']; +/// let size: usize = s.into_iter().map(|c| mem::size_of_val(&c)).sum(); +/// assert_eq!(size, 20); +/// +/// // However, for non-ASCII strings, the difference will be smaller +/// // and sometimes they are the same +/// let s = "💖💖💖💖💖"; +/// assert_eq!(s.len(), 20); +/// +/// let s = ['💖', '💖', '💖', '💖', '💖']; +/// let size: usize = s.into_iter().map(|c| mem::size_of_val(&c)).sum(); +/// assert_eq!(size, 20); +/// ``` +/// +/// This raises interesting questions as to how `s[i]` should work. +/// What should `i` be here? Several options include byte indices and +/// `char` indices but, because of UTF-8 encoding, only byte indices +/// would provide constant time indexing. 
Getting the `i`th `char`, for +/// example, is available using [`chars`]: +/// +/// ``` +/// let s = "hello"; +/// let third_character = s.chars().nth(2); +/// assert_eq!(third_character, Some('l')); +/// +/// let s = "💖💖💖💖💖"; +/// let third_character = s.chars().nth(2); +/// assert_eq!(third_character, Some('💖')); +/// ``` +/// +/// Next, what should `s[i]` return? Because indexing returns a reference +/// to underlying data it could be `&u8`, `&[u8]`, or something else similar. +/// Since we're only providing one index, `&u8` makes the most sense but that +/// might not be what the user expects and can be explicitly achieved with +/// [`as_bytes()`]: +/// +/// ``` +/// // The first byte is 104 - the byte value of `'h'` +/// let s = "hello"; +/// assert_eq!(s.as_bytes()[0], 104); +/// // or +/// assert_eq!(s.as_bytes()[0], b'h'); +/// +/// // The first byte is 240 which isn't obviously useful +/// let s = "💖💖💖💖💖"; +/// assert_eq!(s.as_bytes()[0], 240); +/// ``` +/// +/// Due to these ambiguities/restrictions, indexing with a `usize` is simply +/// forbidden: +/// +/// ```compile_fail,E0277 +/// let s = "hello"; +/// +/// // The following will not compile! +/// println!("The first letter of s is {}", s[0]); +/// ``` +/// +/// It is more clear, however, how `&s[i..j]` should work (that is, +/// indexing with a range). It should accept byte indices (to be constant-time) +/// and return a `&str` which is UTF-8 encoded. This is also called "string slicing". +/// Note this will panic if the byte indices provided are not character +/// boundaries - see [`is_char_boundary`] for more details. See the implementations +/// for [`SliceIndex`] for more details on string slicing. For a non-panicking +/// version of string slicing, see [`get`]. +/// +/// [`OsString`]: ../../std/ffi/struct.OsString.html "ffi::OsString" +/// [`SliceIndex`]: core::slice::SliceIndex +/// [`as_bytes()`]: str::as_bytes +/// [`get`]: str::get +/// [`is_char_boundary`]: str::is_char_boundary +/// +/// The [`bytes`] and [`chars`] methods return iterators over the bytes and +/// codepoints of the string, respectively. To iterate over codepoints along +/// with byte indices, use [`char_indices`]. +/// +/// [`bytes`]: str::bytes +/// [`chars`]: str::chars +/// [`char_indices`]: str::char_indices +/// +/// # Deref +/// +/// `String` implements [Deref], and so inherits all of [`str`]'s +/// methods. In addition, this means that you can pass a `String` to a +/// function which takes a [`&str`] by using an ampersand (`&`): +/// +/// ``` +/// use rune_alloc::String; +/// +/// fn takes_str(s: &str) { } +/// +/// let s = String::try_from("Hello")?; +/// +/// takes_str(&s); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// This will create a [`&str`] from the `String` and pass it in. This +/// conversion is very inexpensive, and so generally, functions will accept +/// [`&str`]s as arguments unless they need a `String` for some specific +/// reason. +/// +/// In certain cases Rust doesn't have enough information to make this +/// conversion, known as [`Deref`] coercion. In the following example a string +/// slice [`&'a str`][`&str`] implements the trait `TraitExample`, and the function +/// `example_func` takes anything that implements the trait. In this case Rust +/// would need to make two implicit conversions, which Rust doesn't have the +/// means to do. For that reason, the following example will not compile. 
+/// +/// ```compile_fail,E0277 +/// use rune_alloc::String; +/// +/// trait TraitExample {} +/// +/// impl<'a> TraitExample for &'a str {} +/// +/// fn example_func(example_arg: A) {} +/// +/// let example_string = String::try_from("example_string")?; +/// example_func(&example_string); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// There are two options that would work instead. The first would be to +/// change the line `example_func(&example_string);` to +/// `example_func(example_string.as_str());`, using the method [`as_str()`] +/// to explicitly extract the string slice containing the string. The second +/// way changes `example_func(&example_string);` to +/// `example_func(&*example_string);`. In this case we are dereferencing a +/// `String` to a [`str`], then referencing the [`str`] back to +/// [`&str`]. The second way is more idiomatic, however both work to do the +/// conversion explicitly rather than relying on the implicit conversion. +/// +/// # Representation +/// +/// A `String` is made up of three components: a pointer to some bytes, a +/// length, and a capacity. The pointer points to an internal buffer `String` +/// uses to store its data. The length is the number of bytes currently stored +/// in the buffer, and the capacity is the size of the buffer in bytes. As such, +/// the length will always be less than or equal to the capacity. +/// +/// This buffer is always stored on the heap. +/// +/// You can look at these with the [`as_ptr`], [`len`], and [`capacity`] +/// methods: +/// +/// ``` +/// use core::mem; +/// use rune_alloc::String; +/// +/// let story = String::try_from("Once upon a time...")?; +/// +/// // Prevent automatically dropping the String's data +/// let mut story = mem::ManuallyDrop::new(story); +/// +/// let ptr = story.as_mut_ptr(); +/// let len = story.len(); +/// let capacity = story.capacity(); +/// let allocator = story.allocator().clone(); +/// +/// // story has nineteen bytes +/// assert_eq!(19, len); +/// +/// // We can re-build a String out of ptr, len, and capacity. This is all +/// // unsafe because we are responsible for making sure the components are +/// // valid: +/// let s = unsafe { String::from_raw_parts_in(ptr, len, capacity, allocator) } ; +/// +/// assert_eq!("Once upon a time...", s); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// [`as_ptr`]: str::as_ptr +/// [`len`]: String::len +/// [`capacity`]: String::capacity +/// +/// If a `String` has enough capacity, adding elements to it will not +/// re-allocate. For example, consider this program: +/// +/// ``` +/// use rune_alloc::String; +/// +/// let mut s = String::new(); +/// +/// println!("{}", s.capacity()); +/// +/// for _ in 0..5 { +/// s.try_push_str("hello")?; +/// println!("{}", s.capacity()); +/// } +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// This will output the following: +/// +/// ```text +/// 0 +/// 8 +/// 16 +/// 16 +/// 32 +/// 32 +/// ``` +/// +/// At first, we have no memory allocated at all, but as we append to the +/// string, it increases its capacity appropriately. 
If we instead use the +/// [`try_with_capacity_in`] method to allocate the correct capacity initially: +/// +/// ``` +/// use rune_alloc::{String, Global}; +/// +/// let mut s = String::try_with_capacity_in(25, Global)?; +/// +/// println!("{}", s.capacity()); +/// +/// for _ in 0..5 { +/// s.try_push_str("hello")?; +/// println!("{}", s.capacity()); +/// } +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// [`try_with_capacity_in`]: String::try_with_capacity_in +/// +/// We end up with a different output: +/// +/// ```text +/// 25 +/// 25 +/// 25 +/// 25 +/// 25 +/// 25 +/// ``` +/// +/// Here, there's no need to allocate more memory inside the loop. +/// +/// [str]: prim@str "str" +/// [`str`]: prim@str "str" +/// [`&str`]: prim@str "&str" +/// [Deref]: core::ops::Deref "ops::Deref" +/// [`Deref`]: core::ops::Deref "ops::Deref" +/// [`as_str()`]: String::as_str +pub struct String { + vec: Vec, +} + +impl String { + /// Creates a new empty `String`. + /// + /// Given that the `String` is empty, this will not allocate any initial + /// buffer. While that means that this initial operation is very + /// inexpensive, it may cause excessive allocation later when you add data. + /// If you have an idea of how much data the `String` will hold, consider + /// the [`try_with_capacity`] method to prevent excessive re-allocation. + /// + /// [`try_with_capacity`]: String::try_with_capacity + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::String; + /// + /// let s = String::new(); + /// ``` + #[inline] + #[must_use] + pub const fn new() -> Self { + String { vec: Vec::new() } + } + + /// Creates a new empty `String` with at least the specified capacity. + /// + /// `String`s have an internal buffer to hold their data. The capacity is + /// the length of that buffer, and can be queried with the [`capacity`] + /// method. This method creates an empty `String`, but one with an initial + /// buffer that can hold at least `capacity` bytes. This is useful when you + /// may be appending a bunch of data to the `String`, reducing the number of + /// reallocations it needs to do. + /// + /// [`capacity`]: String::capacity + /// + /// If the given capacity is `0`, no allocation will occur, and this method + /// is identical to the [`new`] method. + /// + /// [`new`]: String::new + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::String; + /// + /// let mut s = String::try_with_capacity(10)?; + /// + /// // The String contains no chars, even though it has capacity for more + /// assert_eq!(s.len(), 0); + /// + /// // These are all done without reallocating... + /// let cap = s.capacity(); + /// + /// for _ in 0..10 { + /// s.try_push('a')?; + /// } + /// + /// assert_eq!(s.capacity(), cap); + /// + /// // ...but this may make the string reallocate + /// s.try_push('a')?; + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn try_with_capacity(capacity: usize) -> Result { + Ok(String { + vec: Vec::try_with_capacity_in(capacity, Global)?, + }) + } + + #[cfg(test)] + pub fn from(value: &str) -> Self { + Self::try_from(value).abort() + } +} + +/// A possible error value when converting a `String` from a UTF-8 byte vector. +/// +/// This type is the error type for the [`from_utf8`] method on [`String`]. It +/// is designed in such a way to carefully avoid reallocations: the +/// [`into_bytes`] method will give back the byte vector that was used in the +/// conversion attempt. 
+/// +/// [`from_utf8`]: String::from_utf8 +/// [`into_bytes`]: FromUtf8Error::into_bytes +/// +/// The [`Utf8Error`] type provided by [`std::str`] represents an error that may +/// occur when converting a slice of [`u8`]s to a [`&str`]. In this sense, it's +/// an analogue to `FromUtf8Error`, and you can get one from a `FromUtf8Error` +/// through the [`utf8_error`] method. +/// +/// [`Utf8Error`]: core::str::Utf8Error "std::str::Utf8Error" +/// [`std::str`]: core::str "std::str" +/// [`&str`]: prim@str "&str" +/// [`utf8_error`]: FromUtf8Error::utf8_error +/// +/// # Examples +/// +/// ``` +/// use rune_alloc::String; +/// +/// // some invalid bytes, in a vector +/// let bytes = rune_alloc::try_vec![0, 159]; +/// +/// let value = String::from_utf8(bytes); +/// +/// assert!(value.is_err()); +/// assert_eq!(rune_alloc::try_vec![0, 159], value.unwrap_err().into_bytes()); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +pub struct FromUtf8Error { + bytes: Vec, + error: Utf8Error, +} + +impl fmt::Debug for FromUtf8Error { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("FromUtf8Error") + .field("bytes", &self.bytes) + .field("error", &self.error) + .finish() + } +} + +impl PartialEq for FromUtf8Error { + fn eq(&self, other: &Self) -> bool { + self.bytes == other.bytes && self.error == other.error + } +} + +impl Eq for FromUtf8Error {} + +/// A possible error value when converting a `String` from a UTF-16 byte slice. +/// +/// This type is the error type for the [`from_utf16`] method on [`String`]. +/// +/// [`from_utf16`]: String::from_utf16 +/// +/// # Examples +/// +/// ``` +/// // 𝄞muic +/// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075, +/// 0xD800, 0x0069, 0x0063]; +/// +/// assert!(String::from_utf16(v).is_err()); +/// ``` +#[derive(Debug)] +pub(crate) struct FromUtf16Error(()); + +impl String { + /// Creates a new empty `String`. + /// + /// Given that the `String` is empty, this will not allocate any initial + /// buffer. While that means that this initial operation is very + /// inexpensive, it may cause excessive allocation later when you add data. + /// If you have an idea of how much data the `String` will hold, consider + /// the [`try_with_capacity_in`] method to prevent excessive re-allocation. + /// + /// [`try_with_capacity_in`]: String::try_with_capacity_in + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{String, Global}; + /// + /// let s = String::new_in(Global); + /// ``` + #[inline] + #[must_use] + pub fn new_in(alloc: A) -> String { + String { + vec: Vec::new_in(alloc), + } + } + + /// Returns a reference to the underlying allocator. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{String, Global}; + /// + /// let s = String::new_in(Global); + /// let alloc: &Global = s.allocator(); + /// ``` + #[inline] + pub fn allocator(&self) -> &A { + self.vec.allocator() + } + + /// Creates a new empty `String` with at least the specified capacity. + /// + /// `String`s have an internal buffer to hold their data. The capacity is + /// the length of that buffer, and can be queried with the [`capacity`] + /// method. This method creates an empty `String`, but one with an initial + /// buffer that can hold at least `capacity` bytes. This is useful when you + /// may be appending a bunch of data to the `String`, reducing the number of + /// reallocations it needs to do. 
+ /// + /// [`capacity`]: String::capacity + /// + /// If the given capacity is `0`, no allocation will occur, and this method + /// is identical to the [`new_in`] method. + /// + /// [`new_in`]: String::new_in + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{String, Global}; + /// + /// let mut s = String::try_with_capacity_in(10, Global)?; + /// + /// // The String contains no chars, even though it has capacity for more + /// assert_eq!(s.len(), 0); + /// + /// // These are all done without reallocating... + /// let cap = s.capacity(); + /// + /// for _ in 0..10 { + /// s.try_push('a')?; + /// } + /// + /// assert_eq!(s.capacity(), cap); + /// + /// // ...but this may make the string reallocate + /// s.try_push('a')?; + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn try_with_capacity_in(capacity: usize, alloc: A) -> Result, Error> { + Ok(String { + vec: Vec::try_with_capacity_in(capacity, alloc)?, + }) + } + + /// Converts a vector of bytes to a `String`. + /// + /// A string ([`String`]) is made of bytes ([`u8`]), and a vector of bytes + /// ([`Vec`]) is made of bytes, so this function converts between the + /// two. Not all byte slices are valid `String`s, however: `String` requires + /// that it is valid UTF-8. `from_utf8()` checks to ensure that the bytes + /// are valid UTF-8, and then does the conversion. + /// + /// If you are sure that the byte slice is valid UTF-8, and you don't want + /// to incur the overhead of the validity check, there is an unsafe version + /// of this function, [`from_utf8_unchecked`], which has the same behavior + /// but skips the check. + /// + /// This method will take care to not copy the vector, for efficiency's + /// sake. + /// + /// If you need a [`&str`] instead of a `String`, consider + /// [`str::from_utf8`]. + /// + /// The inverse of this method is [`into_bytes`]. + /// + /// [`str::from_utf8`]: core::str::from_utf8 + /// + /// # Errors + /// + /// Returns [`Err`] if the slice is not UTF-8 with a description as to why + /// the provided bytes are not UTF-8. The vector you moved in is also + /// included. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::String; + /// + /// // some bytes, in a vector + /// let sparkle_heart = rune_alloc::try_vec![240, 159, 146, 150]; + /// + /// // We know these bytes are valid, so we'll use `unwrap()`. + /// let sparkle_heart = String::from_utf8(sparkle_heart).unwrap(); + /// + /// assert_eq!("💖", sparkle_heart); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// Incorrect bytes: + /// + /// ``` + /// use rune_alloc::String; + /// + /// // some invalid bytes, in a vector + /// let sparkle_heart = rune_alloc::try_vec![0, 159, 146, 150]; + /// + /// assert!(String::from_utf8(sparkle_heart).is_err()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// See the docs for [`FromUtf8Error`] for more details on what you can do + /// with this error. + /// + /// [`from_utf8_unchecked`]: String::from_utf8_unchecked + /// [`Vec`]: crate::vec::Vec "Vec" + /// [`&str`]: prim@str "&str" + /// [`into_bytes`]: String::into_bytes + #[inline] + pub fn from_utf8(vec: Vec) -> Result, FromUtf8Error> { + match from_utf8(&vec) { + Ok(..) => Ok(String { vec }), + Err(e) => Err(FromUtf8Error { + bytes: vec, + error: e, + }), + } + } + + /// Creates a new `String` from a length, capacity, and pointer. 
+ /// + /// # Safety + /// + /// This is highly unsafe, due to the number of invariants that aren't + /// checked: + /// + /// * The memory at `buf` needs to have been previously allocated by the + /// same allocator the standard library uses, with a required alignment of exactly 1. + /// * `length` needs to be less than or equal to `capacity`. + /// * `capacity` needs to be the correct value. + /// * The first `length` bytes at `buf` need to be valid UTF-8. + /// + /// Violating these may cause problems like corrupting the allocator's + /// internal data structures. For example, it is normally **not** safe to + /// build a `String` from a pointer to a C `char` array containing UTF-8 + /// _unless_ you are certain that array was originally allocated by the + /// Rust standard library's allocator. + /// + /// The ownership of `buf` is effectively transferred to the + /// `String` which may then deallocate, reallocate or change the + /// contents of memory pointed to by the pointer at will. Ensure + /// that nothing else uses the pointer after calling this + /// function. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// use core::mem; + /// + /// unsafe { + /// let s = String::try_from("hello")?; + /// + /// // Prevent automatically dropping the String's data + /// let mut s = mem::ManuallyDrop::new(s); + /// + /// let ptr = s.as_mut_ptr(); + /// let len = s.len(); + /// let capacity = s.capacity(); + /// let allocator = s.allocator().clone(); + /// + /// let s = String::from_raw_parts_in(ptr, len, capacity, allocator); + /// + /// assert_eq!("hello", s); + /// } + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub unsafe fn from_raw_parts_in( + buf: *mut u8, + length: usize, + capacity: usize, + alloc: A, + ) -> String { + unsafe { + String { + vec: Vec::from_raw_parts_in(buf, length, capacity, alloc), + } + } + } + + /// Converts a vector of bytes to a `String` without checking that the + /// string contains valid UTF-8. + /// + /// See the safe version, [`from_utf8`], for more details. + /// + /// [`from_utf8`]: String::from_utf8 + /// + /// # Safety + /// + /// This function is unsafe because it does not check that the bytes passed + /// to it are valid UTF-8. If this constraint is violated, it may cause + /// memory unsafety issues with future users of the `String`, as the rest of + /// the standard library assumes that `String`s are valid UTF-8. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// + /// // some bytes, in a vector + /// let sparkle_heart = rune_alloc::try_vec![240, 159, 146, 150]; + /// + /// let sparkle_heart = unsafe { + /// String::from_utf8_unchecked(sparkle_heart) + /// }; + /// + /// assert_eq!("💖", sparkle_heart); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + #[must_use] + pub unsafe fn from_utf8_unchecked(bytes: Vec) -> String { + String { vec: bytes } + } + + /// Converts a `String` into a byte vector. + /// + /// This consumes the `String`, so we do not need to copy its contents. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// + /// let s = String::try_from("hello")?; + /// let bytes = s.into_bytes(); + /// + /// assert_eq!(&[104, 101, 108, 108, 111][..], &bytes[..]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + #[must_use = "`self` will be dropped if the result is not used"] + pub fn into_bytes(self) -> Vec { + self.vec + } + + /// Extracts a string slice containing the entire `String`. 
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// + /// let s = String::try_from("foo")?; + /// + /// assert_eq!("foo", s.as_str()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + #[must_use] + pub fn as_str(&self) -> &str { + self + } + + /// Converts a `String` into a mutable string slice. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// + /// let mut s = String::try_from("foobar")?; + /// let s_mut_str = s.as_mut_str(); + /// + /// s_mut_str.make_ascii_uppercase(); + /// + /// assert_eq!("FOOBAR", s_mut_str); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + #[must_use] + pub fn as_mut_str(&mut self) -> &mut str { + self + } + + /// Appends a given string slice onto the end of this `String`. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{String, Global}; + /// + /// let mut s = String::try_with_capacity_in(3, Global)?; + /// + /// s.try_push_str("foo")?; + /// s.try_push_str("bar")?; + /// + /// assert_eq!("foobar", s); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn try_push_str(&mut self, string: &str) -> Result<(), Error> { + self.vec.try_extend_from_slice(string.as_bytes()) + } + + #[cfg(test)] + pub(crate) fn push_str(&mut self, string: &str) { + self.try_push_str(string).abort() + } + + /// Returns this `String`'s capacity, in bytes. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{String, Global}; + /// + /// let s = String::try_with_capacity_in(10, Global)?; + /// + /// assert!(s.capacity() >= 10); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + #[must_use] + pub fn capacity(&self) -> usize { + self.vec.capacity() + } + + /// Tries to reserve capacity for at least `additional` bytes more than the + /// current length. The allocator may reserve more space to speculatively + /// avoid frequent allocations. After calling `try_reserve`, capacity will be + /// greater than or equal to `self.len() + additional` if it returns + /// `Ok(())`. Does nothing if capacity is already sufficient. This method + /// preserves the contents even if an error occurs. + /// + /// # Errors + /// + /// If the capacity overflows, or the allocator reports a failure, then an error + /// is returned. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{String, Error}; + /// + /// fn process_data(data: &str) -> Result { + /// let mut output = String::new(); + /// + /// // Pre-reserve the memory, exiting if we can't + /// output.try_reserve(data.len())?; + /// + /// // Now we know this can't OOM in the middle of our complex work + /// output.try_push_str(data)?; + /// + /// Ok(output) + /// } + /// # process_data("rust").expect("why is the test harness OOMing on 4 bytes?"); + /// ``` + pub fn try_reserve(&mut self, additional: usize) -> Result<(), Error> { + self.vec.try_reserve(additional) + } + + /// Tries to reserve the minimum capacity for at least `additional` bytes + /// more than the current length. Unlike [`try_reserve`], this will not + /// deliberately over-allocate to speculatively avoid frequent allocations. + /// After calling `try_reserve_exact`, capacity will be greater than or + /// equal to `self.len() + additional` if it returns `Ok(())`. + /// Does nothing if the capacity is already sufficient. + /// + /// Note that the allocator may give the collection more space than it + /// requests. Therefore, capacity can not be relied upon to be precisely + /// minimal. Prefer [`try_reserve`] if future insertions are expected. 
+ /// + /// [`try_reserve`]: String::try_reserve + /// + /// # Errors + /// + /// If the capacity overflows, or the allocator reports a failure, then an error + /// is returned. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{String, Error}; + /// + /// fn process_data(data: &str) -> Result { + /// let mut output = String::new(); + /// + /// // Pre-reserve the memory, exiting if we can't + /// output.try_reserve_exact(data.len())?; + /// + /// // Now we know this can't OOM in the middle of our complex work + /// output.try_push_str(data); + /// + /// Ok(output) + /// } + /// # process_data("rust").expect("why is the test harness OOMing on 4 bytes?"); + /// ``` + pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), Error> { + self.vec.try_reserve_exact(additional) + } + + /// Shrinks the capacity of this `String` to match its length. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// let mut s = String::try_from("foo")?; + /// + /// s.try_reserve(100)?; + /// assert!(s.capacity() >= 100); + /// + /// s.try_shrink_to_fit()?; + /// assert_eq!(3, s.capacity()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn try_shrink_to_fit(&mut self) -> Result<(), Error> { + self.vec.try_shrink_to_fit() + } + + /// Shrinks the capacity of this `String` with a lower bound. + /// + /// The capacity will remain at least as large as both the length + /// and the supplied value. + /// + /// If the current capacity is less than the lower limit, this is a no-op. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// + /// let mut s = String::try_from("foo")?; + /// + /// s.try_reserve(100)?; + /// assert!(s.capacity() >= 100); + /// + /// s.try_shrink_to(10)?; + /// assert!(s.capacity() >= 10); + /// s.try_shrink_to(0)?; + /// assert!(s.capacity() >= 3); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn try_shrink_to(&mut self, min_capacity: usize) -> Result<(), Error> { + self.vec.try_shrink_to(min_capacity) + } + + /// Appends the given [`char`] to the end of this `String`. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{String, Global}; + /// + /// let mut s = String::try_with_capacity_in(3, Global)?; + /// s.try_push_str("abc")?; + /// + /// s.try_push('1')?; + /// s.try_push('2')?; + /// s.try_push('3')?; + /// + /// assert_eq!("abc123", s); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn try_push(&mut self, ch: char) -> Result<(), Error> { + match ch.len_utf8() { + 1 => self.vec.try_push(ch as u8), + _ => self + .vec + .try_extend_from_slice(ch.encode_utf8(&mut [0; 4]).as_bytes()), + } + } + + /// Returns a byte slice of this `String`'s contents. + /// + /// The inverse of this method is [`from_utf8`]. + /// + /// [`from_utf8`]: String::from_utf8 + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// + /// let s = String::try_from("hello")?; + /// + /// assert_eq!(&[104, 101, 108, 108, 111], s.as_bytes()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + #[must_use] + pub(crate) fn as_bytes(&self) -> &[u8] { + &self.vec + } + + /// Shortens this `String` to the specified length. + /// + /// If `new_len` is greater than the string's current length, this has no + /// effect. + /// + /// Note that this method has no effect on the allocated capacity + /// of the string + /// + /// # Panics + /// + /// Panics if `new_len` does not lie on a [`char`] boundary. 
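+    ///
+    /// For example, `"é"` is two bytes long, so truncating it to a length of
+    /// 1 would split the code point and panics:
+    ///
+    /// ```should_panic
+    /// use rune_alloc::String;
+    ///
+    /// let mut s = String::try_from("é")?;
+    /// s.truncate(1); // 1 is not a char boundary
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```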
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// + /// let mut s = String::try_from("hello")?; + /// + /// s.truncate(2); + /// + /// assert_eq!("he", s); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn truncate(&mut self, new_len: usize) { + if new_len <= self.len() { + assert!(self.is_char_boundary(new_len)); + self.vec.truncate(new_len) + } + } + + /// Removes the last character from the string buffer and returns it. + /// + /// Returns [`None`] if this `String` is empty. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// + /// let mut s = String::try_from("abč")?; + /// + /// assert_eq!(s.pop(), Some('č')); + /// assert_eq!(s.pop(), Some('b')); + /// assert_eq!(s.pop(), Some('a')); + /// + /// assert_eq!(s.pop(), None); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn pop(&mut self) -> Option { + let ch = self.chars().next_back()?; + let newlen = self.len() - ch.len_utf8(); + unsafe { + self.vec.set_len(newlen); + } + Some(ch) + } + + /// Removes a [`char`] from this `String` at a byte position and returns it. + /// + /// This is an *O*(*n*) operation, as it requires copying every element in the + /// buffer. + /// + /// # Panics + /// + /// Panics if `idx` is larger than or equal to the `String`'s length, + /// or if it does not lie on a [`char`] boundary. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// + /// let mut s = String::try_from("abç")?; + /// + /// assert_eq!(s.remove(0), 'a'); + /// assert_eq!(s.remove(1), 'ç'); + /// assert_eq!(s.remove(0), 'b'); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn remove(&mut self, idx: usize) -> char { + let ch = match self[idx..].chars().next() { + Some(ch) => ch, + None => panic!("cannot remove a char from the end of a string"), + }; + + let next = idx + ch.len_utf8(); + let len = self.len(); + unsafe { + ptr::copy( + self.vec.as_ptr().add(next), + self.vec.as_mut_ptr().add(idx), + len - next, + ); + self.vec.set_len(len - (next - idx)); + } + ch + } + + /// Retains only the characters specified by the predicate. + /// + /// In other words, remove all characters `c` such that `f(c)` returns `false`. + /// This method operates in place, visiting each character exactly once in the + /// original order, and preserves the order of the retained characters. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// + /// let mut s = String::try_from("f_o_ob_ar")?; + /// + /// s.retain(|c| c != '_'); + /// + /// assert_eq!(s, "foobar"); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// Because the elements are visited exactly once in the original order, + /// external state may be used to decide which elements to keep. 
+ /// + /// ``` + /// use rune_alloc::String; + /// + /// let mut s = String::try_from("abcde")?; + /// let keep = [false, true, true, false, true]; + /// let mut iter = keep.iter(); + /// s.retain(|_| *iter.next().unwrap()); + /// assert_eq!(s, "bce"); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn retain(&mut self, mut f: F) + where + F: FnMut(char) -> bool, + { + struct SetLenOnDrop<'a, A: Allocator> { + s: &'a mut String, + idx: usize, + del_bytes: usize, + } + + impl<'a, A: Allocator> Drop for SetLenOnDrop<'a, A> { + fn drop(&mut self) { + let new_len = self.idx - self.del_bytes; + debug_assert!(new_len <= self.s.len()); + unsafe { self.s.vec.set_len(new_len) }; + } + } + + let len = self.len(); + let mut guard = SetLenOnDrop { + s: self, + idx: 0, + del_bytes: 0, + }; + + while guard.idx < len { + let ch = + // SAFETY: `guard.idx` is positive-or-zero and less that len so the `get_unchecked` + // is in bound. `self` is valid UTF-8 like string and the returned slice starts at + // a unicode code point so the `Chars` always return one character. + unsafe { guard.s.get_unchecked(guard.idx..len).chars().next().unwrap_unchecked() }; + let ch_len = ch.len_utf8(); + + if !f(ch) { + guard.del_bytes += ch_len; + } else if guard.del_bytes > 0 { + // SAFETY: `guard.idx` is in bound and `guard.del_bytes` represent the number of + // bytes that are erased from the string so the resulting `guard.idx - + // guard.del_bytes` always represent a valid unicode code point. + // + // `guard.del_bytes` >= `ch.len_utf8()`, so taking a slice with `ch.len_utf8()` len + // is safe. + ch.encode_utf8(unsafe { + slice::from_raw_parts_mut( + guard.s.as_mut_ptr().add(guard.idx - guard.del_bytes), + ch.len_utf8(), + ) + }); + } + + // Point idx to the next char + guard.idx += ch_len; + } + + drop(guard); + } + + /// Inserts a character into this `String` at a byte position. + /// + /// This is an *O*(*n*) operation as it requires copying every element in the + /// buffer. + /// + /// # Panics + /// + /// Panics if `idx` is larger than the `String`'s length, or if it does not + /// lie on a [`char`] boundary. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{String, Global}; + /// + /// let mut s = String::try_with_capacity_in(3, Global)?; + /// + /// s.try_insert(0, 'f')?; + /// s.try_insert(1, 'o')?; + /// s.try_insert(2, 'o')?; + /// + /// assert_eq!(s, "foo"); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn try_insert(&mut self, idx: usize, ch: char) -> Result<(), Error> { + assert!(self.is_char_boundary(idx)); + let mut bits = [0; 4]; + let bits = ch.encode_utf8(&mut bits).as_bytes(); + + unsafe { + self.insert_bytes(idx, bits)?; + } + + Ok(()) + } + + unsafe fn insert_bytes(&mut self, idx: usize, bytes: &[u8]) -> Result<(), Error> { + let len = self.len(); + let amt = bytes.len(); + self.vec.try_reserve(amt)?; + + unsafe { + ptr::copy( + self.vec.as_ptr().add(idx), + self.vec.as_mut_ptr().add(idx + amt), + len - idx, + ); + ptr::copy_nonoverlapping(bytes.as_ptr(), self.vec.as_mut_ptr().add(idx), amt); + self.vec.set_len(len + amt); + } + + Ok(()) + } + + /// Inserts a string slice into this `String` at a byte position. + /// + /// This is an *O*(*n*) operation as it requires copying every element in the + /// buffer. + /// + /// # Panics + /// + /// Panics if `idx` is larger than the `String`'s length, or if it does not + /// lie on a [`char`] boundary. 
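+    ///
+    /// For example, inserting into the middle of the two-byte `'é'` panics:
+    ///
+    /// ```should_panic
+    /// use rune_alloc::String;
+    ///
+    /// let mut s = String::try_from("é")?;
+    /// s.try_insert_str(1, "x")?; // 1 is not a char boundary
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```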
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// + /// let mut s = String::try_from("bar")?; + /// + /// s.try_insert_str(0, "foo")?; + /// + /// assert_eq!("foobar", s); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn try_insert_str(&mut self, idx: usize, string: &str) -> Result<(), Error> { + assert!(self.is_char_boundary(idx)); + + unsafe { + self.insert_bytes(idx, string.as_bytes())?; + } + + Ok(()) + } + + /// Returns a mutable reference to the contents of this `String`. + /// + /// # Safety + /// + /// This function is unsafe because the returned `&mut Vec` allows writing + /// bytes which are not valid UTF-8. If this constraint is violated, using + /// the original `String` after dropping the `&mut Vec` may violate memory + /// safety, as the rest of the standard library assumes that `String`s are + /// valid UTF-8. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// + /// let mut s = String::try_from("hello")?; + /// + /// unsafe { + /// let vec = s.as_mut_vec(); + /// assert_eq!(&[104, 101, 108, 108, 111][..], &vec[..]); + /// + /// vec.reverse(); + /// } + /// assert_eq!(s, "olleh"); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub unsafe fn as_mut_vec(&mut self) -> &mut Vec { + &mut self.vec + } + + /// Returns the length of this `String`, in bytes, not [`char`]s or + /// graphemes. In other words, it might not be what a human considers the + /// length of the string. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// + /// let a = String::try_from("foo")?; + /// assert_eq!(a.len(), 3); + /// + /// let fancy_f = String::try_from("ƒoo")?; + /// assert_eq!(fancy_f.len(), 4); + /// assert_eq!(fancy_f.chars().count(), 3); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + #[must_use] + pub fn len(&self) -> usize { + self.vec.len() + } + + /// Returns `true` if this `String` has a length of zero, and `false` + /// otherwise. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// + /// let mut v = String::new(); + /// assert!(v.is_empty()); + /// + /// v.try_push('a')?; + /// assert!(!v.is_empty()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + #[must_use] + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Splits the string into two at the given byte index. + /// + /// Returns a newly allocated `String`. `self` contains bytes `[0, at)`, and + /// the returned `String` contains bytes `[at, len)`. `at` must be on the + /// boundary of a UTF-8 code point. + /// + /// Note that the capacity of `self` does not change. + /// + /// # Panics + /// + /// Panics if `at` is not on a `UTF-8` code point boundary, or if it is beyond the last + /// code point of the string. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// + /// let mut hello = String::try_from("Hello, World!")?; + /// let world = hello.try_split_off(7)?; + /// assert_eq!(hello, "Hello, "); + /// assert_eq!(world, "World!"); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + #[must_use = "use `.truncate()` if you don't need the other half"] + pub fn try_split_off(&mut self, at: usize) -> Result, Error> + where + A: Clone, + { + assert!(self.is_char_boundary(at)); + let other = self.vec.try_split_off(at)?; + Ok(unsafe { String::from_utf8_unchecked(other) }) + } + + /// Truncates this `String`, removing all contents. + /// + /// While this means the `String` will have a length of zero, it does not + /// touch its capacity. 
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// + /// let mut s = String::try_from("foo")?; + /// + /// s.clear(); + /// + /// assert!(s.is_empty()); + /// assert_eq!(0, s.len()); + /// assert_eq!(3, s.capacity()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn clear(&mut self) { + self.vec.clear() + } + + /// Removes the specified range from the string in bulk, returning all + /// removed characters as an iterator. + /// + /// The returned iterator keeps a mutable borrow on the string to optimize + /// its implementation. + /// + /// # Panics + /// + /// Panics if the starting point or end point do not lie on a [`char`] + /// boundary, or if they're out of bounds. + /// + /// # Leaking + /// + /// If the returned iterator goes out of scope without being dropped (due to + /// [`core::mem::forget`], for example), the string may still contain a copy + /// of any drained characters, or may have lost characters arbitrarily, + /// including characters outside the range. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// use rune_alloc::prelude::*; + /// + /// let mut s = String::try_from("α is alpha, β is beta")?; + /// let beta_offset = s.find('β').unwrap_or(s.len()); + /// + /// // Remove the range up until the β from the string + /// let t: String = s.drain(..beta_offset).try_collect()?; + /// assert_eq!(t, "α is alpha, "); + /// assert_eq!(s, "β is beta"); + /// + /// // A full range clears the string, like `clear()` does + /// s.drain(..); + /// assert_eq!(s, ""); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn drain(&mut self, range: R) -> Drain<'_, A> + where + R: RangeBounds, + { + // Memory safety + // + // The String version of Drain does not have the memory safety issues + // of the vector version. The data is just plain bytes. + // Because the range removal happens in Drop, if the Drain iterator is leaked, + // the removal will not happen. + let Range { start, end } = slice_range(range, ..self.len()); + assert!(self.is_char_boundary(start)); + assert!(self.is_char_boundary(end)); + + // Take out two simultaneous borrows. The &mut String won't be accessed + // until iteration is over, in Drop. + let self_ptr = self as *mut _; + // SAFETY: `slice::range` and `is_char_boundary` do the appropriate bounds checks. + let chars_iter = unsafe { self.get_unchecked(start..end) }.chars(); + + Drain { + start, + end, + iter: chars_iter, + string: self_ptr, + } + } + + /// Removes the specified range in the string, + /// and replaces it with the given string. + /// The given string doesn't need to be the same length as the range. + /// + /// # Panics + /// + /// Panics if the starting point or end point do not lie on a [`char`] + /// boundary, or if they're out of bounds. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// + /// let mut s = String::try_from("α is alpha, β is beta")?; + /// let beta_offset = s.find('β').unwrap_or(s.len()); + /// + /// // Replace the range up until the β from the string + /// s.try_replace_range(..beta_offset, "Α is capital alpha; ")?; + /// assert_eq!(s, "Α is capital alpha; β is beta"); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_replace_range(&mut self, range: R, replace_with: &str) -> Result<(), Error> + where + R: RangeBounds, + { + // Memory safety + // + // Replace_range does not have the memory safety issues of a vector Splice. + // of the vector version. The data is just plain bytes. 
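+        //
+        // The bounds are read into locals once and then used for both the
+        // `is_char_boundary` assertions and the splice below: `RangeBounds`
+        // is a safe trait, so a malicious implementation could otherwise
+        // return different bounds on each call and sidestep the checks (see
+        // the WARNING comments that follow and rust-lang/rust#81138).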
+ + // WARNING: Inlining this variable would be unsound (#81138) + let start = range.start_bound(); + match start { + Included(&n) => assert!(self.is_char_boundary(n)), + Excluded(&n) => assert!(self.is_char_boundary(n + 1)), + Unbounded => {} + }; + // WARNING: Inlining this variable would be unsound (#81138) + let end = range.end_bound(); + match end { + Included(&n) => assert!(self.is_char_boundary(n + 1)), + Excluded(&n) => assert!(self.is_char_boundary(n)), + Unbounded => {} + }; + + // Using `range` again would be unsound (#81138) + // We assume the bounds reported by `range` remain the same, but + // an adversarial implementation could change between calls + unsafe { self.as_mut_vec() }.try_splice_in_place((start, end), replace_with.bytes())?; + Ok(()) + } + + /// Converts this `String` into a [Box]<[str]>. + /// + /// This will drop any excess capacity. + /// + /// [str]: prim@str "str" + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// let s = String::try_from("hello")?; + /// + /// let b = s.try_into_boxed_str()?; + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[must_use = "`self` will be dropped if the result is not used"] + #[inline] + pub fn try_into_boxed_str(self) -> Result, Error> { + let slice = self.vec.try_into_boxed_slice()?; + Ok(unsafe { crate::alloc::str::from_boxed_utf8_unchecked(slice) }) + } + + /// Consumes and leaks the `String`, returning a mutable reference to the contents, + /// `&'a mut str`. + /// + /// The caller has free choice over the returned lifetime, including `'static`. Indeed, + /// this function is ideally used for data that lives for the remainder of the program's life, + /// as dropping the returned reference will cause a memory leak. + /// + /// It does not reallocate or shrink the `String`, + /// so the leaked allocation may include unused capacity that is not part + /// of the returned slice. If you don't want that, call [`try_into_boxed_str`], + /// and then [`Box::leak`]. + /// + /// [`try_into_boxed_str`]: Self::try_into_boxed_str + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// + /// let x = String::try_from("bucket")?; + /// let static_ref: &'static mut str = x.leak(); + /// assert_eq!(static_ref, "bucket"); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn leak<'a>(self) -> &'a mut str + where + A: 'a, + { + let slice = self.vec.leak(); + unsafe { from_utf8_unchecked_mut(slice) } + } +} + +impl FromUtf8Error { + /// Returns a slice of [`u8`]s bytes that were attempted to convert to a `String`. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// + /// // some invalid bytes, in a vector + /// let bytes = rune_alloc::try_vec![0, 159]; + /// + /// let value = String::from_utf8(bytes); + /// + /// assert_eq!(&[0, 159], value.unwrap_err().as_bytes()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[must_use] + pub fn as_bytes(&self) -> &[u8] { + &self.bytes[..] + } + + /// Returns the bytes that were attempted to convert to a `String`. + /// + /// This method is carefully constructed to avoid allocation. It will + /// consume the error, moving out the bytes, so that a copy of the bytes + /// does not need to be made. 
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// + /// // some invalid bytes, in a vector + /// let bytes = rune_alloc::try_vec![0, 159]; + /// + /// let value = String::from_utf8(bytes); + /// + /// assert_eq!(rune_alloc::try_vec![0, 159], value.unwrap_err().into_bytes()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[must_use = "`self` will be dropped if the result is not used"] + pub fn into_bytes(self) -> Vec { + self.bytes + } + + /// Fetch a `Utf8Error` to get more details about the conversion failure. + /// + /// The [`Utf8Error`] type provided by [`std::str`] represents an error that + /// may occur when converting a slice of [`u8`]s to a [`&str`]. In this + /// sense, it's an analogue to `FromUtf8Error`. See its documentation for + /// more details on using it. + /// + /// [`std::str`]: core::str "std::str" + /// [`&str`]: prim@str "&str" + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{try_vec, String}; + /// + /// // some invalid bytes, in a vector + /// let bytes = try_vec![0, 159]; + /// + /// let error = String::from_utf8(bytes).unwrap_err().utf8_error(); + /// + /// // the first byte is invalid here + /// assert_eq!(1, error.valid_up_to()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[must_use] + pub fn utf8_error(&self) -> Utf8Error { + self.error + } +} + +impl Borrow for String { + #[inline] + fn borrow(&self) -> &str { + &self[..] + } +} + +impl fmt::Display for FromUtf8Error { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(&self.error, f) + } +} + +impl fmt::Display for FromUtf16Error { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt("invalid utf-16: lone surrogate found", f) + } +} + +#[cfg(feature = "std")] +impl ::rust_std::error::Error for FromUtf8Error {} +#[cfg(feature = "std")] +impl ::rust_std::error::Error for FromUtf16Error {} + +impl TryClone for String { + fn try_clone(&self) -> Result { + Ok(String { + vec: self.vec.try_clone()?, + }) + } +} + +#[cfg(test)] +impl Clone for String { + fn clone(&self) -> Self { + self.try_clone().abort() + } +} + +impl PartialEq for String { + #[inline] + fn eq(&self, other: &Self) -> bool { + self.vec == other.vec + } +} + +impl Eq for String {} + +impl PartialOrd for String { + #[inline] + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for String { + #[inline] + fn cmp(&self, other: &Self) -> Ordering { + self.vec.cmp(&other.vec) + } +} + +macro_rules! impl_eq { + ($lhs:ty, $rhs: ty) => { + #[allow(unused_lifetimes)] + #[allow(clippy::partialeq_ne_impl)] + impl<'a, 'b> PartialEq<$rhs> for $lhs { + #[inline] + fn eq(&self, other: &$rhs) -> bool { + PartialEq::eq(&self[..], &other[..]) + } + #[inline] + fn ne(&self, other: &$rhs) -> bool { + PartialEq::ne(&self[..], &other[..]) + } + } + + #[allow(unused_lifetimes)] + #[allow(clippy::partialeq_ne_impl)] + impl<'a, 'b> PartialEq<$lhs> for $rhs { + #[inline] + fn eq(&self, other: &$lhs) -> bool { + PartialEq::eq(&self[..], &other[..]) + } + #[inline] + fn ne(&self, other: &$lhs) -> bool { + PartialEq::ne(&self[..], &other[..]) + } + } + }; +} + +impl_eq! { String, str } +impl_eq! 
{ String, &'a str } + +impl fmt::Display for String { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(&**self, f) + } +} + +impl fmt::Debug for String { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Debug::fmt(&**self, f) + } +} + +impl hash::Hash for String { + #[inline] + fn hash(&self, hasher: &mut H) { + (**self).hash(hasher) + } +} + +impl ops::Index> for String { + type Output = str; + + #[inline] + fn index(&self, index: ops::Range) -> &str { + &self[..][index] + } +} + +impl ops::Index> for String { + type Output = str; + + #[inline] + fn index(&self, index: ops::RangeTo) -> &str { + &self[..][index] + } +} + +impl ops::Index> for String { + type Output = str; + + #[inline] + fn index(&self, index: ops::RangeFrom) -> &str { + &self[..][index] + } +} + +impl ops::Index for String { + type Output = str; + + #[inline] + fn index(&self, _index: ops::RangeFull) -> &str { + unsafe { from_utf8_unchecked(&self.vec) } + } +} + +impl ops::Index> for String { + type Output = str; + + #[inline] + fn index(&self, index: ops::RangeInclusive) -> &str { + Index::index(&**self, index) + } +} + +impl ops::Index> for String { + type Output = str; + + #[inline] + fn index(&self, index: ops::RangeToInclusive) -> &str { + Index::index(&**self, index) + } +} + +impl ops::IndexMut> for String { + #[inline] + fn index_mut(&mut self, index: ops::Range) -> &mut str { + &mut self[..][index] + } +} + +impl ops::IndexMut> for String { + #[inline] + fn index_mut(&mut self, index: ops::RangeTo) -> &mut str { + &mut self[..][index] + } +} + +impl ops::IndexMut> for String { + #[inline] + fn index_mut(&mut self, index: ops::RangeFrom) -> &mut str { + &mut self[..][index] + } +} + +impl ops::IndexMut for String { + #[inline] + fn index_mut(&mut self, _index: ops::RangeFull) -> &mut str { + unsafe { from_utf8_unchecked_mut(&mut self.vec) } + } +} + +impl ops::IndexMut> for String { + #[inline] + fn index_mut(&mut self, index: ops::RangeInclusive) -> &mut str { + IndexMut::index_mut(&mut **self, index) + } +} + +impl ops::IndexMut> for String { + #[inline] + fn index_mut(&mut self, index: ops::RangeToInclusive) -> &mut str { + IndexMut::index_mut(&mut **self, index) + } +} + +impl ops::Deref for String { + type Target = str; + + #[inline] + fn deref(&self) -> &str { + unsafe { from_utf8_unchecked(&self.vec) } + } +} + +impl ops::DerefMut for String { + #[inline] + fn deref_mut(&mut self) -> &mut str { + unsafe { from_utf8_unchecked_mut(&mut self.vec) } + } +} + +impl AsRef for String { + #[inline] + fn as_ref(&self) -> &str { + self + } +} + +impl AsMut for String { + #[inline] + fn as_mut(&mut self) -> &mut str { + self + } +} + +#[cfg(feature = "std")] +impl AsRef<::rust_std::ffi::OsStr> for String { + #[inline] + fn as_ref(&self) -> &::rust_std::ffi::OsStr { + (**self).as_ref() + } +} + +impl AsRef<[u8]> for String { + #[inline] + fn as_ref(&self) -> &[u8] { + self.as_bytes() + } +} + +impl From> for String { + /// Converts the given boxed `str` slice to a [`String`]. + /// It is notable that the `str` slice is owned. 
+ /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::{Box, String}; + /// + /// let s1: String = String::try_from("hello world")?; + /// let s2: Box = s1.try_into_boxed_str()?; + /// let s3: String = String::from(s2); + /// + /// assert_eq!("hello world", s3); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + fn from(s: Box) -> String { + crate::alloc::str::into_string(s) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<::rust_alloc::boxed::Box> for String { + type Error = Error; + + /// Try to convert a std `Box` into a [`String`]. + /// + /// The result is fallibly allocated on the heap. + fn try_from(s: ::rust_alloc::boxed::Box) -> Result { + Self::try_from(s.as_ref()) + } +} + +#[cfg(feature = "alloc")] +impl TryFrom<::rust_alloc::string::String> for String { + type Error = Error; + + /// Try to convert a std `String` into a [`String`]. + /// + /// The result is fallibly allocated on the heap. + fn try_from(s: ::rust_alloc::string::String) -> Result { + Self::try_from(s.as_str()) + } +} + +#[cfg(feature = "alloc")] +impl From> for ::rust_alloc::string::String { + /// Try to convert a [`String`] into a std `String`. + /// + /// The result is allocated on the heap. + fn from(s: String) -> Self { + Self::from(s.as_str()) + } +} + +impl TryFrom<&str> for String { + type Error = Error; + + /// Converts a `&str` into a [`String`]. + /// + /// The result is fallibly allocated on the heap. + fn try_from(s: &str) -> Result { + let mut out = String::try_with_capacity_in(s.len(), Global)?; + out.try_push_str(s)?; + Ok(out) + } +} + +impl TryFrom> for Box { + type Error = Error; + + /// Converts the given [`String`] to a boxed `str` slice that is owned. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{Box, String}; + /// + /// let s1: String = String::try_from("hello world")?; + /// let s2: Box = Box::try_from(s1)?; + /// let s3: String = String::from(s2); + /// + /// assert_eq!("hello world", s3); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + fn try_from(s: String) -> Result, Error> { + s.try_into_boxed_str() + } +} + +impl From> for Vec { + /// Converts the given [`String`] to a vector [`Vec`] that holds values of type [`u8`]. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{String, Vec}; + /// + /// let s1 = String::try_from("hello world")?; + /// let v1 = Vec::from(s1); + /// + /// for b in v1 { + /// println!("{b}"); + /// } + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + fn from(string: String) -> Vec { + string.into_bytes() + } +} + +/// A draining iterator for `String`. +/// +/// This struct is created by the [`drain`] method on [`String`]. See its +/// documentation for more. +/// +/// [`drain`]: String::drain +pub struct Drain<'a, A: Allocator> { + /// Will be used as &'a mut String in the destructor + string: *mut String, + /// Start of part to remove + start: usize, + /// End of part to remove + end: usize, + /// Current remaining range to remove + iter: Chars<'a>, +} + +impl fmt::Debug for Drain<'_, A> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("Drain").field(&self.as_str()).finish() + } +} + +unsafe impl Sync for Drain<'_, A> {} +unsafe impl Send for Drain<'_, A> {} + +impl Drop for Drain<'_, A> { + fn drop(&mut self) { + unsafe { + // Use Vec::drain. "Reaffirm" the bounds checks to avoid + // panic code being inserted again. 
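The `Drop` impl whose body continues below is the only point at which the drained range is actually removed, which is what makes the "Leaking" caveat documented on `drain` observable. A quick demonstration against `std::string::String`, whose `Drain` documents the same contract this port inherits:

```rust
fn main() {
    let mut s = String::from("hello world");

    // Leak the iterator: `Drop` never runs, so the removal never happens.
    std::mem::forget(s.drain(..5));

    // With the current implementation the string is left untouched, but
    // after a leak the only documented guarantee is that it remains valid
    // UTF-8.
    println!("{s:?}");
    assert!(std::str::from_utf8(s.as_bytes()).is_ok());
}
```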
+ let self_vec = (*self.string).as_mut_vec(); + + if self.start <= self.end && self.end <= self_vec.len() { + self_vec.drain(self.start..self.end); + } + } + } +} + +impl<'a, A: Allocator> Drain<'a, A> { + /// Returns the remaining (sub)string of this iterator as a slice. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::String; + /// let mut s = String::try_from("abc")?; + /// let mut drain = s.drain(..); + /// assert_eq!(drain.as_str(), "abc"); + /// let _ = drain.next().unwrap(); + /// assert_eq!(drain.as_str(), "bc"); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[must_use] + pub fn as_str(&self) -> &str { + self.iter.as_str() + } +} + +impl<'a, A: Allocator> AsRef for Drain<'a, A> { + fn as_ref(&self) -> &str { + self.as_str() + } +} + +impl<'a, A: Allocator> AsRef<[u8]> for Drain<'a, A> { + fn as_ref(&self) -> &[u8] { + self.as_str().as_bytes() + } +} + +impl Iterator for Drain<'_, A> { + type Item = char; + + #[inline] + fn next(&mut self) -> Option { + self.iter.next() + } + + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } + + #[inline] + fn last(mut self) -> Option { + self.next_back() + } +} + +impl DoubleEndedIterator for Drain<'_, A> { + #[inline] + fn next_back(&mut self) -> Option { + self.iter.next_back() + } +} + +impl FusedIterator for Drain<'_, A> {} + +impl TryWrite for String { + #[inline] + fn try_write_str(&mut self, s: &str) -> Result<(), Error> { + self.try_push_str(s) + } + + #[inline] + fn try_write_char(&mut self, c: char) -> Result<(), Error> { + self.try_push(c) + } +} + +impl TryFromIteratorIn for String { + /// Construct a string from an iterator of characters. + /// + /// ``` + /// use rune_alloc::String; + /// use rune_alloc::prelude::*; + /// + /// let string = String::try_from_iter(['a', 'b', 'c'].into_iter())?; + /// assert_eq!(string, "abc"); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + fn try_from_iter_in(iter: I, alloc: A) -> Result + where + I: IntoIterator, + { + let mut this = String::new_in(alloc); + this.try_extend(iter)?; + Ok(this) + } +} + +impl TryExtend for String { + /// Extend a string using a character iterator. 
+ /// + /// ``` + /// use rune_alloc::{String, IteratorExt, TryExtend}; + /// let mut string = String::new(); + /// string.try_extend(['a', 'b', 'c'])?; + /// assert_eq!(string, "abc"); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + fn try_extend>(&mut self, iter: I) -> Result<(), Error> { + for value in iter { + self.try_push(value)?; + } + + Ok(()) + } +} diff --git a/crates/rune-alloc/src/alloc/string/serde.rs b/crates/rune-alloc/src/alloc/string/serde.rs new file mode 100644 index 000000000..67b191c10 --- /dev/null +++ b/crates/rune-alloc/src/alloc/string/serde.rs @@ -0,0 +1,138 @@ +use core::fmt; + +use serde::de::{self, Error, Unexpected}; +use serde::ser; + +use crate::{String, TryToOwned, Vec}; + +impl ser::Serialize for String { + fn serialize(&self, serializer: S) -> Result + where + S: ser::Serializer, + { + serializer.serialize_str(self.as_str()) + } +} + +impl<'de> de::Deserialize<'de> for String { + fn deserialize(deserializer: D) -> Result + where + D: de::Deserializer<'de>, + { + deserializer.deserialize_string(StringVisitor) + } + + fn deserialize_in_place(deserializer: D, place: &mut Self) -> Result<(), D::Error> + where + D: de::Deserializer<'de>, + { + deserializer.deserialize_string(StringInPlaceVisitor(place)) + } +} + +struct StringVisitor; +struct StringInPlaceVisitor<'a>(&'a mut String); + +impl<'de> de::Visitor<'de> for StringVisitor { + type Value = String; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a string") + } + + fn visit_str(self, v: &str) -> Result + where + E: Error, + { + v.try_to_owned().map_err(E::custom) + } + + fn visit_string(self, v: ::rust_alloc::string::String) -> Result + where + E: Error, + { + String::try_from(v).map_err(E::custom) + } + + fn visit_bytes(self, v: &[u8]) -> Result + where + E: Error, + { + match core::str::from_utf8(v) { + Ok(s) => s.try_to_owned().map_err(E::custom), + Err(_) => Err(Error::invalid_value(Unexpected::Bytes(v), &self)), + } + } + + fn visit_byte_buf(self, v: ::rust_alloc::vec::Vec) -> Result + where + E: Error, + { + let v = Vec::try_from(v).map_err(E::custom)?; + + match String::from_utf8(v) { + Ok(s) => Ok(s), + Err(e) => Err(Error::invalid_value( + Unexpected::Bytes(&e.into_bytes()), + &self, + )), + } + } +} + +impl<'a, 'de> de::Visitor<'de> for StringInPlaceVisitor<'a> { + type Value = (); + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a string") + } + + fn visit_str(self, v: &str) -> Result + where + E: Error, + { + self.0.clear(); + self.0.try_push_str(v).map_err(E::custom)?; + Ok(()) + } + + fn visit_string(self, v: ::rust_alloc::string::String) -> Result + where + E: Error, + { + *self.0 = String::try_from(v.as_str()).map_err(E::custom)?; + Ok(()) + } + + fn visit_bytes(self, v: &[u8]) -> Result + where + E: Error, + { + match core::str::from_utf8(v) { + Ok(s) => { + self.0.clear(); + self.0.try_push_str(s).map_err(E::custom)?; + Ok(()) + } + Err(_) => Err(Error::invalid_value(Unexpected::Bytes(v), &self)), + } + } + + fn visit_byte_buf(self, v: ::rust_alloc::vec::Vec) -> Result + where + E: Error, + { + let v = Vec::try_from(v).map_err(E::custom)?; + + match String::from_utf8(v) { + Ok(s) => { + *self.0 = s; + Ok(()) + } + Err(e) => Err(Error::invalid_value( + Unexpected::Bytes(&e.into_bytes()), + &self, + )), + } + } +} diff --git a/crates/rune-alloc/src/alloc/string/try_to_string.rs b/crates/rune-alloc/src/alloc/string/try_to_string.rs new file mode 100644 index 000000000..3016b711f 
--- /dev/null +++ b/crates/rune-alloc/src/alloc/string/try_to_string.rs @@ -0,0 +1,58 @@ +//! String utilities. + +use core::fmt; + +#[cfg(test)] +use crate::alloc::testing::*; +use crate::{Error, String, TryWrite}; + +/// A trait for converting a value to a `String`. +/// +/// This trait is automatically implemented for any type which implements the +/// [`Display`] trait. As such, `ToString` shouldn't be implemented directly: +/// [`Display`] should be implemented instead, and you get the `ToString` +/// implementation for free. +/// +/// [`Display`]: core::fmt::Display +pub trait TryToString { + #[cfg(test)] + fn to_string(&self) -> String { + self.try_to_string().abort() + } + + /// Converts the given value to a `String`. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::{String, TryToString}; + /// + /// let i = 5; + /// let five = String::try_from("5")?; + /// + /// assert_eq!(five, i.try_to_string()?); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + fn try_to_string(&self) -> Result; +} + +impl TryToString for T +where + T: fmt::Display, +{ + #[inline] + fn try_to_string(&self) -> Result { + let mut s = String::new(); + core::write!(s, "{}", self)?; + Ok(s) + } +} + +impl TryToString for str { + #[inline] + fn try_to_string(&self) -> Result { + String::try_from(self) + } +} diff --git a/crates/rune-alloc/src/alloc/testing.rs b/crates/rune-alloc/src/alloc/testing.rs new file mode 100644 index 000000000..21da59a75 --- /dev/null +++ b/crates/rune-alloc/src/alloc/testing.rs @@ -0,0 +1,75 @@ +use core::convert::Infallible; +use core::fmt; + +use super::{AllocError, CustomError, Error}; + +pub(crate) trait CustomTestExt { + fn custom_result(self) -> Result; +} + +impl CustomTestExt for Result> { + fn custom_result(self) -> Result { + match self { + Ok(value) => Ok(value), + Err(CustomError::Custom(error)) => Err(error), + Err(CustomError::Error(error)) => handle_error(error), + } + } +} + +pub(crate) trait TestExt { + fn abort(self) -> T; +} + +impl TestExt for Result { + fn abort(self) -> T { + match self { + Ok(value) => value, + Err(error) => match error {}, + } + } +} + +impl TestExt for Result { + fn abort(self) -> T { + match self { + Ok(value) => value, + Err(error) => handle_error(error), + } + } +} + +impl TestExt for Result { + fn abort(self) -> T { + match self { + Ok(value) => value, + Err(error) => ::rust_alloc::alloc::handle_alloc_error(error.layout), + } + } +} + +impl TestExt for Result> +where + E: fmt::Display, +{ + fn abort(self) -> T { + match self { + Ok(value) => value, + Err(error) => match error { + CustomError::Custom(error) => { + panic!("{}", error) + } + CustomError::Error(error) => handle_error(error), + }, + } + } +} + +fn handle_error(error: Error) -> ! { + match error { + Error::AllocError { error } => ::rust_alloc::alloc::handle_alloc_error(error.layout), + error => { + panic!("{}", error) + } + } +} diff --git a/crates/rune-alloc/src/alloc/try_clone.rs b/crates/rune-alloc/src/alloc/try_clone.rs new file mode 100644 index 000000000..da3377f03 --- /dev/null +++ b/crates/rune-alloc/src/alloc/try_clone.rs @@ -0,0 +1,137 @@ +use crate::alloc::Error; + +/// Fallible `Clone` trait. +pub trait TryClone: Sized { + /// Try to clone the current value, raising an allocation error if it's unsuccessful. + fn try_clone(&self) -> Result; + + /// Performs copy-assignment from `source`. 
+ /// + /// `a.try_clone_from(&b)` is equivalent to `a = b.clone()` in + /// functionality, but can be overridden to reuse the resources of `a` to + /// avoid unnecessary allocations. + #[inline] + fn try_clone_from(&mut self, source: &Self) -> Result<(), Error> { + *self = source.try_clone()?; + Ok(()) + } +} + +/// Marker trait for types which are `Copy`. +#[cfg_attr(rune_nightly, rustc_specialization_trait)] +pub trait TryCopy: TryClone {} + +impl TryClone for &T { + #[inline] + fn try_clone(&self) -> Result { + Ok(*self) + } +} + +macro_rules! impl_tuple { + ($count:expr $(, $ty:ident $var:ident $num:expr)*) => { + impl<$($ty,)*> TryClone for ($($ty,)*) where $($ty: TryClone,)* { + #[inline] + fn try_clone(&self) -> Result { + let ($($var,)*) = self; + Ok(($($var.try_clone()?,)*)) + } + } + } +} + +repeat_macro!(impl_tuple); + +macro_rules! impl_copy { + ($ty:ty) => { + impl TryClone for $ty { + #[inline] + fn try_clone(&self) -> Result { + Ok(*self) + } + } + + impl TryCopy for $ty {} + }; +} + +impl_copy!(usize); +impl_copy!(isize); +impl_copy!(u8); +impl_copy!(u16); +impl_copy!(u32); +impl_copy!(u64); +impl_copy!(u128); +impl_copy!(i8); +impl_copy!(i16); +impl_copy!(i32); +impl_copy!(i64); +impl_copy!(i128); +impl_copy!(f32); +impl_copy!(f64); + +impl_copy!(::core::num::NonZeroUsize); +impl_copy!(::core::num::NonZeroIsize); +impl_copy!(::core::num::NonZeroU8); +impl_copy!(::core::num::NonZeroU16); +impl_copy!(::core::num::NonZeroU32); +impl_copy!(::core::num::NonZeroU64); +impl_copy!(::core::num::NonZeroU128); +impl_copy!(::core::num::NonZeroI8); +impl_copy!(::core::num::NonZeroI16); +impl_copy!(::core::num::NonZeroI32); +impl_copy!(::core::num::NonZeroI64); +impl_copy!(::core::num::NonZeroI128); + +#[cfg(feature = "alloc")] +impl TryClone for ::rust_alloc::boxed::Box +where + T: TryClone, +{ + fn try_clone(&self) -> Result { + Ok(::rust_alloc::boxed::Box::new(self.as_ref().try_clone()?)) + } +} + +#[cfg(feature = "alloc")] +impl TryClone for ::rust_alloc::boxed::Box<[T]> +where + T: TryClone, +{ + fn try_clone(&self) -> Result { + // TODO: use a fallible box allocation. + let mut out = ::rust_alloc::vec::Vec::with_capacity(self.len()); + + for value in self.iter() { + out.push(value.try_clone()?); + } + + Ok(out.into()) + } +} + +#[cfg(feature = "alloc")] +impl TryClone for ::rust_alloc::string::String { + #[inline] + fn try_clone(&self) -> Result { + // TODO: use fallible allocations for component. + Ok(self.clone()) + } +} + +#[cfg(all(test, feature = "alloc"))] +impl TryClone for ::rust_alloc::vec::Vec +where + T: TryClone, +{ + #[inline] + fn try_clone(&self) -> Result { + let mut out = ::rust_alloc::vec::Vec::with_capacity(self.len()); + + for value in self { + out.push(value.try_clone()?); + } + + Ok(out) + } +} diff --git a/crates/rune-alloc/src/alloc/try_extend.rs b/crates/rune-alloc/src/alloc/try_extend.rs new file mode 100644 index 000000000..96cb0f50b --- /dev/null +++ b/crates/rune-alloc/src/alloc/try_extend.rs @@ -0,0 +1,98 @@ +use super::Error; + +/// Extend a collection with the contents of an iterator. +/// +/// Iterators produce a series of values, and collections can also be thought +/// of as a series of values. The `Extend` trait bridges this gap, allowing you +/// to extend a collection by including the contents of that iterator. When +/// extending a collection with an already existing key, that entry is updated +/// or, in the case of collections that permit multiple entries with equal +/// keys, that entry is inserted. 
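Stepping back to the `TryClone` trait defined above, before the `TryExtend` documentation continues: `TryClone` is the crate's fallible stand-in for `Clone`, and `try_clone_from` mirrors `clone_from`'s buffer-reuse contract. A usage sketch, assuming `TryClone` is implemented for `rune_alloc::Vec` as the imports in `vec/mod.rs` indicate:

```rust
use rune_alloc::prelude::*;
use rune_alloc::{Error, Vec};

fn main() -> Result<(), Error> {
    let mut a: Vec<u32> = Vec::new();
    a.try_push(1)?;
    a.try_push(2)?;

    // Fallible counterpart of `Clone::clone`: allocation failure becomes
    // an `Err` instead of an abort.
    let mut b = a.try_clone()?;
    assert_eq!(a, b);

    // Fallible counterpart of `Clone::clone_from`: may reuse `b`'s buffer.
    b.try_clone_from(&a)?;
    assert_eq!(a, b);
    Ok(())
}
```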
+/// +/// # Examples +/// +/// Basic usage: +/// +/// ``` +/// // You can extend a String with some chars: +/// let mut message = String::from("The first three letters are: "); +/// +/// message.extend(&['a', 'b', 'c']); +/// +/// assert_eq!("abc", &message[29..32]); +/// ``` +/// +/// Implementing `Extend`: +/// +/// ``` +/// // A sample collection, that's just a wrapper over Vec +/// #[derive(Debug)] +/// struct MyCollection(Vec); +/// +/// // Let's give it some methods so we can create one and add things +/// // to it. +/// impl MyCollection { +/// fn new() -> MyCollection { +/// MyCollection(Vec::new()) +/// } +/// +/// fn add(&mut self, elem: i32) { +/// self.0.push(elem); +/// } +/// } +/// +/// // since MyCollection has a list of i32s, we implement Extend for i32 +/// impl Extend for MyCollection { +/// +/// // This is a bit simpler with the concrete type signature: we can call +/// // extend on anything which can be turned into an Iterator which gives +/// // us i32s. Because we need i32s to put into MyCollection. +/// fn extend>(&mut self, iter: T) { +/// +/// // The implementation is very straightforward: loop through the +/// // iterator, and add() each element to ourselves. +/// for elem in iter { +/// self.add(elem); +/// } +/// } +/// } +/// +/// let mut c = MyCollection::new(); +/// +/// c.add(5); +/// c.add(6); +/// c.add(7); +/// +/// // let's extend our collection with three more numbers +/// c.extend(vec![1, 2, 3]); +/// +/// // we've added these elements onto the end +/// assert_eq!("MyCollection([5, 6, 7, 1, 2, 3])", format!("{c:?}")); +/// ``` +pub trait TryExtend { + /// Extends a collection with the contents of an iterator. + /// + /// As this is the only required method for this trait, the [trait-level] + /// docs contain more details. + /// + /// [trait-level]: TryExtend + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use rune_alloc::{String, TryExtend}; + /// + /// // You can extend a String with some chars: + /// let mut message = String::try_from("abc")?; + /// + /// message.try_extend(['d', 'e', 'f'].into_iter())?; + /// + /// assert_eq!("abcdef", &message); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + fn try_extend(&mut self, iter: I) -> Result<(), Error> + where + I: IntoIterator; +} diff --git a/crates/rune-alloc/src/alloc/try_from_iterator.rs b/crates/rune-alloc/src/alloc/try_from_iterator.rs new file mode 100644 index 000000000..be9db0e22 --- /dev/null +++ b/crates/rune-alloc/src/alloc/try_from_iterator.rs @@ -0,0 +1,86 @@ +use super::{Allocator, Error, Global}; + +/// Conversion from an [`Iterator`] within a custom allocator `A`. +/// +/// By implementing `TryFromIteratorIn` for a type, you define how it will be +/// created from an iterator. This is common for types which describe a +/// collection of some kind. +pub trait TryFromIteratorIn: Sized { + /// Creates a value from an iterator within an allocator. + fn try_from_iter_in(iter: I, alloc: A) -> Result + where + I: IntoIterator; +} + +/// Conversion from an [`Iterator`] within the [`Global`] allocator. +/// +/// By implementing `TryFromIteratorIn` for a type, you define how it will be created +/// from an iterator. This is common for types which describe a collection of +/// some kind. +pub trait TryFromIterator: TryFromIteratorIn { + /// Creates a value from an iterator within an allocator. 
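Its required method follows directly below. In practice `TryFromIterator` is what a fallible `collect` bottoms out in; a sketch, assuming the prelude re-exports `TryFromIterator` and the `IteratorExt::try_collect` extension already used by the `String` examples earlier in this patch:

```rust
use rune_alloc::prelude::*;
use rune_alloc::{Error, Vec};

fn main() -> Result<(), Error> {
    // Collecting through the `Global` allocator via `TryFromIterator`.
    let v: Vec<i32> = Vec::try_from_iter(0..5)?;
    assert_eq!(v, [0, 1, 2, 3, 4]);

    // The same thing through the iterator extension from the prelude.
    let w: Vec<i32> = (0..5).try_collect()?;
    assert_eq!(v, w);
    Ok(())
}
```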
+ fn try_from_iter(iter: I) -> Result + where + I: IntoIterator; +} + +impl TryFromIterator for U +where + U: TryFromIteratorIn, +{ + #[inline] + fn try_from_iter(iter: I) -> Result + where + I: IntoIterator, + { + U::try_from_iter_in(iter, Global) + } +} + +impl TryFromIteratorIn, A> for Result +where + U: TryFromIteratorIn, +{ + fn try_from_iter_in(iter: I, alloc: A) -> Result + where + I: IntoIterator>, + { + struct Iter<'a, I, E> { + error: &'a mut Option, + iter: I, + } + + impl Iterator for Iter<'_, I, E> + where + I: Iterator>, + { + type Item = T; + + fn next(&mut self) -> Option { + let value = match self.iter.next()? { + Ok(value) => value, + Err(error) => { + *self.error = Some(error); + return None; + } + }; + + Some(value) + } + } + + let mut error = None; + + let iter = Iter { + error: &mut error, + iter: iter.into_iter(), + }; + + let out = U::try_from_iter_in(iter, alloc)?; + + match error { + Some(error) => Ok(Err(error)), + None => Ok(Ok(out)), + } + } +} diff --git a/crates/rune-alloc/src/alloc/vec/drain.rs b/crates/rune-alloc/src/alloc/vec/drain.rs new file mode 100644 index 000000000..3b2855a43 --- /dev/null +++ b/crates/rune-alloc/src/alloc/vec/drain.rs @@ -0,0 +1,232 @@ +use crate::alloc::{Allocator, Global, SizedTypeProperties}; +use crate::ptr::{self, NonNull}; + +use core::fmt; +use core::iter::FusedIterator; +use core::mem::{self, ManuallyDrop}; +use core::slice::{self}; + +use super::Vec; + +/// A draining iterator for `Vec`. +/// +/// This `struct` is created by [`Vec::drain`]. +/// See its documentation for more. +/// +/// # Example +/// +/// ``` +/// let mut v = vec![0, 1, 2]; +/// let iter: std::vec::Drain<'_, _> = v.drain(..); +/// ``` +pub struct Drain<'a, T: 'a, A: Allocator + 'a = Global> { + /// Index of tail to preserve + pub(super) tail_start: usize, + /// Length of tail + pub(super) tail_len: usize, + /// Current remaining range to remove + pub(super) iter: slice::Iter<'a, T>, + pub(super) vec: NonNull>, +} + +impl fmt::Debug for Drain<'_, T, A> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("Drain").field(&self.iter.as_slice()).finish() + } +} + +impl<'a, T, A: Allocator> Drain<'a, T, A> { + /// Returns the remaining items of this iterator as a slice. + /// + /// # Examples + /// + /// ``` + /// let mut vec = vec!['a', 'b', 'c']; + /// let mut drain = vec.drain(..); + /// assert_eq!(drain.as_slice(), &['a', 'b', 'c']); + /// let _ = drain.next().unwrap(); + /// assert_eq!(drain.as_slice(), &['b', 'c']); + /// ``` + #[must_use] + pub fn as_slice(&self) -> &[T] { + self.iter.as_slice() + } + + /// Returns a reference to the underlying allocator. + #[must_use] + #[inline] + pub fn allocator(&self) -> &A { + unsafe { self.vec.as_ref().allocator() } + } + + /// Keep unyielded elements in the source `Vec`. + /// + /// # Examples + /// + /// ``` + /// let mut vec = rune_alloc::vec!['a', 'b', 'c']; + /// let mut drain = vec.drain(..); + /// + /// assert_eq!(drain.next().unwrap(), 'a'); + /// + /// // This call keeps 'b' and 'c' in the vec. + /// drain.keep_rest(); + /// + /// // If we wouldn't call `keep_rest()`, + /// // `vec` would be empty. + /// assert_eq!(vec, ['b', 'c']); + /// ``` + pub fn keep_rest(self) { + // At this moment layout looks like this: + // + // [head] [yielded by next] [unyielded] [yielded by next_back] [tail] + // ^-- start \_________/-- unyielded_len \____/-- self.tail_len + // ^-- unyielded_ptr ^-- tail + // + // Normally `Drop` impl would drop [unyielded] and then move [tail] to the `start`. 
+ // Here we want to + // 1. Move [unyielded] to `start` + // 2. Move [tail] to a new start at `start + len(unyielded)` + // 3. Update length of the original vec to `len(head) + len(unyielded) + len(tail)` + // a. In case of ZST, this is the only thing we want to do + // 4. Do *not* drop self, as everything is put in a consistent state already, there is nothing to do + let mut this = ManuallyDrop::new(self); + + unsafe { + let source_vec = this.vec.as_mut(); + + let start = source_vec.len(); + let tail = this.tail_start; + + let unyielded_len = this.iter.len(); + let unyielded_ptr = this.iter.as_slice().as_ptr(); + + // ZSTs have no identity, so we don't need to move them around. + if !T::IS_ZST { + let start_ptr = source_vec.as_mut_ptr().add(start); + + // memmove back unyielded elements + if unyielded_ptr != start_ptr { + let src = unyielded_ptr; + let dst = start_ptr; + + ptr::copy(src, dst, unyielded_len); + } + + // memmove back untouched tail + if tail != (start + unyielded_len) { + let src = source_vec.as_ptr().add(tail); + let dst = start_ptr.add(unyielded_len); + ptr::copy(src, dst, this.tail_len); + } + } + + source_vec.set_len(start + unyielded_len + this.tail_len); + } + } +} + +impl<'a, T, A: Allocator> AsRef<[T]> for Drain<'a, T, A> { + fn as_ref(&self) -> &[T] { + self.as_slice() + } +} + +unsafe impl Sync for Drain<'_, T, A> {} +unsafe impl Send for Drain<'_, T, A> {} + +impl Iterator for Drain<'_, T, A> { + type Item = T; + + #[inline] + fn next(&mut self) -> Option { + self.iter + .next() + .map(|elt| unsafe { ptr::read(elt as *const _) }) + } + + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } +} + +impl DoubleEndedIterator for Drain<'_, T, A> { + #[inline] + fn next_back(&mut self) -> Option { + self.iter + .next_back() + .map(|elt| unsafe { ptr::read(elt as *const _) }) + } +} + +impl Drop for Drain<'_, T, A> { + fn drop(&mut self) { + /// Moves back the un-`Drain`ed elements to restore the original `Vec`. + struct DropGuard<'r, 'a, T, A: Allocator>(&'r mut Drain<'a, T, A>); + + impl<'r, 'a, T, A: Allocator> Drop for DropGuard<'r, 'a, T, A> { + fn drop(&mut self) { + if self.0.tail_len > 0 { + unsafe { + let source_vec = self.0.vec.as_mut(); + // memmove back untouched tail, update to new length + let start = source_vec.len(); + let tail = self.0.tail_start; + if tail != start { + let src = source_vec.as_ptr().add(tail); + let dst = source_vec.as_mut_ptr().add(start); + ptr::copy(src, dst, self.0.tail_len); + } + source_vec.set_len(start + self.0.tail_len); + } + } + } + } + + let iter = mem::take(&mut self.iter); + let drop_len = iter.len(); + + let mut vec = self.vec; + + if T::IS_ZST { + // ZSTs have no identity, so we don't need to move them around, we only need to drop the correct amount. + // this can be achieved by manipulating the Vec length instead of moving values out from `iter`. + unsafe { + let vec = vec.as_mut(); + let old_len = vec.len(); + vec.set_len(old_len + drop_len + self.tail_len); + vec.truncate(old_len + self.tail_len); + } + + return; + } + + // ensure elements are moved back into their appropriate places, even when drop_in_place panics + let _guard = DropGuard(self); + + if drop_len == 0 { + return; + } + + // as_slice() must only be called when iter.len() is > 0 because + // it also gets touched by vec::Splice which may turn it into a dangling pointer + // which would make it and the vec pointer point to different allocations which would + // lead to invalid pointer arithmetic below. 
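The `DropGuard` declared at the top of this `drop` implementation is what makes the pointer work below safe to attempt at all: the tail is moved back into place even if dropping an element panics. The same idiom in miniature, detached from the `Vec` internals (a sketch, not this crate's code):

```rust
// A guard whose `Drop` restores an invariant unconditionally, so it also
// runs while a panic is unwinding through the scope that holds it.
struct Guard<'a>(&'a mut Vec<i32>);

impl Drop for Guard<'_> {
    fn drop(&mut self) {
        self.0.clear();
    }
}

fn main() {
    let mut v = vec![1, 2, 3];

    let result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
        let _guard = Guard(&mut v);
        panic!("element drop glue blew up");
    }));
    assert!(result.is_err());

    // The guard still ran during unwinding and restored the invariant.
    assert!(v.is_empty());
}
```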
+ let drop_ptr = iter.as_slice().as_ptr(); + + unsafe { + // drop_ptr comes from a slice::Iter which only gives us a &[T] but for drop_in_place + // a pointer with mutable provenance is necessary. Therefore we must reconstruct + // it from the original vec but also avoid creating a &mut to the front since that could + // invalidate raw pointers to it which some unsafe code might rely on. + let vec_ptr = vec.as_mut().as_mut_ptr(); + let drop_offset = ptr::sub_ptr(drop_ptr, vec_ptr); + let to_drop = ptr::slice_from_raw_parts_mut(vec_ptr.add(drop_offset), drop_len); + ptr::drop_in_place(to_drop); + } + } +} + +impl ExactSizeIterator for Drain<'_, T, A> {} + +impl FusedIterator for Drain<'_, T, A> {} diff --git a/crates/rune-alloc/src/alloc/vec/into_iter.rs b/crates/rune-alloc/src/alloc/vec/into_iter.rs new file mode 100644 index 000000000..8d463070f --- /dev/null +++ b/crates/rune-alloc/src/alloc/vec/into_iter.rs @@ -0,0 +1,267 @@ +use core::fmt; +use core::iter::FusedIterator; +use core::marker::PhantomData; +use core::mem::{self, ManuallyDrop}; +use core::slice::{self}; + +use crate::alloc::raw_vec::RawVec; +use crate::alloc::{Allocator, Global, SizedTypeProperties}; +use crate::ptr::{self, NonNull}; + +/// An iterator that moves out of a vector. +/// +/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec) +/// (provided by the [`IntoIterator`] trait). +/// +/// # Example +/// +/// ``` +/// let v = vec![0, 1, 2]; +/// let iter: std::vec::IntoIter<_> = v.into_iter(); +/// ``` +pub struct IntoIter { + pub(super) buf: NonNull, + pub(super) phantom: PhantomData, + pub(super) cap: usize, + // the drop impl reconstructs a RawVec from buf, cap and alloc + // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop + pub(super) alloc: ManuallyDrop, + pub(super) ptr: *const T, + pub(super) end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that + // ptr == end is a quick test for the Iterator being empty, that works + // for both ZST and non-ZST. +} + +impl fmt::Debug for IntoIter { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("IntoIter").field(&self.as_slice()).finish() + } +} + +impl IntoIter { + /// Returns the remaining items of this iterator as a slice. + /// + /// # Examples + /// + /// ``` + /// let vec = vec!['a', 'b', 'c']; + /// let mut into_iter = vec.into_iter(); + /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']); + /// let _ = into_iter.next().unwrap(); + /// assert_eq!(into_iter.as_slice(), &['b', 'c']); + /// ``` + pub fn as_slice(&self) -> &[T] { + unsafe { slice::from_raw_parts(self.ptr, self.len()) } + } + + /// Returns the remaining items of this iterator as a mutable slice. + /// + /// # Examples + /// + /// ``` + /// let vec = vec!['a', 'b', 'c']; + /// let mut into_iter = vec.into_iter(); + /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']); + /// into_iter.as_mut_slice()[2] = 'z'; + /// assert_eq!(into_iter.next().unwrap(), 'a'); + /// assert_eq!(into_iter.next().unwrap(), 'b'); + /// assert_eq!(into_iter.next().unwrap(), 'z'); + /// ``` + pub fn as_mut_slice(&mut self) -> &mut [T] { + unsafe { &mut *self.as_raw_mut_slice() } + } + + /// Returns a reference to the underlying allocator. + #[inline] + pub fn allocator(&self) -> &A { + &self.alloc + } + + fn as_raw_mut_slice(&mut self) -> *mut [T] { + ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len()) + } + + /// Drops remaining elements and relinquishes the backing allocation. 

+    /// This method guarantees it won't panic before relinquishing the
+    /// backing allocation.
+    ///
+    /// This is roughly equivalent to the following, but more efficient:
+    ///
+    /// ```
+    /// # let mut into_iter = Vec::<u8>::with_capacity(10).into_iter();
+    /// let mut into_iter = std::mem::replace(&mut into_iter, Vec::new().into_iter());
+    /// (&mut into_iter).for_each(drop);
+    /// std::mem::forget(into_iter);
+    /// ```
+    ///
+    /// This method is used by in-place iteration, refer to the
+    /// vec::in_place_collect documentation for an overview.
+    pub(super) fn forget_allocation_drop_remaining(&mut self) {
+        let remaining = self.as_raw_mut_slice();
+
+        // Overwrite the individual fields instead of creating a new struct
+        // and then overwriting &mut self; this creates less assembly.
+        self.cap = 0;
+        self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) };
+        self.ptr = self.buf.as_ptr();
+        self.end = self.buf.as_ptr();
+
+        // Dropping the remaining elements can panic, so this needs to be
+        // done only after updating the other fields.
+        unsafe {
+            ptr::drop_in_place(remaining);
+        }
+    }
+
+    /// Forgets the remaining elements (their destructors never run), while
+    /// still allowing the backing allocation to be freed.
+    pub(crate) fn forget_remaining_elements(&mut self) {
+        // For the ZST case, it is crucial that we mutate `end` here, not `ptr`.
+        // `ptr` must stay aligned, while `end` may be unaligned.
+        self.end = self.ptr;
+    }
+}
+
+impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
+    fn as_ref(&self) -> &[T] {
+        self.as_slice()
+    }
+}
+
+unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
+unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {}
+
+impl<T, A: Allocator> Iterator for IntoIter<T, A> {
+    type Item = T;
+
+    #[inline]
+    fn next(&mut self) -> Option<T> {
+        if self.ptr == self.end {
+            None
+        } else if T::IS_ZST {
+            // `ptr` has to stay where it is to remain aligned, so we reduce
+            // the length by 1 by reducing the `end`.
+            self.end = ptr::wrapping_byte_sub(self.end, 1);
+
+            // Make up a value of this ZST.
+            Some(unsafe { mem::zeroed() })
+        } else {
+            let old = self.ptr;
+            self.ptr = unsafe { self.ptr.add(1) };
+
+            Some(unsafe { ptr::read(old) })
+        }
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let exact = if T::IS_ZST {
+            ptr::addr(self.end).wrapping_sub(ptr::addr(self.ptr))
+        } else {
+            unsafe { ptr::sub_ptr(self.end, self.ptr) }
+        };
+        (exact, Some(exact))
+    }
+
+    #[inline]
+    fn count(self) -> usize {
+        self.len()
+    }
+}
+
+impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
+    #[inline]
+    fn next_back(&mut self) -> Option<T> {
+        if self.end == self.ptr {
+            None
+        } else if T::IS_ZST {
+            // See above for why 'ptr.offset' isn't used
+            self.end = ptr::wrapping_byte_sub(self.end, 1);
+
+            // Make up a value of this ZST.
+            Some(unsafe { mem::zeroed() })
+        } else {
+            self.end = unsafe { self.end.sub(1) };
+
+            Some(unsafe { ptr::read(self.end) })
+        }
+    }
+}
+
+impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {}
+
+impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}
+
+impl<T, A> Default for IntoIter<T, A>
+where
+    A: Allocator + Default,
+{
+    /// Creates an empty `vec::IntoIter`.
+ /// + /// ``` + /// # use std::vec; + /// let iter: vec::IntoIter = Default::default(); + /// assert_eq!(iter.len(), 0); + /// assert_eq!(iter.as_slice(), &[]); + /// ``` + fn default() -> Self { + super::Vec::new_in(Default::default()).into_iter() + } +} + +#[doc(hidden)] +pub trait NonDrop {} + +// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr +// and thus we can't implement drop-handling +impl NonDrop for T {} + +#[cfg(rune_nightly)] +unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter { + fn drop(&mut self) { + struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter); + + impl Drop for DropGuard<'_, T, A> { + fn drop(&mut self) { + unsafe { + // `IntoIter::alloc` is not used anymore after this and will be dropped by RawVec + let alloc = ManuallyDrop::take(&mut self.0.alloc); + // RawVec handles deallocation + let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc); + } + } + } + + let guard = DropGuard(self); + // destroy the remaining elements + unsafe { + ptr::drop_in_place(guard.0.as_raw_mut_slice()); + } + // now `guard` will be dropped and do the rest + } +} + +#[cfg(not(rune_nightly))] +impl Drop for IntoIter { + fn drop(&mut self) { + struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter); + + impl Drop for DropGuard<'_, T, A> { + fn drop(&mut self) { + unsafe { + // `IntoIter::alloc` is not used anymore after this and will be dropped by RawVec + let alloc = ManuallyDrop::take(&mut self.0.alloc); + // RawVec handles deallocation + let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc); + } + } + } + + let guard = DropGuard(self); + // destroy the remaining elements + unsafe { + ptr::drop_in_place(guard.0.as_raw_mut_slice()); + } + // now `guard` will be dropped and do the rest + } +} diff --git a/crates/rune-alloc/src/alloc/vec/is_zero.rs b/crates/rune-alloc/src/alloc/vec/is_zero.rs new file mode 100644 index 000000000..eaff447ff --- /dev/null +++ b/crates/rune-alloc/src/alloc/vec/is_zero.rs @@ -0,0 +1,180 @@ +use core::num::{Saturating, Wrapping}; + +use crate::boxed::Box; + +#[rustc_specialization_trait] +pub(super) unsafe trait IsZero { + /// Whether this value's representation is all zeros, + /// or can be represented with all zeroes. + fn is_zero(&self) -> bool; +} + +macro_rules! impl_is_zero { + ($t:ty, $is_zero:expr) => { + unsafe impl IsZero for $t { + #[inline] + fn is_zero(&self) -> bool { + $is_zero(*self) + } + } + }; +} + +impl_is_zero!(i8, |x| x == 0); // It is needed to impl for arrays and tuples of i8. +impl_is_zero!(i16, |x| x == 0); +impl_is_zero!(i32, |x| x == 0); +impl_is_zero!(i64, |x| x == 0); +impl_is_zero!(i128, |x| x == 0); +impl_is_zero!(isize, |x| x == 0); + +impl_is_zero!(u8, |x| x == 0); // It is needed to impl for arrays and tuples of u8. 
+impl_is_zero!(u16, |x| x == 0);
+impl_is_zero!(u32, |x| x == 0);
+impl_is_zero!(u64, |x| x == 0);
+impl_is_zero!(u128, |x| x == 0);
+impl_is_zero!(usize, |x| x == 0);
+
+impl_is_zero!(bool, |x| x == false);
+impl_is_zero!(char, |x| x == '\0');
+
+impl_is_zero!(f32, |x: f32| x.to_bits() == 0);
+impl_is_zero!(f64, |x: f64| x.to_bits() == 0);
+
+unsafe impl<T: ?Sized> IsZero for *const T {
+    #[inline]
+    fn is_zero(&self) -> bool {
+        (*self).is_null()
+    }
+}
+
+unsafe impl<T: ?Sized> IsZero for *mut T {
+    #[inline]
+    fn is_zero(&self) -> bool {
+        (*self).is_null()
+    }
+}
+
+unsafe impl<T: IsZero, const N: usize> IsZero for [T; N] {
+    #[inline]
+    fn is_zero(&self) -> bool {
+        // Because this is generated as a runtime check, it's not obvious that
+        // it's worth doing if the array is really long. The threshold here
+        // is largely arbitrary, but was picked because as of 2022-07-01 LLVM
+        // fails to const-fold the check in `vec![[1; 32]; n]`
+        // See https://github.com/rust-lang/rust/pull/97581#issuecomment-1166628022
+        // Feel free to tweak if you have better evidence.
+
+        N <= 16 && self.iter().all(IsZero::is_zero)
+    }
+}
+
+// This is a recursive macro.
+macro_rules! impl_for_tuples {
+    // Stopper
+    () => {
+        // No use for implementing for empty tuple because it is ZST.
+    };
+    ($first_arg:ident $(,$rest:ident)*) => {
+        unsafe impl <$first_arg: IsZero, $($rest: IsZero,)*> IsZero for ($first_arg, $($rest,)*){
+            #[inline]
+            fn is_zero(&self) -> bool{
+                // Destructure the tuple to N references.
+                // Rust allows generic params to be shadowed by local variable names.
+                #[allow(non_snake_case)]
+                let ($first_arg, $($rest,)*) = self;
+
+                $first_arg.is_zero()
+                    $( && $rest.is_zero() )*
+            }
+        }
+
+        impl_for_tuples!($($rest),*);
+    }
+}
+
+impl_for_tuples!(A, B, C, D, E, F, G, H);
+
+// `Option<&T>` and `Option<Box<T>>` are guaranteed to represent `None` as null.
+// For fat pointers, the bytes that would be the pointer metadata in the `Some`
+// variant are padding in the `None` variant, so ignoring them and
+// zero-initializing instead is ok.
+// `Option<&mut T>` never implements `Clone`, so there's no need for an impl of
+// `SpecFromElem`.
+
+unsafe impl<T: ?Sized> IsZero for Option<&T> {
+    #[inline]
+    fn is_zero(&self) -> bool {
+        self.is_none()
+    }
+}
+
+unsafe impl<T: ?Sized> IsZero for Option<Box<T>> {
+    #[inline]
+    fn is_zero(&self) -> bool {
+        self.is_none()
+    }
+}
+
+// `Option<NonZeroU32>` and similar have a representation guarantee that
+// they're the same size as the corresponding `u32` type, as well as a guarantee
+// that transmuting between `NonZeroU32` and `Option<NonZeroU32>` works.
+// While the documentation officially makes it UB to transmute from `None`,
+// this mirrors the standard library's reasoning: the only niche available to
+// represent `None` is the one that's all zeros.
+
+macro_rules! impl_is_zero_option_of_nonzero {
+    ($($t:ident,)+) => {$(
+        unsafe impl IsZero for Option<core::num::$t> {
+            #[inline]
+            fn is_zero(&self) -> bool {
+                self.is_none()
+            }
+        }
+    )+};
+}
+
+impl_is_zero_option_of_nonzero!(
+    NonZeroU8,
+    NonZeroU16,
+    NonZeroU32,
+    NonZeroU64,
+    NonZeroU128,
+    NonZeroI8,
+    NonZeroI16,
+    NonZeroI32,
+    NonZeroI64,
+    NonZeroI128,
+    NonZeroUsize,
+    NonZeroIsize,
+);
+
+macro_rules!
impl_is_zero_option_of_num { + ($($t:ty,)+) => {$( + unsafe impl IsZero for Option<$t> { + #[inline] + fn is_zero(&self) -> bool { + const { + let none: Self = unsafe { core::mem::MaybeUninit::zeroed().assume_init() }; + assert!(none.is_none()); + } + self.is_none() + } + } + )+}; +} + +impl_is_zero_option_of_num!(u8, u16, u32, u64, u128, i8, i16, i32, i64, i128, usize, isize,); + +unsafe impl IsZero for Wrapping { + #[inline] + fn is_zero(&self) -> bool { + self.0.is_zero() + } +} + +unsafe impl IsZero for Saturating { + #[inline] + fn is_zero(&self) -> bool { + self.0.is_zero() + } +} diff --git a/crates/rune-alloc/src/alloc/vec/mod.rs b/crates/rune-alloc/src/alloc/vec/mod.rs new file mode 100644 index 000000000..f892e33cc --- /dev/null +++ b/crates/rune-alloc/src/alloc/vec/mod.rs @@ -0,0 +1,2940 @@ +//! A contiguous growable array type with heap-allocated contents, written +//! `Vec`. +//! +//! Vectors have *O*(1) indexing, amortized *O*(1) push (to the end) and +//! *O*(1) pop (from the end). +//! +//! Vectors ensure they never allocate more than `isize::MAX` bytes. +//! +//! # Examples +//! +//! You can explicitly create a [`Vec`] with [`Vec::new`]: +//! +//! ``` +//! use rune_alloc::Vec; +//! +//! let v: Vec = Vec::new(); +//! ``` +//! +//! ...or by using the [`try_vec!`][crate::try_vec!] macro: +//! +//! ``` +//! use rune_alloc::Vec; +//! +//! let v: Vec = rune_alloc::try_vec![]; +//! +//! let v = rune_alloc::try_vec![1, 2, 3, 4, 5]; +//! +//! let v = rune_alloc::try_vec![0; 10]; // ten zeroes +//! # Ok::<_, rune_alloc::Error>(()) +//! ``` +//! +//! You can [`try_push`] values onto the end of a vector (which will grow the vector +//! as needed): +//! +//! ``` +//! let mut v = rune_alloc::try_vec![1, 2]; +//! +//! v.try_push(3)?; +//! # Ok::<_, rune_alloc::Error>(()) +//! ``` +//! +//! Popping values works in much the same way: +//! +//! ``` +//! let mut v = rune_alloc::try_vec![1, 2]; +//! +//! let two = v.pop(); +//! # Ok::<_, rune_alloc::Error>(()) +//! ``` +//! +//! Vectors also support indexing (through the [`Index`] and [`IndexMut`] traits): +//! +//! ``` +//! let mut v = rune_alloc::try_vec![1, 2, 3]; +//! let three = v[2]; +//! v[1] = v[1] + 5; +//! # Ok::<_, rune_alloc::Error>(()) +//! ``` +//! +//! [`try_push`]: Vec::try_push + +pub(crate) use self::drain::Drain; + +mod drain; +pub use self::into_iter::IntoIter; + +mod into_iter; + +mod partial_eq; + +use self::spec_from_elem::SpecFromElem; +mod spec_from_elem; + +use self::spec_extend::SpecExtend; +mod spec_extend; + +use self::set_len_on_drop::SetLenOnDrop; +mod set_len_on_drop; + +mod splice; + +#[cfg(rune_nightly)] +use self::is_zero::IsZero; +#[cfg(rune_nightly)] +mod is_zero; + +pub use crate::alloc::slice::into_vec; + +use core::borrow::Borrow; +use core::cmp; +use core::cmp::Ordering; +use core::fmt; +use core::hash::{Hash, Hasher}; +use core::iter; +use core::marker::PhantomData; +use core::mem::{self, ManuallyDrop, MaybeUninit}; +use core::ops::{self, Index, IndexMut, Range, RangeBounds}; +use core::slice::{self, SliceIndex}; + +#[cfg(test)] +use crate::alloc::testing::*; +use crate::ptr::{self, NonNull}; +use crate::slice::range as slice_range; +use crate::slice::{RawIter, RawIterMut}; + +use super::raw_vec::RawVec; +use super::{ + Allocator, Box, Error, Global, SizedTypeProperties, TryClone, TryExtend, TryFromIteratorIn, +}; + +/// Construct a vector from an element that can be cloned. 
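The hidden `try_from_elem` helper follows. It is the entry point behind `try_vec![elem; n]`, and the `SpecFromElem`/`IsZero` machinery above is what lets zero-filled requests skip element-by-element writes on nightly. A usage sketch; the doc examples in this patch suggest `try_vec!` propagates its allocation error internally, which is why no explicit `?` appears at the macro call site:

```rust
use rune_alloc::{Error, Vec};

fn zeroed_block() -> Result<Vec<u8>, Error> {
    // On nightly (`rune_nightly`), the `IsZero` specialization above lets
    // this take a zeroed-allocation fast path instead of writing each byte;
    // on stable it simply fills. The macro is assumed to propagate the
    // allocation error itself, as the module docs imply.
    Ok(rune_alloc::try_vec![0u8; 4096])
}

fn main() -> Result<(), Error> {
    let v = zeroed_block()?;
    assert_eq!(v.len(), 4096);
    assert!(v.iter().all(|&b| b == 0));
    Ok(())
}
```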
+#[doc(hidden)] +pub fn try_from_elem(elem: T, n: usize) -> Result, Error> { + ::from_elem(elem, n, Global) +} + +/// A contiguous growable array type, written as `Vec`, short for 'vector'. +/// +/// # Examples +/// +/// ``` +/// use rune_alloc::Vec; +/// use rune_alloc::prelude::*; +/// +/// let mut vec = Vec::new(); +/// vec.try_push(1)?; +/// vec.try_push(2)?; +/// +/// assert_eq!(vec.len(), 2); +/// assert_eq!(vec[0], 1); +/// +/// assert_eq!(vec.pop(), Some(2)); +/// assert_eq!(vec.len(), 1); +/// +/// vec[0] = 7; +/// assert_eq!(vec[0], 7); +/// +/// vec.try_extend([1, 2, 3])?; +/// +/// for x in &vec { +/// println!("{x}"); +/// } +/// +/// assert_eq!(vec, [7, 1, 2, 3]); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// The [`try_vec!`][crate::try_vec!] macro is provided for convenient +/// initialization: +/// +/// ``` +/// use rune_alloc::Vec; +/// +/// let mut vec1 = rune_alloc::try_vec![1, 2, 3]; +/// vec1.try_push(4)?; +/// let vec2 = Vec::try_from([1, 2, 3, 4])?; +/// assert_eq!(vec1, vec2); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// It can also initialize each element of a `Vec` with a given value. +/// This may be more efficient than performing allocation and initialization +/// in separate steps, especially when initializing a vector of zeros: +/// +/// ``` +/// use rune_alloc::Vec; +/// +/// let vec = rune_alloc::try_vec![0; 5]; +/// assert_eq!(vec, [0, 0, 0, 0, 0]); +/// +/// // The following is equivalent, but potentially slower: +/// let mut vec = Vec::try_with_capacity(5)?; +/// vec.try_resize(5, 0)?; +/// assert_eq!(vec, [0, 0, 0, 0, 0]); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// For more information, see +/// [Capacity and Reallocation](#capacity-and-reallocation). +/// +/// Use a `Vec` as an efficient stack: +/// +/// ``` +/// use rune_alloc::Vec; +/// +/// let mut stack = Vec::new(); +/// +/// stack.try_push(1)?; +/// stack.try_push(2)?; +/// stack.try_push(3)?; +/// +/// while let Some(top) = stack.pop() { +/// // Prints 3, 2, 1 +/// println!("{top}"); +/// } +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// # Indexing +/// +/// The `Vec` type allows to access values by index, because it implements the +/// [`Index`] trait. An example will be more explicit: +/// +/// ``` +/// let v = rune_alloc::try_vec![0, 2, 4, 6]; +/// println!("{}", v[1]); // it will display '2' +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// However be careful: if you try to access an index which isn't in the `Vec`, +/// your software will panic! You cannot do this: +/// +/// ```should_panic +/// let v = rune_alloc::try_vec![0, 2, 4, 6]; +/// println!("{}", v[6]); // it will panic! +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// Use [`get`] and [`get_mut`] if you want to check whether the index is in +/// the `Vec`. +/// +/// # Slicing +/// +/// A `Vec` can be mutable. On the other hand, slices are read-only objects. +/// To get a [slice][prim@slice], use [`&`]. Example: +/// +/// ``` +/// fn read_slice(slice: &[usize]) { +/// // ... +/// } +/// +/// let v = rune_alloc::try_vec![0, 1]; +/// read_slice(&v); +/// +/// // ... and that's all! +/// // you can also do it like this: +/// let u: &[usize] = &v; +/// // or like this: +/// let u: &[_] = &v; +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// In Rust, it's more common to pass slices as arguments rather than vectors +/// when you just want to provide read access. The same goes for [`String`] and +/// [`&str`]. 
+///
+/// # Capacity and reallocation
+///
+/// The capacity of a vector is the amount of space allocated for any future
+/// elements that will be added onto the vector. This is not to be confused
+/// with the *length* of a vector, which specifies the number of actual
+/// elements within the vector. If a vector's length exceeds its capacity, its
+/// capacity will automatically be increased, but its elements will have to be
+/// reallocated.
+///
+/// For example, a vector with capacity 10 and length 0 would be an empty
+/// vector with space for 10 more elements. Pushing 10 or fewer elements onto
+/// the vector will not change its capacity or cause reallocation to occur.
+/// However, if the vector's length is increased to 11, it will have to
+/// reallocate, which can be slow. For this reason, it is recommended to use
+/// [`Vec::try_with_capacity`] whenever possible to specify how big the vector
+/// is expected to get.
+///
+/// # Guarantees
+///
+/// Due to its incredibly fundamental nature, `Vec` makes a lot of guarantees
+/// about its design. This ensures that it's as low-overhead as possible in
+/// the general case, and can be correctly manipulated in primitive ways
+/// by unsafe code. Note that these guarantees refer to an unqualified `Vec`.
+/// If additional type parameters are added (e.g., to support custom allocators),
+/// overriding their defaults may change the behavior.
+///
+/// Most fundamentally, `Vec` is and always will be a (pointer, capacity, length)
+/// triplet. No more, no less. The order of these fields is completely
+/// unspecified, and you should use the appropriate methods to modify these.
+/// The pointer will never be null, so this type is null-pointer-optimized.
+///
+/// However, the pointer might not actually point to allocated memory. In
+/// particular, if you construct a `Vec` with capacity 0 via [`Vec::new`],
+/// [`try_vec![]`], [`Vec::try_with_capacity(0)`], or by calling
+/// [`try_shrink_to_fit`] on an empty Vec, it will not allocate memory.
+/// Similarly, if you store zero-sized types inside a `Vec`, it will not
+/// allocate space for them. *Note that in this case the `Vec` might not report
+/// a [`capacity`] of 0*. `Vec` will allocate if and only if
+/// `mem::size_of::<T>() * capacity() > 0`. In general, `Vec`'s allocation
+/// details are very subtle --- if you intend to allocate memory using a `Vec`
+/// and use it for something else (either to pass to unsafe code, or to build
+/// your own memory-backed collection), be sure to deallocate this memory by
+/// using `from_raw_parts` to recover the `Vec` and then dropping it.
+///
+/// [`try_vec![]`]: try_vec!
+/// [`Vec::try_with_capacity(0)`]: Vec::try_with_capacity
+///
+/// If a `Vec` *has* allocated memory, then the memory it points to is on the
+/// heap (as defined by the allocator Rust is configured to use by default),
+/// and its pointer points to [`len`] initialized, contiguous elements in order
+/// (what you would see if you coerced it to a slice), followed by
+/// `capacity - len` logically uninitialized, contiguous elements.
+///
+/// A vector containing the elements `'a'` and `'b'` with capacity 4 can be
+/// visualized as below. The top part is the `Vec` struct, it contains a
+/// pointer to the head of the allocation in the heap, length and capacity.
+/// The bottom part is the allocation on the heap, a contiguous memory block.
+/// +/// ```text +/// ptr len capacity +/// +--------+--------+--------+ +/// | 0x0123 | 2 | 4 | +/// +--------+--------+--------+ +/// | +/// v +/// Heap +--------+--------+--------+--------+ +/// | 'a' | 'b' | uninit | uninit | +/// +--------+--------+--------+--------+ +/// ``` +/// +/// - **uninit** represents memory that is not initialized, see [`MaybeUninit`]. +/// - Note: the ABI is not stable and `Vec` makes no guarantees about its memory +/// layout (including the order of fields). +/// +/// `Vec` will never perform a "small optimization" where elements are actually +/// stored on the stack for two reasons: +/// +/// * It would make it more difficult for unsafe code to correctly manipulate +/// a `Vec`. The contents of a `Vec` wouldn't have a stable address if it were +/// only moved, and it would be more difficult to determine if a `Vec` had +/// actually allocated memory. +/// +/// * It would penalize the general case, incurring an additional branch +/// on every access. +/// +/// `Vec` will never automatically shrink itself, even if completely empty. This +/// ensures no unnecessary allocations or deallocations occur. Emptying a `Vec` +/// and then filling it back up to the same [`len`] should incur no calls to the +/// allocator. If you wish to free up unused memory, use [`try_shrink_to_fit`] +/// or [`try_shrink_to`]. +/// +/// [`try_push`] and [`try_insert`] will never (re)allocate if the reported capacity is +/// sufficient. [`try_push`] and [`try_insert`] *will* (re)allocate if +/// [len] == [capacity]. That is, the reported capacity is completely +/// accurate, and can be relied on. It can even be used to manually free the memory +/// allocated by a `Vec` if desired. Bulk insertion methods *may* reallocate, even +/// when not necessary. +/// +/// `Vec` does not guarantee any particular growth strategy when reallocating +/// when full, nor when [`try_reserve`] is called. The current strategy is basic +/// and it may prove desirable to use a non-constant growth factor. Whatever +/// strategy is used will of course guarantee *O*(1) amortized [`try_push`]. +/// +/// `try_vec![x; n]`, `try_vec![a, b, c, d]`, and [`Vec::try_with_capacity(n)`], +/// will all produce a `Vec` with exactly the requested capacity. If [len] +/// == [capacity], (as is the case for the [`try_vec!`] macro), then a +/// `Vec` can be converted to and from a [`Box<[T]>`][owned slice] without +/// reallocating or moving the elements. +/// +/// [`Vec::try_with_capacity(n)`]: Vec::try_with_capacity +/// +/// `Vec` will not specifically overwrite any data that is removed from it, +/// but also won't specifically preserve it. Its uninitialized memory is +/// scratch space that it may use however it wants. It will generally just do +/// whatever is most efficient or otherwise easy to implement. Do not rely on +/// removed data to be erased for security purposes. Even if you drop a `Vec`, its +/// buffer may simply be reused by another allocation. Even if you zero a `Vec`'s memory +/// first, that might not actually happen because the optimizer does not consider +/// this a side-effect that must be preserved. There is one case which we will +/// not break, however: using `unsafe` code to write to the excess capacity, +/// and then increasing the length to match, is always valid. +/// +/// Currently, `Vec` does not guarantee the order in which elements are dropped. +/// The order has changed in the past and may change again. 
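The final guarantee above, writing through the spare capacity and then bumping the length, is worth seeing concretely. A sketch using std's `Vec`, which documents the identical rule:

```rust
fn main() {
    let mut v: Vec<u8> = Vec::with_capacity(4);

    unsafe {
        let spare = v.as_mut_ptr();
        for i in 0..4 {
            // Writes into allocated-but-uninitialized capacity.
            spare.add(i).write(i as u8 * 2);
        }
        // SAFETY: the first 4 elements are now initialized and 4 <= capacity,
        // so increasing the length to match is sound.
        v.set_len(4);
    }

    assert_eq!(v, [0, 2, 4, 6]);
}
```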
+///
+/// [`get`]: slice::get
+/// [`get_mut`]: slice::get_mut
+/// [`String`]: crate::string::String
+/// [`&str`]: type@str
+/// [`try_shrink_to_fit`]: Vec::try_shrink_to_fit
+/// [`try_shrink_to`]: Vec::try_shrink_to
+/// [capacity]: Vec::capacity
+/// [`capacity`]: Vec::capacity
+/// [mem::size_of::\<T\>]: core::mem::size_of
+/// [len]: Vec::len
+/// [`len`]: Vec::len
+/// [`try_push`]: Vec::try_push
+/// [`try_insert`]: Vec::try_insert
+/// [`try_reserve`]: Vec::try_reserve
+/// [`MaybeUninit`]: core::mem::MaybeUninit
+/// [owned slice]: Box
+pub struct Vec<T, A: Allocator = Global> {
+    buf: RawVec<T, A>,
+    len: usize,
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Inherent methods
+////////////////////////////////////////////////////////////////////////////////
+
+impl<T> Vec<T> {
+    /// Constructs a new, empty `Vec<T>`.
+    ///
+    /// The vector will not allocate until elements are pushed onto it.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #![allow(unused_mut)]
+    /// use rune_alloc::Vec;
+    ///
+    /// let mut vec: Vec<i32> = Vec::new();
+    /// ```
+    #[inline]
+    #[must_use]
+    pub const fn new() -> Self {
+        Vec {
+            buf: RawVec::NEW,
+            len: 0,
+        }
+    }
+
+    /// Constructs a new, empty `Vec<T>` with at least the specified capacity.
+    ///
+    /// The vector will be able to hold at least `capacity` elements without
+    /// reallocating. This method is allowed to allocate for more elements than
+    /// `capacity`. If `capacity` is 0, the vector will not allocate.
+    ///
+    /// It is important to note that although the returned vector has the
+    /// minimum *capacity* specified, the vector will have a zero *length*. For
+    /// an explanation of the difference between length and capacity, see
+    /// *[Capacity and reallocation]*.
+    ///
+    /// If it is important to know the exact allocated capacity of a `Vec`,
+    /// always use the [`capacity`] method after construction.
+    ///
+    /// For `Vec<T>` where `T` is a zero-sized type, there will be no allocation
+    /// and the capacity will always be `usize::MAX`.
+    ///
+    /// [Capacity and reallocation]: #capacity-and-reallocation
+    /// [`capacity`]: Vec::capacity
+    ///
+    /// # Errors
+    ///
+    /// Errors with [`Error::CapacityOverflow`] if the new capacity exceeds
+    /// `isize::MAX` bytes.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::Vec;
+    ///
+    /// let mut vec = Vec::try_with_capacity(10)?;
+    ///
+    /// // The vector contains no items, even though it has capacity for more
+    /// assert_eq!(vec.len(), 0);
+    /// assert!(vec.capacity() >= 10);
+    ///
+    /// // These are all done without reallocating...
+    /// for i in 0..10 {
+    ///     vec.try_push(i)?;
+    /// }
+    /// assert_eq!(vec.len(), 10);
+    /// assert!(vec.capacity() >= 10);
+    ///
+    /// // ...but this may make the vector reallocate
+    /// vec.try_push(11)?;
+    /// assert_eq!(vec.len(), 11);
+    /// assert!(vec.capacity() >= 11);
+    ///
+    /// // A vector of a zero-sized type will always over-allocate, since no
+    /// // allocation is necessary
+    /// let vec_units = Vec::<()>::try_with_capacity(10)?;
+    /// assert_eq!(vec_units.capacity(), usize::MAX);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[inline]
+    pub fn try_with_capacity(capacity: usize) -> Result<Self, Error> {
+        Self::try_with_capacity_in(capacity, Global)
+    }
+}
+
+impl<T, A: Allocator> Vec<T, A> {
+    /// Constructs a new, empty `Vec<T, A>`.
+    ///
+    /// The vector will not allocate until elements are pushed onto it.
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{Vec, Global}; + /// + /// let mut vec: Vec = Vec::new_in(Global); + /// ``` + #[inline] + pub const fn new_in(alloc: A) -> Self { + Vec { + buf: RawVec::new_in(alloc), + len: 0, + } + } + + /// Constructs a new, empty `Vec` with at least the specified capacity + /// with the provided allocator. + /// + /// The vector will be able to hold at least `capacity` elements without + /// reallocating. This method is allowed to allocate for more elements than + /// `capacity`. If `capacity` is 0, the vector will not allocate. + /// + /// It is important to note that although the returned vector has the + /// minimum *capacity* specified, the vector will have a zero *length*. For + /// an explanation of the difference between length and capacity, see + /// *[Capacity and reallocation]*. + /// + /// If it is important to know the exact allocated capacity of a `Vec`, + /// always use the [`capacity`] method after construction. + /// + /// For `Vec` where `T` is a zero-sized type, there will be no + /// allocation and the capacity will always be `usize::MAX`. + /// + /// [Capacity and reallocation]: #capacity-and-reallocation + /// [`capacity`]: Vec::capacity + /// + /// # Errors + /// + /// Errors with [`Error::CapacityOverflow`] if the new capacity exceeds + /// `isize::MAX` bytes. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{Vec, Global}; + /// + /// let mut vec = Vec::try_with_capacity_in(10, Global)?; + /// + /// // The vector contains no items, even though it has capacity for more + /// assert_eq!(vec.len(), 0); + /// assert!(vec.capacity() >= 10); + /// + /// // These are all done without reallocating... + /// for i in 0..10 { + /// vec.try_push(i)?; + /// } + /// + /// assert_eq!(vec.len(), 10); + /// assert!(vec.capacity() >= 10); + /// + /// // ...but this may make the vector reallocate + /// vec.try_push(11)?; + /// assert_eq!(vec.len(), 11); + /// assert!(vec.capacity() >= 11); + /// + /// // A vector of a zero-sized type will always over-allocate, since no + /// // allocation is necessary + /// let vec_units = Vec::<(), Global>::try_with_capacity_in(10, Global)?; + /// assert_eq!(vec_units.capacity(), usize::MAX); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn try_with_capacity_in(capacity: usize, alloc: A) -> Result { + Ok(Vec { + buf: RawVec::try_with_capacity_in(capacity, alloc)?, + len: 0, + }) + } + + /// Creates a `Vec` directly from a pointer, a capacity, a length, and + /// an allocator. + /// + /// # Safety + /// + /// This is highly unsafe, due to the number of invariants that aren't + /// checked: + /// + /// * `ptr` must be [*currently allocated*] via the given allocator `alloc`. + /// * `T` needs to have the same alignment as what `ptr` was allocated with. + /// (`T` having a less strict alignment is not sufficient, the alignment + /// really needs to be equal to satisfy the [`dealloc`] requirement that + /// memory must be allocated and deallocated with the same layout.) + /// * The size of `T` times the `capacity` (ie. the allocated size in bytes) + /// needs to be the same size as the pointer was allocated with. (Because + /// similar to alignment, [`dealloc`] must be called with the same layout + /// `size`.) + /// * `length` needs to be less than or equal to `capacity`. + /// * The first `length` values must be properly initialized values of type + /// `T`. + /// * `capacity` needs to [*fit*] the layout size that the pointer was + /// allocated with. 
+ /// * The allocated size in bytes must be no larger than `isize::MAX`. See + /// the safety documentation of [`pointer::offset`]. + /// + /// These requirements are always upheld by any `ptr` that has been + /// allocated via `Vec`. Other allocation sources are allowed if the + /// invariants are upheld. + /// + /// Violating these may cause problems like corrupting the allocator's + /// internal data structures. For example it is **not** safe to build a + /// `Vec` from a pointer to a C `char` array with length `size_t`. It's + /// also not safe to build one from a `Vec` and its length, because the + /// allocator cares about the alignment, and these two types have different + /// alignments. The buffer was allocated with alignment 2 (for `u16`), but + /// after turning it into a `Vec` it'll be deallocated with alignment 1. + /// + /// The ownership of `ptr` is effectively transferred to the `Vec` which + /// may then deallocate, reallocate or change the contents of memory pointed + /// to by the pointer at will. Ensure that nothing else uses the pointer + /// after calling this function. + /// + /// [`String`]: crate::string::String + /// [`dealloc`]: crate::alloc::Allocator::deallocate + /// [*currently allocated*]: crate::alloc::Allocator#currently-allocated-memory + /// [*fit*]: crate::alloc::Allocator#memory-fitting + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{Vec, Global}; + /// + /// use std::ptr; + /// use std::mem; + /// + /// let mut v = Vec::try_with_capacity_in(3, Global)?; + /// v.try_push(1)?; + /// v.try_push(2)?; + /// v.try_push(3)?; + /// + /// // Prevent running `v`'s destructor so we are in complete control + /// // of the allocation. + /// let mut v = mem::ManuallyDrop::new(v); + /// + /// // Pull out the various important pieces of information about `v` + /// let p = v.as_mut_ptr(); + /// let len = v.len(); + /// let cap = v.capacity(); + /// let alloc = v.allocator(); + /// + /// unsafe { + /// // Overwrite memory with 4, 5, 6 + /// for i in 0..len { + /// ptr::write(p.add(i), 4 + i); + /// } + /// + /// // Put everything back together into a Vec + /// let rebuilt = Vec::from_raw_parts_in(p, len, cap, alloc.clone()); + /// assert_eq!(rebuilt, [4, 5, 6]); + /// } + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// Using memory that was allocated elsewhere: + /// + /// ```rust + /// use core::alloc::Layout; + /// use rune_alloc::{Vec, AllocError, Allocator, Global}; + /// + /// let layout = Layout::array::(16).expect("overflow cannot happen"); + /// + /// let vec = unsafe { + /// let mem = match Global.allocate(layout) { + /// Ok(mem) => mem.cast::().as_ptr(), + /// Err(AllocError) => return, + /// }; + /// + /// mem.write(1_000_000); + /// + /// Vec::from_raw_parts_in(mem, 1, 16, Global) + /// }; + /// + /// assert_eq!(vec, &[1_000_000]); + /// assert_eq!(vec.capacity(), 16); + /// ``` + /// + /// [`pointer::offset`]: primitive@pointer + #[inline] + pub unsafe fn from_raw_parts_in(ptr: *mut T, length: usize, capacity: usize, alloc: A) -> Self { + unsafe { + Vec { + buf: RawVec::from_raw_parts_in(ptr, capacity, alloc), + len: length, + } + } + } + + /// Returns a reference to the underlying allocator. + #[inline] + pub fn allocator(&self) -> &A { + self.buf.allocator() + } + + pub(crate) fn into_raw_vec(self) -> (RawVec, usize) { + let me = ManuallyDrop::new(self); + let buf = unsafe { ptr::read(&me.buf) }; + (buf, me.len) + } + + /// Decomposes a `Vec` into its raw components. 
+ /// + /// Returns the raw pointer to the underlying data, the length of the vector + /// (in elements), the allocated capacity of the data (in elements), and the + /// allocator. These are the same arguments in the same order as the + /// arguments to [`from_raw_parts_in`]. + /// + /// After calling this function, the caller is responsible for the memory + /// previously managed by the `Vec`. The only way to do this is to convert + /// the raw pointer, length, and capacity back into a `Vec` with the + /// [`from_raw_parts_in`] function, allowing the destructor to perform the + /// cleanup. + /// + /// [`from_raw_parts_in`]: Vec::from_raw_parts_in + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{Vec, Global}; + /// + /// let mut v: Vec = Vec::new_in(Global); + /// v.try_push(-1)?; + /// v.try_push(0)?; + /// v.try_push(1)?; + /// + /// let (ptr, len, cap, alloc) = v.into_raw_parts_with_alloc(); + /// + /// let rebuilt = unsafe { + /// // We can now make changes to the components, such as + /// // transmuting the raw pointer to a compatible type. + /// let ptr = ptr as *mut u32; + /// + /// Vec::from_raw_parts_in(ptr, len, cap, alloc) + /// }; + /// + /// assert_eq!(rebuilt, [4294967295, 0, 1]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn into_raw_parts_with_alloc(self) -> (*mut T, usize, usize, A) { + let mut me = ManuallyDrop::new(self); + let len = me.len(); + let capacity = me.capacity(); + let ptr = me.as_mut_ptr(); + let alloc = unsafe { ptr::read(me.allocator()) }; + (ptr, len, capacity, alloc) + } + + /// Returns the total number of elements the vector can hold without + /// reallocating. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{Vec, Global}; + /// + /// let mut vec: Vec = Vec::try_with_capacity_in(10, Global)?; + /// vec.try_push(42)?; + /// assert!(vec.capacity() >= 10); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn capacity(&self) -> usize { + self.buf.capacity() + } + + /// Tries to reserve capacity for at least `additional` more elements to be inserted + /// in the given `Vec`. The collection may reserve more space to speculatively avoid + /// frequent reallocations. After calling `try_reserve`, capacity will be + /// greater than or equal to `self.len() + additional` if it returns + /// `Ok(())`. Does nothing if capacity is already sufficient. This method + /// preserves the contents even if an error occurs. + /// + /// # Errors + /// + /// If the capacity overflows, or the allocator reports a failure, then an error + /// is returned. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{Vec, Error}; + /// + /// fn process_data(data: &[u32]) -> Result, Error> { + /// let mut output = Vec::new(); + /// + /// // Pre-reserve the memory, exiting if we can't + /// output.try_reserve(data.len())?; + /// + /// for value in data { + /// output.try_push(*value)?; + /// } + /// + /// Ok(output) + /// } + /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?"); + /// ``` + pub fn try_reserve(&mut self, additional: usize) -> Result<(), Error> { + self.buf.try_reserve(self.len, additional) + } + + /// Tries to reserve the minimum capacity for at least `additional` + /// elements to be inserted in the given `Vec`. Unlike [`try_reserve`], + /// this will not deliberately over-allocate to speculatively avoid frequent + /// allocations. After calling `try_reserve_exact`, capacity will be greater + /// than or equal to `self.len() + additional` if it returns `Ok(())`. 
+ /// Does nothing if the capacity is already sufficient. + /// + /// Note that the allocator may give the collection more space than it + /// requests. Therefore, capacity can not be relied upon to be precisely + /// minimal. Prefer [`try_reserve`] if future insertions are expected. + /// + /// [`try_reserve`]: Vec::try_reserve + /// + /// # Errors + /// + /// If the capacity overflows, or the allocator reports a failure, then an error + /// is returned. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{Vec, Error}; + /// use rune_alloc::prelude::*; + /// + /// fn process_data(data: &[u32]) -> Result, Error> { + /// let mut output = Vec::new(); + /// + /// // Pre-reserve the memory, exiting if we can't + /// output.try_reserve_exact(data.len())?; + /// + /// // Now we know this can't OOM in the middle of our complex work + /// output.try_extend(data.iter().map(|&val| { + /// val * 2 + 5 // very complicated + /// }))?; + /// + /// Ok(output) + /// } + /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?"); + /// ``` + pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), Error> { + self.buf.try_reserve_exact(self.len, additional) + } + + /// Shrinks the capacity of the vector as much as possible. + /// + /// It will drop down as close as possible to the length but the allocator + /// may still inform the vector that there is space for a few more elements. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::Vec; + /// use rune_alloc::prelude::*; + /// + /// let mut vec = Vec::try_with_capacity(10)?; + /// vec.try_extend([1, 2, 3])?; + /// assert!(vec.capacity() >= 10); + /// vec.try_shrink_to_fit()?; + /// assert!(vec.capacity() >= 3); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_shrink_to_fit(&mut self) -> Result<(), Error> { + // The capacity is never less than the length, and there's nothing to do when + // they are equal, so we can avoid the panic case in `RawVec::shrink_to_fit` + // by only calling it with a greater capacity. + if self.capacity() > self.len { + self.buf.try_shrink_to_fit(self.len)?; + } + + Ok(()) + } + + /// Shrinks the capacity of the vector with a lower bound. + /// + /// The capacity will remain at least as large as both the length + /// and the supplied value. + /// + /// If the current capacity is less than the lower limit, this is a no-op. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::Vec; + /// use rune_alloc::prelude::*; + /// + /// let mut vec = Vec::try_with_capacity(10)?; + /// vec.try_extend([1, 2, 3])?; + /// assert!(vec.capacity() >= 10); + /// vec.try_shrink_to(4)?; + /// assert!(vec.capacity() >= 4); + /// vec.try_shrink_to(0)?; + /// assert!(vec.capacity() >= 3); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_shrink_to(&mut self, min_capacity: usize) -> Result<(), Error> { + if self.capacity() > min_capacity { + self.buf + .try_shrink_to_fit(cmp::max(self.len, min_capacity))?; + } + + Ok(()) + } + + /// Converts the vector into [`Box<[T]>`][owned slice]. + /// + /// If the vector has excess capacity, its items will be moved into a + /// newly-allocated buffer with exactly the right capacity. 
+ /// + /// [owned slice]: Box + /// + /// # Examples + /// + /// ``` + /// let v = rune_alloc::try_vec![1, 2, 3]; + /// + /// let slice = v.try_into_boxed_slice()?; + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// Any excess capacity is removed: + /// + /// ``` + /// use rune_alloc::Vec; + /// use rune_alloc::prelude::*; + /// + /// let mut vec = Vec::try_with_capacity(10)?; + /// vec.try_extend([1, 2, 3])?; + /// + /// assert!(vec.capacity() >= 10); + /// let slice = vec.try_into_boxed_slice()?; + /// assert_eq!(Vec::from(slice).capacity(), 3); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_into_boxed_slice(mut self) -> Result, Error> { + unsafe { + self.try_shrink_to_fit()?; + let me = ManuallyDrop::new(self); + let buf = ptr::read(&me.buf); + let len = me.len(); + Ok(buf.into_box(len).assume_init()) + } + } + + /// Shortens the vector, keeping the first `len` elements and dropping + /// the rest. + /// + /// If `len` is greater than the vector's current length, this has no + /// effect. + /// + /// The [`drain`] method can emulate `truncate`, but causes the excess + /// elements to be returned instead of dropped. + /// + /// Note that this method has no effect on the allocated capacity + /// of the vector. + /// + /// # Examples + /// + /// Truncating a five element vector to two elements: + /// + /// ``` + /// let mut vec = rune_alloc::try_vec![1, 2, 3, 4, 5]; + /// vec.truncate(2); + /// assert_eq!(vec, [1, 2]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// No truncation occurs when `len` is greater than the vector's current + /// length: + /// + /// ``` + /// let mut vec = rune_alloc::try_vec![1, 2, 3]; + /// vec.truncate(8); + /// assert_eq!(vec, [1, 2, 3]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// Truncating when `len == 0` is equivalent to calling the [`clear`] + /// method. + /// + /// ``` + /// let mut vec = rune_alloc::try_vec![1, 2, 3]; + /// vec.truncate(0); + /// assert_eq!(vec, []); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// [`clear`]: Vec::clear + /// [`drain`]: Vec::drain + pub fn truncate(&mut self, len: usize) { + // This is safe because: + // + // * the slice passed to `drop_in_place` is valid; the `len > self.len` + // case avoids creating an invalid slice, and + // * the `len` of the vector is shrunk before calling `drop_in_place`, + // such that no value will be dropped twice in case `drop_in_place` + // were to panic once (if it panics twice, the program aborts). + unsafe { + // Note: It's intentional that this is `>` and not `>=`. + // Changing it to `>=` has negative performance + // implications in some cases. See #78884 for more. + if len > self.len { + return; + } + let remaining_len = self.len - len; + let s = ptr::slice_from_raw_parts_mut(self.as_mut_ptr().add(len), remaining_len); + self.len = len; + ptr::drop_in_place(s); + } + } + + /// Extracts a slice containing the entire vector. + /// + /// Equivalent to `&s[..]`. + /// + /// # Examples + /// + /// ``` + /// use std::io::{self, Write}; + /// + /// let buffer = rune_alloc::try_vec![1, 2, 3, 5, 8]; + /// io::sink().write(buffer.as_slice()).unwrap(); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn as_slice(&self) -> &[T] { + self + } + + /// Extracts a mutable slice of the entire vector. + /// + /// Equivalent to `&mut s[..]`. 
+ /// + /// # Examples + /// + /// ``` + /// use std::io::{self, Read}; + /// let mut buffer = rune_alloc::try_vec![0; 3]; + /// io::repeat(0b101).read_exact(buffer.as_mut_slice()).unwrap(); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn as_mut_slice(&mut self) -> &mut [T] { + self + } + + /// Returns a raw pointer to the vector's buffer, or a dangling raw pointer + /// valid for zero sized reads if the vector didn't allocate. + /// + /// The caller must ensure that the vector outlives the pointer this + /// function returns, or else it will end up pointing to garbage. + /// Modifying the vector may cause its buffer to be reallocated, + /// which would also make any pointers to it invalid. + /// + /// The caller must also ensure that the memory the pointer (non-transitively) points to + /// is never written to (except inside an `UnsafeCell`) using this pointer or any pointer + /// derived from it. If you need to mutate the contents of the slice, use + /// [`as_mut_ptr`]. + /// + /// # Examples + /// + /// ``` + /// let x = rune_alloc::try_vec![1, 2, 4]; + /// let x_ptr = x.as_ptr(); + /// + /// unsafe { + /// for i in 0..x.len() { + /// assert_eq!(*x_ptr.add(i), 1 << i); + /// } + /// } + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// [`as_mut_ptr`]: Vec::as_mut_ptr + #[inline] + pub fn as_ptr(&self) -> *const T { + // We shadow the slice method of the same name to avoid going through + // `deref`, which creates an intermediate reference. + self.buf.ptr() + } + + /// Returns an unsafe mutable pointer to the vector's buffer, or a dangling + /// raw pointer valid for zero sized reads if the vector didn't allocate. + /// + /// The caller must ensure that the vector outlives the pointer this + /// function returns, or else it will end up pointing to garbage. + /// Modifying the vector may cause its buffer to be reallocated, + /// which would also make any pointers to it invalid. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::Vec; + /// + /// // Allocate vector big enough for 4 elements. + /// let size = 4; + /// let mut x: Vec = Vec::try_with_capacity(size)?; + /// let x_ptr = x.as_mut_ptr(); + /// + /// // Initialize elements via raw pointer writes, then set length. + /// unsafe { + /// for i in 0..size { + /// *x_ptr.add(i) = i as i32; + /// } + /// x.set_len(size); + /// } + /// assert_eq!(&*x, &[0, 1, 2, 3]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn as_mut_ptr(&mut self) -> *mut T { + // We shadow the slice method of the same name to avoid going through + // `deref_mut`, which creates an intermediate reference. + self.buf.ptr() + } + + /// Forces the length of the vector to `new_len`. + /// + /// This is a low-level operation that maintains none of the normal + /// invariants of the type. Normally changing the length of a vector + /// is done using one of the safe operations instead, such as + /// [`truncate`], [`try_resize`], [`try_extend`], or [`clear`]. + /// + /// [`truncate`]: Vec::truncate + /// [`try_resize`]: Vec::try_resize + /// [`try_extend`]: Extend::extend + /// [`clear`]: Vec::clear + /// + /// # Safety + /// + /// - `new_len` must be less than or equal to [`capacity()`]. + /// - The elements at `old_len..new_len` must be initialized. 
+ /// + /// [`capacity()`]: Vec::capacity + /// + /// # Examples + /// + /// This method can be useful for situations in which the vector + /// is serving as a buffer for other code, particularly over FFI: + /// + /// ```no_run + /// # #![allow(dead_code)] + /// # // This is just a minimal skeleton for the doc example; + /// # // don't use this as a starting point for a real library. + /// # pub(crate) struct StreamWrapper { strm: *mut std::ffi::c_void } + /// # const Z_OK: i32 = 0; + /// # extern "C" { + /// # fn deflateGetDictionary( + /// # strm: *mut std::ffi::c_void, + /// # dictionary: *mut u8, + /// # dictLength: *mut usize, + /// # ) -> i32; + /// # } + /// # impl StreamWrapper { + /// pub(crate) fn get_dictionary(&self) -> Option> { + /// // Per the FFI method's docs, "32768 bytes is always enough". + /// let mut dict = Vec::with_capacity(32_768); + /// let mut dict_length = 0; + /// // SAFETY: When `deflateGetDictionary` returns `Z_OK`, it holds that: + /// // 1. `dict_length` elements were initialized. + /// // 2. `dict_length` <= the capacity (32_768) + /// // which makes `set_len` safe to call. + /// unsafe { + /// // Make the FFI call... + /// let r = deflateGetDictionary(self.strm, dict.as_mut_ptr(), &mut dict_length); + /// if r == Z_OK { + /// // ...and update the length to what was initialized. + /// dict.set_len(dict_length); + /// Some(dict) + /// } else { + /// None + /// } + /// } + /// } + /// # } + /// ``` + /// + /// While the following example is sound, there is a memory leak since + /// the inner vectors were not freed prior to the `set_len` call: + /// + /// ``` + /// use rune_alloc::try_vec; + /// + /// let mut vec = try_vec![try_vec![1, 0, 0], + /// try_vec![0, 1, 0], + /// try_vec![0, 0, 1]]; + /// // SAFETY: + /// // 1. `old_len..0` is empty so no elements need to be initialized. + /// // 2. `0 <= capacity` always holds whatever `capacity` is. + /// unsafe { + /// vec.set_len(0); + /// } + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// Normally, here, one would use [`clear`] instead to correctly drop + /// the contents and thus not leak memory. + #[inline] + pub unsafe fn set_len(&mut self, new_len: usize) { + debug_assert!(new_len <= self.capacity()); + self.len = new_len; + } + + /// Removes an element from the vector and returns it. + /// + /// The removed element is replaced by the last element of the vector. + /// + /// This does not preserve ordering, but is *O*(1). + /// If you need to preserve the element order, use [`remove`] instead. + /// + /// [`remove`]: Vec::remove + /// + /// # Panics + /// + /// Panics if `index` is out of bounds. + /// + /// # Examples + /// + /// ``` + /// let mut v = rune_alloc::try_vec!["foo", "bar", "baz", "qux"]; + /// + /// assert_eq!(v.swap_remove(1), "bar"); + /// assert_eq!(v, ["foo", "qux", "baz"]); + /// + /// assert_eq!(v.swap_remove(0), "foo"); + /// assert_eq!(v, ["baz", "qux"]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn swap_remove(&mut self, index: usize) -> T { + #[cold] + #[inline(never)] + fn assert_failed(index: usize, len: usize) -> ! { + panic!("swap_remove index (is {index}) should be < len (is {len})"); + } + + let len = self.len(); + if index >= len { + assert_failed(index, len); + } + unsafe { + // We replace self[index] with the last element. Note that if the + // bounds check above succeeds there must be a last element (which + // can be self[index] itself). 
+ let value = ptr::read(self.as_ptr().add(index)); + let base_ptr = self.as_mut_ptr(); + ptr::copy(base_ptr.add(len - 1), base_ptr.add(index), 1); + self.set_len(len - 1); + value + } + } + + /// Inserts an element at position `index` within the vector, shifting all + /// elements after it to the right. + /// + /// # Panics + /// + /// Panics if `index > len`. + /// + /// # Examples + /// + /// ``` + /// let mut vec = rune_alloc::try_vec![1, 2, 3]; + /// vec.try_insert(1, 4)?; + /// assert_eq!(vec, [1, 4, 2, 3]); + /// vec.try_insert(4, 5)?; + /// assert_eq!(vec, [1, 4, 2, 3, 5]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_insert(&mut self, index: usize, element: T) -> Result<(), Error> { + #[cold] + #[inline(never)] + fn assert_failed(index: usize, len: usize) -> ! { + panic!("insertion index (is {index}) should be <= len (is {len})"); + } + + let len = self.len(); + + // space for the new element + if len == self.buf.capacity() { + self.try_reserve(1)?; + } + + unsafe { + // infallible + // The spot to put the new value + { + let p = self.as_mut_ptr().add(index); + if index < len { + // Shift everything over to make space. (Duplicating the + // `index`th element into two consecutive places.) + ptr::copy(p, p.add(1), len - index); + } else if index == len { + // No elements need shifting. + } else { + assert_failed(index, len); + } + // Write it in, overwriting the first copy of the `index`th + // element. + ptr::write(p, element); + } + self.set_len(len + 1); + } + + Ok(()) + } + + /// Removes and returns the element at position `index` within the vector, + /// shifting all elements after it to the left. + /// + /// Note: Because this shifts over the remaining elements, it has a + /// worst-case performance of *O*(*n*). If you don't need the order of + /// elements to be preserved, use [`swap_remove`] instead. If you'd like to + /// remove elements from the beginning of the `Vec`, consider using + /// [`VecDeque::pop_front`] instead. + /// + /// [`swap_remove`]: crate::Vec::swap_remove + /// [`VecDeque::pop_front`]: crate::VecDeque::pop_front + /// + /// # Panics + /// + /// Panics if `index` is out of bounds. + /// + /// # Examples + /// + /// ``` + /// let mut v = rune_alloc::try_vec![1, 2, 3]; + /// assert_eq!(v.remove(1), 2); + /// assert_eq!(v, [1, 3]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[track_caller] + pub fn remove(&mut self, index: usize) -> T { + #[cold] + #[inline(never)] + #[track_caller] + fn assert_failed(index: usize, len: usize) -> ! { + panic!("removal index (is {index}) should be < len (is {len})"); + } + + let len = self.len(); + if index >= len { + assert_failed(index, len); + } + unsafe { + // infallible + let ret; + { + // the place we are taking from. + let ptr = self.as_mut_ptr().add(index); + // copy it out, unsafely having a copy of the value on + // the stack and in the vector at the same time. + ret = ptr::read(ptr); + + // Shift everything down to fill in that spot. + ptr::copy(ptr.add(1), ptr, len - index - 1); + } + self.set_len(len - 1); + ret + } + } + + /// Retains only the elements specified by the predicate. + /// + /// In other words, remove all elements `e` for which `f(&e)` returns `false`. + /// This method operates in place, visiting each element exactly once in the + /// original order, and preserves the order of the retained elements. 
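The two removal strategies above are easiest to compare side by side. A minimal sketch: `swap_remove` trades ordering for *O*(1) time by moving the last element into the vacated slot, while `remove` shifts the whole tail left to preserve order:

```rust
use rune_alloc::try_vec;

fn main() -> Result<(), rune_alloc::Error> {
    // O(1): the last element ("d") is moved into the gap at index 1.
    let mut unordered = try_vec!["a", "b", "c", "d"];
    assert_eq!(unordered.swap_remove(1), "b");
    assert_eq!(unordered, ["a", "d", "c"]);

    // O(n): every element after index 1 is shifted left by one.
    let mut ordered = try_vec!["a", "b", "c", "d"];
    assert_eq!(ordered.remove(1), "b");
    assert_eq!(ordered, ["a", "c", "d"]);
    Ok(())
}
```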
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = rune_alloc::try_vec![1, 2, 3, 4];
+    /// vec.retain(|&x| x % 2 == 0);
+    /// assert_eq!(vec, [2, 4]);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    ///
+    /// Because the elements are visited exactly once in the original order,
+    /// external state may be used to decide which elements to keep.
+    ///
+    /// ```
+    /// let mut vec = rune_alloc::try_vec![1, 2, 3, 4, 5];
+    /// let keep = [false, true, true, false, true];
+    /// let mut iter = keep.iter();
+    /// vec.retain(|_| *iter.next().unwrap());
+    /// assert_eq!(vec, [2, 3, 5]);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    pub fn retain<F>(&mut self, mut f: F)
+    where
+        F: FnMut(&T) -> bool,
+    {
+        self.retain_mut(|elem| f(elem));
+    }
+
+    /// Retains only the elements specified by the predicate, passing a mutable reference to it.
+    ///
+    /// In other words, remove all elements `e` such that `f(&mut e)` returns `false`.
+    /// This method operates in place, visiting each element exactly once in the
+    /// original order, and preserves the order of the retained elements.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = rune_alloc::try_vec![1, 2, 3, 4];
+    /// vec.retain_mut(|x| if *x <= 3 {
+    ///     *x += 1;
+    ///     true
+    /// } else {
+    ///     false
+    /// });
+    /// assert_eq!(vec, [2, 3, 4]);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    pub fn retain_mut<F>(&mut self, mut f: F)
+    where
+        F: FnMut(&mut T) -> bool,
+    {
+        let original_len = self.len();
+        // Avoid double drop if the drop guard is not executed,
+        // since we may make some holes during the process.
+        unsafe { self.set_len(0) };
+
+        // Vec: [Kept, Kept, Hole, Hole, Hole, Hole, Unchecked, Unchecked]
+        //      |<-          processed len          ->| ^- next to check
+        //                  |<-  deleted cnt      ->|
+        //      |<-                original_len                        ->|
+        // Kept: Elements which predicate returns true on.
+        // Hole: Moved or dropped element slot.
+        // Unchecked: Unchecked valid elements.
+        //
+        // This drop guard will be invoked when predicate or `drop` of element panicked.
+        // It shifts unchecked elements to cover holes and `set_len` to the correct length.
+        // In cases when predicate and `drop` never panic, it will be optimized out.
+        struct BackshiftOnDrop<'a, T, A: Allocator> {
+            v: &'a mut Vec<T, A>,
+            processed_len: usize,
+            deleted_cnt: usize,
+            original_len: usize,
+        }
+
+        impl<T, A: Allocator> Drop for BackshiftOnDrop<'_, T, A> {
+            fn drop(&mut self) {
+                if self.deleted_cnt > 0 {
+                    // SAFETY: Trailing unchecked items must be valid since we never touch them.
+                    unsafe {
+                        ptr::copy(
+                            self.v.as_ptr().add(self.processed_len),
+                            self.v
+                                .as_mut_ptr()
+                                .add(self.processed_len - self.deleted_cnt),
+                            self.original_len - self.processed_len,
+                        );
+                    }
+                }
+                // SAFETY: After filling holes, all items are in contiguous memory.
+                unsafe {
+                    self.v.set_len(self.original_len - self.deleted_cnt);
+                }
+            }
+        }
+
+        let mut g = BackshiftOnDrop {
+            v: self,
+            processed_len: 0,
+            deleted_cnt: 0,
+            original_len,
+        };
+
+        fn process_loop<F, T, A: Allocator, const DELETED: bool>(
+            original_len: usize,
+            f: &mut F,
+            g: &mut BackshiftOnDrop<'_, T, A>,
+        ) where
+            F: FnMut(&mut T) -> bool,
+        {
+            while g.processed_len != original_len {
+                // SAFETY: Unchecked element must be valid.
+                let cur = unsafe { &mut *g.v.as_mut_ptr().add(g.processed_len) };
+                if !f(cur) {
+                    // Advance early to avoid double drop if `drop_in_place` panicked.
+                    g.processed_len += 1;
+                    g.deleted_cnt += 1;
+                    // SAFETY: We never touch this element again after dropped.
+                    unsafe { ptr::drop_in_place(cur) };
+                    // We already advanced the counter.
+                    if DELETED {
+                        continue;
+                    } else {
+                        break;
+                    }
+                }
+                if DELETED {
+                    // SAFETY: `deleted_cnt` > 0, so the hole slot must not overlap with current element.
+                    // We use copy for move, and never touch this element again.
+                    unsafe {
+                        let hole_slot = g.v.as_mut_ptr().add(g.processed_len - g.deleted_cnt);
+                        ptr::copy_nonoverlapping(cur, hole_slot, 1);
+                    }
+                }
+                g.processed_len += 1;
+            }
+        }
+
+        // Stage 1: Nothing was deleted.
+        process_loop::<F, T, A, false>(original_len, &mut f, &mut g);
+
+        // Stage 2: Some elements were deleted.
+        process_loop::<F, T, A, true>(original_len, &mut f, &mut g);
+
+        // All items are processed. This can be optimized to `set_len` by LLVM.
+        drop(g);
+    }
+
+    /// Removes all but the first of consecutive elements in the vector that resolve to the same
+    /// key.
+    ///
+    /// If the vector is sorted, this removes all duplicates.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = rune_alloc::try_vec![10, 20, 21, 30, 20];
+    ///
+    /// vec.dedup_by_key(|i| *i / 10);
+    ///
+    /// assert_eq!(vec, [10, 20, 30, 20]);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[inline]
+    pub fn dedup_by_key<F, K>(&mut self, mut key: F)
+    where
+        F: FnMut(&mut T) -> K,
+        K: PartialEq<K>,
+    {
+        self.dedup_by(|a, b| key(a) == key(b))
+    }
+
+    /// Removes all but the first of consecutive elements in the vector
+    /// satisfying a given equality relation.
+    ///
+    /// The `same_bucket` function is passed references to two elements from the
+    /// vector and must determine if the elements compare equal. The elements
+    /// are passed in opposite order from their order in the slice, so if
+    /// `same_bucket(a, b)` returns `true`, `a` is removed.
+    ///
+    /// If the vector is sorted, this removes all duplicates.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = rune_alloc::try_vec!["foo", "bar", "Bar", "baz", "bar"];
+    ///
+    /// vec.dedup_by(|a, b| a.eq_ignore_ascii_case(b));
+    ///
+    /// assert_eq!(vec, ["foo", "bar", "baz", "bar"]);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    pub fn dedup_by<F>(&mut self, mut same_bucket: F)
+    where
+        F: FnMut(&mut T, &mut T) -> bool,
+    {
+        let len = self.len();
+
+        if len <= 1 {
+            return;
+        }
+
+        /* INVARIANT: vec.len() > read >= write > write-1 >= 0 */
+        struct FillGapOnDrop<'a, T, A: Allocator> {
+            /* Offset of the element we want to check if it is duplicate */
+            read: usize,
+
+            /* Offset of the place where we want to place the non-duplicate
+             * when we find it. */
+            write: usize,
+
+            /* The Vec that would need correction if `same_bucket` panicked */
+            vec: &'a mut Vec<T, A>,
+        }
+
+        impl<'a, T, A: Allocator> Drop for FillGapOnDrop<'a, T, A> {
+            fn drop(&mut self) {
+                /* This code gets executed when `same_bucket` panics */
+
+                /* SAFETY: invariant guarantees that `read - write`
+                 * and `len - read` never overflow and that the copy is always
+                 * in-bounds. */
+                unsafe {
+                    let ptr = self.vec.as_mut_ptr();
+                    let len = self.vec.len();
+
+                    /* How many items were left when `same_bucket` panicked.
+                     * Basically vec[read..].len() */
+                    let items_left = len.wrapping_sub(self.read);
+
+                    /* Pointer to first item in vec[write..write+items_left] slice */
+                    let dropped_ptr = ptr.add(self.write);
+                    /* Pointer to first item in vec[read..] slice */
+                    let valid_ptr = ptr.add(self.read);
+
+                    /* Copy `vec[read..]` to `vec[write..write+items_left]`.
+                     * The slices can overlap, so `copy_nonoverlapping` cannot be used */
+                    ptr::copy(valid_ptr, dropped_ptr, items_left);
+
+                    /* How many items have been already dropped
+                     * Basically vec[read..write].len() */
+                    let dropped = self.read.wrapping_sub(self.write);
+
+                    self.vec.set_len(len - dropped);
+                }
+            }
+        }
+
+        let mut gap = FillGapOnDrop {
+            read: 1,
+            write: 1,
+            vec: self,
+        };
+
+        let ptr = gap.vec.as_mut_ptr();
+
+        /* Drop items while going through Vec, it should be more efficient than
+         * doing slice partition_dedup + truncate */
+
+        /* SAFETY: Because of the invariant, read_ptr, prev_ptr and write_ptr
+         * are always in-bounds and read_ptr never aliases prev_ptr */
+        unsafe {
+            while gap.read < len {
+                let read_ptr = ptr.add(gap.read);
+                let prev_ptr = ptr.add(gap.write.wrapping_sub(1));
+
+                if same_bucket(&mut *read_ptr, &mut *prev_ptr) {
+                    // Increase `gap.read` now since the drop may panic.
+                    gap.read += 1;
+                    /* We have found duplicate, drop it in-place */
+                    ptr::drop_in_place(read_ptr);
+                } else {
+                    let write_ptr = ptr.add(gap.write);
+
+                    /* Because `read_ptr` can be equal to `write_ptr`, we either
+                     * have to use `copy` or conditional `copy_nonoverlapping`.
+                     * Looks like the first option is faster. */
+                    ptr::copy(read_ptr, write_ptr, 1);
+
+                    /* We have filled that place, so go further */
+                    gap.write += 1;
+                    gap.read += 1;
+                }
+            }
+
+            /* Technically we could let `gap` clean up with its Drop, but
+             * when `same_bucket` is guaranteed to not panic, this bloats a little
+             * the codegen, so we just do it manually */
+            gap.vec.set_len(gap.write);
+            mem::forget(gap);
+        }
+    }
+
+    /// Appends an element to the back of a collection.
+    ///
+    /// # Errors
+    ///
+    /// Errors if the new capacity exceeds `isize::MAX` bytes, or if the
+    /// allocator reports a failure.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::{Vec, Global};
+    ///
+    /// let mut vec: Vec<i32, Global> = Vec::try_with_capacity_in(2, Global)?;
+    /// vec.try_push(1)?;
+    /// vec.try_push(2)?;
+    /// vec.try_push(3)?;
+    /// assert_eq!(vec, [1, 2, 3]);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[inline]
+    pub fn try_push(&mut self, value: T) -> Result<(), Error> {
+        // This will fail if we would allocate > isize::MAX bytes, or if the
+        // length increment would overflow for zero-sized types.
+        if self.len == self.buf.capacity() {
+            self.buf.try_reserve_for_push(self.len)?;
+        }
+
+        unsafe {
+            let end = self.as_mut_ptr().add(self.len);
+            ptr::write(end, value);
+            self.len += 1;
+        }
+
+        Ok(())
+    }
+
+    /// Appends an element if there is sufficient spare capacity, otherwise an
+    /// error is returned with the element.
+    ///
+    /// Unlike [`try_push`] this method will not reallocate when there's
+    /// insufficient capacity. The caller should use [`try_reserve`] to ensure
+    /// that there is enough capacity.
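Beyond the doc example that follows in the next hunk, `push_within_capacity` composes naturally into bounded buffers, since it never (re)allocates. A minimal sketch under that contract; `BoundedLog` is a hypothetical type used only for illustration:

```rust
use rune_alloc::{Error, Vec};

/// Hypothetical bounded buffer: rejects writes past its fixed capacity
/// instead of reallocating.
struct BoundedLog {
    entries: Vec<u32>,
}

impl BoundedLog {
    fn new(cap: usize) -> Result<Self, Error> {
        Ok(Self { entries: Vec::try_with_capacity(cap)? })
    }

    /// Hands the rejected value back if the buffer is full.
    fn record(&mut self, value: u32) -> Result<(), u32> {
        self.entries.push_within_capacity(value)
    }
}

fn main() -> Result<(), Error> {
    let mut log = BoundedLog::new(2)?;
    assert!(log.record(1).is_ok());
    assert!(log.record(2).is_ok());
    assert_eq!(log.record(3), Err(3)); // full: value returned, no realloc
    Ok(())
}
```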
+    ///
+    /// [`try_push`]: Vec::try_push
+    /// [`try_reserve`]: Vec::try_reserve
+    ///
+    /// # Examples
+    ///
+    /// A manual alternative to [`TryFromIteratorIn`]:
+    ///
+    /// ```
+    /// use rune_alloc::{Vec, Error};
+    /// use rune_alloc::prelude::*;
+    ///
+    /// fn from_iter_fallible<T>(iter: impl Iterator<Item = T>) -> Result<Vec<T>, Error> {
+    ///     let mut vec = Vec::new();
+    ///
+    ///     for value in iter {
+    ///         if let Err(value) = vec.push_within_capacity(value) {
+    ///             vec.try_reserve(1)?;
+    ///             // this cannot fail, the previous line either returned or added at least 1 free slot
+    ///             let _ = vec.push_within_capacity(value);
+    ///         }
+    ///     }
+    ///
+    ///     Ok(vec)
+    /// }
+    ///
+    /// assert_eq!(from_iter_fallible(0..100), Ok(Vec::try_from_iter(0..100)?));
+    /// # Ok::<_, Error>(())
+    /// ```
+    #[inline]
+    pub fn push_within_capacity(&mut self, value: T) -> Result<(), T> {
+        if self.len == self.buf.capacity() {
+            return Err(value);
+        }
+
+        unsafe {
+            let end = self.as_mut_ptr().add(self.len);
+            ptr::write(end, value);
+            self.len += 1;
+        }
+
+        Ok(())
+    }
+
+    /// Removes the last element from a vector and returns it, or [`None`] if it
+    /// is empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use rune_alloc::Vec;
+    /// use rune_alloc::prelude::*;
+    ///
+    /// let mut vec = Vec::try_from_iter([1, 2, 3])?;
+    /// assert_eq!(vec.pop(), Some(3));
+    /// assert_eq!(vec, [1, 2]);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[inline]
+    pub fn pop(&mut self) -> Option<T> {
+        if self.len == 0 {
+            None
+        } else {
+            unsafe {
+                self.len -= 1;
+                Some(ptr::read(self.as_ptr().add(self.len())))
+            }
+        }
+    }
+
+    /// Moves all the elements of `other` into `self`, leaving `other` empty.
+    ///
+    /// # Errors
+    ///
+    /// Errors if the new capacity exceeds `isize::MAX` bytes, or if the
+    /// allocator reports a failure.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = rune_alloc::try_vec![1, 2, 3];
+    /// let mut vec2 = rune_alloc::try_vec![4, 5, 6];
+    /// vec.try_append(&mut vec2)?;
+    /// assert_eq!(vec, [1, 2, 3, 4, 5, 6]);
+    /// assert_eq!(vec2, []);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    #[inline]
+    pub fn try_append(&mut self, other: &mut Self) -> Result<(), Error> {
+        unsafe {
+            self.try_append_elements(other.as_slice() as _)?;
+            other.set_len(0);
+        }
+
+        Ok(())
+    }
+
+    /// Appends elements to `self` from other buffer.
+    #[inline]
+    unsafe fn try_append_elements(&mut self, other: *const [T]) -> Result<(), Error> {
+        let count = unsafe { (*other).len() };
+        self.try_reserve(count)?;
+        let len = self.len();
+        unsafe { ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count) };
+        self.len += count;
+        Ok(())
+    }
+
+    /// Construct a raw iterator over the current vector.
+    ///
+    /// # Safety
+    ///
+    /// The caller must ensure that any pointers returned by the iterator are
+    /// not dereferenced unless the object they were constructed from is still
+    /// alive.
+    pub unsafe fn raw_iter(&self) -> RawIter<T> {
+        RawIter::new(self)
+    }
+
+    /// Construct a raw mutable iterator over the current vector.
+    ///
+    /// # Safety
+    ///
+    /// The caller must ensure that any pointers returned by the iterator are
+    /// not dereferenced unless the object they were constructed from is still
+    /// alive.
+    ///
+    /// Since this is a mutable iterator, it also implies that no other accesses
+    /// have been performed until the iterator has been dropped.
+    pub unsafe fn raw_iter_mut(&mut self) -> RawIterMut<T> {
+        RawIterMut::new(self)
+    }
+
+    /// Removes the specified range from the vector in bulk, returning all
+    /// removed elements as an iterator.
If the iterator is dropped before + /// being fully consumed, it drops the remaining removed elements. + /// + /// The returned iterator keeps a mutable borrow on the vector to optimize + /// its implementation. + /// + /// # Panics + /// + /// Panics if the starting point is greater than the end point or if + /// the end point is greater than the length of the vector. + /// + /// # Leaking + /// + /// If the returned iterator goes out of scope without being dropped (due to + /// [`mem::forget`], for example), the vector may have lost and leaked + /// elements arbitrarily, including elements outside the range. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{Vec, IteratorExt}; + /// + /// let mut v = rune_alloc::try_vec![1, 2, 3]; + /// let u: Vec<_> = v.drain(1..).try_collect()?; + /// assert_eq!(v, &[1]); + /// assert_eq!(u, &[2, 3]); + /// + /// // A full range clears the vector, like `clear()` does + /// v.drain(..); + /// assert_eq!(v, &[]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn drain(&mut self, range: R) -> Drain<'_, T, A> + where + R: RangeBounds, + { + // Memory safety + // + // When the Drain is first created, it shortens the length of + // the source vector to make sure no uninitialized or moved-from elements + // are accessible at all if the Drain's destructor never gets to run. + // + // Drain will ptr::read out the values to remove. + // When finished, remaining tail of the vec is copied back to cover + // the hole, and the vector length is restored to the new length. + // + let len = self.len(); + let Range { start, end } = slice_range(range, ..len); + + unsafe { + // set self.vec length's to start, to be safe in case Drain is leaked + self.set_len(start); + let range_slice = slice::from_raw_parts(self.as_ptr().add(start), end - start); + Drain { + tail_start: end, + tail_len: len - end, + iter: range_slice.iter(), + vec: NonNull::from(self), + } + } + } + + /// Clears the vector, removing all values. + /// + /// Note that this method has no effect on the allocated capacity + /// of the vector. + /// + /// # Examples + /// + /// ``` + /// let mut v = rune_alloc::try_vec![1, 2, 3]; + /// + /// v.clear(); + /// + /// assert!(v.is_empty()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn clear(&mut self) { + let elems: *mut [T] = self.as_mut_slice(); + + // SAFETY: + // - `elems` comes directly from `as_mut_slice` and is therefore valid. + // - Setting `self.len` before calling `drop_in_place` means that, + // if an element's `Drop` impl panics, the vector's `Drop` impl will + // do nothing (leaking the rest of the elements) instead of dropping + // some twice. + unsafe { + self.len = 0; + ptr::drop_in_place(elems); + } + } + + /// Returns the number of elements in the vector, also referred to as its + /// 'length'. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{Vec, Global}; + /// + /// let mut a = Vec::new_in(Global); + /// + /// for value in 0..3 { + /// a.try_push(value)?; + /// } + /// + /// assert_eq!(a.len(), 3); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn len(&self) -> usize { + self.len + } + + /// Returns `true` if the vector contains no elements. 
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::Vec; + /// + /// let mut v = Vec::new(); + /// assert!(v.is_empty()); + /// + /// v.try_push(1)?; + /// assert!(!v.is_empty()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Splits the collection into two at the given index. + /// + /// Returns a newly allocated vector containing the elements in the range + /// `[at, len)`. After the call, the original vector will be left containing + /// the elements `[0, at)` with its previous capacity unchanged. + /// + /// # Panics + /// + /// Panics if `at > len`. + /// + /// # Examples + /// + /// ``` + /// let mut vec = rune_alloc::try_vec![1, 2, 3]; + /// let vec2 = vec.try_split_off(1)?; + /// assert_eq!(vec, [1]); + /// assert_eq!(vec2, [2, 3]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + #[must_use = "use `.truncate()` if you don't need the other half"] + pub fn try_split_off(&mut self, at: usize) -> Result + where + A: Clone, + { + #[cold] + #[inline(never)] + fn assert_failed(at: usize, len: usize) -> ! { + panic!("`at` split index (is {at}) should be <= len (is {len})"); + } + + if at > self.len() { + assert_failed(at, self.len()); + } + + if at == 0 { + let new = Vec::try_with_capacity_in(self.capacity(), self.allocator().clone())?; + // the new vector can take over the original buffer and avoid the copy + return Ok(mem::replace(self, new)); + } + + let other_len = self.len - at; + let mut other = Vec::try_with_capacity_in(other_len, self.allocator().clone())?; + + // Unsafely `set_len` and copy items to `other`. + unsafe { + self.set_len(at); + other.set_len(other_len); + ptr::copy_nonoverlapping(self.as_ptr().add(at), other.as_mut_ptr(), other.len()); + } + + Ok(other) + } + + /// Resizes the `Vec` in-place so that `len` is equal to `new_len`. + /// + /// If `new_len` is greater than `len`, the `Vec` is extended by the + /// difference, with each additional slot filled with the result of + /// calling the closure `f`. The return values from `f` will end up + /// in the `Vec` in the order they have been generated. + /// + /// If `new_len` is less than `len`, the `Vec` is simply truncated. + /// + /// This method uses a closure to create new values on every push. If + /// you'd rather [`Clone`] a given value, use [`Vec::try_resize`]. If you + /// want to use the [`Default`] trait to generate values, you can + /// pass [`Default::default`] as the second argument. + /// + /// # Examples + /// + /// ``` + /// let mut vec = rune_alloc::try_vec![1, 2, 3]; + /// vec.try_resize_with(5, Default::default)?; + /// assert_eq!(vec, [1, 2, 3, 0, 0]); + /// + /// let mut vec = rune_alloc::try_vec![]; + /// let mut p = 1; + /// vec.try_resize_with(4, || { p *= 2; p })?; + /// assert_eq!(vec, [2, 4, 8, 16]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_resize_with(&mut self, new_len: usize, f: F) -> Result<(), Error> + where + F: FnMut() -> T, + { + let len = self.len(); + + if new_len > len { + self.try_extend_trusted(iter::repeat_with(f).take(new_len - len))?; + } else { + self.truncate(new_len); + } + + Ok(()) + } + + /// Consumes and leaks the `Vec`, returning a mutable reference to the contents, + /// `&'a mut [T]`. Note that the type `T` must outlive the chosen lifetime + /// `'a`. If the type has only static references, or none at all, then this + /// may be chosen to be `'static`. 
+ /// + /// As of Rust 1.57, this method does not reallocate or shrink the `Vec`, + /// so the leaked allocation may include unused capacity that is not part + /// of the returned slice. + /// + /// This function is mainly useful for data that lives for the remainder of + /// the program's life. Dropping the returned reference will cause a memory + /// leak. + /// + /// # Examples + /// + /// Simple usage: + /// + /// ``` + /// let x = rune_alloc::try_vec![1, 2, 3]; + /// let static_ref: &'static mut [usize] = x.leak(); + /// static_ref[0] += 1; + /// assert_eq!(static_ref, &[2, 2, 3]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn leak<'a>(self) -> &'a mut [T] + where + A: 'a, + { + let mut me = ManuallyDrop::new(self); + unsafe { slice::from_raw_parts_mut(me.as_mut_ptr(), me.len) } + } + + /// Returns the remaining spare capacity of the vector as a slice of + /// `MaybeUninit`. + /// + /// The returned slice can be used to fill the vector with data (e.g. by + /// reading from a file) before marking the data as initialized using the + /// [`set_len`] method. + /// + /// [`set_len`]: Vec::set_len + /// + /// # Examples + /// + /// ``` + /// // Allocate vector big enough for 10 elements. + /// let mut v = Vec::with_capacity(10); + /// + /// // Fill in the first 3 elements. + /// let uninit = v.spare_capacity_mut(); + /// uninit[0].write(0); + /// uninit[1].write(1); + /// uninit[2].write(2); + /// + /// // Mark the first 3 elements of the vector as being initialized. + /// unsafe { + /// v.set_len(3); + /// } + /// + /// assert_eq!(&v, &[0, 1, 2]); + /// ``` + #[inline] + pub(crate) fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit] { + // Note: + // This method is not implemented in terms of `split_at_spare_mut`, + // to prevent invalidation of pointers to the buffer. + unsafe { + slice::from_raw_parts_mut( + self.as_mut_ptr().add(self.len) as *mut MaybeUninit, + self.buf.capacity() - self.len, + ) + } + } + + /// Returns vector content as a slice of `T`, along with the remaining spare + /// capacity of the vector as a slice of `MaybeUninit`. + /// + /// The returned spare capacity slice can be used to fill the vector with data + /// (e.g. by reading from a file) before marking the data as initialized using + /// the [`set_len`] method. + /// + /// [`set_len`]: Vec::set_len + /// + /// Note that this is a low-level API, which should be used with care for + /// optimization purposes. If you need to append data to a `Vec` you can use + /// [`try_push`], [`try_extend`], [`try_extend_from_slice`], + /// [`try_extend_from_within`], [`try_insert`], [`try_append`], + /// [`try_resize`] or [`try_resize_with`], depending on your exact needs. + /// + /// [`try_push`]: Vec::try_push + /// [`try_extend`]: Vec::try_extend + /// [`try_extend_from_slice`]: Vec::try_extend_from_slice + /// [`try_extend_from_within`]: Vec::try_extend_from_within + /// [`try_insert`]: Vec::try_insert + /// [`try_append`]: Vec::try_append + /// [`try_resize`]: Vec::try_resize + /// [`try_resize_with`]: Vec::try_resize_with + /// + /// # Examples + /// + /// ``` + /// let mut v = rune_alloc::try_vec![1, 1, 2]; + /// + /// // Reserve additional space big enough for 10 elements. + /// v.try_reserve(10)?; + /// + /// let (init, uninit) = v.split_at_spare_mut(); + /// let sum = init.iter().copied().sum::(); + /// + /// // Fill in the next 4 elements. 
+ /// uninit[0].write(sum); + /// uninit[1].write(sum * 2); + /// uninit[2].write(sum * 3); + /// uninit[3].write(sum * 4); + /// + /// // Mark the 4 elements of the vector as being initialized. + /// unsafe { + /// let len = v.len(); + /// v.set_len(len + 4); + /// } + /// + /// assert_eq!(&v, &[1, 1, 2, 4, 8, 12, 16]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn split_at_spare_mut(&mut self) -> (&mut [T], &mut [MaybeUninit]) { + // SAFETY: + // - len is ignored and so never changed + let (init, spare, _) = unsafe { self.split_at_spare_mut_with_len() }; + (init, spare) + } + + /// Safety: changing returned .2 (&mut usize) is considered the same as calling `.set_len(_)`. + /// + /// This method provides unique access to all vec parts at once in `try_extend_from_within`. + unsafe fn split_at_spare_mut_with_len( + &mut self, + ) -> (&mut [T], &mut [MaybeUninit], &mut usize) { + let ptr = self.as_mut_ptr(); + // SAFETY: + // - `ptr` is guaranteed to be valid for `self.len` elements + // - but the allocation extends out to `self.buf.capacity()` elements, possibly + // uninitialized + let spare_ptr = unsafe { ptr.add(self.len) }; + let spare_ptr = spare_ptr.cast::>(); + let spare_len = self.buf.capacity() - self.len; + + // SAFETY: + // - `ptr` is guaranteed to be valid for `self.len` elements + // - `spare_ptr` is pointing one element past the buffer, so it doesn't overlap with `initialized` + unsafe { + let initialized = slice::from_raw_parts_mut(ptr, self.len); + let spare = slice::from_raw_parts_mut(spare_ptr, spare_len); + + (initialized, spare, &mut self.len) + } + } + + #[inline] + pub(crate) fn try_splice_in_place( + &mut self, + range: R, + replace_with: I, + ) -> Result<(), Error> + where + R: RangeBounds, + I: IntoIterator, + { + let mut drain = self.drain(range); + let mut iter = replace_with.into_iter(); + self::splice::splice(&mut drain, &mut iter) + } + + // specific extend for `TrustedLen` iterators, called both by the specializations + // and internal places where resolving specialization makes compilation slower + fn try_extend_trusted(&mut self, iterator: impl iter::Iterator) -> Result<(), Error> { + let (low, high) = iterator.size_hint(); + + if let Some(additional) = high { + debug_assert_eq!( + low, + additional, + "TrustedLen iterator's size hint is not exact: {:?}", + (low, high) + ); + + self.try_reserve(additional)?; + + unsafe { + let ptr = self.as_mut_ptr(); + let mut local_len = SetLenOnDrop::new(&mut self.len); + + for element in iterator { + ptr::write(ptr.add(local_len.current_len()), element); + // Since the loop executes user code which can panic we have to update + // the length every step to correctly drop what we've written. + // NB can't overflow since we would have had to alloc the address space + local_len.increment_len(1); + } + } + + Ok(()) + } else { + // Per TrustedLen contract a `None` upper bound means that the iterator length + // truly exceeds usize::MAX, which would eventually lead to a capacity overflow anyway. + // Since the other branch already panics eagerly (via `reserve()`) we do the same here. + // This avoids additional codegen for a fallback code path which would eventually + // panic anyway. + Err(Error::CapacityOverflow) + } + } +} + +impl Vec +where + T: TryClone, +{ + /// Resizes the `Vec` in-place so that `len` is equal to `new_len`. + /// + /// If `new_len` is greater than `len`, the `Vec` is extended by the + /// difference, with each additional slot filled with `value`. 
If `new_len`
+    /// is less than `len`, the `Vec` is simply truncated.
+    ///
+    /// This method requires `T` to implement [`TryClone`], in order to be able
+    /// to clone the passed value. If you need more flexibility (or want to
+    /// rely on [`Default`] instead of [`TryClone`]), use
+    /// [`Vec::try_resize_with`]. If you only need to resize to a smaller size,
+    /// use [`Vec::truncate`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = rune_alloc::try_vec!["hello"];
+    /// vec.try_resize(3, "world")?;
+    /// assert_eq!(vec, ["hello", "world", "world"]);
+    ///
+    /// let mut vec = rune_alloc::try_vec![1, 2, 3, 4];
+    /// vec.try_resize(2, 0)?;
+    /// assert_eq!(vec, [1, 2]);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    pub fn try_resize(&mut self, new_len: usize, value: T) -> Result<(), Error> {
+        let len = self.len();
+
+        if new_len > len {
+            self.try_extend_with(new_len - len, value)?;
+        } else {
+            self.truncate(new_len);
+        }
+
+        Ok(())
+    }
+
+    /// Clones and appends all elements in a slice to the `Vec`.
+    ///
+    /// Iterates over the slice `other`, clones each element, and then appends
+    /// it to this `Vec`. The `other` slice is traversed in-order.
+    ///
+    /// Note that this function is the same as [`try_extend`] except that it is
+    /// specialized to work with slices instead. If and when Rust gets
+    /// specialization this function will likely be deprecated (but still
+    /// available).
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = rune_alloc::try_vec![1];
+    /// vec.try_extend_from_slice(&[2, 3, 4])?;
+    /// assert_eq!(vec, [1, 2, 3, 4]);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    ///
+    /// [`try_extend`]: Vec::try_extend
+    pub fn try_extend_from_slice(&mut self, other: &[T]) -> Result<(), Error> {
+        try_extend_desugared(self, other.iter())
+    }
+
+    /// Copies elements from `src` range to the end of the vector.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the starting point is greater than the end point or if the
+    /// end point is greater than the length of the vector.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = rune_alloc::try_vec![0, 1, 2, 3, 4];
+    ///
+    /// vec.try_extend_from_within(2..)?;
+    /// assert_eq!(vec, [0, 1, 2, 3, 4, 2, 3, 4]);
+    ///
+    /// vec.try_extend_from_within(..2)?;
+    /// assert_eq!(vec, [0, 1, 2, 3, 4, 2, 3, 4, 0, 1]);
+    ///
+    /// vec.try_extend_from_within(4..8)?;
+    /// assert_eq!(vec, [0, 1, 2, 3, 4, 2, 3, 4, 0, 1, 4, 2, 3, 4]);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    pub fn try_extend_from_within<R>(&mut self, src: R) -> Result<(), Error>
+    where
+        R: RangeBounds<usize>,
+    {
+        let range = slice_range(src, ..self.len());
+        self.try_reserve(range.len())?;
+
+        // SAFETY:
+        // - `slice_range` guarantees that the given range is valid for
+        //   indexing self
+        unsafe {
+            // SAFETY:
+            // - len is increased only after initializing elements
+            let (this, spare, len) = self.split_at_spare_mut_with_len();
+
+            // SAFETY:
+            // - caller guarantees that src is a valid index
+            let to_clone = this.get_unchecked(range);
+
+            for (src, dst) in iter::zip(to_clone, spare) {
+                dst.write(src.try_clone()?);
+                *len += 1;
+            }
+        }
+
+        Ok(())
+    }
+}
+
+impl<T, A: Allocator, const N: usize> Vec<[T; N], A> {
+    /// Takes a `Vec<[T; N]>` and flattens it into a `Vec<T>`.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the length of the resulting vector would overflow a `usize`.
+    ///
+    /// This is only possible when flattening a vector of arrays of zero-sized
+    /// types, and thus tends to be irrelevant in practice. If
+    /// `size_of::<T>() > 0`, this will never panic.
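+    ///
+    /// (For non-zero-sized `T`, the flattened `len * N` elements already live
+    /// in the existing allocation, so the new length cannot overflow `usize`;
+    /// only the ZST case needs the checked multiply below.)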
+ /// + /// # Examples + /// + /// ``` + /// let mut vec = rune_alloc::try_vec![[1, 2, 3], [4, 5, 6], [7, 8, 9]]; + /// assert_eq!(vec.pop(), Some([7, 8, 9])); + /// + /// let mut flattened = vec.into_flattened(); + /// assert_eq!(flattened.pop(), Some(6)); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn into_flattened(self) -> Vec { + let (ptr, len, cap, alloc) = self.into_raw_parts_with_alloc(); + let (new_len, new_cap) = if T::IS_ZST { + (len.checked_mul(N).expect("vec len overflow"), usize::MAX) + } else { + // SAFETY: + // - `cap * N` cannot overflow because the allocation is already in + // the address space. + // - Each `[T; N]` has `N` valid elements, so there are `len * N` + // valid elements in the allocation. + (len.wrapping_mul(N), cap.wrapping_mul(N)) + }; + // SAFETY: + // - `ptr` was allocated by `self` + // - `ptr` is well-aligned because `[T; N]` has the same alignment as `T`. + // - `new_cap` refers to the same sized allocation as `cap` because + // `new_cap * size_of::()` == `cap * size_of::<[T; N]>()` + // - `len` <= `cap`, so `len * N` <= `cap * N`. + unsafe { Vec::::from_raw_parts_in(ptr.cast(), new_len, new_cap, alloc) } + } +} + +impl Vec +where + T: TryClone, +{ + /// Extend the vector by `n` clones of value. + fn try_extend_with(&mut self, n: usize, value: T) -> Result<(), Error> { + self.try_reserve(n)?; + + unsafe { + let mut ptr = self.as_mut_ptr().add(self.len()); + // Use SetLenOnDrop to work around bug where compiler + // might not realize the store through `ptr` through self.set_len() + // don't alias. + let mut local_len = SetLenOnDrop::new(&mut self.len); + + // Write all elements except the last one + for _ in 1..n { + ptr::write(ptr, value.try_clone()?); + ptr = ptr.add(1); + // Increment the length in every step in case clone() panics + local_len.increment_len(1); + } + + if n > 0 { + // We can write the last element directly without cloning needlessly + ptr::write(ptr, value); + local_len.increment_len(1); + } + + // len set by scope guard + } + + Ok(()) + } +} + +impl Vec +where + T: PartialEq, +{ + /// Removes consecutive repeated elements in the vector according to the + /// [`PartialEq`] trait implementation. + /// + /// If the vector is sorted, this removes all duplicates. + /// + /// # Examples + /// + /// ``` + /// let mut vec = rune_alloc::try_vec![1, 2, 2, 3, 2]; + /// + /// vec.dedup(); + /// + /// assert_eq!(vec, [1, 2, 3, 2]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn dedup(&mut self) { + self.dedup_by(|a, b| a == b) + } +} + +//////////////////////////////////////////////////////////////////////////////// +// Common trait implementations for Vec +//////////////////////////////////////////////////////////////////////////////// + +impl ops::Deref for Vec { + type Target = [T]; + + #[inline] + fn deref(&self) -> &[T] { + unsafe { slice::from_raw_parts(self.as_ptr(), self.len) } + } +} + +impl ops::DerefMut for Vec { + #[inline] + fn deref_mut(&mut self) -> &mut [T] { + unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) } + } +} + +impl TryClone for Vec +where + T: TryClone, +{ + fn try_clone(&self) -> Result { + let alloc = self.allocator().clone(); + crate::alloc::slice::to_vec(self, alloc) + } +} + +#[cfg(test)] +impl Clone for Vec +where + T: TryClone, +{ + fn clone(&self) -> Self { + self.try_clone().abort() + } +} + +/// The hash of a vector is the same as that of the corresponding slice, +/// as required by the `core::borrow::Borrow` implementation. 
+/// +/// ``` +/// use std::hash::BuildHasher; +/// use rune_alloc::Vec; +/// +/// let b = std::collections::hash_map::RandomState::new(); +/// let v: Vec = rune_alloc::try_vec![0xa8, 0x3c, 0x09]; +/// let s: &[u8] = &[0xa8, 0x3c, 0x09]; +/// assert_eq!(b.hash_one(v), b.hash_one(s)); +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +impl Hash for Vec { + #[inline] + fn hash(&self, state: &mut H) { + Hash::hash(&**self, state) + } +} + +impl, A: Allocator> Index for Vec { + type Output = I::Output; + + #[inline] + fn index(&self, index: I) -> &Self::Output { + Index::index(&**self, index) + } +} + +impl, A: Allocator> IndexMut for Vec { + #[inline] + fn index_mut(&mut self, index: I) -> &mut Self::Output { + IndexMut::index_mut(&mut **self, index) + } +} + +impl IntoIterator for Vec { + type Item = T; + type IntoIter = IntoIter; + + /// Creates a consuming iterator, that is, one that moves each value out of + /// the vector (from start to end). The vector cannot be used after calling + /// this. + /// + /// # Examples + /// + /// ``` + /// let v = rune_alloc::try_vec!["a".to_string(), "b".to_string()]; + /// let mut v_iter = v.into_iter(); + /// + /// let first_element: Option = v_iter.next(); + /// + /// assert_eq!(first_element, Some("a".to_string())); + /// assert_eq!(v_iter.next(), Some("b".to_string())); + /// assert_eq!(v_iter.next(), None); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + fn into_iter(self) -> Self::IntoIter { + const fn wrapping_byte_add(this: *mut T, count: usize) -> *mut T { + this.cast::().wrapping_add(count) as *mut T + } + + unsafe { + let mut me = ManuallyDrop::new(self); + let alloc = ManuallyDrop::new(ptr::read(me.allocator())); + let begin = me.as_mut_ptr(); + let end = if T::IS_ZST { + wrapping_byte_add(begin, me.len()) + } else { + begin.add(me.len()) as *const T + }; + let cap = me.buf.capacity(); + IntoIter { + buf: NonNull::new_unchecked(begin), + phantom: PhantomData, + cap, + alloc, + ptr: begin, + end, + } + } + } +} + +impl<'a, T, A: Allocator> IntoIterator for &'a Vec { + type Item = &'a T; + type IntoIter = slice::Iter<'a, T>; + + fn into_iter(self) -> Self::IntoIter { + self.iter() + } +} + +impl<'a, T, A: Allocator> IntoIterator for &'a mut Vec { + type Item = &'a mut T; + type IntoIter = slice::IterMut<'a, T>; + + fn into_iter(self) -> Self::IntoIter { + self.iter_mut() + } +} + +// leaf method to which various SpecFrom/SpecExtend implementations delegate when +// they have no further optimizations to apply +fn try_extend_desugared<'a, T, A: Allocator>( + this: &mut Vec, + mut iterator: impl Iterator, +) -> Result<(), Error> +where + T: 'a + TryClone, +{ + // This is the case for a general iterator. + // + // This function should be the moral equivalent of: + // + // for item in iterator { + // self.push(item); + // } + while let Some(element) = iterator.next() { + let len = this.len(); + if len == this.capacity() { + let (lower, _) = iterator.size_hint(); + this.try_reserve(lower.saturating_add(1))?; + } + unsafe { + ptr::write(this.as_mut_ptr().add(len), element.try_clone()?); + // Since next() executes user code which can panic we have to bump the length + // after each step. + // NB can't overflow since we would have had to alloc the address space + this.set_len(len + 1); + } + } + + Ok(()) +} + +/// Implements comparison of vectors, [lexicographically](Ord#lexicographical-comparison). 
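+///
+/// A small doctest sketch of the ordering, in the style of the other examples
+/// in this file:
+///
+/// ```
+/// let a = rune_alloc::try_vec![1, 2, 3];
+/// let b = rune_alloc::try_vec![1, 2, 4];
+/// assert!(a < b);
+/// assert!(rune_alloc::try_vec![1, 2] < rune_alloc::try_vec![1, 2, 3]);
+/// # Ok::<_, rune_alloc::Error>(())
+/// ```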
+impl<T, A1, A2> PartialOrd<Vec<T, A2>> for Vec<T, A1>
+where
+    T: PartialOrd,
+    A1: Allocator,
+    A2: Allocator,
+{
+    #[inline]
+    fn partial_cmp(&self, other: &Vec<T, A2>) -> Option<Ordering> {
+        PartialOrd::partial_cmp(&**self, &**other)
+    }
+}
+
+impl<T: Eq, A: Allocator> Eq for Vec<T, A> {}
+
+/// Implements ordering of vectors, [lexicographically](Ord#lexicographical-comparison).
+impl<T: Ord, A: Allocator> Ord for Vec<T, A> {
+    #[inline]
+    fn cmp(&self, other: &Self) -> Ordering {
+        Ord::cmp(&**self, &**other)
+    }
+}
+
+#[cfg(rune_nightly)]
+unsafe impl<#[may_dangle] T, A: Allocator> Drop for Vec<T, A> {
+    fn drop(&mut self) {
+        unsafe {
+            // use drop for [T]
+            // use a raw slice to refer to the elements of the vector as weakest necessary type;
+            // could avoid questions of validity in certain cases
+            ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.len))
+        }
+        // RawVec handles deallocation
+    }
+}
+
+#[cfg(not(rune_nightly))]
+impl<T, A: Allocator> Drop for Vec<T, A> {
+    fn drop(&mut self) {
+        unsafe {
+            // use drop for [T]
+            // use a raw slice to refer to the elements of the vector as weakest necessary type;
+            // could avoid questions of validity in certain cases
+            ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.len))
+        }
+        // RawVec handles deallocation
+    }
+}
+
+impl<T> Default for Vec<T> {
+    /// Creates an empty `Vec<T>`.
+    ///
+    /// The vector will not allocate until elements are pushed onto it.
+    fn default() -> Vec<T> {
+        Vec::new()
+    }
+}
+
+impl<T: fmt::Debug, A: Allocator> fmt::Debug for Vec<T, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Debug::fmt(&**self, f)
+    }
+}
+
+impl<T, A: Allocator> Borrow<[T]> for Vec<T, A> {
+    #[inline]
+    fn borrow(&self) -> &[T] {
+        self
+    }
+}
+
+impl<T, A: Allocator> AsRef<Vec<T, A>> for Vec<T, A> {
+    fn as_ref(&self) -> &Vec<T, A> {
+        self
+    }
+}
+
+impl<T, A: Allocator> AsMut<Vec<T, A>> for Vec<T, A> {
+    fn as_mut(&mut self) -> &mut Vec<T, A> {
+        self
+    }
+}
+
+impl<T, A: Allocator> AsRef<[T]> for Vec<T, A> {
+    fn as_ref(&self) -> &[T] {
+        self
+    }
+}
+
+impl<T, A: Allocator> AsMut<[T]> for Vec<T, A> {
+    fn as_mut(&mut self) -> &mut [T] {
+        self
+    }
+}
+
+#[cfg(feature = "alloc")]
+impl<T> From<Vec<T>> for ::rust_alloc::vec::Vec<T> {
+    /// Convert a [`Vec`] into a std `Vec`.
+    ///
+    /// The result is allocated on the heap.
+    fn from(values: Vec<T>) -> Self {
+        let mut vec = ::rust_alloc::vec::Vec::with_capacity(values.len());
+
+        for value in values {
+            vec.push(value);
+        }
+
+        vec
+    }
+}
+
+impl<T> TryFrom<&[T]> for Vec<T>
+where
+    T: TryClone,
+{
+    type Error = Error;
+
+    /// Converts a `&[T]` into a [`Vec<T>`].
+    ///
+    /// The result is fallibly allocated on the heap.
+    fn try_from(values: &[T]) -> Result<Self, Error> {
+        let mut out = Vec::try_with_capacity(values.len())?;
+
+        for value in values {
+            out.try_push(value.try_clone()?)?;
+        }
+
+        Ok(out)
+    }
+}
+
+impl<T, const N: usize> TryFrom<[T; N]> for Vec<T> {
+    type Error = Error;
+
+    /// Converts a `[T; N]` into a [`Vec<T>`].
+    ///
+    /// The result is fallibly allocated on the heap.
+    ///
+    /// ```
+    /// use rune_alloc::Vec;
+    ///
+    /// let a = Vec::try_from([1, 2, 3])?;
+    /// let b: Vec<_> = [1, 2, 3].try_into()?;
+    /// assert_eq!(a, b);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```
+    fn try_from(arr: [T; N]) -> Result<Self, Error> {
+        let mut out = Vec::try_with_capacity(arr.len())?;
+        let arr = ManuallyDrop::new(arr);
+
+        if !<T>::IS_ZST {
+            // SAFETY: Vec::try_with_capacity ensures that there is enough capacity.
+            unsafe {
+                ptr::copy_nonoverlapping(arr.as_ptr(), out.as_mut_ptr(), N);
+            }
+        }
+
+        unsafe {
+            out.set_len(N);
+        }
+
+        Ok(out)
+    }
+}
+
+#[cfg(feature = "alloc")]
+impl<T> TryFrom<::rust_alloc::vec::Vec<T>> for Vec<T> {
+    type Error = Error;
+
+    /// Converts a std `Vec<T>` into a [`Vec<T>`].
+    ///
+    /// The result is allocated on the heap.
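+    ///
+    /// A brief doctest sketch of the conversion (assuming the `alloc` feature,
+    /// as for the rest of this impl):
+    ///
+    /// ```
+    /// use rune_alloc::Vec;
+    ///
+    /// let std_vec = vec![1, 2, 3];
+    /// let vec: Vec<i32> = Vec::try_from(std_vec)?;
+    /// assert_eq!(vec, [1, 2, 3]);
+    /// # Ok::<_, rune_alloc::Error>(())
+    /// ```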
+ fn try_from(values: ::rust_alloc::vec::Vec) -> Result { + let mut v = Self::try_with_capacity_in(values.len(), Global)?; + + for value in values { + v.try_push(value)?; + } + + Ok(v) + } +} + +impl TryFrom> for [T; N] { + type Error = Vec; + + /// Gets the entire contents of the `Vec` as an array, + /// if its size exactly matches that of the requested array. + /// + /// # Examples + /// + /// ``` + /// assert_eq!(rune_alloc::try_vec![1, 2, 3].try_into(), Ok([1, 2, 3])); + /// assert_eq!(>::new().try_into(), Ok([])); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// If the length doesn't match, the input comes back in `Err`: + /// ``` + /// use rune_alloc::{Vec, IteratorExt}; + /// + /// let r: Result<[i32; 4], _> = (0..10).try_collect::>()?.try_into(); + /// assert_eq!(r, Err(rune_alloc::try_vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9])); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// If you're fine with just getting a prefix of the `Vec`, + /// you can call [`.truncate(N)`](Vec::truncate) first. + /// ``` + /// use rune_alloc::String; + /// + /// let mut v = String::try_from("hello world")?.into_bytes(); + /// v.sort(); + /// v.truncate(2); + /// let [a, b]: [_; 2] = v.try_into().unwrap(); + /// assert_eq!(a, b' '); + /// assert_eq!(b, b'd'); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + fn try_from(mut vec: Vec) -> Result<[T; N], Vec> { + if vec.len() != N { + return Err(vec); + } + + // SAFETY: `.set_len(0)` is always sound. + unsafe { vec.set_len(0) }; + + // SAFETY: A `Vec`'s pointer is always aligned properly, and + // the alignment the array needs is the same as the items. + // We checked earlier that we have sufficient items. + // The items will not double-drop as the `set_len` + // tells the `Vec` not to also drop them. + let array = unsafe { ptr::read(vec.as_ptr() as *const [T; N]) }; + Ok(array) + } +} + +impl From> for Vec { + /// Convert a boxed slice into a vector by transferring ownership of + /// the existing heap allocation. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{Vec, Box}; + /// + /// let b: Box<[i32]> = rune_alloc::try_vec![1, 2, 3].try_into_boxed_slice()?; + /// assert_eq!(Vec::from(b), rune_alloc::try_vec![1, 2, 3]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + fn from(s: Box<[T], A>) -> Self { + crate::alloc::slice::into_vec(s) + } +} + +impl TryFromIteratorIn for Vec { + fn try_from_iter_in(iter: I, alloc: A) -> Result + where + I: IntoIterator, + { + let mut this = Vec::new_in(alloc); + + for value in iter { + this.try_push(value)?; + } + + Ok(this) + } +} + +#[cfg(test)] +impl FromIterator for Vec { + fn from_iter(iter: I) -> Self + where + I: IntoIterator, + { + Self::try_from_iter_in(iter, Global).abort() + } +} + +impl TryExtend for Vec { + #[inline] + fn try_extend>(&mut self, iter: I) -> Result<(), Error> { + >::spec_extend(self, iter.into_iter()) + } +} diff --git a/crates/rune-alloc/src/alloc/vec/partial_eq.rs b/crates/rune-alloc/src/alloc/vec/partial_eq.rs new file mode 100644 index 000000000..98b614a96 --- /dev/null +++ b/crates/rune-alloc/src/alloc/vec/partial_eq.rs @@ -0,0 +1,31 @@ +use crate::alloc::Allocator; + +use super::Vec; + +macro_rules! __impl_slice_eq1 { + ([$($vars:tt)*] $lhs:ty, $rhs:ty $(where $ty:ty: $bound:ident)?) => { + impl PartialEq<$rhs> for $lhs + where + T: PartialEq, + $($ty: $bound)? + { + #[inline] + fn eq(&self, other: &$rhs) -> bool { self[..] == other[..] } + #[inline] + #[allow(clippy::partialeq_ne_impl)] + fn ne(&self, other: &$rhs) -> bool { self[..] != other[..] 
}
+        }
+    }
+}
+
+__impl_slice_eq1! { [A: Allocator] Vec<T, A>, ::rust_alloc::vec::Vec<U> }
+__impl_slice_eq1! { [A: Allocator] ::rust_alloc::vec::Vec<T>, Vec<U, A> }
+__impl_slice_eq1! { [A1: Allocator, A2: Allocator] Vec<T, A1>, Vec<U, A2> }
+__impl_slice_eq1! { [A: Allocator] Vec<T, A>, &[U] }
+__impl_slice_eq1! { [A: Allocator] Vec<T, A>, &mut [U] }
+__impl_slice_eq1! { [A: Allocator] &[T], Vec<U, A> }
+__impl_slice_eq1! { [A: Allocator] &mut [T], Vec<U, A> }
+__impl_slice_eq1! { [A: Allocator] Vec<T, A>, [U] }
+__impl_slice_eq1! { [A: Allocator] [T], Vec<U, A> }
+__impl_slice_eq1! { [A: Allocator, const N: usize] Vec<T, A>, [U; N] }
+__impl_slice_eq1! { [A: Allocator, const N: usize] Vec<T, A>, &[U; N] }
diff --git a/crates/rune-alloc/src/alloc/vec/set_len_on_drop.rs b/crates/rune-alloc/src/alloc/vec/set_len_on_drop.rs
new file mode 100644
index 000000000..cff08b605
--- /dev/null
+++ b/crates/rune-alloc/src/alloc/vec/set_len_on_drop.rs
@@ -0,0 +1,36 @@
+// Set the length of the vec when the `SetLenOnDrop` value goes out of scope.
+//
+// The idea is: The length field in SetLenOnDrop is a local variable
+// that the optimizer will see does not alias with any stores through the Vec's data
+// pointer. This is a workaround for alias analysis issue #32155
+pub(super) struct SetLenOnDrop<'a> {
+    len: &'a mut usize,
+    local_len: usize,
+}
+
+impl<'a> SetLenOnDrop<'a> {
+    #[inline]
+    pub(super) fn new(len: &'a mut usize) -> Self {
+        SetLenOnDrop {
+            local_len: *len,
+            len,
+        }
+    }
+
+    #[inline]
+    pub(super) fn increment_len(&mut self, increment: usize) {
+        self.local_len += increment;
+    }
+
+    #[inline]
+    pub(super) fn current_len(&self) -> usize {
+        self.local_len
+    }
+}
+
+impl Drop for SetLenOnDrop<'_> {
+    #[inline]
+    fn drop(&mut self) {
+        *self.len = self.local_len;
+    }
+}
diff --git a/crates/rune-alloc/src/alloc/vec/spec_extend.rs b/crates/rune-alloc/src/alloc/vec/spec_extend.rs
new file mode 100644
index 000000000..8f1fcd033
--- /dev/null
+++ b/crates/rune-alloc/src/alloc/vec/spec_extend.rs
@@ -0,0 +1,72 @@
+#[cfg(rune_nightly)]
+use core::slice;
+
+use crate::alloc::{Allocator, Error};
+#[cfg(rune_nightly)]
+use crate::alloc::{TryClone, TryCopy};
+
+#[cfg(rune_nightly)]
+use super::IntoIter;
+use super::Vec;
+
+// Specialization trait used for Vec::extend
+pub(super) trait SpecExtend<T, I> {
+    fn spec_extend(&mut self, iter: I) -> Result<(), Error>;
+}
+
+impl<T, I, A: Allocator> SpecExtend<T, I> for Vec<T, A>
+where
+    I: Iterator<Item = T>,
+{
+    default_fn!
{ + fn spec_extend(&mut self, iter: I) -> Result<(), Error> { + for value in iter { + self.try_push(value)?; + } + + Ok(()) + } + } +} + +#[cfg(rune_nightly)] +impl SpecExtend> for Vec { + fn spec_extend(&mut self, mut iterator: IntoIter) -> Result<(), Error> { + unsafe { + self.try_append_elements(iterator.as_slice() as _)?; + } + iterator.forget_remaining_elements(); + Ok(()) + } +} + +#[cfg(rune_nightly)] +impl<'a, T: 'a, I, A: Allocator> SpecExtend<&'a T, I> for Vec +where + I: Iterator, + T: TryClone, +{ + default fn spec_extend(&mut self, iterator: I) -> Result<(), Error> { + for value in iterator { + self.try_push(value.try_clone()?)?; + } + + Ok(()) + } +} + +#[cfg(rune_nightly)] +impl<'a, T: 'a, A: Allocator> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec +where + T: TryCopy, +{ + fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) -> Result<(), Error> { + let slice = iterator.as_slice(); + + unsafe { + self.try_append_elements(slice)?; + } + + Ok(()) + } +} diff --git a/crates/rune-alloc/src/alloc/vec/spec_from_elem.rs b/crates/rune-alloc/src/alloc/vec/spec_from_elem.rs new file mode 100644 index 000000000..d6b572db3 --- /dev/null +++ b/crates/rune-alloc/src/alloc/vec/spec_from_elem.rs @@ -0,0 +1,88 @@ +#[cfg(rune_nightly)] +use core::ptr; + +#[cfg(rune_nightly)] +use crate::alloc::raw_vec::RawVec; +use crate::alloc::{Allocator, Error, TryClone}; + +#[cfg(rune_nightly)] +use super::IsZero; +use super::Vec; + +// Specialization trait used for Vec::from_elem +pub(super) trait SpecFromElem: Sized { + fn from_elem(elem: Self, n: usize, alloc: A) -> Result, Error>; +} + +impl SpecFromElem for T +where + T: TryClone, +{ + default_fn! { + fn from_elem(elem: Self, n: usize, alloc: A) -> Result, Error> { + let mut v = Vec::try_with_capacity_in(n, alloc)?; + v.try_extend_with(n, elem)?; + Ok(v) + } + } +} + +#[cfg(rune_nightly)] +impl SpecFromElem for T +where + T: TryClone + IsZero, +{ + #[inline] + default fn from_elem(elem: T, n: usize, alloc: A) -> Result, Error> { + if elem.is_zero() { + return Ok(Vec { + buf: RawVec::try_with_capacity_zeroed_in(n, alloc)?, + len: n, + }); + } + + let mut v = Vec::try_with_capacity_in(n, alloc)?; + v.try_extend_with(n, elem)?; + Ok(v) + } +} + +#[cfg(rune_nightly)] +impl SpecFromElem for i8 { + #[inline] + fn from_elem(elem: i8, n: usize, alloc: A) -> Result, Error> { + if elem == 0 { + return Ok(Vec { + buf: RawVec::try_with_capacity_zeroed_in(n, alloc)?, + len: n, + }); + } + + unsafe { + let mut v = Vec::try_with_capacity_in(n, alloc)?; + ptr::write_bytes(v.as_mut_ptr(), elem as u8, n); + v.set_len(n); + Ok(v) + } + } +} + +#[cfg(rune_nightly)] +impl SpecFromElem for u8 { + #[inline] + fn from_elem(elem: u8, n: usize, alloc: A) -> Result, Error> { + if elem == 0 { + return Ok(Vec { + buf: RawVec::try_with_capacity_zeroed_in(n, alloc)?, + len: n, + }); + } + + unsafe { + let mut v = Vec::try_with_capacity_in(n, alloc)?; + ptr::write_bytes(v.as_mut_ptr(), elem, n); + v.set_len(n); + Ok(v) + } + } +} diff --git a/crates/rune-alloc/src/alloc/vec/splice.rs b/crates/rune-alloc/src/alloc/vec/splice.rs new file mode 100644 index 000000000..25d27a8ab --- /dev/null +++ b/crates/rune-alloc/src/alloc/vec/splice.rs @@ -0,0 +1,121 @@ +use core::ptr::{self}; +use core::slice::{self}; + +use crate::alloc::{Allocator, Error}; + +use super::{Drain, Vec}; + +// NB: This is a larger rewrite than typical, but that's because the `Splice` +// does a lot of work when it's dropped instead of performing the work in-place +// like this. 
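+//
+// In outline (restating the code below): eagerly drop the drained range,
+// refill it from `replace_with`, and if the iterator still has elements
+// left over, make room by shifting the tail with `move_tail` and collect
+// whatever remains into a temporary `Vec` to get an exact count.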
+pub(crate) fn splice<'a, I, A>( + drain: &mut Drain<'a, I::Item, A>, + replace_with: &mut I, +) -> Result<(), Error> +where + I: Iterator + 'a, + A: Allocator + 'a, +{ + for element in drain.by_ref() { + drop(element); + } + + // At this point draining is done and the only remaining tasks are splicing + // and moving things into the final place. + // Which means we can replace the slice::Iter with pointers that won't point to deallocated + // memory, so that Drain::drop is still allowed to call iter.len(), otherwise it would break + // the ptr.sub_ptr contract. + drain.iter = [].iter(); + + unsafe { + if drain.tail_len == 0 { + let out = drain.vec.as_mut(); + + for element in replace_with.by_ref() { + out.try_push(element)?; + } + + return Ok(()); + } + + // First fill the range left by drain(). + if !drain.fill(replace_with) { + return Ok(()); + } + + // There may be more elements. Use the lower bound as an estimate. + // FIXME: Is the upper bound a better guess? Or something else? + let (lower_bound, _upper_bound) = replace_with.size_hint(); + + if lower_bound > 0 { + drain.move_tail(lower_bound)?; + + if !drain.fill(replace_with) { + return Ok(()); + } + } + + // Collect any remaining elements. + // This is a zero-length vector which does not allocate if `lower_bound` was exact. + let mut collected = Vec::new_in(drain.vec.as_ref().allocator()); + + for element in replace_with.by_ref() { + collected.try_push(element)?; + } + + let mut collected = collected.into_iter(); + + // Now we have an exact count. + if collected.len() > 0 { + drain.move_tail(collected.len())?; + let filled = drain.fill(&mut collected); + debug_assert!(filled); + debug_assert_eq!(collected.len(), 0); + } + + Ok(()) + } + // Let `Drain::drop` move the tail back if necessary and restore `vec.len`. +} + +/// Private helper methods for `Splice::drop` +impl Drain<'_, T, A> { + /// The range from `self.vec.len` to `self.tail_start` contains elements + /// that have been moved out. + /// Fill that range as much as possible with new elements from the `replace_with` iterator. + /// Returns `true` if we filled the entire range. (`replace_with.next()` didn’t return `None`.) + unsafe fn fill>(&mut self, replace_with: &mut I) -> bool { + let vec = unsafe { self.vec.as_mut() }; + let range_start = vec.len; + let range_end = self.tail_start; + let range_slice = unsafe { + slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start) + }; + + for place in range_slice { + if let Some(new_item) = replace_with.next() { + unsafe { ptr::write(place, new_item) }; + vec.len += 1; + } else { + return false; + } + } + true + } + + /// Makes room for inserting more elements before the tail. 
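+    ///
+    /// Reserves space for `additional` more elements, then shifts the
+    /// `tail_len` elements starting at `self.tail_start` up by `additional`
+    /// slots with a (potentially overlapping) `ptr::copy`.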
+ unsafe fn move_tail(&mut self, additional: usize) -> Result<(), Error> { + let vec = unsafe { self.vec.as_mut() }; + let len = self.tail_start + self.tail_len; + vec.buf.try_reserve(len, additional)?; + + let new_tail_start = self.tail_start + additional; + unsafe { + let src = vec.as_ptr().add(self.tail_start); + let dst = vec.as_mut_ptr().add(new_tail_start); + ptr::copy(src, dst, self.tail_len); + } + self.tail_start = new_tail_start; + Ok(()) + } +} diff --git a/crates/rune-alloc/src/alloc/vec_deque/drain.rs b/crates/rune-alloc/src/alloc/vec_deque/drain.rs new file mode 100644 index 000000000..2fc0f2195 --- /dev/null +++ b/crates/rune-alloc/src/alloc/vec_deque/drain.rs @@ -0,0 +1,209 @@ +use core::fmt; +use core::iter::FusedIterator; +use core::marker::PhantomData; +use core::mem; + +use crate::alloc::{Allocator, Global, SizedTypeProperties}; +use crate::ptr::{self, NonNull}; + +use super::VecDeque; + +/// A draining iterator over the elements of a `VecDeque`. +/// +/// This `struct` is created by the [`drain`] method on [`VecDeque`]. See its +/// documentation for more. +/// +/// [`drain`]: VecDeque::drain +pub struct Drain<'a, T: 'a, A: Allocator = Global> { + // We can't just use a &mut VecDeque, as that would make Drain invariant over T + // and we want it to be covariant instead + deque: NonNull>, + // drain_start is stored in deque.len + drain_len: usize, + // index into the logical array, not the physical one (always lies in [0..deque.len)) + idx: usize, + // number of elements after the drain range + tail_len: usize, + remaining: usize, + // Needed to make Drain covariant over T + _marker: PhantomData<&'a T>, +} + +impl<'a, T, A: Allocator> Drain<'a, T, A> { + pub(super) unsafe fn new( + deque: &'a mut VecDeque, + drain_start: usize, + drain_len: usize, + ) -> Self { + let orig_len = mem::replace(&mut deque.len, drain_start); + let tail_len = orig_len - drain_start - drain_len; + Drain { + deque: NonNull::from(deque), + drain_len, + idx: drain_start, + tail_len, + remaining: drain_len, + _marker: PhantomData, + } + } + + // Only returns pointers to the slices, as that's all we need + // to drop them. May only be called if `self.remaining != 0`. + unsafe fn as_slices(&self) -> (*mut [T], *mut [T]) { + unsafe { + let deque = self.deque.as_ref(); + + // We know that `self.idx + self.remaining <= deque.len <= usize::MAX`, so this won't overflow. + let logical_remaining_range = self.idx..self.idx + self.remaining; + + // SAFETY: `logical_remaining_range` represents the + // range into the logical buffer of elements that + // haven't been drained yet, so they're all initialized, + // and `slice::range(start..end, end) == start..end`, + // so the preconditions for `slice_ranges` are met. 
+ let (a_range, b_range) = + deque.slice_ranges(logical_remaining_range.clone(), logical_remaining_range.end); + (deque.buffer_range(a_range), deque.buffer_range(b_range)) + } + } +} + +impl fmt::Debug for Drain<'_, T, A> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("Drain") + .field(&self.drain_len) + .field(&self.idx) + .field(&self.tail_len) + .field(&self.remaining) + .finish() + } +} + +unsafe impl Sync for Drain<'_, T, A> {} +unsafe impl Send for Drain<'_, T, A> {} + +impl Drop for Drain<'_, T, A> { + fn drop(&mut self) { + struct DropGuard<'r, 'a, T, A: Allocator>(&'r mut Drain<'a, T, A>); + + impl<'r, 'a, T, A: Allocator> Drop for DropGuard<'r, 'a, T, A> { + fn drop(&mut self) { + if self.0.remaining != 0 { + unsafe { + // SAFETY: We just checked that `self.remaining != 0`. + let (front, back) = self.0.as_slices(); + ptr::drop_in_place(front); + ptr::drop_in_place(back); + } + } + + let source_deque = unsafe { self.0.deque.as_mut() }; + + let drain_start = source_deque.len(); + let drain_len = self.0.drain_len; + let drain_end = drain_start + drain_len; + + let orig_len = self.0.tail_len + drain_end; + + if T::IS_ZST { + // no need to copy around any memory if T is a ZST + source_deque.len = orig_len - drain_len; + return; + } + + let head_len = drain_start; + let tail_len = self.0.tail_len; + + match (head_len, tail_len) { + (0, 0) => { + source_deque.head = 0; + source_deque.len = 0; + } + (0, _) => { + source_deque.head = source_deque.to_physical_idx(drain_len); + source_deque.len = orig_len - drain_len; + } + (_, 0) => { + source_deque.len = orig_len - drain_len; + } + _ => unsafe { + if head_len <= tail_len { + source_deque.wrap_copy( + source_deque.head, + source_deque.to_physical_idx(drain_len), + head_len, + ); + source_deque.head = source_deque.to_physical_idx(drain_len); + source_deque.len = orig_len - drain_len; + } else { + source_deque.wrap_copy( + source_deque.to_physical_idx(head_len + drain_len), + source_deque.to_physical_idx(head_len), + tail_len, + ); + source_deque.len = orig_len - drain_len; + } + }, + } + } + } + + let guard = DropGuard(self); + + if guard.0.remaining != 0 { + unsafe { + // SAFETY: We just checked that `self.remaining != 0`. + let (front, back) = guard.0.as_slices(); + // since idx is a logical index, we don't need to worry about wrapping. + guard.0.idx += ptr::slice_len(front); + guard.0.remaining -= ptr::slice_len(front); + ptr::drop_in_place(front); + guard.0.remaining = 0; + ptr::drop_in_place(back); + } + } + + // Dropping `guard` handles moving the remaining elements into place. 
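+        //
+        // The guard also gives panic safety: if a destructor above unwinds,
+        // `DropGuard::drop` still runs, dropping whatever remains and
+        // restoring the source deque's `head` and `len` invariants.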
+ } +} + +impl Iterator for Drain<'_, T, A> { + type Item = T; + + #[inline] + fn next(&mut self) -> Option { + if self.remaining == 0 { + return None; + } + let wrapped_idx = unsafe { self.deque.as_ref().to_physical_idx(self.idx) }; + self.idx += 1; + self.remaining -= 1; + Some(unsafe { self.deque.as_mut().buffer_read(wrapped_idx) }) + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let len = self.remaining; + (len, Some(len)) + } +} + +impl DoubleEndedIterator for Drain<'_, T, A> { + #[inline] + fn next_back(&mut self) -> Option { + if self.remaining == 0 { + return None; + } + self.remaining -= 1; + let wrapped_idx = unsafe { + self.deque + .as_ref() + .to_physical_idx(self.idx + self.remaining) + }; + Some(unsafe { self.deque.as_mut().buffer_read(wrapped_idx) }) + } +} + +impl ExactSizeIterator for Drain<'_, T, A> {} + +impl FusedIterator for Drain<'_, T, A> {} diff --git a/crates/rune-alloc/src/alloc/vec_deque/into_iter.rs b/crates/rune-alloc/src/alloc/vec_deque/into_iter.rs new file mode 100644 index 000000000..9da531aaf --- /dev/null +++ b/crates/rune-alloc/src/alloc/vec_deque/into_iter.rs @@ -0,0 +1,178 @@ +use core::fmt; +use core::iter::FusedIterator; +use core::ptr; + +use crate::{Allocator, Error, Global, TryClone}; + +use super::VecDeque; + +/// An owning iterator over the elements of a `VecDeque`. +/// +/// This `struct` is created by the [`into_iter`] method on [`VecDeque`] +/// (provided by the [`IntoIterator`] trait). See its documentation for more. +/// +/// [`into_iter`]: VecDeque::into_iter +pub struct IntoIter { + inner: VecDeque, +} + +impl TryClone for IntoIter +where + T: TryClone, +{ + #[inline] + fn try_clone(&self) -> Result { + Ok(IntoIter { + inner: self.inner.try_clone()?, + }) + } + + #[inline] + fn try_clone_from(&mut self, source: &Self) -> Result<(), Error> { + self.inner.try_clone_from(&source.inner) + } +} + +impl IntoIter { + pub(super) fn new(inner: VecDeque) -> Self { + IntoIter { inner } + } + + pub(super) fn into_vecdeque(self) -> VecDeque { + self.inner + } +} + +impl fmt::Debug for IntoIter { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("IntoIter").field(&self.inner).finish() + } +} + +impl Iterator for IntoIter { + type Item = T; + + #[inline] + fn next(&mut self) -> Option { + self.inner.pop_front() + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let len = self.inner.len(); + (len, Some(len)) + } + + #[inline] + fn count(self) -> usize { + self.inner.len + } + + #[inline] + fn fold(mut self, mut init: B, mut f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + struct Guard<'a, T, A: Allocator> { + deque: &'a mut VecDeque, + // `consumed <= deque.len` always holds. + consumed: usize, + } + + impl<'a, T, A: Allocator> Drop for Guard<'a, T, A> { + fn drop(&mut self) { + self.deque.len -= self.consumed; + self.deque.head = self.deque.to_physical_idx(self.consumed); + } + } + + let mut guard = Guard { + deque: &mut self.inner, + consumed: 0, + }; + + let (head, tail) = guard.deque.as_slices(); + + init = head + .iter() + .map(|elem| { + guard.consumed += 1; + // SAFETY: Because we incremented `guard.consumed`, the + // deque effectively forgot the element, so we can take + // ownership + unsafe { ptr::read(elem) } + }) + .fold(init, &mut f); + + tail.iter() + .map(|elem| { + guard.consumed += 1; + // SAFETY: Same as above. 
+ unsafe { ptr::read(elem) } + }) + .fold(init, &mut f) + } + + #[inline] + fn last(mut self) -> Option { + self.inner.pop_back() + } +} + +impl DoubleEndedIterator for IntoIter { + #[inline] + fn next_back(&mut self) -> Option { + self.inner.pop_back() + } + + #[inline] + fn rfold(mut self, mut init: B, mut f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + struct Guard<'a, T, A: Allocator> { + deque: &'a mut VecDeque, + // `consumed <= deque.len` always holds. + consumed: usize, + } + + impl<'a, T, A: Allocator> Drop for Guard<'a, T, A> { + fn drop(&mut self) { + self.deque.len -= self.consumed; + } + } + + let mut guard = Guard { + deque: &mut self.inner, + consumed: 0, + }; + + let (head, tail) = guard.deque.as_slices(); + + init = tail + .iter() + .map(|elem| { + guard.consumed += 1; + // SAFETY: See `try_fold`'s safety comment. + unsafe { ptr::read(elem) } + }) + .fold(init, &mut f); + + head.iter() + .map(|elem| { + guard.consumed += 1; + // SAFETY: Same as above. + unsafe { ptr::read(elem) } + }) + .fold(init, &mut f) + } +} + +impl ExactSizeIterator for IntoIter { + #[inline] + fn len(&self) -> usize { + self.inner.len() + } +} + +impl FusedIterator for IntoIter {} diff --git a/crates/rune-alloc/src/alloc/vec_deque/iter.rs b/crates/rune-alloc/src/alloc/vec_deque/iter.rs new file mode 100644 index 000000000..416a48e4b --- /dev/null +++ b/crates/rune-alloc/src/alloc/vec_deque/iter.rs @@ -0,0 +1,113 @@ +use core::fmt; +use core::iter::FusedIterator; +use core::mem; +use core::slice; + +/// An iterator over the elements of a `VecDeque`. +/// +/// This `struct` is created by the [`iter`] method on [`super::VecDeque`]. See its +/// documentation for more. +/// +/// [`iter`]: super::VecDeque::iter +pub struct Iter<'a, T: 'a> { + i1: slice::Iter<'a, T>, + i2: slice::Iter<'a, T>, +} + +impl<'a, T> Iter<'a, T> { + pub(super) fn new(i1: slice::Iter<'a, T>, i2: slice::Iter<'a, T>) -> Self { + Self { i1, i2 } + } +} + +impl fmt::Debug for Iter<'_, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("Iter") + .field(&self.i1.as_slice()) + .field(&self.i2.as_slice()) + .finish() + } +} + +// FIXME(#26925) Remove in favor of `#[derive(Clone)]` +impl Clone for Iter<'_, T> { + fn clone(&self) -> Self { + Iter { + i1: self.i1.clone(), + i2: self.i2.clone(), + } + } +} + +impl<'a, T> Iterator for Iter<'a, T> { + type Item = &'a T; + + #[inline] + fn next(&mut self) -> Option<&'a T> { + match self.i1.next() { + Some(val) => Some(val), + None => { + // most of the time, the iterator will either always + // call next(), or always call next_back(). By swapping + // the iterators once the first one is empty, we ensure + // that the first branch is taken as often as possible, + // without sacrificing correctness, as i1 is empty anyways + mem::swap(&mut self.i1, &mut self.i2); + self.i1.next() + } + } + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let len = self.len(); + (len, Some(len)) + } + + fn fold(self, accum: Acc, mut f: F) -> Acc + where + F: FnMut(Acc, Self::Item) -> Acc, + { + let accum = self.i1.fold(accum, &mut f); + self.i2.fold(accum, &mut f) + } + + #[inline] + fn last(mut self) -> Option<&'a T> { + self.next_back() + } +} + +impl<'a, T> DoubleEndedIterator for Iter<'a, T> { + #[inline] + fn next_back(&mut self) -> Option<&'a T> { + match self.i2.next_back() { + Some(val) => Some(val), + None => { + // most of the time, the iterator will either always + // call next(), or always call next_back(). 
By swapping + // the iterators once the second one is empty, we ensure + // that the first branch is taken as often as possible, + // without sacrificing correctness, as i2 is empty anyways + mem::swap(&mut self.i1, &mut self.i2); + self.i2.next_back() + } + } + } + + fn rfold(self, accum: Acc, mut f: F) -> Acc + where + F: FnMut(Acc, Self::Item) -> Acc, + { + let accum = self.i2.rfold(accum, &mut f); + self.i1.rfold(accum, &mut f) + } +} + +impl ExactSizeIterator for Iter<'_, T> { + fn len(&self) -> usize { + self.i1.len() + self.i2.len() + } +} + +impl FusedIterator for Iter<'_, T> {} diff --git a/crates/rune-alloc/src/alloc/vec_deque/iter_mut.rs b/crates/rune-alloc/src/alloc/vec_deque/iter_mut.rs new file mode 100644 index 000000000..5ee5d6a2c --- /dev/null +++ b/crates/rune-alloc/src/alloc/vec_deque/iter_mut.rs @@ -0,0 +1,103 @@ +use core::fmt; +use core::iter::FusedIterator; +use core::mem; +use core::slice; + +/// A mutable iterator over the elements of a `VecDeque`. +/// +/// This `struct` is created by the [`iter_mut`] method on [`super::VecDeque`]. See its +/// documentation for more. +/// +/// [`iter_mut`]: super::VecDeque::iter_mut +pub struct IterMut<'a, T: 'a> { + i1: slice::IterMut<'a, T>, + i2: slice::IterMut<'a, T>, +} + +impl<'a, T> IterMut<'a, T> { + pub(super) fn new(i1: slice::IterMut<'a, T>, i2: slice::IterMut<'a, T>) -> Self { + Self { i1, i2 } + } +} + +impl fmt::Debug for IterMut<'_, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("IterMut") + .field(&self.i1.as_slice()) + .field(&self.i2.as_slice()) + .finish() + } +} + +impl<'a, T> Iterator for IterMut<'a, T> { + type Item = &'a mut T; + + #[inline] + fn next(&mut self) -> Option<&'a mut T> { + match self.i1.next() { + Some(val) => Some(val), + None => { + // most of the time, the iterator will either always + // call next(), or always call next_back(). By swapping + // the iterators once the first one is empty, we ensure + // that the first branch is taken as often as possible, + // without sacrificing correctness, as i1 is empty anyways + mem::swap(&mut self.i1, &mut self.i2); + self.i1.next() + } + } + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let len = self.len(); + (len, Some(len)) + } + + fn fold(self, accum: Acc, mut f: F) -> Acc + where + F: FnMut(Acc, Self::Item) -> Acc, + { + let accum = self.i1.fold(accum, &mut f); + self.i2.fold(accum, &mut f) + } + + #[inline] + fn last(mut self) -> Option<&'a mut T> { + self.next_back() + } +} + +impl<'a, T> DoubleEndedIterator for IterMut<'a, T> { + #[inline] + fn next_back(&mut self) -> Option<&'a mut T> { + match self.i2.next_back() { + Some(val) => Some(val), + None => { + // most of the time, the iterator will either always + // call next(), or always call next_back(). 
By swapping + // the iterators once the first one is empty, we ensure + // that the first branch is taken as often as possible, + // without sacrificing correctness, as i2 is empty anyways + mem::swap(&mut self.i1, &mut self.i2); + self.i2.next_back() + } + } + } + + fn rfold(self, accum: Acc, mut f: F) -> Acc + where + F: FnMut(Acc, Self::Item) -> Acc, + { + let accum = self.i2.rfold(accum, &mut f); + self.i1.rfold(accum, &mut f) + } +} + +impl ExactSizeIterator for IterMut<'_, T> { + fn len(&self) -> usize { + self.i1.len() + self.i2.len() + } +} + +impl FusedIterator for IterMut<'_, T> {} diff --git a/crates/rune-alloc/src/alloc/vec_deque/macros.rs b/crates/rune-alloc/src/alloc/vec_deque/macros.rs new file mode 100644 index 000000000..3b7621564 --- /dev/null +++ b/crates/rune-alloc/src/alloc/vec_deque/macros.rs @@ -0,0 +1,18 @@ +macro_rules! __impl_slice_eq1 { + ([$($vars:tt)*] $lhs:ty, $rhs:ty, $($constraints:tt)*) => { + impl PartialEq<$rhs> for $lhs + where + T: PartialEq, + $($constraints)* + { + fn eq(&self, other: &$rhs) -> bool { + if self.len() != other.len() { + return false; + } + let (sa, sb) = self.as_slices(); + let (oa, ob) = other[..].split_at(sa.len()); + sa == oa && sb == ob + } + } + } +} diff --git a/crates/rune-alloc/src/alloc/vec_deque/mod.rs b/crates/rune-alloc/src/alloc/vec_deque/mod.rs new file mode 100644 index 000000000..a863806ad --- /dev/null +++ b/crates/rune-alloc/src/alloc/vec_deque/mod.rs @@ -0,0 +1,2872 @@ +//! A double-ended queue (deque) implemented with a growable ring buffer. +//! +//! This queue has *O*(1) amortized inserts and removals from both ends of the +//! container. It also has *O*(1) indexing like a vector. The contained elements +//! are not required to be copyable, and the queue will be sendable if the +//! contained type is sendable. + +#![allow(clippy::redundant_closure)] + +use core::cmp::{self, Ordering}; +use core::fmt; +use core::hash::{Hash, Hasher}; +use core::mem::ManuallyDrop; +use core::ops::{Index, IndexMut, Range, RangeBounds}; +use core::ptr; +use core::slice; + +// This is used in a bunch of intra-doc links. +// FIXME: For some reason, `#[cfg(doc)]` wasn't sufficient, resulting in +// failures in linkchecker even though rustdoc built the docs just fine. +#[allow(unused_imports)] +use core::mem; + +use crate::alloc::raw_vec::RawVec; +use crate::alloc::{ + Allocator, Error, Global, SizedTypeProperties, TryClone, TryExtend, TryFromIteratorIn, Vec, +}; +use crate::slice::range as slice_range; + +#[macro_use] +mod macros; + +pub use self::drain::Drain; + +mod drain; + +pub use self::iter_mut::IterMut; + +mod iter_mut; + +pub use self::into_iter::IntoIter; + +mod into_iter; + +pub use self::iter::Iter; + +mod iter; + +pub use self::raw_iter::RawIter; + +mod raw_iter; + +/// A double-ended queue implemented with a growable ring buffer. +/// +/// The "default" usage of this type as a queue is to use [`try_push_back`] to add to +/// the queue, and [`pop_front`] to remove from the queue. [`try_extend`] and [`try_append`] +/// push onto the back in this manner, and iterating over `VecDeque` goes front +/// to back. +/// +/// A `VecDeque` with a known list of items can be initialized from an array: +/// +/// ``` +/// use rune_alloc::VecDeque; +/// +/// let deq = VecDeque::try_from([-1, 0, 1])?; +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// Since `VecDeque` is a ring buffer, its elements are not necessarily contiguous +/// in memory. 
If you want to access the elements as a single slice, such as for +/// efficient sorting, you can use [`make_contiguous`]. It rotates the `VecDeque` +/// so that its elements do not wrap, and returns a mutable slice to the +/// now-contiguous element sequence. +/// +/// [`try_push_back`]: VecDeque::try_push_back +/// [`pop_front`]: VecDeque::pop_front +/// [`try_extend`]: VecDeque::try_extend +/// [`try_append`]: VecDeque::try_append +/// [`make_contiguous`]: VecDeque::make_contiguous +pub struct VecDeque { + // `self[0]`, if it exists, is `buf[head]`. + // `head < buf.capacity()`, unless `buf.capacity() == 0` when `head == 0`. + head: usize, + // the number of initialized elements, starting from the one at `head` and potentially wrapping around. + // if `len == 0`, the exact value of `head` is unimportant. + // if `T` is zero-Sized, then `self.len <= usize::MAX`, otherwise `self.len <= isize::MAX as usize`. + len: usize, + buf: RawVec, +} + +impl TryClone for VecDeque { + fn try_clone(&self) -> Result { + let mut deq = Self::try_with_capacity_in(self.len(), self.allocator().clone())?; + + for value in self.iter() { + deq.try_push_back(value.try_clone()?)?; + } + + Ok(deq) + } + + fn try_clone_from(&mut self, other: &Self) -> Result<(), Error> { + self.clear(); + + for value in other.iter() { + self.try_push_back(value.try_clone()?)?; + } + + Ok(()) + } +} + +#[cfg(rune_nightly)] +unsafe impl<#[may_dangle] T, A: Allocator> Drop for VecDeque { + fn drop(&mut self) { + /// Runs the destructor for all items in the slice when it gets dropped (normally or + /// during unwinding). + struct Dropper<'a, T>(&'a mut [T]); + + impl<'a, T> Drop for Dropper<'a, T> { + fn drop(&mut self) { + unsafe { + ptr::drop_in_place(self.0); + } + } + } + + let (front, back) = self.as_mut_slices(); + unsafe { + let _back_dropper = Dropper(back); + // use drop for [T] + ptr::drop_in_place(front); + } + // RawVec handles deallocation + } +} + +#[cfg(not(rune_nightly))] +impl Drop for VecDeque { + fn drop(&mut self) { + /// Runs the destructor for all items in the slice when it gets dropped (normally or + /// during unwinding). + struct Dropper<'a, T>(&'a mut [T]); + + impl<'a, T> Drop for Dropper<'a, T> { + fn drop(&mut self) { + unsafe { + ptr::drop_in_place(self.0); + } + } + } + + let (front, back) = self.as_mut_slices(); + unsafe { + let _back_dropper = Dropper(back); + // use drop for [T] + ptr::drop_in_place(front); + } + // RawVec handles deallocation + } +} + +impl Default for VecDeque { + /// Creates an empty deque. + #[inline] + fn default() -> VecDeque { + VecDeque::new() + } +} + +impl VecDeque { + /// Marginally more convenient + #[inline] + fn ptr(&self) -> *mut T { + self.buf.ptr() + } + + /// Moves an element out of the buffer + #[inline] + unsafe fn buffer_read(&mut self, off: usize) -> T { + unsafe { ptr::read(self.ptr().add(off)) } + } + + /// Writes an element into the buffer, moving it. + #[inline] + unsafe fn buffer_write(&mut self, off: usize, value: T) { + unsafe { + ptr::write(self.ptr().add(off), value); + } + } + + /// Returns a slice pointer into the buffer. + /// `range` must lie inside `0..self.capacity()`. + #[inline] + unsafe fn buffer_range(&self, range: Range) -> *mut [T] { + unsafe { + ptr::slice_from_raw_parts_mut(self.ptr().add(range.start), range.end - range.start) + } + } + + /// Returns `true` if the buffer is at full capacity. 
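+    ///
+    /// Since `self.len <= self.capacity()` always holds, equality is the
+    /// only way the buffer can be out of room.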
+ #[inline] + fn is_full(&self) -> bool { + self.len == self.capacity() + } + + /// Returns the index in the underlying buffer for a given logical element + /// index + addend. + #[inline] + fn wrap_add(&self, idx: usize, addend: usize) -> usize { + wrap_index(idx.wrapping_add(addend), self.capacity()) + } + + #[inline] + fn to_physical_idx(&self, idx: usize) -> usize { + self.wrap_add(self.head, idx) + } + + /// Returns the index in the underlying buffer for a given logical element + /// index - subtrahend. + #[inline] + fn wrap_sub(&self, idx: usize, subtrahend: usize) -> usize { + wrap_index( + idx.wrapping_sub(subtrahend).wrapping_add(self.capacity()), + self.capacity(), + ) + } + + /// Copies a contiguous block of memory len long from src to dst + #[inline] + unsafe fn copy(&mut self, src: usize, dst: usize, len: usize) { + debug_assert!( + dst + len <= self.capacity(), + "cpy dst={} src={} len={} cap={}", + dst, + src, + len, + self.capacity() + ); + debug_assert!( + src + len <= self.capacity(), + "cpy dst={} src={} len={} cap={}", + dst, + src, + len, + self.capacity() + ); + unsafe { + ptr::copy(self.ptr().add(src), self.ptr().add(dst), len); + } + } + + /// Copies a contiguous block of memory len long from src to dst + #[inline] + unsafe fn copy_nonoverlapping(&mut self, src: usize, dst: usize, len: usize) { + debug_assert!( + dst + len <= self.capacity(), + "cno dst={} src={} len={} cap={}", + dst, + src, + len, + self.capacity() + ); + debug_assert!( + src + len <= self.capacity(), + "cno dst={} src={} len={} cap={}", + dst, + src, + len, + self.capacity() + ); + unsafe { + ptr::copy_nonoverlapping(self.ptr().add(src), self.ptr().add(dst), len); + } + } + + /// Copies a potentially wrapping block of memory len long from src to dest. + /// (abs(dst - src) + len) must be no larger than capacity() (There must be at + /// most one continuous overlapping region between src and dest). + unsafe fn wrap_copy(&mut self, src: usize, dst: usize, len: usize) { + debug_assert!( + cmp::min(src.abs_diff(dst), self.capacity() - src.abs_diff(dst)) + len + <= self.capacity(), + "wrc dst={} src={} len={} cap={}", + dst, + src, + len, + self.capacity() + ); + + // If T is a ZST, don't do any copying. + if T::IS_ZST || src == dst || len == 0 { + return; + } + + let dst_after_src = self.wrap_sub(dst, src) < len; + + let src_pre_wrap_len = self.capacity() - src; + let dst_pre_wrap_len = self.capacity() - dst; + let src_wraps = src_pre_wrap_len < len; + let dst_wraps = dst_pre_wrap_len < len; + + match (dst_after_src, src_wraps, dst_wraps) { + (_, false, false) => { + // src doesn't wrap, dst doesn't wrap + // + // S . . . + // 1 [_ _ A A B B C C _] + // 2 [_ _ A A A A B B _] + // D . . . + // + unsafe { + self.copy(src, dst, len); + } + } + (false, false, true) => { + // dst before src, src doesn't wrap, dst wraps + // + // S . . . + // 1 [A A B B _ _ _ C C] + // 2 [A A B B _ _ _ A A] + // 3 [B B B B _ _ _ A A] + // . . D . + // + unsafe { + self.copy(src, dst, dst_pre_wrap_len); + self.copy(src + dst_pre_wrap_len, 0, len - dst_pre_wrap_len); + } + } + (true, false, true) => { + // src before dst, src doesn't wrap, dst wraps + // + // S . . . + // 1 [C C _ _ _ A A B B] + // 2 [B B _ _ _ A A B B] + // 3 [B B _ _ _ A A A A] + // . . D . + // + unsafe { + self.copy(src + dst_pre_wrap_len, 0, len - dst_pre_wrap_len); + self.copy(src, dst, dst_pre_wrap_len); + } + } + (false, true, false) => { + // dst before src, src wraps, dst doesn't wrap + // + // . . S . 
+ // 1 [C C _ _ _ A A B B] + // 2 [C C _ _ _ B B B B] + // 3 [C C _ _ _ B B C C] + // D . . . + // + unsafe { + self.copy(src, dst, src_pre_wrap_len); + self.copy(0, dst + src_pre_wrap_len, len - src_pre_wrap_len); + } + } + (true, true, false) => { + // src before dst, src wraps, dst doesn't wrap + // + // . . S . + // 1 [A A B B _ _ _ C C] + // 2 [A A A A _ _ _ C C] + // 3 [C C A A _ _ _ C C] + // D . . . + // + unsafe { + self.copy(0, dst + src_pre_wrap_len, len - src_pre_wrap_len); + self.copy(src, dst, src_pre_wrap_len); + } + } + (false, true, true) => { + // dst before src, src wraps, dst wraps + // + // . . . S . + // 1 [A B C D _ E F G H] + // 2 [A B C D _ E G H H] + // 3 [A B C D _ E G H A] + // 4 [B C C D _ E G H A] + // . . D . . + // + debug_assert!(dst_pre_wrap_len > src_pre_wrap_len); + let delta = dst_pre_wrap_len - src_pre_wrap_len; + unsafe { + self.copy(src, dst, src_pre_wrap_len); + self.copy(0, dst + src_pre_wrap_len, delta); + self.copy(delta, 0, len - dst_pre_wrap_len); + } + } + (true, true, true) => { + // src before dst, src wraps, dst wraps + // + // . . S . . + // 1 [A B C D _ E F G H] + // 2 [A A B D _ E F G H] + // 3 [H A B D _ E F G H] + // 4 [H A B D _ E F F G] + // . . . D . + // + debug_assert!(src_pre_wrap_len > dst_pre_wrap_len); + let delta = src_pre_wrap_len - dst_pre_wrap_len; + unsafe { + self.copy(0, delta, len - src_pre_wrap_len); + self.copy(self.capacity() - delta, 0, delta); + self.copy(src, dst, dst_pre_wrap_len); + } + } + } + } + + /// Copies all values from `src` to `dst`, wrapping around if needed. + /// Assumes capacity is sufficient. + #[inline] + unsafe fn copy_slice(&mut self, dst: usize, src: &[T]) { + debug_assert!(src.len() <= self.capacity()); + let head_room = self.capacity() - dst; + if src.len() <= head_room { + unsafe { + ptr::copy_nonoverlapping(src.as_ptr(), self.ptr().add(dst), src.len()); + } + } else { + let (left, right) = src.split_at(head_room); + unsafe { + ptr::copy_nonoverlapping(left.as_ptr(), self.ptr().add(dst), left.len()); + ptr::copy_nonoverlapping(right.as_ptr(), self.ptr(), right.len()); + } + } + } + + /// Writes all values from `iter` to `dst`. + /// + /// # Safety + /// + /// Assumes no wrapping around happens. + /// Assumes capacity is sufficient. + #[inline] + unsafe fn write_iter( + &mut self, + dst: usize, + iter: impl Iterator, + written: &mut usize, + ) { + iter.enumerate().for_each(|(i, element)| unsafe { + self.buffer_write(dst + i, element); + *written += 1; + }); + } + + /// Frobs the head and tail sections around to handle the fact that we + /// just reallocated. Unsafe because it trusts old_capacity. + #[inline] + unsafe fn handle_capacity_increase(&mut self, old_capacity: usize) { + let new_capacity = self.capacity(); + debug_assert!(new_capacity >= old_capacity); + + // Move the shortest contiguous section of the ring buffer + // + // H := head + // L := last element (`self.to_physical_idx(self.len - 1)`) + // + // H L + // [o o o o o o o . ] + // H L + // A [o o o o o o o . . . . . . . . . ] + // L H + // [o o o o o o o o ] + // H L + // B [. . . o o o o o o o . . . . . . ] + // L H + // [o o o o o o o o ] + // L H + // C [o o o o o . . . . . . . . . o o ] + + // can't use is_contiguous() because the capacity is already updated. 
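+        //
+        // Choosing between A, B and C (matching the branches below): if the
+        // initialized region did not wrap in the old buffer, nothing moves
+        // (A); if it wrapped and the tail fits in the newly gained space, the
+        // tail is copied up past the old capacity (B); otherwise the head
+        // segment is moved to the end of the new buffer (C).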
+ if self.head <= old_capacity - self.len { + // A + // Nop + } else { + let head_len = old_capacity - self.head; + let tail_len = self.len - head_len; + if head_len > tail_len && new_capacity - old_capacity >= tail_len { + // B + unsafe { + self.copy_nonoverlapping(0, old_capacity, tail_len); + } + } else { + // C + let new_head = new_capacity - head_len; + unsafe { + // can't use copy_nonoverlapping here, because if e.g. head_len = 2 + // and new_capacity = old_capacity + 1, then the heads overlap. + self.copy(self.head, new_head, head_len); + } + self.head = new_head; + } + } + debug_assert!(self.head < self.capacity() || self.capacity() == 0); + } +} + +impl VecDeque { + /// Creates an empty deque. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let deque: VecDeque = VecDeque::new(); + /// ``` + #[inline] + #[must_use] + pub const fn new() -> Self { + Self::new_in(Global) + } + + /// Creates an empty deque with space for at least `capacity` elements. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let deque: VecDeque = VecDeque::try_with_capacity(10)?; + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_with_capacity(capacity: usize) -> Result { + Self::try_with_capacity_in(capacity, Global) + } +} + +impl VecDeque { + /// Creates an empty deque. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let deque: VecDeque = VecDeque::new(); + /// ``` + #[inline] + pub const fn new_in(alloc: A) -> VecDeque { + VecDeque { + head: 0, + len: 0, + buf: RawVec::new_in(alloc), + } + } + + /// Creates an empty deque with space for at least `capacity` elements. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{VecDeque, Global}; + /// + /// let deque: VecDeque = VecDeque::try_with_capacity_in(10, Global)?; + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_with_capacity_in(capacity: usize, alloc: A) -> Result, Error> { + Ok(VecDeque { + head: 0, + len: 0, + buf: RawVec::try_with_capacity_in(capacity, alloc)?, + }) + } + + /// Creates a `VecDeque` from a raw allocation, when the initialized part of + /// that allocation forms a *contiguous* subslice thereof. + /// + /// For use by `vec::IntoIter::into_vecdeque` + /// + /// # Safety + /// + /// All the usual requirements on the allocated memory like in + /// `Vec::from_raw_parts_in`, but takes a *range* of elements that are + /// initialized rather than only supporting `0..len`. Requires that + /// `initialized.start` ≤ `initialized.end` ≤ `capacity`. + #[inline] + pub(crate) unsafe fn from_contiguous_raw_parts_in( + ptr: *mut T, + initialized: Range, + capacity: usize, + alloc: A, + ) -> Self { + debug_assert!(initialized.start <= initialized.end); + debug_assert!(initialized.end <= capacity); + + // SAFETY: Our safety precondition guarantees the range length won't wrap, + // and that the allocation is valid for use in `RawVec`. + unsafe { + VecDeque { + head: initialized.start, + len: initialized.end.wrapping_sub(initialized.start), + buf: RawVec::from_raw_parts_in(ptr, capacity, alloc), + } + } + } + + /// Provides a reference to the element at the given index. + /// + /// Element at index 0 is the front of the queue. 
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut buf = VecDeque::new(); + /// + /// buf.try_push_back(3); + /// buf.try_push_back(4); + /// buf.try_push_back(5); + /// buf.try_push_back(6); + /// + /// assert_eq!(buf.get(1), Some(&4)); + /// + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn get(&self, index: usize) -> Option<&T> { + if index < self.len { + let idx = self.to_physical_idx(index); + unsafe { Some(&*self.ptr().add(idx)) } + } else { + None + } + } + + /// Provides a mutable reference to the element at the given index. + /// + /// Element at index 0 is the front of the queue. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut buf = VecDeque::new(); + /// + /// buf.try_push_back(3)?; + /// buf.try_push_back(4)?; + /// buf.try_push_back(5)?; + /// buf.try_push_back(6)?; + /// + /// assert_eq!(buf[1], 4); + /// + /// if let Some(elem) = buf.get_mut(1) { + /// *elem = 7; + /// } + /// + /// assert_eq!(buf[1], 7); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn get_mut(&mut self, index: usize) -> Option<&mut T> { + if index < self.len { + let idx = self.to_physical_idx(index); + unsafe { Some(&mut *self.ptr().add(idx)) } + } else { + None + } + } + + /// Swaps elements at indices `i` and `j`. + /// + /// `i` and `j` may be equal. + /// + /// Element at index 0 is the front of the queue. + /// + /// # Panics + /// + /// Panics if either index is out of bounds. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut buf = VecDeque::new(); + /// + /// buf.try_push_back(3)?; + /// buf.try_push_back(4)?; + /// buf.try_push_back(5)?; + /// + /// assert_eq!(buf, [3, 4, 5]); + /// + /// buf.swap(0, 2); + /// + /// assert_eq!(buf, [5, 4, 3]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn swap(&mut self, i: usize, j: usize) { + assert!(i < self.len()); + assert!(j < self.len()); + let ri = self.to_physical_idx(i); + let rj = self.to_physical_idx(j); + unsafe { ptr::swap(self.ptr().add(ri), self.ptr().add(rj)) } + } + + /// Returns the number of elements the deque can hold without reallocating. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let buf: VecDeque = VecDeque::try_with_capacity(10)?; + /// assert!(buf.capacity() >= 10); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn capacity(&self) -> usize { + if T::IS_ZST { + usize::MAX + } else { + self.buf.capacity() + } + } + + /// Tries to reserve the minimum capacity for at least `additional` more elements to + /// be inserted in the given deque. After calling `try_reserve_exact`, + /// capacity will be greater than or equal to `self.len() + additional` if + /// it returns `Ok(())`. Does nothing if the capacity is already sufficient. + /// + /// Note that the allocator may give the collection more space than it + /// requests. Therefore, capacity can not be relied upon to be precisely + /// minimal. Prefer [`try_reserve`] if future insertions are expected. + /// + /// [`try_reserve`]: VecDeque::try_reserve + /// + /// # Errors + /// + /// If the capacity overflows `usize`, or the allocator reports a failure, then an error + /// is returned. 
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{VecDeque, Error, TryExtend}; + /// + /// fn process_data(data: &[u32]) -> Result, Error> { + /// let mut output = VecDeque::new(); + /// + /// // Pre-reserve the memory, exiting if we can't + /// output.try_reserve_exact(data.len())?; + /// + /// // Now we know this can't OOM(Out-Of-Memory) in the middle of our complex work + /// output.try_extend(data.iter().map(|&val| { + /// val * 2 + 5 // very complicated + /// }))?; + /// + /// Ok(output) + /// } + /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?"); + /// ``` + pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), Error> { + let new_cap = self + .len + .checked_add(additional) + .ok_or(Error::CapacityOverflow)?; + let old_cap = self.capacity(); + + if new_cap > old_cap { + self.buf.try_reserve_exact(self.len, additional)?; + unsafe { + self.handle_capacity_increase(old_cap); + } + } + Ok(()) + } + + /// Tries to reserve capacity for at least `additional` more elements to be inserted + /// in the given deque. The collection may reserve more space to speculatively avoid + /// frequent reallocations. After calling `try_reserve`, capacity will be + /// greater than or equal to `self.len() + additional` if it returns + /// `Ok(())`. Does nothing if capacity is already sufficient. This method + /// preserves the contents even if an error occurs. + /// + /// # Errors + /// + /// If the capacity overflows `usize`, or the allocator reports a failure, then an error + /// is returned. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{VecDeque, Error, TryExtend}; + /// + /// fn process_data(data: &[u32]) -> Result, Error> { + /// let mut output = VecDeque::new(); + /// + /// // Pre-reserve the memory, exiting if we can't + /// output.try_reserve(data.len())?; + /// + /// // Now we know this can't OOM in the middle of our complex work + /// output.try_extend(data.iter().map(|&val| { + /// val * 2 + 5 // very complicated + /// }))?; + /// + /// Ok(output) + /// } + /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?"); + /// ``` + pub fn try_reserve(&mut self, additional: usize) -> Result<(), Error> { + let new_cap = self + .len + .checked_add(additional) + .ok_or(Error::CapacityOverflow)?; + let old_cap = self.capacity(); + + if new_cap > old_cap { + self.buf.try_reserve(self.len, additional)?; + unsafe { + self.handle_capacity_increase(old_cap); + } + } + + Ok(()) + } + + /// Shrinks the capacity of the deque as much as possible. + /// + /// It will drop down as close as possible to the length but the allocator may still inform the + /// deque that there is space for a few more elements. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{VecDeque, TryExtend}; + /// + /// let mut buf = VecDeque::try_with_capacity(15)?; + /// buf.try_extend(0..4)?; + /// assert_eq!(buf.capacity(), 15); + /// buf.try_shrink_to_fit()?; + /// assert!(buf.capacity() >= 4); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_shrink_to_fit(&mut self) -> Result<(), Error> { + self.try_shrink_to(0) + } + + /// Shrinks the capacity of the deque with a lower bound. + /// + /// The capacity will remain at least as large as both the length + /// and the supplied value. + /// + /// If the current capacity is less than the lower limit, this is a no-op. 
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{VecDeque, TryExtend}; + /// + /// let mut buf = VecDeque::try_with_capacity(15)?; + /// buf.try_extend(0..4)?; + /// assert_eq!(buf.capacity(), 15); + /// buf.try_shrink_to(6)?; + /// assert!(buf.capacity() >= 6); + /// buf.try_shrink_to(0)?; + /// assert!(buf.capacity() >= 4); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_shrink_to(&mut self, min_capacity: usize) -> Result<(), Error> { + let target_cap = min_capacity.max(self.len); + + // never shrink ZSTs + if T::IS_ZST || self.capacity() <= target_cap { + return Ok(()); + } + + // There are three cases of interest: + // All elements are out of desired bounds + // Elements are contiguous, and tail is out of desired bounds + // Elements are discontiguous + // + // At all other times, element positions are unaffected. + + // `head` and `len` are at most `isize::MAX` and `target_cap < self.capacity()`, so nothing can + // overflow. + let tail_outside = (target_cap + 1..=self.capacity()).contains(&(self.head + self.len)); + + if self.len == 0 { + self.head = 0; + } else if self.head >= target_cap && tail_outside { + // Head and tail are both out of bounds, so copy all of them to the front. + // + // H := head + // L := last element + // H L + // [. . . . . . . . o o o o o o o . ] + // H L + // [o o o o o o o . ] + unsafe { + // nonoverlapping because `self.head >= target_cap >= self.len`. + self.copy_nonoverlapping(self.head, 0, self.len); + } + self.head = 0; + } else if self.head < target_cap && tail_outside { + // Head is in bounds, tail is out of bounds. + // Copy the overflowing part to the beginning of the + // buffer. This won't overlap because `target_cap >= self.len`. + // + // H := head + // L := last element + // H L + // [. . . o o o o o o o . . . . . . ] + // L H + // [o o . o o o o o ] + let len = self.head + self.len - target_cap; + unsafe { + self.copy_nonoverlapping(target_cap, 0, len); + } + } else if !self.is_contiguous() { + // The head slice is at least partially out of bounds, tail is in bounds. + // Copy the head backwards so it lines up with the target capacity. + // This won't overlap because `target_cap >= self.len`. + // + // H := head + // L := last element + // L H + // [o o o o o . . . . . . . . . o o ] + // L H + // [o o o o o . o o ] + let head_len = self.capacity() - self.head; + let new_head = target_cap - head_len; + unsafe { + // can't use `copy_nonoverlapping()` here because the new and old + // regions for the head might overlap. + self.copy(self.head, new_head, head_len); + } + self.head = new_head; + } + + self.buf.try_shrink_to_fit(target_cap)?; + + debug_assert!(self.head < self.capacity() || self.capacity() == 0); + debug_assert!(self.len <= self.capacity()); + Ok(()) + } + + /// Shortens the deque, keeping the first `len` elements and dropping + /// the rest. + /// + /// If `len` is greater than the deque's current length, this has no + /// effect. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut buf = VecDeque::new(); + /// + /// buf.try_push_back(5)?; + /// buf.try_push_back(10)?; + /// buf.try_push_back(15)?; + /// + /// assert_eq!(buf, [5, 10, 15]); + /// + /// buf.truncate(1); + /// + /// assert_eq!(buf, [5]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn truncate(&mut self, len: usize) { + /// Runs the destructor for all items in the slice when it gets dropped (normally or + /// during unwinding). 
+ struct Dropper<'a, T>(&'a mut [T]);
+
+ impl<'a, T> Drop for Dropper<'a, T> {
+ fn drop(&mut self) {
+ unsafe {
+ ptr::drop_in_place(self.0);
+ }
+ }
+ }
+
+ // Safe because:
+ //
+ // * Any slice passed to `drop_in_place` is valid; the second case has
+ // `len <= front.len()` and returning on `len > self.len()` ensures
+ // `begin <= back.len()` in the first case
+ // * The head of the VecDeque is moved before calling `drop_in_place`,
+ // so no value is dropped twice if `drop_in_place` panics
+ unsafe {
+ if len >= self.len {
+ return;
+ }
+
+ let (front, back) = self.as_mut_slices();
+ if len > front.len() {
+ let begin = len - front.len();
+ let drop_back = back.get_unchecked_mut(begin..) as *mut _;
+ self.len = len;
+ ptr::drop_in_place(drop_back);
+ } else {
+ let drop_back = back as *mut _;
+ let drop_front = front.get_unchecked_mut(len..) as *mut _;
+ self.len = len;
+
+ // Make sure the second half is dropped even when a destructor
+ // in the first one panics.
+ let _back_dropper = Dropper(&mut *drop_back);
+ ptr::drop_in_place(drop_front);
+ }
+ }
+ }
+
+ /// Returns a reference to the underlying allocator.
+ #[inline]
+ pub fn allocator(&self) -> &A {
+ self.buf.allocator()
+ }
+
+ /// Returns a front-to-back iterator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::{Vec, VecDeque, IteratorExt};
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.try_push_back(5)?;
+ /// buf.try_push_back(3)?;
+ /// buf.try_push_back(4)?;
+ /// let b: &[_] = &[&5, &3, &4];
+ /// let c: Vec<&i32> = buf.iter().try_collect()?;
+ /// assert_eq!(&c[..], b);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ pub fn iter(&self) -> Iter<'_, T> {
+ let (a, b) = self.as_slices();
+ Iter::new(a.iter(), b.iter())
+ }
+
+ /// Returns a raw front-to-back iterator.
+ ///
+ /// # Safety
+ ///
+ /// The caller must ensure that the iterator doesn't outlive `self`.
+ pub unsafe fn raw_iter(&self) -> RawIter<T> {
+ let (a, b) = self.as_slices();
+ RawIter::new(crate::slice::RawIter::new(a), crate::slice::RawIter::new(b))
+ }
+
+ /// Returns a front-to-back iterator that returns mutable references.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.try_push_back(5)?;
+ /// buf.try_push_back(3)?;
+ /// buf.try_push_back(4)?;
+ /// for num in buf.iter_mut() {
+ /// *num = *num - 2;
+ /// }
+ /// let b: &[_] = &[&mut 3, &mut 1, &mut 2];
+ /// assert_eq!(&buf.iter_mut().collect::<Vec<&mut i32>>()[..], b);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ pub fn iter_mut(&mut self) -> IterMut<'_, T> {
+ let (a, b) = self.as_mut_slices();
+ IterMut::new(a.iter_mut(), b.iter_mut())
+ }
+
+ /// Returns a pair of slices which contain, in order, the contents of the
+ /// deque.
+ ///
+ /// If [`make_contiguous`] was previously called, all elements of the
+ /// deque will be in the first slice and the second slice will be empty.
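+ ///
+ /// The two slices are the possibly wrapped halves of the internal ring
+ /// buffer: the first slice runs from the head towards the end of the
+ /// buffer, and the second one wraps around to the start of the buffer
+ /// whenever the contents are not contiguous.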
+ /// + /// [`make_contiguous`]: VecDeque::make_contiguous + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut deque = VecDeque::new(); + /// + /// deque.try_push_back(0)?; + /// deque.try_push_back(1)?; + /// deque.try_push_back(2)?; + /// + /// assert_eq!(deque.as_slices(), (&[0, 1, 2][..], &[][..])); + /// + /// deque.try_push_front(10)?; + /// deque.try_push_front(9)?; + /// + /// assert_eq!(deque.as_slices(), (&[9, 10][..], &[0, 1, 2][..])); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn as_slices(&self) -> (&[T], &[T]) { + let (a_range, b_range) = self.slice_ranges(.., self.len); + // SAFETY: `slice_ranges` always returns valid ranges into + // the physical buffer. + unsafe { (&*self.buffer_range(a_range), &*self.buffer_range(b_range)) } + } + + /// Returns a pair of slices which contain, in order, the contents of the + /// deque. + /// + /// If [`make_contiguous`] was previously called, all elements of the + /// deque will be in the first slice and the second slice will be empty. + /// + /// [`make_contiguous`]: VecDeque::make_contiguous + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut deque = VecDeque::new(); + /// + /// deque.try_push_back(0)?; + /// deque.try_push_back(1)?; + /// + /// deque.try_push_front(10)?; + /// deque.try_push_front(9)?; + /// + /// deque.as_mut_slices().0[0] = 42; + /// deque.as_mut_slices().1[0] = 24; + /// assert_eq!(deque.as_slices(), (&[42, 10][..], &[24, 1][..])); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn as_mut_slices(&mut self) -> (&mut [T], &mut [T]) { + let (a_range, b_range) = self.slice_ranges(.., self.len); + // SAFETY: `slice_ranges` always returns valid ranges into + // the physical buffer. + unsafe { + ( + &mut *self.buffer_range(a_range), + &mut *self.buffer_range(b_range), + ) + } + } + + /// Returns the number of elements in the deque. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut deque = VecDeque::new(); + /// assert_eq!(deque.len(), 0); + /// deque.try_push_back(1)?; + /// assert_eq!(deque.len(), 1); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn len(&self) -> usize { + self.len + } + + /// Returns `true` if the deque is empty. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut deque = VecDeque::new(); + /// assert!(deque.is_empty()); + /// deque.try_push_front(1)?; + /// assert!(!deque.is_empty()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn is_empty(&self) -> bool { + self.len == 0 + } + + /// Given a range into the logical buffer of the deque, this function + /// return two ranges into the physical buffer that correspond to + /// the given range. The `len` parameter should usually just be `self.len`; + /// the reason it's passed explicitly is that if the deque is wrapped in a + /// `Drain`, then `self.len` is not actually the length of the deque. + /// + /// # Safety + /// + /// This function is always safe to call. For the resulting ranges to be + /// valid ranges into the physical buffer, the caller must ensure that the + /// result of calling `slice::range(range, ..len)` represents a valid range + /// into the logical buffer, and that all elements in that range are + /// initialized. 
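+ ///
+ /// For example, with `capacity() == 8`, `head == 6` and `len == 4`, the
+ /// logical range `0..4` maps to the physical ranges `6..8` and `0..2`.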
+ fn slice_ranges<R>(&self, range: R, len: usize) -> (Range<usize>, Range<usize>)
+ where
+ R: RangeBounds<usize>,
+ {
+ let Range { start, end } = slice_range(range, ..len);
+ let len = end - start;
+
+ if len == 0 {
+ (0..0, 0..0)
+ } else {
+ // `slice_range` guarantees that `start <= end <= len`.
+ // because `len != 0`, we know that `start < end`, so `start < len`
+ // and the indexing is valid.
+ let wrapped_start = self.to_physical_idx(start);
+
+ // this subtraction can never overflow because `wrapped_start` is
+ // at most `self.capacity()` (and if `self.capacity != 0`, then `wrapped_start` is strictly less
+ // than `self.capacity`).
+ let head_len = self.capacity() - wrapped_start;
+
+ if head_len >= len {
+ // we know that `len + wrapped_start <= self.capacity <= usize::MAX`, so this addition can't overflow
+ (wrapped_start..wrapped_start + len, 0..0)
+ } else {
+ // can't overflow because of the if condition
+ let tail_len = len - head_len;
+ (wrapped_start..self.capacity(), 0..tail_len)
+ }
+ }
+ }
+
+ /// Creates an iterator that covers the specified range in the deque.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the starting point is greater than the end point or if
+ /// the end point is greater than the length of the deque.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::{VecDeque, IteratorExt};
+ ///
+ /// let deque: VecDeque<_> = [1, 2, 3].try_into()?;
+ /// let range = deque.range(2..).copied().try_collect::<VecDeque<_>>()?;
+ /// assert_eq!(range, [3]);
+ ///
+ /// // A full range covers all contents
+ /// let all = deque.range(..);
+ /// assert_eq!(all.len(), 3);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[inline]
+ pub fn range<R>(&self, range: R) -> Iter<'_, T>
+ where
+ R: RangeBounds<usize>,
+ {
+ let (a_range, b_range) = self.slice_ranges(range, self.len);
+ // SAFETY: The ranges returned by `slice_ranges`
+ // are valid ranges into the physical buffer, so
+ // it's ok to pass them to `buffer_range` and
+ // dereference the result.
+ let a = unsafe { &*self.buffer_range(a_range) };
+ let b = unsafe { &*self.buffer_range(b_range) };
+ Iter::new(a.iter(), b.iter())
+ }
+
+ /// Creates an iterator that covers the specified mutable range in the deque.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the starting point is greater than the end point or if
+ /// the end point is greater than the length of the deque.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::VecDeque;
+ ///
+ /// let mut deque: VecDeque<_> = [1, 2, 3].try_into()?;
+ /// for v in deque.range_mut(2..) {
+ /// *v *= 2;
+ /// }
+ /// assert_eq!(deque, [1, 2, 6]);
+ ///
+ /// // A full range covers all contents
+ /// for v in deque.range_mut(..) {
+ /// *v *= 2;
+ /// }
+ /// assert_eq!(deque, [2, 4, 12]);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[inline]
+ pub fn range_mut<R>(&mut self, range: R) -> IterMut<'_, T>
+ where
+ R: RangeBounds<usize>,
+ {
+ let (a_range, b_range) = self.slice_ranges(range, self.len);
+ // SAFETY: The ranges returned by `slice_ranges`
+ // are valid ranges into the physical buffer, so
+ // it's ok to pass them to `buffer_range` and
+ // dereference the result.
+ let a = unsafe { &mut *self.buffer_range(a_range) };
+ let b = unsafe { &mut *self.buffer_range(b_range) };
+ IterMut::new(a.iter_mut(), b.iter_mut())
+ }
+
+ /// Removes the specified range from the deque in bulk, returning all
+ /// removed elements as an iterator. If the iterator is dropped before
+ /// being fully consumed, it drops the remaining removed elements.
+ ///
+ /// The returned iterator keeps a mutable borrow on the queue to optimize
+ /// its implementation.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the starting point is greater than the end point or if
+ /// the end point is greater than the length of the deque.
+ ///
+ /// # Leaking
+ ///
+ /// If the returned iterator goes out of scope without being dropped (due to
+ /// [`mem::forget`], for example), the deque may have lost and leaked
+ /// elements arbitrarily, including elements outside the range.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::{VecDeque, IteratorExt};
+ ///
+ /// let mut deque: VecDeque<_> = [1, 2, 3].try_into()?;
+ /// let drained = deque.drain(2..).try_collect::<VecDeque<_>>()?;
+ /// assert_eq!(drained, [3]);
+ /// assert_eq!(deque, [1, 2]);
+ ///
+ /// // A full range clears all contents, like `clear()` does
+ /// deque.drain(..);
+ /// assert!(deque.is_empty());
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[inline]
+ pub fn drain<R>(&mut self, range: R) -> Drain<'_, T, A>
+ where
+ R: RangeBounds<usize>,
+ {
+ // Memory safety
+ //
+ // When the Drain is first created, the source deque is shortened to
+ // make sure no uninitialized or moved-from elements are accessible at
+ // all if the Drain's destructor never gets to run.
+ //
+ // Drain will ptr::read out the values to remove.
+ // When finished, the remaining data will be copied back to cover the hole,
+ // and the head/tail values will be restored correctly.
+ //
+ let Range { start, end } = slice_range(range, ..self.len);
+ let drain_start = start;
+ let drain_len = end - start;
+
+ // The deque's elements are parted into three segments:
+ // * 0 -> drain_start
+ // * drain_start -> drain_start+drain_len
+ // * drain_start+drain_len -> self.len
+ //
+ // H = self.head; T = self.head+self.len; t = drain_start+drain_len; h = drain_head
+ //
+ // We store drain_start as self.len, and drain_len and self.len as
+ // drain_len and orig_len respectively on the Drain. This also
+ // truncates the effective array such that if the Drain is leaked, we
+ // have forgotten about the potentially moved values after the start of
+ // the drain.
+ //
+ // H h t T
+ // [. . . o o x x o o . . .]
+ //
+ // "forget" about the values after the start of the drain until after
+ // the drain is complete and the Drain destructor is run.
+
+ unsafe { Drain::new(self, drain_start, drain_len) }
+ }
+
+ /// Clears the deque, removing all values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use rune_alloc::VecDeque;
+ ///
+ /// let mut deque = VecDeque::new();
+ /// deque.try_push_back(1)?;
+ /// deque.clear();
+ /// assert!(deque.is_empty());
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[inline]
+ pub fn clear(&mut self) {
+ self.truncate(0);
+ // Not strictly necessary, but leaves things in a more consistent/predictable state.
+ self.head = 0;
+ }
+
+ /// Returns `true` if the deque contains an element equal to the
+ /// given value.
+ ///
+ /// This operation is *O*(*n*).
+ ///
+ /// Note that if you have a sorted `VecDeque`, [`binary_search`] may be faster.
+ /// + /// [`binary_search`]: VecDeque::binary_search + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut deque: VecDeque = VecDeque::new(); + /// + /// deque.try_push_back(0)?; + /// deque.try_push_back(1)?; + /// + /// assert_eq!(deque.contains(&1), true); + /// assert_eq!(deque.contains(&10), false); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn contains(&self, x: &T) -> bool + where + T: PartialEq, + { + let (a, b) = self.as_slices(); + a.contains(x) || b.contains(x) + } + + /// Provides a reference to the front element, or `None` if the deque is + /// empty. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut d = VecDeque::new(); + /// assert_eq!(d.front(), None); + /// + /// d.try_push_back(1)?; + /// d.try_push_back(2)?; + /// assert_eq!(d.front(), Some(&1)); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn front(&self) -> Option<&T> { + self.get(0) + } + + /// Provides a mutable reference to the front element, or `None` if the + /// deque is empty. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut d = VecDeque::new(); + /// assert_eq!(d.front_mut(), None); + /// + /// d.try_push_back(1)?; + /// d.try_push_back(2)?; + /// match d.front_mut() { + /// Some(x) => *x = 9, + /// None => (), + /// } + /// assert_eq!(d.front(), Some(&9)); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn front_mut(&mut self) -> Option<&mut T> { + self.get_mut(0) + } + + /// Provides a reference to the back element, or `None` if the deque is + /// empty. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut d = VecDeque::new(); + /// assert_eq!(d.back(), None); + /// + /// d.try_push_back(1)?; + /// d.try_push_back(2)?; + /// assert_eq!(d.back(), Some(&2)); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn back(&self) -> Option<&T> { + self.get(self.len.wrapping_sub(1)) + } + + /// Provides a mutable reference to the back element, or `None` if the + /// deque is empty. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut d = VecDeque::new(); + /// assert_eq!(d.back(), None); + /// + /// d.try_push_back(1)?; + /// d.try_push_back(2)?; + /// match d.back_mut() { + /// Some(x) => *x = 9, + /// None => (), + /// } + /// assert_eq!(d.back(), Some(&9)); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn back_mut(&mut self) -> Option<&mut T> { + self.get_mut(self.len.wrapping_sub(1)) + } + + /// Removes the first element and returns it, or `None` if the deque is + /// empty. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut d = VecDeque::new(); + /// d.try_push_back(1)?; + /// d.try_push_back(2)?; + /// + /// assert_eq!(d.pop_front(), Some(1)); + /// assert_eq!(d.pop_front(), Some(2)); + /// assert_eq!(d.pop_front(), None); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn pop_front(&mut self) -> Option { + if self.is_empty() { + None + } else { + let old_head = self.head; + self.head = self.to_physical_idx(1); + self.len -= 1; + Some(unsafe { self.buffer_read(old_head) }) + } + } + + /// Removes the last element from the deque and returns it, or `None` if + /// it is empty. 
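+ ///
+ /// This is an *O*(1) operation.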
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut buf = VecDeque::new(); + /// assert_eq!(buf.pop_back(), None); + /// buf.try_push_back(1)?; + /// buf.try_push_back(3)?; + /// assert_eq!(buf.pop_back(), Some(3)); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn pop_back(&mut self) -> Option { + if self.is_empty() { + None + } else { + self.len -= 1; + Some(unsafe { self.buffer_read(self.to_physical_idx(self.len)) }) + } + } + + /// Prepends an element to the deque. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut d = VecDeque::new(); + /// d.try_push_front(1)?; + /// d.try_push_front(2)?; + /// assert_eq!(d.front(), Some(&2)); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_push_front(&mut self, value: T) -> Result<(), Error> { + if self.is_full() { + self.try_grow()?; + } + + self.head = self.wrap_sub(self.head, 1); + self.len += 1; + + unsafe { + self.buffer_write(self.head, value); + } + + Ok(()) + } + + /// Appends an element to the back of the deque. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut buf = VecDeque::new(); + /// buf.try_push_back(1)?; + /// buf.try_push_back(3)?; + /// assert_eq!(3, *buf.back().unwrap()); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_push_back(&mut self, value: T) -> Result<(), Error> { + if self.is_full() { + self.try_grow()?; + } + + unsafe { self.buffer_write(self.to_physical_idx(self.len), value) } + self.len += 1; + Ok(()) + } + + #[inline] + fn is_contiguous(&self) -> bool { + // Do the calculation like this to avoid overflowing if len + head > usize::MAX + self.head <= self.capacity() - self.len + } + + /// Removes an element from anywhere in the deque and returns it, + /// replacing it with the first element. + /// + /// This does not preserve ordering, but is *O*(1). + /// + /// Returns `None` if `index` is out of bounds. + /// + /// Element at index 0 is the front of the queue. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut buf = VecDeque::new(); + /// assert_eq!(buf.swap_remove_front(0), None); + /// buf.try_push_back(1)?; + /// buf.try_push_back(2)?; + /// buf.try_push_back(3)?; + /// assert_eq!(buf, [1, 2, 3]); + /// + /// assert_eq!(buf.swap_remove_front(2), Some(3)); + /// assert_eq!(buf, [2, 1]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn swap_remove_front(&mut self, index: usize) -> Option { + let length = self.len; + if index < length && index != 0 { + self.swap(index, 0); + } else if index >= length { + return None; + } + self.pop_front() + } + + /// Removes an element from anywhere in the deque and returns it, + /// replacing it with the last element. + /// + /// This does not preserve ordering, but is *O*(1). + /// + /// Returns `None` if `index` is out of bounds. + /// + /// Element at index 0 is the front of the queue. 
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut buf = VecDeque::new(); + /// assert_eq!(buf.swap_remove_back(0), None); + /// buf.try_push_back(1)?; + /// buf.try_push_back(2)?; + /// buf.try_push_back(3)?; + /// assert_eq!(buf, [1, 2, 3]); + /// + /// assert_eq!(buf.swap_remove_back(0), Some(1)); + /// assert_eq!(buf, [3, 2]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn swap_remove_back(&mut self, index: usize) -> Option { + let length = self.len; + if length > 0 && index < length - 1 { + self.swap(index, length - 1); + } else if index >= length { + return None; + } + self.pop_back() + } + + /// Inserts an element at `index` within the deque, shifting all elements + /// with indices greater than or equal to `index` towards the back. + /// + /// Element at index 0 is the front of the queue. + /// + /// # Panics + /// + /// Panics if `index` is greater than deque's length + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut vec_deque = VecDeque::new(); + /// vec_deque.try_push_back('a')?; + /// vec_deque.try_push_back('b')?; + /// vec_deque.try_push_back('c')?; + /// assert_eq!(vec_deque, &['a', 'b', 'c']); + /// + /// vec_deque.try_insert(1, 'd')?; + /// assert_eq!(vec_deque, &['a', 'd', 'b', 'c']); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_insert(&mut self, index: usize, value: T) -> Result<(), Error> { + assert!(index <= self.len(), "index out of bounds"); + + if self.is_full() { + self.try_grow()?; + } + + let k = self.len - index; + + if k < index { + // `index + 1` can't overflow, because if index was usize::MAX, then either the + // assert would've failed, or the deque would've tried to grow past usize::MAX + // and panicked. + unsafe { + // see `remove()` for explanation why this wrap_copy() call is safe. + self.wrap_copy( + self.to_physical_idx(index), + self.to_physical_idx(index + 1), + k, + ); + self.buffer_write(self.to_physical_idx(index), value); + self.len += 1; + } + } else { + let old_head = self.head; + self.head = self.wrap_sub(self.head, 1); + unsafe { + self.wrap_copy(old_head, self.head, index); + self.buffer_write(self.to_physical_idx(index), value); + self.len += 1; + } + } + + Ok(()) + } + + /// Removes and returns the element at `index` from the deque. + /// Whichever end is closer to the removal point will be moved to make + /// room, and all the affected elements will be moved to new positions. + /// Returns `None` if `index` is out of bounds. + /// + /// Element at index 0 is the front of the queue. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut buf = VecDeque::new(); + /// buf.try_push_back(1)?; + /// buf.try_push_back(2)?; + /// buf.try_push_back(3)?; + /// assert_eq!(buf, [1, 2, 3]); + /// + /// assert_eq!(buf.remove(1), Some(2)); + /// assert_eq!(buf, [1, 3]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn remove(&mut self, index: usize) -> Option { + if self.len <= index { + return None; + } + + let wrapped_idx = self.to_physical_idx(index); + + let elem = unsafe { Some(self.buffer_read(wrapped_idx)) }; + + let k = self.len - index - 1; + // safety: due to the nature of the if-condition, whichever wrap_copy gets called, + // its length argument will be at most `self.len / 2`, so there can't be more than + // one overlapping area. 
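+ //
+ // For example, with len = 10: removing at index 8 gives k = 1, so the
+ // single element behind the removal point is shifted towards the front,
+ // while removing at index 1 gives k = 8, so the one element in front of
+ // the removal point is shifted towards the back instead.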
+ if k < index { + unsafe { self.wrap_copy(self.wrap_add(wrapped_idx, 1), wrapped_idx, k) }; + self.len -= 1; + } else { + let old_head = self.head; + self.head = self.to_physical_idx(1); + unsafe { self.wrap_copy(old_head, self.head, index) }; + self.len -= 1; + } + + elem + } + + /// Splits the deque into two at the given index. + /// + /// Returns a newly allocated `VecDeque`. `self` contains elements `[0, at)`, + /// and the returned deque contains elements `[at, len)`. + /// + /// Note that the capacity of `self` does not change. + /// + /// Element at index 0 is the front of the queue. + /// + /// # Panics + /// + /// Panics if `at > len`. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut buf: VecDeque<_> = [1, 2, 3].try_into()?; + /// let buf2 = buf.try_split_off(1)?; + /// assert_eq!(buf, [1]); + /// assert_eq!(buf2, [2, 3]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + #[must_use = "use `.truncate()` if you don't need the other half"] + pub fn try_split_off(&mut self, at: usize) -> Result + where + A: Clone, + { + let len = self.len; + assert!(at <= len, "`at` out of bounds"); + + let other_len = len - at; + let mut other = VecDeque::try_with_capacity_in(other_len, self.allocator().clone())?; + + unsafe { + let (first_half, second_half) = self.as_slices(); + + let first_len = first_half.len(); + let second_len = second_half.len(); + if at < first_len { + // `at` lies in the first half. + let amount_in_first = first_len - at; + + ptr::copy_nonoverlapping(first_half.as_ptr().add(at), other.ptr(), amount_in_first); + + // just take all of the second half. + ptr::copy_nonoverlapping( + second_half.as_ptr(), + other.ptr().add(amount_in_first), + second_len, + ); + } else { + // `at` lies in the second half, need to factor in the elements we skipped + // in the first half. + let offset = at - first_len; + let amount_in_second = second_len - offset; + ptr::copy_nonoverlapping( + second_half.as_ptr().add(offset), + other.ptr(), + amount_in_second, + ); + } + } + + // Cleanup where the ends of the buffers are + self.len = at; + other.len = other_len; + + Ok(other) + } + + /// Moves all the elements of `other` into `self`, leaving `other` empty. + /// + /// # Panics + /// + /// Panics if the new number of elements in self overflows a `usize`. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut buf: VecDeque<_> = [1, 2].try_into()?; + /// let mut buf2: VecDeque<_> = [3, 4].try_into()?; + /// buf.try_append(&mut buf2)?; + /// assert_eq!(buf, [1, 2, 3, 4]); + /// assert_eq!(buf2, []); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[inline] + pub fn try_append(&mut self, other: &mut Self) -> Result<(), Error> { + if T::IS_ZST { + self.len = self + .len + .checked_add(other.len) + .ok_or(Error::CapacityOverflow)?; + other.len = 0; + other.head = 0; + return Ok(()); + } + + self.try_reserve(other.len)?; + + unsafe { + let (left, right) = other.as_slices(); + self.copy_slice(self.to_physical_idx(self.len), left); + // no overflow, because self.capacity() >= old_cap + left.len() >= self.len + left.len() + self.copy_slice(self.to_physical_idx(self.len + left.len()), right); + } + + // SAFETY: Update pointers after copying to avoid leaving doppelganger + // in case of panics. + self.len += other.len; + // Now that we own its values, forget everything in `other`. + other.len = 0; + other.head = 0; + Ok(()) + } + + /// Retains only the elements specified by the predicate. 
+ /// + /// In other words, remove all elements `e` for which `f(&e)` returns false. + /// This method operates in place, visiting each element exactly once in the + /// original order, and preserves the order of the retained elements. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{VecDeque, TryExtend}; + /// + /// let mut buf = VecDeque::new(); + /// buf.try_extend(1..5)?; + /// buf.retain(|&x| x % 2 == 0); + /// assert_eq!(buf, [2, 4]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// Because the elements are visited exactly once in the original order, + /// external state may be used to decide which elements to keep. + /// + /// ``` + /// use rune_alloc::{VecDeque, TryExtend}; + /// + /// let mut buf = VecDeque::new(); + /// buf.try_extend(1..6)?; + /// + /// let keep = [false, true, true, false, true]; + /// let mut iter = keep.iter(); + /// buf.retain(|_| *iter.next().unwrap()); + /// assert_eq!(buf, [2, 3, 5]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn retain(&mut self, mut f: F) + where + F: FnMut(&T) -> bool, + { + self.retain_mut(|elem| f(elem)); + } + + /// Retains only the elements specified by the predicate. + /// + /// In other words, remove all elements `e` for which `f(&e)` returns false. + /// This method operates in place, visiting each element exactly once in the + /// original order, and preserves the order of the retained elements. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{VecDeque, TryExtend}; + /// + /// let mut buf = VecDeque::new(); + /// buf.try_extend(1..5)?; + /// buf.retain_mut(|x| if *x % 2 == 0 { + /// *x += 1; + /// true + /// } else { + /// false + /// }); + /// assert_eq!(buf, [3, 5]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn retain_mut(&mut self, mut f: F) + where + F: FnMut(&mut T) -> bool, + { + let len = self.len; + let mut idx = 0; + let mut cur = 0; + + // Stage 1: All values are retained. + while cur < len { + if !f(&mut self[cur]) { + cur += 1; + break; + } + cur += 1; + idx += 1; + } + // Stage 2: Swap retained value into current idx. + while cur < len { + if !f(&mut self[cur]) { + cur += 1; + continue; + } + + self.swap(idx, cur); + cur += 1; + idx += 1; + } + // Stage 3: Truncate all values after idx. + if cur != idx { + self.truncate(idx); + } + } + + // Double the buffer size. This method is inline(never), so we expect it to only + // be called in cold paths. + // This may panic or abort + #[inline(never)] + fn try_grow(&mut self) -> Result<(), Error> { + // Extend or possibly remove this assertion when valid use-cases for growing the + // buffer without it being full emerge + debug_assert!(self.is_full()); + let old_cap = self.capacity(); + self.buf.try_reserve_for_push(old_cap)?; + unsafe { + self.handle_capacity_increase(old_cap); + } + debug_assert!(!self.is_full()); + Ok(()) + } + + /// Modifies the deque in-place so that `len()` is equal to `new_len`, + /// either by removing excess elements from the back or by appending + /// elements generated by calling `generator` to the back. 
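+ ///
+ /// When the deque grows, `generator` is called once for each appended
+ /// element; when it shrinks, `generator` is not called at all.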
+ /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut buf = VecDeque::new(); + /// buf.try_push_back(5)?; + /// buf.try_push_back(10)?; + /// buf.try_push_back(15)?; + /// assert_eq!(buf, [5, 10, 15]); + /// + /// buf.try_resize_with(5, Default::default)?; + /// assert_eq!(buf, [5, 10, 15, 0, 0]); + /// + /// buf.try_resize_with(2, || unreachable!())?; + /// assert_eq!(buf, [5, 10]); + /// + /// let mut state = 100; + /// buf.try_resize_with(5, || { state += 1; state })?; + /// assert_eq!(buf, [5, 10, 101, 102, 103]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_resize_with( + &mut self, + new_len: usize, + mut generator: impl FnMut() -> T, + ) -> Result<(), Error> { + let len = self.len; + + if new_len > len { + for _ in 0..new_len - len { + self.try_push_back(generator())?; + } + } else { + self.truncate(new_len); + } + + Ok(()) + } + + /// Rearranges the internal storage of this deque so it is one contiguous + /// slice, which is then returned. + /// + /// This method does not allocate and does not change the order of the + /// inserted elements. As it returns a mutable slice, this can be used to + /// sort a deque. + /// + /// Once the internal storage is contiguous, the [`as_slices`] and + /// [`as_mut_slices`] methods will return the entire contents of the + /// deque in a single slice. + /// + /// [`as_slices`]: VecDeque::as_slices + /// [`as_mut_slices`]: VecDeque::as_mut_slices + /// + /// # Examples + /// + /// Sorting the content of a deque. + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut buf = VecDeque::try_with_capacity(15)?; + /// + /// buf.try_push_back(2)?; + /// buf.try_push_back(1)?; + /// buf.try_push_front(3)?; + /// + /// // sorting the deque + /// buf.make_contiguous().sort(); + /// assert_eq!(buf.as_slices(), (&[1, 2, 3] as &[_], &[] as &[_])); + /// + /// // sorting it in reverse order + /// buf.make_contiguous().sort_by(|a, b| b.cmp(a)); + /// assert_eq!(buf.as_slices(), (&[3, 2, 1] as &[_], &[] as &[_])); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// Getting immutable access to the contiguous slice. + /// + /// ```rust + /// use rune_alloc::VecDeque; + /// + /// let mut buf = VecDeque::new(); + /// + /// buf.try_push_back(2)?; + /// buf.try_push_back(1)?; + /// buf.try_push_front(3)?; + /// + /// buf.make_contiguous(); + /// if let (slice, &[]) = buf.as_slices() { + /// // we can now be sure that `slice` contains all elements of the deque, + /// // while still having immutable access to `buf`. + /// assert_eq!(buf.len(), slice.len()); + /// assert_eq!(slice, &[3, 2, 1] as &[_]); + /// } + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn make_contiguous(&mut self) -> &mut [T] { + if T::IS_ZST { + self.head = 0; + } + + if self.is_contiguous() { + unsafe { return slice::from_raw_parts_mut(self.ptr().add(self.head), self.len) } + } + + let &mut Self { head, len, .. } = self; + let ptr = self.ptr(); + let cap = self.capacity(); + + let free = cap - len; + let head_len = cap - head; + let tail = len - head_len; + let tail_len = tail; + + if free >= head_len { + // there is enough free space to copy the head in one go, + // this means that we first shift the tail backwards, and then + // copy the head to the correct position. + // + // from: DEFGH....ABC + // to: ABCDEFGH.... + unsafe { + self.copy(0, head_len, tail_len); + // ...DEFGH.ABC + self.copy_nonoverlapping(head, 0, head_len); + // ABCDEFGH.... 
+ } + + self.head = 0; + } else if free >= tail_len { + // there is enough free space to copy the tail in one go, + // this means that we first shift the head forwards, and then + // copy the tail to the correct position. + // + // from: FGH....ABCDE + // to: ...ABCDEFGH. + unsafe { + self.copy(head, tail, head_len); + // FGHABCDE.... + self.copy_nonoverlapping(0, tail + head_len, tail_len); + // ...ABCDEFGH. + } + + self.head = tail; + } else { + // `free` is smaller than both `head_len` and `tail_len`. + // the general algorithm for this first moves the slices + // right next to each other and then uses `slice::rotate` + // to rotate them into place: + // + // initially: HIJK..ABCDEFG + // step 1: ..HIJKABCDEFG + // step 2: ..ABCDEFGHIJK + // + // or: + // + // initially: FGHIJK..ABCDE + // step 1: FGHIJKABCDE.. + // step 2: ABCDEFGHIJK.. + + // pick the shorter of the 2 slices to reduce the amount + // of memory that needs to be moved around. + if head_len > tail_len { + // tail is shorter, so: + // 1. copy tail forwards + // 2. rotate used part of the buffer + // 3. update head to point to the new beginning (which is just `free`) + + unsafe { + // if there is no free space in the buffer, then the slices are already + // right next to each other and we don't need to move any memory. + if free != 0 { + // because we only move the tail forward as much as there's free space + // behind it, we don't overwrite any elements of the head slice, and + // the slices end up right next to each other. + self.copy(0, free, tail_len); + } + + // We just copied the tail right next to the head slice, + // so all of the elements in the range are initialized + let slice = &mut *self.buffer_range(free..self.capacity()); + + // because the deque wasn't contiguous, we know that `tail_len < self.len == slice.len()`, + // so this will never panic. + slice.rotate_left(tail_len); + + // the used part of the buffer now is `free..self.capacity()`, so set + // `head` to the beginning of that range. + self.head = free; + } + } else { + // head is shorter so: + // 1. copy head backwards + // 2. rotate used part of the buffer + // 3. update head to point to the new beginning (which is the beginning of the buffer) + + unsafe { + // if there is no free space in the buffer, then the slices are already + // right next to each other and we don't need to move any memory. + if free != 0 { + // copy the head slice to lie right behind the tail slice. + self.copy(self.head, tail_len, head_len); + } + + // because we copied the head slice so that both slices lie right + // next to each other, all the elements in the range are initialized. + let slice = &mut *self.buffer_range(0..self.len); + + // because the deque wasn't contiguous, we know that `head_len < self.len == slice.len()` + // so this will never panic. + slice.rotate_right(head_len); + + // the used part of the buffer now is `0..self.len`, so set + // `head` to the beginning of that range. + self.head = 0; + } + } + } + + unsafe { slice::from_raw_parts_mut(ptr.add(self.head), self.len) } + } + + /// Rotates the double-ended queue `mid` places to the left. + /// + /// Equivalently, + /// - Rotates item `mid` into the first position. + /// - Pops the first `mid` items and pushes them to the end. + /// - Rotates `len() - mid` places to the right. + /// + /// # Panics + /// + /// If `mid` is greater than `len()`. Note that `mid == len()` + /// does _not_ panic and is a no-op rotation. 
+ /// + /// # Complexity + /// + /// Takes `*O*(min(mid, len() - mid))` time and no extra space. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{VecDeque, IteratorExt}; + /// + /// let mut buf: VecDeque<_> = (0..10).try_collect()?; + /// + /// buf.rotate_left(3); + /// assert_eq!(buf, [3, 4, 5, 6, 7, 8, 9, 0, 1, 2]); + /// + /// for i in 1..10 { + /// assert_eq!(i * 3 % 10, buf[0]); + /// buf.rotate_left(3); + /// } + /// assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn rotate_left(&mut self, mid: usize) { + assert!(mid <= self.len()); + let k = self.len - mid; + if mid <= k { + unsafe { self.rotate_left_inner(mid) } + } else { + unsafe { self.rotate_right_inner(k) } + } + } + + /// Rotates the double-ended queue `k` places to the right. + /// + /// Equivalently, + /// - Rotates the first item into position `k`. + /// - Pops the last `k` items and pushes them to the front. + /// - Rotates `len() - k` places to the left. + /// + /// # Panics + /// + /// If `k` is greater than `len()`. Note that `k == len()` + /// does _not_ panic and is a no-op rotation. + /// + /// # Complexity + /// + /// Takes `*O*(min(k, len() - k))` time and no extra space. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{VecDeque, IteratorExt}; + /// + /// let mut buf: VecDeque<_> = (0..10).try_collect()?; + /// + /// buf.rotate_right(3); + /// assert_eq!(buf, [7, 8, 9, 0, 1, 2, 3, 4, 5, 6]); + /// + /// for i in 1..10 { + /// assert_eq!(0, buf[i * 3 % 10]); + /// buf.rotate_right(3); + /// } + /// assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn rotate_right(&mut self, k: usize) { + assert!(k <= self.len()); + let mid = self.len - k; + if k <= mid { + unsafe { self.rotate_right_inner(k) } + } else { + unsafe { self.rotate_left_inner(mid) } + } + } + + // SAFETY: the following two methods require that the rotation amount + // be less than half the length of the deque. + // + // `wrap_copy` requires that `min(x, capacity() - x) + copy_len <= capacity()`, + // but then `min` is never more than half the capacity, regardless of x, + // so it's sound to call here because we're calling with something + // less than half the length, which is never above half the capacity. + + unsafe fn rotate_left_inner(&mut self, mid: usize) { + debug_assert!(mid * 2 <= self.len()); + unsafe { + self.wrap_copy(self.head, self.to_physical_idx(self.len), mid); + } + self.head = self.to_physical_idx(mid); + } + + unsafe fn rotate_right_inner(&mut self, k: usize) { + debug_assert!(k * 2 <= self.len()); + self.head = self.wrap_sub(self.head, k); + unsafe { + self.wrap_copy(self.to_physical_idx(self.len), self.head, k); + } + } + + /// Binary searches this `VecDeque` for a given element. + /// If the `VecDeque` is not sorted, the returned result is unspecified and + /// meaningless. + /// + /// If the value is found then [`Result::Ok`] is returned, containing the + /// index of the matching element. If there are multiple matches, then any + /// one of the matches could be returned. If the value is not found then + /// [`Result::Err`] is returned, containing the index where a matching + /// element could be inserted while maintaining sorted order. + /// + /// See also [`binary_search_by`], [`binary_search_by_key`], and [`partition_point`]. 
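+ ///
+ /// This runs in *O*(log(*n*)) time: the search first compares against the
+ /// first element of the second internal slice to decide which of the two
+ /// slices to binary search.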
+ ///
+ /// [`binary_search_by`]: VecDeque::binary_search_by
+ /// [`binary_search_by_key`]: VecDeque::binary_search_by_key
+ /// [`partition_point`]: VecDeque::partition_point
+ ///
+ /// # Examples
+ ///
+ /// Looks up a series of four elements. The first is found, with a
+ /// uniquely determined position; the second and third are not
+ /// found; the fourth could match any position in `[1, 4]`.
+ ///
+ /// ```
+ /// use rune_alloc::VecDeque;
+ ///
+ /// let deque: VecDeque<_> = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55].try_into()?;
+ ///
+ /// assert_eq!(deque.binary_search(&13), Ok(9));
+ /// assert_eq!(deque.binary_search(&4), Err(7));
+ /// assert_eq!(deque.binary_search(&100), Err(13));
+ /// let r = deque.binary_search(&1);
+ /// assert!(matches!(r, Ok(1..=4)));
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ ///
+ /// If you want to insert an item to a sorted deque, while maintaining
+ /// sort order, consider using [`partition_point`]:
+ ///
+ /// ```
+ /// use rune_alloc::VecDeque;
+ ///
+ /// let mut deque: VecDeque<_> = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55].try_into()?;
+ /// let num = 42;
+ /// let idx = deque.partition_point(|&x| x < num);
+ /// // The above is equivalent to `let idx = deque.binary_search(&num).unwrap_or_else(|x| x);`
+ /// deque.try_insert(idx, num)?;
+ /// assert_eq!(deque, &[0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 42, 55]);
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[inline]
+ pub fn binary_search(&self, x: &T) -> Result<usize, usize>
+ where
+ T: Ord,
+ {
+ self.binary_search_by(|e| e.cmp(x))
+ }
+
+ /// Binary searches this `VecDeque` with a comparator function.
+ ///
+ /// The comparator function should return an order code that indicates
+ /// whether its argument is `Less`, `Equal` or `Greater` than the desired
+ /// target.
+ /// If the `VecDeque` is not sorted or if the comparator function does not
+ /// implement an order consistent with the sort order of the underlying
+ /// `VecDeque`, the returned result is unspecified and meaningless.
+ ///
+ /// If the value is found then [`Result::Ok`] is returned, containing the
+ /// index of the matching element. If there are multiple matches, then any
+ /// one of the matches could be returned. If the value is not found then
+ /// [`Result::Err`] is returned, containing the index where a matching
+ /// element could be inserted while maintaining sorted order.
+ ///
+ /// See also [`binary_search`], [`binary_search_by_key`], and [`partition_point`].
+ ///
+ /// [`binary_search`]: VecDeque::binary_search
+ /// [`binary_search_by_key`]: VecDeque::binary_search_by_key
+ /// [`partition_point`]: VecDeque::partition_point
+ ///
+ /// # Examples
+ ///
+ /// Looks up a series of four elements. The first is found, with a
+ /// uniquely determined position; the second and third are not
+ /// found; the fourth could match any position in `[1, 4]`.
+ ///
+ /// ```
+ /// use rune_alloc::VecDeque;
+ ///
+ /// let deque: VecDeque<_> = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55].try_into()?;
+ ///
+ /// assert_eq!(deque.binary_search_by(|x| x.cmp(&13)), Ok(9));
+ /// assert_eq!(deque.binary_search_by(|x| x.cmp(&4)), Err(7));
+ /// assert_eq!(deque.binary_search_by(|x| x.cmp(&100)), Err(13));
+ /// let r = deque.binary_search_by(|x| x.cmp(&1));
+ /// assert!(matches!(r, Ok(1..=4)));
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
+ where
+ F: FnMut(&'a T) -> Ordering,
+ {
+ let (front, back) = self.as_slices();
+ let cmp_back = back.first().map(|elem| f(elem));
+
+ if let Some(Ordering::Equal) = cmp_back {
+ Ok(front.len())
+ } else if let Some(Ordering::Less) = cmp_back {
+ back.binary_search_by(f)
+ .map(|idx| idx + front.len())
+ .map_err(|idx| idx + front.len())
+ } else {
+ front.binary_search_by(f)
+ }
+ }
+
+ /// Binary searches this `VecDeque` with a key extraction function.
+ ///
+ /// Assumes that the deque is sorted by the key, for instance with
+ /// [`make_contiguous().sort_by_key()`] using the same key extraction function.
+ /// If the deque is not sorted by the key, the returned result is
+ /// unspecified and meaningless.
+ ///
+ /// If the value is found then [`Result::Ok`] is returned, containing the
+ /// index of the matching element. If there are multiple matches, then any
+ /// one of the matches could be returned. If the value is not found then
+ /// [`Result::Err`] is returned, containing the index where a matching
+ /// element could be inserted while maintaining sorted order.
+ ///
+ /// See also [`binary_search`], [`binary_search_by`], and [`partition_point`].
+ ///
+ /// [`make_contiguous().sort_by_key()`]: VecDeque::make_contiguous
+ /// [`binary_search`]: VecDeque::binary_search
+ /// [`binary_search_by`]: VecDeque::binary_search_by
+ /// [`partition_point`]: VecDeque::partition_point
+ ///
+ /// # Examples
+ ///
+ /// Looks up a series of four elements in a slice of pairs sorted by
+ /// their second elements. The first is found, with a uniquely
+ /// determined position; the second and third are not found; the
+ /// fourth could match any position in `[1, 4]`.
+ ///
+ /// ```
+ /// use rune_alloc::VecDeque;
+ ///
+ /// let deque: VecDeque<_> = [(0, 0), (2, 1), (4, 1), (5, 1),
+ /// (3, 1), (1, 2), (2, 3), (4, 5), (5, 8), (3, 13),
+ /// (1, 21), (2, 34), (4, 55)].try_into()?;
+ ///
+ /// assert_eq!(deque.binary_search_by_key(&13, |&(a, b)| b), Ok(9));
+ /// assert_eq!(deque.binary_search_by_key(&4, |&(a, b)| b), Err(7));
+ /// assert_eq!(deque.binary_search_by_key(&100, |&(a, b)| b), Err(13));
+ /// let r = deque.binary_search_by_key(&1, |&(a, b)| b);
+ /// assert!(matches!(r, Ok(1..=4)));
+ /// # Ok::<_, rune_alloc::Error>(())
+ /// ```
+ #[inline]
+ pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
+ where
+ F: FnMut(&'a T) -> B,
+ B: Ord,
+ {
+ self.binary_search_by(|k| f(k).cmp(b))
+ }
+
+ /// Returns the index of the partition point according to the given predicate
+ /// (the index of the first element of the second partition).
+ ///
+ /// The deque is assumed to be partitioned according to the given predicate.
+ /// This means that all elements for which the predicate returns true are at the start of the deque
+ /// and all elements for which the predicate returns false are at the end.
+ /// For example, `[7, 15, 3, 5, 4, 12, 6]` is partitioned under the predicate `x % 2 != 0` + /// (all odd numbers are at the start, all even at the end). + /// + /// If the deque is not partitioned, the returned result is unspecified and meaningless, + /// as this method performs a kind of binary search. + /// + /// See also [`binary_search`], [`binary_search_by`], and [`binary_search_by_key`]. + /// + /// [`binary_search`]: VecDeque::binary_search + /// [`binary_search_by`]: VecDeque::binary_search_by + /// [`binary_search_by_key`]: VecDeque::binary_search_by_key + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let deque: VecDeque<_> = [1, 2, 3, 3, 5, 6, 7].try_into()?; + /// let i = deque.partition_point(|&x| x < 5); + /// + /// assert_eq!(i, 4); + /// assert!(deque.iter().take(i).all(|&x| x < 5)); + /// assert!(deque.iter().skip(i).all(|&x| !(x < 5))); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + /// + /// If you want to insert an item to a sorted deque, while maintaining + /// sort order: + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut deque: VecDeque<_> = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55].try_into()?; + /// let num = 42; + /// let idx = deque.partition_point(|&x| x < num); + /// deque.try_insert(idx, num)?; + /// assert_eq!(deque, &[0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 42, 55]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn partition_point
<P>
(&self, mut pred: P) -> usize + where + P: FnMut(&T) -> bool, + { + let (front, back) = self.as_slices(); + + if let Some(true) = back.first().map(|v| pred(v)) { + back.partition_point(pred) + front.len() + } else { + front.partition_point(pred) + } + } +} + +impl VecDeque { + /// Modifies the deque in-place so that `len()` is equal to new_len, + /// either by removing excess elements from the back or by appending clones of `value` + /// to the back. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let mut buf = VecDeque::new(); + /// buf.try_push_back(5)?; + /// buf.try_push_back(10)?; + /// buf.try_push_back(15)?; + /// assert_eq!(buf, [5, 10, 15]); + /// + /// buf.try_resize(2, 0)?; + /// assert_eq!(buf, [5, 10]); + /// + /// buf.try_resize(5, 20)?; + /// assert_eq!(buf, [5, 10, 20, 20, 20]); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + pub fn try_resize(&mut self, new_len: usize, value: T) -> Result<(), Error> { + if new_len > self.len() { + let extra = new_len - self.len(); + + for _ in 0..extra { + self.try_push_back(value.try_clone()?)?; + } + } else { + self.truncate(new_len); + } + + Ok(()) + } +} + +/// Returns the index in the underlying buffer for a given logical element index. +#[inline] +fn wrap_index(logical_index: usize, capacity: usize) -> usize { + debug_assert!( + (logical_index == 0 && capacity == 0) + || logical_index < capacity + || (logical_index - capacity) < capacity + ); + if logical_index >= capacity { + logical_index - capacity + } else { + logical_index + } +} + +impl PartialEq for VecDeque { + fn eq(&self, other: &Self) -> bool { + if self.len != other.len() { + return false; + } + let (sa, sb) = self.as_slices(); + let (oa, ob) = other.as_slices(); + if sa.len() == oa.len() { + sa == oa && sb == ob + } else if sa.len() < oa.len() { + // Always divisible in three sections, for example: + // self: [a b c|d e f] + // other: [0 1 2 3|4 5] + // front = 3, mid = 1, + // [a b c] == [0 1 2] && [d] == [3] && [e f] == [4 5] + let front = sa.len(); + let mid = oa.len() - front; + + let (oa_front, oa_mid) = oa.split_at(front); + let (sb_mid, sb_back) = sb.split_at(mid); + debug_assert_eq!(sa.len(), oa_front.len()); + debug_assert_eq!(sb_mid.len(), oa_mid.len()); + debug_assert_eq!(sb_back.len(), ob.len()); + sa == oa_front && sb_mid == oa_mid && sb_back == ob + } else { + let front = oa.len(); + let mid = sa.len() - front; + + let (sa_front, sa_mid) = sa.split_at(front); + let (ob_mid, ob_back) = ob.split_at(mid); + debug_assert_eq!(sa_front.len(), oa.len()); + debug_assert_eq!(sa_mid.len(), ob_mid.len()); + debug_assert_eq!(sb.len(), ob_back.len()); + sa_front == oa && sa_mid == ob_mid && sb == ob_back + } + } +} + +impl Eq for VecDeque {} + +__impl_slice_eq1! { [] VecDeque, Vec, } +__impl_slice_eq1! { [] VecDeque, &[U], } +__impl_slice_eq1! { [] VecDeque, &mut [U], } +__impl_slice_eq1! { [const N: usize] VecDeque, [U; N], } +__impl_slice_eq1! { [const N: usize] VecDeque, &[U; N], } +__impl_slice_eq1! 
{ [const N: usize] VecDeque, &mut [U; N], } + +impl PartialOrd for VecDeque { + fn partial_cmp(&self, other: &Self) -> Option { + self.iter().partial_cmp(other.iter()) + } +} + +impl Ord for VecDeque { + #[inline] + fn cmp(&self, other: &Self) -> Ordering { + self.iter().cmp(other.iter()) + } +} + +impl Hash for VecDeque { + fn hash(&self, state: &mut H) { + state.write_usize(self.len); + // It's not possible to use Hash::hash_slice on slices + // returned by as_slices method as their length can vary + // in otherwise identical deques. + // + // Hasher only guarantees equivalence for the exact same + // set of calls to its methods. + self.iter().for_each(|elem| elem.hash(state)); + } +} + +impl Index for VecDeque { + type Output = T; + + #[inline] + fn index(&self, index: usize) -> &T { + self.get(index).expect("Out of bounds access") + } +} + +impl IndexMut for VecDeque { + #[inline] + fn index_mut(&mut self, index: usize) -> &mut T { + self.get_mut(index).expect("Out of bounds access") + } +} + +impl IntoIterator for VecDeque { + type Item = T; + type IntoIter = IntoIter; + + /// Consumes the deque into a front-to-back iterator yielding elements by + /// value. + fn into_iter(self) -> IntoIter { + IntoIter::new(self) + } +} + +impl<'a, T, A: Allocator> IntoIterator for &'a VecDeque { + type Item = &'a T; + type IntoIter = Iter<'a, T>; + + fn into_iter(self) -> Iter<'a, T> { + self.iter() + } +} + +impl<'a, T, A: Allocator> IntoIterator for &'a mut VecDeque { + type Item = &'a mut T; + type IntoIter = IterMut<'a, T>; + + fn into_iter(self) -> IterMut<'a, T> { + self.iter_mut() + } +} + +impl fmt::Debug for VecDeque { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_list().entries(self.iter()).finish() + } +} + +impl From> for VecDeque { + /// Turn a [`Vec`] into a [`VecDeque`]. + /// + /// [`Vec`]: crate::Vec + /// [`VecDeque`]: crate::VecDeque + /// + /// This conversion is guaranteed to run in *O*(1) time + /// and to not re-allocate the `Vec`'s buffer or allocate + /// any additional memory. + #[inline] + fn from(other: Vec) -> Self { + let (buf, len) = other.into_raw_vec(); + Self { head: 0, len, buf } + } +} + +impl From> for Vec { + /// Turn a [`VecDeque`] into a [`Vec`]. + /// + /// [`Vec`]: crate::Vec + /// [`VecDeque`]: crate::VecDeque + /// + /// This never needs to re-allocate, but does need to do *O*(*n*) data movement if + /// the circular buffer doesn't happen to be at the beginning of the allocation. + /// + /// # Examples + /// + /// ``` + /// use rune_alloc::{VecDeque, Vec, IteratorExt}; + /// + /// // This one is *O*(1). + /// let deque: VecDeque<_> = (1..5).try_collect()?; + /// let ptr = deque.as_slices().0.as_ptr(); + /// let vec = Vec::from(deque); + /// assert_eq!(vec, [1, 2, 3, 4]); + /// assert_eq!(vec.as_ptr(), ptr); + /// + /// // This one needs data rearranging. 
+ /// let mut deque: VecDeque<_> = (1..5).try_collect()?; + /// deque.try_push_front(9)?; + /// deque.try_push_front(8)?; + /// let ptr = deque.as_slices().1.as_ptr(); + /// let vec = Vec::from(deque); + /// assert_eq!(vec, [8, 9, 1, 2, 3, 4]); + /// assert_eq!(vec.as_ptr(), ptr); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + fn from(mut other: VecDeque) -> Self { + other.make_contiguous(); + + unsafe { + let other = ManuallyDrop::new(other); + let buf = other.buf.ptr(); + let len = other.len(); + let cap = other.capacity(); + let alloc = ptr::read(other.allocator()); + + if other.head != 0 { + ptr::copy(buf.add(other.head), buf, len); + } + Vec::from_raw_parts_in(buf, len, cap, alloc) + } + } +} + +impl TryFrom<[T; N]> for VecDeque { + type Error = Error; + + /// Converts a `[T; N]` into a `VecDeque`. + /// + /// ``` + /// use rune_alloc::VecDeque; + /// + /// let deq1 = VecDeque::try_from([1, 2, 3, 4])?; + /// let deq2: VecDeque<_> = [1, 2, 3, 4].try_into()?; + /// assert_eq!(deq1, deq2); + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + fn try_from(arr: [T; N]) -> Result { + Ok(VecDeque::from(Vec::try_from(arr)?)) + } +} + +impl TryFromIteratorIn for VecDeque { + fn try_from_iter_in(iter: I, alloc: A) -> Result + where + I: IntoIterator, + { + let mut this = VecDeque::new_in(alloc); + this.try_extend(iter)?; + Ok(this) + } +} + +impl TryExtend for VecDeque { + #[inline] + fn try_extend>(&mut self, iter: I) -> Result<(), Error> { + for value in iter { + self.try_push_back(value)?; + } + + Ok(()) + } +} diff --git a/crates/rune-alloc/src/alloc/vec_deque/raw_iter.rs b/crates/rune-alloc/src/alloc/vec_deque/raw_iter.rs new file mode 100644 index 000000000..8993811f7 --- /dev/null +++ b/crates/rune-alloc/src/alloc/vec_deque/raw_iter.rs @@ -0,0 +1,111 @@ +use core::fmt; +use core::iter::FusedIterator; +use core::mem; + +use crate::slice; + +/// An iterator over the elements of a `VecDeque`. +/// +/// This `struct` is created by the [`iter`] method on [`super::VecDeque`]. See its +/// documentation for more. +/// +/// [`iter`]: super::VecDeque::iter +pub struct RawIter { + i1: slice::RawIter, + i2: slice::RawIter, +} + +impl RawIter { + pub(super) fn new(i1: slice::RawIter, i2: slice::RawIter) -> Self { + Self { i1, i2 } + } +} + +impl fmt::Debug for RawIter { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("Iter").finish() + } +} + +// FIXME(#26925) Remove in favor of `#[derive(Clone)]` +impl Clone for RawIter { + fn clone(&self) -> Self { + RawIter { + i1: self.i1.clone(), + i2: self.i2.clone(), + } + } +} + +impl Iterator for RawIter { + type Item = *const T; + + #[inline] + fn next(&mut self) -> Option<*const T> { + match self.i1.next() { + Some(val) => Some(val), + None => { + // most of the time, the iterator will either always + // call next(), or always call next_back(). 
By swapping + // the iterators once the first one is empty, we ensure + // that the first branch is taken as often as possible, + // without sacrificing correctness, as i1 is empty anyways + mem::swap(&mut self.i1, &mut self.i2); + self.i1.next() + } + } + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let len = self.len(); + (len, Some(len)) + } + + fn fold(self, accum: Acc, mut f: F) -> Acc + where + F: FnMut(Acc, Self::Item) -> Acc, + { + let accum = self.i1.fold(accum, &mut f); + self.i2.fold(accum, &mut f) + } + + #[inline] + fn last(mut self) -> Option<*const T> { + self.next_back() + } +} + +impl DoubleEndedIterator for RawIter { + #[inline] + fn next_back(&mut self) -> Option<*const T> { + match self.i2.next_back() { + Some(val) => Some(val), + None => { + // most of the time, the iterator will either always + // call next(), or always call next_back(). By swapping + // the iterators once the second one is empty, we ensure + // that the first branch is taken as often as possible, + // without sacrificing correctness, as i2 is empty anyways + mem::swap(&mut self.i1, &mut self.i2); + self.i2.next_back() + } + } + } + + fn rfold(self, accum: Acc, mut f: F) -> Acc + where + F: FnMut(Acc, Self::Item) -> Acc, + { + let accum = self.i2.rfold(accum, &mut f); + self.i1.rfold(accum, &mut f) + } +} + +impl ExactSizeIterator for RawIter { + fn len(&self) -> usize { + self.i1.len() + self.i2.len() + } +} + +impl FusedIterator for RawIter {} diff --git a/crates/rune-alloc/src/fmt.rs b/crates/rune-alloc/src/fmt.rs new file mode 100644 index 000000000..e52be7336 --- /dev/null +++ b/crates/rune-alloc/src/fmt.rs @@ -0,0 +1,109 @@ +//! Built-in formatting utilities. + +use core::fmt; + +use crate::alloc::Error; + +/// Fallible write formatting implementation. +pub trait TryWrite { + /// Writes a string slice into this writer, returning whether the write + /// succeeded. + /// + /// This method can only succeed if the entire string slice was successfully + /// written, and this method will not return until all data has been + /// written or an error occurs. + /// + /// # Errors + /// + /// This function will return an instance of [`Error`] on error. + /// + /// # Examples + /// + /// ``` + /// use std::fmt::{Error, Write}; + /// + /// fn writer(f: &mut W, s: &str) -> Result<(), Error> { + /// f.write_str(s) + /// } + /// + /// let mut buf = String::new(); + /// writer(&mut buf, "hola").unwrap(); + /// assert_eq!(&buf, "hola"); + /// ``` + fn try_write_str(&mut self, s: &str) -> Result<(), Error>; + + /// Writes a [`char`] into this writer, returning whether the write succeeded. + /// + /// A single [`char`] may be encoded as more than one byte. + /// This method can only succeed if the entire byte sequence was successfully + /// written, and this method will not return until all data has been + /// written or an error occurs. + /// + /// # Errors + /// + /// This function will return an instance of [`Error`] on error. 
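The `mem::swap` trick in `RawIter`'s `next` and `next_back` above keeps the hot path on the first branch. The same idea in a self-contained toy that assumes nothing from this crate (`TwoHalves` is invented for illustration):

```rust
// `TwoHalves` chains two iterators while keeping the common case on the
// cheap `i1.next()` branch: once `i1` runs dry, the halves are swapped so
// later calls still take the first branch.
struct TwoHalves<I> {
    i1: I,
    i2: I,
}

impl<I: Iterator> Iterator for TwoHalves<I> {
    type Item = I::Item;

    fn next(&mut self) -> Option<I::Item> {
        match self.i1.next() {
            Some(v) => Some(v),
            None => {
                // `i1` is empty, so swapping cannot reorder remaining items.
                std::mem::swap(&mut self.i1, &mut self.i2);
                self.i1.next()
            }
        }
    }
}

fn main() {
    let halves = TwoHalves {
        i1: [1, 2].into_iter(),
        i2: [3, 4].into_iter(),
    };
    assert_eq!(halves.collect::<Vec<_>>(), vec![1, 2, 3, 4]);
}
```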
+ /// + /// # Examples + /// + /// ``` + /// use std::fmt::{Error, Write}; + /// + /// fn writer(f: &mut W, c: char) -> Result<(), Error> { + /// f.write_char(c) + /// } + /// + /// let mut buf = String::new(); + /// writer(&mut buf, 'a').unwrap(); + /// writer(&mut buf, 'b').unwrap(); + /// assert_eq!(&buf, "ab"); + /// ``` + #[inline] + fn try_write_char(&mut self, c: char) -> Result<(), Error> { + self.try_write_str(c.encode_utf8(&mut [0; 4])) + } + + #[inline] + #[doc(hidden)] + fn write_fmt(&mut self, args: fmt::Arguments<'_>) -> Result<(), Error> + where + Self: Sized, + { + struct Writer<'a> { + target: &'a mut dyn TryWrite, + error: Option, + } + + impl fmt::Write for Writer<'_> { + #[inline] + fn write_str(&mut self, s: &str) -> fmt::Result { + if let Err(error) = (*self.target).try_write_str(s) { + self.error = Some(error); + } + + Ok(()) + } + + #[inline] + fn write_char(&mut self, c: char) -> fmt::Result { + if let Err(error) = (*self.target).try_write_char(c) { + self.error = Some(error); + } + + Ok(()) + } + } + + let mut writer = Writer { + target: self, + error: None, + }; + + fmt::write(&mut writer, args).unwrap(); + + if let Some(error) = writer.error { + Err(error) + } else { + Ok(()) + } + } +} diff --git a/crates/rune-alloc/src/hint.rs b/crates/rune-alloc/src/hint.rs new file mode 100644 index 000000000..012b2988e --- /dev/null +++ b/crates/rune-alloc/src/hint.rs @@ -0,0 +1,12 @@ +cfg_if! { + if #[cfg(rune_nightly)] { + pub(crate) use core::intrinsics::{likely, unlikely, assume}; + } else { + pub(crate) use core::convert::{identity as likely, identity as unlikely}; + + #[inline(always)] + pub(crate) fn assume(_: bool) { + // do nothing + } + } +} diff --git a/crates/rune-alloc/src/iter/ext.rs b/crates/rune-alloc/src/iter/ext.rs new file mode 100644 index 000000000..a8a12a38a --- /dev/null +++ b/crates/rune-alloc/src/iter/ext.rs @@ -0,0 +1,29 @@ +use crate::alloc::{Allocator, Error, Global, TryFromIteratorIn}; + +/// Iterator extension trait. +pub trait IteratorExt: Iterator + self::sealed::Sealed { + /// Transforms an iterator into a collection using fallible allocations. + fn try_collect(self) -> Result + where + Self: Sized, + B: TryFromIteratorIn, + { + self.try_collect_in(Global) + } + + /// Transforms an iterator into a collection using fallible allocations. + fn try_collect_in(self, alloc: A) -> Result + where + Self: Sized, + B: TryFromIteratorIn, + { + TryFromIteratorIn::try_from_iter_in(self, alloc) + } +} + +impl IteratorExt for I where I: Iterator {} + +mod sealed { + pub trait Sealed {} + impl Sealed for I where I: Iterator {} +} diff --git a/crates/rune-alloc/src/iter/mod.rs b/crates/rune-alloc/src/iter/mod.rs new file mode 100644 index 000000000..5295e824d --- /dev/null +++ b/crates/rune-alloc/src/iter/mod.rs @@ -0,0 +1,4 @@ +//! Composable external iteration. + +pub use self::ext::IteratorExt; +mod ext; diff --git a/crates/rune-alloc/src/lib.rs b/crates/rune-alloc/src/lib.rs new file mode 100644 index 000000000..7eb8061bf --- /dev/null +++ b/crates/rune-alloc/src/lib.rs @@ -0,0 +1,101 @@ +//! Raw extension utilities of std for Rune. +//! +//! Note that there is lots of unsafety in here. Use with caution. + +// Quite a few parts copied from the Rust Project under the MIT license. +// +// Copyright 2014-2023 The Rust Project Developers +// +// Licensed under the Apache License, Version 2.0 or the MIT license , at your option. Files in the project +// may not be copied, modified, or distributed except according to those terms. 
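The `write_fmt` shim above threads a fallible error through the infallible `fmt::Write` interface: the adapter swallows the error, records it, and surfaces it only after `fmt::write` returns. A standalone sketch of the same pattern, with invented `CountingWriter` and `OutOfBudget` types:

```rust
use std::fmt;

// `CountingWriter` refuses writes past a byte budget; its error cannot flow
// through `fmt::Write`, so `write_fmt` smuggles it through a shim.
struct CountingWriter {
    buf: String,
    budget: usize,
}

#[derive(Debug)]
struct OutOfBudget;

impl CountingWriter {
    fn try_write_str(&mut self, s: &str) -> Result<(), OutOfBudget> {
        if s.len() > self.budget {
            return Err(OutOfBudget);
        }
        self.budget -= s.len();
        self.buf.push_str(s);
        Ok(())
    }

    fn write_fmt(&mut self, args: fmt::Arguments<'_>) -> Result<(), OutOfBudget> {
        struct Shim<'a> {
            target: &'a mut CountingWriter,
            error: Option<OutOfBudget>,
        }

        impl fmt::Write for Shim<'_> {
            fn write_str(&mut self, s: &str) -> fmt::Result {
                // Record the real error but report success, so that
                // `fmt::write` itself never fails.
                if let Err(error) = self.target.try_write_str(s) {
                    self.error = Some(error);
                }
                Ok(())
            }
        }

        let mut shim = Shim { target: self, error: None };
        fmt::write(&mut shim, args).unwrap();
        match shim.error {
            Some(error) => Err(error),
            None => Ok(()),
        }
    }
}

fn main() {
    let mut w = CountingWriter { buf: String::new(), budget: 16 };
    w.write_fmt(format_args!("{} + {}", 1, 2)).unwrap();
    assert_eq!(w.buf, "1 + 2");
}
```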
+ +// alloc/hashbrown +// +// Copyright (c) 2016 Amanieu d'Antras +// +// Licensed under the Apache License, Version 2.0 or the MIT license , at your option. Files in the project +// may not be copied, modified, or distributed except according to those terms. + +#![no_std] +// TODO: get rid of this once we've evaluated what we want to have public. +#![allow(dead_code)] +#![cfg_attr(rune_nightly, feature(core_intrinsics))] +#![cfg_attr(rune_nightly, feature(dropck_eyepatch))] +#![cfg_attr(rune_nightly, feature(min_specialization))] +#![cfg_attr(rune_nightly, feature(ptr_sub_ptr))] +#![cfg_attr(rune_nightly, feature(set_ptr_value))] +#![cfg_attr(rune_nightly, feature(slice_ptr_len))] +#![cfg_attr(rune_nightly, feature(slice_range))] +#![cfg_attr(rune_nightly, feature(strict_provenance))] +#![cfg_attr(rune_nightly, feature(saturating_int_impl))] +#![cfg_attr(rune_nightly, feature(inline_const))] +#![cfg_attr(rune_nightly, feature(const_maybe_uninit_zeroed))] +// The only feature we use is `rustc_specialization_trait`. +#![cfg_attr(rune_nightly, allow(internal_features))] +#![cfg_attr(rune_nightly, feature(rustc_attrs))] +#![allow(clippy::comparison_chain)] +#![allow(clippy::manual_map)] +#![allow(clippy::type_complexity)] +#![allow(clippy::drop_non_drop)] + +#[cfg(feature = "std")] +extern crate std as rust_std; + +#[cfg(feature = "alloc")] +extern crate alloc as rust_alloc; + +// This is here for forward compatibility when we can support allocation-free +// execution. +#[cfg(not(feature = "alloc"))] +compile_error!("The `alloc` feature is currently required to build rune-alloc, but will change for parts of rune in the future."); + +#[macro_use] +mod public_macros; + +#[macro_use] +mod macros; + +pub use self::alloc::boxed::{self, Box}; +pub use self::alloc::btree::{map as btree_map, map::BTreeMap}; +pub use self::alloc::btree::{set as btree_set, set::BTreeSet}; +pub use self::alloc::hashbrown; +pub use self::alloc::hashbrown::{map as hash_map, map::HashMap}; +pub use self::alloc::hashbrown::{set as hash_set, set::HashSet}; +pub use self::alloc::str; +pub use self::alloc::string::{self, String, TryToString}; +pub use self::alloc::vec::{self, Vec}; +pub use self::alloc::vec_deque::{self, VecDeque}; +pub use self::alloc::{ + AllocError, Allocator, CustomError, Error, Global, TryClone, TryExtend, TryFromIterator, + TryFromIteratorIn, TryToOwned, +}; +pub(crate) mod alloc; + +pub use self::iter::IteratorExt; +pub mod iter; + +pub use self::fmt::TryWrite; +pub mod fmt; + +pub(crate) mod hint; +pub(crate) mod ptr; +pub(crate) mod slice; + +pub mod prelude { + //! Prelude for common traits used in combination with this crate which + //! matches the behavior of the std prelude. + pub use crate::{ + IteratorExt, TryClone, TryExtend, TryFromIterator, TryFromIteratorIn, TryToOwned, + TryToString, + }; +} + +#[cfg(test)] +mod testing; + +#[cfg(test)] +mod tests; diff --git a/crates/rune/src/hashbrown/fork/macros.rs b/crates/rune-alloc/src/macros.rs similarity index 54% rename from crates/rune/src/hashbrown/fork/macros.rs rename to crates/rune-alloc/src/macros.rs index eaba6bed1..0800463c3 100644 --- a/crates/rune/src/hashbrown/fork/macros.rs +++ b/crates/rune-alloc/src/macros.rs @@ -54,17 +54,47 @@ macro_rules! cfg_if { }; } +/// Call the given macro with repeated type arguments and counts. +macro_rules! 
repeat_macro { + ($macro:ident) => { + $macro!(0); + $macro!(1, A a 0); + $macro!(2, A a 0, B b 1); + $macro!(3, A a 0, B b 1, C c 2); + $macro!(4, A a 0, B b 1, C c 2, D d 3); + $macro!(5, A a 0, B b 1, C c 2, D d 3, E e 4); + $macro!(6, A a 0, B b 1, C c 2, D d 3, E e 4, F f 5); + $macro!(7, A a 0, B b 1, C c 2, D d 3, E e 4, F f 5, G g 6); + $macro!(8, A a 0, B b 1, C c 2, D d 3, E e 4, F f 5, G g 6, H h 7); + $macro!(9, A a 0, B b 1, C c 2, D d 3, E e 4, F f 5, G g 6, H h 7, I i 8); + $macro!(10, A a 0, B b 1, C c 2, D d 3, E e 4, F f 5, G g 6, H h 7, I i 8, J j 9); + $macro!(11, A a 0, B b 1, C c 2, D d 3, E e 4, F f 5, G g 6, H h 7, I i 8, J j 9, K k 10); + $macro!(12, A a 0, B b 1, C c 2, D d 3, E e 4, F f 5, G g 6, H h 7, I i 8, J j 9, K k 10, L l 11); + $macro!(13, A a 0, B b 1, C c 2, D d 3, E e 4, F f 5, G g 6, H h 7, I i 8, J j 9, K k 10, L l 11, M m 12); + $macro!(14, A a 0, B b 1, C c 2, D d 3, E e 4, F f 5, G g 6, H h 7, I i 8, J j 9, K k 10, L l 11, M m 12, N n 13); + $macro!(15, A a 0, B b 1, C c 2, D d 3, E e 4, F f 5, G g 6, H h 7, I i 8, J j 9, K k 10, L l 11, M m 12, N n 13, O o 14); + $macro!(16, A a 0, B b 1, C c 2, D d 3, E e 4, F f 5, G g 6, H h 7, I i 8, J j 9, K k 10, L l 11, M m 12, N n 13, O o 14, P p 15); + }; +} + // Helper macro for specialization. This also helps avoid parse errors if the // default fn syntax for specialization changes in the future. -#[cfg(feature = "nightly")] +#[cfg(rune_nightly)] macro_rules! default_fn { - (#[$($a:tt)*] $($tt:tt)*) => { - #[$($a)*] default $($tt)* + ($(#[$meta:meta])* unsafe fn $($tt:tt)*) => { + $(#[$meta])* + default unsafe fn $($tt)* + }; + + ($(#[$meta:meta])* fn $($tt:tt)*) => { + $(#[$meta])* + default fn $($tt)* } } -#[cfg(not(feature = "nightly"))] + +#[cfg(not(rune_nightly))] macro_rules! default_fn { - ($($tt:tt)*) => { + ($($tt:tt)*) => { $($tt)* } } diff --git a/crates/rune-alloc/src/ptr.rs b/crates/rune-alloc/src/ptr.rs new file mode 100644 index 000000000..c51978aac --- /dev/null +++ b/crates/rune-alloc/src/ptr.rs @@ -0,0 +1,124 @@ +//! This module contains (hopefully sound) re-implementations of unstable +//! `core::ptr` APIs. + +pub(crate) use self::unique::Unique; +mod unique; + +use core::mem; +pub(crate) use core::ptr::NonNull; + +// Stable re-exports. +pub(crate) use core::ptr::{ + addr_of, addr_of_mut, copy, copy_nonoverlapping, drop_in_place, read, slice_from_raw_parts_mut, + write, +}; + +pub(crate) const unsafe fn nonnull_add(this: NonNull, delta: usize) -> NonNull +where + T: Sized, +{ + // SAFETY: We require that the delta stays in-bounds of the object, and + // thus it cannot become null, as that would require wrapping the + // address space, which no legal objects are allowed to do. + // And the caller promised the `delta` is sound to add. + let pointer = this.as_ptr(); + unsafe { NonNull::new_unchecked(pointer.add(delta)) } +} + +pub(crate) const unsafe fn nonnull_sub(this: NonNull, delta: usize) -> NonNull +where + T: Sized, +{ + // SAFETY: We require that the delta stays in-bounds of the object, and + // thus it cannot become null, as that would require wrapping the + // address space, which no legal objects are allowed to do. + // And the caller promised the `delta` is sound to add. + let pointer = this.as_ptr(); + unsafe { NonNull::new_unchecked(pointer.sub(delta)) } +} + +#[inline(always)] +#[allow(clippy::useless_transmute)] +pub const fn invalid(addr: usize) -> *const T { + // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic. 
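`repeat_macro!` above is a driver that feeds an arity plus `Type binding index` triples to a callback macro, which then emits one impl per tuple size. A toy consumer, with a hypothetical `Describe` trait and `impl_describe!` callback covering arities 0 through 2 only:

```rust
trait Describe {
    fn describe() -> String;
}

// Callback macro: receives the arity and the type/binding/index triples,
// and emits one trait impl per tuple size.
macro_rules! impl_describe {
    ($count:expr $(, $ty:ident $var:ident $idx:tt)*) => {
        impl<$($ty),*> Describe for ($($ty,)*) {
            fn describe() -> String {
                format!("tuple of arity {}", $count)
            }
        }
    };
}

impl_describe!(0);
impl_describe!(1, A a 0);
impl_describe!(2, A a 0, B b 1);

fn main() {
    assert_eq!(<(i32, bool)>::describe(), "tuple of arity 2");
}
```

In the crate itself, the driver would then be invoked as `repeat_macro!(impl_describe)` to stamp out all sixteen arities at once.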
+ // We use transmute rather than a cast so tools like Miri can tell that this + // is *not* the same as from_exposed_addr. + // SAFETY: every valid integer is also a valid pointer (as long as you don't dereference that + // pointer). + unsafe { mem::transmute(addr) } +} + +#[inline(always)] +#[allow(clippy::useless_transmute)] +pub const fn invalid_mut(addr: usize) -> *mut T { + // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic. + // We use transmute rather than a cast so tools like Miri can tell that this + // is *not* the same as from_exposed_addr. + // SAFETY: every valid integer is also a valid pointer (as long as you don't dereference that + // pointer). + unsafe { mem::transmute(addr) } +} + +cfg_if! { + if #[cfg(rune_nightly)] { + #[inline(always)] + pub(crate) const fn wrapping_byte_sub(ptr: *const T, count: usize) -> *const T { + ptr.cast::().wrapping_sub(count).with_metadata_of(ptr) + } + } else { + #[inline(always)] + pub(crate) const fn wrapping_byte_sub(ptr: *const T, count: usize) -> *const T { + // TODO: We need to use metadata. + ptr.cast::().wrapping_sub(count) as *const T + } + } +} + +cfg_if! { + if #[cfg(rune_nightly)] { + #[inline(always)] + pub(crate) unsafe fn sub_ptr(from: *const T, to: *const T) -> usize { + from.sub_ptr(to) + } + } else { + #[inline(always)] + pub(crate) unsafe fn sub_ptr(from: *const T, to: *const T) -> usize { + let pointee_size = mem::size_of::(); + assert!(0 < pointee_size && pointee_size <= isize::MAX as usize); + debug_assert!(addr(from) >= addr(to)); + addr(from).wrapping_sub(addr(to)).saturating_div(pointee_size) + } + } +} + +cfg_if! { + if #[cfg(rune_nightly)] { + #[inline(always)] + pub(crate) fn addr(from: *const T) -> usize { + from.addr() + } + } else { + #[inline(always)] + pub(crate) fn addr(from: *const T) -> usize { + from as usize + } + } +} + +cfg_if! { + if #[cfg(rune_nightly)] { + #[inline(always)] + pub(crate) fn slice_len(from: *const [T]) -> usize { + from.len() + } + } else { + #[inline(always)] + pub(crate) fn slice_len(from: *const [T]) -> usize { + // SAFETY: This is *a bit* tricky, but the raw pointer contains the + // length and *should* be safe to dereference like this. However, + // walking through the dereferenced `[T]` is not necessarily + // correct. + unsafe { (*from).len() } + } + } +} diff --git a/crates/rune-alloc/src/ptr/unique.rs b/crates/rune-alloc/src/ptr/unique.rs new file mode 100644 index 000000000..820fa7c39 --- /dev/null +++ b/crates/rune-alloc/src/ptr/unique.rs @@ -0,0 +1,206 @@ +use core::convert::From; +use core::fmt; +use core::marker::PhantomData; + +use crate::ptr::NonNull; + +/// A wrapper around a raw non-null `*mut T` that indicates that the possessor +/// of this wrapper owns the referent. Useful for building abstractions like +/// `Box`, `Vec`, `String`, and `HashMap`. +/// +/// Unlike `*mut T`, `Unique` behaves "as if" it were an instance of `T`. +/// It implements `Send`/`Sync` if `T` is `Send`/`Sync`. It also implies +/// the kind of strong aliasing guarantees an instance of `T` can expect: +/// the referent of the pointer should not be modified without a unique path to +/// its owning Unique. +/// +/// If you're uncertain of whether it's correct to use `Unique` for your purposes, +/// consider using `NonNull`, which has weaker semantics. +/// +/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer +/// is never dereferenced. 
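The stable fallback for `sub_ptr` above computes the element distance from raw addresses. The arithmetic, checked in isolation with a hypothetical `element_distance` helper:

```rust
// Distance in *elements* between two pointers into the same allocation,
// computed from raw addresses divided by the element size.
fn element_distance<T>(from: *const T, to: *const T) -> usize {
    let size = std::mem::size_of::<T>();
    assert!(size > 0, "ZSTs need a length-carrying representation instead");
    debug_assert!(from as usize >= to as usize);
    (from as usize - to as usize) / size
}

fn main() {
    let xs = [0u64; 8];
    let base = xs.as_ptr();
    // Pointer five elements past the base: distance is 5 elements (40 bytes).
    let p = unsafe { base.add(5) };
    assert_eq!(element_distance(p, base), 5);
}
```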
This is so that enums may use this forbidden value +/// as a discriminant -- `Option>` has the same size as `Unique`. +/// However the pointer may still dangle if it isn't dereferenced. +/// +/// Unlike `*mut T`, `Unique` is covariant over `T`. This should always be correct +/// for any type which upholds Unique's aliasing requirements. +#[doc(hidden)] +#[repr(transparent)] +pub struct Unique { + pointer: NonNull, + // NOTE: this marker has no consequences for variance, but is necessary + // for dropck to understand that we logically own a `T`. + // + // For details, see: + // https://github.com/rust-lang/rfcs/blob/master/text/0769-sound-generic-drop.md#phantom-data + _marker: PhantomData, +} + +/// `Unique` pointers are `Send` if `T` is `Send` because the data they +/// reference is unaliased. Note that this aliasing invariant is +/// unenforced by the type system; the abstraction using the +/// `Unique` must enforce it. +unsafe impl Send for Unique {} + +/// `Unique` pointers are `Sync` if `T` is `Sync` because the data they +/// reference is unaliased. Note that this aliasing invariant is +/// unenforced by the type system; the abstraction using the +/// `Unique` must enforce it. +unsafe impl Sync for Unique {} + +impl Unique { + /// Creates a new `Unique` that is dangling, but well-aligned. + /// + /// This is useful for initializing types which lazily allocate, like + /// `Vec::new` does. + /// + /// Note that the pointer value may potentially represent a valid pointer to + /// a `T`, which means this must not be used as a "not yet initialized" + /// sentinel value. Types that lazily allocate must track initialization by + /// some other means. + #[must_use] + #[inline] + pub const fn dangling() -> Self { + // FIXME(const-hack) replace with `From` + Unique { + pointer: NonNull::dangling(), + _marker: PhantomData, + } + } +} + +impl Unique<[T]> { + /// Unique pointer for an empty slice. + #[must_use] + #[inline] + pub(crate) fn dangling_empty_slice() -> Self { + let pointer = NonNull::::dangling(); + + Unique { + pointer: NonNull::slice_from_raw_parts(pointer, 0), + _marker: PhantomData, + } + } +} + +impl Unique { + /// Creates a new `Unique`. + /// + /// # Safety + /// + /// `ptr` must be non-null. + #[inline] + pub const unsafe fn new_unchecked(ptr: *mut T) -> Self { + // SAFETY: the caller must guarantee that `ptr` is non-null. + unsafe { + Unique { + pointer: NonNull::new_unchecked(ptr), + _marker: PhantomData, + } + } + } + + /// Creates a new `Unique` if `ptr` is non-null. + #[inline] + pub fn new(ptr: *mut T) -> Option { + NonNull::new(ptr).map(|pointer| Unique { + pointer, + _marker: PhantomData, + }) + } + + /// Acquires the underlying `*mut` pointer. + #[must_use = "`self` will be dropped if the result is not used"] + #[inline] + pub const fn as_ptr(self) -> *mut T { + self.pointer.as_ptr() + } + + /// Dereferences the content. + /// + /// The resulting lifetime is bound to self so this behaves "as if" + /// it were actually an instance of T that is getting borrowed. If a longer + /// (unbound) lifetime is needed, use `&*my_ptr.as_ptr()`. + #[must_use] + #[inline] + pub unsafe fn as_ref(&self) -> &T { + // SAFETY: the caller must guarantee that `self` meets all the + // requirements for a reference. + unsafe { self.pointer.as_ref() } + } + + /// Mutably dereferences the content. + /// + /// The resulting lifetime is bound to self so this behaves "as if" + /// it were actually an instance of T that is getting borrowed. 
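The `PhantomData<T>` note above is the standard recipe for any owning pointer wrapper: a raw or `NonNull` pointer alone does not tell drop-check that a `T` is owned. A minimal sketch of the pattern over std's `NonNull` and `Box` (the `MyBox` type is invented for illustration):

```rust
use core::marker::PhantomData;
use core::ptr::NonNull;

// Pairing `NonNull<T>` with `PhantomData<T>` tells drop-check that
// dropping `MyBox<T>` may drop a `T`.
struct MyBox<T> {
    ptr: NonNull<T>,
    _marker: PhantomData<T>,
}

impl<T> MyBox<T> {
    fn new(value: T) -> Self {
        // Allocate through a real Box, then take ownership of the raw pointer.
        let raw = Box::into_raw(Box::new(value));
        Self {
            // SAFETY: `Box::into_raw` never returns null.
            ptr: unsafe { NonNull::new_unchecked(raw) },
            _marker: PhantomData,
        }
    }
}

impl<T> Drop for MyBox<T> {
    fn drop(&mut self) {
        // SAFETY: `ptr` came from `Box::into_raw` in `new` and is released
        // exactly once here.
        unsafe { drop(Box::from_raw(self.ptr.as_ptr())) }
    }
}

fn main() {
    let b = MyBox::new(String::from("hello"));
    drop(b);
}
```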
If a longer + /// (unbound) lifetime is needed, use `&mut *my_ptr.as_ptr()`. + #[must_use] + #[inline] + pub unsafe fn as_mut(&mut self) -> &mut T { + // SAFETY: the caller must guarantee that `self` meets all the + // requirements for a mutable reference. + unsafe { self.pointer.as_mut() } + } + + /// Casts to a pointer of another type. + #[must_use = "`self` will be dropped if the result is not used"] + #[inline] + pub const fn cast(self) -> Unique { + // FIXME(const-hack): replace with `From` + // SAFETY: is `NonNull` + unsafe { Unique::new_unchecked(self.pointer.cast().as_ptr()) } + } +} + +impl Clone for Unique { + #[inline] + fn clone(&self) -> Self { + *self + } +} + +impl Copy for Unique {} + +impl fmt::Debug for Unique { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Pointer::fmt(&self.as_ptr(), f) + } +} + +impl fmt::Pointer for Unique { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Pointer::fmt(&self.as_ptr(), f) + } +} + +impl From<&mut T> for Unique { + /// Converts a `&mut T` to a `Unique`. + /// + /// This conversion is infallible since references cannot be null. + #[inline] + fn from(reference: &mut T) -> Self { + Self::from(NonNull::from(reference)) + } +} + +impl From> for Unique { + /// Converts a `NonNull` to a `Unique`. + /// + /// This conversion is infallible since `NonNull` cannot be null. + #[inline] + fn from(pointer: NonNull) -> Self { + Unique { + pointer, + _marker: PhantomData, + } + } +} + +impl From> for NonNull { + #[inline] + fn from(unique: Unique) -> Self { + // SAFETY: A Unique pointer cannot be null, so the conditions for + // new_unchecked() are respected. + unsafe { NonNull::new_unchecked(unique.as_ptr()) } + } +} diff --git a/crates/rune-alloc/src/public_macros.rs b/crates/rune-alloc/src/public_macros.rs new file mode 100644 index 000000000..390d60986 --- /dev/null +++ b/crates/rune-alloc/src/public_macros.rs @@ -0,0 +1,30 @@ +#[macro_export] +macro_rules! try_vec { + () => ( + $crate::vec::Vec::new() + ); + + ($elem:expr; $n:expr) => ( + $crate::vec::try_from_elem($elem, $n)? + ); + + ($($x:expr),+ $(,)?) => ( + $crate::vec::into_vec( + // This rustc_box is not required, but it produces a dramatic improvement in compile + // time when constructing arrays with many elements. + $crate::boxed::Box::try_from([$($x),+])? + ) + ); +} + +#[macro_export] +macro_rules! try_format { + ($($tt:tt)*) => {{ + (|| { + use $crate::fmt::TryWrite; + let mut s = $crate::alloc::string::String::new(); + core::write!(s, $($tt)*)?; + Ok::<_, $crate::Error>(s) + })() + }}; +} diff --git a/crates/rune-alloc/src/slice.rs b/crates/rune-alloc/src/slice.rs new file mode 100644 index 000000000..ff17e2ee5 --- /dev/null +++ b/crates/rune-alloc/src/slice.rs @@ -0,0 +1,61 @@ +pub(crate) use self::iter::{RawIter, RawIterMut}; +pub(crate) mod iter; + +cfg_if! 
{ + if #[cfg(rune_nightly)] { + pub(crate) use core::slice::range; + } else { + use core::ops; + + #[must_use] + pub(crate) fn range(range: R, bounds: ops::RangeTo) -> ops::Range + where + R: ops::RangeBounds, + { + let len = bounds.end; + + let start: ops::Bound<&usize> = range.start_bound(); + let start = match start { + ops::Bound::Included(&start) => start, + ops::Bound::Excluded(start) => start + .checked_add(1) + .unwrap_or_else(|| slice_start_index_overflow_fail()), + ops::Bound::Unbounded => 0, + }; + + let end: ops::Bound<&usize> = range.end_bound(); + let end = match end { + ops::Bound::Included(end) => end + .checked_add(1) + .unwrap_or_else(|| slice_end_index_overflow_fail()), + ops::Bound::Excluded(&end) => end, + ops::Bound::Unbounded => len, + }; + + if start > end { + slice_index_order_fail(start, end); + } + if end > len { + slice_end_index_len_fail(end, len); + } + + ops::Range { start, end } + } + + const fn slice_start_index_overflow_fail() -> ! { + panic!("attempted to index slice from after maximum usize"); + } + + const fn slice_end_index_overflow_fail() -> ! { + panic!("attempted to index slice up to maximum usize"); + } + + fn slice_index_order_fail(index: usize, end: usize) -> ! { + panic!("slice index starts at {index} but ends at {end}"); + } + + fn slice_end_index_len_fail(index: usize, len: usize) -> ! { + panic!("range end index {index} out of range for slice of length {len}"); + } + } +} diff --git a/crates/rune-alloc/src/slice/iter.rs b/crates/rune-alloc/src/slice/iter.rs new file mode 100644 index 000000000..f7b0b662a --- /dev/null +++ b/crates/rune-alloc/src/slice/iter.rs @@ -0,0 +1,278 @@ +//! Definitions of a bunch of iterators for `[T]`. + +#![allow(unused_unsafe)] + +#[macro_use] +mod macros; + +use core::fmt; +use core::iter::FusedIterator; +use core::marker::{Send, Sized, Sync}; +use core::slice::{from_raw_parts, from_raw_parts_mut}; + +use crate::alloc::SizedTypeProperties; +use crate::hint::assume; +use crate::ptr::{self, invalid, invalid_mut, NonNull}; + +/// Immutable slice iterator +/// +/// This struct is created by the [`iter`] method on [slices]. +/// +/// # Examples +/// +/// Basic usage: +/// +/// ``` +/// // First, we declare a type which has `iter` method to get the `Iter` struct (`&[usize]` here): +/// let vec = rune_alloc::try_vec![1, 2, 3]; +/// +/// // Then, we iterate over it: +/// for element in vec.raw_iter() { +/// unsafe { +/// println!("{}", *element); +/// } +/// } +/// # Ok::<_, rune_alloc::Error>(()) +/// ``` +/// +/// [`iter`]: slice::iter +/// [slices]: slice +#[must_use = "iterators are lazy and do nothing unless consumed"] +pub struct RawIter { + /// The pointer to the next element to return, or the past-the-end location + /// if the iterator is empty. + /// + /// This address will be used for all ZST elements, never changed. + ptr: NonNull, + /// For non-ZSTs, the non-null pointer to the past-the-end element. + /// + /// For ZSTs, this is `ptr::invalid(len)`. + end_or_len: *const T, +} + +impl fmt::Debug for RawIter { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("Iter").finish() + } +} + +unsafe impl Sync for RawIter {} +unsafe impl Send for RawIter {} + +impl RawIter { + #[inline] + pub(crate) fn new(slice: &[T]) -> Self { + let ptr = slice.as_ptr(); + // SAFETY: Similar to `IterMut::new`. 
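The fallback `range` above normalizes any `RangeBounds<usize>` into a concrete `start..end`, panicking in the same situations. A simplified standalone version (the overflow checks on `+ 1` are elided here; `normalize` is a hypothetical name):

```rust
use std::ops::{Bound, Range, RangeBounds, RangeTo};

// Turn any `RangeBounds<usize>` into a concrete `start..end` against an
// upper bound, panicking on inverted or out-of-range inputs.
fn normalize<R: RangeBounds<usize>>(r: R, bounds: RangeTo<usize>) -> Range<usize> {
    let len = bounds.end;
    let start = match r.start_bound() {
        Bound::Included(&start) => start,
        Bound::Excluded(&start) => start + 1, // overflow check elided
        Bound::Unbounded => 0,
    };
    let end = match r.end_bound() {
        Bound::Included(&end) => end + 1, // overflow check elided
        Bound::Excluded(&end) => end,
        Bound::Unbounded => len,
    };
    assert!(start <= end, "slice index starts at {start} but ends at {end}");
    assert!(end <= len, "range end index {end} out of range for slice of length {len}");
    start..end
}

fn main() {
    assert_eq!(normalize(2..=4, ..10), 2..5);
    assert_eq!(normalize(.., ..3), 0..3);
}
```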
+ unsafe { + let end_or_len = if T::IS_ZST { + invalid(slice.len()) + } else { + ptr.add(slice.len()) + }; + + Self { + ptr: NonNull::new_unchecked(ptr as *mut T), + end_or_len, + } + } + } + + /// Views the underlying data as a subslice of the original data. + /// + /// This has the same lifetime as the original slice, and so the + /// iterator can continue to be used while this exists. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// // First, we declare a type which has the `iter` method to get the `Iter` + /// // struct (`&[usize]` here): + /// let slice = rune_alloc::try_vec![1, 2, 3]; + /// + /// unsafe { + /// // Then, we get the iterator: + /// let mut iter = slice.raw_iter(); + /// + /// // So if we print what `as_slice` method returns here, we have "[1, 2, 3]": + /// println!("{:?}", iter.as_slice()); + /// + /// // Next, we move to the second element of the slice: + /// iter.next(); + /// // Now `as_slice` returns "[2, 3]": + /// println!("{:?}", iter.as_slice()); + /// } + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[must_use] + #[inline] + pub unsafe fn as_slice<'a>(&self) -> &'a [T] { + self.make_slice() + } +} + +iterator! {struct RawIter -> *const T, *const T, const, {/* no mut */}, as_ref, {}} + +impl Clone for RawIter { + #[inline] + fn clone(&self) -> Self { + RawIter { + ptr: self.ptr, + end_or_len: self.end_or_len, + } + } +} + +/// Mutable slice iterator. +/// +/// This struct is created by the [`iter_mut`] method on [slices]. +/// +/// # Examples +/// +/// Basic usage: +/// +/// ``` +/// // First, we declare a type which has `iter_mut` method to get the `IterMut` +/// // struct (`&[usize]` here): +/// let mut slice = &mut [1, 2, 3]; +/// +/// // Then, we iterate over it and increment each element value: +/// for element in slice.iter_mut() { +/// *element += 1; +/// } +/// +/// // We now have "[2, 3, 4]": +/// println!("{slice:?}"); +/// ``` +/// +/// [`iter_mut`]: slice::iter_mut +/// [slices]: slice +#[must_use = "iterators are lazy and do nothing unless consumed"] +pub struct RawIterMut { + /// The pointer to the next element to return, or the past-the-end location + /// if the iterator is empty. + /// + /// This address will be used for all ZST elements, never changed. + ptr: NonNull, + /// For non-ZSTs, the non-null pointer to the past-the-end element. + /// + /// For ZSTs, this is `ptr::invalid_mut(len)`. + end_or_len: *mut T, +} + +impl fmt::Debug for RawIterMut { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("IterMut").finish() + } +} + +unsafe impl Sync for RawIterMut {} +unsafe impl Send for RawIterMut {} + +impl RawIterMut { + #[inline] + pub(crate) fn new(slice: &mut [T]) -> Self { + let ptr = slice.as_mut_ptr(); + // SAFETY: There are several things here: + // + // `ptr` has been obtained by `slice.as_ptr()` where `slice` is a valid + // reference thus it is non-NUL and safe to use and pass to + // `NonNull::new_unchecked` . + // + // Adding `slice.len()` to the starting pointer gives a pointer + // at the end of `slice`. `end` will never be dereferenced, only checked + // for direct pointer equality with `ptr` to check if the iterator is + // done. + // + // In the case of a ZST, the end pointer is just the length. It's never + // used as a pointer at all, and thus it's fine to have no provenance. + // + // See the `next_unchecked!` and `is_empty!` macros as well as the + // `post_inc_start` method for more information. 
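The `end_or_len` field above does double duty: a past-the-end pointer for sized types, a provenance-free length for ZSTs. The encoding in miniature, with a hypothetical `end_or_len_for` helper:

```rust
// For zero-sized element types the field stores the remaining *length*
// disguised as a pointer address, since a past-the-end pointer would be
// meaningless for ZSTs.
fn end_or_len_for<T>(ptr: *const T, len: usize) -> *const T {
    if std::mem::size_of::<T>() == 0 {
        // Provenance-free "pointer" that just encodes the count.
        len as *const T
    } else {
        // Real past-the-end pointer.
        ptr.wrapping_add(len)
    }
}

fn main() {
    let xs = [1u32, 2, 3];
    let end = end_or_len_for(xs.as_ptr(), xs.len());
    assert_eq!(end as usize - xs.as_ptr() as usize, 3 * 4);

    let zs = [(); 5];
    let len_carrier = end_or_len_for(zs.as_ptr(), zs.len());
    assert_eq!(len_carrier as usize, 5);
}
```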
+ unsafe { + let end_or_len = if T::IS_ZST { + invalid_mut(slice.len()) + } else { + ptr.add(slice.len()) + }; + + Self { + ptr: NonNull::new_unchecked(ptr), + end_or_len, + } + } + } + + /// Views the underlying data as a subslice of the original data. + /// + /// To avoid creating `&mut [T]` references that alias, the returned slice + /// borrows its lifetime from the iterator the method is applied on. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let mut slice = rune_alloc::try_vec![1, 2, 3]; + /// + /// unsafe { + /// // First, we get the iterator: + /// let mut iter = slice.raw_iter_mut(); + /// + /// // So if we check what the `as_slice` method returns here, we have "[1, 2, 3]": + /// assert_eq!(iter.as_slice(), &[1, 2, 3]); + /// + /// // Next, we move to the second element of the slice: + /// iter.next(); + /// // Now `as_slice` returns "[2, 3]": + /// assert_eq!(iter.as_slice(), &[2, 3]); + /// } + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[must_use] + #[inline] + pub unsafe fn as_slice<'a>(&self) -> &'a [T] { + self.make_slice() + } + + /// Views the underlying data as a mutable subslice of the original data. + /// + /// To avoid creating `&mut [T]` references that alias, the returned slice + /// borrows its lifetime from the iterator the method is applied on. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let mut slice = rune_alloc::try_vec![1, 2, 3]; + /// + /// unsafe { + /// // First, we get the iterator: + /// let mut iter = slice.raw_iter_mut(); + /// // Then, we get a mutable slice from it: + /// let mut_slice = iter.as_mut_slice(); + /// // So if we check what the `as_mut_slice` method returned, we have "[1, 2, 3]": + /// assert_eq!(mut_slice, &mut [1, 2, 3]); + /// + /// // We can use it to mutate the slice: + /// mut_slice[0] = 4; + /// mut_slice[2] = 5; + /// + /// // Next, we can move to the second element of the slice, checking that + /// // it yields the value we just wrote: + /// assert_eq!(iter.next(), Some(&mut 4)); + /// // Now `as_mut_slice` returns "[2, 5]": + /// assert_eq!(iter.as_mut_slice(), &mut [2, 5]); + /// } + /// # Ok::<_, rune_alloc::Error>(()) + /// ``` + #[must_use] + pub unsafe fn as_mut_slice<'a>(&mut self) -> &'a mut [T] { + from_raw_parts_mut(self.ptr.as_ptr(), len!(self)) + } +} + +iterator! {struct RawIterMut -> *mut T, *mut T, mut, {mut}, as_mut, {}} diff --git a/crates/rune-alloc/src/slice/iter/macros.rs b/crates/rune-alloc/src/slice/iter/macros.rs new file mode 100644 index 000000000..4c8a534b5 --- /dev/null +++ b/crates/rune-alloc/src/slice/iter/macros.rs @@ -0,0 +1,390 @@ +//! Macros used by iterators of slice. + +/// Convenience & performance macro for consuming the `end_or_len` field, by +/// giving a `(&mut) usize` or `(&mut) NonNull` depending whether `T` is +/// or is not a ZST respectively. +/// +/// Internally, this reads the `end` through a pointer-to-`NonNull` so that +/// it'll get the appropriate non-null metadata in the backend without needing +/// to call `assume` manually. +macro_rules! if_zst { + (mut $this:ident, $len:ident => $zst_body:expr, $end:ident => $other_body:expr,) => {{ + if T::IS_ZST { + // SAFETY: for ZSTs, the pointer is storing a provenance-free length, + // so consuming and updating it as a `usize` is fine. 
+ let $len = unsafe { &mut *ptr::addr_of_mut!($this.end_or_len).cast::() }; + $zst_body + } else { + // SAFETY: for non-ZSTs, the type invariant ensures it cannot be null + let $end = unsafe { &mut *ptr::addr_of_mut!($this.end_or_len).cast::>() }; + $other_body + } + }}; + ($this:ident, $len:ident => $zst_body:expr, $end:ident => $other_body:expr,) => {{ + if T::IS_ZST { + let $len = ptr::addr($this.end_or_len); + $zst_body + } else { + // SAFETY: for non-ZSTs, the type invariant ensures it cannot be null + let $end = unsafe { *ptr::addr_of!($this.end_or_len).cast::>() }; + $other_body + } + }}; +} + +// Inlining is_empty and len makes a huge performance difference +macro_rules! is_empty { + ($self: ident) => { + if_zst!($self, + len => len == 0, + end => $self.ptr == end, + ) + }; +} + +macro_rules! len { + ($self: ident) => {{ + if_zst!($self, + len => len, + end => { + // To get rid of some bounds checks (see `position`), we use ptr_sub instead of + // offset_from (Tested by `codegen/slice-position-bounds-check`.) + // SAFETY: by the type invariant pointers are aligned and `start <= end` + unsafe { ptr::sub_ptr(end.as_ptr(), $self.ptr.as_ptr()) } + }, + ) + }}; +} + +// The shared definition of the `Iter` and `IterMut` iterators +macro_rules! iterator { + ( + struct $name:ident -> $ptr:ty, + $elem:ty, + $raw_mut:tt, + {$( $mut_:tt )?}, + $into_ref:ident, + {$($extra:tt)*} + ) => { + // Returns the first element and moves the start of the iterator forwards by 1. + // Greatly improves performance compared to an inlined function. The iterator + // must not be empty. + macro_rules! next_unchecked { + ($self: ident) => { $self.post_inc_start(1).$into_ref() } + } + + // Returns the last element and moves the end of the iterator backwards by 1. + // Greatly improves performance compared to an inlined function. The iterator + // must not be empty. + macro_rules! next_back_unchecked { + ($self: ident) => { $self.pre_dec_end(1).$into_ref() } + } + + impl $name { + // Helper function for creating a slice from the iterator. + #[inline(always)] + unsafe fn make_slice<'a>(&self) -> &'a [T] { + // SAFETY: the iterator was created from a slice with pointer + // `self.ptr` and length `len!(self)`. This guarantees that all + // the prerequisites for `from_raw_parts` are fulfilled. + from_raw_parts(self.ptr.as_ptr(), len!(self)) + } + + // Helper function for moving the start of the iterator forwards by `offset` elements, + // returning the old start. + // Unsafe because the offset must not exceed `self.len()`. + #[inline(always)] + unsafe fn post_inc_start(&mut self, offset: usize) -> NonNull { + let old = self.ptr; + + // SAFETY: the caller guarantees that `offset` doesn't exceed `self.len()`, + // so this new pointer is inside `self` and thus guaranteed to be non-null. + unsafe { + if_zst!(mut self, + len => *len = len.wrapping_sub(offset), + _end => self.ptr = ptr::nonnull_add(self.ptr, offset), + ); + } + old + } + + // Helper function for moving the end of the iterator backwards by `offset` elements, + // returning the new end. + // Unsafe because the offset must not exceed `self.len()`. + #[inline(always)] + unsafe fn pre_dec_end(&mut self, offset: usize) -> NonNull { + if_zst!(mut self, + // SAFETY: By our precondition, `offset` can be at most the + // current length, so the subtraction can never overflow. 
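`is_empty!` and `len!` above decode the same two-pointer representation: emptiness is pointer equality, and length is an address difference divided by the element size. A toy safe-ish iterator for non-ZST elements showing that representation (`TwoPtrIter` is invented for illustration):

```rust
use std::marker::PhantomData;

// Two-pointer iterator for non-ZST elements, mirroring the `ptr`/`end`
// layout the macros above decode.
struct TwoPtrIter<'a, T> {
    ptr: *const T,
    end: *const T,
    _marker: PhantomData<&'a [T]>,
}

impl<'a, T> TwoPtrIter<'a, T> {
    fn new(slice: &'a [T]) -> Self {
        assert!(std::mem::size_of::<T>() != 0, "ZSTs use the length encoding instead");
        Self {
            ptr: slice.as_ptr(),
            end: slice.as_ptr().wrapping_add(slice.len()),
            _marker: PhantomData,
        }
    }

    fn len(&self) -> usize {
        (self.end as usize - self.ptr as usize) / std::mem::size_of::<T>()
    }
}

impl<'a, T> Iterator for TwoPtrIter<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        if self.ptr == self.end {
            return None;
        }
        // SAFETY: `ptr < end`, so it points at a live element of the
        // borrowed slice.
        let item = unsafe { &*self.ptr };
        self.ptr = self.ptr.wrapping_add(1);
        Some(item)
    }
}

fn main() {
    let xs = [10, 20, 30];
    let mut it = TwoPtrIter::new(&xs);
    assert_eq!(it.len(), 3);
    assert_eq!(it.next(), Some(&10));
    assert_eq!(it.len(), 2);
}
```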
+ len => unsafe { + *len = len.wrapping_sub(offset); + self.ptr + }, + // SAFETY: the caller guarantees that `offset` doesn't exceed `self.len()`, + // which is guaranteed to not overflow an `isize`. Also, the resulting pointer + // is in bounds of `slice`, which fulfills the other requirements for `offset`. + end => unsafe { + *end = ptr::nonnull_sub(*end, offset); + *end + }, + ) + } + } + + impl ExactSizeIterator for $name { + #[inline(always)] + fn len(&self) -> usize { + len!(self) + } + } + + impl Iterator for $name { + type Item = $elem; + + #[inline] + fn next(&mut self) -> Option<$elem> { + // could be implemented with slices, but this avoids bounds checks + + // SAFETY: The call to `next_unchecked!` is + // safe since we check if the iterator is empty first. + unsafe { + if is_empty!(self) { + None + } else { + Some(next_unchecked!(self)) + } + } + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let exact = len!(self); + (exact, Some(exact)) + } + + #[inline] + fn count(self) -> usize { + len!(self) + } + + #[inline] + fn nth(&mut self, n: usize) -> Option<$elem> { + if n >= len!(self) { + // This iterator is now empty. + if_zst!(mut self, + len => *len = 0, + end => self.ptr = *end, + ); + return None; + } + // SAFETY: We are in bounds. `post_inc_start` does the right thing even for ZSTs. + unsafe { + self.post_inc_start(n); + Some(next_unchecked!(self)) + } + } + + #[inline] + fn last(mut self) -> Option<$elem> { + self.next_back() + } + + #[inline] + fn fold(self, init: B, mut f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + // this implementation consists of the following optimizations compared to the + // default implementation: + // - do-while loop, as is llvm's preferred loop shape, + // see https://releases.llvm.org/16.0.0/docs/LoopTerminology.html#more-canonical-loops + // - bumps an index instead of a pointer since the latter case inhibits + // some optimizations, see #111603 + // - avoids Option wrapping/matching + if is_empty!(self) { + return init; + } + let mut acc = init; + let mut i = 0; + let len = len!(self); + loop { + // SAFETY: the loop iterates `i in 0..len`, which always is in bounds of + // the slice allocation + acc = f(acc, unsafe { & $( $mut_ )? *ptr::nonnull_add(self.ptr, i).as_ptr() }); + // SAFETY: `i` can't overflow since it'll only reach usize::MAX if the + // slice had that length, in which case we'll break out of the loop + // after the increment + i = unsafe { i.wrapping_add(1) }; + if i == len { + break; + } + } + acc + } + + // We override the default implementation, which uses `try_fold`, + // because this simple implementation generates less LLVM IR and is + // faster to compile. + #[inline] + fn for_each(mut self, mut f: F) + where + Self: Sized, + F: FnMut(Self::Item), + { + while let Some(x) = self.next() { + f(x); + } + } + + // We override the default implementation, which uses `try_fold`, + // because this simple implementation generates less LLVM IR and is + // faster to compile. + #[inline] + fn all(&mut self, mut f: F) -> bool + where + Self: Sized, + F: FnMut(Self::Item) -> bool, + { + while let Some(x) = self.next() { + if !f(x) { + return false; + } + } + true + } + + // We override the default implementation, which uses `try_fold`, + // because this simple implementation generates less LLVM IR and is + // faster to compile. 
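The `fold` above replaces the pointer walk with an index bump inside a do-while-shaped loop, which the comments note is friendlier to LLVM. The same shape restated over a plain slice (hypothetical `fold_by_index`):

```rust
// Do-while over an index rather than an advancing pointer; the early
// return handles the empty case so the loop body runs at least once.
fn fold_by_index<T, B>(xs: &[T], init: B, mut f: impl FnMut(B, &T) -> B) -> B {
    if xs.is_empty() {
        return init;
    }
    let mut acc = init;
    let mut i = 0;
    let len = xs.len();
    loop {
        acc = f(acc, &xs[i]);
        i += 1;
        if i == len {
            break;
        }
    }
    acc
}

fn main() {
    let total = fold_by_index(&[1, 2, 3, 4], 0, |acc, x| acc + x);
    assert_eq!(total, 10);
}
```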
+ #[inline] + fn any(&mut self, mut f: F) -> bool + where + Self: Sized, + F: FnMut(Self::Item) -> bool, + { + while let Some(x) = self.next() { + if f(x) { + return true; + } + } + false + } + + // We override the default implementation, which uses `try_fold`, + // because this simple implementation generates less LLVM IR and is + // faster to compile. + #[inline] + fn find
<P>
(&mut self, mut predicate: P) -> Option + where + Self: Sized, + P: FnMut(&Self::Item) -> bool, + { + while let Some(x) = self.next() { + if predicate(&x) { + return Some(x); + } + } + None + } + + // We override the default implementation, which uses `try_fold`, + // because this simple implementation generates less LLVM IR and is + // faster to compile. + #[inline] + fn find_map(&mut self, mut f: F) -> Option + where + Self: Sized, + F: FnMut(Self::Item) -> Option, + { + while let Some(x) = self.next() { + if let Some(y) = f(x) { + return Some(y); + } + } + None + } + + // We override the default implementation, which uses `try_fold`, + // because this simple implementation generates less LLVM IR and is + // faster to compile. Also, the `assume` avoids a bounds check. + #[inline] + fn position
<P>
(&mut self, mut predicate: P) -> Option where + Self: Sized, + P: FnMut(Self::Item) -> bool, + { + let n = len!(self); + let mut i = 0; + while let Some(x) = self.next() { + if predicate(x) { + // SAFETY: we are guaranteed to be in bounds by the loop invariant: + // when `i >= n`, `self.next()` returns `None` and the loop breaks. + unsafe { assume(i < n) }; + return Some(i); + } + i += 1; + } + None + } + + // We override the default implementation, which uses `try_fold`, + // because this simple implementation generates less LLVM IR and is + // faster to compile. Also, the `assume` avoids a bounds check. + #[inline] + fn rposition
<P>
(&mut self, mut predicate: P) -> Option where + P: FnMut(Self::Item) -> bool, + Self: Sized + ExactSizeIterator + DoubleEndedIterator + { + let n = len!(self); + let mut i = n; + while let Some(x) = self.next_back() { + i -= 1; + if predicate(x) { + // SAFETY: `i` must be lower than `n` since it starts at `n` + // and is only decreasing. + unsafe { assume(i < n) }; + return Some(i); + } + } + None + } + + $($extra)* + } + + impl DoubleEndedIterator for $name { + #[inline] + fn next_back(&mut self) -> Option<$elem> { + // could be implemented with slices, but this avoids bounds checks + + // SAFETY: The call to `next_back_unchecked!` + // is safe since we check if the iterator is empty first. + unsafe { + if is_empty!(self) { + None + } else { + Some(next_back_unchecked!(self)) + } + } + } + + #[inline] + fn nth_back(&mut self, n: usize) -> Option<$elem> { + if n >= len!(self) { + // This iterator is now empty. + if_zst!(mut self, + len => *len = 0, + end => *end = self.ptr, + ); + return None; + } + // SAFETY: We are in bounds. `pre_dec_end` does the right thing even for ZSTs. + unsafe { + self.pre_dec_end(n); + Some(next_back_unchecked!(self)) + } + } + } + + impl FusedIterator for $name {} + } +} diff --git a/crates/rune-alloc/src/testing/crash_test.rs b/crates/rune-alloc/src/testing/crash_test.rs new file mode 100644 index 000000000..e3cae3acb --- /dev/null +++ b/crates/rune-alloc/src/testing/crash_test.rs @@ -0,0 +1,142 @@ +// We avoid relying on anything else in the crate, apart from the `Debug` trait. + +use core::cmp::Ordering; +use core::fmt::Debug; +use core::sync::atomic::{AtomicUsize, Ordering::SeqCst}; + +use crate::alloc::{Error, TryClone}; + +/// A blueprint for crash test dummy instances that monitor particular events. +/// Some instances may be configured to panic at some point. +/// Events are `clone`, `drop` or some anonymous `query`. +/// +/// Crash test dummies are identified and ordered by an id, so they can be used +/// as keys in a BTreeMap. +#[derive(Debug)] +pub struct CrashTestDummy { + pub id: usize, + cloned: AtomicUsize, + dropped: AtomicUsize, + queried: AtomicUsize, +} + +impl CrashTestDummy { + /// Creates a crash test dummy design. The `id` determines order and equality of instances. + pub fn new(id: usize) -> CrashTestDummy { + CrashTestDummy { + id, + cloned: AtomicUsize::new(0), + dropped: AtomicUsize::new(0), + queried: AtomicUsize::new(0), + } + } + + /// Creates an instance of a crash test dummy that records what events it experiences + /// and optionally panics. + pub fn spawn(&self, panic: Panic) -> Instance<'_> { + Instance { + origin: self, + panic, + } + } + + /// Returns how many times instances of the dummy have been cloned. + pub fn cloned(&self) -> usize { + self.cloned.load(SeqCst) + } + + /// Returns how many times instances of the dummy have been dropped. + pub fn dropped(&self) -> usize { + self.dropped.load(SeqCst) + } + + /// Returns how many times instances of the dummy have had their `query` member invoked. + pub fn queried(&self) -> usize { + self.queried.load(SeqCst) + } +} + +#[derive(Debug)] +pub struct Instance<'a> { + origin: &'a CrashTestDummy, + panic: Panic, +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum Panic { + Never, + InClone, + InDrop, + InQuery, +} + +impl TryClone for Panic { + #[inline] + fn try_clone(&self) -> Result { + Ok(*self) + } +} + +impl Instance<'_> { + pub fn id(&self) -> usize { + self.origin.id + } + + /// Some anonymous query, the result of which is already given. 
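The crash-test dummies above exist so tests can assert that a collection cloned and dropped elements exactly the expected number of times, even across panics. A self-contained miniature of the drop-counting half, using only std atomics (the `Dummy` type is invented for illustration):

```rust
use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};

// Count drops through a shared counter and assert the container cleaned up
// every element.
struct Dummy<'a> {
    dropped: &'a AtomicUsize,
}

impl Drop for Dummy<'_> {
    fn drop(&mut self) {
        self.dropped.fetch_add(1, SeqCst);
    }
}

fn main() {
    let dropped = AtomicUsize::new(0);
    {
        let v = vec![Dummy { dropped: &dropped }, Dummy { dropped: &dropped }];
        drop(v);
    }
    assert_eq!(dropped.load(SeqCst), 2);
}
```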
+ pub fn query(&self, result: R) -> R { + self.origin.queried.fetch_add(1, SeqCst); + if self.panic == Panic::InQuery { + panic!("panic in `query`"); + } + result + } +} + +impl Clone for Instance<'_> { + fn clone(&self) -> Self { + self.origin.cloned.fetch_add(1, SeqCst); + if self.panic == Panic::InClone { + panic!("panic in `clone`"); + } + Self { + origin: self.origin, + panic: Panic::Never, + } + } +} + +impl TryClone for Instance<'_> { + #[inline] + fn try_clone(&self) -> Result { + Ok(self.clone()) + } +} + +impl Drop for Instance<'_> { + fn drop(&mut self) { + self.origin.dropped.fetch_add(1, SeqCst); + if self.panic == Panic::InDrop { + panic!("panic in `drop`"); + } + } +} + +impl PartialOrd for Instance<'_> { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for Instance<'_> { + fn cmp(&self, other: &Self) -> Ordering { + self.id().cmp(&other.id()) + } +} + +impl PartialEq for Instance<'_> { + fn eq(&self, other: &Self) -> bool { + self.id().eq(&other.id()) + } +} + +impl Eq for Instance<'_> {} diff --git a/crates/rune-alloc/src/testing/mod.rs b/crates/rune-alloc/src/testing/mod.rs new file mode 100644 index 000000000..7a094f8a5 --- /dev/null +++ b/crates/rune-alloc/src/testing/mod.rs @@ -0,0 +1,3 @@ +pub mod crash_test; +pub mod ord_chaos; +pub mod rng; diff --git a/crates/rune-alloc/src/testing/ord_chaos.rs b/crates/rune-alloc/src/testing/ord_chaos.rs new file mode 100644 index 000000000..75e197dd4 --- /dev/null +++ b/crates/rune-alloc/src/testing/ord_chaos.rs @@ -0,0 +1,89 @@ +#![allow(clippy::needless_borrow)] + +use core::cell::Cell; +use core::cmp::Ordering::{self, *}; +use core::ptr; + +// Minimal type with an `Ord` implementation violating transitivity. +#[derive(Debug)] +pub enum Cyclic3 { + A, + B, + C, +} +use Cyclic3::*; + +impl PartialOrd for Cyclic3 { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for Cyclic3 { + fn cmp(&self, other: &Self) -> Ordering { + match (self, other) { + (A, A) | (B, B) | (C, C) => Equal, + (A, B) | (B, C) | (C, A) => Less, + (A, C) | (B, A) | (C, B) => Greater, + } + } +} + +impl PartialEq for Cyclic3 { + fn eq(&self, other: &Self) -> bool { + self.cmp(&other) == Equal + } +} + +impl Eq for Cyclic3 {} + +// Controls the ordering of values wrapped by `Governed`. +#[derive(Debug)] +pub struct Governor { + flipped: Cell, +} + +impl Governor { + pub fn new() -> Self { + Governor { + flipped: Cell::new(false), + } + } + + pub fn flip(&self) { + self.flipped.set(!self.flipped.get()); + } +} + +// Type with an `Ord` implementation that forms a total order at any moment +// (assuming that `T` respects total order), but can suddenly be made to invert +// that total order. 
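The cycle in `Cyclic3`'s order above is easy to demonstrate: A < B and B < C, yet C < A. A standalone copy of the comparison table (an inherent `cmp3` is used here to avoid claiming a lawful `Ord`):

```rust
use std::cmp::Ordering::{self, *};

enum Cyclic3 { A, B, C }
use Cyclic3::*;

impl Cyclic3 {
    // Same table as above: each variant beats exactly one other.
    fn cmp3(&self, other: &Self) -> Ordering {
        match (self, other) {
            (A, A) | (B, B) | (C, C) => Equal,
            (A, B) | (B, C) | (C, A) => Less,
            (A, C) | (B, A) | (C, B) => Greater,
        }
    }
}

fn main() {
    assert_eq!(A.cmp3(&B), Less);
    assert_eq!(B.cmp3(&C), Less);
    // Transitivity would demand A < C, but the cycle closes instead.
    assert_eq!(A.cmp3(&C), Greater);
}
```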
diff --git a/crates/rune-alloc/src/testing/mod.rs b/crates/rune-alloc/src/testing/mod.rs
new file mode 100644
index 000000000..7a094f8a5
--- /dev/null
+++ b/crates/rune-alloc/src/testing/mod.rs
@@ -0,0 +1,3 @@
+pub mod crash_test;
+pub mod ord_chaos;
+pub mod rng;
diff --git a/crates/rune-alloc/src/testing/ord_chaos.rs b/crates/rune-alloc/src/testing/ord_chaos.rs
new file mode 100644
index 000000000..75e197dd4
--- /dev/null
+++ b/crates/rune-alloc/src/testing/ord_chaos.rs
@@ -0,0 +1,89 @@
+#![allow(clippy::needless_borrow)]
+
+use core::cell::Cell;
+use core::cmp::Ordering::{self, *};
+use core::ptr;
+
+// Minimal type with an `Ord` implementation violating transitivity.
+#[derive(Debug)]
+pub enum Cyclic3 {
+    A,
+    B,
+    C,
+}
+use Cyclic3::*;
+
+impl PartialOrd for Cyclic3 {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Ord for Cyclic3 {
+    fn cmp(&self, other: &Self) -> Ordering {
+        match (self, other) {
+            (A, A) | (B, B) | (C, C) => Equal,
+            (A, B) | (B, C) | (C, A) => Less,
+            (A, C) | (B, A) | (C, B) => Greater,
+        }
+    }
+}
+
+impl PartialEq for Cyclic3 {
+    fn eq(&self, other: &Self) -> bool {
+        self.cmp(&other) == Equal
+    }
+}
+
+impl Eq for Cyclic3 {}
+
+// Controls the ordering of values wrapped by `Governed`.
+#[derive(Debug)]
+pub struct Governor {
+    flipped: Cell<bool>,
+}
+
+impl Governor {
+    pub fn new() -> Self {
+        Governor {
+            flipped: Cell::new(false),
+        }
+    }
+
+    pub fn flip(&self) {
+        self.flipped.set(!self.flipped.get());
+    }
+}
+
+// Type with an `Ord` implementation that forms a total order at any moment
+// (assuming that `T` respects total order), but can suddenly be made to invert
+// that total order.
+#[derive(Debug)]
+pub struct Governed<'a, T>(pub T, pub &'a Governor);
+
+impl<T: Ord> PartialOrd for Governed<'_, T> {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl<T: Ord> Ord for Governed<'_, T> {
+    fn cmp(&self, other: &Self) -> Ordering {
+        assert!(ptr::eq(self.1, other.1));
+        let ord = self.0.cmp(&other.0);
+        if self.1.flipped.get() {
+            ord.reverse()
+        } else {
+            ord
+        }
+    }
+}
+
+impl<T: PartialEq> PartialEq for Governed<'_, T> {
+    fn eq(&self, other: &Self) -> bool {
+        assert!(ptr::eq(self.1, other.1));
+        self.0.eq(&other.0)
+    }
+}
+
+impl<T: Eq> Eq for Governed<'_, T> {}
diff --git a/crates/rune-alloc/src/testing/rng.rs b/crates/rune-alloc/src/testing/rng.rs
new file mode 100644
index 000000000..ffe5dd24f
--- /dev/null
+++ b/crates/rune-alloc/src/testing/rng.rs
@@ -0,0 +1,34 @@
+/// XorShiftRng
+pub struct DeterministicRng {
+    count: usize,
+    x: u32,
+    y: u32,
+    z: u32,
+    w: u32,
+}
+
+impl DeterministicRng {
+    pub fn new() -> Self {
+        DeterministicRng {
+            count: 0,
+            x: 0x193a6754,
+            y: 0xa8a7d469,
+            z: 0x97830e05,
+            w: 0x113ba7bb,
+        }
+    }
+
+    /// Guarantees that each returned number is unique.
+    pub fn next(&mut self) -> u32 {
+        self.count += 1;
+        assert!(self.count <= 70029);
+        let x = self.x;
+        let t = x ^ (x << 11);
+        self.x = self.y;
+        self.y = self.z;
+        self.z = self.w;
+        let w_ = self.w;
+        self.w = w_ ^ (w_ >> 19) ^ (t ^ (t >> 8));
+        self.w
+    }
+}
diff --git a/crates/rune-alloc/src/tests.rs b/crates/rune-alloc/src/tests.rs
new file mode 100644
index 000000000..9a3ffc6aa
--- /dev/null
+++ b/crates/rune-alloc/src/tests.rs
@@ -0,0 +1,14 @@
+use crate::{Error, Vec};
+
+#[test]
+fn test_vec_macro() -> Result<(), Error> {
+    let vec: Vec<u32> = try_vec![1, 2, 3];
+    assert_eq!(vec, [1, 2, 3]);
+
+    let vec: Vec<u32> = try_vec![1; 3];
+    assert_eq!(vec, [1, 1, 1]);
+
+    let vec: Vec<u32> = try_vec![];
+    assert_eq!(vec, []);
+    Ok(())
+}
diff --git a/crates/rune-alloc/third-party/.gitignore b/crates/rune-alloc/third-party/.gitignore
new file mode 100644
index 000000000..72e8ffc0d
--- /dev/null
+++ b/crates/rune-alloc/third-party/.gitignore
@@ -0,0 +1 @@
+*
diff --git a/crates/rune-alloc/tools/import.ps1 b/crates/rune-alloc/tools/import.ps1
new file mode 100644
index 000000000..57f7fb62f
--- /dev/null
+++ b/crates/rune-alloc/tools/import.ps1
@@ -0,0 +1,8 @@
+$Path = "D:\Repo\rust"
+Copy-Item $Path\library\alloc\src\collections\btree\ -Destination third-party -Recurse -Force
+Copy-Item $Path\library\alloc\src\vec\ -Destination third-party -Recurse -Force
+Copy-Item $Path\library\alloc\src\collections\vec_deque\ -Destination third-party -Recurse -Force
+Copy-Item $Path\library\alloc\src\testing\ -Destination third-party -Recurse -Force
+
+$Path = "D:\Repo\hashbrown"
+Copy-Item $Path\src\ -Destination third-party\hashbrown -Recurse -Force
diff --git a/crates/rune-core/Cargo.toml b/crates/rune-core/Cargo.toml
index 2dfc4dd77..cfdb7d9af 100644
--- a/crates/rune-core/Cargo.toml
+++ b/crates/rune-core/Cargo.toml
@@ -17,9 +17,11 @@ categories = ["parser-implementations"]
 default = ["alloc"]
 doc = []
 std = ["alloc"]
-alloc = ["serde/alloc"]
+alloc = ["serde/alloc", "rune-alloc/alloc"]
 
 [dependencies]
+rune-alloc = { path = "../rune-alloc", version = "0.12.3" }
+
 twox-hash = { version = "1.6.3", default-features = false }
 serde = { version = "1.0.163", default-features = false, features = ["derive"] }
 smallvec = { version = "1.10.0", default-features = false, features = ["const_new", "serde"] }
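This is the shape of the fallible API that rune-core picks up here: allocating operations return a `Result` instead of aborting on out-of-memory. A sketch of calling code, assuming `Vec`, `Error` and `TryClone` are exported from the crate root as in `src/tests.rs` above (the function itself is made up for illustration):

```rust
use rune_alloc::{Error, TryClone, Vec};

fn doubled(input: &Vec<u32>) -> Result<Vec<u32>, Error> {
    // Both the clone and each push can report allocation failure.
    let mut out = input.try_clone()?;
    for value in input.iter() {
        out.try_push(*value)?;
    }
    Ok(out)
}
```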
diff --git a/crates/rune-core/src/error.rs b/crates/rune-core/src/error.rs
index 5b4e9902b..69d2f309d 100644
--- a/crates/rune-core/src/error.rs
+++ b/crates/rune-core/src/error.rs
@@ -20,3 +20,5 @@ where
         (**self).source()
     }
 }
+
+impl Error for ::rune_alloc::Error {}
diff --git a/crates/rune-core/src/hash/into_hash.rs b/crates/rune-core/src/hash/into_hash.rs
index acb029cba..55e6bdd16 100644
--- a/crates/rune-core/src/hash/into_hash.rs
+++ b/crates/rune-core/src/hash/into_hash.rs
@@ -13,7 +13,8 @@ mod sealed {
     impl Sealed for Params {}
 }
 
-/// Trait for types which can be converted into a [Hash][crate::hash::Hash].
+/// Trait for types which can be converted into a
+/// [Hash][struct@crate::hash::Hash].
 pub trait IntoHash: self::sealed::Sealed {
     /// Convert current type into a hash.
     fn into_hash(self) -> Hash;
diff --git a/crates/rune-core/src/item/component.rs b/crates/rune-core/src/item/component.rs
index 4587869f8..bb58d07ba 100644
--- a/crates/rune-core/src/item/component.rs
+++ b/crates/rune-core/src/item/component.rs
@@ -6,6 +6,9 @@ use serde::{Deserialize, Serialize};
 
 use crate::item::ComponentRef;
 
+#[cfg(feature = "alloc")]
+use rune_alloc::{Error, TryClone};
+
 /// The component of an item.
 ///
 /// All indexes refer to sibling indexes. So two sibling id components could
@@ -49,3 +52,12 @@ impl fmt::Display for Component {
         }
     }
 }
+
+#[cfg(feature = "alloc")]
+impl TryClone for Component {
+    #[inline]
+    fn try_clone(&self) -> Result<Self, Error> {
+        // TODO: use fallible allocations for component.
+        Ok(self.clone())
+    }
+}
diff --git a/crates/rune-macros/src/any.rs b/crates/rune-macros/src/any.rs
index 2ee7aa13a..bcddfce68 100644
--- a/crates/rune-macros/src/any.rs
+++ b/crates/rune-macros/src/any.rs
@@ -658,11 +658,7 @@ where
             type Guard = #raw_into_ref;
 
             unsafe fn unsafe_to_ref<'a>(value: #value) -> #vm_result<(&'a Self, Self::Guard)> {
-                let (value, guard) = match value.into_any_ptr() {
-                    #vm_result::Ok(value) => value,
-                    #vm_result::Err(err) => return #vm_result::Err(err),
-                };
-
+                let (value, guard) = #vm_try!(value.into_any_ptr());
                 #vm_result::Ok((#non_null::as_ref(&value), guard))
             }
         }
@@ -672,11 +668,7 @@ where
             type Guard = #raw_into_mut;
 
             unsafe fn unsafe_to_mut<'a>(value: #value) -> #vm_result<(&'a mut Self, Self::Guard)> {
-                let (mut value, guard) = match value.into_any_mut() {
-                    #vm_result::Ok(value) => value,
-                    #vm_result::Err(err) => return #vm_result::Err(err),
-                };
-
+                let (mut value, guard) = #vm_try!(value.into_any_mut());
                 #vm_result::Ok((#non_null::as_mut(&mut value), guard))
             }
         }
@@ -686,7 +678,7 @@ where
             type Guard = #pointer_guard;
 
             unsafe fn unsafe_to_value(self) -> #vm_result<(#value, Self::Guard)> {
-                let (shared, guard) = #shared::from_ref(self);
+                let (shared, guard) = #vm_try!(#shared::from_ref(self));
                 #vm_result::Ok((#value::from(shared), guard))
             }
         }
@@ -696,7 +688,7 @@ where
             type Guard = #pointer_guard;
 
             unsafe fn unsafe_to_value(self) -> #vm_result<(#value, Self::Guard)> {
-                let (shared, guard) = #shared::from_mut(self);
+                let (shared, guard) = #vm_try!(#shared::from_mut(self));
                 #vm_result::Ok((#value::from(shared), guard))
             }
         }
diff --git a/crates/rune-macros/src/context.rs b/crates/rune-macros/src/context.rs
index 82dc5d142..b9e9ba38a 100644
--- a/crates/rune-macros/src/context.rs
+++ b/crates/rune-macros/src/context.rs
@@ -594,59 +594,61 @@ impl Context {
         Tokens {
             any_type_info: path(m, ["runtime", "AnyTypeInfo"]),
             any: path(m, ["Any"]),
+            box_: path(m, ["__private", "Box"]),
+            vec: path(m, ["alloc", "Vec"]),
+            clone: path(&core, ["clone", "Clone"]),
             compile_error: path(m, ["compile", "Error"]),
             context_error: path(m, ["compile", "ContextError"]),
+            double_ended_iterator: path(&core, ["iter", "DoubleEndedIterator"]),
["iter", "DoubleEndedIterator"]), from_value: path(m, ["runtime", "FromValue"]), - raw_ref: path(m, ["runtime", "RawRef"]), - raw_mut: path(m, ["runtime", "RawMut"]), - ref_: path(m, ["runtime", "Ref"]), - mut_: path(m, ["runtime", "Mut"]), - vm_try: path(m, ["vm_try"]), full_type_of: path(m, ["runtime", "FullTypeOf"]), hash: path(m, ["Hash"]), id: path(m, ["parse", "Id"]), install_with: path(m, ["__private", "InstallWith"]), + into_iterator: path(&core, ["iter", "IntoIterator"]), + iterator: path(&core, ["iter", "Iterator"]), macro_context: path(m, ["macros", "MacroContext"]), maybe_type_of: path(m, ["runtime", "MaybeTypeOf"]), module: path(m, ["__private", "Module"]), + mut_: path(m, ["runtime", "Mut"]), named: path(m, ["compile", "Named"]), + non_null: path(&core, ["ptr", "NonNull"]), object: path(m, ["runtime", "Object"]), opaque: path(m, ["parse", "Opaque"]), option_spanned: path(m, ["ast", "OptionSpanned"]), + option: path(&core, ["option", "Option"]), + owned_tuple: path(m, ["runtime", "OwnedTuple"]), parse: path(m, ["parse", "Parse"]), parser: path(m, ["parse", "Parser"]), pointer_guard: path(m, ["runtime", "SharedPointerGuard"]), protocol: path(m, ["runtime", "Protocol"]), raw_into_mut: path(m, ["runtime", "RawMut"]), raw_into_ref: path(m, ["runtime", "RawRef"]), + raw_mut: path(m, ["runtime", "RawMut"]), + raw_ref: path(m, ["runtime", "RawRef"]), raw_str: path(m, ["runtime", "RawStr"]), + ref_: path(m, ["runtime", "Ref"]), result: path(&core, ["result", "Result"]), shared: path(m, ["runtime", "Shared"]), span: path(m, ["ast", "Span"]), spanned: path(m, ["ast", "Spanned"]), + static_type_mod: path(m, ["runtime", "static_type"]), + string: path(m, ["alloc", "String"]), to_tokens: path(m, ["macros", "ToTokens"]), to_value: path(m, ["runtime", "ToValue"]), token_stream: path(m, ["macros", "TokenStream"]), - try_result: path(m, ["runtime", "try_result"]), - owned_tuple: path(m, ["runtime", "OwnedTuple"]), + try_from: path(&core, ["convert", "TryFrom"]), tuple: path(m, ["runtime", "Tuple"]), type_info: path(m, ["runtime", "TypeInfo"]), type_name: path(&core, ["any", "type_name"]), type_of: path(m, ["runtime", "TypeOf"]), - unsafe_to_value: path(m, ["runtime", "UnsafeToValue"]), - unsafe_to_ref: path(m, ["runtime", "UnsafeToRef"]), unsafe_to_mut: path(m, ["runtime", "UnsafeToMut"]), + unsafe_to_ref: path(m, ["runtime", "UnsafeToRef"]), + unsafe_to_value: path(m, ["runtime", "UnsafeToValue"]), value: path(m, ["runtime", "Value"]), variant_data: path(m, ["runtime", "VariantData"]), - vm_error: path(m, ["runtime", "VmError"]), vm_result: path(m, ["runtime", "VmResult"]), - into_iterator: path(&core, ["iter", "IntoIterator"]), - iterator: path(&core, ["iter", "Iterator"]), - double_ended_iterator: path(&core, ["iter", "DoubleEndedIterator"]), - option: path(&core, ["option", "Option"]), - non_null: path(&core, ["ptr", "NonNull"]), - box_: path(&alloc, ["boxed", "Box"]), - static_type_mod: path(m, ["runtime", "static_type"]), + vm_try: path(m, ["vm_try"]), } } } @@ -683,58 +685,60 @@ fn path(base: &syn::Path, path: [&'static str; N]) -> syn::Path pub(crate) struct Tokens { pub(crate) any_type_info: syn::Path, pub(crate) any: syn::Path, + pub(crate) box_: syn::Path, + pub(crate) vec: syn::Path, + pub(crate) clone: syn::Path, pub(crate) compile_error: syn::Path, pub(crate) context_error: syn::Path, + pub(crate) double_ended_iterator: syn::Path, pub(crate) from_value: syn::Path, pub(crate) full_type_of: syn::Path, pub(crate) hash: syn::Path, pub(crate) id: syn::Path, pub(crate) install_with: 
+    pub(crate) into_iterator: syn::Path,
+    pub(crate) iterator: syn::Path,
     pub(crate) macro_context: syn::Path,
     pub(crate) maybe_type_of: syn::Path,
     pub(crate) module: syn::Path,
+    pub(crate) mut_: syn::Path,
     pub(crate) named: syn::Path,
+    pub(crate) non_null: syn::Path,
     pub(crate) object: syn::Path,
     pub(crate) opaque: syn::Path,
     pub(crate) option_spanned: syn::Path,
+    pub(crate) option: syn::Path,
+    pub(crate) owned_tuple: syn::Path,
     pub(crate) parse: syn::Path,
     pub(crate) parser: syn::Path,
     pub(crate) pointer_guard: syn::Path,
     pub(crate) protocol: syn::Path,
     pub(crate) raw_into_mut: syn::Path,
     pub(crate) raw_into_ref: syn::Path,
+    pub(crate) raw_mut: syn::Path,
+    pub(crate) raw_ref: syn::Path,
     pub(crate) raw_str: syn::Path,
+    pub(crate) ref_: syn::Path,
     pub(crate) result: syn::Path,
     pub(crate) shared: syn::Path,
     pub(crate) span: syn::Path,
     pub(crate) spanned: syn::Path,
+    pub(crate) static_type_mod: syn::Path,
+    pub(crate) string: syn::Path,
     pub(crate) to_tokens: syn::Path,
     pub(crate) to_value: syn::Path,
     pub(crate) token_stream: syn::Path,
-    pub(crate) try_result: syn::Path,
-    pub(crate) owned_tuple: syn::Path,
+    pub(crate) try_from: syn::Path,
     pub(crate) tuple: syn::Path,
     pub(crate) type_info: syn::Path,
     pub(crate) type_name: syn::Path,
     pub(crate) type_of: syn::Path,
-    pub(crate) unsafe_to_value: syn::Path,
-    pub(crate) unsafe_to_ref: syn::Path,
     pub(crate) unsafe_to_mut: syn::Path,
+    pub(crate) unsafe_to_ref: syn::Path,
+    pub(crate) unsafe_to_value: syn::Path,
     pub(crate) value: syn::Path,
     pub(crate) variant_data: syn::Path,
-    pub(crate) vm_error: syn::Path,
     pub(crate) vm_result: syn::Path,
-    pub(crate) into_iterator: syn::Path,
-    pub(crate) iterator: syn::Path,
-    pub(crate) double_ended_iterator: syn::Path,
-    pub(crate) option: syn::Path,
-    pub(crate) non_null: syn::Path,
-    pub(crate) box_: syn::Path,
-    pub(crate) static_type_mod: syn::Path,
-    pub(crate) raw_ref: syn::Path,
-    pub(crate) raw_mut: syn::Path,
-    pub(crate) ref_: syn::Path,
-    pub(crate) mut_: syn::Path,
     pub(crate) vm_try: syn::Path,
 }
 
@@ -744,18 +748,4 @@ impl Tokens {
         let protocol = &self.protocol;
         quote!(#protocol::#sym)
     }
-
-    /// Expand a `vm_try!` expression.
-    pub(crate) fn vm_try(&self, expr: impl ToTokens) -> impl ToTokens {
-        let vm_result = &self.vm_result;
-        let vm_error = &self.vm_error;
-        let try_result = &self.try_result;
-
-        quote! {
-            match #try_result(#expr) {
-                #vm_result::Ok(value) => value,
-                #vm_result::Err(err) => return #vm_result::Err(#vm_error::from(err)),
-            }
-        }
-    }
 }
diff --git a/crates/rune-macros/src/from_value.rs b/crates/rune-macros/src/from_value.rs
index 249c11285..4655f546b 100644
--- a/crates/rune-macros/src/from_value.rs
+++ b/crates/rune-macros/src/from_value.rs
@@ -1,6 +1,6 @@
 use crate::context::{Context, Tokens};
 use proc_macro2::TokenStream;
-use quote::{quote, quote_spanned};
+use quote::quote_spanned;
 use syn::spanned::Spanned as _;
 
 struct Expander {
@@ -22,6 +22,7 @@ impl Expander {
             from_value,
             vm_result,
             tuple,
+            vm_try,
             ..
         } = &self.tokens;
 
@@ -41,7 +42,6 @@
             }
             syn::Fields::Unnamed(f) => {
                 let expanded = self.expand_unnamed(f)?;
-                let borrow_ref = self.tokens.vm_try(quote!(tuple.borrow_ref()));
 
                 let expanded = quote_spanned! {
                     f.span() =>
@@ -50,11 +50,11 @@
                         #vm_result::Ok(Self(#expanded))
                     }
                     #value::Tuple(tuple) => {
-                        let tuple = #borrow_ref;
+                        let tuple = #vm_try!(tuple.borrow_ref());
                         #vm_result::Ok(Self(#expanded))
                     }
                     #value::TupleStruct(tuple) => {
-                        let tuple = #borrow_ref;
+                        let tuple = #vm_try!(tuple.borrow_ref());
                         #vm_result::Ok(Self(#expanded))
                     }
                 };
@@ -63,16 +63,15 @@
             }
             syn::Fields::Named(f) => {
                 let expanded = self.expand_named(f)?;
-                let borrow_ref = self.tokens.vm_try(quote!(object.borrow_ref()));
 
                 let expanded = quote_spanned! {
                     f.span() =>
                     #value::Object(object) => {
-                        let object = #borrow_ref;
+                        let object = #vm_try!(object.borrow_ref());
                         #vm_result::Ok(Self { #expanded })
                     }
                     #value::Struct(object) => {
-                        let object = #borrow_ref;
+                        let object = #vm_try!(object.borrow_ref());
                         #vm_result::Ok(Self { #expanded })
                     }
                 };
@@ -81,8 +80,6 @@
             }
         };
 
-        let actual_type_info = self.tokens.vm_try(quote!(actual.type_info()));
-
         Ok(quote_spanned! { input.span() =>
             #[automatically_derived]
             impl #from_value for #ident {
@@ -90,7 +87,7 @@
                 match value {
                     #expanded
                     actual => {
-                        #vm_result::expected::<#expected>(#actual_type_info)
+                        #vm_result::expected::<#expected>(#vm_try!(actual.type_info()))
                     }
                 }
             }
@@ -115,6 +112,7 @@
             variant_data,
             value,
             vm_result,
+            vm_try,
             ..
         } = &self.tokens;
 
@@ -145,15 +143,13 @@
             }
         }
 
-        let borrow_ref = self.tokens.vm_try(quote!(variant.borrow_ref()));
-
         let missing = quote_spanned! { input.span() =>
             name => #vm_result::__rune_macros__missing_variant(name)
         };
 
         let variant = quote_spanned! { input.span() =>
             #value::Variant(variant) => {
-                let variant = #borrow_ref;
+                let variant = #vm_try!(variant.borrow_ref());
                 let mut it = variant.rtti().item.iter();
 
                 let name = match it.next_back_str() {
@@ -175,8 +171,6 @@
             }
         };
 
-        let actual_type_info = self.tokens.vm_try(quote!(actual.type_info()));
-
         Ok(quote_spanned! { input.span() =>
             #[automatically_derived]
             impl #from_value for #ident {
@@ -184,7 +178,7 @@
                 match value {
                     #variant,
                     actual => {
-                        #vm_result::__rune_macros__expected_variant(#actual_type_info)
+                        #vm_result::__rune_macros__expected_variant(#vm_try!(actual.type_info()))
                     }
                 }
             }
@@ -214,19 +208,18 @@
             from_value,
             vm_result,
             type_name,
+            vm_try,
+            clone,
             ..
         } = &self.tokens;
 
         for (index, field) in unnamed.unnamed.iter().enumerate() {
             let _ = self.cx.field_attrs(&field.attrs)?;
 
-            let from_value = self
-                .tokens
-                .vm_try(quote!(#from_value::from_value(value.clone())));
-
             from_values.push(quote_spanned! {
                 field.span() =>
                 match tuple.get(#index) {
-                    Some(value) => #from_value,
+                    Some(value) => #vm_try!(#from_value::from_value(#clone::clone(value))),
                     None => {
                         return #vm_result::__rune_macros__missing_tuple_index(#type_name::<Self>(), #index);
                     }
@@ -251,17 +244,14 @@
             from_value,
             vm_result,
             type_name,
+            vm_try,
             ..
         } = &self.tokens;
 
-        let from_value = self
-            .tokens
-            .vm_try(quote!(#from_value::from_value(value.clone())));
-
         from_values.push(quote_spanned! {
             field.span() =>
             #ident: match object.get(#name) {
-                Some(value) => #from_value,
+                Some(value) => #vm_try!(#from_value::from_value(value.clone())),
                 None => {
                     return #vm_result::__rune_macros__missing_struct_field(#type_name::<Self>(), #name);
                 }
diff --git a/crates/rune-macros/src/lib.rs b/crates/rune-macros/src/lib.rs
index 0d3bda9a7..360aaed10 100644
--- a/crates/rune-macros/src/lib.rs
+++ b/crates/rune-macros/src/lib.rs
@@ -243,6 +243,26 @@ pub fn instrument(
     .into()
 }
 
+/// Shim for an ignored `#[stable]` attribute.
+#[proc_macro_attribute]
+#[doc(hidden)]
+pub fn stable(
+    _attr: proc_macro::TokenStream,
+    item: proc_macro::TokenStream,
+) -> proc_macro::TokenStream {
+    item
+}
+
+/// Shim for an ignored `#[unstable]` attribute.
+#[proc_macro_attribute]
+#[doc(hidden)]
+pub fn unstable(
+    _attr: proc_macro::TokenStream,
+    item: proc_macro::TokenStream,
+) -> proc_macro::TokenStream {
+    item
+}
+
 #[proc_macro_derive(InstDisplay, attributes(inst_display))]
 #[doc(hidden)]
 pub fn inst_display(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
diff --git a/crates/rune-macros/src/to_value.rs b/crates/rune-macros/src/to_value.rs
index dea9602fe..3eafaffb4 100644
--- a/crates/rune-macros/src/to_value.rs
+++ b/crates/rune-macros/src/to_value.rs
@@ -1,7 +1,6 @@
 use crate::context::{Context, Tokens};
 use proc_macro2::TokenStream;
-use quote::{quote, quote_spanned};
-use syn::spanned::Spanned as _;
+use quote::quote;
 
 struct Expander {
     cx: Context,
@@ -26,7 +25,7 @@ impl Expander {
             ..
         } = &self.tokens;
 
-        Ok(quote_spanned! { input.span() =>
+        Ok(quote! {
             #[automatically_derived]
             impl #to_value for #ident {
                 fn to_value(self) -> #vm_result<#value> {
@@ -60,23 +59,25 @@
             value,
             owned_tuple,
             vm_result,
+            vm_try,
+            try_from,
+            vec,
             ..
         } = &self.tokens;
 
         for (index, f) in unnamed.unnamed.iter().enumerate() {
             let _ = self.cx.field_attrs(&f.attrs)?;
             let index = syn::Index::from(index);
-            let to_value = self.tokens.vm_try(quote!(#to_value::to_value(self.#index)));
-            to_values.push(quote_spanned!(f.span() => tuple.push(#to_value)));
+            to_values.push(quote!(#vm_try!(#vec::try_push(&mut tuple, #vm_try!(#to_value::to_value(self.#index))))));
         }
 
         let cap = unnamed.unnamed.len();
 
-        Ok(quote_spanned! {
-            unnamed.span() =>
-            let mut tuple = Vec::with_capacity(#cap);
+        Ok(quote! {
+            let mut tuple = #vm_try!(#vec::try_with_capacity(#cap));
             #(#to_values;)*
-            #vm_result::Ok(#value::from(#owned_tuple::from(tuple)))
+            let tuple = #vm_try!(<#owned_tuple as #try_from<_>>::try_from(tuple));
+            #vm_result::Ok(#vm_try!(<#value as #try_from<_>>::try_from(tuple)))
         })
     }
 
@@ -87,6 +88,9 @@
             value,
             object,
             vm_result,
+            vm_try,
+            string,
+            try_from,
             ..
         } = &self.tokens;
 
@@ -97,16 +101,16 @@
             let _ = self.cx.field_attrs(&f.attrs)?;
             let name = &syn::LitStr::new(&ident.to_string(), ident.span());
 
-            let to_value = self.tokens.vm_try(quote!(#to_value::to_value(self.#ident)));
-            to_values
-                .push(quote_spanned!(f.span() => object.insert(String::from(#name), #to_value)));
+
+            to_values.push(quote! {
+                object.insert(#vm_try!(<#string as #try_from<_>>::try_from(#name)), #vm_try!(#to_value::to_value(self.#ident)))
+            });
         }
 
-        Ok(quote_spanned! {
-            named.span() =>
+        Ok(quote! {
             let mut object = <#object>::new();
             #(#to_values;)*
-            #vm_result::Ok(#value::from(object))
+            #vm_result::Ok(#vm_try!(<#value as #try_from<_>>::try_from(object)))
         })
     }
 }
diff --git a/crates/rune-modules/src/http.rs b/crates/rune-modules/src/http.rs
index 12682d2a2..d3b535738 100644
--- a/crates/rune-modules/src/http.rs
+++ b/crates/rune-modules/src/http.rs
@@ -49,9 +49,8 @@
 //! ```
 
 use rune::{Any, Module, Value, ContextError};
-use rune::runtime::{Bytes, Ref, Formatter};
-use std::fmt;
-use std::fmt::Write;
+use rune::runtime::{Bytes, Ref, Formatter, VmResult};
+use rune::alloc::TryWrite;
 
 /// Construct the `http` module.
 pub fn module(_stdio: bool) -> Result<Module, ContextError> {
@@ -96,8 +95,9 @@ impl From<reqwest::Error> for Error {
 
 impl Error {
     #[rune::function(instance, protocol = STRING_DISPLAY)]
-    fn string_display(&self, f: &mut Formatter) -> fmt::Result {
-        write!(f, "{}", self.inner)
+    fn string_display(&self, f: &mut Formatter) -> VmResult<()> {
+        rune::vm_write!(f, "{}", self.inner);
+        VmResult::Ok(())
     }
 }
 
@@ -146,8 +146,9 @@ pub struct StatusCode {
 
 impl StatusCode {
     #[rune::function(instance, protocol = STRING_DISPLAY)]
-    fn string_display(&self, f: &mut Formatter) -> fmt::Result {
-        write!(f, "{}", self.inner)
+    fn string_display(&self, f: &mut Formatter) -> VmResult<()> {
+        rune::vm_write!(f, "{}", self.inner);
+        VmResult::Ok(())
     }
 }
diff --git a/crates/rune-modules/src/process.rs b/crates/rune-modules/src/process.rs
index 0136e8fb1..086756697 100644
--- a/crates/rune-modules/src/process.rs
+++ b/crates/rune-modules/src/process.rs
@@ -31,7 +31,7 @@
 
 use rune::{Any, Module, ContextError};
 use rune::runtime::{Bytes, Shared, Value, VmResult, Formatter};
-use std::fmt;
+use rune::alloc::TryWrite;
 
 use std::io;
 use tokio::process;
@@ -129,8 +129,8 @@ impl Child {
 
         VmResult::Ok(Ok(Output {
             status: ExitStatus { status: output.status },
-            stdout: Shared::new(Bytes::from_vec(output.stdout)),
-            stderr: Shared::new(Bytes::from_vec(output.stderr)),
+            stdout: rune::vm_try!(Shared::new(Bytes::from_vec(output.stdout))),
+            stderr: rune::vm_try!(Shared::new(Bytes::from_vec(output.stderr))),
         }))
     }
 }
@@ -154,9 +154,9 @@ struct ExitStatus {
 
 impl ExitStatus {
     #[rune::function(protocol = STRING_DISPLAY)]
-    fn string_display(&self, f: &mut Formatter) -> fmt::Result {
-        use std::fmt::Write as _;
-        write!(f, "{}", self.status)
+    fn string_display(&self, f: &mut Formatter) -> VmResult<()> {
+        rune::vm_write!(f, "{}", self.status);
+        VmResult::Ok(())
     }
 
     #[rune::function]
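Both modules above follow the same mechanical migration for `STRING_DISPLAY` handlers: `fmt::Result` plus `write!` becomes `VmResult<()>` plus `vm_write!`, so formatting can also surface allocation errors. A sketch of the convention for a user-defined type (`Greeting` is made up for illustration; the attribute and imports mirror the http/process changes):

```rust
use rune::alloc::TryWrite;
use rune::runtime::{Formatter, VmResult};
use rune::Any;

#[derive(Any)]
struct Greeting {
    name: String,
}

impl Greeting {
    #[rune::function(instance, protocol = STRING_DISPLAY)]
    fn string_display(&self, f: &mut Formatter) -> VmResult<()> {
        // vm_write! early-returns VmResult::Err on failure.
        rune::vm_write!(f, "Hello, {}!", self.name);
        VmResult::Ok(())
    }
}
```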
features = ["alloc"] } @@ -45,6 +46,7 @@ tracing = { version = "0.1.37", default-features = false, features = ["attribut hashbrown = { version = "0.14.0", features = ["serde"] } musli = { version = "0.0.42", default-features = false, features = ["alloc"] } slab = { version = "0.4.8", default-features = false } +once_cell = { version = "1.18.0", default-features = false, features = ["critical-section"] } musli-storage = { version = "0.0.42", default-features = false, optional = true, features = ["alloc"] } anyhow = { version = "1.0.71", features = ["std"], optional = true } @@ -74,7 +76,6 @@ similar = { version = "2.2.1", optional = true, features = ["inline", "bytes"] } sha2 = { version = "0.10.6", optional = true } base64 = { version = "0.21.0", optional = true } rand = { version = "0.8.5", optional = true } -lazy_static = { version = "1.4.0", optional = true } [dev-dependencies] tokio = { version = "1.28.1", features = ["full"] } diff --git a/crates/rune/src/build.rs b/crates/rune/src/build.rs index 88602845b..616dcd1b3 100644 --- a/crates/rune/src/build.rs +++ b/crates/rune/src/build.rs @@ -6,7 +6,9 @@ use crate::no_std::prelude::*; use crate::ast::{Span, Spanned}; use crate::compile; -use crate::compile::{CompileVisitor, FileSourceLoader, Located, Options, Pool, SourceLoader}; +use crate::compile::{ + CompileVisitor, FileSourceLoader, Located, MetaError, Options, Pool, SourceLoader, +}; use crate::runtime::unit::{DefaultStorage, UnitEncoder}; use crate::runtime::Unit; use crate::{Context, Diagnostics, SourceId, Sources}; @@ -108,10 +110,12 @@ struct CompileVisitorGroup<'a> { } impl<'a> compile::CompileVisitor for CompileVisitorGroup<'a> { - fn register_meta(&mut self, meta: compile::MetaRef<'_>) { + fn register_meta(&mut self, meta: compile::MetaRef<'_>) -> Result<(), MetaError> { for v in self.visitors.iter_mut() { - v.register_meta(meta) + v.register_meta(meta)?; } + + Ok(()) } fn visit_meta(&mut self, location: &dyn Located, meta: compile::MetaRef<'_>) { diff --git a/crates/rune/src/cli/doc.rs b/crates/rune/src/cli/doc.rs index 08712a1b4..b73e5879a 100644 --- a/crates/rune/src/cli/doc.rs +++ b/crates/rune/src/cli/doc.rs @@ -81,7 +81,7 @@ where let name = naming.name(&e); let item = ItemBuf::with_crate(&name); - let mut visitor = crate::doc::Visitor::new(item); + let mut visitor = crate::doc::Visitor::new(item)?; let mut sources = Sources::new(); let source = Source::from_path(e.path()) .with_context(|| e.path().display().to_string())?; diff --git a/crates/rune/src/cli/run.rs b/crates/rune/src/cli/run.rs index 68db5559c..e6f340060 100644 --- a/crates/rune/src/cli/run.rs +++ b/crates/rune/src/cli/run.rs @@ -309,7 +309,7 @@ pub(super) async fn run( /// Perform a detailed trace of the program. 
diff --git a/crates/rune/src/cli/run.rs b/crates/rune/src/cli/run.rs
index 68db5559c..e6f340060 100644
--- a/crates/rune/src/cli/run.rs
+++ b/crates/rune/src/cli/run.rs
@@ -309,7 +309,7 @@ pub(super) async fn run(
 
 /// Perform a detailed trace of the program.
 async fn do_trace<T>(
-    io: &mut Io<'_>,
+    io: &Io<'_>,
     execution: &mut VmExecution<T>,
     sources: &Sources,
     dump_stack: bool,
diff --git a/crates/rune/src/cli/tests.rs b/crates/rune/src/cli/tests.rs
index fb8268e88..fb87ce0d1 100644
--- a/crates/rune/src/cli/tests.rs
+++ b/crates/rune/src/cli/tests.rs
@@ -112,7 +112,7 @@ where
         Diagnostics::without_warnings()
     };
 
-    let mut doc_visitor = crate::doc::Visitor::new(item);
+    let mut doc_visitor = crate::doc::Visitor::new(item)?;
     let mut functions = visitor::FunctionVisitor::new(visitor::Attribute::Test);
     let mut source_loader = FileSourceLoader::new();
diff --git a/crates/rune/src/cli/visitor.rs b/crates/rune/src/cli/visitor.rs
index 0c71e8975..01860ef7c 100644
--- a/crates/rune/src/cli/visitor.rs
+++ b/crates/rune/src/cli/visitor.rs
@@ -1,7 +1,7 @@
 use crate::no_std::prelude::*;
 
 use crate::compile::meta;
-use crate::compile::{CompileVisitor, ItemBuf, MetaRef};
+use crate::compile::{MetaError, CompileVisitor, ItemBuf, MetaRef};
 use crate::Hash;
 
 /// Attribute to collect.
@@ -36,13 +36,14 @@ impl FunctionVisitor {
 }
 
 impl CompileVisitor for FunctionVisitor {
-    fn register_meta(&mut self, meta: MetaRef<'_>) {
+    fn register_meta(&mut self, meta: MetaRef<'_>) -> Result<(), MetaError> {
         let type_hash = match (self.attribute, &meta.kind) {
             (Attribute::Test, meta::Kind::Function { is_test, .. }) if *is_test => meta.hash,
             (Attribute::Bench, meta::Kind::Function { is_bench, .. }) if *is_bench => meta.hash,
-            _ => return,
+            _ => return Ok(()),
         };
 
         self.functions.push((type_hash, meta.item.to_owned()));
+        Ok(())
     }
 }
diff --git a/crates/rune/src/compile.rs b/crates/rune/src/compile.rs
index a6bb9695e..8e338d231 100644
--- a/crates/rune/src/compile.rs
+++ b/crates/rune/src/compile.rs
@@ -9,7 +9,7 @@ pub(crate) use self::assembly::{Assembly, AssemblyInst};
 pub(crate) mod attrs;
 
 pub(crate) mod error;
-pub use self::error::{Error, ImportStep};
+pub use self::error::{Error, ImportStep, MetaError};
 pub(crate) use self::error::{ErrorKind, IrErrorKind};
 
 mod compile_visitor;
diff --git a/crates/rune/src/compile/compile_visitor.rs b/crates/rune/src/compile/compile_visitor.rs
index e08f5a6e7..7b69af5f8 100644
--- a/crates/rune/src/compile/compile_visitor.rs
+++ b/crates/rune/src/compile/compile_visitor.rs
@@ -1,12 +1,14 @@
 use crate::ast::Spanned;
-use crate::compile::{Item, Located, MetaRef};
+use crate::compile::{Item, Located, MetaError, MetaRef};
 use crate::hash::Hash;
 use crate::SourceId;
 
 /// A visitor that will be called for every language item compiled.
 pub trait CompileVisitor {
     /// Called when a meta item is registered.
-    fn register_meta(&mut self, _meta: MetaRef<'_>) {}
+    fn register_meta(&mut self, _meta: MetaRef<'_>) -> Result<(), MetaError> {
+        Ok(())
+    }
 
     /// Mark that we've resolved a specific compile meta at the given location.
     fn visit_meta(&mut self, _location: &dyn Located, _meta: MetaRef<'_>) {}
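With the trait change above, implementors return `Result<(), MetaError>` and the default implementation is `Ok(())`. A sketch of a downstream visitor under the new contract (the collector type is made up; the trait, the `MetaError` re-export and `meta.hash` are from this patch):

```rust
use rune::compile::{CompileVisitor, MetaError, MetaRef};

#[derive(Default)]
struct HashCollector {
    hashes: Vec<rune::Hash>,
}

impl CompileVisitor for HashCollector {
    fn register_meta(&mut self, meta: MetaRef<'_>) -> Result<(), MetaError> {
        // Infallible bookkeeping still ends with Ok(()).
        self.hashes.push(meta.hash);
        Ok(())
    }
}
```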
diff --git a/crates/rune/src/compile/context.rs b/crates/rune/src/compile/context.rs
index 614c0898a..702fd1ed5 100644
--- a/crates/rune/src/compile/context.rs
+++ b/crates/rune/src/compile/context.rs
@@ -372,7 +372,7 @@ impl Context {
     /// Install the given meta.
     fn install_meta(&mut self, meta: ContextMeta) -> Result<(), ContextError> {
         if let Some(item) = &meta.item {
-            self.names.insert(item);
+            self.names.insert(item)?;
 
             self.item_to_hash
                 .entry(item.clone())
@@ -395,7 +395,7 @@ impl Context {
     /// Install a module, ensuring that its meta is defined.
     fn install_module(&mut self, m: &Module) -> Result<(), ContextError> {
-        self.names.insert(&m.item);
+        self.names.insert(&m.item)?;
 
         let mut current = Some((m.item.as_ref(), Some(&m.docs)));
@@ -608,7 +608,7 @@ impl Context {
         f: &ModuleFunction,
     ) -> Result<(), ContextError> {
         let item = module.item.join(&f.item);
-        self.names.insert(&item);
+        self.names.insert(&item)?;
 
         let hash = Hash::type_hash(&item);
diff --git a/crates/rune/src/compile/context_error.rs b/crates/rune/src/compile/context_error.rs
index 6f6f4cb06..a3d781ae5 100644
--- a/crates/rune/src/compile/context_error.rs
+++ b/crates/rune/src/compile/context_error.rs
@@ -2,6 +2,7 @@ use core::fmt;
 
 use crate::no_std::prelude::*;
 
+use crate::alloc::AllocError;
 use crate::compile::ItemBuf;
 use crate::runtime::{TypeInfo, VmError};
 use crate::Hash;
@@ -11,6 +12,9 @@ use crate::Hash;
 #[allow(missing_docs)]
 #[non_exhaustive]
 pub enum ContextError {
+    AllocError {
+        error: AllocError,
+    },
     UnitAlreadyPresent,
     InternalAlreadyPresent {
         name: &'static str,
@@ -111,9 +115,18 @@ pub enum ContextError {
     },
 }
 
+impl From<AllocError> for ContextError {
+    fn from(error: AllocError) -> Self {
+        ContextError::AllocError { error }
+    }
+}
+
 impl fmt::Display for ContextError {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
+            ContextError::AllocError { error } => {
+                error.fmt(f)?;
+            }
             ContextError::UnitAlreadyPresent {} => {
                 write!(f, "Unit `()` type is already present")?;
             }
diff --git a/crates/rune/src/compile/error.rs b/crates/rune/src/compile/error.rs
index f77d41167..0035e82b7 100644
--- a/crates/rune/src/compile/error.rs
+++ b/crates/rune/src/compile/error.rs
@@ -4,6 +4,7 @@ use crate::no_std::io;
 use crate::no_std::path::PathBuf;
 use crate::no_std::prelude::*;
 
+use crate::alloc::AllocError;
 use crate::ast;
 use crate::ast::unescape;
 use crate::ast::{Span, Spanned};
@@ -193,8 +194,11 @@ pub(crate) enum ErrorKind {
     Unsupported {
         what: Expectation,
     },
+    AllocError {
+        error: rune_alloc::Error,
+    },
     IrError(IrErrorKind),
-    MetaConflict(MetaConflict),
+    MetaError(MetaError),
     AccessError(AccessError),
     EncodeError(EncodeError),
     MissingLastId(MissingLastId),
@@ -479,8 +483,9 @@ pub(crate) enum ErrorKind {
 impl crate::no_std::error::Error for ErrorKind {
     fn source(&self) -> Option<&(dyn crate::no_std::error::Error + 'static)> {
         match self {
+            ErrorKind::AllocError { error, .. } => Some(error),
             ErrorKind::IrError(source) => Some(source),
-            ErrorKind::MetaConflict(source) => Some(source),
+            ErrorKind::MetaError(source) => Some(source),
             ErrorKind::AccessError(source) => Some(source),
             ErrorKind::EncodeError(source) => Some(source),
             ErrorKind::MissingLastId(source) => Some(source),
@@ -512,10 +517,13 @@ impl fmt::Display for ErrorKind {
             ErrorKind::Unsupported { what } => {
                 write!(f, "Unsupported `{what}`", what = what)?;
             }
+            ErrorKind::AllocError { error } => {
+                error.fmt(f)?;
+            }
             ErrorKind::IrError(error) => {
                 error.fmt(f)?;
             }
-            ErrorKind::MetaConflict(error) => {
+            ErrorKind::MetaError(error) => {
                 error.fmt(f)?;
             }
             ErrorKind::AccessError(error) => {
@@ -989,24 +997,40 @@
     }
 }
 
+impl From<AllocError> for ErrorKind {
+    #[inline]
+    fn from(error: AllocError) -> Self {
+        ErrorKind::AllocError {
+            error: error.into(),
+        }
+    }
+}
+
+impl From<rune_alloc::Error> for ErrorKind {
+    #[inline]
+    fn from(error: rune_alloc::Error) -> Self {
+        ErrorKind::AllocError { error }
+    }
+}
+
 impl From<IrErrorKind> for ErrorKind {
     #[inline]
-    fn from(source: IrErrorKind) -> Self {
-        ErrorKind::IrError(source)
+    fn from(error: IrErrorKind) -> Self {
+        ErrorKind::IrError(error)
     }
 }
 
-impl From<MetaConflict> for ErrorKind {
+impl From<MetaError> for ErrorKind {
     #[inline]
-    fn from(source: MetaConflict) -> Self {
-        ErrorKind::MetaConflict(source)
+    fn from(error: MetaError) -> Self {
+        ErrorKind::MetaError(error)
     }
 }
 
 impl From<AccessError> for ErrorKind {
     #[inline]
-    fn from(source: AccessError) -> Self {
-        ErrorKind::AccessError(source)
+    fn from(error: AccessError) -> Self {
+        ErrorKind::AccessError(error)
     }
 }
 
@@ -1161,29 +1185,63 @@ pub struct ImportStep {
     pub item: ItemBuf,
 }
 
+/// A meta error.
+#[derive(Debug)]
+pub struct MetaError {
+    kind: Box<MetaErrorKind>,
+}
+
+impl MetaError {
+    /// Construct a new meta error.
+    pub(crate) fn new<E>(kind: E) -> Self
+    where
+        MetaErrorKind: From<E>,
+    {
+        Self {
+            kind: Box::new(kind.into()),
+        }
+    }
+}
+
+impl From<AllocError> for MetaError {
+    #[inline]
+    fn from(error: AllocError) -> Self {
+        Self::new(MetaErrorKind::AllocError { error })
+    }
+}
+
 #[derive(Debug)]
 /// Tried to add an item that already exists.
-pub(crate) struct MetaConflict {
-    /// The meta we tried to insert.
-    pub(crate) current: MetaInfo,
-    /// The existing item.
-    pub(crate) existing: MetaInfo,
-    /// Parameters hash.
-    pub(crate) parameters: Hash,
+pub(crate) enum MetaErrorKind {
+    AllocError {
+        error: AllocError,
+    },
+    MetaConflict {
+        /// The meta we tried to insert.
+        current: MetaInfo,
+        /// The existing item.
+        existing: MetaInfo,
+        /// Parameters hash.
+        parameters: Hash,
+    },
 }
 
-impl fmt::Display for MetaConflict {
+impl fmt::Display for MetaError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let MetaConflict {
-            current,
-            existing,
-            parameters,
-        } = self;
-        write!(f, "Can't insert item `{current}` ({parameters}) because conflicting meta `{existing}` already exists")
+        match &*self.kind {
+            MetaErrorKind::AllocError { error } => error.fmt(f),
+            MetaErrorKind::MetaConflict {
+                current,
+                existing,
+                parameters,
+            } => {
+                write!(f, "Can't insert item `{current}` ({parameters}) because conflicting meta `{existing}` already exists")
+            }
+        }
     }
 }
 
-impl crate::no_std::error::Error for MetaConflict {}
+impl crate::no_std::error::Error for MetaError {}
 
 #[derive(Debug)]
 pub(crate) struct MissingScope(pub(crate) usize);
diff --git a/crates/rune/src/compile/ir/compiler.rs b/crates/rune/src/compile/ir/compiler.rs
index 741397b10..b172a817a 100644
--- a/crates/rune/src/compile/ir/compiler.rs
+++ b/crates/rune/src/compile/ir/compiler.rs
@@ -4,7 +4,7 @@ use crate::no_std::prelude::*;
 
 use crate::ast::{self, Span, Spanned};
 use crate::compile::ir;
-use crate::compile::{self, ErrorKind};
+use crate::compile::{self, ErrorKind, WithSpan};
 use crate::hir;
 use crate::query::Query;
 use crate::runtime::{Bytes, Shared};
@@ -50,7 +50,7 @@ pub(crate) fn expr(hir: &hir::Expr<'_>, c: &mut Ctxt<'_, '_>) -> compile::Result<ir::Ir>
             ));
         };
 
-        ir::Ir::new(span, ir::Value::from_const(value))
+        ir::Ir::new(span, ir::Value::from_const(value).with_span(span)?)
     }
     hir::ExprKind::Variable(name) => {
         return Ok(ir::Ir::new(span, name.into_owned()));
     }
@@ -203,14 +203,16 @@ fn expr_binary(
 fn lit(c: &mut Ctxt<'_, '_>, span: Span, hir: hir::Lit<'_>) -> compile::Result<ir::Ir> {
     Ok(match hir {
         hir::Lit::Bool(boolean) => ir::Ir::new(span, ir::Value::Bool(boolean)),
-        hir::Lit::Str(string) => {
-            ir::Ir::new(span, ir::Value::String(Shared::new(string.to_owned())))
-        }
+        hir::Lit::Str(string) => ir::Ir::new(
+            span,
+            ir::Value::String(Shared::new(string.to_owned()).with_span(span)?),
+        ),
         hir::Lit::Integer(n) => ir::Ir::new(span, ir::Value::Integer(n)),
         hir::Lit::Float(n) => ir::Ir::new(span, ir::Value::Float(n)),
         hir::Lit::Byte(b) => ir::Ir::new(span, ir::Value::Byte(b)),
         hir::Lit::ByteStr(byte_str) => {
-            let value = ir::Value::Bytes(Shared::new(Bytes::from_vec(byte_str.to_vec())));
+            let value =
+                ir::Value::Bytes(Shared::new(Bytes::from_vec(byte_str.to_vec())).with_span(span)?);
             ir::Ir::new(span, value)
         }
         hir::Lit::Char(c) => ir::Ir::new(span, ir::Value::Char(c)),
diff --git a/crates/rune/src/compile/ir/eval.rs b/crates/rune/src/compile/ir/eval.rs
index 2050cebd9..d4ef0ebc1 100644
--- a/crates/rune/src/compile/ir/eval.rs
+++ b/crates/rune/src/compile/ir/eval.rs
@@ -145,7 +145,7 @@ fn eval_ir_binary(
             let b = b.borrow_ref().with_span(span)?;
             let mut a = String::from(&*a);
             a.push_str(&b);
-            Ok(Shared::new(a))
+            Ok(Shared::new(a).with_span(span)?)
         }
     }
 
@@ -284,7 +284,7 @@ fn eval_ir_object(
         object.insert(key.as_ref().to_owned(), eval_ir(value, interp, used)?);
     }
 
-    Ok(ir::Value::Object(Shared::new(object)))
+    Ok(ir::Value::Object(Shared::new(object).with_span(ir)?))
 }
 
 fn eval_ir_scope(
@@ -360,7 +360,7 @@ fn eval_ir_template(
         }
     }
 
-    Ok(ir::Value::String(Shared::new(buf)))
+    Ok(ir::Value::String(Shared::new(buf).with_span(ir)?))
 }
 
 fn eval_ir_tuple(
@@ -374,7 +374,9 @@
         items.push(eval_ir(item, interp, used)?);
     }
 
-    Ok(ir::Value::Tuple(Shared::new(items.into_boxed_slice())))
+    Ok(ir::Value::Tuple(
+        Shared::new(items.into_boxed_slice()).with_span(ir)?,
+    ))
 }
 
 fn eval_ir_vec(
@@ -388,7 +390,7 @@
         vec.push(eval_ir(item, interp, used)?);
    }
 
-    Ok(ir::Value::Vec(Shared::new(vec)))
+    Ok(ir::Value::Vec(Shared::new(vec).with_span(ir)?))
 }
 
 /// IrEval the interior expression.
diff --git a/crates/rune/src/compile/ir/interpreter.rs b/crates/rune/src/compile/ir/interpreter.rs
index ae18a5bff..fc0de48c2 100644
--- a/crates/rune/src/compile/ir/interpreter.rs
+++ b/crates/rune/src/compile/ir/interpreter.rs
@@ -103,7 +103,7 @@ impl Interpreter<'_, '_> {
         let item = self.q.pool.alloc_item(base.extended(name.to_string()));
 
         if let Some(const_value) = self.q.consts.get(item) {
-            return Ok(ir::Value::from_const(const_value));
+            return Ok(ir::Value::from_const(const_value).with_span(span)?);
         }
 
         if let Some(meta) = self.q.query_meta(span, item, used)? {
@@ -116,7 +116,7 @@
                 ));
             };
 
-            return Ok(ir::Value::from_const(const_value));
+            return Ok(ir::Value::from_const(const_value).with_span(span)?);
             }
             _ => {
                 return Err(compile::Error::new(
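The recurring edit in the two files above: `Shared::new` now returns a `Result` because the allocation can fail, and constant-folding code anchors that error to the current span via `WithSpan`. A condensed sketch of the pattern (crate-internal paths, for illustration only):

```rust
use crate::ast::Span;
use crate::compile::{self, ir, WithSpan};
use crate::runtime::Shared;

fn make_string(span: Span, s: &str) -> compile::Result<ir::Value> {
    // `with_span` turns the allocation error into a spanned compile::Error.
    Ok(ir::Value::String(Shared::new(s.to_owned()).with_span(span)?))
}
```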
diff --git a/crates/rune/src/compile/ir/value.rs b/crates/rune/src/compile/ir/value.rs
index 41ee9203e..5b95446f5 100644
--- a/crates/rune/src/compile/ir/value.rs
+++ b/crates/rune/src/compile/ir/value.rs
@@ -1,6 +1,7 @@
 use crate::no_std::collections::HashMap;
 use crate::no_std::prelude::*;
 
+use crate::alloc::Error;
 use crate::ast::Spanned;
 use crate::compile::{self, WithSpan};
 use crate::runtime as rt;
@@ -56,47 +57,48 @@ impl Value {
     }
 
     /// Convert a constant value into an interpreter value.
-    pub(crate) fn from_const(value: &ConstValue) -> Self {
-        match value {
+    pub(crate) fn from_const(value: &ConstValue) -> Result<Self, Error> {
+        Ok(match value {
             ConstValue::EmptyTuple => Self::EmptyTuple,
             ConstValue::Byte(b) => Self::Byte(*b),
             ConstValue::Char(c) => Self::Char(*c),
             ConstValue::Bool(b) => Self::Bool(*b),
             ConstValue::Integer(n) => Self::Integer(*n),
             ConstValue::Float(n) => Self::Float(*n),
-            ConstValue::String(s) => Self::String(Shared::new(s.clone())),
-            ConstValue::Bytes(b) => Self::Bytes(Shared::new(b.clone())),
-            ConstValue::Option(option) => Self::Option(Shared::new(
-                option.as_ref().map(|some| Self::from_const(some)),
-            )),
+            ConstValue::String(s) => Self::String(Shared::new(s.clone())?),
+            ConstValue::Bytes(b) => Self::Bytes(Shared::new(b.clone())?),
+            ConstValue::Option(option) => Self::Option(Shared::new(match option {
+                Some(some) => Some(Self::from_const(some)?),
+                None => None,
+            })?),
             ConstValue::Vec(vec) => {
                 let mut ir_vec = Vec::with_capacity(vec.len());
 
                 for value in vec {
-                    ir_vec.push(Self::from_const(value));
+                    ir_vec.push(Self::from_const(value)?);
                 }
 
-                Self::Vec(Shared::new(ir_vec))
+                Self::Vec(Shared::new(ir_vec)?)
             }
             ConstValue::Tuple(tuple) => {
                 let mut ir_tuple = Vec::with_capacity(tuple.len());
 
                 for value in tuple.iter() {
-                    ir_tuple.push(Self::from_const(value));
+                    ir_tuple.push(Self::from_const(value)?);
                 }
 
-                Self::Tuple(Shared::new(ir_tuple.into_boxed_slice()))
+                Self::Tuple(Shared::new(ir_tuple.into_boxed_slice())?)
             }
             ConstValue::Object(object) => {
                 let mut ir_object = HashMap::with_capacity(object.len());
 
                 for (key, value) in object {
-                    ir_object.insert(key.clone(), Self::from_const(value));
+                    ir_object.insert(key.clone(), Self::from_const(value)?);
                 }
 
-                Self::Object(Shared::new(ir_object))
+                Self::Object(Shared::new(ir_object)?)
             }
-        }
+        })
     }
 
     /// Convert into constant value.
diff --git a/crates/rune/src/compile/named.rs b/crates/rune/src/compile/named.rs
index 3beda941b..686d3a5dc 100644
--- a/crates/rune/src/compile/named.rs
+++ b/crates/rune/src/compile/named.rs
@@ -1,8 +1,8 @@
 use core::cmp::Ordering;
 
-use crate::no_std::prelude::*;
-
+use crate::alloc::String;
 use crate::module::InstallWith;
+use crate::no_std::boxed::Box;
 use crate::runtime::RawStr;
 
 /// The trait used for something that can be statically named.
diff --git a/crates/rune/src/compile/names.rs b/crates/rune/src/compile/names.rs
index a36e438f2..d8fdf6b34 100644
--- a/crates/rune/src/compile/names.rs
+++ b/crates/rune/src/compile/names.rs
@@ -1,18 +1,25 @@
 use core::mem::replace;
 
-use crate::no_std::collections::BTreeMap;
-
+use crate::alloc::{AllocError, BTreeMap, Error, TryClone};
 use crate::compile::{Component, ComponentRef, IntoComponent};
 
 /// A tree of names.
-#[derive(Default, Debug, Clone)]
+#[derive(Default, Debug)]
 pub struct Names {
     root: Node,
 }
 
+impl TryClone for Names {
+    fn try_clone(&self) -> Result<Self, Error> {
+        Ok(Self {
+            root: self.root.try_clone()?,
+        })
+    }
+}
+
 impl Names {
     /// Insert the given item as an import.
-    pub(crate) fn insert<I>(&mut self, iter: I) -> bool
+    pub(crate) fn insert<I>(&mut self, iter: I) -> Result<bool, AllocError>
     where
         I: IntoIterator,
         I::Item: IntoComponent,
@@ -20,10 +27,13 @@
         let mut current = &mut self.root;
 
         for c in iter {
-            current = current.children.entry(c.into_component()).or_default();
+            current = current
+                .children
+                .entry(c.into_component())
+                .or_try_default()?;
         }
 
-        replace(&mut current.term, true)
+        Ok(replace(&mut current.term, true))
     }
 
     /// Test if the given import exists.
@@ -101,7 +111,7 @@
     }
 }
 
-#[derive(Default, Debug, Clone)]
+#[derive(Default, Debug)]
 struct Node {
     /// If the node is terminating.
     term: bool,
@@ -109,6 +119,15 @@
     children: BTreeMap<Component, Node>,
 }
 
+impl TryClone for Node {
+    fn try_clone(&self) -> Result<Self, Error> {
+        Ok(Self {
+            term: self.term,
+            children: self.children.try_clone()?,
+        })
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::Names;
 
@@ -117,17 +136,17 @@
     fn insert() {
         let mut names = Names::default();
         assert!(!names.contains(["test"]));
-        assert!(!names.insert(["test"]));
+        assert!(!names.insert(["test"]).unwrap());
         assert!(names.contains(["test"]));
-        assert!(names.insert(["test"]));
+        assert!(names.insert(["test"]).unwrap());
     }
 
     #[test]
     fn contains() {
         let mut names = Names::default();
         assert!(!names.contains(["test"]));
-        assert!(!names.insert(["test"]));
+        assert!(!names.insert(["test"]).unwrap());
         assert!(names.contains(["test"]));
-        assert!(names.insert(["test"]));
+        assert!(names.insert(["test"]).unwrap());
     }
 }
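As the updated tests above show, `Names::insert` now reports both outcomes: whether the name already existed (`bool`), or that the underlying `BTreeMap` allocation failed. A sketch of a caller under the new signature (crate-internal API, illustrative only):

```rust
use crate::alloc::AllocError;
use crate::compile::Names;

fn register(names: &mut Names) -> Result<(), AllocError> {
    let existed = names.insert(["std", "collections"])?;
    debug_assert!(!existed, "expected first insertion to be new");
    assert!(names.contains(["std", "collections"]));
    Ok(())
}
```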
diff --git a/crates/rune/src/compile/unit_builder.rs b/crates/rune/src/compile/unit_builder.rs
index e5cbdc0e5..65ec79872 100644
--- a/crates/rune/src/compile/unit_builder.rs
+++ b/crates/rune/src/compile/unit_builder.rs
@@ -295,7 +295,7 @@ impl UnitBuilder {
         &mut self,
         span: &dyn Spanned,
         meta: &meta::Meta,
-        pool: &mut Pool,
+        pool: &Pool,
         query: &mut QueryInner,
     ) -> compile::Result<()> {
         match meta.kind {
diff --git a/crates/rune/src/doc/visitor.rs b/crates/rune/src/doc/visitor.rs
index 188c24948..dccbdb53e 100644
--- a/crates/rune/src/doc/visitor.rs
+++ b/crates/rune/src/doc/visitor.rs
@@ -2,9 +2,11 @@ use crate::no_std::prelude::*;
 use crate::no_std::collections::{hash_map, HashMap};
 
 use crate::compile::{
-    meta, CompileVisitor, IntoComponent, Item, ItemBuf, MetaRef, Names, Located,
+    MetaError, CompileVisitor, IntoComponent, Item, ItemBuf, MetaRef, Names, Located,
 };
+use crate::compile::meta;
 use crate::hash::Hash;
+use crate::alloc::AllocError;
 
 pub(crate) struct VisitorData {
     pub(crate) item: ItemBuf,
@@ -38,7 +40,7 @@ pub struct Visitor {
 
 impl Visitor {
     /// Construct a visitor with the given base component.
-    pub fn new<I>(base: I) -> Self
+    pub fn new<I>(base: I) -> Result<Self, AllocError>
     where
         I: IntoIterator,
         I::Item: IntoComponent,
@@ -52,10 +54,10 @@ impl Visitor {
         };
 
         let hash = Hash::type_hash(&this.base);
-        this.names.insert(&this.base);
+        this.names.insert(&this.base)?;
         this.data.insert(hash, VisitorData::new(this.base.clone(), hash, Some(meta::Kind::Module)));
         this.item_to_hash.insert(this.base.clone(), hash);
-        this
+        Ok(this)
     }
 
     /// Get meta by item.
@@ -71,16 +73,16 @@ impl Visitor {
 }
 
 impl CompileVisitor for Visitor {
-    fn register_meta(&mut self, meta: MetaRef<'_>) {
+    fn register_meta(&mut self, meta: MetaRef<'_>) -> Result<(), MetaError> {
         // Skip over context meta, since we pick that up separately.
         if meta.context {
-            return;
+            return Ok(());
         }
 
         let item = self.base.join(meta.item);
         tracing::trace!(base = ?self.base, meta = ?meta.item, ?item, "register meta");
 
-        self.names.insert(&item);
+        self.names.insert(&item)?;
         self.item_to_hash.insert(item.to_owned(), meta.hash);
 
         match self.data.entry(meta.hash) {
@@ -98,6 +100,8 @@ impl CompileVisitor for Visitor {
                 .or_default()
                 .push(meta.hash);
         }
+
+        Ok(())
     }
 
     fn visit_doc_comment(&mut self, _location: &dyn Located, item: &Item, hash: Hash, string: &str) {
diff --git a/crates/rune/src/exported_macros.rs b/crates/rune/src/exported_macros.rs
index 85b521c5d..1c8eb0416 100644
--- a/crates/rune/src/exported_macros.rs
+++ b/crates/rune/src/exported_macros.rs
@@ -7,7 +7,7 @@ macro_rules! vm_try {
         match $crate::runtime::try_result($expr) {
             $crate::runtime::VmResult::Ok(value) => value,
             $crate::runtime::VmResult::Err(err) => {
-                return $crate::runtime::VmResult::Err($crate::runtime::VmError::from(err))
+                return $crate::runtime::VmResult::Err($crate::runtime::VmError::from(err));
             }
         }
     };
@@ -20,8 +20,11 @@ macro_rules! vm_try {
 #[macro_export]
 macro_rules! vm_write {
     ($($tt:tt)*) => {
-        if let core::result::Result::Err(error) = core::write!($($tt)*) {
-            return VmResult::Ok(core::result::Result::Err(error));
+        match core::write!($($tt)*) {
+            Ok(()) => (),
+            Err(err) => {
+                return $crate::runtime::VmResult::Err($crate::runtime::VmError::from(err));
+            }
         }
     };
 }
diff --git a/crates/rune/src/hashbrown/fork.rs b/crates/rune/src/hashbrown/fork.rs
deleted file mode 100644
index 2778616b2..000000000
--- a/crates/rune/src/hashbrown/fork.rs
+++ /dev/null
@@ -1,33 +0,0 @@
-#![allow(unused)]
-#![allow(clippy::manual_map)]
-
-// Copied and modified under the MIT license.
-// Copyright (c) 2016 Amanieu d'Antras
-//
-// Imported using import_hashbrown.ps1, the below section is the only part
-// copied by hand.
-//
-// After an import of the crate some sections might need to be modified.
-//
-// See: https://github.com/rust-lang/hashbrown
-// The relevant fork: https://github.com/udoprog/hashbrown/tree/raw-infallible-context
-// Relevant issue: https://github.com/rust-lang/hashbrown/issues/456
-
-#[macro_use]
-mod macros;
-pub(crate) mod raw;
-mod scopeguard;
-
-/// The error type for `try_reserve` methods.
-#[derive(Clone, PartialEq, Eq, Debug)]
-pub enum TryReserveError {
-    /// Error due to the computed capacity exceeding the collection's maximum
-    /// (usually `isize::MAX` bytes).
-    CapacityOverflow,
-
-    /// The memory allocator returned an error
-    AllocError {
-        /// The layout of the allocation request that failed.
-        layout: alloc::alloc::Layout,
-    },
-}
diff --git a/crates/rune/src/hashbrown/fork/raw/alloc.rs b/crates/rune/src/hashbrown/fork/raw/alloc.rs
deleted file mode 100644
index 15299e7b0..000000000
--- a/crates/rune/src/hashbrown/fork/raw/alloc.rs
+++ /dev/null
@@ -1,86 +0,0 @@
-pub(crate) use self::inner::{do_alloc, Allocator, Global};
-
-// Nightly-case.
-// Use unstable `allocator_api` feature.
-// This is compatible with `allocator-api2` which can be enabled or not.
-// This is used when building for `std`.
-#[cfg(feature = "nightly")]
-mod inner {
-    use crate::alloc::alloc::Layout;
-    pub use crate::alloc::alloc::{Allocator, Global};
-    use core::ptr::NonNull;
-
-    #[allow(clippy::map_err_ignore)]
-    pub(crate) fn do_alloc<A: Allocator>(alloc: &A, layout: Layout) -> Result<NonNull<u8>, ()> {
-        match alloc.allocate(layout) {
-            Ok(ptr) => Ok(ptr.as_non_null_ptr()),
-            Err(_) => Err(()),
-        }
-    }
-}
-
-// Basic non-nightly case.
-// This uses `allocator-api2` enabled by default.
-// If any crate enables "nightly" in `allocator-api2`,
-// this will be equivalent to the nightly case,
-// since `allocator_api2::alloc::Allocator` would be re-export of
-// `core::alloc::Allocator`.
-#[cfg(all(not(feature = "nightly"), feature = "allocator-api2"))]
-mod inner {
-    use crate::alloc::alloc::Layout;
-    pub use allocator_api2::alloc::{Allocator, Global};
-    use core::ptr::NonNull;
-
-    #[allow(clippy::map_err_ignore)]
-    pub(crate) fn do_alloc<A: Allocator>(alloc: &A, layout: Layout) -> Result<NonNull<u8>, ()> {
-        match alloc.allocate(layout) {
-            Ok(ptr) => Ok(ptr.cast()),
-            Err(_) => Err(()),
-        }
-    }
-}
-
-// No-defaults case.
-// When building with default-features turned off and
-// neither `nightly` nor `allocator-api2` is enabled,
-// this will be used.
-// Making it impossible to use any custom allocator with collections defined
-// in this crate.
-// Any crate in build-tree can enable `allocator-api2`,
-// or `nightly` without disturbing users that don't want to use it.
-#[cfg(not(any(feature = "nightly", feature = "allocator-api2")))]
-mod inner {
-    use crate::alloc::alloc::{alloc, dealloc, Layout};
-    use core::ptr::NonNull;
-
-    #[allow(clippy::missing_safety_doc)] // not exposed outside of this crate
-    pub unsafe trait Allocator {
-        fn allocate(&self, layout: Layout) -> Result<NonNull<u8>, ()>;
-        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout);
-    }
-
-    #[derive(Copy, Clone)]
-    pub struct Global;
-
-    unsafe impl Allocator for Global {
-        #[inline]
-        fn allocate(&self, layout: Layout) -> Result<NonNull<u8>, ()> {
-            unsafe { NonNull::new(alloc(layout)).ok_or(()) }
-        }
-        #[inline]
-        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
-            dealloc(ptr.as_ptr(), layout);
-        }
-    }
-
-    impl Default for Global {
-        #[inline]
-        fn default() -> Self {
-            Global
-        }
-    }
-
-    pub(crate) fn do_alloc<A: Allocator>(alloc: &A, layout: Layout) -> Result<NonNull<u8>, ()> {
-        alloc.allocate(layout)
-    }
-}
diff --git a/crates/rune/src/hashbrown.rs b/crates/rune/src/hashbrown/mod.rs
similarity index 52%
rename from crates/rune/src/hashbrown.rs
rename to crates/rune/src/hashbrown/mod.rs
index 86b52d3f3..00a7dee10 100644
--- a/crates/rune/src/hashbrown.rs
+++ b/crates/rune/src/hashbrown/mod.rs
@@ -1,5 +1,2 @@
-mod table;
-pub(crate) use self::fork::raw::RawIter;
 pub(crate) use self::table::{IterRef, Table};
-
-mod fork;
+mod table;
diff --git a/crates/rune/src/hashbrown/table.rs b/crates/rune/src/hashbrown/table.rs
index 3c06178c0..744e2229f 100644
--- a/crates/rune/src/hashbrown/table.rs
+++ b/crates/rune/src/hashbrown/table.rs
@@ -1,35 +1,40 @@
+use rune_alloc::hash_map;
+
 use core::hash::BuildHasher;
 use core::iter;
 use core::marker::PhantomData;
 use core::mem;
 use core::ptr;
 
-use crate::hashbrown::fork::raw::{RawIter, RawTable};
-use std::collections::hash_map::{DefaultHasher, RandomState};
+use crate::alloc::{Allocator, Error, Global, TryClone};
+
+#[cfg(feature = "alloc")]
+use crate::runtime::Hasher;
+use crate::runtime::{ProtocolCaller, RawRef, Ref, Value, VmError, VmResult};
 
-use crate::runtime::{Hasher, ProtocolCaller, RawRef, Ref, Value, VmError, VmResult};
+use crate::alloc::hashbrown::raw::{RawIter, RawTable};
+use crate::alloc::hashbrown::ErrorOrInsertSlot;
 
-#[derive(Clone)]
-pub(crate) struct Table<V> {
-    table: RawTable<(Value, V)>,
-    state: RandomState,
+pub(crate) struct Table<V, A: Allocator + Clone = Global> {
+    table: RawTable<(Value, V), A>,
+    state: hash_map::RandomState,
 }
 
-impl<V> Table<V> {
+impl<V, A: Allocator + Clone> Table<V, A> {
     #[inline(always)]
-    pub(crate) fn new() -> Self {
+    pub(crate) fn new_in(alloc: A) -> Self {
         Self {
-            table: RawTable::new(),
-            state: RandomState::new(),
+            table: RawTable::new_in(alloc),
+            state: hash_map::RandomState::new(),
         }
     }
 
     #[inline(always)]
-    pub(crate) fn with_capacity(capacity: usize) -> Self {
-        Self {
-            table: RawTable::with_capacity(capacity),
-            state: RandomState::new(),
-        }
+    pub(crate) fn try_with_capacity_in(capacity: usize, alloc: A) -> Result<Self, Error> {
+        Ok(Self {
+            table: RawTable::try_with_capacity_in(capacity, alloc)?,
+            state: hash_map::RandomState::new(),
+        })
     }
 
     #[inline(always)]
@@ -59,25 +64,20 @@
     {
         let hash = vm_try!(hash(&self.state, &key, caller));
 
-        let result = match self.table.find_or_find_insert_slot_with(
-            caller,
-            hash,
-            eq(&key),
-            hasher(&self.state),
-        ) {
-            Ok(result) => result,
-            Err(error) => return VmResult::Err(error),
-        };
-
-        let existing = match result {
-            Ok(bucket) => Some(mem::replace(unsafe { &mut bucket.as_mut().1 }, value)),
-            Err(slot) => {
-                unsafe {
-                    self.table.insert_in_slot(hash, slot, (key, value));
+        let existing =
+            match self
+                .table
+                .find_or_find_insert_slot(caller, hash, eq(&key), hasher(&self.state))
+            {
+                Ok(bucket) => Some(mem::replace(unsafe { &mut bucket.as_mut().1 }, value)),
+                Err(ErrorOrInsertSlot::InsertSlot(slot)) => {
+                    unsafe {
+                        self.table.insert_in_slot(hash, slot, (key, value));
+                    }
+                    None
                 }
-                None
-            }
-        };
+                Err(ErrorOrInsertSlot::Error(error)) => return VmResult::err(error),
+            };
 
         VmResult::Ok(existing)
     }
@@ -91,7 +91,7 @@
         }
 
         let hash = vm_try!(hash(&self.state, key, caller));
-        VmResult::Ok(vm_try!(self.table.get_with(caller, hash, eq(key))))
+        VmResult::Ok(vm_try!(self.table.get(caller, hash, eq(key))))
    }
 
     #[inline(always)]
@@ -101,7 +101,7 @@
     {
         let hash = vm_try!(hash(&self.state, key, caller));
 
-        match self.table.remove_entry_with(caller, hash, eq(key)) {
+        match self.table.remove_entry(caller, hash, eq(key)) {
             Ok(value) => VmResult::Ok(value.map(|(_, value)| value)),
             Err(error) => VmResult::Err(error),
         }
@@ -155,6 +155,23 @@
     }
 }
 
+impl<V, A: Allocator + Clone> TryClone for Table<V, A>
+where
+    V: TryClone,
+{
+    fn try_clone(&self) -> Result<Self, Error> {
+        Ok(Self {
+            table: self.table.try_clone()?,
+            state: self.state.clone(),
+        })
+    }
+
+    #[inline]
+    fn try_clone_from(&mut self, source: &Self) -> Result<(), Error> {
+        self.table.try_clone_from(&source.table)
+    }
+}
+
 pub(crate) struct Iter<'a, V> {
     iter: RawIter<(Value, V)>,
     _marker: PhantomData<&'a V>,
@@ -244,7 +261,7 @@ where
 /// Convenience function to hash a value.
 fn hash<S>(state: &S, value: &Value, caller: &mut impl ProtocolCaller) -> VmResult<u64>
 where
-    S: BuildHasher,
+    S: BuildHasher,
 {
     let mut hasher = Hasher::new_with(state);
     vm_try!(value.hash_with(&mut hasher, caller));
@@ -255,7 +272,7 @@ where
 fn hasher<P, S, V>(state: &S) -> impl Fn(&mut P, &(Value, V)) -> Result<u64, VmError> + '_
 where
     P: ?Sized + ProtocolCaller,
-    S: BuildHasher,
+    S: BuildHasher,
 {
     move |caller, (key, _): &(Value, V)| hash(state, key, caller).into_result()
 }
diff --git a/crates/rune/src/hir/lowering.rs b/crates/rune/src/hir/lowering.rs
index ea89e8a40..99bc4935f 100644
--- a/crates/rune/src/hir/lowering.rs
+++ b/crates/rune/src/hir/lowering.rs
@@ -1002,7 +1002,7 @@ fn expr_break<'hir>(
 /// Unroll a continue expression, capturing all variables which are in scope at
 /// the time of it.
 fn expr_continue<'hir>(
-    cx: &mut Ctxt<'hir, '_, '_>,
+    cx: &Ctxt<'hir, '_, '_>,
     ast: &ast::ExprContinue,
 ) -> compile::Result<hir::ExprKind<'hir>> {
     alloc_with!(cx, ast);
@@ -1292,7 +1292,7 @@ fn pat<'hir>(cx: &mut Ctxt<'hir, '_, '_>, ast: &ast::Pat) -> compile::Result<&'hir hir::Pat<'hir>>
 fn object_key<'hir, 'ast>(
-    cx: &mut Ctxt<'hir, '_, '_>,
+    cx: &Ctxt<'hir, '_, '_>,
     ast: &'ast ast::ObjectKey,
 ) -> compile::Result<(&'ast dyn Spanned, &'hir str)> {
     alloc_with!(cx, ast);
diff --git a/crates/rune/src/indexing/index.rs b/crates/rune/src/indexing/index.rs
index fa71e3d4f..1f96bfd94 100644
--- a/crates/rune/src/indexing/index.rs
+++ b/crates/rune/src/indexing/index.rs
@@ -401,7 +401,7 @@ impl<'a, 'arena> Indexer<'a, 'arena> {
     fn expand_attribute_macro<T>(
         &mut self,
         attr: &mut ast::Attribute,
-        item: &mut ast::Item,
+        item: &ast::Item,
     ) -> compile::Result<Option<T>>
     where
         T: Parse,
@@ -567,8 +567,7 @@ pub(crate) fn file(idx: &mut Indexer<'_, '_>, ast: &mut ast::File) -> compile::Result<()> {
         // for the `item` handler or to be used by the macro_call expansion
         // below.
if let Some(mut attr) = item.remove_first_attribute() { - let Some(file) = idx.expand_attribute_macro::(&mut attr, &mut item)? - else { + let Some(file) = idx.expand_attribute_macro::(&mut attr, &item)? else { skipped_attributes.push(attr); if !matches!(item, ast::Item::MacroCall(_)) && item.attributes().is_empty() { @@ -699,7 +698,7 @@ pub(crate) fn empty_block_fn( is_bench: false, impl_item: None, }), - }); + })?; Ok(()) } @@ -849,9 +848,9 @@ pub(crate) fn item_fn_immediate( || is_bench; if is_exported { - idx.q.index_and_build(entry); + idx.q.index_and_build(entry)?; } else { - idx.q.index(entry); + idx.q.index(entry)?; } Ok(()) diff --git a/crates/rune/src/languageserver/state.rs b/crates/rune/src/languageserver/state.rs index 0daf2a335..4014cb9ea 100644 --- a/crates/rune/src/languageserver/state.rs +++ b/crates/rune/src/languageserver/state.rs @@ -314,7 +314,7 @@ impl<'a> State<'a> { } Ok(script_builds) => { for script_build in script_builds { - script_results.push(self.build_scripts(script_build, Some(&mut visited))); + script_results.push(self.build_scripts(script_build, Some(&mut visited))?); } } }; @@ -342,7 +342,7 @@ impl<'a> State<'a> { }; build.sources.insert(input); - script_results.push(self.build_scripts(build, None)); + script_results.push(self.build_scripts(build, None)?); } // We need to pupulate diagnostics for everything we know about, in @@ -457,16 +457,16 @@ impl<'a> State<'a> { &self, mut build: Build, built: Option<&mut HashSet>, - ) -> ( + ) -> Result<( crate::Diagnostics, Build, Visitor, crate::doc::Visitor, Result, - ) { + )> { let mut diagnostics = crate::Diagnostics::new(); let mut source_visitor = Visitor::default(); - let mut doc_visitor = crate::doc::Visitor::new(ItemBuf::new()); + let mut doc_visitor = crate::doc::Visitor::new(ItemBuf::new())?; let mut source_loader = ScriptSourceLoader::new(&self.workspace.sources); @@ -483,7 +483,7 @@ impl<'a> State<'a> { build.visit(built); } - (diagnostics, build, source_visitor, doc_visitor, unit) + Ok((diagnostics, build, source_visitor, doc_visitor, unit)) } } diff --git a/crates/rune/src/lib.rs b/crates/rune/src/lib.rs index 8faf5da9f..fe568a15e 100644 --- a/crates/rune/src/lib.rs +++ b/crates/rune/src/lib.rs @@ -160,7 +160,7 @@ extern crate std; compile_error!("The `alloc` feature is currently required to build rune, but will change for parts of rune in the future."); #[macro_use] -extern crate alloc; +extern crate alloc as rust_alloc; /// A macro that can be used to construct a [Span][crate::ast::Span] that can be /// pattern matched over. @@ -192,6 +192,8 @@ pub type Result = ::core::result::Result; /// Boxed error type, which is an alias of [anyhow::Error]. pub type Error = crate::no_std::Error; +pub use rune_alloc as alloc; + #[macro_use] #[cfg(test)] pub(crate) mod testing; @@ -236,6 +238,9 @@ pub use self::diagnostics::Diagnostics; mod hash; pub use self::hash::{Hash, ToTypeHash}; +#[cfg(feature = "alloc")] +mod hashbrown; + mod params; pub use self::params::Params; @@ -269,9 +274,6 @@ cfg_workspace! { pub mod workspace; } -#[cfg(feature = "std")] -mod hashbrown; - // Macros used internally and re-exported. 
pub(crate) use rune_macros::__internal_impl_any; @@ -457,8 +459,9 @@ pub(crate) use rune_macros::__internal_impl_any; /// /// ``` /// use rune::{Any, Module, ContextError}; -/// use rune::runtime::Formatter; -/// use std::fmt::{self, Write}; +/// use rune::vm_write; +/// use rune::runtime::{Formatter, VmResult}; +/// use rune::alloc::TryWrite; /// /// #[derive(Any)] /// struct String { @@ -498,8 +501,9 @@ pub(crate) use rune_macros::__internal_impl_any; /// /// assert_eq!(format!("{}", string), "hello"); /// /// ``` /// #[rune::function(protocol = STRING_DISPLAY)] -/// fn display(&self, f: &mut Formatter) -> fmt::Result { -/// write!(f, "{}", self.inner) +/// fn display(&self, f: &mut Formatter) -> VmResult<()> { +/// vm_write!(f, "{}", self.inner); +/// VmResult::Ok(()) /// } /// } /// @@ -585,6 +589,7 @@ pub mod __private { }; pub use crate::params::Params; pub use crate::runtime::TypeOf; + pub use rust_alloc::boxed::Box; } #[cfg(test)] diff --git a/crates/rune/src/module/function_traits.rs b/crates/rune/src/module/function_traits.rs index 272a5f760..0a4626d90 100644 --- a/crates/rune/src/module/function_traits.rs +++ b/crates/rune/src/module/function_traits.rs @@ -247,7 +247,7 @@ macro_rules! impl_function_traits { $(drop($var.1);)* let ret = vm_try!(ToValue::to_value(ret)); - stack.push(ret); + vm_try!(stack.push(ret)); VmResult::Ok(()) } } @@ -281,7 +281,7 @@ macro_rules! impl_function_traits { VmResult::Ok(vm_try!(output.to_value())) }); - stack.push(ret); + vm_try!(stack.push(vm_try!(Value::try_from(ret)))); VmResult::Ok(()) } } diff --git a/crates/rune/src/module/module.rs b/crates/rune/src/module/module.rs index 03472e624..eb9c1752d 100644 --- a/crates/rune/src/module/module.rs +++ b/crates/rune/src/module/module.rs @@ -1182,7 +1182,7 @@ impl Module { /// /// ``` /// use rune::Module; - /// use rune::runtime::{Stack, VmResult}; + /// use rune::runtime::{Stack, VmResult, ToValue}; /// use rune::vm_try; /// /// fn sum(stack: &mut Stack, args: usize) -> VmResult<()> { @@ -1192,7 +1192,7 @@ impl Module { /// number += vm_try!(vm_try!(stack.pop()).into_integer()); /// } /// - /// stack.push(number); + /// stack.push(vm_try!(number.to_value())); /// VmResult::Ok(()) /// } /// diff --git a/crates/rune/src/modules/any.rs b/crates/rune/src/modules/any.rs index c6d41ef7a..6afd5385b 100644 --- a/crates/rune/src/modules/any.rs +++ b/crates/rune/src/modules/any.rs @@ -1,10 +1,8 @@ //! The `std::any` rune module. -use core::fmt::{self, Write}; - -use crate::no_std::prelude::*; - use crate as rune; +use crate::alloc::fmt::TryWrite; +use crate::alloc::String; use crate::runtime::{Formatter, Type, Value, VmResult}; use crate::{ContextError, Module}; @@ -53,8 +51,9 @@ fn type_of_val(value: Value) -> VmResult { /// assert_eq!(format!("{}", any::Type::of_val(42)), "Type(0x1cad9186c9641c4f)"); /// ``` #[rune::function(instance, protocol = STRING_DISPLAY)] -fn format_type(ty: Type, f: &mut Formatter) -> fmt::Result { - write!(f, "{:?}", ty) +fn format_type(ty: Type, f: &mut Formatter) -> VmResult<()> { + vm_write!(f, "{:?}", ty); + VmResult::Ok(()) } /// Get the type name of a value. 
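For context on the `TryClone` implementations this diff adds (for `Table` above, and for `VecDeque` further down): they all follow one delegation shape, clone each field fallibly and reassemble, with `try_clone_from` reusing existing allocations where possible. A minimal self-contained sketch of that shape; `Error` here is a stand-in for `rune::alloc::Error` and the field types are hypothetical:

```rust
// Sketch of the TryClone delegation pattern used throughout this diff.
pub struct Error;

pub trait TryClone: Sized {
    fn try_clone(&self) -> Result<Self, Error>;

    // Override to reuse existing allocations; defaults to a fresh clone.
    fn try_clone_from(&mut self, source: &Self) -> Result<(), Error> {
        *self = source.try_clone()?;
        Ok(())
    }
}

struct Storage(Vec<u8>);

impl TryClone for Storage {
    fn try_clone(&self) -> Result<Self, Error> {
        // A real implementation would perform a fallible allocation here.
        Ok(Storage(self.0.clone()))
    }
}

struct Composite {
    storage: Storage, // fallible to clone: delegate
    state: u64,       // plain state: copy directly
}

impl TryClone for Composite {
    fn try_clone(&self) -> Result<Self, Error> {
        Ok(Self {
            storage: self.storage.try_clone()?,
            state: self.state,
        })
    }

    fn try_clone_from(&mut self, source: &Self) -> Result<(), Error> {
        // Mirrors the Table impl above: reuse the existing allocation.
        self.storage.try_clone_from(&source.storage)?;
        self.state = source.state;
        Ok(())
    }
}
```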
diff --git a/crates/rune/src/modules/capture_io.rs b/crates/rune/src/modules/capture_io.rs index 094b3c1ec..7d52174b0 100644 --- a/crates/rune/src/modules/capture_io.rs +++ b/crates/rune/src/modules/capture_io.rs @@ -97,6 +97,6 @@ fn dbg_impl(o: &mut Vec, stack: &mut Stack, args: usize) -> VmResult<()> { vm_try!(writeln!(o, "{:?}", value).map_err(VmError::panic)); } - stack.push(Value::EmptyTuple); + vm_try!(stack.push(Value::EmptyTuple)); VmResult::Ok(()) } diff --git a/crates/rune/src/modules/cmp.rs b/crates/rune/src/modules/cmp.rs index e9d05238f..7f93f0880 100644 --- a/crates/rune/src/modules/cmp.rs +++ b/crates/rune/src/modules/cmp.rs @@ -1,9 +1,9 @@ //! The `std::cmp` module. use core::cmp::Ordering; -use core::fmt::{self, Write}; use crate as rune; +use crate::alloc::fmt::TryWrite; use crate::runtime::{Formatter, Value, VmResult}; use crate::{ContextError, Module}; @@ -143,6 +143,7 @@ fn ordering_eq(this: Ordering, other: Ordering) -> bool { /// assert_eq!(format!("{:?}", Ordering::Less), "Less"); /// ``` #[rune::function(instance, protocol = STRING_DEBUG)] -fn ordering_string_debug(this: Ordering, s: &mut Formatter) -> fmt::Result { - write!(s, "{:?}", this) +fn ordering_string_debug(this: Ordering, s: &mut Formatter) -> VmResult<()> { + vm_write!(s, "{:?}", this); + VmResult::Ok(()) } diff --git a/crates/rune/src/modules/collections.rs b/crates/rune/src/modules/collections.rs index 39c51d1fe..62eed02b6 100644 --- a/crates/rune/src/modules/collections.rs +++ b/crates/rune/src/modules/collections.rs @@ -1,16 +1,16 @@ //! `std::collections` module. -#[cfg(feature = "std")] +#[cfg(feature = "alloc")] mod hash_map; -#[cfg(feature = "std")] +#[cfg(feature = "alloc")] mod hash_set; mod vec_deque; use crate::{ContextError, Module}; -#[cfg(feature = "std")] +#[cfg(feature = "alloc")] pub(crate) use self::hash_map::HashMap; -#[cfg(feature = "std")] +#[cfg(feature = "alloc")] pub(crate) use self::hash_set::HashSet; pub(crate) use self::vec_deque::VecDeque; use crate as rune; @@ -19,9 +19,9 @@ use crate as rune; /// The `std::collections` module. 
pub fn module() -> Result { let mut module = Module::from_meta(self::module_meta); - #[cfg(feature = "std")] + #[cfg(feature = "alloc")] hash_map::setup(&mut module)?; - #[cfg(feature = "std")] + #[cfg(feature = "alloc")] hash_set::setup(&mut module)?; vec_deque::setup(&mut module)?; Ok(module) diff --git a/crates/rune/src/modules/collections/hash_map.rs b/crates/rune/src/modules/collections/hash_map.rs index 2f7c84b87..0f800d805 100644 --- a/crates/rune/src/modules/collections/hash_map.rs +++ b/crates/rune/src/modules/collections/hash_map.rs @@ -1,6 +1,6 @@ -use core::fmt::{self, Write}; - use crate as rune; +use crate::alloc::fmt::TryWrite; +use crate::alloc::{Global, TryClone}; use crate::hashbrown::Table; use crate::runtime::{ EnvProtocolCaller, Formatter, FromValue, Iterator, ProtocolCaller, Ref, Value, VmErrorKind, @@ -35,7 +35,7 @@ pub(super) fn setup(module: &mut Module) -> Result<(), ContextError> { Ok(()) } -#[derive(Any, Clone)] +#[derive(Any)] #[rune(item = ::std::collections)] pub(crate) struct HashMap { table: Table, @@ -56,7 +56,7 @@ impl HashMap { #[rune::function(keep, path = Self::new)] fn new() -> Self { Self { - table: Table::new(), + table: Table::new_in(Global), } } @@ -73,10 +73,10 @@ impl HashMap { /// let map = HashMap::with_capacity(10); /// ``` #[rune::function(keep, path = Self::with_capacity)] - fn with_capacity(capacity: usize) -> Self { - Self { - table: Table::with_capacity(capacity), - } + fn with_capacity(capacity: usize) -> VmResult { + VmResult::Ok(Self { + table: vm_try!(Table::try_with_capacity_in(capacity, Global)), + }) } /// Returns the number of elements in the map. @@ -371,8 +371,10 @@ impl HashMap { /// assert_eq!(b.len(), 3); /// ``` #[rune::function(keep, instance, path = Self::clone)] - fn clone(this: &HashMap) -> HashMap { - Clone::clone(this) + fn clone(this: &HashMap) -> VmResult { + VmResult::Ok(Self { + table: vm_try!(this.table.try_clone()), + }) } pub(crate) fn from_iter
<P>
(mut it: Iterator, caller: &mut P) -> VmResult @@ -463,7 +465,7 @@ impl HashMap { /// assert_eq!(format!("{:?}", map), "{1: \"a\"}"); /// ``` #[rune::function(keep, protocol = STRING_DEBUG)] - fn string_debug(&self, f: &mut Formatter) -> VmResult { + fn string_debug(&self, f: &mut Formatter) -> VmResult<()> { self.string_debug_with(f, &mut EnvProtocolCaller) } @@ -471,21 +473,15 @@ impl HashMap { &self, f: &mut Formatter, caller: &mut impl ProtocolCaller, - ) -> VmResult { + ) -> VmResult<()> { vm_write!(f, "{{"); let mut it = self.table.iter().peekable(); while let Some((key, value)) = it.next() { - if let Err(fmt::Error) = vm_try!(key.string_debug_with(f, caller)) { - return VmResult::Ok(Err(fmt::Error)); - } - + vm_try!(key.string_debug_with(f, caller)); vm_write!(f, ": "); - - if let Err(fmt::Error) = vm_try!(value.string_debug_with(f, caller)) { - return VmResult::Ok(Err(fmt::Error)); - } + vm_try!(value.string_debug_with(f, caller)); if it.peek().is_some() { vm_write!(f, ", "); @@ -493,7 +489,7 @@ impl HashMap { } vm_write!(f, "}}"); - VmResult::Ok(Ok(())) + VmResult::Ok(()) } /// Perform a partial equality check over two maps. diff --git a/crates/rune/src/modules/collections/hash_set.rs b/crates/rune/src/modules/collections/hash_set.rs index 6d6e5c932..eefcc871a 100644 --- a/crates/rune/src/modules/collections/hash_set.rs +++ b/crates/rune/src/modules/collections/hash_set.rs @@ -1,10 +1,12 @@ -use core::fmt::{self, Write}; +use crate::alloc::fmt::TryWrite; use core::iter; use core::ptr; use crate as rune; -use crate::hashbrown::{IterRef, RawIter, Table}; +use crate::alloc::hashbrown::raw::RawIter; +use crate::alloc::{Global, TryClone}; +use crate::hashbrown::{IterRef, Table}; use crate::runtime::{ EnvProtocolCaller, Formatter, Iterator, ProtocolCaller, RawRef, Ref, Value, VmResult, }; @@ -35,7 +37,7 @@ pub(super) fn setup(module: &mut Module) -> Result<(), ContextError> { Ok(()) } -#[derive(Any, Clone)] +#[derive(Any)] #[rune(module = crate, item = ::std::collections)] pub(crate) struct HashSet { table: Table<()>, @@ -57,7 +59,7 @@ impl HashSet { #[rune::function(keep, path = Self::new)] fn new() -> Self { Self { - table: Table::new(), + table: Table::new_in(Global), } } @@ -76,10 +78,10 @@ impl HashSet { /// assert!(set.capacity() >= 10); /// ``` #[rune::function(keep, path = Self::with_capacity)] - fn with_capacity(capacity: usize) -> Self { - Self { - table: Table::with_capacity(capacity), - } + fn with_capacity(capacity: usize) -> VmResult { + VmResult::Ok(Self { + table: vm_try!(Table::try_with_capacity_in(capacity, Global)), + }) } /// Returns the number of elements in the set. 
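The `with_capacity` conversions above illustrate the recurring constructor shape in this diff: anything that allocates up front now returns `VmResult<Self>`, with each allocation routed through `vm_try!`. A condensed sketch, assuming the `rune::alloc::Vec::try_with_capacity`, `vm_try!`, and `VmResult` items this diff introduces:

```rust
use rune::alloc::Vec;
use rune::runtime::{Value, VmResult};
use rune::vm_try;

struct Buffer {
    items: Vec<Value>,
}

impl Buffer {
    // Allocation happens up front, so the constructor itself is fallible:
    // an out-of-memory condition surfaces as a VM error, not an abort.
    fn with_capacity(capacity: usize) -> VmResult<Self> {
        VmResult::Ok(Self {
            items: vm_try!(Vec::try_with_capacity(capacity)),
        })
    }
}
```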
@@ -301,8 +303,8 @@ impl HashSet { // use longest as lead and then append any missing that are in second let iter = if this.as_ref().len() >= other.as_ref().len() { - let this_iter = Table::iter_ref_raw(this); - let other_iter = Table::iter_ref_raw(other); + let this_iter = Table::<_, Global>::iter_ref_raw(this); + let other_iter = Table::<_, Global>::iter_ref_raw(other); Union { this, @@ -311,8 +313,8 @@ impl HashSet { _guards: (this_guard, other_guard), } } else { - let this_iter = Table::iter_ref_raw(other); - let other_iter = Table::iter_ref_raw(this); + let this_iter = Table::<_, Global>::iter_ref_raw(other); + let other_iter = Table::<_, Global>::iter_ref_raw(this); Union { this: other, @@ -397,15 +399,11 @@ impl HashSet { /// println!("{:?}", set); /// ``` #[rune::function(keep, protocol = STRING_DEBUG)] - fn string_debug(&self, f: &mut Formatter) -> VmResult { + fn string_debug(&self, f: &mut Formatter) -> VmResult<()> { self.string_debug_with(f, &mut EnvProtocolCaller) } - fn string_debug_with( - &self, - f: &mut Formatter, - _: &mut impl ProtocolCaller, - ) -> VmResult { + fn string_debug_with(&self, f: &mut Formatter, _: &mut impl ProtocolCaller) -> VmResult<()> { vm_write!(f, "{{"); let mut it = self.table.iter().peekable(); @@ -419,14 +417,14 @@ impl HashSet { } vm_write!(f, "}}"); - VmResult::Ok(Ok(())) + VmResult::Ok(()) } pub(crate) fn from_iter
<P>
(mut it: Iterator, caller: &mut P) -> VmResult where P: ?Sized + ProtocolCaller, { - let mut set = Table::with_capacity(it.size_hint().0); + let mut set = vm_try!(Table::try_with_capacity_in(it.size_hint().0, Global)); while let Some(key) = vm_try!(it.next()) { vm_try!(set.insert_with(key, (), caller)); @@ -491,8 +489,10 @@ impl HashSet { } #[rune::function(keep, instance, path = Self::clone)] - fn clone(this: &HashSet) -> HashSet { - this.clone() + fn clone(this: &HashSet) -> VmResult { + VmResult::Ok(Self { + table: vm_try!(this.table.try_clone()), + }) } } diff --git a/crates/rune/src/modules/collections/vec_deque.rs b/crates/rune/src/modules/collections/vec_deque.rs index 1ddb01fa5..2e8f93ae0 100644 --- a/crates/rune/src/modules/collections/vec_deque.rs +++ b/crates/rune/src/modules/collections/vec_deque.rs @@ -1,11 +1,12 @@ use core::cmp::Ordering; -use core::fmt::{self, Write}; +use core::iter; use crate as rune; -use crate::no_std::collections; - +use crate::alloc::fmt::TryWrite; +use crate::alloc::{self, Error, Global, TryClone}; use crate::runtime::{ - EnvProtocolCaller, Formatter, Iterator, Protocol, ProtocolCaller, Value, VmErrorKind, VmResult, + EnvProtocolCaller, Formatter, Iterator, Protocol, ProtocolCaller, RawRef, Ref, Value, + VmErrorKind, VmResult, }; use crate::{Any, ContextError, Module}; @@ -63,10 +64,10 @@ pub(super) fn setup(m: &mut Module) -> Result<(), ContextError> { Ok(()) } -#[derive(Any, Clone, Default)] +#[derive(Any, Default)] #[rune(module = crate, item = ::std::collections)] pub(crate) struct VecDeque { - inner: collections::VecDeque, + inner: alloc::VecDeque, } impl VecDeque { @@ -82,7 +83,7 @@ impl VecDeque { #[rune::function(path = Self::new)] fn new() -> VecDeque { Self { - inner: collections::VecDeque::new(), + inner: alloc::VecDeque::new(), } } @@ -97,10 +98,10 @@ impl VecDeque { /// assert!(deque.capacity() >= 10); /// ``` #[rune::function(path = Self::with_capacity)] - fn with_capacity(count: usize) -> VecDeque { - Self { - inner: collections::VecDeque::with_capacity(count), - } + fn with_capacity(count: usize) -> VmResult { + VmResult::Ok(Self { + inner: vm_try!(alloc::VecDeque::try_with_capacity_in(count, Global)), + }) } /// Extend this VecDeque with something that implements the [`INTO_ITER`] @@ -122,7 +123,7 @@ impl VecDeque { let mut it = vm_try!(value.into_iter()); while let Some(value) = vm_try!(it.next()) { - self.inner.push_back(value); + vm_try!(self.inner.try_push_back(value)); } VmResult::Ok(()) @@ -181,8 +182,9 @@ impl VecDeque { /// assert_eq!(Some(3), buf.back()); /// ``` #[rune::function] - fn push_back(&mut self, value: Value) { - self.inner.push_back(value); + fn push_back(&mut self, value: Value) -> VmResult<()> { + vm_try!(self.inner.try_push_back(value)); + VmResult::Ok(()) } /// Prepends an element to the deque. @@ -198,8 +200,9 @@ impl VecDeque { /// assert_eq!(d.front(), Some(2)); /// ``` #[rune::function] - fn push_front(&mut self, value: Value) { - self.inner.push_front(value); + fn push_front(&mut self, value: Value) -> VmResult<()> { + vm_try!(self.inner.try_push_front(value)); + VmResult::Ok(()) } /// Removes the first element and returns it, or `None` if the deque is @@ -260,8 +263,9 @@ impl VecDeque { /// assert!(buf.capacity() >= 11); /// ``` #[rune::function] - fn reserve(&mut self, index: usize) { - self.inner.reserve(index); + fn reserve(&mut self, index: usize) -> VmResult<()> { + vm_try!(self.inner.try_reserve(index)); + VmResult::Ok(()) } /// Returns the number of elements in the deque. 
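The `VecDeque` changes above apply the same migration to growth operations: `push_back`, `push_front`, `reserve`, and `insert` all become fallible. A small sketch combining them, assuming the `try_reserve`/`try_push_back` methods used in this diff:

```rust
use rune::alloc::VecDeque;
use rune::runtime::{Value, VmResult};
use rune::vm_try;

// Reserve once, then push; every growth point reports allocation failure
// through VmResult instead of aborting the process.
fn extend_deque(
    deque: &mut VecDeque<Value>,
    values: impl ExactSizeIterator<Item = Value>,
) -> VmResult<()> {
    vm_try!(deque.try_reserve(values.len()));

    for value in values {
        vm_try!(deque.try_push_back(value));
    }

    VmResult::Ok(())
}
```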
@@ -328,7 +332,7 @@ impl VecDeque { }); } - self.inner.insert(index, value); + vm_try!(self.inner.try_insert(index, value)); VmResult::Ok(()) } @@ -461,22 +465,51 @@ impl VecDeque { /// buf.push_back(5); /// buf.push_back(3); /// buf.push_back(4); - /// let b = [5, 3, 4]; - /// let c = buf; - /// assert_eq!(c, b); + /// + /// assert_eq!([5, 3, 4], buf.iter()); + /// assert_eq!([4, 3, 5], buf.iter().rev()); /// ``` #[inline] - #[rune::function] - fn iter(&self) -> Iterator { - let iter = self.inner.clone().into_iter(); - Iterator::from("std::collections::vec_deque::Iter", iter) + #[rune::function(instance, path = Self::iter)] + fn iter(this: Ref) -> Iterator { + struct Iter { + iter: alloc::vec_deque::RawIter, + // Drop must happen after the raw iterator. + _guard: RawRef, + } + + impl iter::Iterator for Iter { + type Item = Value; + + #[inline] + fn next(&mut self) -> Option { + // SAFETY: We're holding onto the reference guard. + unsafe { Some((*self.iter.next()?).clone()) } + } + } + + impl iter::DoubleEndedIterator for Iter { + fn next_back(&mut self) -> Option { + // SAFETY: We're holding onto the reference guard. + unsafe { Some((*self.iter.next_back()?).clone()) } + } + } + + // SAFETY: We're holding onto the reference guard. + let iter = unsafe { this.inner.raw_iter() }; + let (_, _guard) = Ref::into_raw(this); + let iter = Iter { iter, _guard }; + Iterator::from_double_ended("std::collections::vec_deque::Iter", iter) } pub(crate) fn from_iter(mut it: Iterator) -> VmResult { - let mut inner = collections::VecDeque::with_capacity(it.size_hint().0); + let mut inner = vm_try!(alloc::VecDeque::try_with_capacity_in( + it.size_hint().0, + Global + )); while let Some(value) = vm_try!(it.next()) { - inner.push_back(value); + vm_try!(inner.try_push_back(value)); } VmResult::Ok(Self { inner }) @@ -519,7 +552,7 @@ impl VecDeque { /// assert_eq!(format!("{:?}", deque), "[1, 2, 3]"); /// ``` #[rune::function(protocol = STRING_DEBUG)] - fn string_debug(&self, f: &mut Formatter) -> VmResult { + fn string_debug(&self, f: &mut Formatter) -> VmResult<()> { self.string_debug_with(f, &mut EnvProtocolCaller) } @@ -528,15 +561,13 @@ impl VecDeque { &self, f: &mut Formatter, caller: &mut impl ProtocolCaller, - ) -> VmResult { + ) -> VmResult<()> { let mut it = self.inner.iter().peekable(); vm_write!(f, "["); while let Some(value) = it.next() { - if let Err(fmt::Error) = vm_try!(value.string_debug_with(f, caller)) { - return VmResult::Ok(Err(fmt::Error)); - } + vm_try!(value.string_debug_with(f, caller)); if it.peek().is_some() { vm_write!(f, ", "); @@ -544,7 +575,7 @@ impl VecDeque { } vm_write!(f, "]"); - VmResult::Ok(Ok(())) + VmResult::Ok(()) } /// Perform a partial equality check with this deque. @@ -709,6 +740,20 @@ impl VecDeque { } } +impl TryClone for VecDeque { + #[inline] + fn try_clone(&self) -> Result { + Ok(Self { + inner: self.inner.try_clone()?, + }) + } + + #[inline] + fn try_clone_from(&mut self, source: &Self) -> Result<(), Error> { + self.inner.try_clone_from(&source.inner) + } +} + /// Construct a [`VecDeque`] from a value. /// /// # Examples diff --git a/crates/rune/src/modules/disable_io.rs b/crates/rune/src/modules/disable_io.rs index d7165b040..46a7b1c0f 100644 --- a/crates/rune/src/modules/disable_io.rs +++ b/crates/rune/src/modules/disable_io.rs @@ -9,7 +9,7 @@ //! # Ok::<_, ContextError>(()) //! 
``` -use crate::runtime::{Stack, VmResult}; +use crate::runtime::{Stack, Value, VmResult}; use crate::{ContextError, Module}; /// Provide a bunch of `std::io` functions which will cause any output to be ignored. @@ -23,7 +23,7 @@ pub fn module() -> Result { module.raw_fn(["dbg"], move |stack: &mut Stack, args: usize| { // NB: still need to maintain the stack. drop(vm_try!(stack.drain(args))); - stack.push(()); + vm_try!(stack.push(Value::from(()))); VmResult::Ok(()) })?; diff --git a/crates/rune/src/modules/fmt.rs b/crates/rune/src/modules/fmt.rs index 90696bec1..5553dc42b 100644 --- a/crates/rune/src/modules/fmt.rs +++ b/crates/rune/src/modules/fmt.rs @@ -1,12 +1,13 @@ //! The `std::fmt` module. -use core::fmt::{self, Write}; +use core::fmt; use crate as rune; +use crate::alloc::fmt::TryWrite; use crate::compile; use crate::macros::{FormatArgs, MacroContext, TokenStream}; use crate::parse::Parser; -use crate::runtime::{Format, Formatter}; +use crate::runtime::{Format, Formatter, VmResult}; use crate::{ContextError, Module}; /// Construct the `std::fmt` module. @@ -21,8 +22,9 @@ pub fn module() -> Result { } #[rune::function(instance, protocol = STRING_DISPLAY)] -fn fmt_error_string_display(error: &fmt::Error, f: &mut Formatter) -> fmt::Result { - write!(f, "{}", error) +fn fmt_error_string_display(error: &fmt::Error, f: &mut Formatter) -> VmResult<()> { + vm_write!(f, "{}", error); + VmResult::Ok(()) } /// Format a string using a format specifier. diff --git a/crates/rune/src/modules/future.rs b/crates/rune/src/modules/future.rs index 6379fc940..3d2154af6 100644 --- a/crates/rune/src/modules/future.rs +++ b/crates/rune/src/modules/future.rs @@ -1,7 +1,6 @@ //! The `std::future` module. -use crate::no_std::prelude::*; - +use crate::alloc::Vec; use crate::runtime::{Future, SelectFuture, Shared, Stack, Value, VmErrorKind, VmResult}; use crate::{ContextError, Module}; @@ -52,12 +51,12 @@ pub fn module() -> Result { async fn try_join_impl<'a, I, F>(values: I, len: usize, factory: F) -> VmResult where I: IntoIterator, - F: FnOnce(Vec) -> Value, + F: FnOnce(Vec) -> VmResult, { use futures_util::stream::StreamExt as _; let mut futures = futures_util::stream::FuturesUnordered::new(); - let mut results = Vec::with_capacity(len); + let mut results = vm_try!(Vec::try_with_capacity(len)); for (index, value) in values.into_iter().enumerate() { let future = match value { @@ -71,7 +70,7 @@ where }; futures.push(SelectFuture::new(index, future)); - results.push(Value::EmptyTuple); + vm_try!(results.try_push(Value::EmptyTuple)); } while !futures.is_empty() { @@ -79,7 +78,7 @@ where *results.get_mut(index).unwrap() = value; } - VmResult::Ok(factory(results)) + factory(results) } async fn join(value: Value) -> VmResult { @@ -88,7 +87,10 @@ async fn join(value: Value) -> VmResult { Value::Tuple(tuple) => { let tuple = vm_try!(tuple.borrow_ref()); VmResult::Ok(vm_try!( - try_join_impl(tuple.iter(), tuple.len(), Value::tuple).await + try_join_impl(tuple.iter(), tuple.len(), |vec| VmResult::Ok(vm_try!( + Value::tuple(vec) + ))) + .await )) } Value::Vec(vec) => { @@ -99,7 +101,7 @@ async fn join(value: Value) -> VmResult { } actual => VmResult::err([ VmErrorKind::bad_argument(0), - VmErrorKind::expected::>(vm_try!(actual.type_info())), + VmErrorKind::expected::(vm_try!(actual.type_info())), ]), } } @@ -114,7 +116,7 @@ fn raw_join(stack: &mut Stack, args: usize) -> VmResult<()> { } let value = vm_try!(stack.pop()); - let value = Value::Future(Shared::new(Future::new(join(value)))); - stack.push(value); + let value 
= Value::Future(vm_try!(Shared::new(Future::new(join(value))))); + vm_try!(stack.push(value)); VmResult::Ok(()) } diff --git a/crates/rune/src/modules/i64.rs b/crates/rune/src/modules/i64.rs index 297b1f981..479a49052 100644 --- a/crates/rune/src/modules/i64.rs +++ b/crates/rune/src/modules/i64.rs @@ -4,7 +4,8 @@ use core::cmp::Ordering; use core::num::ParseIntError; use crate as rune; -use crate::no_std::prelude::*; +use crate::alloc; +use crate::alloc::string::TryToString; use crate::runtime::{VmErrorKind, VmResult}; use crate::{ContextError, Module}; @@ -598,6 +599,6 @@ fn cmp(this: i64, rhs: i64) -> Ordering { /// ``` #[rune::function(instance)] #[inline] -fn to_string(this: i64) -> String { - this.to_string() +fn to_string(this: i64) -> VmResult { + VmResult::Ok(vm_try!(this.try_to_string())) } diff --git a/crates/rune/src/modules/io.rs b/crates/rune/src/modules/io.rs index 9b9236cbf..f54c2a694 100644 --- a/crates/rune/src/modules/io.rs +++ b/crates/rune/src/modules/io.rs @@ -1,9 +1,9 @@ //! The `std::io` module. -use std::fmt::{self, Write as _}; use std::io::{self, Write as _}; use crate as rune; +use crate::alloc::fmt::TryWrite; use crate::compile; use crate::macros::{quote, FormatArgs, MacroContext, TokenStream}; use crate::parse::Parser; @@ -67,8 +67,9 @@ pub fn module(stdio: bool) -> Result { } #[rune::function(instance, protocol = STRING_DISPLAY)] -fn io_error_string_display(error: &io::Error, f: &mut Formatter) -> fmt::Result { - write!(f, "{}", error) +fn io_error_string_display(error: &io::Error, f: &mut Formatter) -> VmResult<()> { + vm_write!(f, "{}", error); + VmResult::Ok(()) } fn dbg_impl(stack: &mut Stack, args: usize) -> VmResult<()> { @@ -79,7 +80,7 @@ fn dbg_impl(stack: &mut Stack, args: usize) -> VmResult<()> { vm_try!(writeln!(stdout, "{:?}", value).map_err(Panic::custom)); } - stack.push(Value::EmptyTuple); + vm_try!(stack.push(Value::EmptyTuple)); VmResult::Ok(()) } diff --git a/crates/rune/src/modules/iter.rs b/crates/rune/src/modules/iter.rs index d6bea9b5e..a54d21fa9 100644 --- a/crates/rune/src/modules/iter.rs +++ b/crates/rune/src/modules/iter.rs @@ -1,12 +1,11 @@ //! The `std::iter` module. 
-use crate::no_std::prelude::*; - use crate as rune; +use crate::alloc::String; use crate::modules::collections::VecDeque; -#[cfg(feature = "std")] +#[cfg(feature = "alloc")] use crate::modules::collections::{HashMap, HashSet}; -#[cfg(feature = "std")] +#[cfg(feature = "alloc")] use crate::runtime::EnvProtocolCaller; use crate::runtime::{ FromValue, Function, Iterator, Object, OwnedTuple, Protocol, Value, Vec, VmResult, @@ -1075,7 +1074,7 @@ fn collect_vec_deque(it: Iterator) -> VmResult { /// assert_eq!((0..3).iter().collect::(), HashSet::from([0, 1, 2])); /// ``` #[rune::function(instance, path = collect::)] -#[cfg(feature = "std")] +#[cfg(feature = "alloc")] fn collect_hash_set(it: Iterator) -> VmResult { let mut caller = EnvProtocolCaller; HashSet::from_iter(it, &mut caller) @@ -1093,7 +1092,7 @@ fn collect_hash_set(it: Iterator) -> VmResult { /// assert_eq!(actual, expected); /// ``` #[rune::function(instance, path = collect::)] -#[cfg(feature = "std")] +#[cfg(feature = "alloc")] fn collect_hash_map(it: Iterator) -> VmResult { let mut caller = EnvProtocolCaller; HashMap::from_iter(it, &mut caller) @@ -1108,7 +1107,9 @@ fn collect_hash_map(it: Iterator) -> VmResult { /// ``` #[rune::function(instance, path = collect::)] fn collect_tuple(it: Iterator) -> VmResult { - VmResult::Ok(OwnedTuple::from(vm_try!(it.collect::()))) + VmResult::Ok(vm_try!(OwnedTuple::try_from( + vm_try!(it.collect::()) + ))) } /// Collect the iterator as an [`Object`]. @@ -1121,11 +1122,11 @@ fn collect_tuple(it: Iterator) -> VmResult { #[rune::function(instance, path = collect::)] fn collect_object(mut it: Iterator) -> VmResult { let (cap, _) = it.size_hint(); - let mut object = Object::with_capacity(cap); + let mut object = vm_try!(Object::with_capacity(cap)); while let Some(value) = vm_try!(it.next()) { let (key, value) = vm_try!(<(String, Value)>::from_value(value)); - object.insert(key, value); + vm_try!(object.insert(key, value)); } VmResult::Ok(object) @@ -1145,11 +1146,11 @@ fn collect_string(mut it: Iterator) -> VmResult { while let Some(value) = vm_try!(it.next()) { match value { Value::Char(c) => { - string.push(c); + vm_try!(string.try_push(c)); } Value::String(s) => { let s = vm_try!(s.into_ref()); - string.push_str(s.as_str()); + vm_try!(string.try_push_str(s.as_str())); } value => { return VmResult::expected::(vm_try!(value.type_info())); diff --git a/crates/rune/src/modules/object.rs b/crates/rune/src/modules/object.rs index 89adb02a5..109eb5699 100644 --- a/crates/rune/src/modules/object.rs +++ b/crates/rune/src/modules/object.rs @@ -5,6 +5,7 @@ use core::cmp::Ordering; use crate::no_std::prelude::*; use crate as rune; +use crate::alloc::TryClone; use crate::runtime::{EnvProtocolCaller, Iterator, Object, Protocol, Value, VmResult}; use crate::{ContextError, Module}; @@ -15,10 +16,10 @@ pub fn module() -> Result { m.ty::()?; m.function_meta(Object::new__meta)?; - m.function_meta(Object::with_capacity__meta)?; + m.function_meta(Object::rune_with_capacity)?; m.function_meta(Object::len__meta)?; m.function_meta(Object::is_empty__meta)?; - m.function_meta(Object::insert__meta)?; + m.function_meta(Object::rune_insert)?; m.function_meta(remove)?; m.function_meta(Object::clear__meta)?; m.function_meta(contains_key)?; @@ -97,9 +98,18 @@ fn get(object: &Object, key: &str) -> Option { /// ``` #[rune::function(instance)] #[inline] -fn keys(object: &Object) -> Iterator { - let iter = object.keys().cloned().collect::>().into_iter(); - Iterator::from_double_ended("std::object::Keys", iter) +fn keys(object: 
&Object) -> VmResult { + // TODO: implement as lazy iteration. + let mut keys = Vec::new(); + + for key in object.keys() { + keys.push(vm_try!(key.try_clone())); + } + + VmResult::Ok(Iterator::from_double_ended( + "std::object::Keys", + keys.into_iter(), + )) } /// An iterator visiting all values in arbitrary order. diff --git a/crates/rune/src/modules/ops.rs b/crates/rune/src/modules/ops.rs index 81805b10a..1c4b7f201 100644 --- a/crates/rune/src/modules/ops.rs +++ b/crates/rune/src/modules/ops.rs @@ -2,16 +2,17 @@ use core::cmp::Ordering; +use once_cell::sync::OnceCell; +use rune_alloc::hash_map::RandomState; + use crate as rune; -#[cfg(feature = "std")] -use crate::runtime::Hasher; use crate::runtime::{ - ControlFlow, EnvProtocolCaller, Function, Generator, GeneratorState, Iterator, Range, + ControlFlow, EnvProtocolCaller, Function, Generator, GeneratorState, Hasher, Iterator, Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive, Value, Vm, VmResult, }; use crate::{ContextError, Module}; -#[cfg(feature = "std")] -use std::collections::hash_map::RandomState; + +static STATE: OnceCell = OnceCell::new(); #[rune::module(::std::ops)] /// Overloadable operators. @@ -100,7 +101,6 @@ pub fn module() -> Result { m.function_meta(eq)?; m.function_meta(partial_cmp)?; m.function_meta(cmp)?; - #[cfg(feature = "std")] m.function_meta(hash)?; Ok(m) } @@ -207,11 +207,6 @@ fn cmp(lhs: Value, rhs: Value) -> VmResult { Value::cmp(&lhs, &rhs) } -#[cfg(feature = "std")] -lazy_static::lazy_static! { - static ref STATE: RandomState = RandomState::new(); -} - /// Hashes the given value. /// /// For non-builtin types this uses the [`HASH`] protocol. @@ -235,9 +230,9 @@ lazy_static::lazy_static! { /// assert_eq!(hash([1, 2]), hash((1, 2))); /// ``` #[rune::function] -#[cfg(feature = "std")] fn hash(value: Value) -> VmResult { - let mut hasher = Hasher::new_with(&*STATE); + let state = STATE.get_or_init(RandomState::new); + let mut hasher = Hasher::new_with(state); vm_try!(Value::hash_with( &value, diff --git a/crates/rune/src/modules/option.rs b/crates/rune/src/modules/option.rs index e43d73289..77e3e686b 100644 --- a/crates/rune/src/modules/option.rs +++ b/crates/rune/src/modules/option.rs @@ -1,7 +1,5 @@ //! The `std::option` module. 
-use core::fmt; - use crate as rune; use crate::runtime::{ControlFlow, Formatter, Function, Iterator, Panic, Shared, Value, VmResult}; use crate::{ContextError, Module}; @@ -73,11 +71,7 @@ fn expect(option: Option, message: Value) -> VmResult { Some(some) => VmResult::Ok(some), None => { let mut f = Formatter::new(); - - if let Err(fmt::Error) = vm_try!(message.string_display(&mut f)) { - return VmResult::err(Panic::msg("Failed to format message")); - } - + vm_try!(message.string_display(&mut f)); VmResult::err(Panic::custom(f.into_string())) } } @@ -259,8 +253,8 @@ fn transpose(this: Option) -> VmResult { let value = match this { Some(value) => value, None => { - let none = Value::from(Shared::new(Option::::None)); - let result = Value::from(Shared::new(Result::::Ok(none))); + let none = Value::from(vm_try!(Shared::new(Option::::None))); + let result = Value::from(vm_try!(Shared::new(Result::::Ok(none)))); return VmResult::Ok(result); } }; @@ -269,12 +263,14 @@ fn transpose(this: Option) -> VmResult { match result { Ok(ok) => { - let some = Value::from(Shared::new(Option::::Some(ok.clone()))); - let result = Value::from(Shared::new(Result::::Ok(some))); + let some = Value::from(vm_try!(Shared::new(Option::::Some(ok.clone())))); + let result = Value::from(vm_try!(Shared::new(Result::::Ok(some)))); VmResult::Ok(result) } Err(err) => { - let result = Value::from(Shared::new(Result::::Err(err.clone()))); + let result = Value::from(vm_try!(Shared::new(Result::::Err( + err.clone() + )))); VmResult::Ok(result) } } @@ -412,9 +408,9 @@ fn ok_or_else(this: Option, err: Function) -> VmResult) -> ControlFlow { - match this { +pub(crate) fn option_try(this: Option) -> VmResult { + VmResult::Ok(match this { Some(value) => ControlFlow::Continue(value), - None => ControlFlow::Break(Value::Option(Shared::new(None))), - } + None => ControlFlow::Break(Value::Option(vm_try!(Shared::new(None)))), + }) } diff --git a/crates/rune/src/modules/result.rs b/crates/rune/src/modules/result.rs index 9154fe5f6..117da4d94 100644 --- a/crates/rune/src/modules/result.rs +++ b/crates/rune/src/modules/result.rs @@ -1,8 +1,7 @@ //! The `std::result` module. 
-use core::fmt; - use crate as rune; +use crate::alloc::fmt::TryWrite; use crate::runtime::{ControlFlow, Formatter, Function, Panic, Shared, Value, VmResult}; use crate::{ContextError, Module}; @@ -194,17 +193,9 @@ fn expect(result: Result, message: Value) -> VmResult { Ok(value) => VmResult::Ok(value), Err(err) => { let mut f = Formatter::new(); - - if let Err(fmt::Error) = vm_try!(message.string_display(&mut f)) { - return VmResult::err(Panic::msg("Failed to format message")); - } - - f.push_str(": "); - - if let Err(fmt::Error) = vm_try!(err.string_debug(&mut f)) { - return VmResult::err(Panic::msg("Failed to format error")); - } - + vm_try!(message.string_display(&mut f)); + vm_try!(f.try_write_str(": ")); + vm_try!(err.string_debug(&mut f)); VmResult::err(Panic::custom(f.into_string())) } } @@ -273,9 +264,9 @@ fn map(this: &Result, then: Function) -> VmResult) -> ControlFlow { - match this { +pub(crate) fn result_try(this: Result) -> VmResult { + VmResult::Ok(match this { Ok(value) => ControlFlow::Continue(value), - Err(error) => ControlFlow::Break(Value::Result(Shared::new(Err(error)))), - } + Err(error) => ControlFlow::Break(Value::Result(vm_try!(Shared::new(Err(error))))), + }) } diff --git a/crates/rune/src/modules/string.rs b/crates/rune/src/modules/string.rs index 7cbf0537b..dc66145bb 100644 --- a/crates/rune/src/modules/string.rs +++ b/crates/rune/src/modules/string.rs @@ -2,15 +2,13 @@ use core::char; use core::cmp::Ordering; -use core::fmt::{self, Write}; use core::num::{ParseFloatError, ParseIntError}; -use alloc::string::FromUtf8Error; - -use crate::no_std::prelude::*; - use crate as rune; -use crate::runtime::{Bytes, Formatter, Iterator, Panic, Protocol, Value, VmErrorKind, VmResult}; +use crate::alloc::string::FromUtf8Error; +use crate::alloc::{String, TryClone, TryToOwned, TryWrite, Vec}; +use crate::no_std::std; +use crate::runtime::{Bytes, Formatter, Iterator, Panic, Value, VmErrorKind, VmResult}; use crate::{Any, ContextError, Module}; /// Construct the `std::string` module. @@ -55,9 +53,9 @@ pub fn module() -> Result { module.function_meta(parse_int)?; module.function_meta(parse_char)?; - module.associated_function(Protocol::ADD, add)?; - module.associated_function(Protocol::ADD_ASSIGN, String::push_str)?; - module.associated_function(Protocol::INDEX_GET, string_index_get)?; + module.function_meta(add)?; + module.function_meta(add_assign)?; + module.function_meta(index_get)?; Ok(module) } @@ -67,8 +65,9 @@ struct NotCharBoundary(()); impl NotCharBoundary { #[rune::function(instance, protocol = STRING_DISPLAY)] - fn string_display(&self, f: &mut Formatter) -> fmt::Result { - write!(f, "index outside of character boundary") + fn string_display(&self, f: &mut Formatter) -> VmResult<()> { + vm_write!(f, "index outside of character boundary"); + VmResult::Ok(()) } fn install(m: &mut Module) -> Result<(), ContextError> { @@ -128,8 +127,9 @@ impl NotCharBoundary { /// [`&str`]: prim@str "&str" /// [`into_bytes`]: String::into_bytes #[rune::function(free, path = String::from_utf8)] -fn from_utf8(bytes: &[u8]) -> Result { - String::from_utf8(bytes.to_vec()) +fn from_utf8(bytes: &[u8]) -> VmResult> { + let vec = vm_try!(Vec::try_from(bytes)); + VmResult::Ok(String::from_utf8(vec)) } /// Returns a byte slice of this `String`'s contents. 
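`from_utf8` above is the clearest illustration of how this diff keeps two error channels apart, the same split used in the `option` and `result` changes earlier: allocation failure is a VM-level error in the outer `VmResult`, while invalid UTF-8 remains an ordinary value the script can match on. Condensed from the change above, with the elided generics written out:

```rust
use rune::alloc::string::FromUtf8Error;
use rune::alloc::{String, Vec};
use rune::runtime::VmResult;
use rune::vm_try;

// Outer VmResult: copying the input bytes can fail on allocation.
// Inner Result: UTF-8 validation, handed back to the script as a value.
fn from_utf8(bytes: &[u8]) -> VmResult<Result<String, FromUtf8Error>> {
    let vec = vm_try!(Vec::try_from(bytes));
    VmResult::Ok(String::from_utf8(vec))
}
```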
@@ -164,13 +164,13 @@ fn as_bytes(s: &str) -> Bytes { /// assert_eq!(s, "hello"); /// ``` #[rune::function(free, path = String::from)] -fn string_from(value: &str) -> String { - String::from(value) +fn string_from(value: &str) -> VmResult { + VmResult::Ok(vm_try!(String::try_from(value))) } #[rune::function(free, path = String::from_str)] -fn string_from_str(value: &str) -> String { - String::from(value) +fn string_from_str(value: &str) -> VmResult { + VmResult::Ok(vm_try!(String::try_from(value))) } /// Creates a new empty `String`. @@ -234,8 +234,8 @@ fn string_new() -> String { /// s.push('a'); /// ``` #[rune::function(free, path = String::with_capacity)] -fn string_with_capacity(capacity: usize) -> String { - String::with_capacity(capacity) +fn string_with_capacity(capacity: usize) -> VmResult { + VmResult::Ok(vm_try!(String::try_with_capacity(capacity))) } #[rune::function(instance)] @@ -397,8 +397,9 @@ fn contains(this: &str, other: &str) -> bool { /// assert_eq!("abc123", s); /// ``` #[rune::function(instance)] -fn push(this: &mut String, c: char) { - this.push(c); +fn push(this: &mut String, c: char) -> VmResult<()> { + vm_try!(this.try_push(c)); + VmResult::Ok(()) } /// Appends a given string slice onto the end of this `String`. @@ -415,8 +416,9 @@ fn push(this: &mut String, c: char) { /// assert_eq!("foobar", s); /// ``` #[rune::function(instance)] -fn push_str(this: &mut String, other: &str) { - this.push_str(other); +fn push_str(this: &mut String, other: &str) -> VmResult<()> { + vm_try!(this.try_push_str(other)); + VmResult::Ok(()) } /// Reserves capacity for at least `additional` bytes more than the current @@ -460,8 +462,9 @@ fn push_str(this: &mut String, other: &str) { /// assert_eq!(capacity, s.capacity()); /// ``` #[rune::function(instance)] -fn reserve(this: &mut String, additional: usize) { - this.reserve(additional); +fn reserve(this: &mut String, additional: usize) -> VmResult<()> { + vm_try!(this.try_reserve(additional)); + VmResult::Ok(()) } /// Reserves the minimum capacity for at least `additional` bytes more than the @@ -507,8 +510,9 @@ fn reserve(this: &mut String, additional: usize) { /// assert_eq!(capacity, s.capacity()); /// ``` #[rune::function(instance)] -fn reserve_exact(this: &mut String, additional: usize) { - this.reserve_exact(additional); +fn reserve_exact(this: &mut String, additional: usize) -> VmResult<()> { + vm_try!(this.try_reserve_exact(additional)); + VmResult::Ok(()) } /// Returns a byte slice of this `String`'s contents while moving the string. @@ -529,7 +533,7 @@ fn reserve_exact(this: &mut String, additional: usize) { /// ``` #[rune::function(instance)] fn into_bytes(s: String) -> Bytes { - Bytes::from_vec(s.into_bytes()) + Bytes::from_vec(std::Vec::from(s.into_bytes())) } /// Checks that `index`-th byte is the first byte in a UTF-8 code point sequence @@ -602,8 +606,8 @@ fn char_at(s: &str, index: usize) -> Option { /// ``` #[rune::function(instance)] #[allow(clippy::ptr_arg)] -fn clone(s: &String) -> String { - s.clone() +fn clone(s: &String) -> VmResult { + VmResult::Ok(vm_try!(s.try_clone())) } /// Shrinks the capacity of this `String` to match its length. 
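The `push`, `push_str`, and `reserve` conversions above compose naturally. A short sketch of fallible string building using the `try_*` methods this diff migrates to, assuming `rune::alloc::String`:

```rust
use rune::alloc::String;
use rune::runtime::VmResult;
use rune::vm_try;

// Every growth point is explicit: the capacity, the string body, and the
// trailing character can each fail on allocation.
fn shout(word: &str) -> VmResult<String> {
    let mut out = vm_try!(String::try_with_capacity(word.len() + 1));
    vm_try!(out.try_push_str(word));
    vm_try!(out.try_push('!'));
    VmResult::Ok(out)
}
```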
@@ -622,8 +626,9 @@ fn clone(s: &String) -> String { /// assert_eq!(3, s.capacity()); /// ``` #[rune::function(instance)] -fn shrink_to_fit(s: &mut String) { - s.shrink_to_fit(); +fn shrink_to_fit(s: &mut String) -> VmResult<()> { + vm_try!(s.try_shrink_to_fit()); + VmResult::Ok(()) } /// An iterator over substrings of this string slice, separated by @@ -734,17 +739,34 @@ fn shrink_to_fit(s: &mut String) { /// [`split_whitespace`]: str::split_whitespace #[rune::function(instance)] fn split(this: &str, value: Value) -> VmResult { + const NAME: &str = "std::str::Split"; + let lines = match value { - Value::String(s) => this - .split(vm_try!(s.borrow_ref()).as_str()) - .map(String::from) - .collect::>(), - Value::Char(pat) => this.split(pat).map(String::from).collect::>(), + Value::String(s) => { + let mut out = Vec::new(); + + for value in this.split(vm_try!(s.borrow_ref()).as_str()) { + let value = vm_try!(String::try_from(value)); + vm_try!(out.try_push(value)); + } + + out + } + Value::Char(pat) => { + let mut out = Vec::new(); + + for value in this.split(pat) { + let value = vm_try!(String::try_from(value)); + vm_try!(out.try_push(value)); + } + + out + } Value::Function(f) => { let f = vm_try!(f.borrow_ref()); let mut err = None; - let lines = this.split(|c: char| match f.call::<_, bool>((c,)) { + let iter = this.split(|c: char| match f.call::<_, bool>((c,)) { VmResult::Ok(b) => b, VmResult::Err(e) => { if err.is_none() { @@ -755,13 +777,18 @@ fn split(this: &str, value: Value) -> VmResult { } }); - let lines = lines.map(String::from).collect::>(); + let mut out = Vec::new(); + + for value in iter { + let value = vm_try!(String::try_from(value)); + vm_try!(out.try_push(value)); + } if let Some(e) = err.take() { return VmResult::Err(e); } - lines + out } actual => { return VmResult::err([ @@ -771,10 +798,7 @@ fn split(this: &str, value: Value) -> VmResult { } }; - VmResult::Ok(Iterator::from_double_ended( - "std::str::Split", - lines.into_iter(), - )) + VmResult::Ok(Iterator::from_double_ended(NAME, lines.into_iter())) } /// Returns a string slice with leading and trailing whitespace removed. @@ -792,8 +816,8 @@ fn split(this: &str, value: Value) -> VmResult { /// assert_eq!("Hello\tworld", s.trim()); /// ``` #[rune::function(instance)] -fn trim(this: &str) -> String { - this.trim().to_owned() +fn trim(this: &str) -> VmResult { + VmResult::Ok(vm_try!(this.trim().try_to_owned())) } /// Returns a string slice with trailing whitespace removed. @@ -827,16 +851,8 @@ fn trim(this: &str) -> String { /// assert!(Some('ת') == s.trim_end().chars().rev().next()); /// ``` #[rune::function(instance)] -fn trim_end(this: &str) -> String { - this.trim_end().to_owned() -} - -/// The add operation for strings. -fn add(a: &str, b: &str) -> String { - let mut string = String::with_capacity(a.len() + b.len()); - string.push_str(a); - string.push_str(b); - string +fn trim_end(this: &str) -> VmResult { + VmResult::Ok(vm_try!(this.trim_end().try_to_owned())) } /// Returns `true` if `self` has a length of zero bytes. @@ -881,8 +897,8 @@ fn is_empty(this: &str) -> bool { /// assert_eq!(s, s.replace("cookie monster", "little lamb")); /// ``` #[rune::function(instance)] -fn replace(a: &str, from: &str, to: &str) -> String { - a.replace(from, to) +fn replace(a: &str, from: &str, to: &str) -> VmResult { + VmResult::Ok(vm_try!(String::try_from(a.replace(from, to)))) } /// Returns an iterator over the [`char`]s of a string slice. 
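`trim` and `trim_end` above use `TryToOwned`, the fallible analogue of `ToOwned`: the borrow itself stays free, only materializing the owned `String` can fail. A sketch assuming the trait as imported elsewhere in this diff:

```rust
use rune::alloc::{String, TryToOwned};
use rune::runtime::VmResult;
use rune::vm_try;

// The borrow (`trim`) cannot fail; the owned copy can.
fn trimmed(input: &str) -> VmResult<String> {
    VmResult::Ok(vm_try!(input.trim().try_to_owned()))
}
```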
@@ -934,7 +950,7 @@ fn replace(a: &str, from: &str, to: &str) -> String { /// ``` #[rune::function(instance)] fn chars(s: &str) -> Iterator { - let iter = s.chars().collect::>().into_iter(); + let iter = s.chars().collect::>().into_iter(); Iterator::from_double_ended("std::str::Chars", iter) } @@ -1002,11 +1018,28 @@ fn get(this: &str, key: Value) -> VmResult> { return VmResult::Ok(None); }; - VmResult::Ok(Some(slice.to_owned())) + VmResult::Ok(Some(vm_try!(slice.try_to_owned()))) +} + +/// The add operation for strings. +#[rune::function(instance, protocol = ADD)] +fn add(a: &str, b: &str) -> VmResult { + let mut string = vm_try!(String::try_with_capacity(a.len() + b.len())); + vm_try!(string.try_push_str(a)); + vm_try!(string.try_push_str(b)); + VmResult::Ok(string) +} + +/// The add assign operation for strings. +#[rune::function(instance, protocol = ADD_ASSIGN)] +fn add_assign(this: &mut String, other: &str) -> VmResult<()> { + vm_try!(this.try_push_str(other)); + VmResult::Ok(()) } /// Get a specific string index. -fn string_index_get(s: &str, key: Value) -> VmResult { +#[rune::function(instance, protocol = INDEX_GET)] +fn index_get(s: &str, key: Value) -> VmResult { match vm_try!(__rune_fn__get(s, key)) { Some(slice) => VmResult::Ok(slice), None => VmResult::err(Panic::custom("missing string slice")), diff --git a/crates/rune/src/modules/vec.rs b/crates/rune/src/modules/vec.rs index c4ba74526..45e0a3616 100644 --- a/crates/rune/src/modules/vec.rs +++ b/crates/rune/src/modules/vec.rs @@ -1,9 +1,9 @@ //! The `std::vec` module. use core::cmp::Ordering; -use core::fmt; use crate as rune; +use crate::alloc::TryClone; #[cfg(feature = "std")] use crate::runtime::Hasher; use crate::runtime::{ @@ -119,8 +119,8 @@ fn vec_new() -> Vec { /// assert!(vec.capacity() >= 11); /// ``` #[rune::function(free, path = Vec::with_capacity)] -fn vec_with_capacity(capacity: usize) -> Vec { - Vec::with_capacity(capacity) +fn vec_with_capacity(capacity: usize) -> VmResult { + VmResult::Ok(vm_try!(Vec::with_capacity(capacity))) } /// Returns the number of elements in the vector, also referred to as its @@ -370,8 +370,9 @@ fn pop(this: &mut Vec) -> Option { /// assert_eq!(vec, [1, 2, 3]); /// ``` #[rune::function(instance)] -fn push(this: &mut Vec, value: Value) { - this.push(value); +fn push(this: &mut Vec, value: Value) -> VmResult<()> { + vm_try!(this.push(value)); + VmResult::Ok(()) } /// Removes and returns the element at position `index` within the vector, @@ -440,7 +441,7 @@ fn insert(this: &mut Vec, index: usize, value: Value) -> VmResult<()> { }); } - this.insert(index, value); + vm_try!(this.insert(index, value)); VmResult::Ok(()) } @@ -458,8 +459,8 @@ fn insert(this: &mut Vec, index: usize, value: Value) -> VmResult<()> { /// assert_eq!(b, [1, 2, 3, 4]); /// ``` #[rune::function(instance)] -fn clone(this: &Vec) -> Vec { - this.clone() +fn clone(this: &Vec) -> VmResult { + VmResult::Ok(vm_try!(this.try_clone())) } /// Construct an iterator over the tuple. 
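The `ADD`, `ADD_ASSIGN`, and `INDEX_GET` changes above replace direct `associated_function(Protocol::..., ...)` registration with protocols declared on the function itself plus `function_meta`. A condensed sketch of the new style, mirroring the `add` operator above (note that, as the `index_get` change shows, the attribute renames the raw function to `__rune_fn__*`, which is why `index_get` calls `__rune_fn__get`):

```rust
use rune::alloc::String;
use rune::runtime::VmResult;
use rune::{vm_try, ContextError, Module};

// The protocol is now declared on the function itself...
#[rune::function(instance, protocol = ADD)]
fn add(a: &str, b: &str) -> VmResult<String> {
    let mut out = vm_try!(String::try_with_capacity(a.len() + b.len()));
    vm_try!(out.try_push_str(a));
    vm_try!(out.try_push_str(b));
    VmResult::Ok(out)
}

// ...so installation only needs the generated metadata.
fn install(module: &mut Module) -> Result<(), ContextError> {
    module.function_meta(add)?;
    Ok(())
}
```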
@@ -547,7 +548,7 @@ fn index_set(this: &mut Vec, index: usize, value: Value) -> VmResult<()> { /// assert_eq!(format!("{:?}", vec), "[1, 2, 3]"); /// ``` #[rune::function(instance, protocol = STRING_DEBUG)] -fn string_debug(this: &Vec, f: &mut Formatter) -> VmResult { +fn string_debug(this: &Vec, f: &mut Formatter) -> VmResult<()> { Vec::string_debug_with(this, f, &mut EnvProtocolCaller) } diff --git a/crates/rune/src/no_std.rs b/crates/rune/src/no_std/mod.rs similarity index 80% rename from crates/rune/src/no_std.rs rename to crates/rune/src/no_std/mod.rs index 9da8199e9..e48ff609c 100644 --- a/crates/rune/src/no_std.rs +++ b/crates/rune/src/no_std/mod.rs @@ -22,7 +22,7 @@ macro_rules! alloc { ($($vis:vis use $(::$tail:ident)+;)*) => { $( #[allow(unused)] - $vis use alloc $(::$tail)+; + $vis use ::rust_alloc $(::$tail)+; )* } } @@ -45,6 +45,14 @@ alloc! { pub(crate) use ::core::fmt; +pub(crate) mod std { + alloc! { + pub(crate) use ::boxed::Box; + pub(crate) use ::vec::Vec; + pub(crate) use ::string::String; + } +} + pub(crate) mod prelude { alloc! { pub(crate) use ::string::String; @@ -57,9 +65,9 @@ pub(crate) mod prelude { #[allow(unused)] pub(crate) mod collections { - pub(crate) use alloc::collections::{btree_map, BTreeMap}; - pub(crate) use alloc::collections::{btree_set, BTreeSet}; - pub(crate) use alloc::collections::{vec_deque, VecDeque}; + pub(crate) use ::rust_alloc::collections::{btree_map, BTreeMap}; + pub(crate) use ::rust_alloc::collections::{btree_set, BTreeSet}; + pub(crate) use ::rust_alloc::collections::{vec_deque, VecDeque}; #[cfg(not(feature = "std"))] pub(crate) use hashbrown::{hash_map, HashMap}; #[cfg(not(feature = "std"))] @@ -71,7 +79,7 @@ pub(crate) mod collections { } #[cfg(feature = "std")] -pub(crate) use std::io; +pub(crate) use ::std::io; #[cfg(not(feature = "std"))] pub(crate) mod io; @@ -83,7 +91,7 @@ pub(crate) use rune_core::error; pub(crate) mod path; #[cfg(feature = "std")] -pub(crate) use std::path; +pub(crate) use ::std::path; #[cfg(not(feature = "std"))] extern "C" { @@ -98,5 +106,5 @@ pub(crate) fn abort() -> ! { #[cfg(feature = "std")] pub(crate) fn abort() -> ! { - std::process::abort() + ::std::process::abort() } diff --git a/crates/rune/src/no_std/path.rs b/crates/rune/src/no_std/path.rs index 60f25f8b3..e7a970159 100644 --- a/crates/rune/src/no_std/path.rs +++ b/crates/rune/src/no_std/path.rs @@ -3,9 +3,9 @@ use core::convert::AsRef; use core::fmt; use core::ops::Deref; -use alloc::borrow::Borrow; -use alloc::borrow::ToOwned; -use alloc::boxed::Box; +use ::rust_alloc::borrow::Borrow; +use ::rust_alloc::borrow::ToOwned; +use ::rust_alloc::boxed::Box; #[derive(Debug)] #[repr(transparent)] diff --git a/crates/rune/src/parse.rs b/crates/rune/src/parse.rs index f0465723f..a2d1ac9ea 100644 --- a/crates/rune/src/parse.rs +++ b/crates/rune/src/parse.rs @@ -22,10 +22,9 @@ pub(crate) use self::resolve::{Resolve, ResolveContext}; use crate::compile; use crate::SourceId; -/// Parse the given input as the given type that implements -/// [Parse][crate::parse::Parse]. The specified `source_id` will be used when -/// referencing any parsed elements. `shebang` indicates if the parser should -/// try to parse a shebang or not. +/// Parse the given input as the given type that implements [Parse]. The +/// specified `source_id` will be used when referencing any parsed elements. +/// `shebang` indicates if the parser should try to parse a shebang or not. 
/// /// This will raise an error through [Parser::eof] if the specified `source` is /// not fully consumed by the parser. diff --git a/crates/rune/src/query/query.rs b/crates/rune/src/query/query.rs index e33c3f74a..7a11b4b1a 100644 --- a/crates/rune/src/query/query.rs +++ b/crates/rune/src/query/query.rs @@ -8,14 +8,15 @@ use crate::no_std::prelude::*; use crate::no_std::rc::Rc; use crate::no_std::sync::Arc; +use crate::alloc::AllocError; use crate::ast::{Span, Spanned}; use crate::compile::context::ContextMeta; use crate::compile::ir; use crate::compile::meta::{self, FieldMeta}; use crate::compile::{ self, CompileVisitor, ComponentRef, Doc, DynLocation, ErrorKind, ImportStep, IntoComponent, - Item, ItemBuf, ItemId, ItemMeta, Located, Location, ModId, ModMeta, Names, Pool, Prelude, - SourceLoader, SourceMeta, UnitBuilder, Visibility, WithSpan, + Item, ItemBuf, ItemId, ItemMeta, Located, Location, MetaError, ModId, ModMeta, Names, Pool, + Prelude, SourceLoader, SourceMeta, UnitBuilder, Visibility, WithSpan, }; use crate::hir; use crate::indexing::{self, FunctionAst, Indexed, Items}; @@ -453,7 +454,8 @@ impl<'a, 'arena> Query<'a, 'arena> { self.index_and_build(indexing::Entry { item_meta: item, indexed: Indexed::Module, - }); + })?; + Ok(query_mod) } @@ -462,9 +464,9 @@ impl<'a, 'arena> Query<'a, 'arena> { &mut self, item_id: NonZeroId, source_id: SourceId, - spanned: Span, + span: Span, ) -> compile::Result { - let location = Location::new(source_id, spanned); + let location = Location::new(source_id, span); let module = self.pool.alloc_module(ModMeta { #[cfg(feature = "emit")] @@ -485,7 +487,7 @@ impl<'a, 'arena> Query<'a, 'arena> { }, ); - self.insert_name(ItemId::default()); + self.insert_name(ItemId::default()).with_span(span)?; Ok(module) } @@ -507,12 +509,8 @@ impl<'a, 'arena> Query<'a, 'arena> { } /// Insert the given compile meta. - #[allow(clippy::result_large_err)] - pub(crate) fn insert_meta( - &mut self, - meta: meta::Meta, - ) -> Result<&ItemMeta, compile::error::MetaConflict> { - self.visitor.register_meta(meta.as_meta_ref(self.pool)); + pub(crate) fn insert_meta(&mut self, meta: meta::Meta) -> Result<&ItemMeta, MetaError> { + self.visitor.register_meta(meta.as_meta_ref(self.pool))?; let meta = match self .inner @@ -520,11 +518,13 @@ impl<'a, 'arena> Query<'a, 'arena> { .entry((meta.item_meta.item, meta.parameters)) { hash_map::Entry::Occupied(e) => { - return Err(compile::error::MetaConflict { - current: meta.info(self.pool), - existing: e.get().info(self.pool), - parameters: meta.parameters, - }); + return Err(MetaError::new( + compile::error::MetaErrorKind::MetaConflict { + current: meta.info(self.pool), + existing: e.get().info(self.pool), + parameters: meta.parameters, + }, + )); } hash_map::Entry::Vacant(e) => e.insert(meta), }; @@ -635,21 +635,24 @@ impl<'a, 'arena> Query<'a, 'arena> { /// Index the given entry. It is not allowed to overwrite other entries. #[tracing::instrument(skip_all)] - pub(crate) fn index(&mut self, entry: indexing::Entry) { + pub(crate) fn index(&mut self, entry: indexing::Entry) -> compile::Result<()> { tracing::trace!(item = ?self.pool.item(entry.item_meta.item)); - self.insert_name(entry.item_meta.item); + self.insert_name(entry.item_meta.item) + .with_span(entry.item_meta.location.span)?; self.inner .indexed .entry(entry.item_meta.item) .or_default() .push(entry); + + Ok(()) } /// Same as `index`, but also queues the indexed entry up for building. 
#[tracing::instrument(skip_all)] - pub(crate) fn index_and_build(&mut self, entry: indexing::Entry) { + pub(crate) fn index_and_build(&mut self, entry: indexing::Entry) -> compile::Result<()> { self.set_used(&entry.item_meta); self.inner.queue.push_back(BuildEntry { @@ -657,7 +660,8 @@ impl<'a, 'arena> Query<'a, 'arena> { build: Build::Query, }); - self.index(entry); + self.index(entry)?; + Ok(()) } /// Index a constant expression. @@ -674,7 +678,7 @@ impl<'a, 'arena> Query<'a, 'arena> { indexed: Indexed::ConstExpr(indexing::ConstExpr { ast: Box::new(ast.clone()), }), - }); + })?; Ok(()) } @@ -693,7 +697,7 @@ impl<'a, 'arena> Query<'a, 'arena> { indexed: Indexed::ConstBlock(indexing::ConstBlock { ast: Box::new(ast.clone()), }), - }); + })?; Ok(()) } @@ -710,7 +714,7 @@ impl<'a, 'arena> Query<'a, 'arena> { self.index(indexing::Entry { item_meta, indexed: Indexed::ConstFn(indexing::ConstFn { item_fn }), - }); + })?; Ok(()) } @@ -723,7 +727,7 @@ impl<'a, 'arena> Query<'a, 'arena> { self.index(indexing::Entry { item_meta, indexed: Indexed::Enum, - }); + })?; Ok(()) } @@ -740,7 +744,7 @@ impl<'a, 'arena> Query<'a, 'arena> { self.index(indexing::Entry { item_meta, indexed: Indexed::Struct(indexing::Struct { ast }), - }); + })?; Ok(()) } @@ -763,7 +767,7 @@ impl<'a, 'arena> Query<'a, 'arena> { ast, index, }), - }); + })?; Ok(()) } @@ -1108,7 +1112,7 @@ impl<'a, 'arena> Query<'a, 'arena> { self.index(indexing::Entry { item_meta, indexed: Indexed::Import(indexing::Import { wildcard, entry }), - }); + })?; Ok(()) } @@ -1530,9 +1534,10 @@ impl<'a, 'arena> Query<'a, 'arena> { } /// Insert the given name into the unit. - fn insert_name(&mut self, item: ItemId) { + fn insert_name(&mut self, item: ItemId) -> Result<(), AllocError> { let item = self.pool.item(item); - self.inner.names.insert(item); + self.inner.names.insert(item)?; + Ok(()) } /// Handle an imported indexed entry. 
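`insert_name` above narrows its error type to `AllocError`, and call sites attach position information with `WithSpan` as the error crosses into a `compile::Result`. A minimal sketch of that conversion step, assuming `WithSpan` is reachable under the paths this diff imports (it is used as `crate::compile::WithSpan` inside the crate):

```rust
use rune::alloc::AllocError;
use rune::ast::Span;
use rune::compile::{self, WithSpan};

// A spanless allocation error gains a source span on its way up into the
// compiler's error type, mirroring `insert_name(item).with_span(span)?`.
fn record(result: Result<(), AllocError>, span: Span) -> compile::Result<()> {
    result.with_span(span)?;
    Ok(())
}
```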
diff --git a/crates/rune/src/runtime.rs b/crates/rune/src/runtime.rs index ac18f9091..9850c05fe 100644 --- a/crates/rune/src/runtime.rs +++ b/crates/rune/src/runtime.rs @@ -70,9 +70,6 @@ pub use self::iterator::{Iterator, IteratorTrait}; mod type_; pub use self::type_::Type; -mod key; -pub use self::key::Key; - mod label; pub use self::label::DebugLabel; pub(crate) use self::label::Label; @@ -185,7 +182,7 @@ pub use self::fmt::Formatter; mod control_flow; pub use self::control_flow::ControlFlow; -#[cfg(feature = "std")] +#[cfg(feature = "alloc")] mod hasher; -#[cfg(feature = "std")] +#[cfg(feature = "alloc")] pub use self::hasher::Hasher; diff --git a/crates/rune/src/runtime/access.rs b/crates/rune/src/runtime/access.rs index 2519d1366..b07f39f93 100644 --- a/crates/rune/src/runtime/access.rs +++ b/crates/rune/src/runtime/access.rs @@ -380,7 +380,7 @@ impl<'a, T: ?Sized> BorrowRef<'a, T> { /// ``` /// use rune::runtime::{BorrowRef, Shared}; /// - /// let vec = Shared::>::new(vec![1, 2, 3, 4]); + /// let vec = Shared::>::new(vec![1, 2, 3, 4])?; /// let vec = vec.borrow_ref()?; /// let value: BorrowRef<[u32]> = BorrowRef::map(vec, |vec| &vec[0..2]); /// @@ -404,7 +404,7 @@ impl<'a, T: ?Sized> BorrowRef<'a, T> { /// ``` /// use rune::runtime::{BorrowRef, Shared}; /// - /// let vec = Shared::>::new(vec![1, 2, 3, 4]); + /// let vec = Shared::>::new(vec![1, 2, 3, 4])?; /// let vec = vec.borrow_ref()?; /// let mut value: Option> = BorrowRef::try_map(vec, |vec| vec.get(0..2)); /// @@ -518,7 +518,7 @@ impl<'a, T: ?Sized> BorrowMut<'a, T> { /// ``` /// use rune::runtime::{BorrowMut, Shared}; /// - /// let vec = Shared::>::new(vec![1, 2, 3, 4]); + /// let vec = Shared::>::new(vec![1, 2, 3, 4])?; /// let vec = vec.borrow_mut()?; /// let value: BorrowMut<[u32]> = BorrowMut::map(vec, |vec| &mut vec[0..2]); /// @@ -542,7 +542,7 @@ impl<'a, T: ?Sized> BorrowMut<'a, T> { /// ``` /// use rune::runtime::{BorrowMut, Shared}; /// - /// let vec = Shared::>::new(vec![1, 2, 3, 4]); + /// let vec = Shared::>::new(vec![1, 2, 3, 4])?; /// let vec = vec.borrow_mut()?; /// let mut value: Option> = BorrowMut::try_map(vec, |vec| vec.get_mut(0..2)); /// diff --git a/crates/rune/src/runtime/args.rs b/crates/rune/src/runtime/args.rs index e8d1aa4ec..442cdf946 100644 --- a/crates/rune/src/runtime/args.rs +++ b/crates/rune/src/runtime/args.rs @@ -1,5 +1,4 @@ -use crate::no_std::prelude::*; - +use crate::alloc::Vec; use crate::runtime::{Stack, ToValue, Value, VmResult}; /// Trait for converting arguments onto the stack. @@ -8,7 +7,7 @@ pub trait Args { fn into_stack(self, stack: &mut Stack) -> VmResult<()>; /// Convert arguments into a vector. - fn into_vec(self) -> VmResult>; + fn try_into_vec(self) -> VmResult>; /// The number of arguments. fn count(&self) -> usize; @@ -23,15 +22,16 @@ macro_rules! 
diff --git a/crates/rune/src/runtime/args.rs b/crates/rune/src/runtime/args.rs
index e8d1aa4ec..442cdf946 100644
--- a/crates/rune/src/runtime/args.rs
+++ b/crates/rune/src/runtime/args.rs
@@ -1,5 +1,4 @@
-use crate::no_std::prelude::*;
-
+use crate::alloc::Vec;
 use crate::runtime::{Stack, ToValue, Value, VmResult};
 
 /// Trait for converting arguments onto the stack.
@@ -8,7 +7,7 @@ pub trait Args {
     fn into_stack(self, stack: &mut Stack) -> VmResult<()>;
 
     /// Convert arguments into a vector.
-    fn into_vec(self) -> VmResult<Vec<Value>>;
+    fn try_into_vec(self) -> VmResult<Vec<Value>>;
 
     /// The number of arguments.
     fn count(&self) -> usize;
@@ -23,15 +22,16 @@ macro_rules! impl_into_args {
         #[allow(unused)]
         fn into_stack(self, stack: &mut Stack) -> VmResult<()> {
             let ($($value,)*) = self;
-            $(stack.push(vm_try!($value.to_value()));)*
+            $(vm_try!(stack.push(vm_try!($value.to_value())));)*
             VmResult::Ok(())
         }
 
         #[allow(unused)]
-        fn into_vec(self) -> VmResult<Vec<Value>> {
+        fn try_into_vec(self) -> VmResult<Vec<Value>> {
             let ($($value,)*) = self;
-            $(let $value = vm_try!(<$ty>::to_value($value));)*
-            VmResult::Ok(vec![$($value,)*])
+            let mut vec = vm_try!(Vec::try_with_capacity($count));
+            $(vm_try!(vec.try_push(vm_try!(<$ty>::to_value($value))));)*
+            VmResult::Ok(vec)
         }
 
         fn count(&self) -> usize {
@@ -46,16 +46,39 @@ repeat_macro!(impl_into_args);
 impl Args for Vec<Value> {
     fn into_stack(self, stack: &mut Stack) -> VmResult<()> {
         for value in self {
-            stack.push(value);
+            vm_try!(stack.push(value));
         }
 
         VmResult::Ok(())
     }
 
-    fn into_vec(self) -> VmResult<Vec<Value>> {
+    #[inline]
+    fn try_into_vec(self) -> VmResult<Vec<Value>> {
         VmResult::Ok(self)
     }
 
+    #[inline]
+    fn count(&self) -> usize {
+        self.len()
+    }
+}
+
+#[cfg(feature = "alloc")]
+impl Args for ::rust_alloc::vec::Vec<Value> {
+    fn into_stack(self, stack: &mut Stack) -> VmResult<()> {
+        for value in self {
+            vm_try!(stack.push(value));
+        }
+
+        VmResult::Ok(())
+    }
+
+    #[inline]
+    fn try_into_vec(self) -> VmResult<Vec<Value>> {
+        VmResult::Ok(vm_try!(Vec::try_from(self)))
+    }
+
+    #[inline]
     fn count(&self) -> usize {
         self.len()
     }
diff --git a/crates/rune/src/runtime/awaited.rs b/crates/rune/src/runtime/awaited.rs
index 05c388d3f..0a15de873 100644
--- a/crates/rune/src/runtime/awaited.rs
+++ b/crates/rune/src/runtime/awaited.rs
@@ -15,12 +15,12 @@ impl Awaited {
         match self {
             Self::Future(future) => {
                 let value = vm_try!(vm_try!(future.borrow_mut()).await.with_vm(vm));
-                vm.stack_mut().push(value);
+                vm_try!(vm.stack_mut().push(value));
             }
             Self::Select(select) => {
                 let (branch, value) = vm_try!(select.await.with_vm(vm));
-                vm.stack_mut().push(value);
-                vm.stack_mut().push(vm_try!(ToValue::to_value(branch)));
+                vm_try!(vm.stack_mut().push(value));
+                vm_try!(vm.stack_mut().push(vm_try!(ToValue::to_value(branch))));
             }
         }
diff --git a/crates/rune/src/runtime/bytes.rs b/crates/rune/src/runtime/bytes.rs
index 56dc22932..a0e1c8e45 100644
--- a/crates/rune/src/runtime/bytes.rs
+++ b/crates/rune/src/runtime/bytes.rs
@@ -355,7 +355,7 @@ mod tests {
     #[test]
     #[allow(clippy::let_and_return)]
     fn test_clone_issue() -> Result<(), Box<dyn std::error::Error>> {
-        let shared = Value::Bytes(Shared::new(Bytes::new()));
+        let shared = Value::Bytes(Shared::new(Bytes::new())?);
 
         let _ = {
             let shared = shared.into_bytes().into_result()?;
diff --git a/crates/rune/src/runtime/call.rs b/crates/rune/src/runtime/call.rs
index c4e21a933..211ae7373 100644
--- a/crates/rune/src/runtime/call.rs
+++ b/crates/rune/src/runtime/call.rs
@@ -25,12 +25,13 @@ impl Call {
     #[inline]
     pub(crate) fn call_with_vm(self, vm: Vm) -> VmResult<Value> {
         VmResult::Ok(match self {
-            Call::Stream => Value::from(Stream::new(vm)),
-            Call::Generator => Value::from(Generator::new(vm)),
+            Call::Stream => vm_try!(Value::try_from(Stream::new(vm))),
+            Call::Generator => vm_try!(Value::try_from(Generator::new(vm))),
             Call::Immediate => vm_try!(vm.complete()),
             Call::Async => {
                 let mut execution = vm.into_execution();
-                Value::from(Future::new(async move { execution.async_complete().await }))
+                let future = Future::new(async move { execution.async_complete().await });
+                vm_try!(Value::try_from(future))
             }
         })
     }
diff --git a/crates/rune/src/runtime/const_value.rs b/crates/rune/src/runtime/const_value.rs
index bc19c17cb..2eadcfb5c 100644
--- a/crates/rune/src/runtime/const_value.rs
+++ b/crates/rune/src/runtime/const_value.rs @@ -1,14 +1,14 @@ use serde::{Deserialize, Serialize}; use crate::no_std::collections::HashMap; -use crate::no_std::prelude::*; -use crate::no_std::vec; - +use crate::no_std::std; use crate::runtime::{ Bytes, FromValue, Object, OwnedTuple, Shared, ToValue, TypeInfo, Value, Vec, VmErrorKind, VmResult, }; +use crate::alloc::{Error, TryClone}; + /// A constant value. #[derive(Debug, Clone, Deserialize, Serialize)] pub enum ConstValue { @@ -25,17 +25,17 @@ pub enum ConstValue { /// An float constant. Float(f64), /// A string constant designated by its slot. - String(String), + String(std::String), /// A byte string. Bytes(Bytes), /// A vector of values. - Vec(vec::Vec), + Vec(std::Vec), /// An anonymous tuple. - Tuple(Box<[ConstValue]>), + Tuple(std::Box<[ConstValue]>), /// An anonymous object. - Object(HashMap), + Object(HashMap), /// An option. - Option(Option>), + Option(Option>), } impl ConstValue { @@ -44,47 +44,53 @@ impl ConstValue { /// We provide this associated method since a constant value can be /// converted into a value infallibly, which is not captured by the trait /// otherwise. - pub fn into_value(self) -> Value { - match self { + pub fn into_value(self) -> Result { + Ok(match self { Self::Byte(b) => Value::Byte(b), Self::Char(c) => Value::Char(c), Self::Bool(b) => Value::Bool(b), Self::Integer(n) => Value::Integer(n), Self::Float(n) => Value::Float(n), - Self::String(s) => Value::String(Shared::new(s)), - Self::Bytes(b) => Value::Bytes(Shared::new(b)), - Self::Option(option) => { - Value::Option(Shared::new(option.map(|some| some.into_value()))) + Self::String(string) => { + let string = rune_alloc::String::try_from(string)?; + Value::String(Shared::new(string)?) } + Self::Bytes(b) => Value::Bytes(Shared::new(b)?), + Self::Option(option) => Value::Option(Shared::new(match option { + Some(some) => Some(some.into_value()?), + None => None, + })?), Self::Vec(vec) => { - let mut v = Vec::with_capacity(vec.len()); + let mut v = Vec::with_capacity(vec.len())?; for value in vec { - v.push(value.into_value()); + v.push(value.into_value()?)?; } - Value::Vec(Shared::new(v)) + Value::Vec(Shared::new(v)?) } Self::EmptyTuple => Value::EmptyTuple, Self::Tuple(tuple) => { - let mut t = vec::Vec::with_capacity(tuple.len()); + let mut t = rune_alloc::Vec::try_with_capacity(tuple.len())?; - for value in vec::Vec::from(tuple) { - t.push(value.into_value()); + for value in std::Vec::from(tuple) { + let value = value.into_value()?; + t.try_push(value)?; } - Value::Tuple(Shared::new(OwnedTuple::from(t))) + Value::Tuple(Shared::new(OwnedTuple::try_from(t)?)?) } Self::Object(object) => { - let mut o = Object::with_capacity(object.len()); + let mut o = Object::with_capacity(object.len())?; for (key, value) in object { - o.insert(key, value.into_value()); + let key = rune_alloc::String::try_from(key)?; + o.insert(key, value.into_value()?)?; } - Value::Object(Shared::new(o)) + Value::Object(Shared::new(o)?) } - } + }) } /// Try to coerce into boolean. 
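With `into_value` now returning a `Result`, every nested allocation in the conversion propagates outward. A standalone sketch of the recursive shape, using stand-in types instead of the real `ConstValue`/`Value`:

```rust
#[derive(Debug)]
struct Error;

enum ConstValue {
    Integer(i64),
    Vec(Vec<ConstValue>),
}

#[derive(Debug)]
enum Value {
    Integer(i64),
    Vec(Vec<Value>),
}

impl ConstValue {
    /// Convert into a runtime value; any nested failure bubbles up.
    fn into_value(self) -> Result<Value, Error> {
        Ok(match self {
            ConstValue::Integer(n) => Value::Integer(n),
            ConstValue::Vec(vec) => {
                // The diff allocates with Vec::with_capacity(..)? and
                // pushes with v.push(..)?; plain Vec stands in here.
                let mut v = Vec::with_capacity(vec.len());
                for value in vec {
                    v.push(value.into_value()?);
                }
                Value::Vec(v)
            }
        })
    }
}

fn main() -> Result<(), Error> {
    let v = ConstValue::Vec(vec![ConstValue::Integer(1)]).into_value()?;
    println!("{v:?}");
    Ok(())
}
```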
@@ -125,10 +131,10 @@ impl FromValue for ConstValue { Value::Float(f) => Self::Float(f), Value::String(s) => { let s = vm_try!(s.take()); - Self::String(s) + Self::String(std::String::from(s)) } Value::Option(option) => Self::Option(match vm_try!(option.take()) { - Some(some) => Some(Box::new(vm_try!(Self::from_value(some)))), + Some(some) => Some(std::Box::new(vm_try!(Self::from_value(some)))), None => None, }), Value::Bytes(b) => { @@ -137,7 +143,7 @@ impl FromValue for ConstValue { } Value::Vec(vec) => { let vec = vm_try!(vec.take()); - let mut const_vec = vec::Vec::with_capacity(vec.len()); + let mut const_vec = std::Vec::with_capacity(vec.len()); for value in vec { const_vec.push(vm_try!(Self::from_value(value))); @@ -147,9 +153,9 @@ impl FromValue for ConstValue { } Value::Tuple(tuple) => { let tuple = vm_try!(tuple.take()); - let mut const_tuple = vec::Vec::with_capacity(tuple.len()); + let mut const_tuple = std::Vec::with_capacity(tuple.len()); - for value in vec::Vec::from(tuple.into_inner()) { + for value in rune_alloc::Vec::from(tuple.into_inner()) { const_tuple.push(vm_try!(Self::from_value(value))); } @@ -160,6 +166,7 @@ impl FromValue for ConstValue { let mut const_object = HashMap::with_capacity(object.len()); for (key, value) in object { + let key = std::String::from(key); const_object.insert(key, vm_try!(Self::from_value(value))); } @@ -174,8 +181,15 @@ impl FromValue for ConstValue { } } +impl TryClone for ConstValue { + fn try_clone(&self) -> Result { + // TODO: perform fallible allocations. + Ok(self.clone()) + } +} + impl ToValue for ConstValue { fn to_value(self) -> VmResult { - VmResult::Ok(ConstValue::into_value(self)) + VmResult::Ok(vm_try!(ConstValue::into_value(self))) } } diff --git a/crates/rune/src/runtime/control_flow.rs b/crates/rune/src/runtime/control_flow.rs index 65f5e3a05..ba0f87efc 100644 --- a/crates/rune/src/runtime/control_flow.rs +++ b/crates/rune/src/runtime/control_flow.rs @@ -1,7 +1,7 @@ -use core::fmt::{self, Write}; use core::ops; use crate as rune; +use crate::alloc::fmt::TryWrite; use crate::runtime::{Formatter, FromValue, ProtocolCaller, ToValue, Value, VmResult}; use crate::Any; @@ -36,29 +36,21 @@ impl ControlFlow { &self, f: &mut Formatter, caller: &mut impl ProtocolCaller, - ) -> VmResult { + ) -> VmResult<()> { match self { ControlFlow::Continue(value) => { vm_write!(f, "Continue("); - - if let Err(fmt::Error) = vm_try!(Value::string_debug_with(value, f, caller)) { - return VmResult::Ok(Err(fmt::Error)); - } - + vm_try!(Value::string_debug_with(value, f, caller)); vm_write!(f, ")"); } ControlFlow::Break(value) => { vm_write!(f, "Break("); - - if let Err(fmt::Error) = vm_try!(Value::string_debug_with(value, f, caller)) { - return VmResult::Ok(Err(fmt::Error)); - } - + vm_try!(Value::string_debug_with(value, f, caller)); vm_write!(f, ")"); } } - VmResult::Ok(Ok(())) + VmResult::Ok(()) } pub(crate) fn partial_eq_with( @@ -104,7 +96,7 @@ where ops::ControlFlow::Break(value) => ControlFlow::Break(vm_try!(ToValue::to_value(value))), }; - VmResult::Ok(Value::from(value)) + VmResult::Ok(vm_try!(Value::try_from(value))) } } diff --git a/crates/rune/src/runtime/fmt.rs b/crates/rune/src/runtime/fmt.rs index ae5d06884..37de8968d 100644 --- a/crates/rune/src/runtime/fmt.rs +++ b/crates/rune/src/runtime/fmt.rs @@ -1,8 +1,7 @@ -use core::fmt; - use crate as rune; -use crate::no_std::string::String; +use crate::alloc::fmt::TryWrite; +use crate::alloc::{Error, Global, String}; use crate::Any; /// A formatter for the rune virtual machine. 
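The `TryClone` impl added for `ConstValue` above shows the crate-wide pattern: a clone that can report allocation failure, which callers propagate instead of assuming success. A minimal sketch with stand-in types:

```rust
#[derive(Debug)]
struct Error;

trait TryClone: Sized {
    fn try_clone(&self) -> Result<Self, Error>;
}

#[derive(Debug, Clone)]
struct ConstValue(i64);

impl TryClone for ConstValue {
    fn try_clone(&self) -> Result<Self, Error> {
        // Like the TODO in the diff, this defers to the infallible clone
        // until the nested allocations are themselves made fallible.
        Ok(self.clone())
    }
}

fn main() -> Result<(), Error> {
    let a = ConstValue(7);
    let b = a.try_clone()?;
    println!("{a:?} {b:?}");
    Ok(())
}
```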
@@ -12,52 +11,61 @@ use crate::Any; /// /// [`STRING_DEBUG`]: crate::runtime::Protocol::STRING_DEBUG /// [`STRING_DISPLAY`]: crate::runtime::Protocol::STRING_DISPLAY -#[derive(Any, Default)] +#[derive(Any)] #[rune(item = ::std::fmt)] pub struct Formatter { - pub(crate) string: String, - pub(crate) buf: String, + pub(crate) string: String, + pub(crate) buf: String, } impl Formatter { + /// Construct a new empty formatter. + /// + /// # Examples + /// + /// ``` + /// use rune::runtime::Formatter; + /// + /// let mut f = Formatter::new(); + /// ``` #[inline] - pub(crate) fn new() -> Self { + pub fn new() -> Self { Self { - string: String::new(), - buf: String::new(), + string: String::new_in(Global), + buf: String::new_in(Global), } } #[inline] - pub(crate) fn with_capacity(capacity: usize) -> Self { - Self { - string: String::with_capacity(capacity), - buf: String::new(), - } + pub(crate) fn with_capacity(capacity: usize) -> Result { + Ok(Self { + string: String::try_with_capacity_in(capacity, Global)?, + buf: String::new_in(Global), + }) } #[inline] - pub(crate) fn parts_mut(&mut self) -> (&mut String, &str) { + pub(crate) fn parts_mut(&mut self) -> (&mut String, &str) { (&mut self.string, &self.buf) } #[inline] - pub(crate) fn buf_mut(&mut self) -> &mut String { + pub(crate) fn buf_mut(&mut self) -> &mut String { &mut self.buf } #[inline] - pub(crate) fn push(&mut self, c: char) { - self.string.push(c); + pub(crate) fn push(&mut self, c: char) -> Result<(), Error> { + self.string.try_push(c) } #[inline] - pub(crate) fn push_str(&mut self, s: &str) { - self.string.push_str(s); + pub(crate) fn push_str(&mut self, s: &str) -> Result<(), Error> { + self.string.try_push_str(s) } #[inline] - pub(crate) fn into_string(self) -> String { + pub(crate) fn into_string(self) -> String { self.string } @@ -67,16 +75,20 @@ impl Formatter { } } -impl fmt::Write for Formatter { +impl Default for Formatter { + fn default() -> Self { + Self::new() + } +} + +impl TryWrite for Formatter { #[inline] - fn write_str(&mut self, s: &str) -> fmt::Result { - self.string.push_str(s); - Ok(()) + fn try_write_str(&mut self, s: &str) -> Result<(), Error> { + self.string.try_push_str(s) } #[inline] - fn write_char(&mut self, c: char) -> fmt::Result { - self.string.push(c); - Ok(()) + fn try_write_char(&mut self, c: char) -> Result<(), Error> { + self.string.try_push(c) } } diff --git a/crates/rune/src/runtime/format.rs b/crates/rune/src/runtime/format.rs index e341c4396..bc73ae6c3 100644 --- a/crates/rune/src/runtime/format.rs +++ b/crates/rune/src/runtime/format.rs @@ -1,19 +1,18 @@ //! Types for dealing with formatting specifications. use core::fmt; -use core::fmt::Write; use core::iter; use core::mem::take; use core::num::NonZeroUsize; use core::str; -use crate::no_std::prelude::*; - use musli::{Decode, Encode}; use serde::{Deserialize, Serialize}; use crate as rune; -use crate::runtime::{Formatter, FromValue, ProtocolCaller, Value, VmErrorKind, VmResult}; +use crate::alloc::fmt::TryWrite; +use crate::alloc::{Global, String}; +use crate::runtime::{Formatter, ProtocolCaller, Value, VmErrorKind, VmResult}; use crate::Any; /// Error raised when trying to parse a type string and it fails. @@ -49,12 +48,7 @@ pub struct Format { pub(crate) spec: FormatSpec, } -impl FromValue for Format { - #[inline] - fn from_value(value: Value) -> VmResult { - VmResult::Ok(*vm_try!(value.into_format())) - } -} +from_value!(Format, into_format); /// A format specification. 
#[derive(Debug, Clone, Copy, Serialize, Deserialize, Decode, Encode)] @@ -125,36 +119,43 @@ impl FormatSpec { } /// Format the given number. - fn format_number(&self, buf: &mut String, n: i64) { + fn format_number(&self, buf: &mut String, n: i64) -> VmResult<()> { let mut buffer = itoa::Buffer::new(); - buf.push_str(buffer.format(n)); + vm_try!(buf.try_push_str(buffer.format(n))); + VmResult::Ok(()) } /// Format the given float. - fn format_float(&self, buf: &mut String, n: f64) -> Result<(), VmErrorKind> { + fn format_float(&self, buf: &mut String, n: f64) -> VmResult<()> { if let Some(precision) = self.precision { - write!(buf, "{:.*}", precision.get(), n).map_err(|_| VmErrorKind::FormatError)?; + vm_write!(buf, "{:.*}", precision.get(), n); } else { let mut buffer = ryu::Buffer::new(); - buf.push_str(buffer.format(n)); + vm_try!(buf.try_push_str(buffer.format(n))); } - Ok(()) + VmResult::Ok(()) } /// Format fill. - fn format_fill(&self, f: &mut Formatter, align: Alignment, fill: char, sign: Option) { + fn format_fill( + &self, + f: &mut Formatter, + align: Alignment, + fill: char, + sign: Option, + ) -> VmResult<()> { let (f, buf) = f.parts_mut(); if let Some(sign) = sign { - f.push(sign); + vm_try!(f.try_push(sign)); } let mut w = self.width.map(|n| n.get()).unwrap_or_default(); if w == 0 { - f.push_str(buf); - return; + vm_try!(f.try_push_str(buf)); + return VmResult::Ok(()); } w = w @@ -162,27 +163,41 @@ impl FormatSpec { .saturating_sub(sign.map(|_| 1).unwrap_or_default()); if w == 0 { - f.push_str(buf); - return; + vm_try!(f.try_push_str(buf)); + return VmResult::Ok(()); } let mut filler = iter::repeat(fill).take(w); match align { Alignment::Left => { - f.push_str(buf); - f.extend(filler); + vm_try!(f.try_push_str(buf)); + + for c in filler { + vm_try!(f.try_push(c)); + } } Alignment::Center => { - f.extend((&mut filler).take(w / 2)); - f.push_str(buf); - f.extend(filler); + for c in (&mut filler).take(w / 2) { + vm_try!(f.try_push(c)); + } + + vm_try!(f.try_push_str(buf)); + + for c in filler { + vm_try!(f.try_push(c)); + } } Alignment::Right => { - f.extend(filler); - f.push_str(buf); + for c in filler { + vm_try!(f.try_push(c)); + } + + vm_try!(f.try_push_str(buf)); } } + + VmResult::Ok(()) } fn format_display( @@ -193,26 +208,25 @@ impl FormatSpec { ) -> VmResult<()> { match value { Value::Char(c) => { - f.buf_mut().push(*c); - self.format_fill(f, self.align, self.fill, None); + vm_try!(f.buf_mut().try_push(*c)); + vm_try!(self.format_fill(f, self.align, self.fill, None)); } Value::String(s) => { - f.buf_mut().push_str(&vm_try!(s.borrow_ref())); - self.format_fill(f, self.align, self.fill, None); + vm_try!(f.buf_mut().try_push_str(&vm_try!(s.borrow_ref()))); + vm_try!(self.format_fill(f, self.align, self.fill, None)); } Value::Integer(n) => { let (n, align, fill, sign) = self.int_traits(*n); - self.format_number(f.buf_mut(), n); - self.format_fill(f, align, fill, sign); + vm_try!(self.format_number(f.buf_mut(), n)); + vm_try!(self.format_fill(f, align, fill, sign)); } Value::Float(n) => { let (n, align, fill, sign) = self.float_traits(*n); vm_try!(self.format_float(f.buf_mut(), n)); - self.format_fill(f, align, fill, sign); + vm_try!(self.format_fill(f, align, fill, sign)); } _ => { - let result = vm_try!(value.string_display_with(f, caller)); - vm_try!(result.map_err(|_| VmErrorKind::FormatError)); + vm_try!(value.string_display_with(f, caller)); } } @@ -228,86 +242,84 @@ impl FormatSpec { match value { Value::String(s) => { let s = vm_try!(s.borrow_ref()); - vm_try!(write!(f, 
"{:?}", &*s).map_err(|_| VmErrorKind::FormatError)); + vm_write!(f, "{:?}", &*s); } Value::Integer(n) => { let (n, align, fill, sign) = self.int_traits(*n); - self.format_number(f.buf_mut(), n); - self.format_fill(f, align, fill, sign); + vm_try!(self.format_number(f.buf_mut(), n)); + vm_try!(self.format_fill(f, align, fill, sign)); } Value::Float(n) => { let (n, align, fill, sign) = self.float_traits(*n); vm_try!(self.format_float(f.buf_mut(), n)); - self.format_fill(f, align, fill, sign); + vm_try!(self.format_fill(f, align, fill, sign)); } value => { - let result = vm_try!(value.string_debug_with(f, caller)); - vm_try!(result.map_err(|_| VmErrorKind::FormatError)); + vm_try!(value.string_debug_with(f, caller)); } } VmResult::Ok(()) } - fn format_upper_hex(&self, value: &Value, f: &mut Formatter) -> Result<(), VmErrorKind> { + fn format_upper_hex(&self, value: &Value, f: &mut Formatter) -> VmResult<()> { match value { Value::Integer(n) => { let (n, align, fill, sign) = self.int_traits(*n); - write!(f.buf_mut(), "{:X}", n).map_err(|_| VmErrorKind::FormatError)?; - self.format_fill(f, align, fill, sign); + vm_write!(f.buf_mut(), "{:X}", n); + vm_try!(self.format_fill(f, align, fill, sign)); } _ => { - return Err(VmErrorKind::FormatError); + return VmResult::err(VmErrorKind::IllegalFormat); } } - Ok(()) + VmResult::Ok(()) } - fn format_lower_hex(&self, value: &Value, f: &mut Formatter) -> Result<(), VmErrorKind> { + fn format_lower_hex(&self, value: &Value, f: &mut Formatter) -> VmResult<()> { match value { Value::Integer(n) => { let (n, align, fill, sign) = self.int_traits(*n); - write!(f.buf_mut(), "{:x}", n).map_err(|_| VmErrorKind::FormatError)?; - self.format_fill(f, align, fill, sign); + vm_write!(f.buf_mut(), "{:x}", n); + vm_try!(self.format_fill(f, align, fill, sign)); } _ => { - return Err(VmErrorKind::FormatError); + return VmResult::err(VmErrorKind::IllegalFormat); } } - Ok(()) + VmResult::Ok(()) } - fn format_binary(&self, value: &Value, f: &mut Formatter) -> Result<(), VmErrorKind> { + fn format_binary(&self, value: &Value, f: &mut Formatter) -> VmResult<()> { match value { Value::Integer(n) => { let (n, align, fill, sign) = self.int_traits(*n); - write!(f.buf_mut(), "{:b}", n).map_err(|_| VmErrorKind::FormatError)?; - self.format_fill(f, align, fill, sign); + vm_write!(f.buf_mut(), "{:b}", n); + vm_try!(self.format_fill(f, align, fill, sign)); } _ => { - return Err(VmErrorKind::FormatError); + return VmResult::err(VmErrorKind::IllegalFormat); } } - Ok(()) + VmResult::Ok(()) } - fn format_pointer(&self, value: &Value, f: &mut Formatter) -> Result<(), VmErrorKind> { + fn format_pointer(&self, value: &Value, f: &mut Formatter) -> VmResult<()> { match value { Value::Integer(n) => { let (n, align, fill, sign) = self.int_traits(*n); - write!(f.buf_mut(), "{:p}", n as *const ()) - .map_err(|_| VmErrorKind::FormatError)?; - self.format_fill(f, align, fill, sign); + vm_write!(f.buf_mut(), "{:p}", n as *const ()); + vm_try!(self.format_fill(f, align, fill, sign)); } _ => { - return Err(VmErrorKind::FormatError); + return VmResult::err(VmErrorKind::IllegalFormat); } } - Ok(()) + VmResult::Ok(()) } /// Format the given value to the out buffer `out`, using `buf` for diff --git a/crates/rune/src/runtime/from_value.rs b/crates/rune/src/runtime/from_value.rs index 70640c82b..cf31ed425 100644 --- a/crates/rune/src/runtime/from_value.rs +++ b/crates/rune/src/runtime/from_value.rs @@ -1,7 +1,6 @@ use core::cmp::Ordering; -use crate::no_std::collections::HashMap; -use crate::no_std::prelude::*; 
+use crate::no_std::std; use crate::runtime::{ AnyObj, Mut, RawMut, RawRef, Ref, Shared, Value, VmError, VmErrorKind, VmResult, @@ -257,38 +256,73 @@ impl UnsafeToMut for Option { } } -impl FromValue for String { +impl FromValue for rune_alloc::String { fn from_value(value: Value) -> VmResult { match value { Value::String(string) => VmResult::Ok(vm_try!(string.take())), - actual => VmResult::err(VmErrorKind::expected::(vm_try!(actual.type_info()))), + actual => VmResult::err(VmErrorKind::expected::(vm_try!( + actual.type_info() + ))), } } } -impl FromValue for Box { +impl FromValue for std::String { + fn from_value(value: Value) -> VmResult { + VmResult::Ok(std::String::from(vm_try!(rune_alloc::String::from_value( + value + )))) + } +} + +impl FromValue for rune_alloc::Box { fn from_value(value: Value) -> VmResult { match value { - Value::String(string) => VmResult::Ok(vm_try!(string.take()).into_boxed_str()), - actual => VmResult::err(VmErrorKind::expected::(vm_try!(actual.type_info()))), + Value::String(string) => { + let string = vm_try!(string.take()); + let string = vm_try!(string.try_into_boxed_str()); + VmResult::Ok(string) + } + actual => VmResult::err(VmErrorKind::expected::(vm_try!( + actual.type_info() + ))), } } } -impl FromValue for Mut { +impl FromValue for std::Box { + fn from_value(value: Value) -> VmResult { + match value { + Value::String(string) => { + let string = vm_try!(string.take()); + let string = std::Box::from(string.as_str()); + VmResult::Ok(string) + } + actual => VmResult::err(VmErrorKind::expected::(vm_try!( + actual.type_info() + ))), + } + } +} + +impl FromValue for Mut { fn from_value(value: Value) -> VmResult { match value { Value::String(string) => VmResult::Ok(vm_try!(string.into_mut())), - actual => VmResult::err(VmErrorKind::expected::(vm_try!(actual.type_info()))), + actual => VmResult::err(VmErrorKind::expected::(vm_try!( + actual.type_info() + ))), } } } -impl FromValue for Ref { +impl FromValue for Ref { fn from_value(value: Value) -> VmResult { match value { Value::String(string) => VmResult::Ok(vm_try!(string.into_ref())), - actual => VmResult::err(VmErrorKind::expected::(vm_try!(actual.type_info()))), + actual => VmResult::err(VmErrorKind::expected::(vm_try!( + actual.type_info() + ))), } } } @@ -299,7 +333,9 @@ impl FromValue for Ref { Value::String(string) => { VmResult::Ok(Ref::map(vm_try!(string.into_ref()), |s| s.as_str())) } - actual => VmResult::err(VmErrorKind::expected::(vm_try!(actual.type_info()))), + actual => VmResult::err(VmErrorKind::expected::(vm_try!( + actual.type_info() + ))), } } } @@ -314,7 +350,9 @@ impl UnsafeToRef for str { let (string, guard) = Ref::into_raw(string); VmResult::Ok((string.as_ref(), guard)) } - actual => VmResult::err(VmErrorKind::expected::(vm_try!(actual.type_info()))), + actual => VmResult::err(VmErrorKind::expected::(vm_try!( + actual.type_info() + ))), } } } @@ -329,12 +367,14 @@ impl UnsafeToMut for str { let (mut string, guard) = Mut::into_raw(string); VmResult::Ok((string.as_mut().as_mut_str(), guard)) } - actual => VmResult::err(VmErrorKind::expected::(vm_try!(actual.type_info()))), + actual => VmResult::err(VmErrorKind::expected::(vm_try!( + actual.type_info() + ))), } } } -impl UnsafeToRef for String { +impl UnsafeToRef for rune_alloc::String { type Guard = RawRef; unsafe fn unsafe_to_ref<'a>(value: Value) -> VmResult<(&'a Self, Self::Guard)> { @@ -344,12 +384,14 @@ impl UnsafeToRef for String { let (string, guard) = Ref::into_raw(string); VmResult::Ok((string.as_ref(), guard)) } - actual => 
VmResult::err(VmErrorKind::expected::(vm_try!(actual.type_info()))), + actual => VmResult::err(VmErrorKind::expected::(vm_try!( + actual.type_info() + ))), } } } -impl UnsafeToMut for String { +impl UnsafeToMut for rune_alloc::String { type Guard = RawMut; unsafe fn unsafe_to_mut<'a>(value: Value) -> VmResult<(&'a mut Self, Self::Guard)> { @@ -359,7 +401,9 @@ impl UnsafeToMut for String { let (mut string, guard) = Mut::into_raw(string); VmResult::Ok((string.as_mut(), guard)) } - actual => VmResult::err(VmErrorKind::expected::(vm_try!(actual.type_info()))), + actual => VmResult::err(VmErrorKind::expected::(vm_try!( + actual.type_info() + ))), } } } @@ -463,8 +507,9 @@ impl FromValue for f32 { } } +#[cfg(feature = "std")] macro_rules! impl_map { - ($ty:ty) => { + ($ty:ty, $key:ty) => { impl FromValue for $ty where T: FromValue, @@ -476,7 +521,9 @@ macro_rules! impl_map { let mut output = <$ty>::with_capacity(object.len()); for (key, value) in object { - output.insert(key, vm_try!(T::from_value(value))); + let key = vm_try!(<$key>::try_from(key)); + let value = vm_try!(::from_value(value)); + output.insert(key, value); } VmResult::Ok(output) @@ -485,7 +532,10 @@ macro_rules! impl_map { }; } -impl_map!(HashMap); +#[cfg(feature = "std")] +impl_map!(::std::collections::HashMap, rune_alloc::String); +#[cfg(feature = "std")] +impl_map!(::std::collections::HashMap, std::String); impl FromValue for Ordering { #[inline] diff --git a/crates/rune/src/runtime/function.rs b/crates/rune/src/runtime/function.rs index 637ee024c..a03233516 100644 --- a/crates/rune/src/runtime/function.rs +++ b/crates/rune/src/runtime/function.rs @@ -1,10 +1,10 @@ use core::fmt; use core::future::Future; -use crate::no_std::prelude::*; use crate::no_std::sync::Arc; use crate as rune; +use crate::alloc::{Box, Error, TryClone, Vec}; use crate::module; use crate::runtime::{ Args, Call, ConstValue, FromValue, FunctionHandler, OwnedTuple, Rtti, RuntimeContext, Stack, @@ -357,7 +357,6 @@ impl Function { /// A callable sync function. This currently only supports a subset of values /// that are supported by the Vm. -#[derive(Clone)] #[repr(transparent)] pub struct SyncFunction(FunctionImpl); @@ -470,20 +469,34 @@ impl SyncFunction { } } +impl TryClone for SyncFunction { + fn try_clone(&self) -> Result { + Ok(Self(self.0.try_clone()?)) + } +} + /// A stored function, of some specific kind. 
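The `FunctionImpl` rework below drops the blanket `Clone` derive: a closure's captured environment is now fallibly cloned and converted on each call. A hedged sketch of that call-path pattern, with stand-in types:

```rust
#[derive(Debug)]
struct Error;

trait TryClone: Sized {
    fn try_clone(&self) -> Result<Self, Error>;
}

impl TryClone for Box<[i64]> {
    fn try_clone(&self) -> Result<Self, Error> {
        // The real impl allocates fallibly; plain clone stands in.
        Ok(self.clone())
    }
}

/// Mirrors Inner::FnClosureOffset: clone the environment, then hand it
/// to the underlying call.
fn call_closure(environment: &Box<[i64]>) -> Result<i64, Error> {
    let environment = environment.try_clone()?;
    Ok(environment.iter().sum())
}

fn main() -> Result<(), Error> {
    let env: Box<[i64]> = vec![1, 2, 3].into_boxed_slice();
    assert_eq!(call_closure(&env)?, 6);
    Ok(())
}
```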
-#[derive(Clone)] -struct FunctionImpl +struct FunctionImpl { + inner: Inner, +} + +impl TryClone for FunctionImpl where - V: Clone, - OwnedTuple: From>, + V: TryClone, { - inner: Inner, + #[inline] + fn try_clone(&self) -> Result { + Ok(Self { + inner: self.inner.try_clone()?, + }) + } } impl FunctionImpl where - V: Clone, - OwnedTuple: From>, + V: TryClone, + OwnedTuple: TryFrom>, + VmErrorKind: From<>>::Error>, { fn call(&self, args: A) -> VmResult where @@ -493,30 +506,38 @@ where let value = match &self.inner { Inner::FnHandler(handler) => { let arg_count = args.count(); - let mut stack = Stack::with_capacity(arg_count); + let mut stack = vm_try!(Stack::with_capacity(arg_count)); vm_try!(args.into_stack(&mut stack)); vm_try!((handler.handler)(&mut stack, arg_count)); vm_try!(stack.pop()) } Inner::FnOffset(fn_offset) => vm_try!(fn_offset.call(args, ())), - Inner::FnClosureOffset(closure) => vm_try!(closure - .fn_offset - .call(args, (OwnedTuple::from(closure.environment.clone()),))), + Inner::FnClosureOffset(closure) => { + let environment = vm_try!(closure.environment.try_clone()); + let environment = vm_try!(OwnedTuple::try_from(environment)); + vm_try!(closure.fn_offset.call(args, (environment,))) + } Inner::FnUnitStruct(empty) => { vm_try!(check_args(args.count(), 0)); - Value::empty_struct(empty.rtti.clone()) + vm_try!(Value::empty_struct(empty.rtti.clone())) } Inner::FnTupleStruct(tuple) => { vm_try!(check_args(args.count(), tuple.args)); - Value::tuple_struct(tuple.rtti.clone(), vm_try!(args.into_vec())) + vm_try!(Value::tuple_struct( + tuple.rtti.clone(), + vm_try!(args.try_into_vec()) + )) } Inner::FnUnitVariant(unit) => { vm_try!(check_args(args.count(), 0)); - Value::unit_variant(unit.rtti.clone()) + vm_try!(Value::unit_variant(unit.rtti.clone())) } Inner::FnTupleVariant(tuple) => { vm_try!(check_args(args.count(), tuple.args)); - Value::tuple_variant(tuple.rtti.clone(), vm_try!(args.into_vec())) + vm_try!(Value::tuple_variant( + tuple.rtti.clone(), + vm_try!(args.try_into_vec()) + )) } }; @@ -569,11 +590,12 @@ where None } Inner::FnClosureOffset(closure) => { - if let Some(vm_call) = vm_try!(closure.fn_offset.call_with_vm( - vm, - args, - (OwnedTuple::from(closure.environment.clone()),), - )) { + let environment = vm_try!(closure.environment.try_clone()); + let environment = vm_try!(OwnedTuple::try_from(environment)); + + if let Some(vm_call) = + vm_try!(closure.fn_offset.call_with_vm(vm, args, (environment,))) + { return VmResult::Ok(Some(VmHalt::VmCall(vm_call))); } @@ -581,34 +603,38 @@ where } Inner::FnUnitStruct(empty) => { vm_try!(check_args(args, 0)); - vm.stack_mut().push(Value::empty_struct(empty.rtti.clone())); + let value = vm_try!(Value::empty_struct(empty.rtti.clone())); + vm_try!(vm.stack_mut().push(value)); None } Inner::FnTupleStruct(tuple) => { vm_try!(check_args(args, tuple.args)); - let value = Value::tuple_struct( + let value = vm_try!(Value::tuple_struct( tuple.rtti.clone(), - vm_try!(vm.stack_mut().pop_sequence(args)), - ); - vm.stack_mut().push(value); + vm_try!(vm_try!(vm.stack_mut().pop_sequence(args))), + )); + + vm_try!(vm.stack_mut().push(value)); None } Inner::FnUnitVariant(tuple) => { vm_try!(check_args(args, 0)); - let value = Value::unit_variant(tuple.rtti.clone()); - vm.stack_mut().push(value); + let value = vm_try!(Value::unit_variant(tuple.rtti.clone())); + + vm_try!(vm.stack_mut().push(value)); None } Inner::FnTupleVariant(tuple) => { vm_try!(check_args(args, tuple.args)); - let value = Value::tuple_variant( + let value = 
vm_try!(Value::tuple_variant( tuple.rtti.clone(), - vm_try!(vm.stack_mut().pop_sequence(args)), - ); - vm.stack_mut().push(value); + vm_try!(vm_try!(vm.stack_mut().pop_sequence(args))), + )); + + vm_try!(vm.stack_mut().push(value)); None } }; @@ -717,15 +743,15 @@ impl FunctionImpl { fn into_sync(self) -> VmResult> { let inner = match self.inner { Inner::FnClosureOffset(closure) => { - let mut env = Vec::with_capacity(closure.environment.len()); + let mut env = vm_try!(Vec::try_with_capacity(closure.environment.len())); - for value in closure.environment.into_vec() { - env.push(vm_try!(FromValue::from_value(value))); + for value in Vec::from(closure.environment) { + vm_try!(env.try_push(vm_try!(FromValue::from_value(value)))); } Inner::FnClosureOffset(FnClosureOffset { fn_offset: closure.fn_offset, - environment: env.into_boxed_slice(), + environment: vm_try!(env.try_into_boxed_slice()), }) } Inner::FnHandler(inner) => Inner::FnHandler(inner), @@ -774,7 +800,7 @@ impl fmt::Debug for Function { } } -#[derive(Debug, Clone)] +#[derive(Debug)] enum Inner { /// A native function handler. /// This is wrapped as an `Arc`. @@ -799,6 +825,23 @@ enum Inner { FnTupleVariant(FnTupleVariant), } +impl TryClone for Inner +where + V: TryClone, +{ + fn try_clone(&self) -> Result { + Ok(match self { + Inner::FnHandler(inner) => Inner::FnHandler(inner.clone()), + Inner::FnOffset(inner) => Inner::FnOffset(inner.clone()), + Inner::FnClosureOffset(inner) => Inner::FnClosureOffset(inner.try_clone()?), + Inner::FnUnitStruct(inner) => Inner::FnUnitStruct(inner.clone()), + Inner::FnTupleStruct(inner) => Inner::FnTupleStruct(inner.clone()), + Inner::FnUnitVariant(inner) => Inner::FnUnitVariant(inner.clone()), + Inner::FnTupleVariant(inner) => Inner::FnTupleVariant(inner.clone()), + }) + } +} + #[derive(Clone)] struct FnHandler { /// The function handler. @@ -892,7 +935,7 @@ impl fmt::Debug for FnOffset { } } -#[derive(Debug, Clone)] +#[derive(Debug)] struct FnClosureOffset { /// The offset in the associated unit that the function lives. fn_offset: FnOffset, @@ -900,6 +943,19 @@ struct FnClosureOffset { environment: Box<[V]>, } +impl TryClone for FnClosureOffset +where + V: TryClone, +{ + #[inline] + fn try_clone(&self) -> Result { + Ok(Self { + fn_offset: self.fn_offset.clone(), + environment: self.environment.try_clone()?, + }) + } +} + #[derive(Debug, Clone)] struct FnUnitStruct { /// The type of the empty. diff --git a/crates/rune/src/runtime/guarded_args.rs b/crates/rune/src/runtime/guarded_args.rs index 180159051..6e6c1f8d5 100644 --- a/crates/rune/src/runtime/guarded_args.rs +++ b/crates/rune/src/runtime/guarded_args.rs @@ -34,7 +34,7 @@ macro_rules! impl_into_args { unsafe fn unsafe_into_stack(self, stack: &mut Stack) -> VmResult { let ($($value,)*) = self; $(let $value = vm_try!($value.unsafe_to_value());)* - $(stack.push($value.0);)* + $(vm_try!(stack.push($value.0));)* VmResult::Ok(($($value.1,)*)) } diff --git a/crates/rune/src/runtime/hasher.rs b/crates/rune/src/runtime/hasher.rs index f90c4478f..37b4d5657 100644 --- a/crates/rune/src/runtime/hasher.rs +++ b/crates/rune/src/runtime/hasher.rs @@ -1,6 +1,6 @@ use core::hash::{BuildHasher, Hasher as _}; -use crate::no_std::collections::hash_map::DefaultHasher; +use rune_alloc::hash_map; use crate as rune; use crate::Any; @@ -9,14 +9,14 @@ use crate::Any; #[derive(Any)] #[rune(item = ::std::hash)] pub struct Hasher { - hasher: DefaultHasher, + hasher: hash_map::Hasher, } impl Hasher { /// Construct a new empty hasher. 
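The `Hasher::new_with` constructor that follows is plain `std::hash` machinery pointed at a different `BuildHasher`. The construction pattern itself, sketched with std's `RandomState` standing in for the rune alloc hash-map state:

```rust
use std::collections::hash_map::RandomState;
use std::hash::{BuildHasher, Hasher};

/// Derive a hasher instance from any BuildHasher and feed it bytes.
fn new_with<S: BuildHasher>(build_hasher: &S) -> S::Hasher {
    build_hasher.build_hasher()
}

fn main() {
    let state = RandomState::new();
    let mut hasher = new_with(&state);
    hasher.write(b"hello");
    println!("{:x}", hasher.finish());
}
```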
pub(crate) fn new_with(build_hasher: &S) -> Self where - S: BuildHasher, + S: BuildHasher, { Self { hasher: build_hasher.build_hasher(), diff --git a/crates/rune/src/runtime/iterator.rs b/crates/rune/src/runtime/iterator.rs index ca56f486c..bb7c81f23 100644 --- a/crates/rune/src/runtime/iterator.rs +++ b/crates/rune/src/runtime/iterator.rs @@ -3,9 +3,9 @@ use core::fmt; use core::iter; use crate::no_std::prelude::*; -use crate::no_std::vec; use crate as rune; +use crate::alloc::{self, Global}; use crate::runtime::{FromValue, Function, Panic, ToValue, Value, VmErrorKind, VmResult}; use crate::Any; @@ -287,15 +287,15 @@ impl Iterator { } #[inline] - pub(crate) fn collect(mut self) -> VmResult> + pub(crate) fn collect(mut self) -> VmResult> where T: FromValue, { let (cap, _) = self.iter.size_hint(); - let mut vec = vec::Vec::with_capacity(cap); + let mut vec = vm_try!(alloc::Vec::try_with_capacity_in(cap, Global)); while let Some(value) = vm_try!(self.next()) { - vec.push(vm_try!(T::from_value(value))); + vm_try!(vec.try_push(vm_try!(T::from_value(value)))); } VmResult::Ok(vec) diff --git a/crates/rune/src/runtime/key.rs b/crates/rune/src/runtime/key.rs deleted file mode 100644 index 075de7df6..000000000 --- a/crates/rune/src/runtime/key.rs +++ /dev/null @@ -1,541 +0,0 @@ -use core::fmt; - -use crate::no_std::prelude::*; -use crate::no_std::sync::Arc; -use crate::no_std::vec; - -use serde::{de, ser}; - -use crate::runtime::{ - Bytes, FromValue, FullTypeOf, MaybeTypeOf, Object, OwnedTuple, Shared, ToValue, TypeInfo, - Value, Variant, VariantData, VariantRtti, Vec, VmErrorKind, VmResult, -}; - -/// A key that can be used as an anonymous object key. -#[derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub enum Key { - /// A constant unit. - EmptyTuple, - /// A byte. - Byte(u8), - /// A character. - Char(char), - /// A boolean constant value. - Bool(bool), - /// An integer constant. - Integer(i64), - /// A string constant designated by its slot. - String(Box), - /// A byte string. - Bytes(Box<[u8]>), - /// A vector of values. - Vec(vec::Vec), - /// An anonymous tuple. - Tuple(Box<[Key]>), - /// An option. - Option(Option>), - /// A variant. - Variant(VariantKey), -} - -impl Key { - /// Convert a value reference into a key. 
- pub fn from_value(value: &Value) -> VmResult { - return VmResult::Ok(match value { - Value::EmptyTuple => Self::EmptyTuple, - Value::Byte(b) => Self::Byte(*b), - Value::Char(c) => Self::Char(*c), - Value::Bool(b) => Self::Bool(*b), - Value::Integer(n) => Self::Integer(*n), - Value::String(s) => Self::String(vm_try!(s.borrow_ref()).as_str().into()), - Value::Bytes(b) => Self::Bytes(vm_try!(b.borrow_ref()).as_ref().into()), - Value::Option(option) => Self::Option(match &*vm_try!(option.borrow_ref()) { - Some(some) => Some(Box::new(vm_try!(Self::from_value(some)))), - None => None, - }), - Value::Vec(vec) => { - let vec = vm_try!(vec.borrow_ref()); - let mut key_vec = vec::Vec::with_capacity(vec.len()); - - for value in &*vec { - key_vec.push(vm_try!(Self::from_value(value))); - } - - Self::Vec(key_vec) - } - Value::Tuple(tuple) => { - let tuple = vm_try!(tuple.borrow_ref()); - Self::Tuple(vm_try!(tuple_from_value(&tuple))) - } - Value::Variant(variant) => { - let variant = vm_try!(variant.borrow_ref()); - - let data = match &variant.data { - VariantData::Empty => VariantKeyData::Unit, - VariantData::Tuple(tuple) => { - VariantKeyData::Tuple(vm_try!(tuple_from_value(tuple))) - } - VariantData::Struct(object) => { - VariantKeyData::Struct(vm_try!(struct_from_value(object))) - } - }; - - Key::Variant(VariantKey { - rtti: variant.rtti.clone(), - data, - }) - } - value => { - return VmResult::err(VmErrorKind::KeyNotSupported { - actual: vm_try!(value.type_info()), - }); - } - }); - - fn tuple_from_value(tuple: &OwnedTuple) -> VmResult> { - let mut output = vec::Vec::with_capacity(tuple.len()); - - for value in tuple.iter() { - output.push(vm_try!(Key::from_value(value))); - } - - VmResult::Ok(output.into_boxed_slice()) - } - - fn struct_from_value(object: &Object) -> VmResult, Key)]>> { - let mut output = vec::Vec::with_capacity(object.len()); - - for (key, value) in object { - output.push((key.as_str().into(), vm_try!(Key::from_value(value)))); - } - - VmResult::Ok(output.into_boxed_slice()) - } - } - - /// Convert into virtual machine value. - /// - /// We provide this associated method since a constant value can be - /// converted into a value infallibly, which is not captured by the trait - /// otherwise. 
- pub fn into_value(self) -> Value { - return match self { - Self::EmptyTuple => Value::EmptyTuple, - Self::Byte(b) => Value::Byte(b), - Self::Char(c) => Value::Char(c), - Self::Bool(b) => Value::Bool(b), - Self::Integer(n) => Value::Integer(n), - Self::String(s) => Value::String(Shared::new(String::from(s))), - Self::Bytes(b) => Value::Bytes(Shared::new(Bytes::from(b))), - Self::Option(option) => { - Value::Option(Shared::new(option.map(|some| some.into_value()))) - } - Self::Vec(vec) => { - let mut v = Vec::with_capacity(vec.len()); - - for value in vec { - v.push(value.into_value()); - } - - Value::Vec(Shared::new(v)) - } - Self::Tuple(tuple) => Value::Tuple(Shared::new(tuple_into_value(tuple))), - Self::Variant(variant) => { - let data = match variant.data { - VariantKeyData::Unit => VariantData::Empty, - VariantKeyData::Tuple(tuple) => VariantData::Tuple(tuple_into_value(tuple)), - VariantKeyData::Struct(st) => VariantData::Struct(struct_into_value(st)), - }; - - Value::Variant(Shared::new(Variant { - rtti: variant.rtti, - data, - })) - } - }; - - fn tuple_into_value(data: Box<[Key]>) -> OwnedTuple { - let mut t = vec::Vec::with_capacity(data.len()); - - for value in vec::Vec::from(data) { - t.push(value.into_value()); - } - - OwnedTuple::from(t) - } - - fn struct_into_value(data: Box<[(Box, Key)]>) -> Object { - let mut object = Object::with_capacity(data.len()); - - for (key, value) in vec::Vec::from(data) { - object.insert(key.into(), value.into_value()); - } - - object - } - } - - /// Try to coerce into boolean. - pub fn into_bool(self) -> Result { - match self { - Self::Bool(value) => Ok(value), - value => Err(value), - } - } - - /// Get the type information of the value. - pub fn type_info(&self) -> TypeInfo { - match self { - Self::Byte(..) => TypeInfo::StaticType(crate::runtime::static_type::BYTE_TYPE), - Self::Char(..) => TypeInfo::StaticType(crate::runtime::static_type::CHAR_TYPE), - Self::Bool(..) => TypeInfo::StaticType(crate::runtime::static_type::BOOL_TYPE), - Self::String(..) => TypeInfo::StaticType(crate::runtime::static_type::STRING_TYPE), - Self::Bytes(..) => TypeInfo::StaticType(crate::runtime::static_type::BYTES_TYPE), - Self::Integer(..) => TypeInfo::StaticType(crate::runtime::static_type::INTEGER_TYPE), - Self::Vec(..) => TypeInfo::StaticType(crate::runtime::static_type::VEC_TYPE), - Self::EmptyTuple => TypeInfo::StaticType(crate::runtime::static_type::TUPLE_TYPE), - Self::Tuple(..) => TypeInfo::StaticType(crate::runtime::static_type::TUPLE_TYPE), - Self::Option(..) 
=> TypeInfo::StaticType(crate::runtime::static_type::OPTION_TYPE), - Self::Variant(variant) => TypeInfo::Variant(variant.rtti.clone()), - } - } -} - -impl fmt::Debug for Key { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Key::Byte(b) => write!(f, "{:?}", b), - Key::Char(c) => write!(f, "{:?}", c), - Key::Bool(b) => write!(f, "{}", b), - Key::Integer(n) => write!(f, "{}", n), - Key::String(s) => write!(f, "{:?}", s), - Key::Bytes(b) => write!(f, "{:?}", b), - Key::Vec(vec) => write!(f, "{:?}", vec), - Key::EmptyTuple => write!(f, "()"), - Key::Tuple(tuple) => { - write!(f, "(")?; - - let mut it = tuple.iter().peekable(); - - while let Some(key) = it.next() { - write!(f, "{:?}", key)?; - - if it.peek().is_some() { - write!(f, ", ")?; - } - } - - write!(f, "(")?; - Ok(()) - } - Key::Option(opt) => write!(f, "{:?}", opt), - Key::Variant(variant) => write!(f, "{:?}", variant), - } - } -} - -impl FromValue for Key { - #[inline] - fn from_value(value: Value) -> VmResult { - Key::from_value(&value) - } -} - -impl ToValue for Key { - #[inline] - fn to_value(self) -> VmResult { - VmResult::Ok(Key::into_value(self)) - } -} - -impl MaybeTypeOf for Key { - #[inline] - fn maybe_type_of() -> Option { - None - } -} - -/// Deserialize implementation for value. -impl<'de> de::Deserialize<'de> for Key { - fn deserialize(deserializer: D) -> Result - where - D: de::Deserializer<'de>, - { - deserializer.deserialize_any(KeyVisitor) - } -} - -/// Serialize implementation for value. -impl ser::Serialize for Key { - fn serialize(&self, serializer: S) -> Result - where - S: ser::Serializer, - { - use serde::ser::SerializeSeq as _; - - match self { - Self::Bool(b) => serializer.serialize_bool(*b), - Self::Char(c) => serializer.serialize_char(*c), - Self::Byte(c) => serializer.serialize_u8(*c), - Self::Integer(integer) => serializer.serialize_i64(*integer), - Self::String(string) => serializer.serialize_str(string), - Self::Bytes(bytes) => serializer.serialize_bytes(bytes), - Self::Vec(vec) => { - let mut serializer = serializer.serialize_seq(Some(vec.len()))?; - - for value in vec { - serializer.serialize_element(value)?; - } - - serializer.end() - } - Self::EmptyTuple => serializer.serialize_unit(), - Self::Tuple(tuple) => { - let mut serializer = serializer.serialize_seq(Some(tuple.len()))?; - - for value in tuple.iter() { - serializer.serialize_element(value)?; - } - - serializer.end() - } - Self::Option(option) => >>::serialize(option, serializer), - Self::Variant(..) 
=> Err(ser::Error::custom("cannot serialize variants")), - } - } -} - -struct KeyVisitor; - -impl<'de> de::Visitor<'de> for KeyVisitor { - type Value = Key; - - fn expecting(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt.write_str("any valid key") - } - - #[inline] - fn visit_str(self, value: &str) -> Result - where - E: de::Error, - { - Ok(Key::String(value.into())) - } - - #[inline] - fn visit_string(self, value: String) -> Result - where - E: de::Error, - { - Ok(Key::String(value.into())) - } - - #[inline] - fn visit_bytes(self, v: &[u8]) -> Result - where - E: de::Error, - { - Ok(Key::Bytes(v.into())) - } - - #[inline] - fn visit_byte_buf(self, v: vec::Vec) -> Result - where - E: de::Error, - { - Ok(Key::Bytes(v.into())) - } - - #[inline] - fn visit_i8(self, v: i8) -> Result - where - E: de::Error, - { - Ok(Key::Integer(v as i64)) - } - - #[inline] - fn visit_i16(self, v: i16) -> Result - where - E: de::Error, - { - Ok(Key::Integer(v as i64)) - } - - #[inline] - fn visit_i32(self, v: i32) -> Result - where - E: de::Error, - { - Ok(Key::Integer(v as i64)) - } - - #[inline] - fn visit_i64(self, v: i64) -> Result - where - E: de::Error, - { - Ok(Key::Integer(v)) - } - - #[inline] - fn visit_i128(self, v: i128) -> Result - where - E: de::Error, - { - Ok(Key::Integer(v as i64)) - } - - #[inline] - fn visit_u8(self, v: u8) -> Result - where - E: de::Error, - { - Ok(Key::Integer(v as i64)) - } - - #[inline] - fn visit_u16(self, v: u16) -> Result - where - E: de::Error, - { - Ok(Key::Integer(v as i64)) - } - - #[inline] - fn visit_u32(self, v: u32) -> Result - where - E: de::Error, - { - Ok(Key::Integer(v as i64)) - } - - #[inline] - fn visit_u64(self, v: u64) -> Result - where - E: de::Error, - { - Ok(Key::Integer(v as i64)) - } - - #[inline] - fn visit_u128(self, v: u128) -> Result - where - E: de::Error, - { - Ok(Key::Integer(v as i64)) - } - - #[inline] - fn visit_bool(self, v: bool) -> Result - where - E: de::Error, - { - Ok(Key::Bool(v)) - } - - #[inline] - fn visit_none(self) -> Result - where - E: de::Error, - { - Ok(Key::EmptyTuple) - } - - #[inline] - fn visit_unit(self) -> Result - where - E: de::Error, - { - Ok(Key::EmptyTuple) - } - - #[inline] - fn visit_seq(self, mut visitor: V) -> Result - where - V: de::SeqAccess<'de>, - { - let mut vec = if let Some(hint) = visitor.size_hint() { - vec::Vec::with_capacity(hint) - } else { - vec::Vec::new() - }; - - while let Some(elem) = visitor.next_element()? { - vec.push(elem); - } - - Ok(Key::Vec(vec)) - } -} - -impl From> for Key { - #[inline] - fn from(value: Box) -> Self { - Self::String(value) - } -} - -impl From for Key { - #[inline] - fn from(value: String) -> Self { - Self::String(value.into()) - } -} - -impl From for Key { - #[inline] - fn from(value: i64) -> Self { - Self::Integer(value) - } -} - -/// A variant that has been serialized to a key. 
-#[derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct VariantKey { - rtti: Arc, - data: VariantKeyData, -} - -impl fmt::Debug for VariantKey { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.rtti.item)?; - - match &self.data { - VariantKeyData::Unit => (), - VariantKeyData::Tuple(tuple) => { - let mut it = tuple.iter(); - let last = it.next_back(); - - write!(f, "(")?; - - for v in it { - write!(f, "{:?}, ", v)?; - } - - if let Some(v) = last { - write!(f, "{:?}", v)?; - } - - write!(f, ")")?; - } - VariantKeyData::Struct(st) => f - .debug_map() - .entries(st.iter().map(|(k, v)| (k, v))) - .finish()?, - } - - Ok(()) - } -} - -/// Variant data that has been serialized to a key. -#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub enum VariantKeyData { - /// A unit variant with a specific type hash. - Unit, - /// A tuple variant with a specific type hash. - Tuple(Box<[Key]>), - /// An struct variant with a specific type hash. - Struct(Box<[(Box, Key)]>), -} diff --git a/crates/rune/src/runtime/object.rs b/crates/rune/src/runtime/object.rs index 693c22f5e..32949d6a7 100644 --- a/crates/rune/src/runtime/object.rs +++ b/crates/rune/src/runtime/object.rs @@ -5,12 +5,12 @@ use core::fmt; use core::hash; use core::iter; -use crate::no_std::collections::{btree_map, BTreeMap}; -use crate::no_std::prelude::*; +use crate::alloc::{btree_map, BTreeMap}; +use crate::alloc::{Error, Global, String, TryClone}; use crate as rune; use crate::compile::ItemBuf; -use crate::runtime::{FromValue, Iterator, ProtocolCaller, Ref, ToValue, Value, VmResult}; +use crate::runtime::{FromValue, Iterator, ProtocolCaller, RawRef, Ref, ToValue, Value, VmResult}; use crate::Any; /// An owning iterator over the entries of a `Object`. @@ -63,11 +63,13 @@ pub type Values<'a> = btree_map::Values<'a, String, Value>; /// # Rust Examples /// /// ```rust +/// use rune::alloc::String; +/// /// let mut object = rune::runtime::Object::new(); /// assert!(object.is_empty()); /// -/// object.insert_value(String::from("foo"), 42).into_result()?; -/// object.insert_value(String::from("bar"), true).into_result()?; +/// object.insert_value(String::try_from("foo")?, 42).into_result()?; +/// object.insert_value(String::try_from("bar")?, true).into_result()?; /// assert_eq!(2, object.len()); /// /// assert_eq!(Some(42), object.get_value("foo").into_result()?); @@ -75,11 +77,11 @@ pub type Values<'a> = btree_map::Values<'a, String, Value>; /// assert_eq!(None::, object.get_value("baz").into_result()?); /// # Ok::<_, rune::Error>(()) /// ``` -#[derive(Any, Default, Clone)] +#[derive(Any, Default)] #[repr(transparent)] #[rune(builtin, static_type = OBJECT_TYPE)] pub struct Object { - inner: BTreeMap, + inner: BTreeMap, } impl Object { @@ -95,7 +97,7 @@ impl Object { #[rune::function(keep, path = Self::new)] pub fn new() -> Self { Self { - inner: BTreeMap::new(), + inner: BTreeMap::new_in(Global), } } @@ -108,13 +110,18 @@ impl Object { /// object.insert("Hello", "World"); /// ``` #[inline] - #[rune::function(keep, path = Self::with_capacity)] - pub fn with_capacity(#[allow(unused)] capacity: usize) -> Self { + #[rune::function(path = Self::with_capacity)] + pub(crate) fn rune_with_capacity(capacity: usize) -> VmResult { + VmResult::Ok(vm_try!(Self::with_capacity(capacity))) + } + + /// Construct a new object with the given capacity. 
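The `Object` API in this hunk splits in two: a Rust-facing `insert` returning a `Result` and a VM-facing `rune_insert` wrapped in `VmResult`. The Rust-facing shape, sketched with std's `BTreeMap` and stand-in types:

```rust
use std::collections::BTreeMap;

#[derive(Debug)]
struct Error;

#[derive(Default)]
struct Object {
    inner: BTreeMap<String, i64>,
}

impl Object {
    /// Mirrors Object::insert: the usual Option<previous value>, wrapped
    /// in a Result so allocation failure can surface.
    fn insert(&mut self, k: String, v: i64) -> Result<Option<i64>, Error> {
        // The diff routes this through try_insert on a fallible BTreeMap.
        Ok(self.inner.insert(k, v))
    }
}

fn main() -> Result<(), Error> {
    let mut object = Object::default();
    assert_eq!(object.insert("a".into(), 1)?, None);
    assert_eq!(object.insert("a".into(), 2)?, Some(1));
    Ok(())
}
```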
+ pub fn with_capacity(#[allow(unused)] capacity: usize) -> Result { // BTreeMap doesn't support setting capacity on creation but we keep // this here in case we want to switch store later. - Self { - inner: BTreeMap::new(), - } + Ok(Self { + inner: BTreeMap::new_in(Global), + }) } /// Returns the number of elements in the object. @@ -211,7 +218,7 @@ impl Object { where T: ToValue, { - self.inner.insert(k, vm_try!(v.to_value())); + vm_try!(self.inner.try_insert(k, vm_try!(v.to_value()))); VmResult::Ok(()) } @@ -231,9 +238,16 @@ impl Object { /// assert_eq!(map["b"], 3); /// ``` #[inline] - #[rune::function(keep)] - pub fn insert(&mut self, k: String, v: Value) -> Option { - self.inner.insert(k, v) + #[rune::function(path = Self::insert)] + pub(crate) fn rune_insert(&mut self, k: String, v: Value) -> VmResult> { + VmResult::Ok(vm_try!(self.inner.try_insert(k, v))) + } + + /// Inserts a key-value pair into the map. + /// + /// If the map did not have this key present, `None` is returned. + pub fn insert(&mut self, k: String, v: Value) -> Result, Error> { + Ok(self.inner.try_insert(k, v)?) } /// Clears the object, removing all key-value pairs. Keeps the allocated @@ -291,8 +305,37 @@ impl Object { /// assert_eq!(vec, [("a", 1), ("b", 2), ("c", 3)]); /// ``` #[rune::function(keep, path = Self::iter)] - pub fn rune_iter(&self) -> Iterator { - Iterator::from("std::object::Iter", self.clone().into_iter()) + pub fn rune_iter(this: Ref) -> Iterator { + struct Iter { + iter: btree_map::IterRaw, + _guard: RawRef, + } + + impl iter::Iterator for Iter { + type Item = VmResult<(String, Value)>; + + fn next(&mut self) -> Option { + let (key, value) = self.iter.next()?; + + unsafe { + let key = match (*key).try_clone() { + Ok(key) => key, + Err(err) => return Some(VmResult::err(err)), + }; + + Some(VmResult::Ok((key, (*value).clone()))) + } + } + } + + // SAFETY: we're holding onto the related reference guard, and making + // sure that it's dropped after the iterator. + let iter = unsafe { this.inner.iter_raw() }; + let (_, _guard) = Ref::into_raw(this); + + let iter = Iter { iter, _guard }; + + Iterator::from("std::object::Iter", iter) } pub(crate) fn partial_eq_with( @@ -418,6 +461,14 @@ impl Object { } } +impl TryClone for Object { + fn try_clone(&self) -> Result { + Ok(Self { + inner: self.inner.try_clone()?, + }) + } +} + impl<'a> IntoIterator for &'a Object { type Item = (&'a String, &'a Value); type IntoIter = Iter<'a>; @@ -454,14 +505,6 @@ impl fmt::Debug for Object { } } -impl iter::FromIterator<(String, Value)> for Object { - fn from_iter>(src: T) -> Self { - Self { - inner: src.into_iter().collect(), - } - } -} - from_value!(Object, into_object); pub struct DebugStruct<'a> { @@ -471,6 +514,8 @@ pub struct DebugStruct<'a> { impl fmt::Display for DebugStruct<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + use ::rust_alloc::string::ToString; + let mut d = f.debug_struct(&self.item.to_string()); for (key, value) in self.st.iter() { diff --git a/crates/rune/src/runtime/protocol_caller.rs b/crates/rune/src/runtime/protocol_caller.rs index 8fff54042..330d49a74 100644 --- a/crates/rune/src/runtime/protocol_caller.rs +++ b/crates/rune/src/runtime/protocol_caller.rs @@ -55,8 +55,8 @@ impl ProtocolCaller for EnvProtocolCaller { { vm_try!(check_args(count, expected)); - let mut stack = Stack::with_capacity(count); - stack.push(target); + let mut stack = vm_try!(Stack::with_capacity(count)); + vm_try!(stack.push(target)); // Safety: We hold onto the guard until the vm has completed. 
let _guard = unsafe { vm_try!(args.unsafe_into_stack(&mut stack)) }; @@ -73,8 +73,8 @@ impl ProtocolCaller for EnvProtocolCaller { }); }; - let mut stack = Stack::with_capacity(count); - stack.push(target); + let mut stack = vm_try!(Stack::with_capacity(count)); + vm_try!(stack.push(target)); // Safety: We hold onto the guard until the vm has completed. let _guard = unsafe { vm_try!(args.unsafe_into_stack(&mut stack)) }; diff --git a/crates/rune/src/runtime/range.rs b/crates/rune/src/runtime/range.rs index 7ceb81331..d718a2807 100644 --- a/crates/rune/src/runtime/range.rs +++ b/crates/rune/src/runtime/range.rs @@ -301,7 +301,7 @@ where let start = vm_try!(self.start.to_value()); let end = vm_try!(self.end.to_value()); let range = Range::new(start, end); - VmResult::Ok(Value::from(range)) + VmResult::Ok(vm_try!(Value::try_from(range))) } } diff --git a/crates/rune/src/runtime/range_from.rs b/crates/rune/src/runtime/range_from.rs index 74b3a1f75..34e6ecc16 100644 --- a/crates/rune/src/runtime/range_from.rs +++ b/crates/rune/src/runtime/range_from.rs @@ -270,7 +270,7 @@ where fn to_value(self) -> VmResult { let start = vm_try!(self.start.to_value()); let range = RangeFrom::new(start); - VmResult::Ok(Value::from(range)) + VmResult::Ok(vm_try!(Value::try_from(range))) } } diff --git a/crates/rune/src/runtime/range_full.rs b/crates/rune/src/runtime/range_full.rs index a1f6b6319..3ef2025ec 100644 --- a/crates/rune/src/runtime/range_full.rs +++ b/crates/rune/src/runtime/range_full.rs @@ -35,7 +35,7 @@ pub struct RangeFull; impl RangeFull { /// Construct a new range. - pub fn new() -> Self { + pub const fn new() -> Self { Self } @@ -94,7 +94,7 @@ impl fmt::Debug for RangeFull { impl ToValue for ops::RangeFull { fn to_value(self) -> VmResult { let range = RangeFull::new(); - VmResult::Ok(Value::from(range)) + VmResult::Ok(vm_try!(Value::try_from(range))) } } diff --git a/crates/rune/src/runtime/range_inclusive.rs b/crates/rune/src/runtime/range_inclusive.rs index b4da9f62f..751bfe1a6 100644 --- a/crates/rune/src/runtime/range_inclusive.rs +++ b/crates/rune/src/runtime/range_inclusive.rs @@ -302,7 +302,7 @@ where let (start, end) = self.into_inner(); let start = vm_try!(start.to_value()); let end = vm_try!(end.to_value()); - VmResult::Ok(Value::from(RangeInclusive::new(start, end))) + VmResult::Ok(vm_try!(Value::try_from(RangeInclusive::new(start, end)))) } } diff --git a/crates/rune/src/runtime/range_to.rs b/crates/rune/src/runtime/range_to.rs index 337774e5e..37a59c569 100644 --- a/crates/rune/src/runtime/range_to.rs +++ b/crates/rune/src/runtime/range_to.rs @@ -191,7 +191,7 @@ where { fn to_value(self) -> VmResult { let end = vm_try!(self.end.to_value()); - VmResult::Ok(Value::from(RangeTo::new(end))) + VmResult::Ok(vm_try!(Value::try_from(RangeTo::new(end)))) } } diff --git a/crates/rune/src/runtime/range_to_inclusive.rs b/crates/rune/src/runtime/range_to_inclusive.rs index 654af1914..ae32393dd 100644 --- a/crates/rune/src/runtime/range_to_inclusive.rs +++ b/crates/rune/src/runtime/range_to_inclusive.rs @@ -191,7 +191,7 @@ where { fn to_value(self) -> VmResult { let end = vm_try!(self.end.to_value()); - VmResult::Ok(Value::from(RangeToInclusive::new(end))) + VmResult::Ok(vm_try!(Value::try_from(RangeToInclusive::new(end)))) } } diff --git a/crates/rune/src/runtime/shared.rs b/crates/rune/src/runtime/shared.rs index e36d9ce4a..ea1e5e3bc 100644 --- a/crates/rune/src/runtime/shared.rs +++ b/crates/rune/src/runtime/shared.rs @@ -9,12 +9,11 @@ use core::ptr; use core::task::{Context, Poll}; 
#[cfg(feature = "alloc")] -use alloc::rc::Rc; +use ::rust_alloc::rc::Rc; #[cfg(feature = "alloc")] -use alloc::sync::Arc; - -use crate::no_std::prelude::*; +use ::rust_alloc::sync::Arc; +use crate::alloc::{Box, Error, Global}; use crate::runtime::{ Access, AccessError, AccessKind, AnyObj, AnyObjError, BorrowMut, BorrowRef, RawAccessGuard, }; @@ -27,16 +26,18 @@ pub struct Shared { impl Shared { /// Construct a new shared value. - pub fn new(data: T) -> Self { - let inner = Box::leak(Box::new(SharedBox { + pub fn new(data: T) -> Result { + let shared = SharedBox { access: Access::new(false), count: Cell::new(1), data: data.into(), - })); + }; - Self { + let inner = Box::leak(Box::try_new_in(shared, Global)?); + + Ok(Self { inner: inner.into(), - } + }) } /// Return a debug formatter, that when printed will display detailed @@ -52,7 +53,7 @@ impl Shared { /// ``` /// use rune::runtime::Shared; /// - /// let shared = Shared::new(1u32); + /// let shared = Shared::new(1u32)?; /// assert!(shared.is_readable()); /// /// { @@ -66,6 +67,7 @@ impl Shared { /// } /// /// assert!(shared.is_readable()); + /// # Ok::<_, rune::alloc::Error>(()) /// ``` /// /// # Taking inner value @@ -73,12 +75,13 @@ impl Shared { /// ``` /// use rune::runtime::Shared; /// - /// let shared = Shared::new(1u32); + /// let shared = Shared::new(1u32)?; /// let shared2 = shared.clone(); /// assert!(shared.is_readable()); /// shared.take().unwrap(); /// assert!(!shared2.is_readable()); /// assert!(shared2.take().is_err()); + /// # Ok::<_, rune::alloc::Error>(()) /// ``` pub fn is_readable(&self) -> bool { // Safety: Since we have a reference to this shared, we know that the @@ -93,7 +96,7 @@ impl Shared { /// ``` /// use rune::runtime::Shared; /// - /// let shared = Shared::new(1u32); + /// let shared = Shared::new(1u32)?; /// assert!(shared.is_writable()); /// /// { @@ -102,6 +105,7 @@ impl Shared { /// } /// /// assert!(shared.is_writable()); + /// # Ok::<_, rune::alloc::Error>(()) /// ``` pub fn is_writable(&self) -> bool { // Safety: Since we have a reference to this shared, we know that the @@ -124,7 +128,7 @@ impl Shared { /// counter: isize, /// } /// - /// let a = Shared::new(Foo { counter: 0 }); + /// let a = Shared::new(Foo { counter: 0 })?; /// let b = a.clone(); /// /// { @@ -136,6 +140,7 @@ impl Shared { /// /// let a = a.take().unwrap(); /// assert_eq!(a.counter, 1); + /// # Ok::<_, rune::alloc::Error>(()) /// ``` pub fn take(self) -> Result { // Safety: We know that interior value is alive since this container is @@ -176,7 +181,7 @@ impl Shared { /// counter: isize, /// } /// - /// let a = Shared::new(Foo { counter: 0 }); + /// let a = Shared::new(Foo { counter: 0 })?; /// let b = a.clone(); /// /// b.borrow_mut().unwrap().counter += 1; @@ -191,6 +196,7 @@ impl Shared { /// let mut b = b.borrow_mut().unwrap(); /// b.counter += 1; /// assert_eq!(b.counter, 2); + /// # Ok::<_, rune::alloc::Error>(()) /// ``` pub fn into_ref(self) -> Result, AccessError> { // NB: we default to a "safer" mode with `AccessKind::Owned`, where @@ -236,7 +242,7 @@ impl Shared { /// counter: isize, /// } /// - /// let a = Shared::new(Foo { counter: 0 }); + /// let a = Shared::new(Foo { counter: 0 })?; /// let b = a.clone(); /// /// { @@ -248,6 +254,7 @@ impl Shared { /// } /// /// assert_eq!(b.borrow_ref().unwrap().counter, 1); + /// # Ok::<_, rune::alloc::Error>(()) /// ``` pub fn into_mut(self) -> Result, AccessError> { // NB: we default to a "safer" mode with `AccessKind::Owned`, where @@ -294,7 +301,7 @@ impl Shared { /// counter: 
isize, /// } /// - /// let a = Shared::new(Foo { counter: 0 }); + /// let a = Shared::new(Foo { counter: 0 })?; /// /// a.borrow_mut().unwrap().counter += 1; /// @@ -308,6 +315,7 @@ impl<T> Shared<T> { /// let mut a = a.borrow_mut().unwrap(); /// a.counter += 1; /// assert_eq!(a.counter, 2); + /// # Ok::<_, rune::alloc::Error>(()) /// ``` pub fn borrow_ref(&self) -> Result<BorrowRef<'_, T>, AccessError> { // Safety: We know that interior value is alive since this container is @@ -337,7 +345,7 @@ impl<T> Shared<T> { /// counter: isize, /// } /// - /// let a = Shared::new(Foo { counter: 0 }); + /// let a = Shared::new(Foo { counter: 0 })?; /// /// { /// let mut a_mut = a.borrow_mut().unwrap(); @@ -348,6 +356,7 @@ impl<T> Shared<T> { /// /// let a = a.borrow_ref().unwrap(); /// assert_eq!(a.counter, 1); + /// # Ok::<_, rune::alloc::Error>(()) /// ``` pub fn borrow_mut(&self) -> Result<BorrowMut<'_, T>, AccessError> { // Safety: We know that interior value is alive since this container is @@ -383,7 +392,7 @@ impl Shared<AnyObj> { /// let value = Thing(10u32); /// /// unsafe { - /// let (shared, guard) = Shared::from_ref(&value); + /// let (shared, guard) = Shared::from_ref(&value)?; /// assert!(shared.downcast_borrow_mut::<Thing>().is_err()); /// assert_eq!(10u32, shared.downcast_borrow_ref::<Thing>().unwrap().0); /// @@ -392,8 +401,9 @@ impl Shared<AnyObj> { /// assert!(shared.downcast_borrow_mut::<Thing>().is_err()); /// assert!(shared.downcast_borrow_ref::<Thing>().is_err()); /// } + /// # Ok::<_, rune::alloc::Error>(()) /// ``` - pub unsafe fn from_ref<T>(data: &T) -> (Self, SharedPointerGuard) + pub unsafe fn from_ref<T>(data: &T) -> Result<(Self, SharedPointerGuard), Error> where T: Any, { @@ -419,7 +429,7 @@ impl Shared<AnyObj> { /// let mut value = Thing(10u32); /// /// unsafe { - /// let (shared, guard) = Shared::from_mut(&mut value); + /// let (shared, guard) = Shared::from_mut(&mut value)?; /// shared.downcast_borrow_mut::<Thing>().unwrap().0 = 20; /// /// assert_eq!(20u32, shared.downcast_borrow_mut::<Thing>().unwrap().0); @@ -430,8 +440,9 @@ impl Shared<AnyObj> { /// assert!(shared.downcast_borrow_mut::<Thing>().is_err()); /// assert!(shared.downcast_borrow_ref::<Thing>().is_err()); /// } + /// # Ok::<_, rune::alloc::Error>(()) /// ``` - pub unsafe fn from_mut<T>(data: &mut T) -> (Self, SharedPointerGuard) + pub unsafe fn from_mut<T>(data: &mut T) -> Result<(Self, SharedPointerGuard), Error> where T: Any, { @@ -444,19 +455,20 @@ impl Shared<AnyObj> { /// # Safety /// /// The reference must be valid for the duration of the guard.
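Editor's note: a condensed version of the doctests above, showing the shape of the new fallible `Shared` API; this is a sketch that uses `rune::alloc::Error` as the error type, exactly as the `# Ok::<_, rune::alloc::Error>(())` lines in the doctests do:

fn shared_demo() -> Result<(), rune::alloc::Error> {
    use rune::runtime::Shared;

    // Construction can now fail on allocation.
    let shared = Shared::new(1u32)?;
    assert!(shared.is_readable() && shared.is_writable());

    {
        // Borrowing is unchanged and still checked at runtime.
        let mut value = shared.borrow_mut().unwrap();
        *value += 1;
    }

    assert_eq!(*shared.borrow_ref().unwrap(), 2);
    Ok(())
}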
- unsafe fn unsafe_from_any_pointer(any: AnyObj) -> (Self, SharedPointerGuard) { - let inner = ptr::NonNull::from(Box::leak(Box::new(SharedBox { + unsafe fn unsafe_from_any_pointer(any: AnyObj) -> Result<(Self, SharedPointerGuard), Error> { + let shared = SharedBox { access: Access::new(true), count: Cell::new(2), data: any.into(), - }))); + }; + let inner = ptr::NonNull::from(Box::leak(Box::try_new_in(shared, Global)?)); let guard = SharedPointerGuard { _inner: RawDrop::take_shared_box(inner), }; let value = Self { inner }; - (value, guard) + Ok((value, guard)) } /// Take the interior value, if we have exclusive access to it and there @@ -487,7 +499,7 @@ impl Shared<AnyObj> { let expected = TypeId::of::<T>(); let (e, any) = match any.raw_take(expected) { - Ok(value) => return Ok(*Box::from_raw(value as *mut T)), + Ok(value) => return Ok(Box::into_inner(Box::from_raw_in(value as *mut T, Global))), Err((AnyObjError::Cast, any)) => { let actual = any.type_name(); @@ -791,7 +803,7 @@ impl<T: ?Sized> SharedBox<T> { return false; } - let this = Box::from_raw(this); + let this = Box::from_raw_in(this, Global); if this.access.is_taken() { // NB: This prevents the inner `T` from being dropped in case it @@ -997,7 +1009,7 @@ impl<T: ?Sized> Ref<T> { /// ``` /// use rune::runtime::{Shared, Ref}; /// - /// let vec = Shared::<Vec<u32>>::new(vec![1, 2, 3, 4]); + /// let vec = Shared::<Vec<u32>>::new(vec![1, 2, 3, 4])?; /// let vec = vec.into_ref()?; /// let value: Ref<[u32]> = Ref::map(vec, |vec| &vec[0..2]); /// @@ -1032,7 +1044,7 @@ impl<T: ?Sized> Ref<T> { /// ``` /// use rune::runtime::{Shared, Ref}; /// - /// let vec = Shared::<Vec<u32>>::new(vec![1, 2, 3, 4]); + /// let vec = Shared::<Vec<u32>>::new(vec![1, 2, 3, 4])?; /// let vec = vec.into_ref()?; /// let value: Option<Ref<[u32]>> = Ref::try_map(vec, |vec| vec.get(0..2)); /// @@ -1135,7 +1147,7 @@ impl<T: ?Sized> Mut<T> { /// ``` /// use rune::runtime::{Mut, Shared}; /// - /// let vec = Shared::<Vec<u32>>::new(vec![1, 2, 3, 4]); + /// let vec = Shared::<Vec<u32>>::new(vec![1, 2, 3, 4])?; /// let vec = vec.into_mut()?; /// let value: Mut<[u32]> = Mut::map(vec, |vec| &mut vec[0..2]); /// @@ -1172,7 +1184,7 @@ impl<T: ?Sized> Mut<T> { /// ``` /// use rune::runtime::{Mut, Shared}; /// - /// let vec = Shared::<Vec<u32>>::new(vec![1, 2, 3, 4]); + /// let vec = Shared::<Vec<u32>>::new(vec![1, 2, 3, 4])?; /// let vec = vec.into_mut()?; /// let mut value: Option<Mut<[u32]>> = Mut::try_map(vec, |vec| vec.get_mut(0..2)); /// diff --git a/crates/rune/src/runtime/stack.rs b/crates/rune/src/runtime/stack.rs index d486e9c0d..7b3ada569 100644 --- a/crates/rune/src/runtime/stack.rs +++ b/crates/rune/src/runtime/stack.rs @@ -1,12 +1,11 @@ use core::array; use core::fmt; -use core::iter; use core::mem::replace; use core::slice; use crate::no_std::borrow::Cow; -use crate::no_std::prelude::*; +use crate::alloc::{Error, Global, IteratorExt, TryClone, TryFromIteratorIn, Vec}; use crate::runtime::{InstAddress, Value}; /// An error raised when interacting with the stack. @@ -24,7 +23,7 @@ impl fmt::Display for StackError { impl crate::no_std::error::Error for StackError {} /// The stack of the virtual machine, where all values are stored. -#[derive(Default, Debug, Clone)] +#[derive(Default, Debug)] pub struct Stack { /// The current stack of values.
stack: Vec<Value>, @@ -43,7 +42,7 @@ impl Stack { /// /// let mut stack = Stack::new(); /// assert!(stack.pop().is_err()); - /// stack.push(String::from("Hello World")); + /// stack.push(rune::to_value(String::from("Hello World"))?); /// assert!(matches!(stack.pop()?, Value::String(..))); /// # Ok::<_, rune::Error>(()) /// ``` @@ -60,17 +59,17 @@ impl Stack { /// use rune::runtime::Stack; /// use rune::Value; /// - /// let mut stack = Stack::with_capacity(16); + /// let mut stack = Stack::with_capacity(16)?; /// assert!(stack.pop().is_err()); - /// stack.push(String::from("Hello World")); + /// stack.push(rune::to_value(String::from("Hello World"))?); /// assert!(matches!(stack.pop()?, Value::String(..))); /// # Ok::<_, rune::Error>(()) /// ``` - pub fn with_capacity(capacity: usize) -> Self { - Self { - stack: Vec::with_capacity(capacity), + pub fn with_capacity(capacity: usize) -> Result<Self, Error> { + Ok(Self { + stack: Vec::try_with_capacity(capacity)?, stack_bottom: 0, - } + }) } /// Check if the stack is empty. @@ -83,8 +82,9 @@ impl Stack { /// /// let mut stack = Stack::new(); /// assert!(stack.is_empty()); - /// stack.push(String::from("Hello World")); + /// stack.push(rune::to_value(String::from("Hello World"))?); /// assert!(!stack.is_empty()); + /// # Ok::<_, rune::Error>(()) /// ``` /// /// [stack_bottom]: Self::stack_bottom() @@ -102,8 +102,9 @@ impl Stack { /// /// let mut stack = Stack::new(); /// assert_eq!(stack.len(), 0); - /// stack.push(String::from("Hello World")); + /// stack.push(rune::to_value(String::from("Hello World"))?); /// assert_eq!(stack.len(), 1); + /// # Ok::<_, rune::Error>(()) /// ``` /// /// [stack_bottom]: Self::stack_bottom() @@ -140,15 +141,13 @@ impl Stack { /// /// let mut stack = Stack::new(); /// assert!(stack.pop().is_err()); - /// stack.push(String::from("Hello World")); + /// stack.push(rune::to_value(String::from("Hello World"))?); /// assert!(matches!(stack.pop()?, Value::String(..))); /// # Ok::<_, rune::Error>(()) /// ``` - pub fn push<T>(&mut self, value: T) - where - Value: From<T>, - { - self.stack.push(Value::from(value)); + pub fn push(&mut self, value: Value) -> Result<(), Error> { + self.stack.try_push(value)?; + Ok(()) } /// Pop a value from the stack.
@@ -159,7 +158,7 @@ impl Stack { /// /// let mut stack = Stack::new(); /// assert!(stack.pop().is_err()); - /// stack.push(String::from("Hello World")); + /// stack.push(rune::to_value(String::from("Hello World"))?); /// assert!(matches!(stack.pop()?, Value::String(..))); /// # Ok::<_, rune::Error>(()) /// ``` @@ -180,9 +179,9 @@ impl Stack { /// /// let mut stack = Stack::new(); /// - /// stack.push(42i64); - /// stack.push(String::from("foo")); - /// stack.push(()); + /// stack.push(rune::to_value(42i64)?); + /// stack.push(rune::to_value(String::from("foo"))?); + /// stack.push(rune::to_value(())?); /// /// let mut it = stack.drain(2)?; /// @@ -214,11 +213,12 @@ impl Stack { /// /// ``` /// use rune::runtime::Stack; + /// use rune::alloc::String; /// use rune::Value; /// /// let mut stack = Stack::new(); /// - /// stack.extend([Value::from(42i64), Value::from(String::from("foo")), Value::EmptyTuple]); + /// stack.extend([Value::from(42i64), Value::try_from(String::try_from("foo")?)?, Value::EmptyTuple]); /// /// let mut it = stack.drain(2)?; /// @@ -227,11 +227,15 @@ impl Stack { /// assert!(matches!(it.next(), None)); /// # Ok::<_, rune::Error>(()) /// ``` - pub fn extend<I>(&mut self, iter: I) + pub fn extend<I>(&mut self, iter: I) -> Result<(), Error> where I: IntoIterator<Item = Value>, { - self.stack.extend(iter); + for value in iter { + self.stack.try_push(value)?; + } + + Ok(()) } /// Clear the current stack. @@ -327,8 +331,21 @@ impl Stack { } /// Pop a sequence of values from the stack. - pub(crate) fn pop_sequence(&mut self, count: usize) -> Result<Vec<Value>, StackError> { - Ok(self.drain(count)?.collect::<Vec<_>>()) + pub(crate) fn pop_sequence( + &mut self, + count: usize, + ) -> Result<Result<Vec<Value>, StackError>, Error> { + let Ok(iter) = self.drain(count) else { + return Ok(Err(StackError)); + }; + + let mut vec = Vec::try_with_capacity(iter.size_hint().0)?; + + for value in iter { + vec.try_push(value)?; + } + + Ok(Ok(vec)) } /// Swap the value at position a with the value at position b.
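Editor's note: the stack API is now fallible end to end; pushing first converts the Rust value, then grows the backing storage, and both steps can fail. A compact sketch mirroring the updated doctests, which rely on the stack's error types converting into `rune::Error` for the `?` operator:

fn stack_demo() -> Result<(), rune::Error> {
    use rune::runtime::Stack;

    // Reserving capacity up front is itself a fallible allocation now.
    let mut stack = Stack::with_capacity(2)?;

    stack.push(rune::to_value(1i64)?)?;
    stack.push(rune::to_value("two")?)?;

    // drain yields the top `n` values in insertion order.
    let mut it = stack.drain(2)?;
    assert!(it.next().is_some());
    assert!(it.next().is_some());
    assert!(it.next().is_none());
    Ok(())
}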
@@ -388,12 +405,21 @@ impl Stack { } } -impl iter::FromIterator<Value> for Stack { - fn from_iter<T: IntoIterator<Item = Value>>(iter: T) -> Self { - Self { - stack: iter.into_iter().collect(), +impl TryClone for Stack { + fn try_clone(&self) -> Result<Self, Error> { + Ok(Self { + stack: self.stack.try_clone()?, + stack_bottom: self.stack_bottom, + }) + } +} + +impl TryFromIteratorIn<Value, Global> for Stack { + fn try_from_iter_in<T: IntoIterator<Item = Value>>(iter: T, _: Global) -> Result<Self, Error> { + Ok(Self { + stack: iter.into_iter().try_collect()?, stack_bottom: 0, - } + }) } } diff --git a/crates/rune/src/runtime/static_type.rs b/crates/rune/src/runtime/static_type.rs index 36222fa07..811189a89 100644 --- a/crates/rune/src/runtime/static_type.rs +++ b/crates/rune/src/runtime/static_type.rs @@ -3,9 +3,8 @@ use core::hash; use core::ops::ControlFlow; use crate::no_std::collections::HashMap; -use crate::no_std::prelude::*; -use crate::no_std::vec; +use crate::alloc; use crate::runtime as rt; use crate::runtime::{RawStr, TypeInfo}; use crate::Hash; @@ -106,7 +105,9 @@ pub(crate) static STRING_TYPE: &StaticType = &StaticType { hash: ::rune_macros::hash!(::std::string::String), }; -impl_static_type!(String => STRING_TYPE); +#[cfg(feature = "alloc")] +impl_static_type!(::rust_alloc::string::String => STRING_TYPE); +impl_static_type!(alloc::String => STRING_TYPE); impl_static_type!(str => STRING_TYPE); pub(crate) static BYTES_TYPE: &StaticType = &StaticType { @@ -122,7 +123,9 @@ pub(crate) static VEC_TYPE: &StaticType = &StaticType { }; impl_static_type!([rt::Value] => VEC_TYPE); -impl_static_type!(impl<T> vec::Vec<T> => VEC_TYPE); +#[cfg(feature = "alloc")] +impl_static_type!(impl<T> ::rust_alloc::vec::Vec<T> => VEC_TYPE); +impl_static_type!(impl<T> alloc::Vec<T> => VEC_TYPE); impl_static_type!(impl<H> rt::VecTuple<H> => VEC_TYPE); pub(crate) static TUPLE_TYPE: &StaticType = &StaticType { @@ -138,7 +141,7 @@ pub(crate) static OBJECT_TYPE: &StaticType = &StaticType { }; impl_static_type!(rt::Struct => OBJECT_TYPE); -impl_static_type!(impl<T> HashMap<String, T> => OBJECT_TYPE); +impl_static_type!(impl<T> HashMap<alloc::String, T> => OBJECT_TYPE); pub(crate) static RANGE_FROM_TYPE: &StaticType = &StaticType { name: RawStr::from_str("RangeFrom"), diff --git a/crates/rune/src/runtime/tests.rs b/crates/rune/src/runtime/tests.rs index 1d025bd00..571338ac5 100644 --- a/crates/rune/src/runtime/tests.rs +++ b/crates/rune/src/runtime/tests.rs @@ -7,13 +7,13 @@ struct Foo(isize); #[test] fn test_take() { - let thing = Shared::new(AnyObj::new(Foo(0))); + let thing = Shared::new(AnyObj::new(Foo(0))).unwrap(); let _ = thing.take().unwrap(); } #[test] fn test_clone_take() { - let thing = Shared::new(AnyObj::new(Foo(0))); + let thing = Shared::new(AnyObj::new(Foo(0))).unwrap(); let thing2 = thing.clone(); assert_eq!(Foo(0), thing2.take_downcast::<Foo>().unwrap()); assert!(thing.take().is_err()); @@ -27,7 +27,7 @@ fn test_from_ref() { let value = Thing(10u32); unsafe { - let (shared, guard) = Shared::from_ref(&value); + let (shared, guard) = Shared::from_ref(&value).unwrap(); assert!(shared.downcast_borrow_mut::<Thing>().is_err()); assert_eq!(10u32, shared.downcast_borrow_ref::<Thing>().unwrap().0); @@ -46,7 +46,7 @@ fn test_from_mut() { let mut value = Thing(10u32); unsafe { - let (shared, guard) = Shared::from_mut(&mut value); + let (shared, guard) = Shared::from_mut(&mut value).unwrap(); shared.downcast_borrow_mut::<Thing>().unwrap().0 = 20; assert_eq!(20u32, shared.downcast_borrow_mut::<Thing>().unwrap().0); diff --git a/crates/rune/src/runtime/to_value.rs b/crates/rune/src/runtime/to_value.rs index a37457a42..20eaa726a 100644 ---
b/crates/rune/src/runtime/to_value.rs @@ -2,8 +2,9 @@ use core::any; use core::cmp::Ordering; use crate::no_std::collections::HashMap; -use crate::no_std::prelude::*; +use crate::no_std::std; +use crate::alloc::{self, TryToString}; use crate::runtime::{ AnyObj, Object, Shared, Value, VmError, VmErrorKind, VmIntegerRepr, VmResult, }; @@ -133,7 +134,7 @@ where T: Any, { fn to_value(self) -> VmResult<Value> { - VmResult::Ok(Value::from(AnyObj::new(self))) + VmResult::Ok(vm_try!(Value::try_from(AnyObj::new(self)))) } } @@ -162,27 +163,40 @@ where T: ToValue, { fn to_value(self) -> VmResult<Value> { - VmResult::Ok(Value::from(Shared::new(match self { - Some(some) => { - let value = vm_try!(some.to_value()); - Some(value) - } + VmResult::Ok(Value::from(vm_try!(Shared::new(match self { + Some(some) => Some(vm_try!(some.to_value())), None => None, - }))) + })))) } } // String impls -impl ToValue for Box<str> { +impl ToValue for std::Box<str> { + fn to_value(self) -> VmResult<Value> { + let this = vm_try!(self.try_to_string()); + VmResult::Ok(Value::from(vm_try!(Shared::new(this)))) + } +} + +impl ToValue for alloc::Box<str> { + fn to_value(self) -> VmResult<Value> { + let this = alloc::String::from(self); + VmResult::Ok(Value::from(vm_try!(Shared::new(this)))) + } +} + +impl ToValue for std::String { fn to_value(self) -> VmResult<Value> { - VmResult::Ok(Value::from(Shared::new(self.to_string()))) + let string = vm_try!(alloc::String::try_from(self)); + VmResult::Ok(Value::from(vm_try!(Shared::new(string)))) } } impl ToValue for &str { fn to_value(self) -> VmResult<Value> { - VmResult::Ok(Value::from(Shared::new(self.to_string()))) + let this = vm_try!(alloc::String::try_from(self)); + VmResult::Ok(Value::from(vm_try!(Shared::new(this)))) } } @@ -208,11 +222,11 @@ where VmResult::Ok(match self { Ok(ok) => { let ok = vm_try!(ok.to_value()); - Value::from(Shared::new(Ok(ok))) + Value::from(vm_try!(Shared::new(Ok(ok)))) } Err(err) => { let err = vm_try!(err.to_value()); - Value::from(Shared::new(Err(err))) + Value::from(vm_try!(Shared::new(Err(err)))) } }) } @@ -263,19 +277,21 @@ macro_rules!
impl_map { T: ToValue, { fn to_value(self) -> VmResult<Value> { - let mut output = Object::with_capacity(self.len()); + let mut output = vm_try!(Object::with_capacity(self.len())); for (key, value) in self { - output.insert(key, vm_try!(value.to_value())); + let key = vm_try!(alloc::String::try_from(key)); + vm_try!(output.insert(key, vm_try!(value.to_value()))); } - VmResult::Ok(Value::from(Shared::new(output))) + VmResult::Ok(Value::from(vm_try!(Shared::new(output)))) } } }; } -impl_map!(HashMap<String, T>); +impl_map!(HashMap<std::String, T>); +impl_map!(HashMap<alloc::String, T>); impl ToValue for Ordering { #[inline] diff --git a/crates/rune/src/runtime/tuple.rs b/crates/rune/src/runtime/tuple.rs index 401e54486..37ba3d00d 100644 --- a/crates/rune/src/runtime/tuple.rs +++ b/crates/rune/src/runtime/tuple.rs @@ -2,14 +2,15 @@ use core::fmt; use core::ops; use core::slice; -use crate::no_std::prelude::*; +use crate::no_std::std; use crate as rune; +use crate::alloc::{Box, Error, Global, TryClone}; use crate::runtime::{ - ConstValue, FromValue, Mut, RawMut, RawRef, Ref, Shared, ToValue, UnsafeToMut, UnsafeToRef, - Value, VmErrorKind, VmResult, + ConstValue, FromValue, Mut, RawMut, RawRef, Ref, ToValue, UnsafeToMut, UnsafeToRef, Value, + VmErrorKind, VmResult, }; -#[cfg(feature = "std")] +#[cfg(feature = "alloc")] use crate::runtime::{Hasher, ProtocolCaller}; use crate::Any; @@ -47,7 +48,7 @@ impl Tuple { VmResult::Ok(Some(vm_try!(T::from_value(value)))) } - #[cfg(feature = "std")] + #[cfg(feature = "alloc")] pub(crate) fn hash_with( &self, hasher: &mut Hasher, @@ -100,10 +101,9 @@ impl<'a> IntoIterator for &'a mut Tuple { /// Struct representing a dynamic anonymous object. /// /// To access borrowed values of a tuple in native functions, use [`Tuple`]. -#[derive(Clone)] #[repr(transparent)] pub struct OwnedTuple { - inner: Option<Box<[Value]>>, + inner: Box<[Value]>, } impl OwnedTuple { @@ -114,18 +114,37 @@ impl OwnedTuple { /// ``` /// use rune::runtime::OwnedTuple; /// - /// const EMPTY: OwnedTuple = OwnedTuple::new(); + /// let empty = OwnedTuple::new(); /// ``` - pub const fn new() -> Self { - Self { inner: None } + pub fn new() -> Self { + Self { + inner: Box::default(), + } } /// Convert into inner std boxed slice.
pub fn into_inner(self) -> Box<[Value]> { - match self.inner { - Some(values) => values, - None => Box::from([]), - } + self.inner + } +} + +impl Default for OwnedTuple { + fn default() -> Self { + Self::new() + } +} + +impl TryClone for OwnedTuple { + #[inline] + fn try_clone(&self) -> Result<Self, Error> { + Ok(Self { + inner: self.inner.try_clone()?, + }) + } + + #[inline] + fn try_clone_from(&mut self, source: &Self) -> Result<(), Error> { + self.inner.try_clone_from(&source.inner) } } @@ -154,73 +173,105 @@ impl ops::Deref for OwnedTuple { #[inline] fn deref(&self) -> &Self::Target { - match &self.inner { - Some(values) => Tuple::new(values), - None => Tuple::new(&[]), - } + Tuple::new(&self.inner) } } impl ops::DerefMut for OwnedTuple { #[inline] fn deref_mut(&mut self) -> &mut Self::Target { - match &mut self.inner { - Some(values) => Tuple::new_mut(values), - None => Tuple::new_mut(&mut []), - } + Tuple::new_mut(&mut self.inner) } } -impl From<Vec<Value>> for OwnedTuple { +impl TryFrom<std::Vec<Value>> for OwnedTuple { + type Error = Error; + #[inline] - fn from(vec: Vec<Value>) -> Self { - Self { - inner: if vec.is_empty() { - None - } else { - Some(vec.into_boxed_slice()) - }, - } + fn try_from(vec: std::Vec<Value>) -> Result<Self, Self::Error> { + Ok(Self { + inner: rune_alloc::Box::try_from(vec.into_boxed_slice())?, + }) } } -impl<const N: usize> From<[Value; N]> for OwnedTuple { +impl TryFrom<rune_alloc::Vec<Value>> for OwnedTuple { + type Error = Error; + #[inline] - fn from(values: [Value; N]) -> Self { - Self { - inner: if values.is_empty() { - None - } else { - Some(values.into()) - }, - } + fn try_from(vec: rune_alloc::Vec<Value>) -> Result<Self, Self::Error> { + Ok(Self { + inner: vec.try_into_boxed_slice()?, + }) } } -impl From<Box<[Value]>> for OwnedTuple { +impl<const N: usize> TryFrom<[Value; N]> for OwnedTuple { + type Error = Error; + #[inline] - fn from(inner: Box<[Value]>) -> Self { - Self { - inner: if inner.is_empty() { None } else { Some(inner) }, - } + fn try_from(values: [Value; N]) -> Result<Self, Self::Error> { + Ok(Self { + inner: values.try_into()?, + }) } } -impl From<Box<[ConstValue]>> for OwnedTuple { - fn from(inner: Box<[ConstValue]>) -> Self { +impl TryFrom<std::Box<[Value]>> for OwnedTuple { + type Error = Error; + + #[inline] + fn try_from(inner: std::Box<[Value]>) -> Result<Self, Self::Error> { + Ok(Self { + inner: rune_alloc::Box::try_from(inner)?, + }) + } +} + +impl From<rune_alloc::Box<[Value]>> for OwnedTuple { + #[inline] + fn from(inner: rune_alloc::Box<[Value]>) -> Self { + Self { inner } + } +} + +impl TryFrom<std::Box<[ConstValue]>> for OwnedTuple { + type Error = Error; + + fn try_from(inner: std::Box<[ConstValue]>) -> Result<Self, Self::Error> { if inner.is_empty() { - return OwnedTuple::new(); + return Ok(OwnedTuple::new()); } - let mut out = Vec::with_capacity(inner.len()); + let mut out = rune_alloc::Vec::try_with_capacity_in(inner.len(), Global)?; for value in inner.into_vec() { - out.push(value.into_value()); + out.try_push(value.into_value()?)?; } - Self { - inner: Some(out.into_boxed_slice()), + Ok(Self { + inner: out.try_into_boxed_slice()?, + }) + } +} + +impl TryFrom<rune_alloc::Box<[ConstValue]>> for OwnedTuple { + type Error = Error; + + fn try_from(inner: rune_alloc::Box<[ConstValue]>) -> Result<Self, Self::Error> { + if inner.is_empty() { + return Ok(OwnedTuple::new()); } + + let mut out = rune_alloc::Vec::try_with_capacity_in(inner.len(), Global)?; + + for value in rune_alloc::Vec::from(inner) { + out.try_push(value.into_value()?)?; + } + + Ok(Self { + inner: out.try_into_boxed_slice()?, + }) } } @@ -280,7 +331,9 @@ macro_rules!
impl_tuple { fn to_value(self) -> VmResult<Value> { let ($($var,)*) = self; $(let $var = vm_try!($var.to_value());)* - VmResult::Ok(Value::from(OwnedTuple::from(vec![$($var,)*]))) + let mut vec = vm_try!(rune_alloc::Vec::try_with_capacity($count)); + $(vm_try!(vec.try_push($var));)* + VmResult::Ok(vm_try!(Value::try_from(vm_try!(OwnedTuple::try_from(vec))))) } } }; @@ -291,11 +344,7 @@ repeat_macro!(impl_tuple); impl FromValue for Mut<Tuple> { fn from_value(value: Value) -> VmResult<Self> { match value { - Value::EmptyTuple => { - let tuple = vm_try!(Shared::new(OwnedTuple::new()).into_mut()); - let tuple = Mut::map(tuple, |this| &mut **this); - VmResult::Ok(tuple) - } + Value::EmptyTuple => VmResult::Ok(Mut::from_static(Tuple::new_mut(&mut []))), Value::Tuple(tuple) => { let tuple = vm_try!(tuple.into_mut()); let tuple = Mut::map(tuple, |this| &mut **this); diff --git a/crates/rune/src/runtime/value.rs b/crates/rune/src/runtime/value.rs index dad2fb41c..bd517aa19 100644 --- a/crates/rune/src/runtime/value.rs +++ b/crates/rune/src/runtime/value.rs @@ -1,15 +1,15 @@ +mod serde; + use core::any; use core::borrow::Borrow; use core::cmp::{Eq, Ord, Ordering, PartialEq, PartialOrd}; use core::fmt; -use core::fmt::Write; use core::hash; use core::ptr; -use crate::no_std::prelude::*; use crate::no_std::sync::Arc; -use crate::no_std::vec; +use crate::alloc::{self, Error, String, TryClone, TryToString, TryWrite}; use crate::compile::ItemBuf; use crate::runtime::vm::CallResult; use crate::runtime::{ @@ -19,11 +19,11 @@ use crate::runtime::{ RangeTo, RangeToInclusive, RawMut, RawRef, Ref, Shared, Stream, ToValue, Type, TypeInfo, Variant, Vec, Vm, VmError, VmErrorKind, VmIntegerRepr, VmResult, }; -#[cfg(feature = "std")] +#[cfg(feature = "alloc")] use crate::runtime::{Hasher, Tuple}; use crate::{Any, Hash}; -use serde::{de, ser, Deserialize, Serialize}; +use ::serde::{Deserialize, Serialize}; // Small helper function to build errors. fn err<T, E>(error: E) -> VmResult<T> @@ -303,7 +303,7 @@ pub enum Value { /// A stored function pointer. Function(Shared<Function>), /// A value being formatted. - Format(Box<Format>), + Format(Shared<Format>), /// An iterator. Iterator(Shared<Iterator>), /// An opaque value that can be downcasted. @@ -323,7 +323,7 @@ impl Value { /// # Panics /// /// This function will panic if called outside of a virtual machine.
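Editor's note: with the `Format` variant now behind `Shared` and the formatting protocols below returning `VmResult<()>`, debug-formatting collects errors through the VM result rather than an inner `fmt::Result`. A sketch of the pattern the `fmt::Debug` impl later in this hunk uses; it assumes `Formatter::new` and `as_str` are reachable the same way they are in that impl, and note that the protocol fallback for external types still requires a VM environment:

use rune::runtime::{Formatter, Value};

fn debug_string(value: &Value) -> Option<String> {
    let mut f = Formatter::new();
    // For primitive values this writes directly; for external types it
    // falls back to the STRING_DEBUG protocol and needs a VM in scope.
    value.string_debug(&mut f).into_result().ok()?;
    Some(f.as_str().to_owned())
}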
- pub fn string_display(&self, f: &mut Formatter) -> VmResult<fmt::Result> { + pub fn string_display(&self, f: &mut Formatter) -> VmResult<()> { self.string_display_with(f, &mut EnvProtocolCaller) } @@ -332,31 +332,32 @@ impl Value { &self, f: &mut Formatter, caller: &mut impl ProtocolCaller, - ) -> VmResult<fmt::Result> { + ) -> VmResult<()> { match self { Value::Format(format) => { + let format = vm_try!(format.borrow_ref()); vm_try!(format.spec.format(&format.value, f, caller)); } Value::Char(c) => { - f.push(*c); + vm_try!(f.push(*c)); } Value::String(string) => { - f.push_str(&vm_try!(string.borrow_ref())); + vm_try!(f.push_str(&vm_try!(string.borrow_ref()))); } Value::Integer(integer) => { let mut buffer = itoa::Buffer::new(); - f.push_str(buffer.format(*integer)); + vm_try!(f.push_str(buffer.format(*integer))); } Value::Float(float) => { let mut buffer = ryu::Buffer::new(); - f.push_str(buffer.format(*float)); + vm_try!(f.push_str(buffer.format(*float))); } Value::Bool(bool) => { - return VmResult::Ok(write!(f, "{}", bool)); + return VmResult::Ok(vm_write!(f, "{}", bool)); } Value::Byte(byte) => { let mut buffer = itoa::Buffer::new(); - f.push_str(buffer.format(*byte)); + vm_try!(f.push_str(buffer.format(*byte))); } value => { let result = vm_try!(caller.call_protocol_fn( Protocol::STRING_DISPLAY, value.clone(), (f,), )); - let result = vm_try!(fmt::Result::from_value(result)); - return VmResult::Ok(result); + return VmResult::Ok(vm_try!(<()>::from_value(result))); } } - VmResult::Ok(fmt::Result::Ok(())) + VmResult::Ok(()) } /// Debug format the value using the [`STRING_DEBUG`] protocol. @@ -383,7 +383,7 @@ impl Value { /// This function will panic if called outside of a virtual machine. /// /// [`STRING_DEBUG`]: Protocol::STRING_DEBUG - pub fn string_debug(&self, f: &mut Formatter) -> VmResult<fmt::Result> { + pub fn string_debug(&self, f: &mut Formatter) -> VmResult<()> { self.string_debug_with(f, &mut EnvProtocolCaller) } @@ -392,116 +392,116 @@ impl Value { &self, f: &mut Formatter, caller: &mut impl ProtocolCaller, - ) -> VmResult<fmt::Result> { - let result = match self { + ) -> VmResult<()> { + match self { Value::Bool(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::Byte(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::Char(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::Integer(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::Float(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::Type(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::String(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::Bytes(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::Vec(value) => { let value = vm_try!(value.borrow_ref()); - vm_try!(Vec::string_debug_with(&value, f, caller)) + vm_try!(Vec::string_debug_with(&value, f, caller)); } Value::EmptyTuple => { - write!(f, "()") + vm_write!(f, "()"); } Value::Tuple(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::Object(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::RangeFrom(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::RangeFull(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::RangeInclusive(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::RangeToInclusive(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value);
} Value::RangeTo(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::Range(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::ControlFlow(value) => { let value = vm_try!(value.borrow_ref()); - vm_try!(ControlFlow::string_debug_with(&value, f, caller)) + vm_try!(ControlFlow::string_debug_with(&value, f, caller)); } Value::Future(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::Stream(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::Generator(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::GeneratorState(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::Option(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::Result(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::EmptyStruct(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::TupleStruct(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::Struct(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::Variant(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::Function(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::Format(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } Value::Iterator(value) => { - write!(f, "{:?}", value) + vm_write!(f, "{:?}", value); } value => { let result = vm_try!(caller.call_protocol_fn(Protocol::STRING_DEBUG, value.clone(), (f,),)); - let result = vm_try!(fmt::Result::from_value(result)); - return VmResult::Ok(result); + vm_try!(<()>::from_value(result)); + return VmResult::Ok(()); } }; - VmResult::Ok(result) + VmResult::Ok(()) } /// Convert value into an iterator using the [`Protocol::INTO_ITER`] @@ -525,7 +525,9 @@ impl Value { &**vec }))) } - Value::Object(object) => return VmResult::Ok(vm_try!(object.borrow_ref()).rune_iter()), + Value::Object(object) => { + return VmResult::Ok(Object::rune_iter(vm_try!(object.into_ref()))) + } target => target, }; @@ -552,7 +554,7 @@ impl Value { }; let value = vm_try!(EnvProtocolCaller.call_protocol_fn(Protocol::INTO_FUTURE, target, ())); - VmResult::Ok(Shared::new(vm_try!(Future::from_value(value)))) + VmResult::Ok(vm_try!(Shared::new(vm_try!(Future::from_value(value))))) } /// Retrieves a human readable type name for the current value. @@ -571,53 +573,64 @@ impl Value { crate::runtime::env::with(|context, unit| { if let Some(name) = context.constant(hash) { match name { - ConstValue::String(s) => return VmResult::Ok(s.clone()), + ConstValue::String(s) => { + return VmResult::Ok(vm_try!(String::try_from(s.as_str()))) + } _ => return err(VmErrorKind::expected::<String>(name.type_info())), } } if let Some(name) = unit.constant(hash) { match name { - ConstValue::String(s) => return VmResult::Ok(s.clone()), + ConstValue::String(s) => { + return VmResult::Ok(vm_try!(String::try_from(s.as_str()))) + } _ => return err(VmErrorKind::expected::<String>(name.type_info())), } } - VmResult::Ok(vm_try!(self.type_info()).to_string()) + VmResult::Ok(vm_try!(vm_try!(self.type_info()).try_to_string())) }) } /// Construct a vector. - pub fn vec(vec: vec::Vec<Value>) -> Self { - Self::Vec(Shared::new(Vec::from(vec))) + pub fn vec(vec: alloc::Vec<Value>) -> VmResult<Self> { + VmResult::Ok(Self::Vec(vm_try!(Shared::new(Vec::from(vec))))) } /// Construct a tuple.
- pub fn tuple(vec: vec::Vec<Value>) -> Self { - Self::Tuple(Shared::new(OwnedTuple::from(vec))) + pub fn tuple(vec: alloc::Vec<Value>) -> VmResult<Self> { + VmResult::Ok(Self::Tuple(vm_try!(Shared::new(vm_try!( + OwnedTuple::try_from(vec) + ))))) } /// Construct an empty. - pub fn empty_struct(rtti: Arc<Rtti>) -> Self { - Self::EmptyStruct(Shared::new(EmptyStruct { rtti })) + pub fn empty_struct(rtti: Arc<Rtti>) -> VmResult<Self> { + VmResult::Ok(Self::EmptyStruct(vm_try!(Shared::new(EmptyStruct { + rtti + })))) } /// Construct a typed tuple. - pub fn tuple_struct(rtti: Arc<Rtti>, vec: vec::Vec<Value>) -> Self { - Self::TupleStruct(Shared::new(TupleStruct { + pub fn tuple_struct(rtti: Arc<Rtti>, vec: alloc::Vec<Value>) -> VmResult<Self> { + VmResult::Ok(Self::TupleStruct(vm_try!(Shared::new(TupleStruct { rtti, - data: OwnedTuple::from(vec), - })) + data: vm_try!(OwnedTuple::try_from(vec)), + })))) } /// Construct an empty variant. - pub fn unit_variant(rtti: Arc<VariantRtti>) -> Self { - Self::Variant(Shared::new(Variant::unit(rtti))) + pub fn unit_variant(rtti: Arc<VariantRtti>) -> VmResult<Self> { + VmResult::Ok(Self::Variant(vm_try!(Shared::new(Variant::unit(rtti))))) } /// Construct a tuple variant. - pub fn tuple_variant(rtti: Arc<VariantRtti>, vec: vec::Vec<Value>) -> Self { - Self::Variant(Shared::new(Variant::tuple(rtti, OwnedTuple::from(vec)))) + pub fn tuple_variant(rtti: Arc<VariantRtti>, vec: alloc::Vec<Value>) -> VmResult<Self> { + VmResult::Ok(Self::Variant(vm_try!(Shared::new(Variant::tuple( + rtti, + vm_try!(OwnedTuple::try_from(vec)) + ))))) } /// Take the interior value. @@ -630,35 +643,45 @@ impl Value { Self::Float(value) => Self::Float(value), Self::Type(value) => Self::Type(value), Self::Ordering(value) => Self::Ordering(value), - Self::String(value) => Self::String(Shared::new(vm_try!(value.take()))), - Self::Bytes(value) => Self::Bytes(Shared::new(vm_try!(value.take()))), - Self::Vec(value) => Self::Vec(Shared::new(vm_try!(value.take()))), + Self::String(value) => Self::String(vm_try!(Shared::new(vm_try!(value.take())))), + Self::Bytes(value) => Self::Bytes(vm_try!(Shared::new(vm_try!(value.take())))), + Self::Vec(value) => Self::Vec(vm_try!(Shared::new(vm_try!(value.take())))), Self::EmptyTuple => Self::EmptyTuple, - Self::Tuple(value) => Self::Tuple(Shared::new(vm_try!(value.take()))), - Self::Object(value) => Self::Object(Shared::new(vm_try!(value.take()))), - Self::RangeFrom(value) => Self::RangeFrom(Shared::new(vm_try!(value.take()))), - Self::RangeFull(value) => Self::RangeFull(Shared::new(vm_try!(value.take()))), - Self::RangeInclusive(value) => Self::RangeInclusive(Shared::new(vm_try!(value.take()))), + Self::Tuple(value) => Self::Tuple(vm_try!(Shared::new(vm_try!(value.take())))), + Self::Object(value) => Self::Object(vm_try!(Shared::new(vm_try!(value.take())))), + Self::RangeFrom(value) => Self::RangeFrom(vm_try!(Shared::new(vm_try!(value.take())))), + Self::RangeFull(value) => Self::RangeFull(vm_try!(Shared::new(vm_try!(value.take())))), + Self::RangeInclusive(value) => { + Self::RangeInclusive(vm_try!(Shared::new(vm_try!(value.take())))) + } Self::RangeToInclusive(value) => { - Self::RangeToInclusive(Shared::new(vm_try!(value.take()))) - } - Self::RangeTo(value) => Self::RangeTo(Shared::new(vm_try!(value.take()))), - Self::Range(value) => Self::Range(Shared::new(vm_try!(value.take()))), - Self::ControlFlow(value) => Self::ControlFlow(Shared::new(vm_try!(value.take()))), - Self::Future(value) => Self::Future(Shared::new(vm_try!(value.take()))), - Self::Stream(value) => Self::Stream(Shared::new(vm_try!(value.take()))), - Self::Generator(value) => Self::Generator(Shared::new(vm_try!(value.take()))), -
Self::GeneratorState(value) => Self::GeneratorState(Shared::new(vm_try!(value.take()))), - Self::Option(value) => Self::Option(Shared::new(vm_try!(value.take()))), - Self::Result(value) => Self::Result(Shared::new(vm_try!(value.take()))), - Self::EmptyStruct(value) => Self::EmptyStruct(Shared::new(vm_try!(value.take()))), - Self::TupleStruct(value) => Self::TupleStruct(Shared::new(vm_try!(value.take()))), - Self::Struct(value) => Self::Struct(Shared::new(vm_try!(value.take()))), - Self::Variant(value) => Self::Variant(Shared::new(vm_try!(value.take()))), - Self::Function(value) => Self::Function(Shared::new(vm_try!(value.take()))), + Self::RangeToInclusive(vm_try!(Shared::new(vm_try!(value.take())))) + } + Self::RangeTo(value) => Self::RangeTo(vm_try!(Shared::new(vm_try!(value.take())))), + Self::Range(value) => Self::Range(vm_try!(Shared::new(vm_try!(value.take())))), + Self::ControlFlow(value) => { + Self::ControlFlow(vm_try!(Shared::new(vm_try!(value.take())))) + } + Self::Future(value) => Self::Future(vm_try!(Shared::new(vm_try!(value.take())))), + Self::Stream(value) => Self::Stream(vm_try!(Shared::new(vm_try!(value.take())))), + Self::Generator(value) => Self::Generator(vm_try!(Shared::new(vm_try!(value.take())))), + Self::GeneratorState(value) => { + Self::GeneratorState(vm_try!(Shared::new(vm_try!(value.take())))) + } + Self::Option(value) => Self::Option(vm_try!(Shared::new(vm_try!(value.take())))), + Self::Result(value) => Self::Result(vm_try!(Shared::new(vm_try!(value.take())))), + Self::EmptyStruct(value) => { + Self::EmptyStruct(vm_try!(Shared::new(vm_try!(value.take())))) + } + Self::TupleStruct(value) => { + Self::TupleStruct(vm_try!(Shared::new(vm_try!(value.take())))) + } + Self::Struct(value) => Self::Struct(vm_try!(Shared::new(vm_try!(value.take())))), + Self::Variant(value) => Self::Variant(vm_try!(Shared::new(vm_try!(value.take())))), + Self::Function(value) => Self::Function(vm_try!(Shared::new(vm_try!(value.take())))), Self::Format(value) => Self::Format(value), Self::Iterator(value) => Self::Iterator(value), - Self::Any(value) => Self::Any(Shared::new(vm_try!(value.take()))), + Self::Any(value) => Self::Any(vm_try!(Shared::new(vm_try!(value.take())))), }) } @@ -910,7 +933,7 @@ impl Value { #[inline] pub fn into_tuple(self) -> VmResult<Shared<OwnedTuple>> { match self { - Self::EmptyTuple => VmResult::Ok(Shared::new(OwnedTuple::new())), + Self::EmptyTuple => VmResult::Ok(vm_try!(Shared::new(OwnedTuple::new()))), Self::Tuple(tuple) => VmResult::Ok(tuple), actual => err(VmErrorKind::expected::<Tuple>(vm_try!( actual.type_info() ))), @@ -1015,7 +1038,7 @@ impl Value { /// Try to coerce value into a format spec. #[inline] - pub fn into_format(self) -> VmResult<Box<Format>> { + pub fn into_format(self) -> VmResult<Shared<Format>> { match self { Value::Format(format) => VmResult::Ok(format), actual => err(VmErrorKind::expected::<Format>(vm_try!(actual.type_info()))), @@ -1340,13 +1363,13 @@ impl Value { } /// Hash the current value. - #[cfg(feature = "std")] + #[cfg(feature = "alloc")] pub fn hash(&self, hasher: &mut Hasher) -> VmResult<()> { self.hash_with(hasher, &mut EnvProtocolCaller) } /// Hash the current value. - #[cfg(feature = "std")] + #[cfg(feature = "alloc")] pub(crate) fn hash_with( &self, hasher: &mut Hasher, @@ -2003,14 +2026,13 @@ impl fmt::Debug for Value { write!(f, "{:?}", value)?; } value => { - let mut formatter = Formatter::new(); + let mut o = Formatter::new(); - match value.string_debug(&mut formatter) { - VmResult::Ok(result) => result?, - VmResult::Err(..)
=> return Err(fmt::Error), + if value.string_debug(&mut o).is_err() { + return Err(fmt::Error); } - f.write_str(formatter.as_str())?; + f.write_str(o.as_str())?; } } @@ -2024,15 +2046,6 @@ impl Default for Value { } } -impl<T> From<T> for Value -where - T: Any, -{ - fn from(any: T) -> Self { - Self::Any(Shared::new(AnyObj::new(any))) - } -} - impl From<()> for Value { fn from((): ()) -> Self { Self::EmptyTuple @@ -2070,17 +2083,19 @@ macro_rules! impl_from_wrapper { impl_from!($($variant => $wrapper<$ty>),*); $( - impl From<$ty> for Value { + impl TryFrom<$ty> for Value { + type Error = rune_alloc::Error; + #[inline] - fn from(value: $ty) -> Self { - Self::$variant($wrapper::new(value)) + fn try_from(value: $ty) -> Result<Self, Self::Error> { + Ok(Self::$variant($wrapper::new(value)?)) } } impl ToValue for $ty { #[inline] fn to_value(self) -> VmResult<Value> { - VmResult::Ok(Value::from(self)) + VmResult::Ok(vm_try!(Value::try_from(self))) } } )* @@ -2099,7 +2114,7 @@ impl_from! { } impl_from_wrapper! { - Format => Box<Format>, + Format => Shared<Format>, Iterator => Shared<Iterator>, Bytes => Shared<Bytes>, String => Shared<String>, @@ -2125,313 +2140,6 @@ impl_from_wrapper! { Any => Shared<AnyObj>, } -/// Deserialize implementation for value pointers. -impl<'de> de::Deserialize<'de> for Value { - fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> - where - D: de::Deserializer<'de>, - { - deserializer.deserialize_any(VmVisitor) - } -} - -/// Serialize implementation for value pointers. -impl ser::Serialize for Value { - fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> - where - S: ser::Serializer, - { - use serde::ser::SerializeMap as _; - use serde::ser::SerializeSeq as _; - - match self { - Value::Bool(b) => serializer.serialize_bool(*b), - Value::Char(c) => serializer.serialize_char(*c), - Value::Byte(c) => serializer.serialize_u8(*c), - Value::Integer(integer) => serializer.serialize_i64(*integer), - Value::Float(float) => serializer.serialize_f64(*float), - Value::Type(..) => Err(ser::Error::custom("cannot serialize types")), - Value::Ordering(..) => Err(ser::Error::custom("cannot serialize orderings")), - Value::String(string) => { - let string = string.borrow_ref().map_err(ser::Error::custom)?; - serializer.serialize_str(&string) - } - Value::Bytes(bytes) => { - let bytes = bytes.borrow_ref().map_err(ser::Error::custom)?; - serializer.serialize_bytes(&bytes) - } - Value::Vec(vec) => { - let vec = vec.borrow_ref().map_err(ser::Error::custom)?; - let mut serializer = serializer.serialize_seq(Some(vec.len()))?; - - for value in &*vec { - serializer.serialize_element(value)?; - } - - serializer.end() - } - Value::EmptyTuple => serializer.serialize_unit(), - Value::Tuple(tuple) => { - let tuple = tuple.borrow_ref().map_err(ser::Error::custom)?; - let mut serializer = serializer.serialize_seq(Some(tuple.len()))?; - - for value in tuple.iter() { - serializer.serialize_element(value)?; - } - - serializer.end() - } - Value::Object(object) => { - let object = object.borrow_ref().map_err(ser::Error::custom)?; - let mut serializer = serializer.serialize_map(Some(object.len()))?; - - for (key, value) in &*object { - serializer.serialize_entry(key, value)?; - } - - serializer.end() - } - Value::Option(option) => { - let option = option.borrow_ref().map_err(ser::Error::custom)?; - <Option<Value>>::serialize(&*option, serializer) - } - Value::EmptyStruct(..) => serializer.serialize_unit(), - Value::TupleStruct(..) => Err(ser::Error::custom("cannot serialize tuple structs")), - Value::Struct(..) => Err(ser::Error::custom("cannot serialize objects structs")), - Value::Variant(..)
=> Err(ser::Error::custom("cannot serialize variants")), - Value::Result(..) => Err(ser::Error::custom("cannot serialize results")), - Value::Future(..) => Err(ser::Error::custom("cannot serialize futures")), - Value::Stream(..) => Err(ser::Error::custom("cannot serialize streams")), - Value::Generator(..) => Err(ser::Error::custom("cannot serialize generators")), - Value::GeneratorState(..) => { - Err(ser::Error::custom("cannot serialize generator states")) - } - Value::Function(..) => Err(ser::Error::custom("cannot serialize function pointers")), - Value::Format(..) => Err(ser::Error::custom("cannot serialize format specifications")), - Value::Iterator(..) => Err(ser::Error::custom("cannot serialize iterators")), - Value::RangeFrom(..) => Err(ser::Error::custom("cannot serialize `start..` ranges")), - Value::RangeFull(..) => Err(ser::Error::custom("cannot serialize `..` ranges")), - Value::RangeInclusive(..) => { - Err(ser::Error::custom("cannot serialize `start..=end` ranges")) - } - Value::RangeToInclusive(..) => { - Err(ser::Error::custom("cannot serialize `..=end` ranges")) - } - Value::RangeTo(..) => Err(ser::Error::custom("cannot serialize `..end` ranges")), - Value::Range(..) => Err(ser::Error::custom("cannot serialize `start..end` ranges")), - Value::ControlFlow(..) => { - Err(ser::Error::custom("cannot serialize `start..end` ranges")) - } - Value::Any(..) => Err(ser::Error::custom("cannot serialize external objects")), - } - } -} - -struct VmVisitor; - -impl<'de> de::Visitor<'de> for VmVisitor { - type Value = Value; - - #[inline] - fn expecting(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt.write_str("any valid value") - } - - #[inline] - fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> - where - E: de::Error, - { - Ok(Value::String(Shared::new(value.to_owned()))) - } - - #[inline] - fn visit_string<E>(self, value: String) -> Result<Self::Value, E> - where - E: de::Error, - { - Ok(Value::String(Shared::new(value))) - } - - #[inline] - fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E> - where - E: de::Error, - { - Ok(Value::Bytes(Shared::new(Bytes::from_vec(v.to_vec())))) - } - - #[inline] - fn visit_byte_buf<E>(self, v: vec::Vec<u8>) -> Result<Self::Value, E> - where - E: de::Error, - { - Ok(Value::Bytes(Shared::new(Bytes::from_vec(v)))) - } - - #[inline] - fn visit_i8<E>(self, v: i8) -> Result<Self::Value, E> - where - E: de::Error, - { - Ok(Value::Integer(v as i64)) - } - - #[inline] - fn visit_i16<E>(self, v: i16) -> Result<Self::Value, E> - where - E: de::Error, - { - Ok(Value::Integer(v as i64)) - } - - #[inline] - fn visit_i32<E>(self, v: i32) -> Result<Self::Value, E> - where - E: de::Error, - { - Ok(Value::Integer(v as i64)) - } - - #[inline] - fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E> - where - E: de::Error, - { - Ok(Value::Integer(v)) - } - - #[inline] - fn visit_i128<E>(self, v: i128) -> Result<Self::Value, E> - where - E: de::Error, - { - Ok(Value::Integer(v as i64)) - } - - #[inline] - fn visit_u8<E>(self, v: u8) -> Result<Self::Value, E> - where - E: de::Error, - { - Ok(Value::Integer(v as i64)) - } - - #[inline] - fn visit_u16<E>(self, v: u16) -> Result<Self::Value, E> - where - E: de::Error, - { - Ok(Value::Integer(v as i64)) - } - - #[inline] - fn visit_u32<E>(self, v: u32) -> Result<Self::Value, E> - where - E: de::Error, - { - Ok(Value::Integer(v as i64)) - } - - #[inline] - fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E> - where - E: de::Error, - { - Ok(Value::Integer(v as i64)) - } - - #[inline] - fn visit_u128<E>(self, v: u128) -> Result<Self::Value, E> - where - E: de::Error, - { - Ok(Value::Integer(v as i64)) - } - - #[inline] - fn visit_f32<E>(self, v: f32) -> Result<Self::Value, E> - where - E: de::Error, - { - Ok(Value::Float(v as f64)) - } - - #[inline] - fn visit_f64<E>(self, v:
f64) -> Result<Self::Value, E> - where - E: de::Error, - { - Ok(Value::Float(v)) - } - - #[inline] - fn visit_bool<E>(self, v: bool) -> Result<Self::Value, E> - where - E: de::Error, - { - Ok(Value::Bool(v)) - } - - #[inline] - fn visit_some<D>(self, deserializer: D) -> Result<Self::Value, D::Error> - where - D: serde::Deserializer<'de>, - { - Ok(Value::Option(Shared::new(Some(Value::deserialize( - deserializer, - )?)))) - } - - #[inline] - fn visit_none<E>(self) -> Result<Self::Value, E> - where - E: de::Error, - { - Ok(Value::Option(Shared::new(None))) - } - - #[inline] - fn visit_unit<E>(self) -> Result<Self::Value, E> - where - E: de::Error, - { - Ok(Value::EmptyTuple) - } - - #[inline] - fn visit_seq<V>(self, mut visitor: V) -> Result<Self::Value, V::Error> - where - V: de::SeqAccess<'de>, - { - let mut vec = if let Some(hint) = visitor.size_hint() { - vec::Vec::with_capacity(hint) - } else { - vec::Vec::new() - }; - - while let Some(elem) = visitor.next_element()? { - vec.push(elem); - } - - Ok(Value::Vec(Shared::new(Vec::from(vec)))) - } - - #[inline] - fn visit_map<V>(self, mut visitor: V) -> Result<Self::Value, V::Error> - where - V: de::MapAccess<'de>, - { - let mut object = Object::new(); - - while let Some((key, value)) = visitor.next_entry()? { - object.insert(key, value); - } - - Ok(Value::Object(Shared::new(object))) - } -} - impl MaybeTypeOf for Value { #[inline] fn maybe_type_of() -> Option<FullTypeOf> { @@ -2439,6 +2147,13 @@ impl MaybeTypeOf for Value { } } +impl TryClone for Value { + fn try_clone(&self) -> Result<Self, Error> { + // NB: value cloning is a shallow clone of the underlying data. + Ok(self.clone()) + } +} + #[cfg(test)] mod tests { use super::Value; diff --git a/crates/rune/src/runtime/value/serde.rs b/crates/rune/src/runtime/value/serde.rs new file mode 100644 index 000000000..5841515bd --- /dev/null +++ b/crates/rune/src/runtime/value/serde.rs @@ -0,0 +1,324 @@ +use core::fmt; + +use crate::alloc::{self, Global, TryToOwned}; +use crate::no_std::std; +use crate::runtime::{Bytes, Object, Shared, Vec}; + +use serde::de::{self, Deserialize as _, Error}; +use serde::ser::{self, SerializeMap as _, SerializeSeq as _}; + +use super::Value; + +/// Deserialize implementation for value pointers. +impl<'de> de::Deserialize<'de> for Value { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: de::Deserializer<'de>, + { + deserializer.deserialize_any(VmVisitor) + } +} + +/// Serialize implementation for value pointers. +impl ser::Serialize for Value { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: ser::Serializer, + { + match self { + Value::Bool(b) => serializer.serialize_bool(*b), + Value::Char(c) => serializer.serialize_char(*c), + Value::Byte(c) => serializer.serialize_u8(*c), + Value::Integer(integer) => serializer.serialize_i64(*integer), + Value::Float(float) => serializer.serialize_f64(*float), + Value::Type(..) => Err(ser::Error::custom("cannot serialize types")), + Value::Ordering(..)
=> Err(ser::Error::custom("cannot serialize orderings")), + Value::String(string) => { + let string = string.borrow_ref().map_err(ser::Error::custom)?; + serializer.serialize_str(&string) + } + Value::Bytes(bytes) => { + let bytes = bytes.borrow_ref().map_err(ser::Error::custom)?; + serializer.serialize_bytes(&bytes) + } + Value::Vec(vec) => { + let vec = vec.borrow_ref().map_err(ser::Error::custom)?; + let mut serializer = serializer.serialize_seq(Some(vec.len()))?; + + for value in &*vec { + serializer.serialize_element(value)?; + } + + serializer.end() + } + Value::EmptyTuple => serializer.serialize_unit(), + Value::Tuple(tuple) => { + let tuple = tuple.borrow_ref().map_err(ser::Error::custom)?; + let mut serializer = serializer.serialize_seq(Some(tuple.len()))?; + + for value in tuple.iter() { + serializer.serialize_element(value)?; + } + + serializer.end() + } + Value::Object(object) => { + let object = object.borrow_ref().map_err(ser::Error::custom)?; + let mut serializer = serializer.serialize_map(Some(object.len()))?; + + for (key, value) in &*object { + serializer.serialize_entry(key, value)?; + } + + serializer.end() + } + Value::Option(option) => { + let option = option.borrow_ref().map_err(ser::Error::custom)?; + <Option<Value>>::serialize(&*option, serializer) + } + Value::EmptyStruct(..) => serializer.serialize_unit(), + Value::TupleStruct(..) => Err(ser::Error::custom("cannot serialize tuple structs")), + Value::Struct(..) => Err(ser::Error::custom("cannot serialize objects structs")), + Value::Variant(..) => Err(ser::Error::custom("cannot serialize variants")), + Value::Result(..) => Err(ser::Error::custom("cannot serialize results")), + Value::Future(..) => Err(ser::Error::custom("cannot serialize futures")), + Value::Stream(..) => Err(ser::Error::custom("cannot serialize streams")), + Value::Generator(..) => Err(ser::Error::custom("cannot serialize generators")), + Value::GeneratorState(..) => { + Err(ser::Error::custom("cannot serialize generator states")) + } + Value::Function(..) => Err(ser::Error::custom("cannot serialize function pointers")), + Value::Format(..) => Err(ser::Error::custom("cannot serialize format specifications")), + Value::Iterator(..) => Err(ser::Error::custom("cannot serialize iterators")), + Value::RangeFrom(..) => Err(ser::Error::custom("cannot serialize `start..` ranges")), + Value::RangeFull(..) => Err(ser::Error::custom("cannot serialize `..` ranges")), + Value::RangeInclusive(..) => { + Err(ser::Error::custom("cannot serialize `start..=end` ranges")) + } + Value::RangeToInclusive(..) => { + Err(ser::Error::custom("cannot serialize `..=end` ranges")) + } + Value::RangeTo(..) => Err(ser::Error::custom("cannot serialize `..end` ranges")), + Value::Range(..) => Err(ser::Error::custom("cannot serialize `start..end` ranges")), + Value::ControlFlow(..) => { + Err(ser::Error::custom("cannot serialize `start..end` ranges")) + } + Value::Any(..)
=> Err(ser::Error::custom("cannot serialize external objects")), + } + } +} + +struct VmVisitor; + +impl<'de> de::Visitor<'de> for VmVisitor { + type Value = Value; + + #[inline] + fn expecting(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt.write_str("any valid value") + } + + #[inline] + fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> + where + E: de::Error, + { + let value = value.try_to_owned().map_err(E::custom)?; + Ok(Value::String(Shared::new(value).map_err(E::custom)?)) + } + + #[inline] + fn visit_string<E>(self, value: std::String) -> Result<Self::Value, E> + where + E: de::Error, + { + let value = alloc::String::try_from(value).map_err(E::custom)?; + Ok(Value::String(Shared::new(value).map_err(E::custom)?)) + } + + #[inline] + fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E> + where + E: de::Error, + { + Ok(Value::Bytes( + Shared::new(Bytes::from_vec(v.to_vec())).map_err(E::custom)?, + )) + } + + #[inline] + fn visit_byte_buf<E>(self, v: std::Vec<u8>) -> Result<Self::Value, E> + where + E: de::Error, + { + Ok(Value::Bytes( + Shared::new(Bytes::from_vec(v)).map_err(E::custom)?, + )) + } + + #[inline] + fn visit_i8<E>(self, v: i8) -> Result<Self::Value, E> + where + E: de::Error, + { + Ok(Value::Integer(v as i64)) + } + + #[inline] + fn visit_i16<E>(self, v: i16) -> Result<Self::Value, E> + where + E: de::Error, + { + Ok(Value::Integer(v as i64)) + } + + #[inline] + fn visit_i32<E>(self, v: i32) -> Result<Self::Value, E> + where + E: de::Error, + { + Ok(Value::Integer(v as i64)) + } + + #[inline] + fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E> + where + E: de::Error, + { + Ok(Value::Integer(v)) + } + + #[inline] + fn visit_i128<E>(self, v: i128) -> Result<Self::Value, E> + where + E: de::Error, + { + Ok(Value::Integer(v as i64)) + } + + #[inline] + fn visit_u8<E>(self, v: u8) -> Result<Self::Value, E> + where + E: de::Error, + { + Ok(Value::Integer(v as i64)) + } + + #[inline] + fn visit_u16<E>(self, v: u16) -> Result<Self::Value, E> + where + E: de::Error, + { + Ok(Value::Integer(v as i64)) + } + + #[inline] + fn visit_u32<E>(self, v: u32) -> Result<Self::Value, E> + where + E: de::Error, + { + Ok(Value::Integer(v as i64)) + } + + #[inline] + fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E> + where + E: de::Error, + { + Ok(Value::Integer(v as i64)) + } + + #[inline] + fn visit_u128<E>(self, v: u128) -> Result<Self::Value, E> + where + E: de::Error, + { + Ok(Value::Integer(v as i64)) + } + + #[inline] + fn visit_f32<E>(self, v: f32) -> Result<Self::Value, E> + where + E: de::Error, + { + Ok(Value::Float(v as f64)) + } + + #[inline] + fn visit_f64<E>(self, v: f64) -> Result<Self::Value, E> + where + E: de::Error, + { + Ok(Value::Float(v)) + } + + #[inline] + fn visit_bool<E>(self, v: bool) -> Result<Self::Value, E> + where + E: de::Error, + { + Ok(Value::Bool(v)) + } + + #[inline] + fn visit_some<D>(self, deserializer: D) -> Result<Self::Value, D::Error> + where + D: serde::Deserializer<'de>, + { + let option = + Shared::new(Some(Value::deserialize(deserializer)?)).map_err(D::Error::custom)?; + Ok(Value::Option(option)) + } + + #[inline] + fn visit_none<E>(self) -> Result<Self::Value, E> + where + E: de::Error, + { + Ok(Value::Option(Shared::new(None).map_err(E::custom)?)) + } + + #[inline] + fn visit_unit<E>(self) -> Result<Self::Value, E> + where + E: de::Error, + { + Ok(Value::EmptyTuple) + } + + #[inline] + fn visit_seq<V>(self, mut visitor: V) -> Result<Self::Value, V::Error> + where + V: de::SeqAccess<'de>, + { + let mut vec = if let Some(hint) = visitor.size_hint() { + alloc::Vec::try_with_capacity_in(hint, Global).map_err(V::Error::custom)? + } else { + alloc::Vec::new_in(Global) + }; + + while let Some(elem) = visitor.next_element()?
{ + vec.try_push(elem).map_err(V::Error::custom)?; + } + + Ok(Value::Vec( + Shared::new(Vec::from(vec)).map_err(V::Error::custom)?, + )) + } + + #[inline] + fn visit_map<V>(self, mut visitor: V) -> Result<Self::Value, V::Error> + where + V: de::MapAccess<'de>, + { + let mut object = Object::new(); + + while let Some((key, value)) = visitor.next_entry()? { + object.insert(key, value).map_err(V::Error::custom)?; + } + + Ok(Value::Object( + Shared::new(object).map_err(V::Error::custom)?, + )) + } +} diff --git a/crates/rune/src/runtime/vec.rs b/crates/rune/src/runtime/vec.rs index 50289a35a..9f4163936 100644 --- a/crates/rune/src/runtime/vec.rs +++ b/crates/rune/src/runtime/vec.rs @@ -2,16 +2,17 @@ mod iter; use core::cmp; use core::cmp::Ordering; -use core::fmt::{self, Write}; +use core::fmt; use core::ops; use core::slice; use core::slice::SliceIndex; -use crate::no_std::prelude::*; -use crate::no_std::vec; +use crate::no_std::std; use crate as rune; -#[cfg(feature = "std")] +use crate::alloc::fmt::TryWrite; +use crate::alloc::{self, Error, Global, TryClone}; +#[cfg(feature = "alloc")] use crate::runtime::Hasher; use crate::runtime::{ Formatter, FromValue, Iterator, ProtocolCaller, RawRef, Ref, Shared, ToValue, UnsafeToRef, @@ -38,11 +39,11 @@ use self::iter::Iter; /// assert_eq!(None::<u32>, vec.get_value(2).into_result()?); /// # Ok::<_, rune::Error>(()) /// ``` -#[derive(Clone, Any)] +#[derive(Any)] #[repr(transparent)] #[rune(builtin, static_type = VEC_TYPE, from_value = Value::into_vec)] pub struct Vec { - inner: vec::Vec<Value>, + inner: alloc::Vec<Value>, } impl Vec { @@ -59,7 +60,7 @@ impl Vec { /// ``` pub const fn new() -> Self { Self { - inner: vec::Vec::new(), + inner: alloc::Vec::new_in(Global), } } @@ -73,14 +74,14 @@ impl Vec { /// Construct a new dynamic vector guaranteed to have at least the given /// capacity. - pub fn with_capacity(cap: usize) -> Self { - Self { - inner: vec::Vec::with_capacity(cap), - } + pub fn with_capacity(cap: usize) -> Result<Self, Error> { + Ok(Self { + inner: alloc::Vec::try_with_capacity_in(cap, Global)?, + }) } /// Convert into inner std vector. - pub fn into_inner(self) -> vec::Vec<Value> { + pub fn into_inner(self) -> alloc::Vec<Value> { self.inner } @@ -127,8 +128,8 @@ impl Vec { } /// Appends an element to the back of a dynamic vector. - pub fn push(&mut self, value: Value) { - self.inner.push(value); + pub fn push(&mut self, value: Value) -> Result<(), Error> { + self.inner.try_push(value) } /// Appends an element to the back of a dynamic vector, converting it as @@ -137,7 +138,7 @@ impl Vec { where T: ToValue, { - self.inner.push(vm_try!(value.to_value())); + vm_try!(self.inner.try_push(vm_try!(value.to_value()))); VmResult::Ok(()) } @@ -188,8 +189,9 @@ impl Vec { /// Inserts an element at position index within the vector, shifting all /// elements after it to the right. - pub fn insert(&mut self, index: usize, value: Value) { - self.inner.insert(index, value); + pub fn insert(&mut self, index: usize, value: Value) -> VmResult<()> { + vm_try!(self.inner.try_insert(index, value)); + VmResult::Ok(()) } /// Extend this vector with something that implements the into_iter /// protocol. @@ -198,7 +200,7 @@ impl Vec { let mut it = vm_try!(value.into_iter()); while let Some(value) = vm_try!(it.next()) { - self.push(value); + vm_try!(self.push(value)); } VmResult::Ok(()) } /// Convert into a rune iterator.
pub fn iter_ref(this: Ref<[Value]>) -> Iterator { - Iterator::from_double_ended("std::vec::Iter", Iter::new(this)) + Iterator::from_double_ended("std::alloc::Iter", Iter::new(this)) } /// Access the inner values as a slice. @@ -218,14 +220,12 @@ impl Vec { this: &[Value], f: &mut Formatter, caller: &mut impl ProtocolCaller, - ) -> VmResult<fmt::Result> { + ) -> VmResult<()> { let mut it = this.iter().peekable(); vm_write!(f, "["); while let Some(value) = it.next() { - if let Err(fmt::Error) = vm_try!(value.string_debug_with(f, caller)) { - return VmResult::Ok(Err(fmt::Error)); - } + vm_try!(value.string_debug_with(f, caller)); if it.peek().is_some() { vm_write!(f, ", "); @@ -233,7 +233,7 @@ impl Vec { } vm_write!(f, "]"); - VmResult::Ok(Ok(())) + VmResult::Ok(()) } pub(crate) fn partial_eq_with( @@ -379,10 +379,11 @@ impl Vec { return VmResult::Ok(None); }; - VmResult::Ok(Some(Value::vec(values.to_vec()))) + let vec = vm_try!(alloc::Vec::try_from(values)); + VmResult::Ok(Some(vm_try!(Value::vec(vec)))) } - #[cfg(feature = "std")] + #[cfg(feature = "alloc")] pub(crate) fn hash_with( &self, hasher: &mut Hasher, @@ -396,6 +397,14 @@ impl Vec { } } +impl TryClone for Vec { + fn try_clone(&self) -> Result<Self, Error> { + Ok(Self { + inner: self.inner.try_clone()?, + }) + } +} + impl fmt::Debug for Vec { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(&*self.inner).finish() @@ -420,7 +429,7 @@ impl ops::DerefMut for Vec { impl IntoIterator for Vec { type Item = Value; - type IntoIter = vec::IntoIter<Value>; + type IntoIter = alloc::vec::IntoIter<Value>; #[inline] fn into_iter(self) -> Self::IntoIter { @@ -448,23 +457,38 @@ impl<'a> IntoIterator for &'a mut Vec { } } -impl From<vec::Vec<Value>> for Vec { +impl TryFrom<std::Vec<Value>> for Vec { + type Error = Error; + #[inline] - fn from(inner: vec::Vec<Value>) -> Self { - Self { inner } + fn try_from(values: std::Vec<Value>) -> Result<Self, Self::Error> { + let mut inner = alloc::Vec::try_with_capacity_in(values.len(), Global)?; + + for value in values { + inner.try_push(value)?; + } + + Ok(Self { inner }) } } -impl From<Box<[Value]>> for Vec { +impl TryFrom<std::Box<[Value]>> for Vec { + type Error = Error; + #[inline] - fn from(inner: Box<[Value]>) -> Self { - Self { - inner: inner.to_vec(), - } + fn try_from(inner: std::Box<[Value]>) -> Result<Self, Self::Error> { + Vec::try_from(inner.into_vec()) + } +} + +impl From<alloc::Vec<Value>> for Vec { + #[inline] + fn from(inner: alloc::Vec<Value>) -> Self { + Self { inner } } } -impl<T> FromValue for vec::Vec<T> +impl<T> FromValue for std::Vec<T> where T: FromValue, { @@ -472,7 +496,7 @@ where let vec = vm_try!(value.into_vec()); let vec = vm_try!(vec.take()); - let mut output = vec::Vec::with_capacity(vec.len()); + let mut output = std::Vec::with_capacity(vec.len()); for value in vec { output.push(vm_try!(T::from_value(value))); @@ -482,6 +506,24 @@ where } } +impl<T> FromValue for alloc::Vec<T> +where + T: FromValue, +{ + fn from_value(value: Value) -> VmResult<Self> { + let vec = vm_try!(value.into_vec()); + let vec = vm_try!(vec.take()); + + let mut output = vm_try!(alloc::Vec::try_with_capacity_in(vec.len(), Global)); + + for value in vec { + vm_try!(output.try_push(vm_try!(T::from_value(value)))); + } + + VmResult::Ok(output) + } +} + impl UnsafeToRef for [Value] { type Guard = RawRef; @@ -494,17 +536,32 @@ impl UnsafeToRef for [Value] { } } -impl<T> ToValue for vec::Vec<T> +impl<T> ToValue for alloc::Vec<T> +where + T: ToValue, +{ + fn to_value(self) -> VmResult<Value> { + let mut inner = vm_try!(alloc::Vec::try_with_capacity_in(self.len(), Global)); + + for value in self { + vm_try!(inner.try_push(vm_try!(value.to_value()))); + } +
VmResult::Ok(Value::from(vm_try!(Shared::new(Vec { inner })))) + } +} + +impl ToValue for std::Vec where T: ToValue, { fn to_value(self) -> VmResult { - let mut vec = vec::Vec::with_capacity(self.len()); + let mut inner = vm_try!(alloc::Vec::try_with_capacity_in(self.len(), Global)); for value in self { - vec.push(vm_try!(value.to_value())); + vm_try!(inner.try_push(vm_try!(value.to_value()))); } - VmResult::Ok(Value::from(Shared::new(Vec::from(vec)))) + VmResult::Ok(Value::from(vm_try!(Shared::new(Vec { inner })))) } } diff --git a/crates/rune/src/runtime/vec_tuple.rs b/crates/rune/src/runtime/vec_tuple.rs index ac1afecac..04f6c01df 100644 --- a/crates/rune/src/runtime/vec_tuple.rs +++ b/crates/rune/src/runtime/vec_tuple.rs @@ -1,3 +1,4 @@ +use crate::alloc::Vec; use crate::runtime::{FromValue, ToValue, Value, VmErrorKind, VmResult}; /// A helper type to deserialize arrays with different interior types. @@ -43,10 +44,12 @@ macro_rules! impl_from_value_tuple_vec { where $($ty: ToValue,)* { + #[allow(unused_mut)] fn to_value(self) -> VmResult { let ($($var,)*) = self.0; - let vec = vec![$(vm_try!($var.to_value()),)*]; - VmResult::Ok(Value::vec(vec)) + let mut vec = vm_try!(Vec::try_with_capacity($count)); + $(vm_try!(vec.try_push(vm_try!($var.to_value())));)* + Value::vec(vec) } } }; diff --git a/crates/rune/src/runtime/vm.rs b/crates/rune/src/runtime/vm.rs index 3a17b36b0..5cbdb4d14 100644 --- a/crates/rune/src/runtime/vm.rs +++ b/crates/rune/src/runtime/vm.rs @@ -1,12 +1,13 @@ use core::cmp::Ordering; -use core::fmt; use core::mem::{replace, swap}; use core::ops; use core::slice; +use crate::alloc::{Error, IteratorExt, String, TryClone, TryToOwned}; use crate::hash::{Hash, IntoHash, ToTypeHash}; use crate::modules::{option, result}; -use crate::no_std::prelude::*; +use crate::no_std::borrow::ToOwned; +use crate::no_std::std; use crate::no_std::sync::Arc; use crate::no_std::vec; use crate::runtime::budget; @@ -88,7 +89,7 @@ macro_rules! target_value { } /// A stack which references variables indirectly from a slab. -#[derive(Debug, Clone)] +#[derive(Debug)] pub struct Vm { /// Context associated with virtual machine. context: Arc, @@ -515,7 +516,7 @@ impl Vm { args: expected, }) = self.unit.function(hash) { - self.stack.push(target); + vm_try!(self.stack.push(target)); // Safety: We hold onto the guard for the duration of this call. let _guard = unsafe { vm_try!(args.unsafe_into_stack(&mut self.stack)) }; vm_try!(check_args(count, expected)); @@ -524,7 +525,7 @@ impl Vm { } if let Some(handler) = self.context.function(hash) { - self.stack.push(target); + vm_try!(self.stack.push(target)); // Safety: We hold onto the guard for the duration of this call. 
             let _guard = unsafe { vm_try!(args.unsafe_into_stack(&mut self.stack)) };
             vm_try!(handler(&mut self.stack, count));
@@ -551,7 +552,7 @@ impl Vm {
         let hash = Hash::field_function(protocol, vm_try!(target.type_hash()), name);
 
         if let Some(handler) = self.context.function(hash) {
-            self.stack.push(target);
+            vm_try!(self.stack.push(target));
             let _guard = unsafe { vm_try!(args.unsafe_into_stack(&mut self.stack)) };
             vm_try!(handler(&mut self.stack, count));
             return VmResult::Ok(CallResult::Ok(()));
@@ -576,7 +577,7 @@ impl Vm {
         let hash = Hash::index_function(protocol, vm_try!(target.type_hash()), Hash::index(index));
 
         if let Some(handler) = self.context.function(hash) {
-            self.stack.push(target);
+            vm_try!(self.stack.push(target));
             let _guard = unsafe { vm_try!(args.unsafe_into_stack(&mut self.stack)) };
             vm_try!(handler(&mut self.stack, count));
             return VmResult::Ok(CallResult::Ok(()));
@@ -609,7 +610,7 @@ impl Vm {
             }
         };
 
-        self.stack.push(out);
+        vm_try!(self.stack.push(Value::from(out)));
         VmResult::Ok(())
     }
 
@@ -945,14 +946,14 @@ impl Vm {
             Value::Object(object) => {
                 let object = vm_try!(object.borrow_ref());
 
-                if let Some(value) = object.get(&***index) {
+                if let Some(value) = object.get(index.as_str()) {
                     return VmResult::Ok(CallResult::Ok(value.clone()));
                 }
             }
             Value::Struct(typed_object) => {
                 let typed_object = vm_try!(typed_object.borrow_ref());
 
-                if let Some(value) = typed_object.get(&***index) {
+                if let Some(value) = typed_object.get(index.as_str()) {
                     return VmResult::Ok(CallResult::Ok(value.clone()));
                 }
             }
             Value::Variant(variant) => {
                 let variant = vm_try!(variant.borrow_ref());
 
                 if let VariantData::Struct(data) = variant.data() {
-                    if let Some(value) = data.get(&***index) {
+                    if let Some(value) = data.get(index.as_str()) {
                         return VmResult::Ok(CallResult::Ok(value.clone()));
                     }
                 }
@@ -991,7 +992,8 @@ impl Vm {
         VmResult::Ok(match target {
             Value::Object(object) => {
                 let mut object = vm_try!(object.borrow_mut());
-                object.insert(field.as_str().to_owned(), value);
+                let key = vm_try!(field.as_str().try_to_owned());
+                vm_try!(object.insert(key, value));
                 return VmResult::Ok(CallResult::Ok(()));
             }
             Value::Struct(typed_object) => {
@@ -1003,8 +1005,8 @@ impl Vm {
                 }
 
                 return err(VmErrorKind::MissingField {
-                    field: field.as_str().to_owned(),
                     target: typed_object.type_info(),
+                    field: field.as_str().to_owned(),
                 });
             }
             Value::Variant(variant) => {
@@ -1018,8 +1020,8 @@ impl Vm {
                 }
 
                 return err(VmErrorKind::MissingField {
-                    field: field.as_str().to_owned(),
                     target: variant.type_info(),
+                    field: field.as_str().to_owned(),
                 });
             }
             target => {
@@ -1159,36 +1161,36 @@ impl Vm {
             }
        };
 
-        self.stack.push(out);
+        vm_try!(self.stack.push(Value::from(out)));
         VmResult::Ok(())
     }
 
     /// Construct a future from calling an async function.
     fn call_generator_fn(&mut self, offset: usize, args: usize) -> Result<(), VmErrorKind> {
-        let stack = self.stack.drain(args)?.collect::<Stack>();
+        let stack = self.stack.drain(args)?.try_collect::<Stack>()?;
         let mut vm = Self::with_stack(self.context.clone(), self.unit.clone(), stack);
         vm.ip = offset;
-        self.stack.push(Generator::new(vm));
+        self.stack.push(Value::try_from(Generator::new(vm))?)?;
         Ok(())
     }
 
     /// Construct a stream from calling a function.
     fn call_stream_fn(&mut self, offset: usize, args: usize) -> Result<(), VmErrorKind> {
-        let stack = self.stack.drain(args)?.collect::<Stack>();
+        let stack = self.stack.drain(args)?.try_collect::<Stack>()?;
         let mut vm = Self::with_stack(self.context.clone(), self.unit.clone(), stack);
         vm.ip = offset;
-        self.stack.push(Stream::new(vm));
+        self.stack.push(Value::try_from(Stream::new(vm))?)?;
         Ok(())
     }
 
     /// Construct a future from calling a function.
     fn call_async_fn(&mut self, offset: usize, args: usize) -> Result<(), VmErrorKind> {
-        let stack = self.stack.drain(args)?.collect::<Stack>();
+        let stack = self.stack.drain(args)?.try_collect::<Stack>()?;
         let mut vm = Self::with_stack(self.context.clone(), self.unit.clone(), stack);
         vm.ip = offset;
         let mut execution = vm.into_execution();
-        self.stack
-            .push(Future::new(async move { execution.async_complete().await }));
+        let future = Future::new(async move { execution.async_complete().await });
+        self.stack.push(Value::try_from(future)?)?;
         Ok(())
     }
 
@@ -1317,12 +1319,13 @@ impl Vm {
         let (lhs, rhs) = match (lhs, rhs) {
             (Value::Integer(lhs), Value::Integer(rhs)) => {
-                self.stack
-                    .push(vm_try!(integer_op(lhs, rhs).ok_or_else(error)));
+                vm_try!(self
+                    .stack
+                    .push(Value::from(vm_try!(integer_op(lhs, rhs).ok_or_else(error)))));
                 return VmResult::Ok(());
             }
             (Value::Float(lhs), Value::Float(rhs)) => {
-                self.stack.push(float_op(lhs, rhs));
+                vm_try!(self.stack.push(Value::from(float_op(lhs, rhs))));
                 return VmResult::Ok(());
             }
             (lhs, rhs) => (lhs, rhs),
@@ -1353,7 +1356,7 @@ impl Vm {
         let (lhs, rhs) = match (lhs, rhs) {
             (Value::Integer(lhs), Value::Integer(rhs)) => {
-                self.stack.push(integer_op(lhs, rhs));
+                vm_try!(self.stack.push(Value::from(integer_op(lhs, rhs))));
                 return VmResult::Ok(());
             }
             (lhs, rhs) => (lhs, rhs),
@@ -1385,11 +1388,11 @@ impl Vm {
         let (lhs, rhs) = match (lhs, rhs) {
             (Value::Integer(lhs), Value::Integer(rhs)) => {
-                self.stack.push(integer_op(lhs, rhs));
+                vm_try!(self.stack.push(Value::from(integer_op(lhs, rhs))));
                 return VmResult::Ok(());
             }
             (Value::Bool(lhs), Value::Bool(rhs)) => {
-                self.stack.push(bool_op(lhs, rhs));
+                vm_try!(self.stack.push(Value::from(bool_op(lhs, rhs))));
                 return VmResult::Ok(());
             }
             (lhs, rhs) => (lhs, rhs),
@@ -1443,8 +1446,8 @@ impl Vm {
         let (lhs, rhs) = match (lhs, rhs) {
             (Value::Integer(lhs), Value::Integer(rhs)) => {
-                self.stack
-                    .push(vm_try!(integer_op(lhs, rhs).ok_or_else(error)));
+                let integer = vm_try!(integer_op(lhs, rhs).ok_or_else(error));
+                vm_try!(self.stack.push(Value::from(integer)));
                 return VmResult::Ok(());
             }
             (lhs, rhs) => (lhs, rhs),
@@ -1506,7 +1509,7 @@ impl Vm {
         // NB: nothing to poll.
         if futures.is_empty() {
-            self.stack.push(());
+            vm_try!(self.stack.push(Value::from(())));
             return VmResult::Ok(None);
         }
 
@@ -1522,7 +1525,7 @@ impl Vm {
     #[cfg_attr(feature = "bench", inline(never))]
     fn op_push(&mut self, value: InstValue) -> VmResult<()> {
-        self.stack.push(value.into_value());
+        vm_try!(self.stack.push(value.into_value()));
         VmResult::Ok(())
     }
 
@@ -1550,7 +1553,7 @@ impl Vm {
     fn op_clean(&mut self, n: usize) -> VmResult<()> {
         let value = vm_try!(self.stack.pop());
         vm_try!(self.op_popn(n));
-        self.stack.push(value);
+        vm_try!(self.stack.push(value));
         VmResult::Ok(())
     }
 
@@ -1559,7 +1562,7 @@ impl Vm {
     #[cfg_attr(feature = "bench", inline(never))]
     fn op_copy(&mut self, offset: usize) -> VmResult<()> {
         let value = vm_try!(self.stack.at_offset(offset)).clone();
-        self.stack.push(value);
+        vm_try!(self.stack.push(value));
         VmResult::Ok(())
     }
 
@@ -1568,7 +1571,7 @@ impl Vm {
     #[cfg_attr(feature = "bench", inline(never))]
     fn op_move(&mut self, offset: usize) -> VmResult<()> {
         let value = vm_try!(self.stack.at_offset(offset)).clone();
-        self.stack.push(vm_try!(value.take()));
+        vm_try!(self.stack.push(vm_try!(value.take())));
         VmResult::Ok(())
     }
 
@@ -1654,16 +1657,24 @@ impl Vm {
     /// Construct a new vec.
     #[cfg_attr(feature = "bench", inline(never))]
     fn op_vec(&mut self, count: usize) -> VmResult<()> {
-        let vec = Vec::from(vm_try!(self.stack.pop_sequence(count)));
-        self.stack.push(Shared::new(vec));
+        let vec = vm_try!(Vec::try_from(vm_try!(vm_try!(self
+            .stack
+            .pop_sequence(count)))));
+        vm_try!(self
+            .stack
+            .push(vm_try!(Value::try_from(vm_try!(Shared::new(vec))))));
         VmResult::Ok(())
     }
 
     /// Construct a new tuple.
     #[cfg_attr(feature = "bench", inline(never))]
     fn op_tuple(&mut self, count: usize) -> VmResult<()> {
-        let tuple = vm_try!(self.stack.pop_sequence(count));
-        self.stack.push(OwnedTuple::from(tuple));
+        let tuple = vm_try!(vm_try!(self.stack.pop_sequence(count)));
+        vm_try!(self
+            .stack
+            .push(vm_try!(Value::try_from(vm_try!(OwnedTuple::try_from(
+                tuple
+            ))))));
         VmResult::Ok(())
     }
 
@@ -1676,7 +1687,12 @@ impl Vm {
             tuple[n] = vm_try!(self.stack.address(*arg));
         }
 
-        self.stack.push(OwnedTuple::from(tuple));
+        vm_try!(self
+            .stack
+            .push(vm_try!(Value::try_from(vm_try!(OwnedTuple::try_from(
+                tuple
+            ))))));
+
         VmResult::Ok(())
     }
 
@@ -1684,8 +1700,9 @@ impl Vm {
     #[cfg_attr(feature = "bench", inline(never))]
     fn op_push_tuple(&mut self) -> VmResult<()> {
         let tuple = vm_try!(vm_try!(self.stack.pop()).into_tuple());
-        self.stack
-            .extend(vm_try!(tuple.borrow_ref()).iter().cloned());
+        vm_try!(self
+            .stack
+            .extend(vm_try!(tuple.borrow_ref()).iter().cloned()));
         VmResult::Ok(())
     }
 
@@ -1702,7 +1719,7 @@ impl Vm {
             }
         };
 
-        self.stack.push(value);
+        vm_try!(self.stack.push(value));
         VmResult::Ok(())
     }
 
@@ -1719,7 +1736,7 @@ impl Vm {
             }
         };
 
-        self.stack.push(value);
+        vm_try!(self.stack.push(value));
         VmResult::Ok(())
     }
 
@@ -1858,13 +1875,13 @@ impl Vm {
                 let rhs = vm_try!(self.stack.address(rhs));
                 let lhs = vm_try!(self.stack.address(lhs));
                 let test = vm_try!(Value::partial_eq_with(&lhs, &rhs, self));
-                self.stack.push(test);
+                vm_try!(self.stack.push(Value::from(test)));
             }
             InstOp::Neq => {
                 let rhs = vm_try!(self.stack.address(rhs));
                 let lhs = vm_try!(self.stack.address(lhs));
                 let test = vm_try!(Value::partial_eq_with(&lhs, &rhs, self));
-                self.stack.push(!test);
+                vm_try!(self.stack.push(Value::from(!test)));
             }
             InstOp::And => {
                 vm_try!(self.internal_boolean_op(|a, b| a && b, "&&", lhs, rhs));
@@ -1874,15 +1891,15 @@ impl Vm {
             }
             InstOp::As => {
                 let value = vm_try!(self.as_op(lhs, rhs));
-                self.stack.push(value);
+                vm_try!(self.stack.push(value));
             }
             InstOp::Is => {
                 let is_instance = vm_try!(self.test_is_instance(lhs, rhs));
-                self.stack.push(is_instance);
+                vm_try!(self.stack.push(Value::from(is_instance)));
             }
             InstOp::IsNot => {
                 let is_instance = vm_try!(self.test_is_instance(lhs, rhs));
-                self.stack.push(!is_instance);
+                vm_try!(self.stack.push(Value::from(!is_instance)));
             }
         }
 
@@ -1996,7 +2013,7 @@ impl Vm {
         match &target {
             Value::Object(object) => {
                 let mut object = vm_try!(object.borrow_mut());
-                object.insert(field.to_owned(), value);
+                vm_try!(object.insert(vm_try!(field.try_to_owned()), value));
                 return VmResult::Ok(());
             }
             Value::Struct(typed_object) => {
@@ -2008,8 +2025,8 @@ impl Vm {
                 }
 
                 return err(VmErrorKind::MissingField {
-                    field: field.to_owned(),
                     target: typed_object.type_info(),
+                    field: field.to_owned(),
                 });
             }
             Value::Variant(variant) => {
@@ -2023,8 +2040,8 @@ impl Vm {
                 }
 
                 return err(VmErrorKind::MissingField {
-                    field: field.to_owned(),
                     target: variant.type_info(),
+                    field: field.to_owned(),
                 });
             }
             _ => {}
@@ -2057,7 +2074,7 @@ impl Vm {
         }
 
         let exit = self.pop_call_frame()?;
-        self.stack.push(return_value);
+        self.stack.push(return_value)?;
         Ok(exit)
     }
 
@@ -2126,7 +2143,7 @@ impl Vm {
    #[tracing::instrument(skip(self))]
     fn op_return_unit(&mut self) -> Result<bool, VmErrorKind> {
         let exit = self.pop_call_frame()?;
-        self.stack.push(());
+        self.stack.push(Value::from(()))?;
         Ok(exit)
     }
 
@@ -2135,7 +2152,7 @@ impl Vm {
         let instance = self.stack.pop()?;
         let ty = instance.type_hash()?;
         let hash = Hash::associated_function(ty, hash);
-        self.stack.push(Value::Type(Type::new(hash)));
+        self.stack.push(Value::Type(Type::new(hash)))?;
         Ok(())
     }
 
@@ -2153,7 +2170,7 @@ impl Vm {
                     &target,
                     string_ref.as_str()
                 )) {
-                    self.stack.push(value);
+                    vm_try!(self.stack.push(value));
                     return VmResult::Ok(());
                 }
             }
@@ -2166,7 +2183,7 @@ impl Vm {
                 };
 
                 if let Some(value) = vm_try!(Self::try_tuple_like_index_get(&target, index)) {
-                    self.stack.push(value);
+                    vm_try!(self.stack.push(value));
                     return VmResult::Ok(());
                 }
             }
@@ -2194,7 +2211,7 @@ impl Vm {
         let value = vm_try!(self.stack.pop());
 
         if let Some(value) = vm_try!(Self::try_tuple_like_index_get(&value, index)) {
-            self.stack.push(value);
+            vm_try!(self.stack.push(value));
             return VmResult::Ok(());
         }
 
@@ -2232,7 +2249,7 @@ impl Vm {
         let value = vm_try!(self.stack.at_offset(offset));
 
         if let Some(value) = vm_try!(Self::try_tuple_like_index_get(value, index)) {
-            self.stack.push(value);
+            vm_try!(self.stack.push(value));
             return VmResult::Ok(());
        }
 
@@ -2255,10 +2272,10 @@ impl Vm {
     fn op_eq_bool(&mut self, boolean: bool) -> VmResult<()> {
         let value = vm_try!(self.stack.pop());
 
-        self.stack.push(match value {
+        vm_try!(self.stack.push(Value::from(match value {
             Value::Bool(actual) => actual == boolean,
             _ => false,
-        });
+        })));
 
         VmResult::Ok(())
     }
@@ -2270,7 +2287,7 @@ impl Vm {
         match vm_try!(self.try_object_slot_index_get(target, string_slot)) {
             CallResult::Ok(value) => {
-                self.stack.push(value);
+                vm_try!(self.stack.push(value));
                 VmResult::Ok(())
             }
             CallResult::Unsupported(target) => err(VmErrorKind::UnsupportedObjectSlotIndexGet {
@@ -2303,7 +2320,7 @@ impl Vm {
         match vm_try!(self.try_object_slot_index_get(target, string_slot)) {
             CallResult::Ok(value) => {
-                self.stack.push(value);
+                vm_try!(self.stack.push(value));
                 VmResult::Ok(())
             }
             CallResult::Unsupported(target) => err(VmErrorKind::UnsupportedObjectSlotIndexGet {
@@ -2320,14 +2337,17 @@ impl Vm {
             .lookup_object_keys(slot)
             .ok_or(VmErrorKind::MissingStaticObjectKeys { slot }));
 
-        let mut object = Object::with_capacity(keys.len());
+        let mut object = vm_try!(Object::with_capacity(keys.len()));
         let values = vm_try!(self.stack.drain(keys.len()));
 
         for (key, value) in keys.iter().zip(values) {
-            object.insert(key.clone(), value);
+            let key = vm_try!(String::try_from(key.as_str()));
+            vm_try!(object.insert(key, value));
        }
 
-        self.stack.push(Shared::new(object));
+        vm_try!(self
+            .stack
+            .push(vm_try!(Value::try_from(vm_try!(Shared::new(object))))));
         VmResult::Ok(())
     }
 
@@ -2337,30 +2357,32 @@ impl Vm {
         let value = match range {
             InstRange::RangeFrom => {
                 let start = vm_try!(self.stack.pop());
-                Value::from(RangeFrom::new(start))
+                vm_try!(Value::try_from(RangeFrom::new(start)))
+            }
+            InstRange::RangeFull => {
+                vm_try!(Value::try_from(RangeFull::new()))
             }
-            InstRange::RangeFull => Value::from(RangeFull::new()),
             InstRange::RangeInclusive => {
                 let end = vm_try!(self.stack.pop());
                 let start = vm_try!(self.stack.pop());
-                Value::from(RangeInclusive::new(start, end))
+                vm_try!(Value::try_from(RangeInclusive::new(start, end)))
             }
             InstRange::RangeToInclusive => {
                 let end = vm_try!(self.stack.pop());
-                Value::from(RangeToInclusive::new(end))
+                vm_try!(Value::try_from(RangeToInclusive::new(end)))
             }
             InstRange::RangeTo => {
                 let end = vm_try!(self.stack.pop());
-                Value::from(RangeTo::new(end))
+                vm_try!(Value::try_from(RangeTo::new(end)))
             }
             InstRange::Range => {
                 let end = vm_try!(self.stack.pop());
                 let start = vm_try!(self.stack.pop());
-                Value::from(Range::new(start, end))
+                vm_try!(Value::try_from(Range::new(start, end)))
             }
         };
 
-        self.stack.push(value);
+        vm_try!(self.stack.push(value));
         VmResult::Ok(())
     }
 
@@ -2372,7 +2394,9 @@ impl Vm {
             .lookup_rtti(hash)
             .ok_or(VmErrorKind::MissingRtti { hash }));
 
-        self.stack.push(EmptyStruct { rtti: rtti.clone() });
+        vm_try!(self
+            .stack
+            .push(vm_try!(Value::try_from(EmptyStruct { rtti: rtti.clone() }))));
         VmResult::Ok(())
     }
 
@@ -2390,16 +2414,17 @@ impl Vm {
             .ok_or(VmErrorKind::MissingRtti { hash }));
 
         let values = vm_try!(self.stack.drain(keys.len()));
-        let mut data = Object::with_capacity(keys.len());
+        let mut data = vm_try!(Object::with_capacity(keys.len()));
 
         for (key, value) in keys.iter().zip(values) {
-            data.insert(key.clone(), value);
+            let key = vm_try!(String::try_from(key.as_str()));
+            vm_try!(data.insert(key, value));
        }
 
-        self.stack.push(Struct {
+        vm_try!(self.stack.push(vm_try!(Value::try_from(Struct {
             rtti: rtti.clone(),
             data,
-        });
+        })))));
 
         VmResult::Ok(())
     }
@@ -2412,7 +2437,9 @@ impl Vm {
             .lookup_variant_rtti(hash)
             .ok_or(VmErrorKind::MissingVariantRtti { hash }));
 
-        self.stack.push(Variant::unit(rtti.clone()));
+        vm_try!(self
+            .stack
+            .push(vm_try!(Value::try_from(Variant::unit(rtti.clone())))));
         VmResult::Ok(())
     }
 
@@ -2429,28 +2456,38 @@ impl Vm {
             .lookup_variant_rtti(hash)
             .ok_or(VmErrorKind::MissingVariantRtti { hash }));
 
-        let mut data = Object::with_capacity(keys.len());
+        let mut data = vm_try!(Object::with_capacity(keys.len()));
         let values = vm_try!(self.stack.drain(keys.len()));
 
         for (key, value) in keys.iter().zip(values) {
-            data.insert(key.clone(), value);
+            let key = vm_try!(String::try_from(key.as_str()));
+            vm_try!(data.insert(key, value));
        }
 
-        self.stack.push(Variant::struct_(rtti.clone(), data));
+        vm_try!(self.stack.push(vm_try!(Value::try_from(Variant::struct_(
+            rtti.clone(),
+            data
+        )))));
         VmResult::Ok(())
     }
 
     #[cfg_attr(feature = "bench", inline(never))]
     fn op_string(&mut self, slot: usize) -> VmResult<()> {
         let string = vm_try!(self.unit.lookup_string(slot));
-        self.stack.push(String::from(string.as_str()));
+        vm_try!(self
+            .stack
+            .push(vm_try!(Value::try_from(vm_try!(String::try_from(
+                string.as_str()
+            ))))));
         VmResult::Ok(())
     }
 
     #[cfg_attr(feature = "bench", inline(never))]
     fn op_bytes(&mut self, slot: usize) -> VmResult<()> {
-        let bytes = vm_try!(self.unit.lookup_bytes(slot)).to_owned();
-        self.stack.push(Bytes::from_vec(bytes));
+        let bytes = vm_try!(self.unit.lookup_bytes(slot)).to_vec();
+        vm_try!(self
+            .stack
+            .push(vm_try!(Value::try_from(Bytes::from_vec(bytes)))));
         VmResult::Ok(())
     }
 
@@ -2459,16 +2496,13 @@ impl Vm {
     fn op_string_concat(&mut self, len: usize, size_hint: usize) -> VmResult<()> {
         let values = vm_try!(self.stack.drain(len)).collect::<vec::Vec<_>>();
 
-        let mut f = Formatter::with_capacity(size_hint);
+        let mut f = vm_try!(Formatter::with_capacity(size_hint));
 
         for value in values {
-            if let Result::Err(fmt::Error) = vm_try!(value.string_display_with(&mut f, &mut *self))
-            {
-                return err(VmErrorKind::FormatError);
-            }
+            vm_try!(value.string_display_with(&mut f, &mut *self));
        }
 
-        self.stack.push(f.into_string());
+        vm_try!(self.stack.push(vm_try!(Value::try_from(f.string))));
         VmResult::Ok(())
     }
 
@@ -2476,14 +2510,18 @@ impl Vm {
     #[cfg_attr(feature = "bench", inline(never))]
     fn op_format(&mut self, spec: FormatSpec) -> VmResult<()> {
         let value = vm_try!(self.stack.pop());
-        self.stack.push(Format { value, spec });
+        vm_try!(self
+            .stack
+            .push(vm_try!(Value::try_from(Format { value, spec }))));
         VmResult::Ok(())
     }
 
     #[cfg_attr(feature = "bench", inline(never))]
     fn op_is_unit(&mut self) -> VmResult<()> {
         let value = vm_try!(self.stack.pop());
-        self.stack.push(matches!(value, Value::EmptyTuple));
+        vm_try!(self
+            .stack
+            .push(Value::from(matches!(value, Value::EmptyTuple))));
         VmResult::Ok(())
     }
 
@@ -2493,8 +2531,8 @@ impl Vm {
         let value = vm_try!(self.stack.address(address));
 
         let result = match value {
-            Value::Result(result) => result::result_try(vm_try!(result.take())),
-            Value::Option(option) => option::option_try(vm_try!(option.take())),
+            Value::Result(result) => vm_try!(result::result_try(vm_try!(result.take()))),
+            Value::Option(option) => vm_try!(option::option_try(vm_try!(option.take()))),
             value => {
                 if let CallResult::Unsupported(target) =
                     vm_try!(self.call_instance_fn(value, Protocol::TRY, ()))
@@ -2512,7 +2550,7 @@ impl Vm {
         match result {
             ControlFlow::Continue(value) => {
                 if preserve {
-                    self.stack.push(value);
+                    vm_try!(self.stack.push(value));
                }
 
                 VmResult::Ok(false)
@@ -2527,10 +2565,10 @@ impl Vm {
     fn op_eq_byte(&mut self, byte: u8) -> VmResult<()> {
         let value = vm_try!(self.stack.pop());
 
-        self.stack.push(match value {
+        vm_try!(self.stack.push(Value::from(match value {
             Value::Byte(actual) => actual == byte,
             _ => false,
-        });
+        })));
 
         VmResult::Ok(())
     }
 
@@ -2539,10 +2577,10 @@ impl Vm {
     fn op_eq_character(&mut self, character: char) -> VmResult<()> {
         let value = vm_try!(self.stack.pop());
 
-        self.stack.push(match value {
+        vm_try!(self.stack.push(Value::from(match value {
             Value::Char(actual) => actual == character,
             _ => false,
-        });
+        })));
 
         VmResult::Ok(())
     }
 
@@ -2551,10 +2589,10 @@ impl Vm {
     fn op_eq_integer(&mut self, integer: i64) -> VmResult<()> {
         let value = vm_try!(self.stack.pop());
 
-        self.stack.push(match value {
+        vm_try!(self.stack.push(Value::from(match value {
             Value::Integer(actual) => actual == integer,
             _ => false,
-        });
+        })));
 
         VmResult::Ok(())
     }
 
@@ -2569,12 +2607,12 @@ impl Vm {
             Value::String(actual) => {
                 let string = vm_try!(self.unit.lookup_string(slot));
                 let actual = vm_try!(actual.borrow_ref());
-                *actual == ***string
+                actual.as_str() == string.as_str()
             }
             _ => false,
        };
 
-        self.stack.push(Value::Bool(equal));
+        vm_try!(self.stack.push(Value::Bool(equal)));
         VmResult::Ok(())
     }
 
@@ -2593,7 +2631,7 @@ impl Vm {
             _ => false,
        };
 
-        self.stack.push(Value::Bool(equal));
+        vm_try!(self.stack.push(Value::Bool(equal)));
         VmResult::Ok(())
     }
 
@@ -2609,7 +2647,7 @@ impl Vm {
             }
         }));
 
-        self.stack.push(Value::Bool(result.unwrap_or_default()));
+        vm_try!(self.stack.push(Value::Bool(result.unwrap_or_default())));
         VmResult::Ok(())
     }
 
@@ -2617,7 +2655,7 @@ impl Vm {
     fn op_match_type(&mut self, hash: Hash) -> VmResult<()> {
         let value = vm_try!(self.stack.pop());
         let is_match = vm_try!(value.type_hash()) == hash;
-        self.stack.push(is_match);
+        vm_try!(self.stack.push(Value::from(is_match)));
         VmResult::Ok(())
     }
 
@@ -2647,7 +2685,7 @@ impl Vm {
             _ => false,
        };
 
-        self.stack.push(is_match);
+        vm_try!(self.stack.push(Value::from(is_match)));
         VmResult::Ok(())
     }
 
@@ -2690,31 +2728,13 @@ impl Vm {
             _ => false,
        };
 
-        self.stack.push(is_match);
+        vm_try!(self.stack.push(Value::from(is_match)));
         VmResult::Ok(())
     }
 
     #[cfg_attr(feature = "bench", inline(never))]
     fn op_match_object(&mut self, slot: usize, exact: bool) -> VmResult<()> {
-        let value = vm_try!(self.stack.pop());
-
-        let is_match = match value {
-            Value::Object(object) => {
-                let keys = vm_try!(self
-                    .unit
-                    .lookup_object_keys(slot)
-                    .ok_or(VmErrorKind::MissingStaticObjectKeys { slot }));
-
-                let object = vm_try!(object.borrow_ref());
-                test(&object, keys, exact)
-            }
-            _ => false,
-        };
-
-        self.stack.push(is_match);
-        return VmResult::Ok(());
-
-        fn test(object: &Object, keys: &[String], exact: bool) -> bool {
+        fn test(object: &Object, keys: &[std::String], exact: bool) -> bool {
             if exact {
                 if object.len() != keys.len() {
                     return false;
@@ -2724,13 +2744,31 @@ impl Vm {
             }
 
             for key in keys {
-                if !object.contains_key(key) {
+                if !object.contains_key(key.as_str()) {
                     return false;
                }
            }
 
             true
        }
+
+        let value = vm_try!(self.stack.pop());
+
+        let is_match = match value {
+            Value::Object(object) => {
+                let keys = vm_try!(self
+                    .unit
+                    .lookup_object_keys(slot)
+                    .ok_or(VmErrorKind::MissingStaticObjectKeys { slot }));
+
+                let object = vm_try!(object.borrow_ref());
+                test(&object, keys, exact)
+            }
+            _ => false,
+        };
+
+        vm_try!(self.stack.push(Value::from(is_match)));
+        VmResult::Ok(())
     }
 
     /// Push the given variant onto the stack.
@@ -2739,20 +2777,24 @@ impl Vm {
         match variant {
             InstVariant::Some => {
                 let some = vm_try!(self.stack.pop());
-                self.stack.push(Value::Option(Shared::new(Some(some))));
+                vm_try!(self
+                    .stack
+                    .push(Value::Option(vm_try!(Shared::new(Some(some))))));
             }
             InstVariant::None => {
-                self.stack.push(Value::Option(Shared::new(None)));
+                vm_try!(self.stack.push(Value::Option(vm_try!(Shared::new(None)))));
             }
             InstVariant::Ok => {
                 let some = vm_try!(self.stack.pop());
-                self.stack
-                    .push(Value::Result(Shared::new(Result::Ok(some))));
+                vm_try!(self
+                    .stack
+                    .push(Value::Result(vm_try!(Shared::new(Result::Ok(some))))));
             }
             InstVariant::Err => {
                 let some = vm_try!(self.stack.pop());
-                self.stack
-                    .push(Value::Result(Shared::new(Result::Err(some))));
+                vm_try!(self
+                    .stack
+                    .push(Value::Result(vm_try!(Shared::new(Result::Err(some))))));
            }
        }
 
@@ -2763,7 +2805,9 @@ impl Vm {
     #[cfg_attr(feature = "bench", inline(never))]
     fn op_load_fn(&mut self, hash: Hash) -> VmResult<()> {
         let function = vm_try!(self.lookup_function_by_hash(hash));
-        self.stack.push(Value::Function(Shared::new(function)));
+        vm_try!(self
+            .stack
+            .push(Value::Function(vm_try!(Shared::new(function)))));
         VmResult::Ok(())
     }
 
@@ -2780,7 +2824,8 @@ impl Vm {
             _ => return err(VmErrorKind::MissingFunction { hash }),
        };
 
-        let environment = vm_try!(self.stack.pop_sequence(count)).into_boxed_slice();
+        let environment =
+            vm_try!(vm_try!(vm_try!(self.stack.pop_sequence(count))).try_into_boxed_slice());
 
         let function = Function::from_vm_closure(
             self.context.clone(),
@@ -2792,7 +2837,9 @@ impl Vm {
             hash,
        );
 
-        self.stack.push(Value::Function(Shared::new(function)));
+        vm_try!(self
+            .stack
+            .push(Value::Function(vm_try!(Shared::new(function)))));
         VmResult::Ok(())
     }
 
@@ -2817,21 +2864,23 @@ impl Vm {
                     .lookup_rtti(hash)
                     .ok_or(VmErrorKind::MissingRtti { hash }));
 
-                self.stack.push(Value::empty_struct(rtti.clone()));
+                vm_try!(self.stack.push(vm_try!(Value::empty_struct(rtti.clone()))));
             }
             UnitFn::TupleStruct {
                 hash,
                 args: expected,
             } => {
                 vm_try!(check_args(args, expected));
-                let tuple = vm_try!(self.stack.pop_sequence(args));
+                let tuple = vm_try!(vm_try!(self.stack.pop_sequence(args)));
 
                 let rtti = vm_try!(self
                     .unit
                     .lookup_rtti(hash)
                     .ok_or(VmErrorKind::MissingRtti { hash }));
 
-                self.stack.push(Value::tuple_struct(rtti.clone(), tuple));
+                vm_try!(self
                    .stack
                    .push(vm_try!(Value::tuple_struct(rtti.clone(), tuple))));
             }
             UnitFn::TupleVariant {
                 hash,
@@ -2844,8 +2893,10 @@ impl Vm {
                     .lookup_variant_rtti(hash)
                     .ok_or(VmErrorKind::MissingVariantRtti { hash }));
 
-                let tuple = vm_try!(self.stack.pop_sequence(args));
-                self.stack.push(Value::tuple_variant(rtti.clone(), tuple));
+                let tuple = vm_try!(vm_try!(self.stack.pop_sequence(args)));
+                vm_try!(self
+                    .stack
+                    .push(vm_try!(Value::tuple_variant(rtti.clone(), tuple))));
             }
             UnitFn::UnitVariant { hash } => {
                 vm_try!(check_args(args, 0));
 
                 let rtti = vm_try!(self
                     .unit
                     .lookup_variant_rtti(hash)
                     .ok_or(VmErrorKind::MissingVariantRtti { hash }));
 
-                self.stack.push(Value::unit_variant(rtti.clone()));
+                vm_try!(self.stack.push(vm_try!(Value::unit_variant(rtti.clone()))));
             }
         },
         None => {
@@ -2980,11 +3031,11 @@ impl Vm {
     /// // Call the string_display protocol on `output`. This requires
     /// // access to a virtual machine since it might use functions
     /// // registered in the unit associated with it.
-    /// let mut f = Formatter::default();
+    /// let mut f = Formatter::new();
     ///
     /// // Note: We do an extra unwrap because the return value is
    /// // `fmt::Result`.
-    /// vm.with(|| output.string_display(&mut f)).into_result()?.expect("formatting should succeed");
+    /// vm.with(|| output.string_display(&mut f)).into_result()?;
     /// # Ok::<_, rune::Error>(())
     /// ```
     pub fn with<F, T>(&mut self, f: F) -> T
@@ -3244,7 +3295,7 @@ impl Vm {
                     return VmResult::Ok(VmHalt::Yielded);
                 }
                 Inst::YieldUnit => {
-                    self.stack.push(Value::EmptyTuple);
+                    vm_try!(self.stack.push(Value::EmptyTuple));
                     return VmResult::Ok(VmHalt::Yielded);
                 }
                 Inst::Variant { variant } => {
@@ -3269,6 +3320,19 @@ impl Vm {
     }
 }
 
+impl TryClone for Vm {
+    fn try_clone(&self) -> Result<Self, Error> {
+        Ok(Self {
+            context: self.context.clone(),
+            unit: self.unit.clone(),
+            ip: self.ip,
+            last_ip_len: self.last_ip_len,
+            stack: self.stack.try_clone()?,
+            call_frames: self.call_frames.clone(),
+        })
+    }
+}
+
 impl AsMut<Vm> for Vm {
     #[inline]
     fn as_mut(&mut self) -> &mut Vm {
diff --git a/crates/rune/src/runtime/vm_call.rs b/crates/rune/src/runtime/vm_call.rs
index eef712b02..92b38d47a 100644
--- a/crates/rune/src/runtime/vm_call.rs
+++ b/crates/rune/src/runtime/vm_call.rs
@@ -1,5 +1,6 @@
 use crate::no_std::sync::Arc;
 
+use crate::alloc::IteratorExt;
 use crate::runtime::vm_execution::VmExecutionState;
 use crate::runtime::{
     Call, Future, Generator, RuntimeContext, Stack, Stream, Unit, Value, Vm, VmErrorKind,
@@ -40,7 +41,9 @@ impl VmCall {
             Call::Async => {
                 let vm = vm_try!(self.build_vm(execution));
                 let mut execution = vm.into_execution();
-                Value::from(Future::new(async move { execution.async_complete().await }))
+                vm_try!(Value::try_from(Future::new(async move {
+                    execution.async_complete().await
+                })))
             }
             Call::Immediate => {
                 execution.push_state(VmExecutionState {
@@ -52,15 +55,15 @@ impl VmCall {
             }
             Call::Stream => {
                 let vm = vm_try!(self.build_vm(execution));
-                Value::from(Stream::new(vm))
+                vm_try!(Value::try_from(Stream::new(vm)))
             }
             Call::Generator => {
                 let vm = vm_try!(self.build_vm(execution));
-                Value::from(Generator::new(vm))
+                vm_try!(Value::try_from(Generator::new(vm)))
             }
        };
 
-        execution.vm_mut().stack_mut().push(value);
+        vm_try!(execution.vm_mut().stack_mut().push(value));
         VmResult::Ok(())
     }
 
@@ -74,7 +77,7 @@ impl VmCall {
 
         tracing::trace!(args);
 
-        let new_stack = vm_try!(vm.stack_mut().drain(args)).collect::<Stack>();
+        let new_stack = vm_try!(vm_try!(vm.stack_mut().drain(args)).try_collect::<Stack>());
 
         let Some(ip) = vm_try!(vm.pop_call_frame_from_call()) else {
             return VmResult::err(VmErrorKind::MissingCallFrame);
diff --git a/crates/rune/src/runtime/vm_error.rs b/crates/rune/src/runtime/vm_error.rs
index 1322c2687..8e7bdc142 100644
--- a/crates/rune/src/runtime/vm_error.rs
+++ b/crates/rune/src/runtime/vm_error.rs
@@ -1,8 +1,10 @@
+use core::convert::Infallible;
 use core::fmt;
 
 use crate::no_std::prelude::*;
 use crate::no_std::sync::Arc;
 
+use crate::alloc::{AllocError, CustomError, Error};
 use crate::compile::ItemBuf;
 use crate::hash::Hash;
 use crate::runtime::unit::{BadInstruction, BadJump};
@@ -124,6 +126,23 @@ pub struct VmError {
 }
 
 impl VmError {
+    pub(crate) fn new<E>(error: E) -> Self
+    where
+        VmErrorKind: From<E>,
+    {
+        Self {
+            inner: Box::new(VmErrorInner {
+                error: VmErrorAt {
+                    #[cfg(feature = "emit")]
+                    index: 0,
+                    kind: VmErrorKind::from(error),
+                },
+                chain: Vec::new(),
+                stacktrace: Vec::new(),
+            }),
+        }
+    }
+
     /// Construct an error containing a panic.
     pub fn panic<D>(message: D) -> Self
     where
@@ -363,16 +382,19 @@ where
     VmErrorKind: From<E>,
 {
     fn from(error: E) -> Self {
-        Self {
-            inner: Box::new(VmErrorInner {
-                error: VmErrorAt {
-                    #[cfg(feature = "emit")]
-                    index: 0,
-                    kind: VmErrorKind::from(error),
-                },
-                chain: Vec::new(),
-                stacktrace: Vec::new(),
-            }),
+        Self::new(error)
+    }
+}
+
+impl<E> From<CustomError<E>> for VmError
+where
+    VmError: From<E>,
+{
+    #[inline]
+    fn from(error: CustomError<E>) -> Self {
+        match error {
+            CustomError::Custom(error) => Self::from(error),
+            CustomError::Error(error) => VmError::new(error),
        }
     }
 }
@@ -443,7 +465,6 @@ pub(crate) enum VmErrorKind {
     Halted {
         halt: VmHaltInfo,
     },
-    FormatError,
     Overflow,
     Underflow,
     DivideByZero,
@@ -539,7 +560,7 @@ pub(crate) enum VmErrorKind {
         target: TypeInfo,
         index: VmIntegerRepr,
     },
-    #[cfg(feature = "std")]
+    #[cfg(feature = "alloc")]
     MissingIndexKey {
         target: TypeInfo,
     },
@@ -586,9 +607,6 @@ pub(crate) enum VmErrorKind {
     ConstNotSupported {
         actual: TypeInfo,
     },
-    KeyNotSupported {
-        actual: TypeInfo,
-    },
     MissingInterfaceEnvironment,
     ExpectedExecutionState {
         expected: ExecutionState,
@@ -623,11 +641,18 @@ pub(crate) enum VmErrorKind {
         lhs: f64,
         rhs: f64,
     },
-    #[cfg(feature = "std")]
+    #[cfg(feature = "alloc")]
     IllegalFloatOperation {
         value: f64,
     },
     MissingCallFrame,
+    IllegalFormat,
+    TryReserveError {
+        error: Error,
+    },
+    AllocError {
+        error: AllocError,
+    },
 }
 
 impl fmt::Display for VmErrorKind {
@@ -646,7 +671,6 @@ impl fmt::Display for VmErrorKind {
             VmErrorKind::Panic { reason } => write!(f, "Panicked: {reason}",),
             VmErrorKind::NoRunningVm {} => write!(f, "No running virtual machines"),
             VmErrorKind::Halted { halt } => write!(f, "Halted for unexpected reason `{halt}`",),
-            VmErrorKind::FormatError {} => write!(f, "Failed to format argument"),
             VmErrorKind::Overflow {} => write!(f, "Numerical overflow"),
             VmErrorKind::Underflow {} => write!(f, "Numerical underflow"),
             VmErrorKind::DivideByZero {} => write!(f, "Division by zero"),
@@ -741,7 +765,7 @@ impl fmt::Display for VmErrorKind {
             VmErrorKind::MissingIndexInteger { target, index } => {
                 write!(f, "Type `{target}` missing integer index `{index}`",)
             }
-            #[cfg(feature = "std")]
+            #[cfg(feature = "alloc")]
             VmErrorKind::MissingIndexKey { target } => {
                 write!(f, "Type `{target}` missing index",)
             }
@@ -783,9 +807,6 @@ impl fmt::Display for VmErrorKind {
             VmErrorKind::ConstNotSupported { actual } => {
                 write!(f, "Type `{actual}` can't be converted to a constant value",)
             }
-            VmErrorKind::KeyNotSupported { actual } => {
-                write!(f, "Type `{actual}` can't be converted to a hash key",)
-            }
             VmErrorKind::MissingInterfaceEnvironment {} => {
                 write!(f, "Missing interface environment")
             }
@@ -824,17 +845,34 @@ impl fmt::Display for VmErrorKind {
                     "Cannot perform a comparison of the floats {lhs} and {rhs}",
                )
             }
-            #[cfg(feature = "std")]
+            #[cfg(feature = "alloc")]
             VmErrorKind::IllegalFloatOperation { value } => {
                 write!(f, "Cannot perform operation on float `{value}`",)
             }
             VmErrorKind::MissingCallFrame => {
                 write!(f, "Missing call frame for internal vm call")
             }
+            VmErrorKind::IllegalFormat => {
+                write!(f, "Value cannot be formatted")
+            }
+            VmErrorKind::TryReserveError { error } => {
+                write!(
+                    f,
+                    "Failed to allocate memory for the current operation: {error}"
+                )
+            }
+            VmErrorKind::AllocError { error } => error.fmt(f),
        }
     }
 }
 
+impl From<Infallible> for VmErrorKind {
+    #[inline(always)]
+    fn from(error: Infallible) -> Self {
+        match error {}
+    }
+}
+
 impl From<AccessError> for VmErrorKind {
     #[allow(deprecated)]
     fn from(error: AccessError) -> Self {
@@ -863,6 +901,20 @@ impl From for VmErrorKind {
     }
 }
 
+impl From<Error> for VmErrorKind {
+    #[inline]
+    fn from(error: Error) -> Self {
+        VmErrorKind::TryReserveError { error }
+    }
+}
+
+impl From<AllocError> for VmErrorKind {
+    #[inline]
+    fn from(error: AllocError) -> Self {
+        VmErrorKind::AllocError { error }
+    }
+}
+
 impl VmErrorKind {
     /// Bad argument.
     pub fn bad_argument(arg: usize) -> Self {
diff --git a/crates/rune/src/runtime/vm_execution.rs b/crates/rune/src/runtime/vm_execution.rs
index 72da6dee4..fa7d87053 100644
--- a/crates/rune/src/runtime/vm_execution.rs
+++ b/crates/rune/src/runtime/vm_execution.rs
@@ -188,7 +188,7 @@ where
             });
        }
 
-        self.head.as_mut().stack_mut().push(value);
+        vm_try!(self.head.as_mut().stack_mut().push(value));
         self.inner_async_resume().await
     }
 
@@ -198,7 +198,7 @@ where
     /// it while returning a unit from the current `yield`.
     pub async fn async_resume(&mut self) -> VmResult<GeneratorState> {
         if matches!(self.state, ExecutionState::Resumed) {
-            self.head.as_mut().stack_mut().push(Value::EmptyTuple);
+            vm_try!(self.head.as_mut().stack_mut().push(Value::EmptyTuple));
        } else {
             self.state = ExecutionState::Resumed;
        }
@@ -251,7 +251,7 @@ where
             });
        }
 
-        self.head.as_mut().stack_mut().push(value);
+        vm_try!(self.head.as_mut().stack_mut().push(value));
         self.inner_resume()
     }
 
@@ -264,7 +264,7 @@ where
     #[tracing::instrument(skip_all)]
     pub fn resume(&mut self) -> VmResult<GeneratorState> {
         if matches!(self.state, ExecutionState::Resumed) {
-            self.head.as_mut().stack_mut().push(Value::EmptyTuple);
+            vm_try!(self.head.as_mut().stack_mut().push(Value::EmptyTuple));
        } else {
             self.state = ExecutionState::Resumed;
        }
diff --git a/crates/rune/src/tests.rs b/crates/rune/src/tests.rs
index c59b23b17..5f5f7cd54 100644
--- a/crates/rune/src/tests.rs
+++ b/crates/rune/src/tests.rs
@@ -5,6 +5,7 @@
 pub(crate) mod prelude {
     pub(crate) use crate as rune;
+    pub(crate) use crate::alloc::{self, TryClone};
     pub(crate) use crate::ast;
     pub(crate) use crate::compile::{self, ErrorKind, Item, ItemBuf, Located, Named};
     pub(crate) use crate::diagnostics;
diff --git a/crates/rune/src/tests/bug_344.rs b/crates/rune/src/tests/bug_344.rs
index 3701d5da3..72fc5f7bf 100644
--- a/crates/rune/src/tests/bug_344.rs
+++ b/crates/rune/src/tests/bug_344.rs
@@ -27,7 +27,7 @@ fn bug_344_function() -> Result<()> {
     let function = runtime.function(hash).expect("expect function");
 
     let mut stack = Stack::new();
-    stack.push(GuardCheck::new());
+    stack.push(rune::to_value(GuardCheck::new())?)?;
     function(&mut stack, 1).into_result()?;
     assert_eq!(stack.pop()?.into_integer().into_result()?, 42);
     return Ok(());
@@ -61,8 +61,8 @@ fn bug_344_inst_fn() -> Result<()> {
     let function = runtime.function(hash).expect("expect function");
 
     let mut stack = Stack::new();
-    stack.push(GuardCheck::new());
-    stack.push(GuardCheck::new());
+    stack.push(rune::to_value(GuardCheck::new())?)?;
+    stack.push(rune::to_value(GuardCheck::new())?)?;
     function(&mut stack, 2).into_result()?;
 
     assert_eq!(stack.pop()?.into_integer().into_result()?, 42);
@@ -84,7 +84,7 @@ fn bug_344_async_function() -> Result<()> {
     let function = runtime.function(hash).expect("expect function");
 
     let mut stack = Stack::new();
-    stack.push(GuardCheck::new());
+    stack.push(rune::to_value(GuardCheck::new())?)?;
     function(&mut stack, 1).into_result()?;
     let future = stack.pop()?.into_future().into_result()?.take()?;
     assert_eq!(
@@ -128,8 +128,8 @@ fn bug_344_async_inst_fn() -> Result<()> {
     let function = runtime.function(hash).expect("expect function");
 
     let mut stack = Stack::new();
-    stack.push(GuardCheck::new());
-    stack.push(GuardCheck::new());
+    stack.push(rune::to_value(GuardCheck::new())?)?;
+    stack.push(rune::to_value(GuardCheck::new())?)?;
     function(&mut stack, 2).into_result()?;
 
     let future = stack.pop()?.into_future().into_result()?.take()?;
diff --git a/crates/rune/src/tests/derive_from_to_value.rs b/crates/rune/src/tests/derive_from_to_value.rs
index c2374cfd1..2199cac12 100644
--- a/crates/rune/src/tests/derive_from_to_value.rs
+++ b/crates/rune/src/tests/derive_from_to_value.rs
@@ -15,7 +15,7 @@ struct TestUnit;
 
 #[derive(FromValue)]
 struct TestNamed {
-    a: Mut<String>,
+    a: Mut<alloc::String>,
     b: Mut,
     c: Mut,
     d: Ref,
@@ -23,11 +23,11 @@ struct TestNamed {
 }
 
 #[derive(FromValue)]
-struct TestUnnamed(Mut<String>, Mut<Custom>);
+struct TestUnnamed(Mut<alloc::String>, Mut<Custom>);
 
 #[derive(ToValue)]
 struct Test2 {
-    a: String,
+    a: alloc::String,
     b: OwnedTuple,
     c: Object,
     d: Custom,
@@ -35,20 +35,20 @@ struct Test2 {
 }
 
 #[derive(ToValue)]
-struct Test2Unnamed(String, Custom);
+struct Test2Unnamed(alloc::String, Custom);
 
 #[derive(FromValue)]
 enum TestEnum {
     TestUnit,
     TestNamed {
-        a: Mut<String>,
+        a: Mut<alloc::String>,
         b: Mut,
         c: Mut,
         d: Ref,
         e: Mut,
     },
     TestUnnamed(
-        Mut<String>,
+        Mut<alloc::String>,
         Mut,
         Mut,
         Ref,
diff --git a/crates/rune/src/tests/external_ops.rs b/crates/rune/src/tests/external_ops.rs
index d4c7dbf89..c6599a2f7 100644
--- a/crates/rune/src/tests/external_ops.rs
+++ b/crates/rune/src/tests/external_ops.rs
@@ -68,7 +68,7 @@ fn assign_ops_struct() -> Result<()> {
             foo.derived = $initial;
             foo.custom = $initial;
 
-            let output = vm.clone().call(["type"], (&mut foo,))?;
+            let output = vm.try_clone()?.call(["type"], (&mut foo,))?;
 
             assert_eq!(foo.value, $expected, "{} != {} (value)", foo.value, $expected);
             assert_eq!(foo.field, $expected, "{} != {} (field)", foo.field, $expected);
@@ -150,7 +150,7 @@ fn assign_ops_tuple() -> Result<()> {
             foo.2 = $initial;
             foo.3 = $initial;
 
-            let output = vm.clone().call(["type"], (&mut foo,))?;
+            let output = vm.try_clone()?.call(["type"], (&mut foo,))?;
 
             assert_eq!(foo.0, $expected, "{} != {} (value .0)", foo.0, $expected);
             assert_eq!(foo.1, $expected, "{} != {} (field .1)", foo.1, $expected);
@@ -219,7 +219,7 @@ fn ordering_struct() -> Result<()> {
             let mut foo = External::default();
             foo.value = $initial;
 
-            let output = vm.clone().call(["type"], (&mut foo,))?;
+            let output = vm.try_clone()?.call(["type"], (&mut foo,))?;
             let a = <Ordering as FromValue>::from_value(output).into_result()?;
 
             assert_eq!(a, $expected, "{} != {} (value)", foo.value, $expected);
@@ -288,7 +288,7 @@ fn eq_struct() -> Result<()> {
             let mut foo = External::default();
             foo.value = $initial;
 
-            let output = vm.clone().call(["type"], (&mut foo,))?;
+            let output = vm.try_clone()?.call(["type"], (&mut foo,))?;
             let a = <bool as FromValue>::from_value(output).into_result()?;
 
             assert_eq!(a, $expected, "{} != {} (value)", foo.value, $expected);
diff --git a/crates/rune/src/tests/unit_constants.rs b/crates/rune/src/tests/unit_constants.rs
index 627a8108e..76781dfdb 100644
--- a/crates/rune/src/tests/unit_constants.rs
+++ b/crates/rune/src/tests/unit_constants.rs
@@ -17,6 +17,7 @@ fn test_get_const() -> Result<()> {
             .expect("successful lookup")
             .clone()
             .into_value()
+            .expect("could not allocate value")
             .into_integer()
             .expect("the inner value"),
         1337
@@ -45,6 +46,7 @@ fn test_get_const_re_export() -> Result<()> {
             .expect("successful lookup")
             .clone()
             .into_value()
+            .expect("could not allocate value")
             .into_integer()
             .expect("the inner value"),
         1337
@@ -71,6 +73,7 @@ fn test_get_const_nested() -> Result<()> {
             .expect("successful lookup")
             .clone()
             .into_value()
+            .expect("could not allocate value")
             .into_integer()
             .expect("the inner value"),
         1337
diff --git a/examples/examples/object.rs b/examples/examples/object.rs
index 8fc480750..c7e54537d 100644
--- a/examples/examples/object.rs
+++ b/examples/examples/object.rs
@@ -1,3 +1,4 @@
+use rune::alloc;
 use rune::runtime::Object;
 use rune::termcolor::{ColorChoice, StandardStream};
 use rune::{Diagnostics, Value, Vm};
@@ -34,7 +35,7 @@ fn main() -> rune::Result<()> {
     let mut vm = Vm::new(runtime, Arc::new(unit));
 
     let mut object = Object::new();
-    object.insert(String::from("key"), Value::from(42i64));
+    object.insert(alloc::String::try_from("key")?, Value::from(42i64))?;
 
     let output = vm.call(["calc"], (object,))?;
     let output: Object = rune::from_value(output)?;
diff --git a/examples/examples/proxy.rs b/examples/examples/proxy.rs
index b330e91d3..a94636864 100644
--- a/examples/examples/proxy.rs
+++ b/examples/examples/proxy.rs
@@ -1,3 +1,4 @@
+use rune::alloc;
 use rune::runtime::{Mut, Ref};
 use rune::termcolor::{ColorChoice, StandardStream};
 use rune::{Any, Context, Diagnostics, FromValue, Vm};
@@ -11,7 +12,7 @@ struct MyBytes {
 
 #[derive(FromValue)]
 struct Proxy {
-    field: Mut<String>,
+    field: Mut<alloc::String>,
     my_bytes: Ref<MyBytes>,
 }
diff --git a/examples/examples/tokio_spawn.rs b/examples/examples/tokio_spawn.rs
index 2844437eb..da737d36b 100644
--- a/examples/examples/tokio_spawn.rs
+++ b/examples/examples/tokio_spawn.rs
@@ -1,3 +1,4 @@
+use rune::alloc::TryClone;
 use rune::termcolor::{ColorChoice, StandardStream};
 use rune::{Diagnostics, Vm};
 use std::sync::Arc;
@@ -31,13 +32,13 @@ async fn main() -> rune::Result<()> {
 
     let vm = Vm::new(runtime, Arc::new(unit));
 
-    let execution = vm.clone().send_execute(["main"], (5u32,))?;
+    let execution = vm.try_clone()?.send_execute(["main"], (5u32,))?;
     let t1 = tokio::spawn(async move {
         execution.async_complete().await.unwrap();
         println!("timer ticked");
     });
 
-    let execution = vm.clone().send_execute(["main"], (2u32,))?;
+    let execution = vm.try_clone()?.send_execute(["main"], (2u32,))?;
     let t2 = tokio::spawn(async move {
         execution.async_complete().await.unwrap();
         println!("timer ticked");
diff --git a/examples/examples/vec_args.rs b/examples/examples/vec_args.rs
index 2ba9247b3..8e6289cb7 100644
--- a/examples/examples/vec_args.rs
+++ b/examples/examples/vec_args.rs
@@ -1,7 +1,9 @@
+use std::sync::Arc;
+
+use rune::alloc::Vec;
 use rune::runtime::{Function, VmResult};
 use rune::termcolor::{ColorChoice, StandardStream};
 use rune::{ContextError, Diagnostics, Module, Value, Vm};
-use std::sync::Arc;
 
 fn main() -> rune::Result<()> {
     let m = module()?;
diff --git a/tools/import_hashbrown.ps1 b/tools/import_hashbrown.ps1
deleted file mode 100644
index 1180c3afa..000000000
--- a/tools/import_hashbrown.ps1
+++ /dev/null
@@ -1,8 +0,0 @@
-$Path = "D:\Repo\hashbrown"
-Copy-Item $Path\src\raw\ -Destination crates\rune\src\hashbrown\fork\ -Recurse -Force
-Copy-Item $Path\src\scopeguard.rs -Destination crates\rune\src\hashbrown\fork\scopeguard.rs -Force
-Copy-Item $Path\src\macros.rs -Destination crates\rune\src\hashbrown\fork\macros.rs -Force
-
-$template = Get-Content -Path crates\rune\src\hashbrown\fork\raw\mod.rs -Encoding UTF8 -Raw
-$template = $template -replace 'crate::(?!alloc)', 'crate::hashbrown::fork::'
-Set-Content -Path crates\rune\src\hashbrown\fork\raw\mod.rs -Value $template -Encoding UTF8
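
Notes on the patterns this patch establishes, sketched for reference.

The benches, tests, and examples above all migrate from `Clone` to `TryClone` when duplicating a `Vm`. A minimal sketch of what that looks like from the embedding side; the `runtime`/`unit` setup and the `["main"]` entry point are assumed here, not part of this patch:

```rust
use std::sync::Arc;

use rune::alloc::TryClone;
use rune::runtime::RuntimeContext;
use rune::{Unit, Vm};

/// Call the assumed `main` entry point twice, each time on a fresh copy
/// of the virtual machine.
fn run_twice(runtime: Arc<RuntimeContext>, unit: Arc<Unit>) -> rune::Result<()> {
    let vm = Vm::new(runtime, unit);

    // `Vm` no longer implements `Clone`: duplicating it allocates a new
    // stack, so the copy is made with `try_clone` and allocation failure
    // is reported as an error instead of aborting the process.
    let first = vm.try_clone()?.call(["main"], (1i64,))?;
    let second = vm.try_clone()?.call(["main"], (2i64,))?;

    println!("{first:?} {second:?}");
    Ok(())
}
```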
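The collection types follow the same discipline: construction and insertion now return `Result` at every allocation point. A small sketch combining the `Object` and `runtime::Vec` APIs touched above (`build_args` is a made-up helper):

```rust
use rune::alloc;
use rune::runtime::{Object, Vec};
use rune::Value;

/// Build an object and a vector using only the fallible APIs.
fn build_args() -> rune::Result<(Object, Vec)> {
    let mut object = Object::new();
    // Key construction and insertion can both fail on allocation.
    object.insert(alloc::String::try_from("key")?, Value::from(42i64))?;

    // `with_capacity` and `push` are fallible as well.
    let mut vec = Vec::with_capacity(2)?;
    vec.push(Value::from(1i64))?;
    vec.push(Value::Bool(true))?;

    Ok((object, vec))
}
```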
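Finally, `TryClone` is an ordinary trait, so embedder types can opt in the same way the manual `impl TryClone for Vm` above does. A sketch under the assumption that `rune::alloc::Error` is the exported error type; `Snapshot` is a hypothetical type for illustration:

```rust
use rune::alloc::{self, TryClone};

/// A made-up embedder type holding an allocating field.
struct Snapshot {
    name: alloc::String,
    generation: usize,
}

impl TryClone for Snapshot {
    fn try_clone(&self) -> Result<Self, alloc::Error> {
        Ok(Self {
            // Deep, allocating copies go through `try_clone`...
            name: self.name.try_clone()?,
            // ...while `Copy` fields are duplicated as before.
            generation: self.generation,
        })
    }
}
```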