binary-heap-plus-0.5.0/.cargo_vcs_info.json0000644000000001120000000000100142100ustar { "git": { "sha1": "8565858f610451f3ec8d5530f9a6616f3896474d" } } binary-heap-plus-0.5.0/.github/workflows/rust.yml000064400000000000000000000030550072674642500201560ustar 00000000000000name: Rust on: push: branches: [ master ] pull_request: branches: [ master ] jobs: build: strategy: matrix: os: - ubuntu-latest - windows-latest - macos-latest rust: - stable - 1.56.0 # MSRV cargo_args: - "" - --features serde include: - os: ubuntu-latest rust: nightly cargo_args: "" runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v2 - name: Install toolchain uses: actions-rs/toolchain@v1 with: toolchain: ${{ matrix.rust }} override: true - name: Run cargo check uses: actions-rs/cargo@v1 with: command: check args: ${{ matrix.cargo_args }} - name: Run cargo build uses: actions-rs/cargo@v1 with: command: build args: ${{ matrix.cargo_args }} - name: Run cargo test uses: actions-rs/cargo@v1 with: command: test args: ${{ matrix.cargo_args }} - name: Run cargo bench uses: actions-rs/cargo@v1 if: ${{ matrix.rust == 'nightly' }} with: command: bench args: ${{ matrix.cargo_args }} # - name: Build # run: cargo build --verbose # - name: Build (serde) # run: cargo build --verbose --features serde # - name: Run tests # run: cargo test --verbose # - name: Run tests (serde) # run: cargo test --verbose --features serde binary-heap-plus-0.5.0/.gitignore000064400000000000000000000000620072674642500150240ustar 00000000000000.idea .vscode /target/ **/*.rs.bk Cargo.lock binary-heap-plus-0.5.0/CHANGELOG.md000064400000000000000000000066770072674642500146670ustar 00000000000000# Changelog All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
## [Unreleased] ## [0.5.0] - 2022-09-30 ### Added * `#[must_use]` attribute to many methods, porting and extending several rust-lang/rust PRs * Method `shrink_to()`, ported from rust-lang/rust * Implementation of `From<[T; N]>` for `BinaryHeap`, ported from rust-lang/rust#84111 * Links to referenced items in the documenation * Example of a min-heap, ported from rust-lang/rust#60451 * Documentation of time complexities of several methods, ported from rust-lang/rust#60952 ### Changed * Migrate to Rust 2021 Edition * Increase MSRV (minimum supported rust version) to rust 1.56.0. * Implement `From>` for `Vec` instead of `Into>` for `BinaryHeap` * Port rust-lang/rust#77435 improvement to rebuild heuristic of `BinaryHeap::append()` * Use italics with big-O notation in documentation, ported from rust-lang/rust#71167 * Relax trait bound `C: Compare` on `BinaryHeap` struct and certain methods, in part ported from rust-lang/rust#58421 * Synchronize internal implementation details with `std::collections::BinaryHeap` in Rust 1.62.0 ## [0.4.1] - 2021-01-06 ### Added * Performance improvement (#28) * Port rust-lang/rust#78857 - Improve BinaryHeap performance ## [0.4.0] - 2020-09-29 ### Added * Performance improvement (#27) * Avoid sift_down for unmutated PeekMut (rust#75974) ### Changed * Bump MSRV (minimum supported rust version) to rust 1.32.0. ## [0.3.1] - 2020-09-24 ### Added * generic constructor `from_vec_cmp_raw()`. * `replace_cmp()` which replace the comparator of heap and its unsafe version `replace_cmp_raw()`. ## [0.3.0] - 2020-07-08 ### Added * Quickstart section in the doc * `.into_iter_sorted()` which returns the values in *heap* order. (#13) * Note: `.into_iter()` returns the values in *arbitrary* order. There is a good reason (ex. make `.extend()` efficient) for `std` to do that, it was surprising to some people. ### Changed * Migrate to Rust 2018 Edition. * Bump MSRV (minimum supported rust version) to rust 1.31.1. * [CI] Switched to Github actions. 
* [CI] travis is removed because it was unreliable. * [CI] appveyor is removed because it's slow. ## [0.2.0] - 2019-05-27 ### Changed * [COMPATIBILITY] Use `Compare` trait from `compare` crate instead of our own definition. Most users should not be affected by this. TIP: External `Compare` impls needs to be updated to use `Fn` instead of `FnMut`. * [COMPATIBILITY] rename feature `serde1` to `serde` in order to comply with the guideline: https://rust-lang-nursery.github.io/api-guidelines/interoperability.html#c-serde * Refactor ctor impl. ## [0.1.6] - 2019-05-21 ### Added * generic constructor `from_vec()` and `from_vec_cmp()`. ### Changed * Refactor other ctor to call above methods. ## [0.1.5] - 2019-05-20 ### Added * `serde1` feature which adds Serialize/Deserialize ## [0.1.4] ### Fixed * Merge #1) Do not require T: Ord when a custom comparator is provided ## [0.1.3] - 2018-05-14 * Add comprehensive CI based on `trust` CI template v0.1.2 * README.md tweaks. ## [0.1.2] - 2018-05-14 * Cargo.toml tweaks ## [0.1.1] - 2018-05-14 * Initial tag binary-heap-plus-0.5.0/Cargo.toml0000644000000022020000000000100122100ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2021" rust-version = "1.56.0" name = "binary-heap-plus" version = "0.5.0" authors = ["SEKINE, Hideki "] description = "Enhanced version of std::collections::BinaryHeap that supports max, min, and custom-order heaps." 
readme = "README.md" keywords = ["binary", "heap", "priority", "queue"] categories = ["data-structures", "algorithms"] license = "MIT" repository = "https://github.com/sekineh/binary-heap-plus-rs" resolver = "2" [dependencies.compare] version = "0.1.0" [dependencies.serde] version = "1.0.116" features = ["derive"] optional = true [dev-dependencies.rand] version = "0.8" [dev-dependencies.serde_json] version = "1.0.57" [badges] binary-heap-plus-0.5.0/Cargo.toml.orig000064400000000000000000000014600072674642500157260ustar 00000000000000[package] name = "binary-heap-plus" version = "0.5.0" authors = ["SEKINE, Hideki "] description = "Enhanced version of std::collections::BinaryHeap that supports max, min, and custom-order heaps." license = "MIT" repository = "https://github.com/sekineh/binary-heap-plus-rs" readme = "README.md" keywords = ["binary", "heap", "priority", "queue"] categories = ["data-structures", "algorithms", ] edition = "2021" rust-version = "1.56.0" [dependencies] compare = "0.1.0" serde = { version = "1.0.116", optional = true, features = ["derive"] } [dev-dependencies] serde_json = "1.0.57" rand = "0.8" [badges] # TODO: waiting for PR to land...: https://github.com/rust-lang/crates.io/pull/1838# # github-actions = { repository = "sekineh/binary-heap-plus-rs", workflow = "Rust" }binary-heap-plus-0.5.0/LICENSE000064400000000000000000000020260072674642500140430ustar 00000000000000Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. binary-heap-plus-0.5.0/README.md000064400000000000000000000041770072674642500143260ustar 00000000000000# binary-heap-plus-rs ![Rust](https://github.com/sekineh/binary-heap-plus-rs/workflows/Rust/badge.svg) Enhancement over Rust's [`std::collections::BinaryHeap`](https://doc.rust-lang.org/stable/std/collections/struct.BinaryHeap.html). It supports the following heaps and still maintains backward compatibility. - Max heap - Use `BinaryHeap::new()` or `::with_capacity()` - Min heap - Use `BinaryHeap::new_min()` or `::with_capacity_min()` - Heap ordered by closure - Use `BinaryHeap::new_by()` or `::with_capacity_by()` - Heap ordered by key generated by closure - Use `BinaryHeap::new_by_key()` or `::with_capacity_by_key()` Other notable added methods are: - `BinaryHeap::from_vec_cmp()` and `BinaryHeap::from_vec()` for more generic construction. - `.into_iter_sorted()` which is less-surprising version of `.into_iter()`. The implementation is backported from `std`. - `.replace_cmp()` which replace the comparator of the existing heap. ## Compatibility and MSRV (Minimum Supported Rust Version) This crate is based on the standard library's implementation of [`BinaryHeap`](https://doc.rust-lang.org/stable/std/collections/struct.BinaryHeap.html) from Rust 1.62.0. The minimum supported Rust version is 1.56.0. # Changes See [CHANGELOG.md](https://github.com/sekineh/binary-heap-plus-rs/blob/master/CHANGELOG.md). # Thanks - I received many valuable feedback from Pre-RFC thread [1]. 
- The current design is based on @ExpHP's suggestion that compiles on stable compiler. - DDOtten, steven099, CAD97, ExpHP, scottmcm, Nemo157 and gnzlbg, thanks for looking into the design! - @ulysseB sent me a first pull request! - @inesseq contributed feature `serde1`. - @davidli2010 contributed comparator update and `unsafe` perf optimization. # References See the following discussions for the background of the crate: - [1] https://internals.rust-lang.org/t/pre-rfc-binaryheap-flexibility/7482 - https://users.rust-lang.org/t/binaryheap-flexibility-revisited-supporting-other-than-max-heap/17062 - https://users.rust-lang.org/t/binaryheap-flexibility/8766 - https://github.com/rust-lang/rust/issues/38886 binary-heap-plus-0.5.0/appveyor.yml000064400000000000000000000000550072674642500154260ustar 00000000000000branches: only: - never-never-never binary-heap-plus-0.5.0/benches/binary_heap.rs000064400000000000000000000046460072674642500173060ustar 00000000000000#![feature(test)] extern crate test; use binary_heap_plus::BinaryHeap; use rand::{seq::SliceRandom, thread_rng}; use test::{black_box, Bencher}; #[bench] fn bench_find_smallest_1000(b: &mut Bencher) { let mut rng = thread_rng(); let mut vec: Vec = (0..100_000).collect(); vec.shuffle(&mut rng); b.iter(|| { let mut iter = vec.iter().copied(); let mut heap: BinaryHeap<_> = iter.by_ref().take(1000).collect(); for x in iter { let mut max = heap.peek_mut().unwrap(); // This comparison should be true only 1% of the time. // Unnecessary `sift_down`s will degrade performance if x < *max { *max = x; } } heap }) } #[bench] fn bench_peek_mut_deref_mut(b: &mut Bencher) { let mut bheap = BinaryHeap::from(vec![42]); let vec: Vec = (0..1_000_000).collect(); b.iter(|| { let vec = black_box(&vec); let mut peek_mut = bheap.peek_mut().unwrap(); // The compiler shouldn't be able to optimize away the `sift_down` // assignment in `PeekMut`'s `DerefMut` implementation since // the loop may not run. 
for &i in vec.iter() { *peek_mut = i; } // Remove the already minimal overhead of the sift_down std::mem::forget(peek_mut); }) } #[bench] fn bench_from_vec(b: &mut Bencher) { let mut rng = thread_rng(); let mut vec: Vec = (0..100_000).collect(); vec.shuffle(&mut rng); b.iter(|| BinaryHeap::from(vec.clone())) } #[bench] fn bench_into_sorted_vec(b: &mut Bencher) { let bheap: BinaryHeap = (0..10_000).collect(); b.iter(|| bheap.clone().into_sorted_vec()) } #[bench] fn bench_push(b: &mut Bencher) { let mut bheap = BinaryHeap::with_capacity(50_000); let mut rng = thread_rng(); let mut vec: Vec = (0..50_000).collect(); vec.shuffle(&mut rng); b.iter(|| { for &i in vec.iter() { bheap.push(i); } black_box(&mut bheap); bheap.clear(); }) } #[bench] fn bench_pop(b: &mut Bencher) { let mut bheap = BinaryHeap::with_capacity(10_000); b.iter(|| { bheap.extend((0..10_000).rev()); black_box(&mut bheap); while let Some(elem) = bheap.pop() { black_box(elem); } }) } binary-heap-plus-0.5.0/src/binary_heap.rs000064400000000000000000001612630072674642500164650ustar 00000000000000//! A priority queue implemented with a binary heap. //! //! Note: This version is folked from Rust standartd library, which only supports //! max heap. //! //! Insertion and popping the largest element have *O*(log(*n*)) time complexity. //! Checking the largest element is *O*(1). Converting a vector to a binary heap //! can be done in-place, and has *O*(*n*) complexity. A binary heap can also be //! converted to a sorted vector in-place, allowing it to be used for an *O*(*n* * log(*n*)) //! in-place heapsort. //! //! # Examples //! //! This is a larger example that implements [Dijkstra's algorithm][dijkstra] //! to solve the [shortest path problem][sssp] on a [directed graph][dir_graph]. //! It shows how to use [`BinaryHeap`] with custom types. //! //! [dijkstra]: https://en.wikipedia.org/wiki/Dijkstra%27s_algorithm //! [sssp]: https://en.wikipedia.org/wiki/Shortest_path_problem //! 
[dir_graph]: https://en.wikipedia.org/wiki/Directed_graph //! //! ``` //! use std::cmp::Ordering; //! use binary_heap_plus::BinaryHeap; //! //! #[derive(Copy, Clone, Eq, PartialEq)] //! struct State { //! cost: usize, //! position: usize, //! } //! //! // The priority queue depends on `Ord`. //! // Explicitly implement the trait so the queue becomes a min-heap //! // instead of a max-heap. //! impl Ord for State { //! fn cmp(&self, other: &Self) -> Ordering { //! // Notice that the we flip the ordering on costs. //! // In case of a tie we compare positions - this step is necessary //! // to make implementations of `PartialEq` and `Ord` consistent. //! other.cost.cmp(&self.cost) //! .then_with(|| self.position.cmp(&other.position)) //! } //! } //! //! // `PartialOrd` needs to be implemented as well. //! impl PartialOrd for State { //! fn partial_cmp(&self, other: &Self) -> Option { //! Some(self.cmp(other)) //! } //! } //! //! // Each node is represented as a `usize`, for a shorter implementation. //! struct Edge { //! node: usize, //! cost: usize, //! } //! //! // Dijkstra's shortest path algorithm. //! //! // Start at `start` and use `dist` to track the current shortest distance //! // to each node. This implementation isn't memory-efficient as it may leave duplicate //! // nodes in the queue. It also uses `usize::MAX` as a sentinel value, //! // for a simpler implementation. //! fn shortest_path(adj_list: &Vec>, start: usize, goal: usize) -> Option { //! // dist[node] = current shortest distance from `start` to `node` //! let mut dist: Vec<_> = (0..adj_list.len()).map(|_| usize::MAX).collect(); //! //! let mut heap = BinaryHeap::new(); //! //! // We're at `start`, with a zero cost //! dist[start] = 0; //! heap.push(State { cost: 0, position: start }); //! //! // Examine the frontier with lower cost nodes first (min-heap) //! while let Some(State { cost, position }) = heap.pop() { //! // Alternatively we could have continued to find all shortest paths //! 
if position == goal { return Some(cost); } //! //! // Important as we may have already found a better way //! if cost > dist[position] { continue; } //! //! // For each node we can reach, see if we can find a way with //! // a lower cost going through this node //! for edge in &adj_list[position] { //! let next = State { cost: cost + edge.cost, position: edge.node }; //! //! // If so, add it to the frontier and continue //! if next.cost < dist[next.position] { //! heap.push(next); //! // Relaxation, we have now found a better way //! dist[next.position] = next.cost; //! } //! } //! } //! //! // Goal not reachable //! None //! } //! //! fn main() { //! // This is the directed graph we're going to use. //! // The node numbers correspond to the different states, //! // and the edge weights symbolize the cost of moving //! // from one node to another. //! // Note that the edges are one-way. //! // //! // 7 //! // +-----------------+ //! // | | //! // v 1 2 | 2 //! // 0 -----> 1 -----> 3 ---> 4 //! // | ^ ^ ^ //! // | | 1 | | //! // | | | 3 | 1 //! // +------> 2 -------+ | //! // 10 | | //! // +---------------+ //! // //! // The graph is represented as an adjacency list where each index, //! // corresponding to a node value, has a list of outgoing edges. //! // Chosen for its efficiency. //! let graph = vec![ //! // Node 0 //! vec![Edge { node: 2, cost: 10 }, //! Edge { node: 1, cost: 1 }], //! // Node 1 //! vec![Edge { node: 3, cost: 2 }], //! // Node 2 //! vec![Edge { node: 1, cost: 1 }, //! Edge { node: 3, cost: 3 }, //! Edge { node: 4, cost: 1 }], //! // Node 3 //! vec![Edge { node: 0, cost: 7 }, //! Edge { node: 4, cost: 2 }], //! // Node 4 //! vec![]]; //! //! assert_eq!(shortest_path(&graph, 0, 1), Some(1)); //! assert_eq!(shortest_path(&graph, 0, 3), Some(3)); //! assert_eq!(shortest_path(&graph, 3, 0), Some(7)); //! assert_eq!(shortest_path(&graph, 0, 4), Some(5)); //! assert_eq!(shortest_path(&graph, 4, 0), None); //! } //! 
``` #![deny(unsafe_op_in_unsafe_fn)] #![allow(clippy::needless_doctest_main)] #![allow(missing_docs)] // #![stable(feature = "rust1", since = "1.0.0")] // use core::ops::{Deref, DerefMut, Place, Placer, InPlace}; // use core::iter::{FromIterator, FusedIterator}; use std::cmp::Ordering; use std::iter::FromIterator; use std::slice; // use std::iter::FusedIterator; // use std::vec::Drain; use compare::Compare; use core::fmt; use core::mem::{swap, ManuallyDrop}; use core::ptr; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; use std::ops::Deref; use std::ops::DerefMut; use std::vec; // use slice; // use vec::{self, Vec}; // use super::SpecExtend; /// A priority queue implemented with a binary heap. /// /// This will be a max-heap. /// /// It is a logic error for an item to be modified in such a way that the /// item's ordering relative to any other item, as determined by the [`Ord`] /// trait, changes while it is in the heap. This is normally only possible /// through [`Cell`], [`RefCell`], global state, I/O, or unsafe code. The /// behavior resulting from such a logic error is not specified (it /// could include panics, incorrect results, aborts, memory leaks, or /// non-termination) but will not be undefined behavior. /// /// # Examples /// /// ``` /// use binary_heap_plus::BinaryHeap; /// /// // Type inference lets us omit an explicit type signature (which /// // would be `BinaryHeap` in this example). /// let mut heap = BinaryHeap::new(); /// /// // We can use peek to look at the next item in the heap. In this case, /// // there's no items in there yet so we get None. /// assert_eq!(heap.peek(), None); /// /// // Let's add some scores... /// heap.push(1); /// heap.push(5); /// heap.push(2); /// /// // Now peek shows the most important item in the heap. /// assert_eq!(heap.peek(), Some(&5)); /// /// // We can check the length of a heap. 
/// assert_eq!(heap.len(), 3); /// /// // We can iterate over the items in the heap, although they are returned in /// // a random order. /// for x in &heap { /// println!("{}", x); /// } /// /// // If we instead pop these scores, they should come back in order. /// assert_eq!(heap.pop(), Some(5)); /// assert_eq!(heap.pop(), Some(2)); /// assert_eq!(heap.pop(), Some(1)); /// assert_eq!(heap.pop(), None); /// /// // We can clear the heap of any remaining items. /// heap.clear(); /// /// // The heap should now be empty. /// assert!(heap.is_empty()) /// ``` /// /// A `BinaryHeap` with a known list of items can be initialized from an array: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// /// // This will create a max-heap. /// let heap = BinaryHeap::from([1, 5, 2]); /// ``` /// /// ## Min-heap /// /// `BinaryHeap` can also act as a min-heap without requiring [`Reverse`] or a custom [`Ord`] /// implementation. /// /// ``` /// use binary_heap_plus::BinaryHeap; /// /// let mut heap = BinaryHeap::new_min(); /// /// // There is no need to wrap values in `Reverse` /// heap.push(1); /// heap.push(5); /// heap.push(2); /// /// // If we pop these scores now, they should come back in the reverse order. /// assert_eq!(heap.pop(), Some(1)); /// assert_eq!(heap.pop(), Some(2)); /// assert_eq!(heap.pop(), Some(5)); /// assert_eq!(heap.pop(), None); /// ``` /// /// # Time complexity /// /// | [push] | [pop] | [peek]/[peek\_mut] | /// |---------|---------------|--------------------| /// | *O*(1)~ | *O*(log(*n*)) | *O*(1) | /// /// The value for `push` is an expected cost; the method documentation gives a /// more detailed analysis. 
/// /// [`Reverse`]: https://doc.rust-lang.org/stable/core/cmp/struct.Reverse.html /// [`Ord`]: https://doc.rust-lang.org/stable/core/cmp/trait.Ord.html /// [`Cell`]: https://doc.rust-lang.org/stable/core/cell/struct.Cell.html /// [`RefCell`]: https://doc.rust-lang.org/stable/core/cell/struct.RefCell.html /// [push]: BinaryHeap::push /// [pop]: BinaryHeap::pop /// [peek]: BinaryHeap::peek /// [peek\_mut]: BinaryHeap::peek_mut // #[stable(feature = "rust1", since = "1.0.0")] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct BinaryHeap { data: Vec, cmp: C, } /// For `T` that implements `Ord`, you can use this struct to quickly /// set up a max heap. #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Clone, Copy, Default, PartialEq, Eq, Debug)] pub struct MaxComparator; impl Compare for MaxComparator { fn compare(&self, a: &T, b: &T) -> Ordering { a.cmp(b) } } /// For `T` that implements `Ord`, you can use this struct to quickly /// set up a min heap. #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Clone, Copy, Default, PartialEq, Eq, Debug)] pub struct MinComparator; impl Compare for MinComparator { fn compare(&self, a: &T, b: &T) -> Ordering { b.cmp(a) } } /// The comparator defined by closure #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Clone, Copy, Default, PartialEq, Eq, Debug)] pub struct FnComparator(pub F); impl Compare for FnComparator where F: Fn(&T, &T) -> Ordering, { fn compare(&self, a: &T, b: &T) -> Ordering { self.0(a, b) } } /// The comparator ordered by key #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Clone, Copy, Default, PartialEq, Eq, Debug)] pub struct KeyComparator(pub F); impl Compare for KeyComparator where F: Fn(&T) -> K, { fn compare(&self, a: &T, b: &T) -> Ordering { self.0(a).cmp(&self.0(b)) } } /// Structure wrapping a mutable reference to the greatest item on a /// `BinaryHeap`. 
/// /// This `struct` is created by the [`peek_mut`] method on [`BinaryHeap`]. See /// its documentation for more. /// /// [`peek_mut`]: BinaryHeap::peek_mut // #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")] pub struct PeekMut<'a, T: 'a, C: 'a + Compare> { heap: &'a mut BinaryHeap, sift: bool, } // #[stable(feature = "collection_debug", since = "1.17.0")] impl> fmt::Debug for PeekMut<'_, T, C> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("PeekMut").field(&self.heap.data[0]).finish() } } // #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")] impl> Drop for PeekMut<'_, T, C> { fn drop(&mut self) { if self.sift { // SAFETY: PeekMut is only instantiated for non-empty heaps. unsafe { self.heap.sift_down(0) }; } } } // #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")] impl> Deref for PeekMut<'_, T, C> { type Target = T; fn deref(&self) -> &T { debug_assert!(!self.heap.is_empty()); // SAFE: PeekMut is only instantiated for non-empty heaps unsafe { self.heap.data.get_unchecked(0) } } } // #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")] impl> DerefMut for PeekMut<'_, T, C> { fn deref_mut(&mut self) -> &mut T { debug_assert!(!self.heap.is_empty()); self.sift = true; // SAFE: PeekMut is only instantiated for non-empty heaps unsafe { self.heap.data.get_unchecked_mut(0) } } } impl<'a, T, C: Compare> PeekMut<'a, T, C> { /// Removes the peeked value from the heap and returns it. 
// #[stable(feature = "binary_heap_peek_mut_pop", since = "1.18.0")] pub fn pop(mut this: PeekMut<'a, T, C>) -> T { let value = this.heap.pop().unwrap(); this.sift = false; value } } // #[stable(feature = "rust1", since = "1.0.0")] impl Clone for BinaryHeap { fn clone(&self) -> Self { BinaryHeap { data: self.data.clone(), cmp: self.cmp.clone(), } } fn clone_from(&mut self, source: &Self) { self.data.clone_from(&source.data); } } // #[stable(feature = "rust1", since = "1.0.0")] impl Default for BinaryHeap { /// Creates an empty `BinaryHeap`. #[inline] fn default() -> BinaryHeap { BinaryHeap::new() } } // #[stable(feature = "binaryheap_debug", since = "1.4.0")] impl fmt::Debug for BinaryHeap { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.iter()).finish() } } impl + Default> BinaryHeap { /// Generic constructor for `BinaryHeap` from [`Vec`]. /// /// Because `BinaryHeap` stores the elements in its internal `Vec`, /// it's natural to construct it from `Vec`. /// /// [`Vec`]: https://doc.rust-lang.org/stable/std/vec/struct.Vec.html pub fn from_vec(vec: Vec) -> Self { BinaryHeap::from_vec_cmp(vec, C::default()) } } impl> BinaryHeap { /// Generic constructor for `BinaryHeap` from [`Vec`] and comparator. /// /// Because `BinaryHeap` stores the elements in its internal `Vec`, /// it's natural to construct it from `Vec`. /// /// [`Vec`]: https://doc.rust-lang.org/stable/std/vec/struct.Vec.html pub fn from_vec_cmp(vec: Vec, cmp: C) -> Self { unsafe { BinaryHeap::from_vec_cmp_raw(vec, cmp, true) } } /// Generic constructor for `BinaryHeap` from [`Vec`] and comparator. /// /// Because `BinaryHeap` stores the elements in its internal `Vec`, /// it's natural to construct it from `Vec`. /// /// # Safety /// User is responsible for providing valid `rebuild` value. 
/// /// [`Vec`]: https://doc.rust-lang.org/stable/std/vec/struct.Vec.html pub unsafe fn from_vec_cmp_raw(vec: Vec, cmp: C, rebuild: bool) -> Self { let mut heap = BinaryHeap { data: vec, cmp }; if rebuild && !heap.data.is_empty() { heap.rebuild(); } heap } } impl BinaryHeap { /// Creates an empty `BinaryHeap`. /// /// This default version will create a max-heap. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let mut heap = BinaryHeap::new(); /// heap.push(3); /// heap.push(1); /// heap.push(5); /// assert_eq!(heap.pop(), Some(5)); /// ``` // #[stable(feature = "rust1", since = "1.0.0")] #[must_use] pub fn new() -> Self { BinaryHeap::from_vec(vec![]) } /// Creates an empty `BinaryHeap` with a specific capacity. /// This preallocates enough memory for `capacity` elements, /// so that the `BinaryHeap` does not have to be reallocated /// until it contains at least that many values. /// /// This default version will create a max-heap. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let mut heap = BinaryHeap::with_capacity(10); /// assert_eq!(heap.capacity(), 10); /// heap.push(3); /// heap.push(1); /// heap.push(5); /// assert_eq!(heap.pop(), Some(5)); /// ``` // #[stable(feature = "rust1", since = "1.0.0")] #[must_use] pub fn with_capacity(capacity: usize) -> Self { BinaryHeap::from_vec(Vec::with_capacity(capacity)) } } impl BinaryHeap { /// Creates an empty `BinaryHeap`. /// /// The `_min()` version will create a min-heap. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let mut heap = BinaryHeap::new_min(); /// heap.push(3); /// heap.push(1); /// heap.push(5); /// assert_eq!(heap.pop(), Some(1)); /// ``` #[must_use] pub fn new_min() -> Self { BinaryHeap::from_vec(vec![]) } /// Creates an empty `BinaryHeap` with a specific capacity. 
/// This preallocates enough memory for `capacity` elements, /// so that the `BinaryHeap` does not have to be reallocated /// until it contains at least that many values. /// /// The `_min()` version will create a min-heap. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let mut heap = BinaryHeap::with_capacity_min(10); /// assert_eq!(heap.capacity(), 10); /// heap.push(3); /// heap.push(1); /// heap.push(5); /// assert_eq!(heap.pop(), Some(1)); /// ``` #[must_use] pub fn with_capacity_min(capacity: usize) -> Self { BinaryHeap::from_vec(Vec::with_capacity(capacity)) } } impl BinaryHeap> where F: Fn(&T, &T) -> Ordering, { /// Creates an empty `BinaryHeap`. /// /// The `_by()` version will create a heap ordered by given closure. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let mut heap = BinaryHeap::new_by(|a: &i32, b: &i32| b.cmp(a)); /// heap.push(3); /// heap.push(1); /// heap.push(5); /// assert_eq!(heap.pop(), Some(1)); /// ``` #[must_use] pub fn new_by(f: F) -> Self { BinaryHeap::from_vec_cmp(vec![], FnComparator(f)) } /// Creates an empty `BinaryHeap` with a specific capacity. /// This preallocates enough memory for `capacity` elements, /// so that the `BinaryHeap` does not have to be reallocated /// until it contains at least that many values. /// /// The `_by()` version will create a heap ordered by given closure. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let mut heap = BinaryHeap::with_capacity_by(10, |a: &i32, b: &i32| b.cmp(a)); /// assert_eq!(heap.capacity(), 10); /// heap.push(3); /// heap.push(1); /// heap.push(5); /// assert_eq!(heap.pop(), Some(1)); /// ``` #[must_use] pub fn with_capacity_by(capacity: usize, f: F) -> Self { BinaryHeap::from_vec_cmp(Vec::with_capacity(capacity), FnComparator(f)) } } impl BinaryHeap> where F: Fn(&T) -> K, { /// Creates an empty `BinaryHeap`. 
/// /// The `_by_key()` version will create a heap ordered by key converted by given closure. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let mut heap = BinaryHeap::new_by_key(|a: &i32| a % 4); /// heap.push(3); /// heap.push(1); /// heap.push(5); /// assert_eq!(heap.pop(), Some(3)); /// ``` #[must_use] pub fn new_by_key(f: F) -> Self { BinaryHeap::from_vec_cmp(vec![], KeyComparator(f)) } /// Creates an empty `BinaryHeap` with a specific capacity. /// This preallocates enough memory for `capacity` elements, /// so that the `BinaryHeap` does not have to be reallocated /// until it contains at least that many values. /// /// The `_by_key()` version will create a heap ordered by key coverted by given closure. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let mut heap = BinaryHeap::with_capacity_by_key(10, |a: &i32| a % 4); /// assert_eq!(heap.capacity(), 10); /// heap.push(3); /// heap.push(1); /// heap.push(5); /// assert_eq!(heap.pop(), Some(3)); /// ``` #[must_use] pub fn with_capacity_by_key(capacity: usize, f: F) -> Self { BinaryHeap::from_vec_cmp(Vec::with_capacity(capacity), KeyComparator(f)) } } impl> BinaryHeap { /// Replaces the comparator of binary heap. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// use compare::Compare; /// use std::cmp::Ordering; /// /// struct Comparator { /// ascending: bool /// } /// /// impl Compare for Comparator { /// fn compare(&self,l: &i32,r: &i32) -> Ordering { /// if self.ascending { /// r.cmp(l) /// } else { /// l.cmp(r) /// } /// } /// } /// /// // construct a heap in ascending order. 
/// let mut heap = BinaryHeap::from_vec_cmp(vec![3, 1, 5], Comparator { ascending: true }); /// /// // replace the comparor /// heap.replace_cmp(Comparator { ascending: false }); /// assert_eq!(heap.into_iter_sorted().collect::>(), [5, 3, 1]); /// ``` #[inline] pub fn replace_cmp(&mut self, cmp: C) { unsafe { self.replace_cmp_raw(cmp, true); } } /// Replaces the comparator of binary heap. /// /// # Safety /// User is responsible for providing valid `rebuild` value. pub unsafe fn replace_cmp_raw(&mut self, cmp: C, rebuild: bool) { self.cmp = cmp; if rebuild && !self.data.is_empty() { self.rebuild(); } } /// Returns a mutable reference to the greatest item in the binary heap, or /// `None` if it is empty. /// /// Note: If the `PeekMut` value is leaked, the heap may be in an /// inconsistent state. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let mut heap = BinaryHeap::new(); /// assert!(heap.peek_mut().is_none()); /// /// heap.push(1); /// heap.push(5); /// heap.push(2); /// { /// let mut val = heap.peek_mut().unwrap(); /// *val = 0; /// } /// assert_eq!(heap.peek(), Some(&2)); /// ``` /// /// # Time complexity /// /// If the item is modified then the worst case time complexity is *O*(log(*n*)), /// otherwise it's *O*(1). // #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")] pub fn peek_mut(&mut self) -> Option> { if self.is_empty() { None } else { Some(PeekMut { heap: self, sift: false, }) } } /// Removes the greatest item from the binary heap and returns it, or `None` if it /// is empty. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let mut heap = BinaryHeap::from([1, 3]); /// /// assert_eq!(heap.pop(), Some(3)); /// assert_eq!(heap.pop(), Some(1)); /// assert_eq!(heap.pop(), None); /// ``` /// /// # Time complexity /// /// The worst case cost of `pop` on a heap containing *n* elements is *O*(log(*n*)). 
// #[stable(feature = "rust1", since = "1.0.0")] pub fn pop(&mut self) -> Option { self.data.pop().map(|mut item| { if !self.is_empty() { swap(&mut item, &mut self.data[0]); // SAFETY: !self.is_empty() means that self.len() > 0 unsafe { self.sift_down_to_bottom(0) }; } item }) } /// Pushes an item onto the binary heap. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let mut heap = BinaryHeap::new(); /// heap.push(3); /// heap.push(5); /// heap.push(1); /// /// assert_eq!(heap.len(), 3); /// assert_eq!(heap.peek(), Some(&5)); /// ``` /// /// # Time complexity /// /// The expected cost of `push`, averaged over every possible ordering of /// the elements being pushed, and over a sufficiently large number of /// pushes, is *O*(1). This is the most meaningful cost metric when pushing /// elements that are *not* already in any sorted pattern. /// /// The time complexity degrades if elements are pushed in predominantly /// ascending order. In the worst case, elements are pushed in ascending /// sorted order and the amortized cost per push is *O*(log(*n*)) against a heap /// containing *n* elements. /// /// The worst case cost of a *single* call to `push` is *O*(*n*). The worst case /// occurs when capacity is exhausted and needs a resize. The resize cost /// has been amortized in the previous figures. // #[stable(feature = "rust1", since = "1.0.0")] pub fn push(&mut self, item: T) { let old_len = self.len(); self.data.push(item); // SAFETY: Since we pushed a new item it means that // old_len = self.len() - 1 < self.len() unsafe { self.sift_up(0, old_len) }; } /// Consumes the `BinaryHeap` and returns a vector in sorted /// (ascending) order. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// /// let mut heap = BinaryHeap::from([1, 2, 4, 5, 7]); /// heap.push(6); /// heap.push(3); /// /// let vec = heap.into_sorted_vec(); /// assert_eq!(vec, [1, 2, 3, 4, 5, 6, 7]); /// ``` #[must_use = "`self` will be dropped if the result is not used"] // #[stable(feature = "binary_heap_extras_15", since = "1.5.0")] pub fn into_sorted_vec(mut self) -> Vec { let mut end = self.len(); while end > 1 { end -= 1; // SAFETY: `end` goes from `self.len() - 1` to 1 (both included), // so it's always a valid index to access. // It is safe to access index 0 (i.e. `ptr`), because // 1 <= end < self.len(), which means self.len() >= 2. unsafe { let ptr = self.data.as_mut_ptr(); ptr::swap(ptr, ptr.add(end)); } // SAFETY: `end` goes from `self.len() - 1` to 1 (both included) so: // 0 < 1 <= end <= self.len() - 1 < self.len() // Which means 0 < end and end < self.len(). unsafe { self.sift_down_range(0, end) }; } self.into_vec() } // The implementations of sift_up and sift_down use unsafe blocks in // order to move an element out of the vector (leaving behind a // hole), shift along the others and move the removed element back into the // vector at the final location of the hole. // The `Hole` type is used to represent this, and make sure // the hole is filled back at the end of its scope, even on panic. // Using a hole reduces the constant factor compared to using swaps, // which involves twice as many moves. /// # Safety /// /// The caller must guarantee that `pos < self.len()`. unsafe fn sift_up(&mut self, start: usize, pos: usize) -> usize { // Take out the value at `pos` and create a hole. // SAFETY: The caller guarantees that pos < self.len() let mut hole = unsafe { Hole::new(&mut self.data, pos) }; while hole.pos() > start { let parent = (hole.pos() - 1) / 2; // SAFETY: hole.pos() > start >= 0, which means hole.pos() > 0 // and so hole.pos() - 1 can't underflow. 
// This guarantees that parent < hole.pos() so // it's a valid index and also != hole.pos(). if self .cmp .compares_le(hole.element(), unsafe { hole.get(parent) }) { break; } // SAFETY: Same as above unsafe { hole.move_to(parent) }; } hole.pos() } /// Take an element at `pos` and move it down the heap, /// while its children are larger. /// /// # Safety /// /// The caller must guarantee that `pos < end <= self.len()`. unsafe fn sift_down_range(&mut self, pos: usize, end: usize) { // SAFETY: The caller guarantees that pos < end <= self.len(). let mut hole = unsafe { Hole::new(&mut self.data, pos) }; let mut child = 2 * hole.pos() + 1; // Loop invariant: child == 2 * hole.pos() + 1. while child <= end.saturating_sub(2) { // compare with the greater of the two children // SAFETY: child < end - 1 < self.len() and // child + 1 < end <= self.len(), so they're valid indexes. // child == 2 * hole.pos() + 1 != hole.pos() and // child + 1 == 2 * hole.pos() + 2 != hole.pos(). // FIXME: 2 * hole.pos() + 1 or 2 * hole.pos() + 2 could overflow // if T is a ZST child += unsafe { self.cmp.compares_le(hole.get(child), hole.get(child + 1)) } as usize; // if we are already in order, stop. // SAFETY: child is now either the old child or the old child+1 // We already proven that both are < self.len() and != hole.pos() if self .cmp .compares_ge(hole.element(), unsafe { hole.get(child) }) { return; } // SAFETY: same as above. unsafe { hole.move_to(child) }; child = 2 * hole.pos() + 1; } // SAFETY: && short circuit, which means that in the // second condition it's already true that child == end - 1 < self.len(). if child == end - 1 && self .cmp .compares_lt(hole.element(), unsafe { hole.get(child) }) { // SAFETY: child is already proven to be a valid index and // child == 2 * hole.pos() + 1 != hole.pos(). unsafe { hole.move_to(child) }; } } /// # Safety /// /// The caller must guarantee that `pos < self.len()`. 
unsafe fn sift_down(&mut self, pos: usize) { let len = self.len(); // SAFETY: pos < len is guaranteed by the caller and // obviously len = self.len() <= self.len(). unsafe { self.sift_down_range(pos, len) }; } /// Take an element at `pos` and move it all the way down the heap, /// then sift it up to its position. /// /// Note: This is faster when the element is known to be large / should /// be closer to the bottom. /// /// # Safety /// /// The caller must guarantee that `pos < self.len()`. unsafe fn sift_down_to_bottom(&mut self, mut pos: usize) { let end = self.len(); let start = pos; // SAFETY: The caller guarantees that pos < self.len(). let mut hole = unsafe { Hole::new(&mut self.data, pos) }; let mut child = 2 * hole.pos() + 1; // Loop invariant: child == 2 * hole.pos() + 1. while child <= end.saturating_sub(2) { // SAFETY: child < end - 1 < self.len() and // child + 1 < end <= self.len(), so they're valid indexes. // child == 2 * hole.pos() + 1 != hole.pos() and // child + 1 == 2 * hole.pos() + 2 != hole.pos(). // FIXME: 2 * hole.pos() + 1 or 2 * hole.pos() + 2 could overflow // if T is a ZST child += unsafe { self.cmp.compares_le(hole.get(child), hole.get(child + 1)) } as usize; // SAFETY: Same as above unsafe { hole.move_to(child) }; child = 2 * hole.pos() + 1; } if child == end - 1 { // SAFETY: child == end - 1 < self.len(), so it's a valid index // and child == 2 * hole.pos() + 1 != hole.pos(). unsafe { hole.move_to(child) }; } pos = hole.pos(); drop(hole); // SAFETY: pos is the position in the hole and was already proven // to be a valid index. unsafe { self.sift_up(start, pos) }; } /// Rebuild assuming data[0..start] is still a proper heap. 
fn rebuild_tail(&mut self, start: usize) { if start == self.len() { return; } let tail_len = self.len() - start; #[inline(always)] fn log2_fast(x: usize) -> usize { (usize::BITS - x.leading_zeros() - 1) as usize } // `rebuild` takes O(self.len()) operations // and about 2 * self.len() comparisons in the worst case // while repeating `sift_up` takes O(tail_len * log(start)) operations // and about 1 * tail_len * log_2(start) comparisons in the worst case, // assuming start >= tail_len. For larger heaps, the crossover point // no longer follows this reasoning and was determined empirically. let better_to_rebuild = if start < tail_len { true } else if self.len() <= 2048 { 2 * self.len() < tail_len * log2_fast(start) } else { 2 * self.len() < tail_len * 11 }; if better_to_rebuild { self.rebuild(); } else { for i in start..self.len() { // SAFETY: The index `i` is always less than self.len(). unsafe { self.sift_up(0, i) }; } } } fn rebuild(&mut self) { let mut n = self.len() / 2; while n > 0 { n -= 1; // SAFETY: n starts from self.len() / 2 and goes down to 0. // The only case when !(n < self.len()) is if // self.len() == 0, but it's ruled out by the loop condition. unsafe { self.sift_down(n) }; } } /// Moves all the elements of `other` into `self`, leaving `other` empty. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// /// let mut a = BinaryHeap::from([-10, 1, 2, 3, 3]); /// let mut b = BinaryHeap::from([-20, 5, 43]); /// /// a.append(&mut b); /// /// assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]); /// assert!(b.is_empty()); /// ``` // #[stable(feature = "binary_heap_append", since = "1.11.0")] pub fn append(&mut self, other: &mut Self) { if self.len() < other.len() { swap(self, other); } let start = self.data.len(); self.data.append(&mut other.data); self.rebuild_tail(start); } } impl BinaryHeap { /// Returns an iterator visiting all values in the underlying vector, in /// arbitrary order. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let heap = BinaryHeap::from([1, 2, 3, 4]); /// /// // Print 1, 2, 3, 4 in arbitrary order /// for x in heap.iter() { /// println!("{}", x); /// } /// ``` // #[stable(feature = "rust1", since = "1.0.0")] pub fn iter(&self) -> Iter<'_, T> { Iter { iter: self.data.iter(), } } /// Returns an iterator which retrieves elements in heap order. /// This method consumes the original heap. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let heap = BinaryHeap::from([1, 2, 3, 4, 5]); /// /// assert_eq!(heap.into_iter_sorted().take(2).collect::>(), [5, 4]); /// ``` // #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")] pub fn into_iter_sorted(self) -> IntoIterSorted { IntoIterSorted { inner: self } } /// Returns the greatest item in the binary heap, or `None` if it is empty. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let mut heap = BinaryHeap::new(); /// assert_eq!(heap.peek(), None); /// /// heap.push(1); /// heap.push(5); /// heap.push(2); /// assert_eq!(heap.peek(), Some(&5)); /// /// ``` /// /// # Time complexity /// /// Cost is *O*(1) in the worst case. #[must_use] // #[stable(feature = "rust1", since = "1.0.0")] pub fn peek(&self) -> Option<&T> { self.data.get(0) } /// Returns the number of elements the binary heap can hold without reallocating. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let mut heap = BinaryHeap::with_capacity(100); /// assert!(heap.capacity() >= 100); /// heap.push(4); /// ``` #[must_use] // #[stable(feature = "rust1", since = "1.0.0")] pub fn capacity(&self) -> usize { self.data.capacity() } /// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the /// given `BinaryHeap`. Does nothing if the capacity is already sufficient. 
/// /// Note that the allocator may give the collection more space than it requests. Therefore /// capacity can not be relied upon to be precisely minimal. Prefer [`reserve`] if future /// insertions are expected. /// /// # Panics /// /// Panics if the new capacity overflows `usize`. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let mut heap = BinaryHeap::new(); /// heap.reserve_exact(100); /// assert!(heap.capacity() >= 100); /// heap.push(4); /// ``` /// /// [`reserve`]: BinaryHeap::reserve // #[stable(feature = "rust1", since = "1.0.0")] pub fn reserve_exact(&mut self, additional: usize) { self.data.reserve_exact(additional); } /// Reserves capacity for at least `additional` more elements to be inserted in the /// `BinaryHeap`. The collection may reserve more space to avoid frequent reallocations. /// /// # Panics /// /// Panics if the new capacity overflows `usize`. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let mut heap = BinaryHeap::new(); /// heap.reserve(100); /// assert!(heap.capacity() >= 100); /// heap.push(4); /// ``` // #[stable(feature = "rust1", since = "1.0.0")] pub fn reserve(&mut self, additional: usize) { self.data.reserve(additional); } /// Discards as much additional capacity as possible. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let mut heap: BinaryHeap = BinaryHeap::with_capacity(100); /// /// assert!(heap.capacity() >= 100); /// heap.shrink_to_fit(); /// assert!(heap.capacity() == 0); /// ``` // #[stable(feature = "rust1", since = "1.0.0")] pub fn shrink_to_fit(&mut self) { self.data.shrink_to_fit(); } /// Discards capacity with a lower bound. /// /// The capacity will remain at least as large as both the length /// and the supplied value. /// /// If the current capacity is less than the lower limit, this is a no-op. 
/// /// # Examples /// /// ``` /// use std::collections::BinaryHeap; /// let mut heap: BinaryHeap = BinaryHeap::with_capacity(100); /// /// assert!(heap.capacity() >= 100); /// heap.shrink_to(10); /// assert!(heap.capacity() >= 10); /// ``` #[inline] pub fn shrink_to(&mut self, min_capacity: usize) { self.data.shrink_to(min_capacity) } /// Consumes the `BinaryHeap` and returns the underlying vector /// in arbitrary order. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let heap = BinaryHeap::from([1, 2, 3, 4, 5, 6, 7]); /// let vec = heap.into_vec(); /// /// // Will print in some order /// for x in vec { /// println!("{}", x); /// } /// ``` #[must_use = "`self` will be dropped if the result is not used"] // #[stable(feature = "binary_heap_extras_15", since = "1.5.0")] pub fn into_vec(self) -> Vec { self.into() } /// Returns the length of the binary heap. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let heap = BinaryHeap::from([1, 3]); /// /// assert_eq!(heap.len(), 2); /// ``` #[must_use] // #[stable(feature = "rust1", since = "1.0.0")] pub fn len(&self) -> usize { self.data.len() } /// Checks if the binary heap is empty. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let mut heap = BinaryHeap::new(); /// /// assert!(heap.is_empty()); /// /// heap.push(3); /// heap.push(5); /// heap.push(1); /// /// assert!(!heap.is_empty()); /// ``` #[must_use] // #[stable(feature = "rust1", since = "1.0.0")] pub fn is_empty(&self) -> bool { self.len() == 0 } /// Clears the binary heap, returning an iterator over the removed elements /// in arbitrary order. If the iterator is dropped before being fully /// consumed, it drops the remaining elements in arbitrary order. /// /// The returned iterator keeps a mutable borrow on the heap to optimize /// its implementation. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let mut heap = BinaryHeap::from([1, 3]); /// /// assert!(!heap.is_empty()); /// /// for x in heap.drain() { /// println!("{}", x); /// } /// /// assert!(heap.is_empty()); /// ``` #[inline] // #[stable(feature = "drain", since = "1.6.0")] pub fn drain(&mut self) -> Drain<'_, T> { Drain { iter: self.data.drain(..), } } /// Drops all items from the binary heap. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let mut heap = BinaryHeap::from([1, 3]); /// /// assert!(!heap.is_empty()); /// /// heap.clear(); /// /// assert!(heap.is_empty()); /// ``` // #[stable(feature = "rust1", since = "1.0.0")] pub fn clear(&mut self) { self.drain(); } } /// Hole represents a hole in a slice i.e., an index without valid value /// (because it was moved from or duplicated). /// In drop, `Hole` will restore the slice by filling the hole /// position with the value that was originally removed. struct Hole<'a, T: 'a> { data: &'a mut [T], elt: ManuallyDrop, pos: usize, } impl<'a, T> Hole<'a, T> { /// Create a new `Hole` at index `pos`. /// /// Unsafe because pos must be within the data slice. #[inline] unsafe fn new(data: &'a mut [T], pos: usize) -> Self { debug_assert!(pos < data.len()); // SAFE: pos should be inside the slice let elt = unsafe { ptr::read(data.get_unchecked(pos)) }; Hole { data, elt: ManuallyDrop::new(elt), pos, } } #[inline] fn pos(&self) -> usize { self.pos } /// Returns a reference to the element removed. #[inline] fn element(&self) -> &T { &self.elt } /// Returns a reference to the element at `index`. /// /// Unsafe because index must be within the data slice and not equal to pos. 
#[inline] unsafe fn get(&self, index: usize) -> &T { debug_assert!(index != self.pos); debug_assert!(index < self.data.len()); unsafe { self.data.get_unchecked(index) } } /// Move hole to new location /// /// Unsafe because index must be within the data slice and not equal to pos. #[inline] unsafe fn move_to(&mut self, index: usize) { debug_assert!(index != self.pos); debug_assert!(index < self.data.len()); unsafe { let ptr = self.data.as_mut_ptr(); let index_ptr: *const _ = ptr.add(index); let hole_ptr = ptr.add(self.pos); ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1); } self.pos = index; } } impl Drop for Hole<'_, T> { #[inline] fn drop(&mut self) { // fill the hole again unsafe { let pos = self.pos; ptr::copy_nonoverlapping(&*self.elt, self.data.get_unchecked_mut(pos), 1); } } } /// An iterator over the elements of a `BinaryHeap`. /// /// This `struct` is created by [`BinaryHeap::iter()`]. See its /// documentation for more. #[must_use = "iterators are lazy and do nothing unless consumed"] // #[stable(feature = "rust1", since = "1.0.0")] pub struct Iter<'a, T: 'a> { iter: slice::Iter<'a, T>, } // #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for Iter<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("Iter").field(&self.iter.as_slice()).finish() } } // FIXME(#26925) Remove in favor of `#[derive(Clone)]` // #[stable(feature = "rust1", since = "1.0.0")] impl Clone for Iter<'_, T> { fn clone(&self) -> Self { Iter { iter: self.iter.clone(), } } } // #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Iterator for Iter<'a, T> { type Item = &'a T; #[inline] fn next(&mut self) -> Option<&'a T> { self.iter.next() } #[inline] fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } #[inline] fn last(self) -> Option<&'a T> { self.iter.last() } } // #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> DoubleEndedIterator for Iter<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a T> { 
self.iter.next_back() } } // #[stable(feature = "rust1", since = "1.0.0")] // impl<'a, T> ExactSizeIterator for Iter<'a, T> { // fn is_empty(&self) -> bool { // self.iter.is_empty() // } // } // #[stable(feature = "fused", since = "1.26.0")] // impl<'a, T> FusedIterator for Iter<'a, T> {} /// An owning iterator over the elements of a `BinaryHeap`. /// /// This `struct` is created by [`BinaryHeap::into_iter()`] /// (provided by the [`IntoIterator`] trait). See its documentation for more. /// /// [`IntoIterator`]: https://doc.rust-lang.org/stable/core/iter/trait.IntoIterator.html // #[stable(feature = "rust1", since = "1.0.0")] #[derive(Clone)] pub struct IntoIter { iter: vec::IntoIter, } // #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for IntoIter { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("IntoIter") .field(&self.iter.as_slice()) .finish() } } // #[stable(feature = "rust1", since = "1.0.0")] impl Iterator for IntoIter { type Item = T; #[inline] fn next(&mut self) -> Option { self.iter.next() } #[inline] fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } } // #[stable(feature = "rust1", since = "1.0.0")] impl DoubleEndedIterator for IntoIter { #[inline] fn next_back(&mut self) -> Option { self.iter.next_back() } } // #[stable(feature = "rust1", since = "1.0.0")] // impl ExactSizeIterator for IntoIter { // fn is_empty(&self) -> bool { // self.iter.is_empty() // } // } // #[stable(feature = "fused", since = "1.26.0")] // impl FusedIterator for IntoIter {} #[must_use = "iterators are lazy and do nothing unless consumed"] // #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")] #[derive(Clone, Debug)] pub struct IntoIterSorted { inner: BinaryHeap, } // #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")] impl> Iterator for IntoIterSorted { type Item = T; #[inline] fn next(&mut self) -> Option { self.inner.pop() } #[inline] fn size_hint(&self) -> (usize, Option) 
{ let exact = self.inner.len(); (exact, Some(exact)) } } /// A draining iterator over the elements of a `BinaryHeap`. /// /// This `struct` is created by [`BinaryHeap::drain()`]. See its /// documentation for more. // #[stable(feature = "drain", since = "1.6.0")] #[derive(Debug)] pub struct Drain<'a, T: 'a> { iter: vec::Drain<'a, T>, } // #[stable(feature = "drain", since = "1.6.0")] impl Iterator for Drain<'_, T> { type Item = T; #[inline] fn next(&mut self) -> Option { self.iter.next() } #[inline] fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } } // #[stable(feature = "drain", since = "1.6.0")] impl DoubleEndedIterator for Drain<'_, T> { #[inline] fn next_back(&mut self) -> Option { self.iter.next_back() } } // #[stable(feature = "drain", since = "1.6.0")] // impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> { // fn is_empty(&self) -> bool { // self.iter.is_empty() // } // } // #[stable(feature = "fused", since = "1.26.0")] // impl<'a, T: 'a> FusedIterator for Drain<'a, T> {} // #[stable(feature = "binary_heap_extras_15", since = "1.5.0")] impl From> for BinaryHeap { /// Converts a `Vec` into a `BinaryHeap`. /// /// This conversion happens in-place, and has *O*(*n*) time complexity. fn from(vec: Vec) -> Self { BinaryHeap::from_vec(vec) } } impl From<[T; N]> for BinaryHeap { /// ``` /// use binary_heap_plus::BinaryHeap; /// /// let mut h1 = BinaryHeap::from([1, 4, 2, 3]); /// let mut h2: BinaryHeap<_> = [1, 4, 2, 3].into(); /// while let Some((a, b)) = h1.pop().zip(h2.pop()) { /// assert_eq!(a, b); /// } /// ``` fn from(arr: [T; N]) -> Self { Self::from_iter(arr) } } impl From> for Vec { /// Converts a `BinaryHeap` into a `Vec`. /// /// This conversion requires no data movement or allocation, and has /// constant time complexity. 
fn from(heap: BinaryHeap) -> Vec { heap.data } } // #[stable(feature = "rust1", since = "1.0.0")] impl FromIterator for BinaryHeap { fn from_iter>(iter: I) -> Self { BinaryHeap::from(iter.into_iter().collect::>()) } } // #[stable(feature = "rust1", since = "1.0.0")] impl IntoIterator for BinaryHeap { type Item = T; type IntoIter = IntoIter; /// Creates a consuming iterator, that is, one that moves each value out of /// the binary heap in arbitrary order. The binary heap cannot be used /// after calling this. /// /// # Examples /// /// Basic usage: /// /// ``` /// use binary_heap_plus::BinaryHeap; /// let heap = BinaryHeap::from([1, 2, 3, 4]); /// /// // Print 1, 2, 3, 4 in arbitrary order /// for x in heap.into_iter() { /// // x has type i32, not &i32 /// println!("{}", x); /// } /// ``` fn into_iter(self) -> IntoIter { IntoIter { iter: self.data.into_iter(), } } } // #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T, C> IntoIterator for &'a BinaryHeap { type Item = &'a T; type IntoIter = Iter<'a, T>; fn into_iter(self) -> Iter<'a, T> { self.iter() } } // #[stable(feature = "rust1", since = "1.0.0")] impl> Extend for BinaryHeap { #[inline] fn extend>(&mut self, iter: I) { // >::spec_extend(self, iter); self.extend_desugared(iter); } } // impl> SpecExtend for BinaryHeap { // default fn spec_extend(&mut self, iter: I) { // self.extend_desugared(iter.into_iter()); // } // } // impl SpecExtend> for BinaryHeap { // fn spec_extend(&mut self, ref mut other: BinaryHeap) { // self.append(other); // } // } impl> BinaryHeap { fn extend_desugared>(&mut self, iter: I) { let iterator = iter.into_iter(); let (lower, _) = iterator.size_hint(); self.reserve(lower); iterator.for_each(move |elem| self.push(elem)); } } // #[stable(feature = "extend_ref", since = "1.2.0")] impl<'a, T: 'a + Copy, C: Compare> Extend<&'a T> for BinaryHeap { fn extend>(&mut self, iter: I) { self.extend(iter.into_iter().cloned()); } } // #[unstable(feature = "collection_placement", // reason = 
"placement protocol is subject to change", // issue = "30172")] // pub struct BinaryHeapPlace<'a, T: 'a> // where T: Clone { // heap: *mut BinaryHeap, // place: vec::PlaceBack<'a, T>, // } // #[unstable(feature = "collection_placement", // reason = "placement protocol is subject to change", // issue = "30172")] // impl<'a, T: Clone + Ord + fmt::Debug> fmt::Debug for BinaryHeapPlace<'a, T> { // fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // f.debug_tuple("BinaryHeapPlace") // .field(&self.place) // .finish() // } // } // #[unstable(feature = "collection_placement", // reason = "placement protocol is subject to change", // issue = "30172")] // impl<'a, T: 'a> Placer for &'a mut BinaryHeap // where T: Clone + Ord { // type Place = BinaryHeapPlace<'a, T>; // fn make_place(self) -> Self::Place { // let ptr = self as *mut BinaryHeap; // let place = Placer::make_place(self.data.place_back()); // BinaryHeapPlace { // heap: ptr, // place, // } // } // } // #[unstable(feature = "collection_placement", // reason = "placement protocol is subject to change", // issue = "30172")] // unsafe impl<'a, T> Place for BinaryHeapPlace<'a, T> // where T: Clone + Ord { // fn pointer(&mut self) -> *mut T { // self.place.pointer() // } // } // #[unstable(feature = "collection_placement", // reason = "placement protocol is subject to change", // issue = "30172")] // impl<'a, T> InPlace for BinaryHeapPlace<'a, T> // where T: Clone + Ord { // type Owner = &'a T; // unsafe fn finalize(self) -> &'a T { // self.place.finalize(); // let heap: &mut BinaryHeap = &mut *self.heap; // let len = heap.len(); // let i = heap.sift_up(0, len - 1); // heap.data.get_unchecked(i) // } // } binary-heap-plus-0.5.0/src/lib.rs000064400000000000000000000424710072674642500147510ustar 00000000000000//! This crate provides [`BinaryHeap`] which is backward-compatible with //! [`std::collections::BinaryHeap`]. //! //! Added features include: //! * Heaps other than max heap. //! * Optional [`serde`] feature. 
//!
//! [`BinaryHeap`]: struct.BinaryHeap.html
//! [`std::collections::BinaryHeap`]:
//! https://doc.rust-lang.org/stable/std/collections/struct.BinaryHeap.html
//! [`serde`]: https://docs.serde.rs/serde/
//!
//! # Quick start
//!
//! ## Max/Min Heap
//!
//! For max heap, [`BinaryHeap::from_vec()`] is the most versatile way to create a heap.
//!
//! ```rust
//! use binary_heap_plus::*;
//!
//! // max heap
//! let mut h: BinaryHeap<i32> = BinaryHeap::from_vec(vec![]);
//! // max heap with initial capacity
//! let mut h: BinaryHeap<i32> = BinaryHeap::from_vec(Vec::with_capacity(16));
//! // max heap from iterator
//! let mut h: BinaryHeap<i32> = BinaryHeap::from_vec((0..42).collect());
//! assert_eq!(h.pop(), Some(41));
//! ```
//!
//! Min heap is similar, but requires type annotation.
//!
//! ```rust
//! use binary_heap_plus::*;
//!
//! // min heap
//! let mut h: BinaryHeap<i32, MinComparator> = BinaryHeap::from_vec(vec![]);
//! // min heap with initial capacity
//! let mut h: BinaryHeap<i32, MinComparator> = BinaryHeap::from_vec(Vec::with_capacity(16));
//! // min heap from iterator
//! let mut h: BinaryHeap<i32, MinComparator> = BinaryHeap::from_vec((0..42).collect());
//! assert_eq!(h.pop(), Some(0));
//! ```
//!
//! [`BinaryHeap::from_vec()`]: struct.BinaryHeap.html#method.from_vec
//!
//! ## Custom Heap
//!
//! For custom heap, [`BinaryHeap::from_vec_cmp()`] works in a similar way to max/min heap. The
//! only difference is that you add the comparator closure with appropriate signature.
//!
//! ```rust
//! use binary_heap_plus::*;
//!
//! // custom heap: ordered by second value (_.1) of the tuples; min first
//! let mut h = BinaryHeap::from_vec_cmp(
//!     vec![(1, 5), (3, 2), (2, 3)],
//!     |a: &(i32, i32), b: &(i32, i32)| b.1.cmp(&a.1), // comparator closure here
//! );
//! assert_eq!(h.pop(), Some((3, 2)));
//! ```
//!
//! [`BinaryHeap::from_vec_cmp()`]: struct.BinaryHeap.html#method.from_vec_cmp
//!
//! # Constructors
//!
//! ## Generic methods to create different kind of heaps from initial `vec` data.
//!
//! * [`BinaryHeap::from_vec`]`(vec)`
//! * [`BinaryHeap::from_vec_cmp`]`(vec, cmp)`
//!
//! [`BinaryHeap::from_vec`]: struct.BinaryHeap.html#method.from_vec
//! [`BinaryHeap::from_vec_cmp`]: struct.BinaryHeap.html#method.from_vec_cmp
//!
//! ```
//! use binary_heap_plus::*;
//!
//! // max heap (default)
//! let mut heap: BinaryHeap<i32> = BinaryHeap::from_vec(vec![1, 5, 3]);
//! assert_eq!(heap.pop(), Some(5));
//!
//! // min heap
//! let mut heap: BinaryHeap<i32, MinComparator> = BinaryHeap::from_vec(vec![1, 5, 3]);
//! assert_eq!(heap.pop(), Some(1));
//!
//! // custom-sort heap
//! let mut heap = BinaryHeap::from_vec_cmp(vec![1, 5, 3], |a: &i32, b: &i32| b.cmp(a));
//! assert_eq!(heap.pop(), Some(1));
//!
//! // custom-key heap
//! let mut heap = BinaryHeap::from_vec_cmp(vec![6, 3, 1], KeyComparator(|k: &i32| k % 4));
//! assert_eq!(heap.pop(), Some(3));
//!
//! // TIP: How to reuse a comparator
//! let mod4_comparator = KeyComparator(|k: &_| k % 4);
//! let mut heap1 = BinaryHeap::from_vec_cmp(vec![6, 3, 1], mod4_comparator);
//! assert_eq!(heap1.pop(), Some(3));
//! let mut heap2 = BinaryHeap::from_vec_cmp(vec![2, 4, 1], mod4_comparator);
//! assert_eq!(heap2.pop(), Some(2));
//! ```
//!
//! ## Dedicated methods to create different kind of heaps
//!
//! * [`BinaryHeap::new()`] creates a max heap.
//! * [`BinaryHeap::new_min()`] creates a min heap.
//! * [`BinaryHeap::new_by()`] creates a heap sorted by the given closure.
//! * [`BinaryHeap::new_by_key()`] creates a heap sorted by the key generated by the given closure.
//!
//! [`BinaryHeap::new()`]: struct.BinaryHeap.html#method.new
//! [`BinaryHeap::new_min()`]: struct.BinaryHeap.html#method.new_min
//! [`BinaryHeap::new_by()`]: struct.BinaryHeap.html#method.new_by
//! [`BinaryHeap::new_by_key()`]: struct.BinaryHeap.html#method.new_by_key

mod binary_heap;

pub use crate::binary_heap::*;

/// An intermediate trait for specialization of `Extend`.
// #[doc(hidden)]
// trait SpecExtend<I: IntoIterator> {
//     /// Extends `self` with the contents of the given iterator.
//     fn spec_extend(&mut self, iter: I);
// }

#[cfg(test)]
mod from_liballoc {
    // The following tests are copied from liballoc/tests/binary_heap.rs
    use super::binary_heap::*;
    // use std::panic;
    // use std::collections::BinaryHeap;
    // use std::collections::binary_heap::{Drain, PeekMut};

    #[test]
    fn test_iterator() {
        let data = vec![5, 9, 3];
        let iterout = [9, 5, 3];
        let heap = BinaryHeap::from(data);
        let mut i = 0;
        for el in &heap {
            assert_eq!(*el, iterout[i]);
            i += 1;
        }
    }

    #[test]
    fn test_iterator_reverse() {
        let data = vec![5, 9, 3];
        let iterout = vec![3, 5, 9];
        let pq = BinaryHeap::from(data);

        let v: Vec<_> = pq.iter().rev().cloned().collect();
        assert_eq!(v, iterout);
    }

    #[test]
    fn test_move_iter() {
        let data = vec![5, 9, 3];
        let iterout = vec![9, 5, 3];
        let pq = BinaryHeap::from(data);

        let v: Vec<_> = pq.into_iter().collect();
        assert_eq!(v, iterout);
    }

    #[test]
    fn test_move_iter_size_hint() {
        let data = vec![5, 9];
        let pq = BinaryHeap::from(data);

        let mut it = pq.into_iter();

        assert_eq!(it.size_hint(), (2, Some(2)));
        assert_eq!(it.next(), Some(9));

        assert_eq!(it.size_hint(), (1, Some(1)));
        assert_eq!(it.next(), Some(5));

        assert_eq!(it.size_hint(), (0, Some(0)));
        assert_eq!(it.next(), None);
    }

    #[test]
    fn test_move_iter_reverse() {
        let data = vec![5, 9, 3];
        let iterout = vec![3, 5, 9];
        let pq = BinaryHeap::from(data);

        let v: Vec<_> = pq.into_iter().rev().collect();
        assert_eq!(v, iterout);
    }

    #[test]
    fn test_into_iter_sorted_collect() {
        let heap = BinaryHeap::from(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]);
        let it = heap.into_iter_sorted();
        let sorted = it.collect::<Vec<_>>();
        assert_eq!(sorted, vec![10, 9, 8, 7, 6, 5, 4, 3, 2, 2, 1, 1, 0]);
    }

    #[test]
    fn test_peek_and_pop() {
        let data = vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1];
        let mut sorted = data.clone();
        sorted.sort();
        let mut heap = BinaryHeap::from(data);
        while !heap.is_empty() {
            assert_eq!(heap.peek().unwrap(), sorted.last().unwrap());
            assert_eq!(heap.pop().unwrap(), sorted.pop().unwrap());
        }
    }

    #[test]
    fn test_peek_mut() {
        let data = vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1];
        let mut heap = BinaryHeap::from(data);
        assert_eq!(heap.peek(), Some(&10));
        {
            let mut top = heap.peek_mut().unwrap();
            *top -= 2;
        }
        // the mutated top (10 - 2 = 8) must have sifted down; new max is 9
        assert_eq!(heap.peek(), Some(&9));
    }

    #[test]
    fn test_peek_mut_pop() {
        let data = vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1];
        let mut heap = BinaryHeap::from(data);
        assert_eq!(heap.peek(), Some(&10));
        {
            let mut top = heap.peek_mut().unwrap();
            *top -= 2;
            assert_eq!(PeekMut::pop(top), 8);
        }
        assert_eq!(heap.peek(), Some(&9));
    }

    #[test]
    fn test_push() {
        let mut heap = BinaryHeap::from(vec![2, 4, 9]);
        assert_eq!(heap.len(), 3);
        assert!(*heap.peek().unwrap() == 9);
        heap.push(11);
        assert_eq!(heap.len(), 4);
        assert!(*heap.peek().unwrap() == 11);
        heap.push(5);
        assert_eq!(heap.len(), 5);
        assert!(*heap.peek().unwrap() == 11);
        heap.push(27);
        assert_eq!(heap.len(), 6);
        assert!(*heap.peek().unwrap() == 27);
        heap.push(3);
        assert_eq!(heap.len(), 7);
        assert!(*heap.peek().unwrap() == 27);
        heap.push(103);
        assert_eq!(heap.len(), 8);
        assert!(*heap.peek().unwrap() == 103);
    }

    // #[test]
    // fn test_push_unique() {
    //     let mut heap = BinaryHeap::<Box<_>>::from(vec![box 2, box 4, box 9]);
    //     assert_eq!(heap.len(), 3);
    //     assert!(**heap.peek().unwrap() == 9);
    //     heap.push(box 11);
    //     assert_eq!(heap.len(), 4);
    //     assert!(**heap.peek().unwrap() == 11);
    //     heap.push(box 5);
    //     assert_eq!(heap.len(), 5);
    //     assert!(**heap.peek().unwrap() == 11);
    //     heap.push(box 27);
    //     assert_eq!(heap.len(), 6);
    //     assert!(**heap.peek().unwrap() == 27);
    //     heap.push(box 3);
    //     assert_eq!(heap.len(), 7);
    //     assert!(**heap.peek().unwrap() == 27);
    //     heap.push(box 103);
    //     assert_eq!(heap.len(), 8);
    //     assert!(**heap.peek().unwrap() == 103);
    // }

    // Heapifying `data` then draining it sorted must agree with `Vec::sort`.
    fn check_to_vec(mut data: Vec<i32>) {
        let heap = BinaryHeap::from(data.clone());
        let mut v = heap.clone().into_vec();
        v.sort();
        data.sort();

        assert_eq!(v, data);

        assert_eq!(heap.into_sorted_vec(), data);
    }

    #[test]
    fn test_to_vec() {
        check_to_vec(vec![]);
        check_to_vec(vec![5]);
        check_to_vec(vec![3, 2]);
        check_to_vec(vec![2, 3]);
        check_to_vec(vec![5, 1, 2]);
        check_to_vec(vec![1, 100, 2, 3]);
        check_to_vec(vec![1, 3, 5, 7, 9, 2, 4, 6, 8, 0]);
        check_to_vec(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]);
        check_to_vec(vec![9, 11, 9, 9, 9, 9, 11, 2, 3, 4, 11, 9, 0, 0, 0, 0]);
        check_to_vec(vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
        check_to_vec(vec![10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
        check_to_vec(vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 0, 0, 1, 2]);
        check_to_vec(vec![5, 4, 3, 2, 1, 5, 4, 3, 2, 1, 5, 4, 3, 2, 1]);
    }

    #[test]
    fn test_empty_pop() {
        let mut heap = BinaryHeap::<i32>::new();
        assert!(heap.pop().is_none());
    }

    #[test]
    fn test_empty_peek() {
        let empty = BinaryHeap::<i32>::new();
        assert!(empty.peek().is_none());
    }

    #[test]
    fn test_empty_peek_mut() {
        let mut empty = BinaryHeap::<i32>::new();
        assert!(empty.peek_mut().is_none());
    }

    #[test]
    fn test_from_iter() {
        let xs = vec![9, 8, 7, 6, 5, 4, 3, 2, 1];

        let mut q: BinaryHeap<_> = xs.iter().rev().cloned().collect();

        for &x in &xs {
            assert_eq!(q.pop().unwrap(), x);
        }
    }

    #[test]
    fn test_drain() {
        let mut q: BinaryHeap<_> = [9, 8, 7, 6, 5, 4, 3, 2, 1].iter().cloned().collect();

        assert_eq!(q.drain().take(5).count(), 5);

        assert!(q.is_empty());
    }

    #[test]
    fn test_extend_ref() {
        let mut a = BinaryHeap::new();
        a.push(1);
        a.push(2);

        a.extend(&[3, 4, 5]);

        assert_eq!(a.len(), 5);
        assert_eq!(a.into_sorted_vec(), [1, 2, 3, 4, 5]);

        let mut a = BinaryHeap::new();
        a.push(1);
        a.push(2);
        let mut b = BinaryHeap::new();
        b.push(3);
        b.push(4);
        b.push(5);

        a.extend(&b);

        assert_eq!(a.len(), 5);
        assert_eq!(a.into_sorted_vec(), [1, 2, 3, 4, 5]);
    }

    #[test]
    fn test_append() {
        let mut a = BinaryHeap::from(vec![-10, 1, 2, 3, 3]);
        let mut b = BinaryHeap::from(vec![-20, 5, 43]);

        a.append(&mut b);

        assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
        assert!(b.is_empty());
    }

    #[test]
    fn test_append_to_empty() {
        let mut a = BinaryHeap::new();
        let mut b = BinaryHeap::from(vec![-20, 5, 43]);

        a.append(&mut b);

        assert_eq!(a.into_sorted_vec(), [-20, 5, 43]);
        assert!(b.is_empty());
    }

    #[test]
    fn test_extend_specialization() {
        let mut a = BinaryHeap::from(vec![-10, 1, 2, 3, 3]);
        let b = BinaryHeap::from(vec![-20, 5, 43]);

        a.extend(b);

        assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
    }

    // #[test]
    // fn test_placement() {
    //     let mut a = BinaryHeap::new();
    //     &mut a <- 2;
    //     &mut a <- 4;
    //     &mut a <- 3;
    //     assert_eq!(a.peek(), Some(&4));
    //     assert_eq!(a.len(), 3);
    //     &mut a <- 1;
    //     assert_eq!(a.into_sorted_vec(), vec![1, 2, 3, 4]);
    // }

    // #[test]
    // fn test_placement_panic() {
    //     let mut heap = BinaryHeap::from(vec![1, 2, 3]);
    //     fn mkpanic() -> usize {
    //         panic!()
    //     }
    //     let _ = panic::catch_unwind(panic::AssertUnwindSafe(|| {
    //         &mut heap <- mkpanic();
    //     }));
    //     assert_eq!(heap.len(), 3);
    // }

    #[allow(dead_code)]
    fn assert_covariance() {
        fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> {
            d
        }
    }

    // old binaryheap failed this test
    //
    // Integrity means that all elements are present after a comparison panics,
    // even if the order might not be correct.
    //
    // Destructors must be called exactly once per element.
    // FIXME: re-enable emscripten once it can unwind again
    #[test]
    #[cfg(not(target_os = "emscripten"))]
    fn panic_safe() {
        use std::cmp;
        use std::panic::{self, AssertUnwindSafe};
        use std::sync::atomic::{AtomicUsize, Ordering};

        use rand::{seq::SliceRandom, thread_rng};

        static DROP_COUNTER: AtomicUsize = AtomicUsize::new(0);

        #[derive(Eq, PartialEq, PartialOrd, Clone, Debug)]
        struct PanicOrd<T>(T, bool);

        impl<T> Drop for PanicOrd<T> {
            fn drop(&mut self) {
                // update global drop count
                DROP_COUNTER.fetch_add(1, Ordering::SeqCst);
            }
        }

        impl<T: Ord> Ord for PanicOrd<T> {
            fn cmp(&self, other: &Self) -> cmp::Ordering {
                // the `bool` flag marks the designated panicking element
                if self.1 || other.1 {
                    panic!("Panicking comparison");
                }
                self.0.cmp(&other.0)
            }
        }
        let mut rng = thread_rng();
        const DATASZ: usize = 32;
        // Miri is too slow
        let ntest = if cfg!(miri) { 1 } else { 10 };

        // don't use 0 in the data -- we want to catch the zeroed-out case.
        let data = (1..=DATASZ).collect::<Vec<_>>();

        // since it's a fuzzy test, run several tries.
        for _ in 0..ntest {
            for i in 1..=DATASZ {
                DROP_COUNTER.store(0, Ordering::SeqCst);

                let mut panic_ords: Vec<_> = data
                    .iter()
                    .filter(|&&x| x != i)
                    .map(|&x| PanicOrd(x, false))
                    .collect();
                let panic_item = PanicOrd(i, true);

                // heapify the sane items
                panic_ords.shuffle(&mut rng);
                let mut heap = BinaryHeap::from(panic_ords);
                let inner_data;

                {
                    // push the panicking item to the heap and catch the panic
                    let thread_result = {
                        let mut heap_ref = AssertUnwindSafe(&mut heap);
                        panic::catch_unwind(move || {
                            heap_ref.push(panic_item);
                        })
                    };
                    assert!(thread_result.is_err());

                    // Assert no elements were dropped
                    let drops = DROP_COUNTER.load(Ordering::SeqCst);
                    assert!(drops == 0, "Must not drop items. drops={}", drops);
                    inner_data = heap.clone().into_vec();
                    drop(heap);
                }
                let drops = DROP_COUNTER.load(Ordering::SeqCst);
                assert_eq!(drops, DATASZ);

                let mut data_sorted = inner_data.into_iter().map(|p| p.0).collect::<Vec<_>>();
                data_sorted.sort();
                assert_eq!(data_sorted, data);
            }
        }
    }
}

#[cfg(feature = "serde")]
#[cfg(test)]
mod tests_serde {
    use super::binary_heap::*;
    use serde_json;

    // A serialize/deserialize round trip must preserve the heap's contents.
    #[test]
    fn deserialized_same_small_vec() {
        let heap = BinaryHeap::from(vec![1, 2, 3]);
        let serialized = serde_json::to_string(&heap).unwrap();
        let deserialized: BinaryHeap<i32> = serde_json::from_str(&serialized).unwrap();

        let v0: Vec<_> = heap.into_iter().collect();
        let v1: Vec<_> = deserialized.into_iter().collect();
        assert_eq!(v0, v1);
    }

    #[test]
    fn deserialized_same() {
        let vec: Vec<i32> = (0..1000).collect();
        let heap = BinaryHeap::from(vec);
        let serialized = serde_json::to_string(&heap).unwrap();
        let deserialized: BinaryHeap<i32> = serde_json::from_str(&serialized).unwrap();

        let v0: Vec<_> = heap.into_iter().collect();
        let v1: Vec<_> = deserialized.into_iter().collect();
        assert_eq!(v0, v1);
    }
}