alloc/collections/binary_heap/mod.rs
//! A priority queue implemented with a binary heap.
//!
//! Insertion and popping the largest element have *O*(log(*n*)) time complexity.
//! Checking the largest element is *O*(1). Converting a vector to a binary heap
//! can be done in-place, and has *O*(*n*) complexity. A binary heap can also be
//! converted to a sorted vector in-place, allowing it to be used for an *O*(*n* * log(*n*))
//! in-place heapsort.
//!
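//! For example, building a max-heap from an existing vector in-place and then
//! draining it back out in sorted order (a small illustrative sketch using only
//! the APIs documented below):
//!
//! ```
//! use std::collections::BinaryHeap;
//!
//! // `From<Vec<T>>` heapifies the vector in-place in *O*(*n*).
//! let heap = BinaryHeap::from(vec![3, 1, 4, 1, 5]);
//!
//! // `into_sorted_vec` sorts in-place in *O*(*n* * log(*n*)).
//! assert_eq!(heap.into_sorted_vec(), [1, 1, 3, 4, 5]);
//! ```
//!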
//! # Examples
//!
//! This is a larger example that implements [Dijkstra's algorithm][dijkstra]
//! to solve the [shortest path problem][sssp] on a [directed graph][dir_graph].
//! It shows how to use [`BinaryHeap`] with custom types.
//!
//! [dijkstra]: https://en.wikipedia.org/wiki/Dijkstra%27s_algorithm
//! [sssp]: https://en.wikipedia.org/wiki/Shortest_path_problem
//! [dir_graph]: https://en.wikipedia.org/wiki/Directed_graph
//!
//! ```
//! use std::cmp::Ordering;
//! use std::collections::BinaryHeap;
//!
//! #[derive(Copy, Clone, Eq, PartialEq)]
//! struct State {
//!     cost: usize,
//!     position: usize,
//! }
//!
//! // The priority queue depends on `Ord`.
//! // Explicitly implement the trait so the queue becomes a min-heap
//! // instead of a max-heap.
//! impl Ord for State {
//!     fn cmp(&self, other: &Self) -> Ordering {
//!         // Notice that we flip the ordering on costs.
//!         // In case of a tie we compare positions - this step is necessary
//!         // to make implementations of `PartialEq` and `Ord` consistent.
//!         other.cost.cmp(&self.cost)
//!             .then_with(|| self.position.cmp(&other.position))
//!     }
//! }
//!
//! // `PartialOrd` needs to be implemented as well.
//! impl PartialOrd for State {
//!     fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
//!         Some(self.cmp(other))
//!     }
//! }
//!
//! // Each node is represented as a `usize`, for a shorter implementation.
//! struct Edge {
//!     node: usize,
//!     cost: usize,
//! }
//!
//! // Dijkstra's shortest path algorithm.
//!
//! // Start at `start` and use `dist` to track the current shortest distance
//! // to each node. This implementation isn't memory-efficient as it may leave duplicate
//! // nodes in the queue. It also uses `usize::MAX` as a sentinel value,
//! // for a simpler implementation.
//! fn shortest_path(adj_list: &Vec<Vec<Edge>>, start: usize, goal: usize) -> Option<usize> {
//!     // dist[node] = current shortest distance from `start` to `node`
//!     let mut dist: Vec<_> = (0..adj_list.len()).map(|_| usize::MAX).collect();
//!
//!     let mut heap = BinaryHeap::new();
//!
//!     // We're at `start`, with a zero cost
//!     dist[start] = 0;
//!     heap.push(State { cost: 0, position: start });
//!
//!     // Examine the frontier with lower cost nodes first (min-heap)
//!     while let Some(State { cost, position }) = heap.pop() {
//!         // Alternatively we could have continued to find all shortest paths
//!         if position == goal { return Some(cost); }
//!
//!         // Important as we may have already found a better way
//!         if cost > dist[position] { continue; }
//!
//!         // For each node we can reach, see if we can find a way with
//!         // a lower cost going through this node
//!         for edge in &adj_list[position] {
//!             let next = State { cost: cost + edge.cost, position: edge.node };
//!
//!             // If so, add it to the frontier and continue
//!             if next.cost < dist[next.position] {
//!                 heap.push(next);
//!                 // Relaxation, we have now found a better way
//!                 dist[next.position] = next.cost;
//!             }
//!         }
//!     }
//!
//!     // Goal not reachable
//!     None
//! }
//!
//! fn main() {
//!     // This is the directed graph we're going to use.
//!     // The node numbers correspond to the different states,
//!     // and the edge weights symbolize the cost of moving
//!     // from one node to another.
//!     // Note that the edges are one-way.
//!     //
//!     //                  7
//!     //          +-----------------+
//!     //          |                 |
//!     //          v   1        2   |  2
//!     //          0 -----> 1 -----> 3 ---> 4
//!     //          |        ^        ^      ^
//!     //          |        | 1      |      |
//!     //          |        |        | 3    | 1
//!     //          +------> 2 -------+      |
//!     //           10      |               |
//!     //                   +---------------+
//!     //
//!     // The graph is represented as an adjacency list where each index,
//!     // corresponding to a node value, has a list of outgoing edges.
//!     // Chosen for its efficiency.
//!     let graph = vec![
//!         // Node 0
//!         vec![Edge { node: 2, cost: 10 },
//!              Edge { node: 1, cost: 1 }],
//!         // Node 1
//!         vec![Edge { node: 3, cost: 2 }],
//!         // Node 2
//!         vec![Edge { node: 1, cost: 1 },
//!              Edge { node: 3, cost: 3 },
//!              Edge { node: 4, cost: 1 }],
//!         // Node 3
//!         vec![Edge { node: 0, cost: 7 },
//!              Edge { node: 4, cost: 2 }],
//!         // Node 4
//!         vec![]];
//!
//!     assert_eq!(shortest_path(&graph, 0, 1), Some(1));
//!     assert_eq!(shortest_path(&graph, 0, 3), Some(3));
//!     assert_eq!(shortest_path(&graph, 3, 0), Some(7));
//!     assert_eq!(shortest_path(&graph, 0, 4), Some(5));
//!     assert_eq!(shortest_path(&graph, 4, 0), None);
//! }
//! ```

#![allow(missing_docs)]
#![stable(feature = "rust1", since = "1.0.0")]

use core::alloc::Allocator;
use core::iter::{FusedIterator, InPlaceIterable, SourceIter, TrustedFused, TrustedLen};
use core::mem::{self, ManuallyDrop, swap};
use core::num::NonZero;
use core::ops::{Deref, DerefMut};
use core::{fmt, ptr};

use crate::alloc::Global;
use crate::collections::TryReserveError;
use crate::slice;
#[cfg(not(test))]
use crate::vec::AsVecIntoIter;
use crate::vec::{self, Vec};

/// A priority queue implemented with a binary heap.
///
/// This will be a max-heap.
///
/// It is a logic error for an item to be modified in such a way that the
/// item's ordering relative to any other item, as determined by the [`Ord`]
/// trait, changes while it is in the heap. This is normally only possible
/// through interior mutability, global state, I/O, or unsafe code. The
/// behavior resulting from such a logic error is not specified, but will
/// be encapsulated to the `BinaryHeap` that observed the logic error and not
/// result in undefined behavior. This could include panics, incorrect results,
/// aborts, memory leaks, and non-termination.
///
/// As long as no elements change their relative order while being in the heap
/// as described above, the API of `BinaryHeap` guarantees that the heap
/// invariant remains intact, i.e. its methods all behave as documented. For
/// example, if a method is documented as iterating in sorted order, that's
/// guaranteed to work as long as elements in the heap have not changed order,
/// even in the presence of closures being unwound out of, iterators getting
/// leaked, and similar foolishness.
///
/// # Examples
///
/// ```
/// use std::collections::BinaryHeap;
///
/// // Type inference lets us omit an explicit type signature (which
/// // would be `BinaryHeap<i32>` in this example).
/// let mut heap = BinaryHeap::new();
///
/// // We can use peek to look at the next item in the heap. In this case,
/// // there are no items in there yet so we get None.
/// assert_eq!(heap.peek(), None);
///
/// // Let's add some scores...
/// heap.push(1);
/// heap.push(5);
/// heap.push(2);
///
/// // Now peek shows the most important item in the heap.
/// assert_eq!(heap.peek(), Some(&5));
///
/// // We can check the length of a heap.
/// assert_eq!(heap.len(), 3);
///
/// // We can iterate over the items in the heap, although they are returned in
/// // a random order.
/// for x in &heap {
///     println!("{x}");
/// }
///
/// // If we instead pop these scores, they should come back in order.
/// assert_eq!(heap.pop(), Some(5));
/// assert_eq!(heap.pop(), Some(2));
/// assert_eq!(heap.pop(), Some(1));
/// assert_eq!(heap.pop(), None);
///
/// // We can clear the heap of any remaining items.
/// heap.clear();
///
/// // The heap should now be empty.
/// assert!(heap.is_empty())
/// ```
///
/// A `BinaryHeap` with a known list of items can be initialized from an array:
///
/// ```
/// use std::collections::BinaryHeap;
///
/// let heap = BinaryHeap::from([1, 5, 2]);
/// ```
///
/// ## Min-heap
///
/// Either [`core::cmp::Reverse`] or a custom [`Ord`] implementation can be used to
/// make `BinaryHeap` a min-heap. This makes `heap.pop()` return the smallest
/// value instead of the greatest one.
///
/// ```
/// use std::collections::BinaryHeap;
/// use std::cmp::Reverse;
///
/// let mut heap = BinaryHeap::new();
///
/// // Wrap values in `Reverse`
/// heap.push(Reverse(1));
/// heap.push(Reverse(5));
/// heap.push(Reverse(2));
///
/// // If we pop these scores now, they should come back in the reverse order.
/// assert_eq!(heap.pop(), Some(Reverse(1)));
/// assert_eq!(heap.pop(), Some(Reverse(2)));
/// assert_eq!(heap.pop(), Some(Reverse(5)));
/// assert_eq!(heap.pop(), None);
/// ```
///
/// # Time complexity
///
/// | [push]  | [pop]         | [peek]/[peek\_mut] |
/// |---------|---------------|--------------------|
/// | *O*(1)~ | *O*(log(*n*)) | *O*(1)             |
///
/// The value for `push` is an expected cost; the method documentation gives a
/// more detailed analysis.
///
/// [`core::cmp::Reverse`]: core::cmp::Reverse
/// [push]: BinaryHeap::push
/// [pop]: BinaryHeap::pop
/// [peek]: BinaryHeap::peek
/// [peek\_mut]: BinaryHeap::peek_mut
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "BinaryHeap")]
pub struct BinaryHeap<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    data: Vec<T, A>,
}

/// Structure wrapping a mutable reference to the greatest item on a
/// `BinaryHeap`.
///
/// This `struct` is created by the [`peek_mut`] method on [`BinaryHeap`]. See
/// its documentation for more.
///
/// [`peek_mut`]: BinaryHeap::peek_mut
#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
pub struct PeekMut<
    'a,
    T: 'a + Ord,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    heap: &'a mut BinaryHeap<T, A>,
    // If a set_len + sift_down are required, this is Some. If a &mut T has not
    // yet been exposed to peek_mut()'s caller, it's None.
    original_len: Option<NonZero<usize>>,
}

#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: Ord + fmt::Debug, A: Allocator> fmt::Debug for PeekMut<'_, T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("PeekMut").field(&self.heap.data[0]).finish()
    }
}

#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
impl<T: Ord, A: Allocator> Drop for PeekMut<'_, T, A> {
    fn drop(&mut self) {
        if let Some(original_len) = self.original_len {
            // SAFETY: That's how many elements were in the Vec at the time of
            // the PeekMut::deref_mut call, and therefore also at the time of
            // the BinaryHeap::peek_mut call. Since the PeekMut did not end up
            // getting leaked, we are now undoing the leak amplification that
            // the DerefMut prepared for.
            unsafe { self.heap.data.set_len(original_len.get()) };

            // SAFETY: PeekMut is only instantiated for non-empty heaps.
            unsafe { self.heap.sift_down(0) };
        }
    }
}

#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
impl<T: Ord, A: Allocator> Deref for PeekMut<'_, T, A> {
    type Target = T;
    fn deref(&self) -> &T {
        debug_assert!(!self.heap.is_empty());
        // SAFE: PeekMut is only instantiated for non-empty heaps
        unsafe { self.heap.data.get_unchecked(0) }
    }
}

#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
impl<T: Ord, A: Allocator> DerefMut for PeekMut<'_, T, A> {
    fn deref_mut(&mut self) -> &mut T {
        debug_assert!(!self.heap.is_empty());

        let len = self.heap.len();
        if len > 1 {
            // Here we preemptively leak all the rest of the underlying vector
            // after the currently max element. If the caller mutates the &mut T
            // we're about to give them, and then leaks the PeekMut, all these
            // elements will remain leaked. If they don't leak the PeekMut, then
            // either Drop or PeekMut::pop will un-leak the vector elements.
            //
            // This technique is described throughout several other places in
            // the standard library as "leak amplification".
            unsafe {
                // SAFETY: len > 1 so len != 0.
                self.original_len = Some(NonZero::new_unchecked(len));
                // SAFETY: len > 1 so all this does for now is leak elements,
                // which is safe.
                self.heap.data.set_len(1);
            }
        }

        // SAFE: PeekMut is only instantiated for non-empty heaps
        unsafe { self.heap.data.get_unchecked_mut(0) }
    }
}

impl<'a, T: Ord, A: Allocator> PeekMut<'a, T, A> {
    /// Sifts the current element to its new position.
    ///
    /// Afterwards, the `PeekMut` refers to the new maximum element. Returns
    /// `true` if the element changed position.
    ///
    /// ## Examples
    ///
    /// The return value can be used as a loop condition to upper bound all
    /// elements in the heap. When only a few elements are affected, the heap's
    /// sort order ensures this is faster than a reconstruction from the raw
    /// element list and requires no additional allocation.
    ///
    /// ```
    /// #![feature(binary_heap_peek_mut_refresh)]
    /// use std::collections::BinaryHeap;
    ///
    /// let mut heap: BinaryHeap<u32> = (0..128).collect();
    /// let mut peek = heap.peek_mut().unwrap();
    ///
    /// loop {
    ///     *peek = 99;
    ///
    ///     if !peek.refresh() {
    ///         break;
    ///     }
    /// }
    ///
    /// // Postcondition: this is now an upper bound.
    /// assert!(*peek < 100);
    /// ```
    ///
    /// When the element remains the maximum after modification, the peek remains unchanged:
    ///
    /// ```
    /// #![feature(binary_heap_peek_mut_refresh)]
    /// use std::collections::BinaryHeap;
    ///
    /// let mut heap: BinaryHeap<u32> = [1, 2, 3].into();
    /// let mut peek = heap.peek_mut().unwrap();
    ///
    /// assert_eq!(*peek, 3);
    /// *peek = 42;
    ///
    /// // Refreshing leaves the peek unchanged, since 42 is still the maximum.
    /// assert!(!peek.refresh(), "42 is even larger than 3");
    /// assert_eq!(*peek, 42);
    /// ```
    #[unstable(feature = "binary_heap_peek_mut_refresh", issue = "138355")]
    #[must_use = "is equivalent to dropping and getting a new PeekMut except for return information"]
    pub fn refresh(&mut self) -> bool {
        // The length of the underlying heap is unchanged by sifting down. The value stored for leak
        // amplification thus remains accurate. We erase the leak amplification firstly because the
        // operation is then equivalent to constructing a new PeekMut, and secondly because this
        // avoids any future complication where a non-empty original_len would be interpreted as the
        // heap having been leak amplified instead of checking the heap itself.
        if let Some(original_len) = self.original_len.take() {
            // SAFETY: This is how many elements were in the Vec at the time of
            // the BinaryHeap::peek_mut call.
            unsafe { self.heap.data.set_len(original_len.get()) };

            // The length of the heap did not change by sifting, upholding our own invariants.

            // SAFETY: PeekMut is only instantiated for non-empty heaps.
            (unsafe { self.heap.sift_down(0) }) != 0
        } else {
            // The element was not modified.
            false
        }
    }

    /// Removes the peeked value from the heap and returns it.
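    ///
    /// # Examples
    ///
    /// Basic usage (a small illustrative sketch):
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// use std::collections::binary_heap::PeekMut;
    ///
    /// let mut heap = BinaryHeap::from([1, 5, 2]);
    ///
    /// // `PeekMut::pop` takes the `PeekMut` by value and removes the peeked
    /// // (greatest) item from the heap.
    /// if let Some(peek) = heap.peek_mut() {
    ///     assert_eq!(PeekMut::pop(peek), 5);
    /// }
    /// assert_eq!(heap.len(), 2);
    /// ```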
    #[stable(feature = "binary_heap_peek_mut_pop", since = "1.18.0")]
    pub fn pop(mut this: PeekMut<'a, T, A>) -> T {
        if let Some(original_len) = this.original_len.take() {
            // SAFETY: This is how many elements were in the Vec at the time of
            // the BinaryHeap::peek_mut call.
            unsafe { this.heap.data.set_len(original_len.get()) };

            // Unlike in Drop, here we don't also need to do a sift_down even if
            // the caller could've mutated the element. It is removed from the
            // heap on the next line and pop() is not sensitive to its value.
        }

        // SAFETY: Having a `PeekMut` proves that the associated binary heap is
        // non-empty, so the `pop` operation will not fail.
        unsafe { this.heap.pop().unwrap_unchecked() }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone, A: Allocator + Clone> Clone for BinaryHeap<T, A> {
    fn clone(&self) -> Self {
        BinaryHeap { data: self.data.clone() }
    }

    /// Overwrites the contents of `self` with a clone of the contents of `source`.
    ///
    /// This method is preferred over simply assigning `source.clone()` to `self`,
    /// as it avoids reallocation if possible.
    ///
    /// See [`Vec::clone_from()`] for more details.
    fn clone_from(&mut self, source: &Self) {
        self.data.clone_from(&source.data);
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Default for BinaryHeap<T> {
    /// Creates an empty `BinaryHeap<T>`.
    #[inline]
    fn default() -> BinaryHeap<T> {
        BinaryHeap::new()
    }
}

#[stable(feature = "binaryheap_debug", since = "1.4.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for BinaryHeap<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}

struct RebuildOnDrop<
    'a,
    T: Ord,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    heap: &'a mut BinaryHeap<T, A>,
    rebuild_from: usize,
}

impl<T: Ord, A: Allocator> Drop for RebuildOnDrop<'_, T, A> {
    fn drop(&mut self) {
        self.heap.rebuild_tail(self.rebuild_from);
    }
}

impl<T> BinaryHeap<T> {
    /// Creates an empty `BinaryHeap` as a max-heap.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new();
    /// heap.push(4);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_stable(feature = "const_binary_heap_constructor", since = "1.80.0")]
    #[must_use]
    pub const fn new() -> BinaryHeap<T> {
        BinaryHeap { data: vec![] }
    }

    /// Creates an empty `BinaryHeap` with at least the specified capacity.
    ///
    /// The binary heap will be able to hold at least `capacity` elements without
    /// reallocating. This method is allowed to allocate for more elements than
    /// `capacity`. If `capacity` is zero, the binary heap will not allocate.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::with_capacity(10);
    /// heap.push(4);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[must_use]
    pub fn with_capacity(capacity: usize) -> BinaryHeap<T> {
        BinaryHeap { data: Vec::with_capacity(capacity) }
    }
}

impl<T, A: Allocator> BinaryHeap<T, A> {
    /// Creates an empty `BinaryHeap` as a max-heap, using `A` as allocator.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::alloc::System;
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new_in(System);
    /// heap.push(4);
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[must_use]
    pub const fn new_in(alloc: A) -> BinaryHeap<T, A> {
        BinaryHeap { data: Vec::new_in(alloc) }
    }

    /// Creates an empty `BinaryHeap` with at least the specified capacity, using `A` as allocator.
    ///
    /// The binary heap will be able to hold at least `capacity` elements without
    /// reallocating. This method is allowed to allocate for more elements than
    /// `capacity`. If `capacity` is zero, the binary heap will not allocate.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::alloc::System;
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::with_capacity_in(10, System);
    /// heap.push(4);
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[must_use]
    pub fn with_capacity_in(capacity: usize, alloc: A) -> BinaryHeap<T, A> {
        BinaryHeap { data: Vec::with_capacity_in(capacity, alloc) }
    }
}

impl<T: Ord, A: Allocator> BinaryHeap<T, A> {
    /// Returns a mutable reference to the greatest item in the binary heap, or
    /// `None` if it is empty.
    ///
    /// Note: If the `PeekMut` value is leaked, some heap elements might get
    /// leaked along with it, but the remaining elements will remain a valid
    /// heap.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new();
    /// assert!(heap.peek_mut().is_none());
    ///
    /// heap.push(1);
    /// heap.push(5);
    /// heap.push(2);
    /// if let Some(mut val) = heap.peek_mut() {
    ///     *val = 0;
    /// }
    /// assert_eq!(heap.peek(), Some(&2));
    /// ```
    ///
    /// # Time complexity
    ///
    /// If the item is modified then the worst case time complexity is *O*(log(*n*)),
    /// otherwise it's *O*(1).
    #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
    pub fn peek_mut(&mut self) -> Option<PeekMut<'_, T, A>> {
        if self.is_empty() { None } else { Some(PeekMut { heap: self, original_len: None }) }
    }

    /// Removes the greatest item from the binary heap and returns it, or `None` if it
    /// is empty.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::from([1, 3]);
    ///
    /// assert_eq!(heap.pop(), Some(3));
    /// assert_eq!(heap.pop(), Some(1));
    /// assert_eq!(heap.pop(), None);
    /// ```
    ///
    /// # Time complexity
    ///
    /// The worst case cost of `pop` on a heap containing *n* elements is *O*(log(*n*)).
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn pop(&mut self) -> Option<T> {
        self.data.pop().map(|mut item| {
            if !self.is_empty() {
                swap(&mut item, &mut self.data[0]);
                // SAFETY: !self.is_empty() means that self.len() > 0
                unsafe { self.sift_down_to_bottom(0) };
            }
            item
        })
    }

    /// Removes and returns the greatest item from the binary heap if the predicate
    /// returns `true`, or [`None`] if the predicate returns `false` or the heap
    /// is empty (the predicate will not be called in that case).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(binary_heap_pop_if)]
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::from([1, 2]);
    /// let pred = |x: &i32| *x % 2 == 0;
    ///
    /// assert_eq!(heap.pop_if(pred), Some(2));
    /// assert_eq!(heap.as_slice(), [1]);
    /// assert_eq!(heap.pop_if(pred), None);
    /// assert_eq!(heap.as_slice(), [1]);
    /// ```
    ///
    /// # Time complexity
    ///
    /// The worst case cost of `pop_if` on a heap containing *n* elements is *O*(log(*n*)).
    #[unstable(feature = "binary_heap_pop_if", issue = "151828")]
    pub fn pop_if(&mut self, predicate: impl FnOnce(&T) -> bool) -> Option<T> {
        let first = self.peek()?;
        if predicate(first) { self.pop() } else { None }
    }

    /// Pushes an item onto the binary heap.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new();
    /// heap.push(3);
    /// heap.push(5);
    /// heap.push(1);
    ///
    /// assert_eq!(heap.len(), 3);
    /// assert_eq!(heap.peek(), Some(&5));
    /// ```
    ///
    /// # Time complexity
    ///
    /// The expected cost of `push`, averaged over every possible ordering of
    /// the elements being pushed, and over a sufficiently large number of
    /// pushes, is *O*(1). This is the most meaningful cost metric when pushing
    /// elements that are *not* already in any sorted pattern.
    ///
    /// The time complexity degrades if elements are pushed in predominantly
    /// ascending order. In the worst case, elements are pushed in ascending
    /// sorted order and the amortized cost per push is *O*(log(*n*)) against a heap
    /// containing *n* elements.
    ///
    /// The worst case cost of a *single* call to `push` is *O*(*n*). The worst case
    /// occurs when capacity is exhausted and needs a resize. The resize cost
    /// has been amortized in the previous figures.
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_confusables("append", "put")]
    pub fn push(&mut self, item: T) {
        let old_len = self.len();
        self.data.push(item);
        // SAFETY: Since we pushed a new item it means that
        // old_len = self.len() - 1 < self.len()
        unsafe { self.sift_up(0, old_len) };
    }

    /// Consumes the `BinaryHeap` and returns a vector in sorted
    /// (ascending) order.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    ///
    /// let mut heap = BinaryHeap::from([1, 2, 4, 5, 7]);
    /// heap.push(6);
    /// heap.push(3);
    ///
    /// let vec = heap.into_sorted_vec();
    /// assert_eq!(vec, [1, 2, 3, 4, 5, 6, 7]);
    /// ```
    #[must_use = "`self` will be dropped if the result is not used"]
    #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
    pub fn into_sorted_vec(mut self) -> Vec<T, A> {
        let mut end = self.len();
        while end > 1 {
            end -= 1;
            // SAFETY: `end` goes from `self.len() - 1` to 1 (both included),
            // so it's always a valid index to access.
            // It is safe to access index 0 (i.e. `ptr`), because
            // 1 <= end < self.len(), which means self.len() >= 2.
            unsafe {
                let ptr = self.data.as_mut_ptr();
                ptr::swap(ptr, ptr.add(end));
            }
            // SAFETY: `end` goes from `self.len() - 1` to 1 (both included) so:
            // 0 < 1 <= end <= self.len() - 1 < self.len()
            // Which means 0 < end and end < self.len().
            unsafe { self.sift_down_range(0, end) };
        }
        self.into_vec()
    }

    // The implementations of sift_up and sift_down use unsafe blocks in
    // order to move an element out of the vector (leaving behind a
    // hole), shift along the others and move the removed element back into the
    // vector at the final location of the hole.
    // The `Hole` type is used to represent this, and make sure
    // the hole is filled back at the end of its scope, even on panic.
    // Using a hole reduces the constant factor compared to using swaps,
    // which involves twice as many moves.
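    //
    // As a rough sketch of that claim (illustrative arithmetic, not from a
    // benchmark): sifting an element across `k` levels with swaps moves two
    // elements per level, about `2 * k` moves in total, while with a hole only
    // the other element of each pair moves, plus one move out at the start and
    // one move back in at the end, about `k + 2` moves in total.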

    /// # Safety
    ///
    /// The caller must guarantee that `pos < self.len()`.
    ///
    /// Returns the new position of the element.
    unsafe fn sift_up(&mut self, start: usize, pos: usize) -> usize {
        // Take out the value at `pos` and create a hole.
        // SAFETY: The caller guarantees that pos < self.len()
        let mut hole = unsafe { Hole::new(&mut self.data, pos) };

        while hole.pos() > start {
            let parent = (hole.pos() - 1) / 2;

            // SAFETY: hole.pos() > start >= 0, which means hole.pos() > 0
            // and so hole.pos() - 1 can't underflow.
            // This guarantees that parent < hole.pos() so
            // it's a valid index and also != hole.pos().
            if hole.element() <= unsafe { hole.get(parent) } {
                break;
            }

            // SAFETY: Same as above
            unsafe { hole.move_to(parent) };
        }

        hole.pos()
    }

    /// Take an element at `pos` and move it down the heap,
    /// while its children are larger.
    ///
    /// Returns the new position of the element.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that `pos < end <= self.len()`.
    unsafe fn sift_down_range(&mut self, pos: usize, end: usize) -> usize {
        // SAFETY: The caller guarantees that pos < end <= self.len().
        let mut hole = unsafe { Hole::new(&mut self.data, pos) };
        let mut child = 2 * hole.pos() + 1;

        // Loop invariant: child == 2 * hole.pos() + 1.
        while child <= end.saturating_sub(2) {
            // compare with the greater of the two children
            // SAFETY: child < end - 1 < self.len() and
            // child + 1 < end <= self.len(), so they're valid indexes.
            // child == 2 * hole.pos() + 1 != hole.pos() and
            // child + 1 == 2 * hole.pos() + 2 != hole.pos().
            // FIXME: 2 * hole.pos() + 1 or 2 * hole.pos() + 2 could overflow
            // if T is a ZST
            child += unsafe { hole.get(child) <= hole.get(child + 1) } as usize;

            // if we are already in order, stop.
            // SAFETY: child is now either the old child or the old child+1
            // We have already proven that both are < self.len() and != hole.pos()
            if hole.element() >= unsafe { hole.get(child) } {
                return hole.pos();
            }

            // SAFETY: same as above.
            unsafe { hole.move_to(child) };
            child = 2 * hole.pos() + 1;
        }

        // SAFETY: && short circuits, which means that in the
        // second condition it's already true that child == end - 1 < self.len().
        if child == end - 1 && hole.element() < unsafe { hole.get(child) } {
            // SAFETY: child is already proven to be a valid index and
            // child == 2 * hole.pos() + 1 != hole.pos().
            unsafe { hole.move_to(child) };
        }

        hole.pos()
    }

    /// # Safety
    ///
    /// The caller must guarantee that `pos < self.len()`.
    unsafe fn sift_down(&mut self, pos: usize) -> usize {
        let len = self.len();
        // SAFETY: pos < len is guaranteed by the caller and
        // obviously len = self.len() <= self.len().
        unsafe { self.sift_down_range(pos, len) }
    }

    /// Take an element at `pos` and move it all the way down the heap,
    /// then sift it up to its position.
    ///
    /// Note: This is faster when the element is known to be large / should
    /// be closer to the bottom.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that `pos < self.len()`.
    unsafe fn sift_down_to_bottom(&mut self, mut pos: usize) {
        let end = self.len();
        let start = pos;

        // SAFETY: The caller guarantees that pos < self.len().
        let mut hole = unsafe { Hole::new(&mut self.data, pos) };
        let mut child = 2 * hole.pos() + 1;

        // Loop invariant: child == 2 * hole.pos() + 1.
        while child <= end.saturating_sub(2) {
            // SAFETY: child < end - 1 < self.len() and
            // child + 1 < end <= self.len(), so they're valid indexes.
            // child == 2 * hole.pos() + 1 != hole.pos() and
            // child + 1 == 2 * hole.pos() + 2 != hole.pos().
            // FIXME: 2 * hole.pos() + 1 or 2 * hole.pos() + 2 could overflow
            // if T is a ZST
            child += unsafe { hole.get(child) <= hole.get(child + 1) } as usize;

            // SAFETY: Same as above
            unsafe { hole.move_to(child) };
            child = 2 * hole.pos() + 1;
        }

        if child == end - 1 {
            // SAFETY: child == end - 1 < self.len(), so it's a valid index
            // and child == 2 * hole.pos() + 1 != hole.pos().
            unsafe { hole.move_to(child) };
        }
        pos = hole.pos();
        drop(hole);

        // SAFETY: pos is the position in the hole and was already proven
        // to be a valid index.
        unsafe { self.sift_up(start, pos) };
    }

    /// Rebuild assuming data[0..start] is still a proper heap.
    fn rebuild_tail(&mut self, start: usize) {
        if start == self.len() {
            return;
        }

        let tail_len = self.len() - start;

        #[inline(always)]
        fn log2_fast(x: usize) -> usize {
            (usize::BITS - x.leading_zeros() - 1) as usize
        }

        // `rebuild` takes O(self.len()) operations
        // and about 2 * self.len() comparisons in the worst case
        // while repeating `sift_up` takes O(tail_len * log(start)) operations
        // and about 1 * tail_len * log_2(start) comparisons in the worst case,
        // assuming start >= tail_len. For larger heaps, the crossover point
        // no longer follows this reasoning and was determined empirically.
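        //
        // Two worked instances of the comparison below (illustrative numbers,
        // not part of the original analysis): appending 512 elements to a heap
        // of 1024 (len = 1536 <= 2048) compares 2 * 1536 = 3072 against
        // 512 * log2(1024) = 5120, so rebuilding wins; appending 100 elements
        // to a heap of 1500 (len = 1600) compares 2 * 1600 = 3200 against
        // 100 * log2(1500) = 1000, so sifting up the tail wins.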
        let better_to_rebuild = if start < tail_len {
            true
        } else if self.len() <= 2048 {
            2 * self.len() < tail_len * log2_fast(start)
        } else {
            2 * self.len() < tail_len * 11
        };

        if better_to_rebuild {
            self.rebuild();
        } else {
            for i in start..self.len() {
                // SAFETY: The index `i` is always less than self.len().
                unsafe { self.sift_up(0, i) };
            }
        }
    }

    fn rebuild(&mut self) {
        let mut n = self.len() / 2;
        while n > 0 {
            n -= 1;
            // SAFETY: n starts from self.len() / 2 and goes down to 0.
            // The only case when !(n < self.len()) is if
            // self.len() == 0, but it's ruled out by the loop condition.
            unsafe { self.sift_down(n) };
        }
    }

    /// Moves all the elements of `other` into `self`, leaving `other` empty.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    ///
    /// let mut a = BinaryHeap::from([-10, 1, 2, 3, 3]);
    /// let mut b = BinaryHeap::from([-20, 5, 43]);
    ///
    /// a.append(&mut b);
    ///
    /// assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
    /// assert!(b.is_empty());
    /// ```
    #[stable(feature = "binary_heap_append", since = "1.11.0")]
    pub fn append(&mut self, other: &mut Self) {
        if self.len() < other.len() {
            swap(self, other);
        }

        let start = self.data.len();

        self.data.append(&mut other.data);

        self.rebuild_tail(start);
    }

    /// Clears the binary heap, returning an iterator over the removed elements
    /// in heap order. If the iterator is dropped before being fully consumed,
    /// it drops the remaining elements in heap order.
    ///
    /// The returned iterator keeps a mutable borrow on the heap to optimize
    /// its implementation.
    ///
    /// Note:
    /// * `.drain_sorted()` is *O*(*n* \* log(*n*)); much slower than `.drain()`.
    ///   You should use the latter for most cases.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(binary_heap_drain_sorted)]
    /// use std::collections::BinaryHeap;
    ///
    /// let mut heap = BinaryHeap::from([1, 2, 3, 4, 5]);
    /// assert_eq!(heap.len(), 5);
    ///
    /// drop(heap.drain_sorted()); // removes all elements in heap order
    /// assert_eq!(heap.len(), 0);
    /// ```
    #[inline]
    #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
    pub fn drain_sorted(&mut self) -> DrainSorted<'_, T, A> {
        DrainSorted { inner: self }
    }

    /// Retains only the elements specified by the predicate.
    ///
    /// In other words, remove all elements `e` for which `f(&e)` returns
    /// `false`. The elements are visited in unsorted (and unspecified) order.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    ///
    /// let mut heap = BinaryHeap::from([-10, -5, 1, 2, 4, 13]);
    ///
    /// heap.retain(|x| x % 2 == 0); // only keep even numbers
    ///
    /// assert_eq!(heap.into_sorted_vec(), [-10, 2, 4])
    /// ```
    #[stable(feature = "binary_heap_retain", since = "1.70.0")]
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        // `rebuild_from` will be updated to the first touched element below, and the rebuild will
        // only be done for the tail.
        let mut guard = RebuildOnDrop { rebuild_from: self.len(), heap: self };
        let mut i = 0;

        guard.heap.data.retain(|e| {
            let keep = f(e);
            if !keep && i < guard.rebuild_from {
                guard.rebuild_from = i;
            }
            i += 1;
            keep
        });
    }
}

impl<T, A: Allocator> BinaryHeap<T, A> {
    /// Returns an iterator visiting all values in the underlying vector, in
    /// arbitrary order.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let heap = BinaryHeap::from([1, 2, 3, 4]);
    ///
    /// // Print 1, 2, 3, 4 in arbitrary order
    /// for x in heap.iter() {
    ///     println!("{x}");
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[cfg_attr(not(test), rustc_diagnostic_item = "binaryheap_iter")]
    pub fn iter(&self) -> Iter<'_, T> {
        Iter { iter: self.data.iter() }
    }

    /// Returns an iterator which retrieves elements in heap order.
    ///
    /// This method consumes the original heap.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(binary_heap_into_iter_sorted)]
    /// use std::collections::BinaryHeap;
    /// let heap = BinaryHeap::from([1, 2, 3, 4, 5]);
    ///
    /// assert_eq!(heap.into_iter_sorted().take(2).collect::<Vec<_>>(), [5, 4]);
    /// ```
    #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
    pub fn into_iter_sorted(self) -> IntoIterSorted<T, A> {
        IntoIterSorted { inner: self }
    }

    /// Returns the greatest item in the binary heap, or `None` if it is empty.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new();
    /// assert_eq!(heap.peek(), None);
    ///
    /// heap.push(1);
    /// heap.push(5);
    /// heap.push(2);
    /// assert_eq!(heap.peek(), Some(&5));
    /// ```
    ///
    /// # Time complexity
    ///
    /// Cost is *O*(1) in the worst case.
    #[must_use]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn peek(&self) -> Option<&T> {
        self.data.get(0)
    }

    /// Returns the number of elements the binary heap can hold without reallocating.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::with_capacity(100);
    /// assert!(heap.capacity() >= 100);
    /// heap.push(4);
    /// ```
    #[must_use]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn capacity(&self) -> usize {
        self.data.capacity()
    }

    /// Reserves the minimum capacity for at least `additional` elements more than
    /// the current length. Unlike [`reserve`], this will not
    /// deliberately over-allocate to speculatively avoid frequent allocations.
    /// After calling `reserve_exact`, capacity will be greater than or equal to
    /// `self.len() + additional`. Does nothing if the capacity is already
    /// sufficient.
    ///
    /// [`reserve`]: BinaryHeap::reserve
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows [`usize`].
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new();
    /// heap.reserve_exact(100);
    /// assert!(heap.capacity() >= 100);
    /// heap.push(4);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn reserve_exact(&mut self, additional: usize) {
        self.data.reserve_exact(additional);
    }

    /// Reserves capacity for at least `additional` elements more than the
    /// current length. The allocator may reserve more space to speculatively
    /// avoid frequent allocations. After calling `reserve`,
    /// capacity will be greater than or equal to `self.len() + additional`.
    /// Does nothing if capacity is already sufficient.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows [`usize`].
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new();
    /// heap.reserve(100);
    /// assert!(heap.capacity() >= 100);
    /// heap.push(4);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn reserve(&mut self, additional: usize) {
        self.data.reserve(additional);
    }

    /// Tries to reserve the minimum capacity for at least `additional` elements
    /// more than the current length. Unlike [`try_reserve`], this will not
    /// deliberately over-allocate to speculatively avoid frequent allocations.
    /// After calling `try_reserve_exact`, capacity will be greater than or
    /// equal to `self.len() + additional` if it returns `Ok(())`.
    /// Does nothing if the capacity is already sufficient.
    ///
    /// Note that the allocator may give the collection more space than it
    /// requests. Therefore, capacity can not be relied upon to be precisely
    /// minimal. Prefer [`try_reserve`] if future insertions are expected.
    ///
    /// [`try_reserve`]: BinaryHeap::try_reserve
    ///
    /// # Errors
    ///
    /// If the capacity overflows, or the allocator reports a failure, then an error
    /// is returned.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// use std::collections::TryReserveError;
    ///
    /// fn find_max_slow(data: &[u32]) -> Result<Option<u32>, TryReserveError> {
    ///     let mut heap = BinaryHeap::new();
    ///
    ///     // Pre-reserve the memory, exiting if we can't
    ///     heap.try_reserve_exact(data.len())?;
    ///
    ///     // Now we know this can't OOM in the middle of our complex work
    ///     heap.extend(data.iter());
    ///
    ///     Ok(heap.pop())
    /// }
    /// # find_max_slow(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
    /// ```
    #[stable(feature = "try_reserve_2", since = "1.63.0")]
    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
        self.data.try_reserve_exact(additional)
    }

    /// Tries to reserve capacity for at least `additional` elements more than the
    /// current length. The allocator may reserve more space to speculatively
    /// avoid frequent allocations. After calling `try_reserve`, capacity will be
    /// greater than or equal to `self.len() + additional` if it returns
    /// `Ok(())`. Does nothing if capacity is already sufficient. This method
    /// preserves the contents even if an error occurs.
    ///
    /// # Errors
    ///
    /// If the capacity overflows, or the allocator reports a failure, then an error
    /// is returned.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// use std::collections::TryReserveError;
    ///
    /// fn find_max_slow(data: &[u32]) -> Result<Option<u32>, TryReserveError> {
    ///     let mut heap = BinaryHeap::new();
    ///
    ///     // Pre-reserve the memory, exiting if we can't
    ///     heap.try_reserve(data.len())?;
    ///
    ///     // Now we know this can't OOM in the middle of our complex work
    ///     heap.extend(data.iter());
    ///
    ///     Ok(heap.pop())
    /// }
    /// # find_max_slow(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
    /// ```
    #[stable(feature = "try_reserve_2", since = "1.63.0")]
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
        self.data.try_reserve(additional)
    }

    /// Discards as much additional capacity as possible.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
    ///
    /// assert!(heap.capacity() >= 100);
    /// heap.shrink_to_fit();
    /// assert!(heap.capacity() == 0);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn shrink_to_fit(&mut self) {
        self.data.shrink_to_fit();
    }

    /// Discards capacity with a lower bound.
    ///
    /// The capacity will remain at least as large as both the length
    /// and the supplied value.
    ///
    /// If the current capacity is less than the lower limit, this is a no-op.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
    ///
    /// assert!(heap.capacity() >= 100);
    /// heap.shrink_to(10);
    /// assert!(heap.capacity() >= 10);
    /// ```
    #[inline]
    #[stable(feature = "shrink_to", since = "1.56.0")]
    pub fn shrink_to(&mut self, min_capacity: usize) {
        self.data.shrink_to(min_capacity)
    }

    /// Returns a slice of all values in the underlying vector, in arbitrary
    /// order.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// use std::io::{self, Write};
    ///
    /// let heap = BinaryHeap::from([1, 2, 3, 4, 5, 6, 7]);
    ///
    /// io::sink().write(heap.as_slice()).unwrap();
    /// ```
    #[must_use]
    #[stable(feature = "binary_heap_as_slice", since = "1.80.0")]
    pub fn as_slice(&self) -> &[T] {
        self.data.as_slice()
    }

    /// Consumes the `BinaryHeap` and returns the underlying vector
    /// in arbitrary order.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let heap = BinaryHeap::from([1, 2, 3, 4, 5, 6, 7]);
    /// let vec = heap.into_vec();
    ///
    /// // Will print in some order
    /// for x in vec {
    ///     println!("{x}");
    /// }
    /// ```
    #[must_use = "`self` will be dropped if the result is not used"]
    #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
    pub fn into_vec(self) -> Vec<T, A> {
        self.into()
    }

    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        self.data.allocator()
    }

    /// Returns the length of the binary heap.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let heap = BinaryHeap::from([1, 3]);
    ///
    /// assert_eq!(heap.len(), 2);
    /// ```
    #[must_use]
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_confusables("length", "size")]
    pub fn len(&self) -> usize {
        self.data.len()
    }

    /// Checks if the binary heap is empty.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new();
    ///
    /// assert!(heap.is_empty());
    ///
    /// heap.push(3);
    /// heap.push(5);
    /// heap.push(1);
    ///
    /// assert!(!heap.is_empty());
    /// ```
    #[must_use]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Clears the binary heap, returning an iterator over the removed elements
    /// in arbitrary order. If the iterator is dropped before being fully
    /// consumed, it drops the remaining elements in arbitrary order.
    ///
    /// The returned iterator keeps a mutable borrow on the heap to optimize
    /// its implementation.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::from([1, 3]);
    ///
    /// assert!(!heap.is_empty());
    ///
    /// for x in heap.drain() {
    ///     println!("{x}");
    /// }
    ///
    /// assert!(heap.is_empty());
    /// ```
    #[inline]
    #[stable(feature = "drain", since = "1.6.0")]
    pub fn drain(&mut self) -> Drain<'_, T, A> {
        Drain { iter: self.data.drain(..) }
    }

    /// Drops all items from the binary heap.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::from([1, 3]);
    ///
    /// assert!(!heap.is_empty());
    ///
    /// heap.clear();
    ///
    /// assert!(heap.is_empty());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn clear(&mut self) {
        self.drain();
    }
}

/// Hole represents a hole in a slice, i.e., an index without a valid value
/// (because it was moved from or duplicated).
/// In drop, `Hole` will restore the slice by filling the hole
/// position with the value that was originally removed.
struct Hole<'a, T: 'a> {
    data: &'a mut [T],
    elt: ManuallyDrop<T>,
    pos: usize,
}

impl<'a, T> Hole<'a, T> {
    /// Creates a new `Hole` at index `pos`.
    ///
    /// Unsafe because pos must be within the data slice.
    #[inline]
    unsafe fn new(data: &'a mut [T], pos: usize) -> Self {
        debug_assert!(pos < data.len());
        // SAFE: pos should be inside the slice
        let elt = unsafe { ptr::read(data.get_unchecked(pos)) };
        Hole { data, elt: ManuallyDrop::new(elt), pos }
    }

    #[inline]
    fn pos(&self) -> usize {
        self.pos
    }

    /// Returns a reference to the element removed.
    #[inline]
    fn element(&self) -> &T {
        &self.elt
    }

    /// Returns a reference to the element at `index`.
    ///
    /// Unsafe because index must be within the data slice and not equal to pos.
    #[inline]
    unsafe fn get(&self, index: usize) -> &T {
        debug_assert!(index != self.pos);
        debug_assert!(index < self.data.len());
        unsafe { self.data.get_unchecked(index) }
    }

    /// Move hole to new location
    ///
    /// Unsafe because index must be within the data slice and not equal to pos.
    #[inline]
    unsafe fn move_to(&mut self, index: usize) {
        debug_assert!(index != self.pos);
        debug_assert!(index < self.data.len());
        unsafe {
            let ptr = self.data.as_mut_ptr();
            let index_ptr: *const _ = ptr.add(index);
            let hole_ptr = ptr.add(self.pos);
            ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
        }
        self.pos = index;
    }
}

impl<T> Drop for Hole<'_, T> {
    #[inline]
    fn drop(&mut self) {
        // fill the hole again
        unsafe {
            let pos = self.pos;
            ptr::copy_nonoverlapping(&*self.elt, self.data.get_unchecked_mut(pos), 1);
        }
    }
}

/// An iterator over the elements of a `BinaryHeap`.
///
/// This `struct` is created by [`BinaryHeap::iter()`]. See its
/// documentation for more.
#[must_use = "iterators are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, T: 'a> {
    iter: slice::Iter<'a, T>,
}

#[stable(feature = "default_iters_sequel", since = "1.82.0")]
impl<T> Default for Iter<'_, T> {
    /// Creates an empty `binary_heap::Iter`.
    ///
    /// ```
    /// # use std::collections::binary_heap;
    /// let iter: binary_heap::Iter<'_, u8> = Default::default();
    /// assert_eq!(iter.len(), 0);
    /// ```
    fn default() -> Self {
        Iter { iter: Default::default() }
    }
}

#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("Iter").field(&self.iter.as_slice()).finish()
    }
}

// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Iter<'_, T> {
    fn clone(&self) -> Self {
        Iter { iter: self.iter.clone() }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<&'a T> {
        self.iter.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }

    #[inline]
    fn last(self) -> Option<&'a T> {
        self.iter.last()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
    #[inline]
    fn next_back(&mut self) -> Option<&'a T> {
        self.iter.next_back()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for Iter<'_, T> {
    fn is_empty(&self) -> bool {
        self.iter.is_empty()
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Iter<'_, T> {}

/// An owning iterator over the elements of a `BinaryHeap`.
///
/// This `struct` is created by [`BinaryHeap::into_iter()`]
/// (provided by the [`IntoIterator`] trait). See its documentation for more.
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone)]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    iter: vec::IntoIter<T, A>,
}

impl<T, A: Allocator> IntoIter<T, A> {
    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn allocator(&self) -> &A {
        self.iter.allocator()
    }
}

#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.iter.as_slice()).finish()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        self.iter.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        self.iter.next_back()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
    fn is_empty(&self) -> bool {
        self.iter.is_empty()
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}

#[doc(hidden)]
#[unstable(issue = "none", feature = "trusted_fused")]
unsafe impl<T, A: Allocator> TrustedFused for IntoIter<T, A> {}

#[stable(feature = "default_iters", since = "1.70.0")]
impl<T> Default for IntoIter<T> {
    /// Creates an empty `binary_heap::IntoIter`.
    ///
    /// ```
    /// # use std::collections::binary_heap;
    /// let iter: binary_heap::IntoIter<u8> = Default::default();
    /// assert_eq!(iter.len(), 0);
    /// ```
    fn default() -> Self {
        IntoIter { iter: Default::default() }
    }
}

// In addition to the SAFETY invariants of the following three unsafe traits
// also refer to the vec::in_place_collect module documentation to get an overview
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    type Source = IntoIter<T, A>;

    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}

#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<I, A: Allocator> InPlaceIterable for IntoIter<I, A> {
    const EXPAND_BY: Option<NonZero<usize>> = NonZero::new(1);
    const MERGE_BY: Option<NonZero<usize>> = NonZero::new(1);
}

#[cfg(not(test))]
unsafe impl<I> AsVecIntoIter for IntoIter<I> {
    type Item = I;

    fn as_into_iter(&mut self) -> &mut vec::IntoIter<Self::Item> {
        &mut self.iter
    }
}

#[must_use = "iterators are lazy and do nothing unless consumed"]
#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
#[derive(Clone, Debug)]
pub struct IntoIterSorted<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    inner: BinaryHeap<T, A>,
}

impl<T, A: Allocator> IntoIterSorted<T, A> {
    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn allocator(&self) -> &A {
        self.inner.allocator()
    }
}

#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
impl<T: Ord, A: Allocator> Iterator for IntoIterSorted<T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        self.inner.pop()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = self.inner.len();
        (exact, Some(exact))
    }
}

#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
impl<T: Ord, A: Allocator> ExactSizeIterator for IntoIterSorted<T, A> {}

#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
impl<T: Ord, A: Allocator> FusedIterator for IntoIterSorted<T, A> {}

#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T: Ord, A: Allocator> TrustedLen for IntoIterSorted<T, A> {}

/// A draining iterator over the elements of a `BinaryHeap`, yielded in
/// arbitrary order.
///
/// This `struct` is created by [`BinaryHeap::drain()`]. See its
/// documentation for more.
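///
/// # Examples
///
/// A short example of draining a heap:
///
/// ```
/// use std::collections::BinaryHeap;
///
/// let mut heap = BinaryHeap::from([1, 2, 3]);
/// // Draining yields every element (in arbitrary order) and empties the heap.
/// assert_eq!(heap.drain().count(), 3);
/// assert!(heap.is_empty());
/// ```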
#[stable(feature = "drain", since = "1.6.0")]
#[derive(Debug)]
pub struct Drain<
    'a,
    T: 'a,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    iter: vec::Drain<'a, T, A>,
}

impl<T, A: Allocator> Drain<'_, T, A> {
    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn allocator(&self) -> &A {
        self.iter.allocator()
    }
}

#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> Iterator for Drain<'_, T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        self.iter.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> DoubleEndedIterator for Drain<'_, T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        self.iter.next_back()
    }
}

#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> ExactSizeIterator for Drain<'_, T, A> {
    fn is_empty(&self) -> bool {
        self.iter.is_empty()
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for Drain<'_, T, A> {}

/// A draining iterator over the elements of a `BinaryHeap`, yielded in
/// heap order, i.e. greatest first.
///
/// This `struct` is created by [`BinaryHeap::drain_sorted()`]. See its
/// documentation for more.
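///
/// # Examples
///
/// A minimal usage sketch (requires the unstable `binary_heap_drain_sorted`
/// feature):
///
/// ```
/// #![feature(binary_heap_drain_sorted)]
/// use std::collections::BinaryHeap;
///
/// let mut heap = BinaryHeap::from([1, 3, 2]);
/// // Elements are removed greatest-first, leaving the heap empty.
/// assert_eq!(heap.drain_sorted().collect::<Vec<_>>(), [3, 2, 1]);
/// assert!(heap.is_empty());
/// ```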
#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
#[derive(Debug)]
pub struct DrainSorted<
    'a,
    T: Ord,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    inner: &'a mut BinaryHeap<T, A>,
}

impl<'a, T: Ord, A: Allocator> DrainSorted<'a, T, A> {
    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn allocator(&self) -> &A {
        self.inner.allocator()
    }
}

#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
impl<'a, T: Ord, A: Allocator> Drop for DrainSorted<'a, T, A> {
    /// Removes heap elements in heap order.
    fn drop(&mut self) {
        struct DropGuard<'r, 'a, T: Ord, A: Allocator>(&'r mut DrainSorted<'a, T, A>);

        impl<'r, 'a, T: Ord, A: Allocator> Drop for DropGuard<'r, 'a, T, A> {
            fn drop(&mut self) {
                // Pop (and drop) the remaining elements, so the heap is left
                // empty even if an element's destructor panicked below.
                while self.0.inner.pop().is_some() {}
            }
        }

        while let Some(item) = self.inner.pop() {
            // Arm the guard before dropping `item`: if `drop(item)` panics,
            // the guard drains the rest; otherwise it is disarmed with
            // `mem::forget` and iteration continues.
            let guard = DropGuard(self);
            drop(item);
            mem::forget(guard);
        }
    }
}

#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
impl<T: Ord, A: Allocator> Iterator for DrainSorted<'_, T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        self.inner.pop()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = self.inner.len();
        (exact, Some(exact))
    }
}

#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
impl<T: Ord, A: Allocator> ExactSizeIterator for DrainSorted<'_, T, A> {}

#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
impl<T: Ord, A: Allocator> FusedIterator for DrainSorted<'_, T, A> {}

#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T: Ord, A: Allocator> TrustedLen for DrainSorted<'_, T, A> {}

#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
impl<T: Ord, A: Allocator> From<Vec<T, A>> for BinaryHeap<T, A> {
    /// Converts a `Vec<T>` into a `BinaryHeap<T>`.
    ///
    /// This conversion happens in-place, and has *O*(*n*) time complexity.
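    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    ///
    /// // The vector is reordered in place to satisfy the heap invariant.
    /// let mut heap = BinaryHeap::from(vec![1, 3, 2]);
    /// assert_eq!(heap.pop(), Some(3));
    /// ```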
    fn from(vec: Vec<T, A>) -> BinaryHeap<T, A> {
        let mut heap = BinaryHeap { data: vec };
        heap.rebuild();
        heap
    }
}

#[stable(feature = "std_collections_from_array", since = "1.56.0")]
impl<T: Ord, const N: usize> From<[T; N]> for BinaryHeap<T> {
    /// Converts a `[T; N]` into a `BinaryHeap<T>`.
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    ///
    /// let mut h1 = BinaryHeap::from([1, 4, 2, 3]);
    /// let mut h2: BinaryHeap<_> = [1, 4, 2, 3].into();
    /// while let Some((a, b)) = h1.pop().zip(h2.pop()) {
    ///     assert_eq!(a, b);
    /// }
    /// ```
    fn from(arr: [T; N]) -> Self {
        Self::from_iter(arr)
    }
}

#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
impl<T, A: Allocator> From<BinaryHeap<T, A>> for Vec<T, A> {
    /// Converts a `BinaryHeap<T>` into a `Vec<T>`.
    ///
    /// This conversion requires no data movement or allocation, and has
    /// constant time complexity.
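    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    ///
    /// let heap = BinaryHeap::from([1, 3, 2]);
    /// let vec = Vec::from(heap);
    /// // The heap's backing storage is taken as-is; for a non-empty heap
    /// // the greatest element is first.
    /// assert_eq!(vec[0], 3);
    /// ```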
    fn from(heap: BinaryHeap<T, A>) -> Vec<T, A> {
        heap.data
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> FromIterator<T> for BinaryHeap<T> {
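    /// Builds a `BinaryHeap` from the contents of an iterator.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    ///
    /// let heap: BinaryHeap<i32> = (1..=3).collect();
    /// assert_eq!(heap.peek(), Some(&3));
    /// ```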
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> BinaryHeap<T> {
        // Collect into a `Vec` first, then heapify it in place in O(n).
        BinaryHeap::from(iter.into_iter().collect::<Vec<_>>())
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> IntoIterator for BinaryHeap<T, A> {
    type Item = T;
    type IntoIter = IntoIter<T, A>;

    /// Creates a consuming iterator, that is, one that moves each value out of
    /// the binary heap in arbitrary order. The binary heap cannot be used
    /// after calling this.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let heap = BinaryHeap::from([1, 2, 3, 4]);
    ///
    /// // Print 1, 2, 3, 4 in arbitrary order
    /// for x in heap.into_iter() {
    ///     // x has type i32, not &i32
    ///     println!("{x}");
    /// }
    /// ```
    fn into_iter(self) -> IntoIter<T, A> {
        IntoIter { iter: self.data.into_iter() }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, A: Allocator> IntoIterator for &'a BinaryHeap<T, A> {
    type Item = &'a T;
    type IntoIter = Iter<'a, T>;

    /// Creates an iterator that yields references to the heap's elements in
    /// arbitrary order.
    fn into_iter(self) -> Iter<'a, T> {
        self.iter()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord, A: Allocator> Extend<T> for BinaryHeap<T, A> {
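    /// Extends the heap with the contents of `iter`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    ///
    /// let mut heap = BinaryHeap::from([1]);
    /// heap.extend([3, 2]);
    /// assert_eq!(heap.pop(), Some(3));
    /// ```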
    #[inline]
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        // Push everything onto the backing vector first; the guard rebuilds
        // the heap invariant for the appended tail when it drops, even if
        // `iter` panics mid-way.
        let guard = RebuildOnDrop { rebuild_from: self.len(), heap: self };
        guard.heap.data.extend(iter);
    }

    #[inline]
    fn extend_one(&mut self, item: T) {
        self.push(item);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
    }
}

#[stable(feature = "extend_ref", since = "1.2.0")]
impl<'a, T: 'a + Ord + Copy, A: Allocator> Extend<&'a T> for BinaryHeap<T, A> {
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        self.extend(iter.into_iter().cloned());
    }

    #[inline]
    fn extend_one(&mut self, &item: &'a T) {
        self.push(item);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
    }
}