Skip to content
Merged
Changes from 1 commit
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
5d4ff50
Update books
rustbot Mar 10, 2025
436959e
Add PeekMut::refresh
197g Mar 7, 2025
c170d0f
Elaborate param-env built for checking DispatchFromDyn for dyn compat
compiler-errors Mar 11, 2025
6133999
Fix `cargo vendor` not working without `rustc` installed
ehuss Mar 11, 2025
faa5b3f
Fix false-positive in `expr_or_init` and in the `invalid_from_utf8` lint
Urgau Mar 9, 2025
64d1433
Refactor pick2_mut & pick3_mut to use get_disjoint_mut
Eclips4 Mar 11, 2025
763db5d
Convert a delayed bug to a bug.
nnethercote Mar 5, 2025
fe04460
Remove an unnecessary `kw::Empty` check.
nnethercote Mar 6, 2025
0b2d706
Introduce `sym::dummy` and `Ident::dummy`.
nnethercote Mar 4, 2025
4eadaff
Convert a `kw::Empty` occurrence to `sym::dummy`.
nnethercote Mar 11, 2025
7398b39
Make panics more specific
Eclips4 Mar 11, 2025
d7029d7
Remove unused `OwnerNode::ident` method.
nnethercote Mar 7, 2025
ed10418
Inline and remove `State::print_item_type`.
nnethercote Mar 7, 2025
ee9ef82
Factor out some repeated code in `parse_item_impl`.
nnethercote Mar 7, 2025
c0cee43
Remove unnecessary lifetime from `PatInfo`.
nnethercote Jan 30, 2025
4198902
Rollup merge of #138161 - HeroicKatora:heap-peek-mut-refresh, r=dtolnay
matthiaskrgr Mar 12, 2025
143eb4f
Rollup merge of #138174 - compiler-errors:elaborate-unsize-self-pred,…
matthiaskrgr Mar 12, 2025
53e4e6f
Rollup merge of #138313 - rustbot:docs-update, r=jieyouxu
matthiaskrgr Mar 12, 2025
5833dfa
Rollup merge of #138347 - nnethercote:less-kw-Empty-2, r=compiler-errors
matthiaskrgr Mar 12, 2025
b849aa9
Rollup merge of #138360 - Urgau:fix-fp-expr_or_init, r=wesleywiser
matthiaskrgr Mar 12, 2025
2bdb10f
Rollup merge of #138372 - Eclips4:issue-138196, r=scottmcm
matthiaskrgr Mar 12, 2025
4c6edb1
Rollup merge of #138376 - nnethercote:hir-ItemKind-ident-precursors, …
matthiaskrgr Mar 12, 2025
76f9cda
Rollup merge of #138377 - nnethercote:rustc_hir_typeck, r=compiler-er…
matthiaskrgr Mar 12, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Add PeekMut::refresh
This improves the usability of heaps for priority-based work queues. In
certain scenarios, modifications on the most relevant or critical items are
performed until a condition that determines the work items have been
sufficiently addressed. The loop will repeatedly access the most critical
item and put it back in a sorted position when it is complete. Crucially,
due to the ordering invariant we know that all work was performed when the
completed item remains the most critical. Getting this information from the
heap position avoids a (potentially more costly) check on the item state
itself.

A customized `drop` with boolean result would avoid up to two more
comparisons performed in both the last no-op refresh and Drop code but this
occurs once in each execution of the above scenario whereas refresh occurs
any number of times. Also note that the comparison overhead of Drop is only
taken if the element is mutably inspected to determine the end condition,
i.e. not when refresh itself is the break condition.
  • Loading branch information
197g committed Mar 11, 2025
commit 436959e3f710d3161093bfa183e3c5deceff7ccf
82 changes: 78 additions & 4 deletions library/alloc/src/collections/binary_heap/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -359,6 +359,74 @@ impl<T: Ord, A: Allocator> DerefMut for PeekMut<'_, T, A> {
}

impl<'a, T: Ord, A: Allocator> PeekMut<'a, T, A> {
/// Sifts the current element to its new position.
///
/// Afterwards the `PeekMut` refers to the new maximum element. Returns
/// whether the peeked element changed, i.e. whether the modified value was
/// sifted away from the top of the heap.
///
/// ## Examples
///
/// The condition can be used to upper bound all elements in the heap. When only few elements
/// are affected, the heap's sort ensures this is faster than a reconstruction from the raw
/// element list and requires no additional allocation.
///
/// ```
/// #![feature(binary_heap_peek_mut_refresh)]
/// use std::collections::BinaryHeap;
///
/// let mut heap: BinaryHeap<u32> = (0..128).collect();
/// let mut peek = heap.peek_mut().unwrap();
///
/// loop {
/// *peek = 99;
///
/// if !peek.refresh() {
/// break;
/// }
/// }
///
/// // Post condition, this is now an upper bound.
/// assert!(*peek < 100);
/// ```
///
/// When the element remains the maximum after modification, the peek remains unchanged:
///
/// ```
/// #![feature(binary_heap_peek_mut_refresh)]
/// use std::collections::BinaryHeap;
///
/// let mut heap: BinaryHeap<u32> = [1, 2, 3].into();
/// let mut peek = heap.peek_mut().unwrap();
///
/// assert_eq!(*peek, 3);
/// *peek = 42;
///
/// // The modified value is still the maximum, so refresh reports no change.
/// assert!(!peek.refresh(), "42 is even larger than 3");
/// assert_eq!(*peek, 42);
/// ```
#[unstable(feature = "binary_heap_peek_mut_refresh", issue = "138355")]
#[must_use = "is equivalent to dropping and getting a new PeekMut except for return information"]
pub fn refresh(&mut self) -> bool {
// The length of the underlying heap is unchanged by sifting down. The value stored for leak
// amplification thus remains accurate. We erase the leak amplification firstly because the
// operation is then equivalent to constructing a new PeekMut and secondly this avoids any
// future complication where original_len being non-empty would be interpreted as the heap
// having been leak amplified instead of checking the heap itself.
//
// NOTE(review): `original_len` appears to be `Some` only after the element was mutably
// accessed (presumably set by the `DerefMut` impl for `PeekMut`) — confirm against that
// impl before relying on this in further changes.
if let Some(original_len) = self.original_len.take() {
// SAFETY: This is how many elements were in the Vec at the time of
// the BinaryHeap::peek_mut call.
unsafe { self.heap.data.set_len(original_len.get()) };

// The length of the heap did not change by sifting, upholding our own invariants.

// SAFETY: PeekMut is only instantiated for non-empty heaps.
// `sift_down` returns the element's new position; a non-zero position means the
// former peek was sifted away from the root, i.e. the maximum changed.
(unsafe { self.heap.sift_down(0) }) != 0
} else {
// The element was never mutably accessed, so the heap order is intact and the
// peek cannot have changed.
false
}
}

/// Removes the peeked value from the heap and returns it.
#[stable(feature = "binary_heap_peek_mut_pop", since = "1.18.0")]
pub fn pop(mut this: PeekMut<'a, T, A>) -> T {
Expand Down Expand Up @@ -670,6 +738,8 @@ impl<T: Ord, A: Allocator> BinaryHeap<T, A> {
/// # Safety
///
/// The caller must guarantee that `pos < self.len()`.
///
/// Returns the new position of the element.
unsafe fn sift_up(&mut self, start: usize, pos: usize) -> usize {
// Take out the value at `pos` and create a hole.
// SAFETY: The caller guarantees that pos < self.len()
Expand All @@ -696,10 +766,12 @@ impl<T: Ord, A: Allocator> BinaryHeap<T, A> {
/// Take an element at `pos` and move it down the heap,
/// while its children are larger.
///
/// Returns the new position of the element.
///
/// # Safety
///
/// The caller must guarantee that `pos < end <= self.len()`.
unsafe fn sift_down_range(&mut self, pos: usize, end: usize) {
unsafe fn sift_down_range(&mut self, pos: usize, end: usize) -> usize {
// SAFETY: The caller guarantees that pos < end <= self.len().
let mut hole = unsafe { Hole::new(&mut self.data, pos) };
let mut child = 2 * hole.pos() + 1;
Expand All @@ -719,7 +791,7 @@ impl<T: Ord, A: Allocator> BinaryHeap<T, A> {
// SAFETY: child is now either the old child or the old child+1
// We already proven that both are < self.len() and != hole.pos()
if hole.element() >= unsafe { hole.get(child) } {
return;
return hole.pos();
}

// SAFETY: same as above.
Expand All @@ -734,16 +806,18 @@ impl<T: Ord, A: Allocator> BinaryHeap<T, A> {
// child == 2 * hole.pos() + 1 != hole.pos().
unsafe { hole.move_to(child) };
}

hole.pos()
}

/// # Safety
///
/// The caller must guarantee that `pos < self.len()`.
unsafe fn sift_down(&mut self, pos: usize) {
unsafe fn sift_down(&mut self, pos: usize) -> usize {
let len = self.len();
// SAFETY: pos < len is guaranteed by the caller and
// obviously len = self.len() <= self.len().
unsafe { self.sift_down_range(pos, len) };
unsafe { self.sift_down_range(pos, len) }
}

/// Take an element at `pos` and move it all the way down the heap,
Expand Down
Loading