From bf3d0247f75e282c765ef59b31f641b33cbfe8ce Mon Sep 17 00:00:00 2001
From: Stein Somers
Date: Tue, 8 Mar 2022 18:45:15 +0100
Subject: [PATCH] BTree: let `clear` recycle root memory

---
 library/alloc/src/collections/btree/map.rs       | 35 +++++++----
 .../alloc/src/collections/btree/map/tests.rs     |  4 +-
 .../alloc/src/collections/btree/navigate.rs      | 39 +++++++++---
 library/alloc/src/collections/btree/node.rs      | 63 +++++++++++++++----
 library/alloc/src/collections/btree/set.rs       |  1 +
 5 files changed, 109 insertions(+), 33 deletions(-)

diff --git a/library/alloc/src/collections/btree/map.rs b/library/alloc/src/collections/btree/map.rs
index f23980faa0417..fa7bde4cfebac 100644
--- a/library/alloc/src/collections/btree/map.rs
+++ b/library/alloc/src/collections/btree/map.rs
@@ -13,7 +13,7 @@ use crate::alloc::{Allocator, Global};
 
 use super::borrow::DormantMutRef;
 use super::dedup_sorted_iter::DedupSortedIter;
-use super::navigate::{LazyLeafRange, LeafRange};
+use super::navigate::{LazyLeafRange, LeafRange, RootVessel};
 use super::node::{self, marker, ForceResult::*, Handle, NodeRef, Root};
 use super::search::SearchResult::*;
 
@@ -559,6 +559,7 @@ impl<K, V> BTreeMap<K, V> {
 
 impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
     /// Clears the map, removing all elements.
+    /// Keeps a part of the allocated memory for reuse.
     ///
     /// # Examples
     ///
@@ -574,12 +575,18 @@
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn clear(&mut self) {
-        // avoid moving the allocator
-        mem::drop(BTreeMap {
-            root: mem::replace(&mut self.root, None),
-            length: mem::replace(&mut self.length, 0),
-            alloc: ManuallyDrop::new(&*self.alloc),
-        });
+        if let Some(root) = self.root.take() {
+            let mut iter = IntoIter {
+                range: root.into_dying().full_range(),
+                length: self.length,
+                alloc: unsafe { ManuallyDrop::take(&mut self.alloc) },
+            };
+            self.length = 0;
+            while let Some(kv) = iter.dying_next(Some(&mut self.root)) {
+                // SAFETY: we consume the dying handle immediately.
+                unsafe { kv.drop_key_val() };
+            }
+        }
     }
 
     /// Makes a new empty BTreeMap with a reasonable choice for B.
@@ -1606,14 +1613,14 @@ impl<K, V, A: Allocator + Clone> Drop for IntoIter<K, V, A> {
             fn drop(&mut self) {
                 // Continue the same loop we perform below. This only runs when unwinding, so we
                 // don't have to care about panics this time (they'll abort).
-                while let Some(kv) = self.0.dying_next() {
+                while let Some(kv) = self.0.dying_next(None) {
                     // SAFETY: we consume the dying handle immediately.
                     unsafe { kv.drop_key_val() };
                 }
             }
         }
 
-        while let Some(kv) = self.dying_next() {
+        while let Some(kv) = self.dying_next(None) {
             let guard = DropGuard(self);
             // SAFETY: we don't touch the tree before consuming the dying handle.
             unsafe { kv.drop_key_val() };
@@ -1625,11 +1632,15 @@
 impl<K, V, A: Allocator + Clone> IntoIter<K, V, A> {
     /// Core of a `next` method returning a dying KV handle,
     /// invalidated by further calls to this function and some others.
+    ///
+    /// If `root_recycling` is given some vessel, this method recycles the last
+    /// leaf and stores it as a fresh root in the vessel.
     fn dying_next(
         &mut self,
+        root_recycling: Option<&mut RootVessel<K, V>>,
     ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>> {
         if self.length == 0 {
-            self.range.deallocating_end(&self.alloc);
+            self.range.deallocating_end(&self.alloc, root_recycling);
             None
         } else {
             self.length -= 1;
@@ -1643,7 +1654,7 @@ impl<K, V, A: Allocator + Clone> IntoIter<K, V, A> {
         &mut self,
     ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>> {
         if self.length == 0 {
-            self.range.deallocating_end(&self.alloc);
+            self.range.deallocating_end(&self.alloc, None);
             None
         } else {
             self.length -= 1;
@@ -1658,7 +1669,7 @@ impl<K, V, A: Allocator + Clone> Iterator for IntoIter<K, V, A> {
 
     fn next(&mut self) -> Option<(K, V)> {
         // SAFETY: we consume the dying handle immediately.
-        self.dying_next().map(unsafe { |kv| kv.into_key_val() })
+        self.dying_next(None).map(unsafe { |kv| kv.into_key_val() })
     }
 
     fn size_hint(&self) -> (usize, Option<usize>) {
diff --git a/library/alloc/src/collections/btree/map/tests.rs b/library/alloc/src/collections/btree/map/tests.rs
index a4c24cd4593b0..d0d91ff62800e 100644
--- a/library/alloc/src/collections/btree/map/tests.rs
+++ b/library/alloc/src/collections/btree/map/tests.rs
@@ -1408,6 +1408,8 @@ fn test_bad_zst() {
 #[test]
 fn test_clear() {
     let mut map = BTreeMap::new();
+    map.clear();
+    assert_eq!(map.height(), None);
     for &len in &[MIN_INSERTS_HEIGHT_1, MIN_INSERTS_HEIGHT_2, 0, node::CAPACITY] {
         for i in 0..len {
             map.insert(i, ());
@@ -1415,7 +1417,7 @@
         assert_eq!(map.len(), len);
         map.clear();
         map.check();
-        assert_eq!(map.height(), None);
+        assert_eq!(map.height(), Some(0), "len={len}");
     }
 }
 
diff --git a/library/alloc/src/collections/btree/navigate.rs b/library/alloc/src/collections/btree/navigate.rs
index d44cb57618dfa..b7fa4480dec54 100644
--- a/library/alloc/src/collections/btree/navigate.rs
+++ b/library/alloc/src/collections/btree/navigate.rs
@@ -3,7 +3,7 @@ use core::hint;
 use core::ops::RangeBounds;
 use core::ptr;
 
-use super::node::{marker, ForceResult::*, Handle, NodeRef};
+use super::node::{marker, ForceResult::*, Handle, NodeRef, Root};
 use crate::alloc::Allocator;
 
 // `front` and `back` are always both `None` or both `Some`.
@@ -12,6 +12,8 @@ pub struct LeafRange<BorrowType, K, V> {
     back: Option<Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>>,
 }
 
+pub type RootVessel<K, V> = Option<Root<K, V>>;
+
 impl<'a, K: 'a, V: 'a> Clone for LeafRange<marker::Immut<'a>, K, V> {
     fn clone(&self) -> Self {
         LeafRange { front: self.front.clone(), back: self.back.clone() }
@@ -198,9 +200,14 @@ impl<K, V> LazyLeafRange<marker::Dying, K, V> {
     }
 
     #[inline]
-    pub fn deallocating_end<A: Allocator>(&mut self, alloc: &A) {
+    /// Fused: no harm if invoked multiple times on the same range object.
+    pub fn deallocating_end<A: Allocator>(
+        &mut self,
+        alloc: &A,
+        root_recycling: Option<&mut RootVessel<K, V>>,
+    ) {
         if let Some(front) = self.take_front() {
-            front.deallocating_end(alloc)
+            front.deallocating_end(alloc, root_recycling)
         }
     }
 }
@@ -501,10 +508,28 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
     /// both sides of the tree, and have hit the same edge. As it is intended
     /// only to be called when all keys and values have been returned,
     /// no cleanup is done on any of the keys or values.
-    fn deallocating_end<A: Allocator>(self, alloc: &A) {
-        let mut edge = self.forget_node_type();
-        while let Some(parent_edge) = unsafe { edge.into_node().deallocate_and_ascend(alloc) } {
-            edge = parent_edge.forget_node_type();
+    ///
+    /// If `root_recycling` is given some vessel, this method recycles the leaf
+    /// and stores it as a fresh root in the vessel, instead of deallocating it.
+    fn deallocating_end<A: Allocator>(
+        self,
+        alloc: &A,
+        root_recycling: Option<&mut RootVessel<K, V>>,
+    ) {
+        let leaf = self.into_node();
+        let mut parent_edge = match root_recycling {
+            None => unsafe { leaf.deallocate_and_ascend(alloc) },
+            Some(root_recycling) => {
+                let (leaf, parent_edge) = unsafe { leaf.recycle_and_ascend() };
+                *root_recycling = Some(leaf.forget_type());
+                parent_edge
+            }
+        };
+        loop {
+            parent_edge = match parent_edge {
+                Some(edge) => unsafe { edge.into_node().deallocate_and_ascend(alloc) },
+                None => return,
+            }
         }
     }
 }
diff --git a/library/alloc/src/collections/btree/node.rs b/library/alloc/src/collections/btree/node.rs
index 5ae0a554aeea6..11666016e19bd 100644
--- a/library/alloc/src/collections/btree/node.rs
+++ b/library/alloc/src/collections/btree/node.rs
@@ -394,20 +394,57 @@ impl<K, V> NodeRef<marker::Dying, K, V, marker::LeafOrInternal> {
         self,
         alloc: &A,
     ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::Internal>, marker::Edge>> {
-        let height = self.height;
-        let node = self.node;
-        let ret = self.ascend().ok();
-        unsafe {
-            alloc.deallocate(
-                node.cast(),
-                if height > 0 {
-                    Layout::new::<InternalNode<K, V>>()
-                } else {
-                    Layout::new::<LeafNode<K, V>>()
-                },
-            );
+        match self.force() {
+            ForceResult::Leaf(node) => unsafe { node.deallocate_and_ascend(alloc) },
+            ForceResult::Internal(node) => unsafe { node.deallocate_and_ascend(alloc) },
         }
-        ret
+    }
+}
+
+impl<K, V> NodeRef<marker::Dying, K, V, marker::Internal> {
+    /// Overload for internal nodes.
+    pub unsafe fn deallocate_and_ascend<A: Allocator>(
+        self,
+        alloc: &A,
+    ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::Internal>, marker::Edge>> {
+        debug_assert!(self.height > 0);
+        let node = self.node;
+        let parent_edge = self.ascend().ok();
+        unsafe { alloc.deallocate(node.cast(), Layout::new::<InternalNode<K, V>>()) };
+        parent_edge
+    }
+}
+
+impl<K, V> NodeRef<marker::Dying, K, V, marker::Leaf> {
+    /// Overload for leaf nodes.
+    pub unsafe fn deallocate_and_ascend<A: Allocator>(
+        self,
+        alloc: &A,
+    ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::Internal>, marker::Edge>> {
+        debug_assert!(self.height == 0);
+        let node = self.node;
+        let parent_edge = self.ascend().ok();
+        unsafe { alloc.deallocate(node.cast(), Layout::new::<LeafNode<K, V>>()) };
+        parent_edge
+    }
+}
+
+impl<K, V> NodeRef<marker::Dying, K, V, marker::Leaf> {
+    /// Similar to `ascend`, gets a reference to a node's parent node, but also
+    /// clears and returns the current leaf node. This is unsafe because that
+    /// leaf node will still be accessible from the dying tree, despite having
+    /// been reinitialized and being returned in an exclusive `NodeRef`.
+    pub unsafe fn recycle_and_ascend(
+        self,
+    ) -> (
+        NodeRef<marker::Owned, K, V, marker::Leaf>,
+        Option<Handle<NodeRef<marker::Dying, K, V, marker::Internal>, marker::Edge>>,
+    ) {
+        debug_assert!(self.height == 0);
+        let node = self.node;
+        let parent_edge = self.ascend().ok();
+        unsafe { LeafNode::init(node.as_ptr()) };
+        (NodeRef { height: 0, node, _marker: PhantomData }, parent_edge)
     }
 }
 
diff --git a/library/alloc/src/collections/btree/set.rs b/library/alloc/src/collections/btree/set.rs
index aeb5c30dba346..42dba110999c6 100644
--- a/library/alloc/src/collections/btree/set.rs
+++ b/library/alloc/src/collections/btree/set.rs
@@ -572,6 +572,7 @@ impl<T, A: Allocator + Clone> BTreeSet<T, A> {
     }
 
     /// Clears the set, removing all elements.
+    /// Keeps a part of the allocated memory for reuse.
    ///
    /// # Examples
    ///
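
The effect of the recycled root is not visible through the public API alone, but it can be observed with a counting allocator. The sketch below is not part of the patch: it assumes a nightly toolchain with the unstable `allocator_api` and `btreemap_alloc` features, and the `CountingAlloc` wrapper and the printed counts are illustrative only. With root recycling, the first insert after `clear` can reuse the kept leaf node instead of asking the allocator for a fresh root; without this change, that insert performs one extra allocation.

    #![feature(allocator_api, btreemap_alloc)]

    use std::alloc::{AllocError, Allocator, Global, Layout};
    use std::collections::BTreeMap;
    use std::ptr::NonNull;
    use std::sync::atomic::{AtomicUsize, Ordering};

    /// A `Global`-backed allocator that counts how many allocations it serves.
    /// (Illustrative helper, not part of the standard library.)
    #[derive(Clone)]
    struct CountingAlloc<'a> {
        allocations: &'a AtomicUsize,
    }

    unsafe impl Allocator for CountingAlloc<'_> {
        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            self.allocations.fetch_add(1, Ordering::Relaxed);
            Global.allocate(layout)
        }

        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
            unsafe { Global.deallocate(ptr, layout) }
        }
    }

    fn main() {
        let allocations = AtomicUsize::new(0);
        let mut map = BTreeMap::new_in(CountingAlloc { allocations: &allocations });

        // Build a tree that is more than one node tall.
        for i in 0..100 {
            map.insert(i, i);
        }
        let before_clear = allocations.load(Ordering::Relaxed);

        // `clear` drops all key-value pairs but keeps one leaf node as the new root.
        map.clear();
        map.insert(0, 0);
        let after_reinsert = allocations.load(Ordering::Relaxed);

        // With root recycling the two counts are expected to be equal; before this
        // change, the insert after `clear` would have to allocate a fresh root node.
        println!("allocations: {before_clear} before clear, {after_reinsert} after re-insert");
    }

The same counter, paired with matching bookkeeping in `deallocate`, could also confirm that `clear` still releases every node of the old tree except the single recycled leaf.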