Commit ff86b27

Auto merge of #98178 - RalfJung:btree-alloc, r=thomcc
btree: avoid forcing the allocator to be a reference

The previous code forces the actual allocator used to be some `&A`. This generalizes the code to allow any `A: Clone`. If people truly want to use a reference, they can use `&A` themselves.

Fixes #98176
2 parents 529c4c7 + 3a1e114 commit ff86b27
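
A minimal sketch of what the bound change means for callers, using hypothetical stand-ins (`MyAlloc`, `bulk_push_old`, `bulk_push_new` are assumptions, not the library's internals). The real methods also bound `A: Allocator`, omitted here only because the allocator API is unstable; the `Clone` bound is the part this commit adds.

```rust
// Hypothetical stand-ins for the btree helpers; `MyAlloc` is an assumption,
// not a type from the diff.
#[derive(Clone)]
struct MyAlloc;

// Before: the helper forced every caller to pass a shared reference.
fn bulk_push_old<A>(alloc: &A, items: &[i32]) {
    for _item in items {
        let _handle = alloc; // a `&A` can be reused freely
    }
}

// After: the helper takes `A: Clone` by value and clones wherever the old
// code reused the reference (mirroring the `alloc.clone()` calls in the diff).
fn bulk_push_new<A: Clone>(alloc: A, items: &[i32]) {
    for _item in items {
        let _handle = alloc.clone();
    }
}

fn main() {
    bulk_push_old(&MyAlloc, &[1, 2, 3]);
    bulk_push_new(MyAlloc, &[1, 2, 3]); // allocator passed by value
    bulk_push_new(&MyAlloc, &[1, 2, 3]); // `&MyAlloc` is `Copy`, hence `Clone`
}
```
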

10 files changed: +270 −241 lines changed

library/alloc/src/collections/btree/append.rs

+6 −6
@@ -15,12 +15,12 @@ impl<K, V> Root<K, V> {
     /// a `BTreeMap`, both iterators should produce keys in strictly ascending
     /// order, each greater than all keys in the tree, including any keys
     /// already in the tree upon entry.
-    pub fn append_from_sorted_iters<I, A: Allocator>(
+    pub fn append_from_sorted_iters<I, A: Allocator + Clone>(
         &mut self,
         left: I,
         right: I,
         length: &mut usize,
-        alloc: &A,
+        alloc: A,
     ) where
         K: Ord,
         I: Iterator<Item = (K, V)> + FusedIterator,
@@ -35,7 +35,7 @@ impl<K, V> Root<K, V> {
     /// Pushes all key-value pairs to the end of the tree, incrementing a
     /// `length` variable along the way. The latter makes it easier for the
     /// caller to avoid a leak when the iterator panicks.
-    pub fn bulk_push<I, A: Allocator>(&mut self, iter: I, length: &mut usize, alloc: &A)
+    pub fn bulk_push<I, A: Allocator + Clone>(&mut self, iter: I, length: &mut usize, alloc: A)
     where
         I: Iterator<Item = (K, V)>,
     {
@@ -64,17 +64,17 @@ impl<K, V> Root<K, V> {
                         }
                         Err(_) => {
                             // We are at the top, create a new root node and push there.
-                            open_node = self.push_internal_level(alloc);
+                            open_node = self.push_internal_level(alloc.clone());
                             break;
                         }
                     }
                 }

                 // Push key-value pair and new right subtree.
                 let tree_height = open_node.height() - 1;
-                let mut right_tree = Root::new(alloc);
+                let mut right_tree = Root::new(alloc.clone());
                 for _ in 0..tree_height {
-                    right_tree.push_internal_level(alloc);
+                    right_tree.push_internal_level(alloc.clone());
                 }
                 open_node.push(key, value, right_tree);

library/alloc/src/collections/btree/fix.rs

+20 −20
@@ -7,9 +7,9 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
     /// sibling. If successful but at the cost of shrinking the parent node,
     /// returns that shrunk parent node. Returns an `Err` if the node is
     /// an empty root.
-    fn fix_node_through_parent<A: Allocator>(
+    fn fix_node_through_parent<A: Allocator + Clone>(
         self,
-        alloc: &A,
+        alloc: A,
     ) -> Result<Option<NodeRef<marker::Mut<'a>, K, V, marker::Internal>>, Self> {
         let len = self.len();
         if len >= MIN_LEN {
@@ -54,9 +54,9 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
     ///
     /// This method does not expect ancestors to already be underfull upon entry
     /// and panics if it encounters an empty ancestor.
-    pub fn fix_node_and_affected_ancestors<A: Allocator>(mut self, alloc: &A) -> bool {
+    pub fn fix_node_and_affected_ancestors<A: Allocator + Clone>(mut self, alloc: A) -> bool {
         loop {
-            match self.fix_node_through_parent(alloc) {
+            match self.fix_node_through_parent(alloc.clone()) {
                 Ok(Some(parent)) => self = parent.forget_type(),
                 Ok(None) => return true,
                 Err(_) => return false,
@@ -67,28 +67,28 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {

 impl<K, V> Root<K, V> {
     /// Removes empty levels on the top, but keeps an empty leaf if the entire tree is empty.
-    pub fn fix_top<A: Allocator>(&mut self, alloc: &A) {
+    pub fn fix_top<A: Allocator + Clone>(&mut self, alloc: A) {
         while self.height() > 0 && self.len() == 0 {
-            self.pop_internal_level(alloc);
+            self.pop_internal_level(alloc.clone());
         }
     }

     /// Stocks up or merge away any underfull nodes on the right border of the
     /// tree. The other nodes, those that are not the root nor a rightmost edge,
     /// must already have at least MIN_LEN elements.
-    pub fn fix_right_border<A: Allocator>(&mut self, alloc: &A) {
-        self.fix_top(alloc);
+    pub fn fix_right_border<A: Allocator + Clone>(&mut self, alloc: A) {
+        self.fix_top(alloc.clone());
         if self.len() > 0 {
-            self.borrow_mut().last_kv().fix_right_border_of_right_edge(alloc);
+            self.borrow_mut().last_kv().fix_right_border_of_right_edge(alloc.clone());
             self.fix_top(alloc);
         }
     }

     /// The symmetric clone of `fix_right_border`.
-    pub fn fix_left_border<A: Allocator>(&mut self, alloc: &A) {
-        self.fix_top(alloc);
+    pub fn fix_left_border<A: Allocator + Clone>(&mut self, alloc: A) {
+        self.fix_top(alloc.clone());
         if self.len() > 0 {
-            self.borrow_mut().first_kv().fix_left_border_of_left_edge(alloc);
+            self.borrow_mut().first_kv().fix_left_border_of_left_edge(alloc.clone());
             self.fix_top(alloc);
         }
     }
@@ -115,16 +115,16 @@ impl<K, V> Root<K, V> {
 }

 impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV> {
-    fn fix_left_border_of_left_edge<A: Allocator>(mut self, alloc: &A) {
+    fn fix_left_border_of_left_edge<A: Allocator + Clone>(mut self, alloc: A) {
         while let Internal(internal_kv) = self.force() {
-            self = internal_kv.fix_left_child(alloc).first_kv();
+            self = internal_kv.fix_left_child(alloc.clone()).first_kv();
             debug_assert!(self.reborrow().into_node().len() > MIN_LEN);
         }
     }

-    fn fix_right_border_of_right_edge<A: Allocator>(mut self, alloc: &A) {
+    fn fix_right_border_of_right_edge<A: Allocator + Clone>(mut self, alloc: A) {
         while let Internal(internal_kv) = self.force() {
-            self = internal_kv.fix_right_child(alloc).last_kv();
+            self = internal_kv.fix_right_child(alloc.clone()).last_kv();
             debug_assert!(self.reborrow().into_node().len() > MIN_LEN);
         }
     }
@@ -135,9 +135,9 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
     /// provisions an extra element to allow merging its children in turn
     /// without becoming underfull.
     /// Returns the left child.
-    fn fix_left_child<A: Allocator>(
+    fn fix_left_child<A: Allocator + Clone>(
         self,
-        alloc: &A,
+        alloc: A,
     ) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
         let mut internal_kv = self.consider_for_balancing();
         let left_len = internal_kv.left_child_len();
@@ -158,9 +158,9 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
     /// provisions an extra element to allow merging its children in turn
     /// without becoming underfull.
     /// Returns wherever the right child ended up.
-    fn fix_right_child<A: Allocator>(
+    fn fix_right_child<A: Allocator + Clone>(
         self,
-        alloc: &A,
+        alloc: A,
     ) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
         let mut internal_kv = self.consider_for_balancing();
         let right_len = internal_kv.right_child_len();
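
The fix.rs hunks all follow the same pattern: pass `alloc.clone()` to every call that needs its own handle, and hand over `alloc` itself at the last use. A minimal sketch of that pattern with hypothetical stand-ins (`Alloc`, `Tree`, and the method names below are assumptions, not the library's internals):

```rust
#[derive(Clone)]
struct Alloc; // stands in for an allocator handle

struct Tree;

impl Tree {
    fn fix_top<A: Clone>(&mut self, _alloc: A) {}
    fn fix_right_edge<A: Clone>(&mut self, _alloc: A) {}

    // Mirrors `fix_right_border`: every use but the last gets `alloc.clone()`;
    // the final call consumes `alloc` by value, so no clone is wasted.
    fn fix_right_border<A: Clone>(&mut self, alloc: A) {
        self.fix_top(alloc.clone());
        self.fix_right_edge(alloc.clone());
        self.fix_top(alloc); // last use: pass the handle itself
    }
}

fn main() {
    let mut tree = Tree;
    tree.fix_right_border(Alloc);
}
```
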
