Skip to content

Commit 55479de

Browse files
authored Jun 19, 2020
Rollup merge of rust-lang#72709 - LeSeulArtichaut:unsafe-liballoc, r=nikomatsakis
`#[deny(unsafe_op_in_unsafe_fn)]` in liballoc This PR proposes to make use of the new `unsafe_op_in_unsafe_fn` lint, i.e. to no longer consider the body of an unsafe function as an unsafe block, and to require an explicit unsafe block to perform unsafe operations. This was first (partly) suggested by @Mark-Simulacrum in rust-lang#69245 (comment) Tracking issue for the feature: rust-lang#71668. ~~Blocked on rust-lang#71862.~~ r? @Mark-Simulacrum cc @nikomatsakis can you confirm that those changes are desirable? Should I restrict it to only BTree for the moment?
2 parents 85e1c3b + 7b63986 commit 55479de

19 files changed

+387
-257
lines changed
 

‎src/liballoc/alloc.rs

+28-17
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,7 @@ pub struct Global;
7777
#[stable(feature = "global_alloc", since = "1.28.0")]
7878
#[inline]
7979
pub unsafe fn alloc(layout: Layout) -> *mut u8 {
80-
__rust_alloc(layout.size(), layout.align())
80+
unsafe { __rust_alloc(layout.size(), layout.align()) }
8181
}
8282

8383
/// Deallocate memory with the global allocator.
@@ -99,7 +99,7 @@ pub unsafe fn alloc(layout: Layout) -> *mut u8 {
9999
#[stable(feature = "global_alloc", since = "1.28.0")]
100100
#[inline]
101101
pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
102-
__rust_dealloc(ptr, layout.size(), layout.align())
102+
unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) }
103103
}
104104

105105
/// Reallocate memory with the global allocator.
@@ -121,7 +121,7 @@ pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
121121
#[stable(feature = "global_alloc", since = "1.28.0")]
122122
#[inline]
123123
pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
124-
__rust_realloc(ptr, layout.size(), layout.align(), new_size)
124+
unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) }
125125
}
126126

127127
/// Allocate zero-initialized memory with the global allocator.
@@ -158,7 +158,7 @@ pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8
158158
#[stable(feature = "global_alloc", since = "1.28.0")]
159159
#[inline]
160160
pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
161-
__rust_alloc_zeroed(layout.size(), layout.align())
161+
unsafe { __rust_alloc_zeroed(layout.size(), layout.align()) }
162162
}
163163

164164
#[unstable(feature = "allocator_api", issue = "32838")]
@@ -183,7 +183,7 @@ unsafe impl AllocRef for Global {
183183
#[inline]
184184
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
185185
if layout.size() != 0 {
186-
dealloc(ptr.as_ptr(), layout)
186+
unsafe { dealloc(ptr.as_ptr(), layout) }
187187
}
188188
}
189189

@@ -209,16 +209,21 @@ unsafe impl AllocRef for Global {
209209
match placement {
210210
ReallocPlacement::InPlace => Err(AllocErr),
211211
ReallocPlacement::MayMove if layout.size() == 0 => {
212-
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
212+
let new_layout =
213+
unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) };
213214
self.alloc(new_layout, init)
214215
}
215216
ReallocPlacement::MayMove => {
216217
// `realloc` probably checks for `new_size > size` or something similar.
217-
intrinsics::assume(new_size > size);
218-
let ptr = realloc(ptr.as_ptr(), layout, new_size);
218+
let ptr = unsafe {
219+
intrinsics::assume(new_size > size);
220+
realloc(ptr.as_ptr(), layout, new_size)
221+
};
219222
let memory =
220223
MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size };
221-
init.init_offset(memory, size);
224+
unsafe {
225+
init.init_offset(memory, size);
226+
}
222227
Ok(memory)
223228
}
224229
}
@@ -245,13 +250,17 @@ unsafe impl AllocRef for Global {
245250
match placement {
246251
ReallocPlacement::InPlace => Err(AllocErr),
247252
ReallocPlacement::MayMove if new_size == 0 => {
248-
self.dealloc(ptr, layout);
253+
unsafe {
254+
self.dealloc(ptr, layout);
255+
}
249256
Ok(MemoryBlock { ptr: layout.dangling(), size: 0 })
250257
}
251258
ReallocPlacement::MayMove => {
252259
// `realloc` probably checks for `new_size < size` or something similar.
253-
intrinsics::assume(new_size < size);
254-
let ptr = realloc(ptr.as_ptr(), layout, new_size);
260+
let ptr = unsafe {
261+
intrinsics::assume(new_size < size);
262+
realloc(ptr.as_ptr(), layout, new_size)
263+
};
255264
Ok(MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size })
256265
}
257266
}
@@ -264,7 +273,7 @@ unsafe impl AllocRef for Global {
264273
#[lang = "exchange_malloc"]
265274
#[inline]
266275
unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
267-
let layout = Layout::from_size_align_unchecked(size, align);
276+
let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
268277
match Global.alloc(layout, AllocInit::Uninitialized) {
269278
Ok(memory) => memory.ptr.as_ptr(),
270279
Err(_) => handle_alloc_error(layout),
@@ -279,10 +288,12 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
279288
// For example if `Box` is changed to `struct Box<T: ?Sized, A: AllocRef>(Unique<T>, A)`,
280289
// this function has to be changed to `fn box_free<T: ?Sized, A: AllocRef>(Unique<T>, A)` as well.
281290
pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
282-
let size = size_of_val(ptr.as_ref());
283-
let align = min_align_of_val(ptr.as_ref());
284-
let layout = Layout::from_size_align_unchecked(size, align);
285-
Global.dealloc(ptr.cast().into(), layout)
291+
unsafe {
292+
let size = size_of_val(ptr.as_ref());
293+
let align = min_align_of_val(ptr.as_ref());
294+
let layout = Layout::from_size_align_unchecked(size, align);
295+
Global.dealloc(ptr.cast().into(), layout)
296+
}
286297
}
287298

288299
/// Abort on memory allocation error or failure.

‎src/liballoc/boxed.rs

+3-3
Original file line numberDiff line numberDiff line change
@@ -311,7 +311,7 @@ impl<T> Box<mem::MaybeUninit<T>> {
311311
#[unstable(feature = "new_uninit", issue = "63291")]
312312
#[inline]
313313
pub unsafe fn assume_init(self) -> Box<T> {
314-
Box::from_raw(Box::into_raw(self) as *mut T)
314+
unsafe { Box::from_raw(Box::into_raw(self) as *mut T) }
315315
}
316316
}
317317

@@ -349,7 +349,7 @@ impl<T> Box<[mem::MaybeUninit<T>]> {
349349
#[unstable(feature = "new_uninit", issue = "63291")]
350350
#[inline]
351351
pub unsafe fn assume_init(self) -> Box<[T]> {
352-
Box::from_raw(Box::into_raw(self) as *mut [T])
352+
unsafe { Box::from_raw(Box::into_raw(self) as *mut [T]) }
353353
}
354354
}
355355

@@ -393,7 +393,7 @@ impl<T: ?Sized> Box<T> {
393393
#[stable(feature = "box_raw", since = "1.4.0")]
394394
#[inline]
395395
pub unsafe fn from_raw(raw: *mut T) -> Self {
396-
Box(Unique::new_unchecked(raw))
396+
Box(unsafe { Unique::new_unchecked(raw) })
397397
}
398398

399399
/// Consumes the `Box`, returning a wrapped raw pointer.

‎src/liballoc/collections/binary_heap.rs

+7-5
Original file line numberDiff line numberDiff line change
@@ -1003,7 +1003,7 @@ impl<'a, T> Hole<'a, T> {
10031003
unsafe fn new(data: &'a mut [T], pos: usize) -> Self {
10041004
debug_assert!(pos < data.len());
10051005
// SAFE: pos should be inside the slice
1006-
let elt = ptr::read(data.get_unchecked(pos));
1006+
let elt = unsafe { ptr::read(data.get_unchecked(pos)) };
10071007
Hole { data, elt: ManuallyDrop::new(elt), pos }
10081008
}
10091009

@@ -1025,7 +1025,7 @@ impl<'a, T> Hole<'a, T> {
10251025
unsafe fn get(&self, index: usize) -> &T {
10261026
debug_assert!(index != self.pos);
10271027
debug_assert!(index < self.data.len());
1028-
self.data.get_unchecked(index)
1028+
unsafe { self.data.get_unchecked(index) }
10291029
}
10301030

10311031
/// Move hole to new location
@@ -1035,9 +1035,11 @@ impl<'a, T> Hole<'a, T> {
10351035
unsafe fn move_to(&mut self, index: usize) {
10361036
debug_assert!(index != self.pos);
10371037
debug_assert!(index < self.data.len());
1038-
let index_ptr: *const _ = self.data.get_unchecked(index);
1039-
let hole_ptr = self.data.get_unchecked_mut(self.pos);
1040-
ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
1038+
unsafe {
1039+
let index_ptr: *const _ = self.data.get_unchecked(index);
1040+
let hole_ptr = self.data.get_unchecked_mut(self.pos);
1041+
ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
1042+
}
10411043
self.pos = index;
10421044
}
10431045
}

‎src/liballoc/collections/btree/map.rs

+5-5
Original file line numberDiff line numberDiff line change
@@ -1725,7 +1725,7 @@ impl<'a, K: 'a, V: 'a> DrainFilterInner<'a, K, V> {
17251725
&mut self,
17261726
) -> Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>> {
17271727
let edge = self.cur_leaf_edge.as_ref()?;
1728-
ptr::read(edge).next_kv().ok()
1728+
unsafe { ptr::read(edge).next_kv().ok() }
17291729
}
17301730

17311731
/// Implementation of a typical `DrainFilter::next` method, given the predicate.
@@ -1808,7 +1808,7 @@ impl<'a, K, V> Range<'a, K, V> {
18081808
}
18091809

18101810
unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
1811-
unwrap_unchecked(self.front.as_mut()).next_unchecked()
1811+
unsafe { unwrap_unchecked(self.front.as_mut()).next_unchecked() }
18121812
}
18131813
}
18141814

@@ -1821,7 +1821,7 @@ impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> {
18211821

18221822
impl<'a, K, V> Range<'a, K, V> {
18231823
unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
1824-
unwrap_unchecked(self.back.as_mut()).next_back_unchecked()
1824+
unsafe { unwrap_unchecked(self.back.as_mut()).next_back_unchecked() }
18251825
}
18261826
}
18271827

@@ -1859,7 +1859,7 @@ impl<'a, K, V> RangeMut<'a, K, V> {
18591859
}
18601860

18611861
unsafe fn next_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
1862-
unwrap_unchecked(self.front.as_mut()).next_unchecked()
1862+
unsafe { unwrap_unchecked(self.front.as_mut()).next_unchecked() }
18631863
}
18641864
}
18651865

@@ -1880,7 +1880,7 @@ impl<K, V> FusedIterator for RangeMut<'_, K, V> {}
18801880

18811881
impl<'a, K, V> RangeMut<'a, K, V> {
18821882
unsafe fn next_back_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
1883-
unwrap_unchecked(self.back.as_mut()).next_back_unchecked()
1883+
unsafe { unwrap_unchecked(self.back.as_mut()).next_back_unchecked() }
18841884
}
18851885
}
18861886

‎src/liballoc/collections/btree/mod.rs

+3-1
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,9 @@ pub unsafe fn unwrap_unchecked<T>(val: Option<T>) -> T {
1919
if cfg!(debug_assertions) {
2020
panic!("'unchecked' unwrap on None in BTreeMap");
2121
} else {
22-
core::intrinsics::unreachable();
22+
unsafe {
23+
core::intrinsics::unreachable();
24+
}
2325
}
2426
})
2527
}

‎src/liballoc/collections/btree/navigate.rs

+58-42
Original file line numberDiff line numberDiff line change
@@ -64,8 +64,10 @@ macro_rules! def_next_kv_uncheched_dealloc {
6464
edge = match edge.$adjacent_kv() {
6565
Ok(internal_kv) => return internal_kv,
6666
Err(last_edge) => {
67-
let parent_edge = last_edge.into_node().deallocate_and_ascend();
68-
unwrap_unchecked(parent_edge).forget_node_type()
67+
unsafe {
68+
let parent_edge = last_edge.into_node().deallocate_and_ascend();
69+
unwrap_unchecked(parent_edge).forget_node_type()
70+
}
6971
}
7072
}
7173
}
@@ -82,9 +84,11 @@ def_next_kv_uncheched_dealloc! {unsafe fn next_back_kv_unchecked_dealloc: left_k
8284
/// Safety: The change closure must not panic.
8385
#[inline]
8486
unsafe fn replace<T, R>(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R {
85-
let value = ptr::read(v);
87+
let value = unsafe { ptr::read(v) };
8688
let (new_value, ret) = change(value);
87-
ptr::write(v, new_value);
89+
unsafe {
90+
ptr::write(v, new_value);
91+
}
8892
ret
8993
}
9094

@@ -93,22 +97,26 @@ impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Ed
9397
/// key and value in between.
9498
/// Unsafe because the caller must ensure that the leaf edge is not the last one in the tree.
9599
pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
96-
replace(self, |leaf_edge| {
97-
let kv = leaf_edge.next_kv();
98-
let kv = unwrap_unchecked(kv.ok());
99-
(kv.next_leaf_edge(), kv.into_kv())
100-
})
100+
unsafe {
101+
replace(self, |leaf_edge| {
102+
let kv = leaf_edge.next_kv();
103+
let kv = unwrap_unchecked(kv.ok());
104+
(kv.next_leaf_edge(), kv.into_kv())
105+
})
106+
}
101107
}
102108

103109
/// Moves the leaf edge handle to the previous leaf edge and returns references to the
104110
/// key and value in between.
105111
/// Unsafe because the caller must ensure that the leaf edge is not the first one in the tree.
106112
pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
107-
replace(self, |leaf_edge| {
108-
let kv = leaf_edge.next_back_kv();
109-
let kv = unwrap_unchecked(kv.ok());
110-
(kv.next_back_leaf_edge(), kv.into_kv())
111-
})
113+
unsafe {
114+
replace(self, |leaf_edge| {
115+
let kv = leaf_edge.next_back_kv();
116+
let kv = unwrap_unchecked(kv.ok());
117+
(kv.next_back_leaf_edge(), kv.into_kv())
118+
})
119+
}
112120
}
113121
}
114122

@@ -119,14 +127,16 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge
119127
/// - The caller must ensure that the leaf edge is not the last one in the tree.
120128
/// - Using the updated handle may well invalidate the returned references.
121129
pub unsafe fn next_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
122-
let kv = replace(self, |leaf_edge| {
123-
let kv = leaf_edge.next_kv();
124-
let kv = unwrap_unchecked(kv.ok());
125-
(ptr::read(&kv).next_leaf_edge(), kv)
126-
});
127-
// Doing the descend (and perhaps another move) invalidates the references
128-
// returned by `into_kv_mut`, so we have to do this last.
129-
kv.into_kv_mut()
130+
unsafe {
131+
let kv = replace(self, |leaf_edge| {
132+
let kv = leaf_edge.next_kv();
133+
let kv = unwrap_unchecked(kv.ok());
134+
(ptr::read(&kv).next_leaf_edge(), kv)
135+
});
136+
// Doing the descend (and perhaps another move) invalidates the references
137+
// returned by `into_kv_mut`, so we have to do this last.
138+
kv.into_kv_mut()
139+
}
130140
}
131141

132142
/// Moves the leaf edge handle to the previous leaf and returns references to the
@@ -135,14 +145,16 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge
135145
/// - The caller must ensure that the leaf edge is not the first one in the tree.
136146
/// - Using the updated handle may well invalidate the returned references.
137147
pub unsafe fn next_back_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
138-
let kv = replace(self, |leaf_edge| {
139-
let kv = leaf_edge.next_back_kv();
140-
let kv = unwrap_unchecked(kv.ok());
141-
(ptr::read(&kv).next_back_leaf_edge(), kv)
142-
});
143-
// Doing the descend (and perhaps another move) invalidates the references
144-
// returned by `into_kv_mut`, so we have to do this last.
145-
kv.into_kv_mut()
148+
unsafe {
149+
let kv = replace(self, |leaf_edge| {
150+
let kv = leaf_edge.next_back_kv();
151+
let kv = unwrap_unchecked(kv.ok());
152+
(ptr::read(&kv).next_back_leaf_edge(), kv)
153+
});
154+
// Doing the descend (and perhaps another move) invalidates the references
155+
// returned by `into_kv_mut`, so we have to do this last.
156+
kv.into_kv_mut()
157+
}
146158
}
147159
}
148160

@@ -159,12 +171,14 @@ impl<K, V> Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge> {
159171
/// if the two preconditions above hold.
160172
/// - Using the updated handle may well invalidate the returned references.
161173
pub unsafe fn next_unchecked(&mut self) -> (K, V) {
162-
replace(self, |leaf_edge| {
163-
let kv = next_kv_unchecked_dealloc(leaf_edge);
164-
let k = ptr::read(kv.reborrow().into_kv().0);
165-
let v = ptr::read(kv.reborrow().into_kv().1);
166-
(kv.next_leaf_edge(), (k, v))
167-
})
174+
unsafe {
175+
replace(self, |leaf_edge| {
176+
let kv = next_kv_unchecked_dealloc(leaf_edge);
177+
let k = ptr::read(kv.reborrow().into_kv().0);
178+
let v = ptr::read(kv.reborrow().into_kv().1);
179+
(kv.next_leaf_edge(), (k, v))
180+
})
181+
}
168182
}
169183

170184
/// Moves the leaf edge handle to the previous leaf edge and returns the key
@@ -179,12 +193,14 @@ impl<K, V> Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge> {
179193
/// if the two preconditions above hold.
180194
/// - Using the updated handle may well invalidate the returned references.
181195
pub unsafe fn next_back_unchecked(&mut self) -> (K, V) {
182-
replace(self, |leaf_edge| {
183-
let kv = next_back_kv_unchecked_dealloc(leaf_edge);
184-
let k = ptr::read(kv.reborrow().into_kv().0);
185-
let v = ptr::read(kv.reborrow().into_kv().1);
186-
(kv.next_back_leaf_edge(), (k, v))
187-
})
196+
unsafe {
197+
replace(self, |leaf_edge| {
198+
let kv = next_back_kv_unchecked_dealloc(leaf_edge);
199+
let k = ptr::read(kv.reborrow().into_kv().0);
200+
let v = ptr::read(kv.reborrow().into_kv().1);
201+
(kv.next_back_leaf_edge(), (k, v))
202+
})
203+
}
188204
}
189205
}
190206

‎src/liballoc/collections/btree/node.rs

+32-22
Original file line numberDiff line numberDiff line change
@@ -107,7 +107,7 @@ impl<K, V> InternalNode<K, V> {
107107
/// `len` of 0), there must be one initialized and valid edge. This function does not set up
108108
/// such an edge.
109109
unsafe fn new() -> Self {
110-
InternalNode { data: LeafNode::new(), edges: [MaybeUninit::UNINIT; 2 * B] }
110+
InternalNode { data: unsafe { LeafNode::new() }, edges: [MaybeUninit::UNINIT; 2 * B] }
111111
}
112112
}
113113

@@ -131,7 +131,7 @@ impl<K, V> BoxedNode<K, V> {
131131
}
132132

133133
unsafe fn from_ptr(ptr: NonNull<LeafNode<K, V>>) -> Self {
134-
BoxedNode { ptr: Unique::new_unchecked(ptr.as_ptr()) }
134+
BoxedNode { ptr: unsafe { Unique::new_unchecked(ptr.as_ptr()) } }
135135
}
136136

137137
fn as_ptr(&self) -> NonNull<LeafNode<K, V>> {
@@ -392,14 +392,16 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
392392
let height = self.height;
393393
let node = self.node;
394394
let ret = self.ascend().ok();
395-
Global.dealloc(
396-
node.cast(),
397-
if height > 0 {
398-
Layout::new::<InternalNode<K, V>>()
399-
} else {
400-
Layout::new::<LeafNode<K, V>>()
401-
},
402-
);
395+
unsafe {
396+
Global.dealloc(
397+
node.cast(),
398+
if height > 0 {
399+
Layout::new::<InternalNode<K, V>>()
400+
} else {
401+
Layout::new::<LeafNode<K, V>>()
402+
},
403+
);
404+
}
403405
ret
404406
}
405407
}
@@ -565,7 +567,7 @@ impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
565567
debug_assert!(first <= self.len());
566568
debug_assert!(after_last <= self.len() + 1);
567569
for i in first..after_last {
568-
Handle::new_edge(self.reborrow_mut(), i).correct_parent_link();
570+
unsafe { Handle::new_edge(self.reborrow_mut(), i) }.correct_parent_link();
569571
}
570572
}
571573

@@ -789,7 +791,7 @@ impl<'a, K, V, NodeType, HandleType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeT
789791
&mut self,
790792
) -> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
791793
// We can't use Handle::new_kv or Handle::new_edge because we don't know our type
792-
Handle { node: self.node.reborrow_mut(), idx: self.idx, _marker: PhantomData }
794+
Handle { node: unsafe { self.node.reborrow_mut() }, idx: self.idx, _marker: PhantomData }
793795
}
794796
}
795797

@@ -885,7 +887,7 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
885887
unsafe fn cast_unchecked<NewType>(
886888
&mut self,
887889
) -> Handle<NodeRef<marker::Mut<'_>, K, V, NewType>, marker::Edge> {
888-
Handle::new_edge(self.node.cast_unchecked(), self.idx)
890+
unsafe { Handle::new_edge(self.node.cast_unchecked(), self.idx) }
889891
}
890892

891893
/// Inserts a new key/value pair and an edge that will go to the right of that new pair
@@ -1330,8 +1332,10 @@ unsafe fn move_kv<K, V>(
13301332
dest_offset: usize,
13311333
count: usize,
13321334
) {
1333-
ptr::copy_nonoverlapping(source.0.add(source_offset), dest.0.add(dest_offset), count);
1334-
ptr::copy_nonoverlapping(source.1.add(source_offset), dest.1.add(dest_offset), count);
1335+
unsafe {
1336+
ptr::copy_nonoverlapping(source.0.add(source_offset), dest.0.add(dest_offset), count);
1337+
ptr::copy_nonoverlapping(source.1.add(source_offset), dest.1.add(dest_offset), count);
1338+
}
13351339
}
13361340

13371341
// Source and destination must have the same height.
@@ -1344,8 +1348,10 @@ unsafe fn move_edges<K, V>(
13441348
) {
13451349
let source_ptr = source.as_internal_mut().edges.as_mut_ptr();
13461350
let dest_ptr = dest.as_internal_mut().edges.as_mut_ptr();
1347-
ptr::copy_nonoverlapping(source_ptr.add(source_offset), dest_ptr.add(dest_offset), count);
1348-
dest.correct_childrens_parent_links(dest_offset, dest_offset + count);
1351+
unsafe {
1352+
ptr::copy_nonoverlapping(source_ptr.add(source_offset), dest_ptr.add(dest_offset), count);
1353+
dest.correct_childrens_parent_links(dest_offset, dest_offset + count);
1354+
}
13491355
}
13501356

13511357
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
@@ -1459,12 +1465,16 @@ pub mod marker {
14591465
}
14601466

14611467
unsafe fn slice_insert<T>(slice: &mut [T], idx: usize, val: T) {
1462-
ptr::copy(slice.as_ptr().add(idx), slice.as_mut_ptr().add(idx + 1), slice.len() - idx);
1463-
ptr::write(slice.get_unchecked_mut(idx), val);
1468+
unsafe {
1469+
ptr::copy(slice.as_ptr().add(idx), slice.as_mut_ptr().add(idx + 1), slice.len() - idx);
1470+
ptr::write(slice.get_unchecked_mut(idx), val);
1471+
}
14641472
}
14651473

14661474
unsafe fn slice_remove<T>(slice: &mut [T], idx: usize) -> T {
1467-
let ret = ptr::read(slice.get_unchecked(idx));
1468-
ptr::copy(slice.as_ptr().add(idx + 1), slice.as_mut_ptr().add(idx), slice.len() - idx - 1);
1469-
ret
1475+
unsafe {
1476+
let ret = ptr::read(slice.get_unchecked(idx));
1477+
ptr::copy(slice.as_ptr().add(idx + 1), slice.as_mut_ptr().add(idx), slice.len() - idx - 1);
1478+
ret
1479+
}
14701480
}

‎src/liballoc/collections/linked_list.rs

+25-11
Original file line numberDiff line numberDiff line change
@@ -225,17 +225,17 @@ impl<T> LinkedList<T> {
225225
/// maintain validity of aliasing pointers.
226226
#[inline]
227227
unsafe fn unlink_node(&mut self, mut node: NonNull<Node<T>>) {
228-
let node = node.as_mut(); // this one is ours now, we can create an &mut.
228+
let node = unsafe { node.as_mut() }; // this one is ours now, we can create an &mut.
229229

230230
// Not creating new mutable (unique!) references overlapping `element`.
231231
match node.prev {
232-
Some(prev) => (*prev.as_ptr()).next = node.next,
232+
Some(prev) => unsafe { (*prev.as_ptr()).next = node.next },
233233
// this node is the head node
234234
None => self.head = node.next,
235235
};
236236

237237
match node.next {
238-
Some(next) => (*next.as_ptr()).prev = node.prev,
238+
Some(next) => unsafe { (*next.as_ptr()).prev = node.prev },
239239
// this node is the tail node
240240
None => self.tail = node.prev,
241241
};
@@ -258,17 +258,23 @@ impl<T> LinkedList<T> {
258258
// This method takes care not to create multiple mutable references to whole nodes at the same time,
259259
// to maintain validity of aliasing pointers into `element`.
260260
if let Some(mut existing_prev) = existing_prev {
261-
existing_prev.as_mut().next = Some(splice_start);
261+
unsafe {
262+
existing_prev.as_mut().next = Some(splice_start);
263+
}
262264
} else {
263265
self.head = Some(splice_start);
264266
}
265267
if let Some(mut existing_next) = existing_next {
266-
existing_next.as_mut().prev = Some(splice_end);
268+
unsafe {
269+
existing_next.as_mut().prev = Some(splice_end);
270+
}
267271
} else {
268272
self.tail = Some(splice_end);
269273
}
270-
splice_start.as_mut().prev = existing_prev;
271-
splice_end.as_mut().next = existing_next;
274+
unsafe {
275+
splice_start.as_mut().prev = existing_prev;
276+
splice_end.as_mut().next = existing_next;
277+
}
272278

273279
self.len += splice_length;
274280
}
@@ -297,9 +303,13 @@ impl<T> LinkedList<T> {
297303
if let Some(mut split_node) = split_node {
298304
let first_part_head;
299305
let first_part_tail;
300-
first_part_tail = split_node.as_mut().prev.take();
306+
unsafe {
307+
first_part_tail = split_node.as_mut().prev.take();
308+
}
301309
if let Some(mut tail) = first_part_tail {
302-
tail.as_mut().next = None;
310+
unsafe {
311+
tail.as_mut().next = None;
312+
}
303313
first_part_head = self.head;
304314
} else {
305315
first_part_head = None;
@@ -333,9 +343,13 @@ impl<T> LinkedList<T> {
333343
if let Some(mut split_node) = split_node {
334344
let second_part_head;
335345
let second_part_tail;
336-
second_part_head = split_node.as_mut().next.take();
346+
unsafe {
347+
second_part_head = split_node.as_mut().next.take();
348+
}
337349
if let Some(mut head) = second_part_head {
338-
head.as_mut().prev = None;
350+
unsafe {
351+
head.as_mut().prev = None;
352+
}
339353
second_part_tail = self.tail;
340354
} else {
341355
second_part_tail = None;

‎src/liballoc/collections/vec_deque.rs

+55-25
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,8 @@
77
88
#![stable(feature = "rust1", since = "1.0.0")]
99

10+
// ignore-tidy-filelength
11+
1012
use core::array::LengthAtMost32;
1113
use core::cmp::{self, Ordering};
1214
use core::fmt;
@@ -201,25 +203,27 @@ impl<T> VecDeque<T> {
201203
/// Turn ptr into a slice
202204
#[inline]
203205
unsafe fn buffer_as_slice(&self) -> &[T] {
204-
slice::from_raw_parts(self.ptr(), self.cap())
206+
unsafe { slice::from_raw_parts(self.ptr(), self.cap()) }
205207
}
206208

207209
/// Turn ptr into a mut slice
208210
#[inline]
209211
unsafe fn buffer_as_mut_slice(&mut self) -> &mut [T] {
210-
slice::from_raw_parts_mut(self.ptr(), self.cap())
212+
unsafe { slice::from_raw_parts_mut(self.ptr(), self.cap()) }
211213
}
212214

213215
/// Moves an element out of the buffer
214216
#[inline]
215217
unsafe fn buffer_read(&mut self, off: usize) -> T {
216-
ptr::read(self.ptr().add(off))
218+
unsafe { ptr::read(self.ptr().add(off)) }
217219
}
218220

219221
/// Writes an element into the buffer, moving it.
220222
#[inline]
221223
unsafe fn buffer_write(&mut self, off: usize, value: T) {
222-
ptr::write(self.ptr().add(off), value);
224+
unsafe {
225+
ptr::write(self.ptr().add(off), value);
226+
}
223227
}
224228

225229
/// Returns `true` if the buffer is at full capacity.
@@ -268,7 +272,9 @@ impl<T> VecDeque<T> {
268272
len,
269273
self.cap()
270274
);
271-
ptr::copy(self.ptr().add(src), self.ptr().add(dst), len);
275+
unsafe {
276+
ptr::copy(self.ptr().add(src), self.ptr().add(dst), len);
277+
}
272278
}
273279

274280
/// Copies a contiguous block of memory len long from src to dst
@@ -290,7 +296,9 @@ impl<T> VecDeque<T> {
290296
len,
291297
self.cap()
292298
);
293-
ptr::copy_nonoverlapping(self.ptr().add(src), self.ptr().add(dst), len);
299+
unsafe {
300+
ptr::copy_nonoverlapping(self.ptr().add(src), self.ptr().add(dst), len);
301+
}
294302
}
295303

296304
/// Copies a potentially wrapping block of memory len long from src to dest.
@@ -330,7 +338,9 @@ impl<T> VecDeque<T> {
330338
// 2 [_ _ A A A A B B _]
331339
// D . . .
332340
//
333-
self.copy(dst, src, len);
341+
unsafe {
342+
self.copy(dst, src, len);
343+
}
334344
}
335345
(false, false, true) => {
336346
// dst before src, src doesn't wrap, dst wraps
@@ -341,8 +351,10 @@ impl<T> VecDeque<T> {
341351
// 3 [B B B B _ _ _ A A]
342352
// . . D .
343353
//
344-
self.copy(dst, src, dst_pre_wrap_len);
345-
self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
354+
unsafe {
355+
self.copy(dst, src, dst_pre_wrap_len);
356+
self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
357+
}
346358
}
347359
(true, false, true) => {
348360
// src before dst, src doesn't wrap, dst wraps
@@ -353,8 +365,10 @@ impl<T> VecDeque<T> {
353365
// 3 [B B _ _ _ A A A A]
354366
// . . D .
355367
//
356-
self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
357-
self.copy(dst, src, dst_pre_wrap_len);
368+
unsafe {
369+
self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
370+
self.copy(dst, src, dst_pre_wrap_len);
371+
}
358372
}
359373
(false, true, false) => {
360374
// dst before src, src wraps, dst doesn't wrap
@@ -365,8 +379,10 @@ impl<T> VecDeque<T> {
365379
// 3 [C C _ _ _ B B C C]
366380
// D . . .
367381
//
368-
self.copy(dst, src, src_pre_wrap_len);
369-
self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
382+
unsafe {
383+
self.copy(dst, src, src_pre_wrap_len);
384+
self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
385+
}
370386
}
371387
(true, true, false) => {
372388
// src before dst, src wraps, dst doesn't wrap
@@ -377,8 +393,10 @@ impl<T> VecDeque<T> {
377393
// 3 [C C A A _ _ _ C C]
378394
// D . . .
379395
//
380-
self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
381-
self.copy(dst, src, src_pre_wrap_len);
396+
unsafe {
397+
self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
398+
self.copy(dst, src, src_pre_wrap_len);
399+
}
382400
}
383401
(false, true, true) => {
384402
// dst before src, src wraps, dst wraps
@@ -392,9 +410,11 @@ impl<T> VecDeque<T> {
392410
//
393411
debug_assert!(dst_pre_wrap_len > src_pre_wrap_len);
394412
let delta = dst_pre_wrap_len - src_pre_wrap_len;
395-
self.copy(dst, src, src_pre_wrap_len);
396-
self.copy(dst + src_pre_wrap_len, 0, delta);
397-
self.copy(0, delta, len - dst_pre_wrap_len);
413+
unsafe {
414+
self.copy(dst, src, src_pre_wrap_len);
415+
self.copy(dst + src_pre_wrap_len, 0, delta);
416+
self.copy(0, delta, len - dst_pre_wrap_len);
417+
}
398418
}
399419
(true, true, true) => {
400420
// src before dst, src wraps, dst wraps
@@ -408,9 +428,11 @@ impl<T> VecDeque<T> {
408428
//
409429
debug_assert!(src_pre_wrap_len > dst_pre_wrap_len);
410430
let delta = src_pre_wrap_len - dst_pre_wrap_len;
411-
self.copy(delta, 0, len - src_pre_wrap_len);
412-
self.copy(0, self.cap() - delta, delta);
413-
self.copy(dst, src, dst_pre_wrap_len);
431+
unsafe {
432+
self.copy(delta, 0, len - src_pre_wrap_len);
433+
self.copy(0, self.cap() - delta, delta);
434+
self.copy(dst, src, dst_pre_wrap_len);
435+
}
414436
}
415437
}
416438
}
@@ -440,13 +462,17 @@ impl<T> VecDeque<T> {
440462
// Nop
441463
} else if self.head < old_capacity - self.tail {
442464
// B
443-
self.copy_nonoverlapping(old_capacity, 0, self.head);
465+
unsafe {
466+
self.copy_nonoverlapping(old_capacity, 0, self.head);
467+
}
444468
self.head += old_capacity;
445469
debug_assert!(self.head > self.tail);
446470
} else {
447471
// C
448472
let new_tail = new_capacity - (old_capacity - self.tail);
449-
self.copy_nonoverlapping(new_tail, self.tail, old_capacity - self.tail);
473+
unsafe {
474+
self.copy_nonoverlapping(new_tail, self.tail, old_capacity - self.tail);
475+
}
450476
self.tail = new_tail;
451477
debug_assert!(self.head < self.tail);
452478
}
@@ -2297,7 +2323,9 @@ impl<T> VecDeque<T> {
22972323

22982324
unsafe fn rotate_left_inner(&mut self, mid: usize) {
22992325
debug_assert!(mid * 2 <= self.len());
2300-
self.wrap_copy(self.head, self.tail, mid);
2326+
unsafe {
2327+
self.wrap_copy(self.head, self.tail, mid);
2328+
}
23012329
self.head = self.wrap_add(self.head, mid);
23022330
self.tail = self.wrap_add(self.tail, mid);
23032331
}
@@ -2306,7 +2334,9 @@ impl<T> VecDeque<T> {
23062334
debug_assert!(k * 2 <= self.len());
23072335
self.head = self.wrap_sub(self.head, k);
23082336
self.tail = self.wrap_sub(self.tail, k);
2309-
self.wrap_copy(self.tail, self.head, k);
2337+
unsafe {
2338+
self.wrap_copy(self.tail, self.head, k);
2339+
}
23102340
}
23112341
}
23122342

‎src/liballoc/lib.rs

+2
Original file line numberDiff line numberDiff line change
@@ -72,6 +72,7 @@
7272
#![deny(intra_doc_link_resolution_failure)] // rustdoc is run without -D warnings
7373
#![allow(explicit_outlives_requirements)]
7474
#![allow(incomplete_features)]
75+
#![deny(unsafe_op_in_unsafe_fn)]
7576
#![cfg_attr(not(test), feature(generator_trait))]
7677
#![cfg_attr(test, feature(test))]
7778
#![feature(allocator_api)]
@@ -118,6 +119,7 @@
118119
#![feature(try_reserve)]
119120
#![feature(unboxed_closures)]
120121
#![feature(unicode_internals)]
122+
#![feature(unsafe_block_in_unsafe_fn)]
121123
#![feature(unsize)]
122124
#![feature(unsized_locals)]
123125
#![feature(allocator_internals)]

‎src/liballoc/raw_vec.rs

+6-4
Original file line numberDiff line numberDiff line change
@@ -108,7 +108,7 @@ impl<T> RawVec<T, Global> {
108108
/// If the `ptr` and `capacity` come from a `RawVec`, then this is guaranteed.
109109
#[inline]
110110
pub unsafe fn from_raw_parts(ptr: *mut T, capacity: usize) -> Self {
111-
Self::from_raw_parts_in(ptr, capacity, Global)
111+
unsafe { Self::from_raw_parts_in(ptr, capacity, Global) }
112112
}
113113

114114
/// Converts a `Box<[T]>` into a `RawVec<T>`.
@@ -139,8 +139,10 @@ impl<T> RawVec<T, Global> {
139139
);
140140

141141
let me = ManuallyDrop::new(self);
142-
let slice = slice::from_raw_parts_mut(me.ptr() as *mut MaybeUninit<T>, len);
143-
Box::from_raw(slice)
142+
unsafe {
143+
let slice = slice::from_raw_parts_mut(me.ptr() as *mut MaybeUninit<T>, len);
144+
Box::from_raw(slice)
145+
}
144146
}
145147
}
146148

@@ -192,7 +194,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
192194
/// If the `ptr` and `capacity` come from a `RawVec` created via `a`, then this is guaranteed.
193195
#[inline]
194196
pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, a: A) -> Self {
195-
Self { ptr: Unique::new_unchecked(ptr), cap: capacity, alloc: a }
197+
Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap: capacity, alloc: a }
196198
}
197199

198200
/// Gets a raw pointer to the start of the allocation. Note that this is

‎src/liballoc/raw_vec/tests.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ fn allocator_param() {
3535
}
3636
}
3737
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
38-
Global.dealloc(ptr, layout)
38+
unsafe { Global.dealloc(ptr, layout) }
3939
}
4040
}
4141

‎src/liballoc/rc.rs

+51-39
Original file line numberDiff line numberDiff line change
@@ -304,7 +304,7 @@ impl<T: ?Sized> Rc<T> {
304304
}
305305

306306
unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
307-
Self::from_inner(NonNull::new_unchecked(ptr))
307+
Self::from_inner(unsafe { NonNull::new_unchecked(ptr) })
308308
}
309309
}
310310

@@ -544,7 +544,7 @@ impl<T> Rc<[mem::MaybeUninit<T>]> {
544544
#[unstable(feature = "new_uninit", issue = "63291")]
545545
#[inline]
546546
pub unsafe fn assume_init(self) -> Rc<[T]> {
547-
Rc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _)
547+
unsafe { Rc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) }
548548
}
549549
}
550550

@@ -643,13 +643,13 @@ impl<T: ?Sized> Rc<T> {
643643
/// ```
644644
#[stable(feature = "rc_raw", since = "1.17.0")]
645645
pub unsafe fn from_raw(ptr: *const T) -> Self {
646-
let offset = data_offset(ptr);
646+
let offset = unsafe { data_offset(ptr) };
647647

648648
// Reverse the offset to find the original RcBox.
649649
let fake_ptr = ptr as *mut RcBox<T>;
650-
let rc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
650+
let rc_ptr = unsafe { set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)) };
651651

652-
Self::from_ptr(rc_ptr)
652+
unsafe { Self::from_ptr(rc_ptr) }
653653
}
654654

655655
/// Consumes the `Rc`, returning the wrapped pointer as `NonNull<T>`.
@@ -805,7 +805,7 @@ impl<T: ?Sized> Rc<T> {
805805
#[inline]
806806
#[unstable(feature = "get_mut_unchecked", issue = "63292")]
807807
pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
808-
&mut this.ptr.as_mut().value
808+
unsafe { &mut this.ptr.as_mut().value }
809809
}
810810

811811
#[inline]
@@ -964,20 +964,24 @@ impl<T: ?Sized> Rc<T> {
964964

965965
// Initialize the RcBox
966966
let inner = mem_to_rcbox(mem.ptr.as_ptr());
967-
debug_assert_eq!(Layout::for_value(&*inner), layout);
967+
unsafe {
968+
debug_assert_eq!(Layout::for_value(&*inner), layout);
968969

969-
ptr::write(&mut (*inner).strong, Cell::new(1));
970-
ptr::write(&mut (*inner).weak, Cell::new(1));
970+
ptr::write(&mut (*inner).strong, Cell::new(1));
971+
ptr::write(&mut (*inner).weak, Cell::new(1));
972+
}
971973

972974
inner
973975
}
974976

975977
/// Allocates an `RcBox<T>` with sufficient space for an unsized inner value
976978
unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox<T> {
977979
// Allocate for the `RcBox<T>` using the given value.
978-
Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| {
979-
set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>
980-
})
980+
unsafe {
981+
Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| {
982+
set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>
983+
})
984+
}
981985
}
982986

983987
fn from_box(v: Box<T>) -> Rc<T> {
@@ -1006,9 +1010,11 @@ impl<T: ?Sized> Rc<T> {
10061010
impl<T> Rc<[T]> {
10071011
/// Allocates an `RcBox<[T]>` with the given length.
10081012
unsafe fn allocate_for_slice(len: usize) -> *mut RcBox<[T]> {
1009-
Self::allocate_for_layout(Layout::array::<T>(len).unwrap(), |mem| {
1010-
ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>
1011-
})
1013+
unsafe {
1014+
Self::allocate_for_layout(Layout::array::<T>(len).unwrap(), |mem| {
1015+
ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>
1016+
})
1017+
}
10121018
}
10131019
}
10141020

@@ -1017,7 +1023,9 @@ impl<T> Rc<[T]> {
10171023
/// For a slice/trait object, this sets the `data` field and leaves the rest
10181024
/// unchanged. For a sized raw pointer, this simply sets the pointer.
10191025
unsafe fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
1020-
ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
1026+
unsafe {
1027+
ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
1028+
}
10211029
ptr
10221030
}
10231031

@@ -1026,11 +1034,11 @@ impl<T> Rc<[T]> {
10261034
///
10271035
/// Unsafe because the caller must either take ownership or bind `T: Copy`
10281036
unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
1029-
let ptr = Self::allocate_for_slice(v.len());
1030-
1031-
ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).value as *mut [T] as *mut T, v.len());
1032-
1033-
Self::from_ptr(ptr)
1037+
unsafe {
1038+
let ptr = Self::allocate_for_slice(v.len());
1039+
ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).value as *mut [T] as *mut T, v.len());
1040+
Self::from_ptr(ptr)
1041+
}
10341042
}
10351043

10361044
/// Constructs an `Rc<[T]>` from an iterator known to be of a certain size.
@@ -1058,25 +1066,27 @@ impl<T> Rc<[T]> {
10581066
}
10591067
}
10601068

1061-
let ptr = Self::allocate_for_slice(len);
1069+
unsafe {
1070+
let ptr = Self::allocate_for_slice(len);
10621071

1063-
let mem = ptr as *mut _ as *mut u8;
1064-
let layout = Layout::for_value(&*ptr);
1072+
let mem = ptr as *mut _ as *mut u8;
1073+
let layout = Layout::for_value(&*ptr);
10651074

1066-
// Pointer to first element
1067-
let elems = &mut (*ptr).value as *mut [T] as *mut T;
1075+
// Pointer to first element
1076+
let elems = &mut (*ptr).value as *mut [T] as *mut T;
10681077

1069-
let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
1078+
let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
10701079

1071-
for (i, item) in iter.enumerate() {
1072-
ptr::write(elems.add(i), item);
1073-
guard.n_elems += 1;
1074-
}
1080+
for (i, item) in iter.enumerate() {
1081+
ptr::write(elems.add(i), item);
1082+
guard.n_elems += 1;
1083+
}
10751084

1076-
// All clear. Forget the guard so it doesn't free the new RcBox.
1077-
forget(guard);
1085+
// All clear. Forget the guard so it doesn't free the new RcBox.
1086+
forget(guard);
10781087

1079-
Self::from_ptr(ptr)
1088+
Self::from_ptr(ptr)
1089+
}
10801090
}
10811091
}
10821092

@@ -1786,10 +1796,12 @@ impl<T> Weak<T> {
17861796
Self::new()
17871797
} else {
17881798
// See Rc::from_raw for details
1789-
let offset = data_offset(ptr);
1790-
let fake_ptr = ptr as *mut RcBox<T>;
1791-
let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
1792-
Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") }
1799+
unsafe {
1800+
let offset = data_offset(ptr);
1801+
let fake_ptr = ptr as *mut RcBox<T>;
1802+
let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
1803+
Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") }
1804+
}
17931805
}
17941806
}
17951807
}
@@ -2106,7 +2118,7 @@ unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
21062118
// Because it is ?Sized, it will always be the last field in memory.
21072119
// Note: This is a detail of the current implementation of the compiler,
21082120
// and is not a guaranteed language detail. Do not rely on it outside of std.
2109-
data_offset_align(align_of_val(&*ptr))
2121+
unsafe { data_offset_align(align_of_val(&*ptr)) }
21102122
}
21112123

21122124
/// Computes the offset of the data field within `RcBox`.

‎src/liballoc/slice.rs

+27-20
Original file line numberDiff line numberDiff line change
@@ -831,8 +831,7 @@ where
831831
{
832832
let len = v.len();
833833
let v = v.as_mut_ptr();
834-
let v_mid = v.add(mid);
835-
let v_end = v.add(len);
834+
let (v_mid, v_end) = unsafe { (v.add(mid), v.add(len)) };
836835

837836
// The merge process first copies the shorter run into `buf`. Then it traces the newly copied
838837
// run and the longer run forwards (or backwards), comparing their next unconsumed elements and
@@ -855,8 +854,10 @@ where
855854

856855
if mid <= len - mid {
857856
// The left run is shorter.
858-
ptr::copy_nonoverlapping(v, buf, mid);
859-
hole = MergeHole { start: buf, end: buf.add(mid), dest: v };
857+
unsafe {
858+
ptr::copy_nonoverlapping(v, buf, mid);
859+
hole = MergeHole { start: buf, end: buf.add(mid), dest: v };
860+
}
860861

861862
// Initially, these pointers point to the beginnings of their arrays.
862863
let left = &mut hole.start;
@@ -866,17 +867,21 @@ where
866867
while *left < hole.end && right < v_end {
867868
// Consume the lesser side.
868869
// If equal, prefer the left run to maintain stability.
869-
let to_copy = if is_less(&*right, &**left) {
870-
get_and_increment(&mut right)
871-
} else {
872-
get_and_increment(left)
873-
};
874-
ptr::copy_nonoverlapping(to_copy, get_and_increment(out), 1);
870+
unsafe {
871+
let to_copy = if is_less(&*right, &**left) {
872+
get_and_increment(&mut right)
873+
} else {
874+
get_and_increment(left)
875+
};
876+
ptr::copy_nonoverlapping(to_copy, get_and_increment(out), 1);
877+
}
875878
}
876879
} else {
877880
// The right run is shorter.
878-
ptr::copy_nonoverlapping(v_mid, buf, len - mid);
879-
hole = MergeHole { start: buf, end: buf.add(len - mid), dest: v_mid };
881+
unsafe {
882+
ptr::copy_nonoverlapping(v_mid, buf, len - mid);
883+
hole = MergeHole { start: buf, end: buf.add(len - mid), dest: v_mid };
884+
}
880885

881886
// Initially, these pointers point past the ends of their arrays.
882887
let left = &mut hole.dest;
@@ -886,25 +891,27 @@ where
886891
while v < *left && buf < *right {
887892
// Consume the greater side.
888893
// If equal, prefer the right run to maintain stability.
889-
let to_copy = if is_less(&*right.offset(-1), &*left.offset(-1)) {
890-
decrement_and_get(left)
891-
} else {
892-
decrement_and_get(right)
893-
};
894-
ptr::copy_nonoverlapping(to_copy, decrement_and_get(&mut out), 1);
894+
unsafe {
895+
let to_copy = if is_less(&*right.offset(-1), &*left.offset(-1)) {
896+
decrement_and_get(left)
897+
} else {
898+
decrement_and_get(right)
899+
};
900+
ptr::copy_nonoverlapping(to_copy, decrement_and_get(&mut out), 1);
901+
}
895902
}
896903
}
897904
// Finally, `hole` gets dropped. If the shorter run was not fully consumed, whatever remains of
898905
// it will now be copied into the hole in `v`.
899906

900907
unsafe fn get_and_increment<T>(ptr: &mut *mut T) -> *mut T {
901908
let old = *ptr;
902-
*ptr = ptr.offset(1);
909+
*ptr = unsafe { ptr.offset(1) };
903910
old
904911
}
905912

906913
unsafe fn decrement_and_get<T>(ptr: &mut *mut T) -> *mut T {
907-
*ptr = ptr.offset(-1);
914+
*ptr = unsafe { ptr.offset(-1) };
908915
*ptr
909916
}
910917

‎src/liballoc/str.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -583,5 +583,5 @@ impl str {
583583
#[stable(feature = "str_box_extras", since = "1.20.0")]
584584
#[inline]
585585
pub unsafe fn from_boxed_utf8_unchecked(v: Box<[u8]>) -> Box<str> {
586-
Box::from_raw(Box::into_raw(v) as *mut str)
586+
unsafe { Box::from_raw(Box::into_raw(v) as *mut str) }
587587
}

‎src/liballoc/string.rs

+6-4
Original file line numberDiff line numberDiff line change
@@ -724,7 +724,7 @@ impl String {
724724
#[inline]
725725
#[stable(feature = "rust1", since = "1.0.0")]
726726
pub unsafe fn from_raw_parts(buf: *mut u8, length: usize, capacity: usize) -> String {
727-
String { vec: Vec::from_raw_parts(buf, length, capacity) }
727+
unsafe { String { vec: Vec::from_raw_parts(buf, length, capacity) } }
728728
}
729729

730730
/// Converts a vector of bytes to a `String` without checking that the
@@ -1329,9 +1329,11 @@ impl String {
13291329
let amt = bytes.len();
13301330
self.vec.reserve(amt);
13311331

1332-
ptr::copy(self.vec.as_ptr().add(idx), self.vec.as_mut_ptr().add(idx + amt), len - idx);
1333-
ptr::copy(bytes.as_ptr(), self.vec.as_mut_ptr().add(idx), amt);
1334-
self.vec.set_len(len + amt);
1332+
unsafe {
1333+
ptr::copy(self.vec.as_ptr().add(idx), self.vec.as_mut_ptr().add(idx + amt), len - idx);
1334+
ptr::copy(bytes.as_ptr(), self.vec.as_mut_ptr().add(idx), amt);
1335+
self.vec.set_len(len + amt);
1336+
}
13351337
}
13361338

13371339
/// Inserts a string slice into this `String` at a byte position.

‎src/liballoc/sync.rs

+58-42
Original file line numberDiff line numberDiff line change
@@ -232,7 +232,7 @@ impl<T: ?Sized> Arc<T> {
232232
}
233233

234234
unsafe fn from_ptr(ptr: *mut ArcInner<T>) -> Self {
235-
Self::from_inner(NonNull::new_unchecked(ptr))
235+
unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
236236
}
237237
}
238238

@@ -543,7 +543,7 @@ impl<T> Arc<[mem::MaybeUninit<T>]> {
543543
#[unstable(feature = "new_uninit", issue = "63291")]
544544
#[inline]
545545
pub unsafe fn assume_init(self) -> Arc<[T]> {
546-
Arc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _)
546+
unsafe { Arc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) }
547547
}
548548
}
549549

@@ -642,13 +642,15 @@ impl<T: ?Sized> Arc<T> {
642642
/// ```
643643
#[stable(feature = "rc_raw", since = "1.17.0")]
644644
pub unsafe fn from_raw(ptr: *const T) -> Self {
645-
let offset = data_offset(ptr);
645+
unsafe {
646+
let offset = data_offset(ptr);
646647

647-
// Reverse the offset to find the original ArcInner.
648-
let fake_ptr = ptr as *mut ArcInner<T>;
649-
let arc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
648+
// Reverse the offset to find the original ArcInner.
649+
let fake_ptr = ptr as *mut ArcInner<T>;
650+
let arc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
650651

651-
Self::from_ptr(arc_ptr)
652+
Self::from_ptr(arc_ptr)
653+
}
652654
}
653655

654656
/// Consumes the `Arc`, returning the wrapped pointer as `NonNull<T>`.
@@ -807,7 +809,7 @@ impl<T: ?Sized> Arc<T> {
807809
#[unstable(feature = "arc_mutate_strong_count", issue = "71983")]
808810
pub unsafe fn incr_strong_count(ptr: *const T) {
809811
// Retain Arc, but don't touch refcount by wrapping in ManuallyDrop
810-
let arc = mem::ManuallyDrop::new(Arc::<T>::from_raw(ptr));
812+
let arc = unsafe { mem::ManuallyDrop::new(Arc::<T>::from_raw(ptr)) };
811813
// Now increase refcount, but don't drop new refcount either
812814
let _arc_clone: mem::ManuallyDrop<_> = arc.clone();
813815
}
@@ -847,7 +849,7 @@ impl<T: ?Sized> Arc<T> {
847849
#[inline]
848850
#[unstable(feature = "arc_mutate_strong_count", issue = "71983")]
849851
pub unsafe fn decr_strong_count(ptr: *const T) {
850-
mem::drop(Arc::from_raw(ptr));
852+
unsafe { mem::drop(Arc::from_raw(ptr)) };
851853
}
852854

853855
#[inline]
@@ -865,7 +867,7 @@ impl<T: ?Sized> Arc<T> {
865867
unsafe fn drop_slow(&mut self) {
866868
// Destroy the data at this time, even though we may not free the box
867869
// allocation itself (there may still be weak pointers lying around).
868-
ptr::drop_in_place(Self::get_mut_unchecked(self));
870+
unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) };
869871

870872
// Drop the weak ref collectively held by all strong references
871873
drop(Weak { ptr: self.ptr });
@@ -917,20 +919,24 @@ impl<T: ?Sized> Arc<T> {
917919

918920
// Initialize the ArcInner
919921
let inner = mem_to_arcinner(mem.ptr.as_ptr());
920-
debug_assert_eq!(Layout::for_value(&*inner), layout);
922+
debug_assert_eq!(unsafe { Layout::for_value(&*inner) }, layout);
921923

922-
ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
923-
ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
924+
unsafe {
925+
ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
926+
ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
927+
}
924928

925929
inner
926930
}
927931

928932
/// Allocates an `ArcInner<T>` with sufficient space for an unsized inner value.
929933
unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> {
930934
// Allocate for the `ArcInner<T>` using the given value.
931-
Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| {
932-
set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>
933-
})
935+
unsafe {
936+
Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| {
937+
set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>
938+
})
939+
}
934940
}
935941

936942
fn from_box(v: Box<T>) -> Arc<T> {
@@ -959,9 +965,11 @@ impl<T: ?Sized> Arc<T> {
959965
impl<T> Arc<[T]> {
960966
/// Allocates an `ArcInner<[T]>` with the given length.
961967
unsafe fn allocate_for_slice(len: usize) -> *mut ArcInner<[T]> {
962-
Self::allocate_for_layout(Layout::array::<T>(len).unwrap(), |mem| {
963-
ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>
964-
})
968+
unsafe {
969+
Self::allocate_for_layout(Layout::array::<T>(len).unwrap(), |mem| {
970+
ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>
971+
})
972+
}
965973
}
966974
}
967975

@@ -970,7 +978,9 @@ impl<T> Arc<[T]> {
970978
/// For a slice/trait object, this sets the `data` field and leaves the rest
971979
/// unchanged. For a sized raw pointer, this simply sets the pointer.
972980
unsafe fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
973-
ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
981+
unsafe {
982+
ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
983+
}
974984
ptr
975985
}
976986

@@ -979,11 +989,13 @@ impl<T> Arc<[T]> {
979989
///
980990
/// Unsafe because the caller must either take ownership or bind `T: Copy`.
981991
unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> {
982-
let ptr = Self::allocate_for_slice(v.len());
992+
unsafe {
993+
let ptr = Self::allocate_for_slice(v.len());
983994

984-
ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len());
995+
ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len());
985996

986-
Self::from_ptr(ptr)
997+
Self::from_ptr(ptr)
998+
}
987999
}
9881000

9891001
/// Constructs an `Arc<[T]>` from an iterator known to be of a certain size.
@@ -1011,25 +1023,27 @@ impl<T> Arc<[T]> {
10111023
}
10121024
}
10131025

1014-
let ptr = Self::allocate_for_slice(len);
1026+
unsafe {
1027+
let ptr = Self::allocate_for_slice(len);
10151028

1016-
let mem = ptr as *mut _ as *mut u8;
1017-
let layout = Layout::for_value(&*ptr);
1029+
let mem = ptr as *mut _ as *mut u8;
1030+
let layout = Layout::for_value(&*ptr);
10181031

1019-
// Pointer to first element
1020-
let elems = &mut (*ptr).data as *mut [T] as *mut T;
1032+
// Pointer to first element
1033+
let elems = &mut (*ptr).data as *mut [T] as *mut T;
10211034

1022-
let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
1035+
let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
10231036

1024-
for (i, item) in iter.enumerate() {
1025-
ptr::write(elems.add(i), item);
1026-
guard.n_elems += 1;
1027-
}
1037+
for (i, item) in iter.enumerate() {
1038+
ptr::write(elems.add(i), item);
1039+
guard.n_elems += 1;
1040+
}
10281041

1029-
// All clear. Forget the guard so it doesn't free the new ArcInner.
1030-
mem::forget(guard);
1042+
// All clear. Forget the guard so it doesn't free the new ArcInner.
1043+
mem::forget(guard);
10311044

1032-
Self::from_ptr(ptr)
1045+
Self::from_ptr(ptr)
1046+
}
10331047
}
10341048
}
10351049

@@ -1274,7 +1288,7 @@ impl<T: ?Sized> Arc<T> {
12741288
pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
12751289
// We are careful to *not* create a reference covering the "count" fields, as
12761290
// this would alias with concurrent access to the reference counts (e.g. by `Weak`).
1277-
&mut (*this.ptr.as_ptr()).data
1291+
unsafe { &mut (*this.ptr.as_ptr()).data }
12781292
}
12791293

12801294
/// Determine whether this is the unique reference (including weak refs) to
@@ -1551,10 +1565,12 @@ impl<T> Weak<T> {
15511565
Self::new()
15521566
} else {
15531567
// See Arc::from_raw for details
1554-
let offset = data_offset(ptr);
1555-
let fake_ptr = ptr as *mut ArcInner<T>;
1556-
let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
1557-
Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") }
1568+
unsafe {
1569+
let offset = data_offset(ptr);
1570+
let fake_ptr = ptr as *mut ArcInner<T>;
1571+
let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
1572+
Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") }
1573+
}
15581574
}
15591575
}
15601576
}
@@ -2260,7 +2276,7 @@ unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
22602276
// Because it is `?Sized`, it will always be the last field in memory.
22612277
// Note: This is a detail of the current implementation of the compiler,
22622278
// and is not a guaranteed language detail. Do not rely on it outside of std.
2263-
data_offset_align(align_of_val(&*ptr))
2279+
unsafe { data_offset_align(align_of_val(&*ptr)) }
22642280
}
22652281

22662282
/// Computes the offset of the data field within `ArcInner`.

‎src/liballoc/task.rs

+5-4
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,7 @@ impl<W: Wake + Send + Sync + 'static> From<Arc<W>> for RawWaker {
6060
fn raw_waker<W: Wake + Send + Sync + 'static>(waker: Arc<W>) -> RawWaker {
6161
// Increment the reference count of the arc to clone it.
6262
unsafe fn clone_waker<W: Wake + Send + Sync + 'static>(waker: *const ()) -> RawWaker {
63-
Arc::incr_strong_count(waker as *const W);
63+
unsafe { Arc::incr_strong_count(waker as *const W) };
6464
RawWaker::new(
6565
waker as *const (),
6666
&RawWakerVTable::new(clone_waker::<W>, wake::<W>, wake_by_ref::<W>, drop_waker::<W>),
@@ -69,19 +69,20 @@ fn raw_waker<W: Wake + Send + Sync + 'static>(waker: Arc<W>) -> RawWaker {
6969

7070
// Wake by value, moving the Arc into the Wake::wake function
7171
unsafe fn wake<W: Wake + Send + Sync + 'static>(waker: *const ()) {
72-
let waker: Arc<W> = Arc::from_raw(waker as *const W);
72+
let waker: Arc<W> = unsafe { Arc::from_raw(waker as *const W) };
7373
<W as Wake>::wake(waker);
7474
}
7575

7676
// Wake by reference, wrap the waker in ManuallyDrop to avoid dropping it
7777
unsafe fn wake_by_ref<W: Wake + Send + Sync + 'static>(waker: *const ()) {
78-
let waker: ManuallyDrop<Arc<W>> = ManuallyDrop::new(Arc::from_raw(waker as *const W));
78+
let waker: ManuallyDrop<Arc<W>> =
79+
unsafe { ManuallyDrop::new(Arc::from_raw(waker as *const W)) };
7980
<W as Wake>::wake_by_ref(&waker);
8081
}
8182

8283
// Decrement the reference count of the Arc on drop
8384
unsafe fn drop_waker<W: Wake + Send + Sync + 'static>(waker: *const ()) {
84-
Arc::decr_strong_count(waker as *const W);
85+
unsafe { Arc::decr_strong_count(waker as *const W) };
8586
}
8687

8788
RawWaker::new(

‎src/liballoc/vec.rs

+14-11
Original file line numberDiff line numberDiff line change
@@ -465,7 +465,7 @@ impl<T> Vec<T> {
465465
/// ```
466466
#[stable(feature = "rust1", since = "1.0.0")]
467467
pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> Vec<T> {
468-
Vec { buf: RawVec::from_raw_parts(ptr, capacity), len: length }
468+
unsafe { Vec { buf: RawVec::from_raw_parts(ptr, capacity), len: length } }
469469
}
470470

471471
/// Returns the number of elements the vector can hold without
@@ -1264,10 +1264,10 @@ impl<T> Vec<T> {
12641264
/// Appends elements to `Self` from other buffer.
12651265
#[inline]
12661266
unsafe fn append_elements(&mut self, other: *const [T]) {
1267-
let count = (*other).len();
1267+
let count = unsafe { (*other).len() };
12681268
self.reserve(count);
12691269
let len = self.len();
1270-
ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count);
1270+
unsafe { ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count) };
12711271
self.len += count;
12721272
}
12731273

@@ -2965,15 +2965,16 @@ impl<T> Drain<'_, T> {
29652965
/// Fill that range as much as possible with new elements from the `replace_with` iterator.
29662966
/// Returns `true` if we filled the entire range. (`replace_with.next()` didn’t return `None`.)
29672967
unsafe fn fill<I: Iterator<Item = T>>(&mut self, replace_with: &mut I) -> bool {
2968-
let vec = self.vec.as_mut();
2968+
let vec = unsafe { self.vec.as_mut() };
29692969
let range_start = vec.len;
29702970
let range_end = self.tail_start;
2971-
let range_slice =
2972-
slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start);
2971+
let range_slice = unsafe {
2972+
slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start)
2973+
};
29732974

29742975
for place in range_slice {
29752976
if let Some(new_item) = replace_with.next() {
2976-
ptr::write(place, new_item);
2977+
unsafe { ptr::write(place, new_item) };
29772978
vec.len += 1;
29782979
} else {
29792980
return false;
@@ -2984,14 +2985,16 @@ impl<T> Drain<'_, T> {
29842985

29852986
/// Makes room for inserting more elements before the tail.
29862987
unsafe fn move_tail(&mut self, additional: usize) {
2987-
let vec = self.vec.as_mut();
2988+
let vec = unsafe { self.vec.as_mut() };
29882989
let len = self.tail_start + self.tail_len;
29892990
vec.buf.reserve(len, additional);
29902991

29912992
let new_tail_start = self.tail_start + additional;
2992-
let src = vec.as_ptr().add(self.tail_start);
2993-
let dst = vec.as_mut_ptr().add(new_tail_start);
2994-
ptr::copy(src, dst, self.tail_len);
2993+
unsafe {
2994+
let src = vec.as_ptr().add(self.tail_start);
2995+
let dst = vec.as_mut_ptr().add(new_tail_start);
2996+
ptr::copy(src, dst, self.tail_len);
2997+
}
29952998
self.tail_start = new_tail_start;
29962999
}
29973000
}

0 commit comments

Comments
 (0)
Please sign in to comment.