@@ -18,6 +18,17 @@ use core::ops::Deref;
 use core::ptr::{self, NonNull};
 use core::slice::{self};

+macro non_null {
+    (mut $place:expr, $t:ident) => {{
+        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
+        unsafe { &mut *(ptr::addr_of_mut!($place) as *mut NonNull<$t>) }
+    }},
+    ($place:expr, $t:ident) => {{
+        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
+        unsafe { *(ptr::addr_of!($place) as *const NonNull<$t>) }
+    }},
+}
+
 /// An iterator that moves out of a vector.
 ///
 /// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
@@ -41,10 +52,12 @@ pub struct IntoIter<
     // the drop impl reconstructs a RawVec from buf, cap and alloc
     // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop
     pub(super) alloc: ManuallyDrop<A>,
-    pub(super) ptr: *const T,
-    pub(super) end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
-                              // ptr == end is a quick test for the Iterator being empty, that works
-                              // for both ZST and non-ZST.
+    pub(super) ptr: NonNull<T>,
+    /// If T is a ZST, this is actually ptr+len. This encoding is picked so that
+    /// ptr == end is a quick test for the Iterator being empty, that works
+    /// for both ZST and non-ZST.
+    /// For non-ZSTs the pointer is treated as `NonNull<T>`
+    pub(super) end: *const T,
 }

 #[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
@@ -68,7 +81,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
     /// ```
     #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
     pub fn as_slice(&self) -> &[T] {
-        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
+        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len()) }
     }

     /// Returns the remaining items of this iterator as a mutable slice.
@@ -97,7 +110,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
     }

     fn as_raw_mut_slice(&mut self) -> *mut [T] {
-        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
+        ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len())
     }

     /// Drops remaining elements and relinquishes the backing allocation.
@@ -124,7 +137,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
         // this creates less assembly
         self.cap = 0;
         self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) };
-        self.ptr = self.buf.as_ptr();
+        self.ptr = self.buf;
         self.end = self.buf.as_ptr();

         // Dropping the remaining elements can panic, so this needs to be
@@ -138,7 +151,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
     pub(crate) fn forget_remaining_elements(&mut self) {
         // For the ZST case, it is crucial that we mutate `end` here, not `ptr`.
         // `ptr` must stay aligned, while `end` may be unaligned.
-        self.end = self.ptr;
+        self.end = self.ptr.as_ptr();
     }

     #[cfg(not(no_global_oom_handling))]
@@ -160,7 +173,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
                // say that they're all at the beginning of the "allocation".
                0..this.len()
            } else {
-                this.ptr.sub_ptr(buf)..this.end.sub_ptr(buf)
+                this.ptr.sub_ptr(this.buf)..this.end.sub_ptr(buf)
            };
            let cap = this.cap;
            let alloc = ManuallyDrop::take(&mut this.alloc);
@@ -187,37 +200,43 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {

     #[inline]
     fn next(&mut self) -> Option<T> {
-        if self.ptr == self.end {
-            None
-        } else if T::IS_ZST {
-            // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
-            // reducing the `end`.
-            self.end = self.end.wrapping_byte_sub(1);
-
-            // Make up a value of this ZST.
-            Some(unsafe { mem::zeroed() })
+        if T::IS_ZST {
+            if self.ptr.as_ptr() == self.end as *mut _ {
+                None
+            } else {
+                // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
+                // reducing the `end`.
+                self.end = self.end.wrapping_byte_sub(1);
+
+                // Make up a value of this ZST.
+                Some(unsafe { mem::zeroed() })
+            }
         } else {
-            let old = self.ptr;
-            self.ptr = unsafe { self.ptr.add(1) };
+            if self.ptr == non_null!(self.end, T) {
+                None
+            } else {
+                let old = self.ptr;
+                self.ptr = unsafe { old.add(1) };

-            Some(unsafe { ptr::read(old) })
+                Some(unsafe { ptr::read(old.as_ptr()) })
+            }
         }
     }

     #[inline]
     fn size_hint(&self) -> (usize, Option<usize>) {
         let exact = if T::IS_ZST {
-            self.end.addr().wrapping_sub(self.ptr.addr())
+            self.end.addr().wrapping_sub(self.ptr.as_ptr().addr())
         } else {
-            unsafe { self.end.sub_ptr(self.ptr) }
+            unsafe { non_null!(self.end, T).sub_ptr(self.ptr) }
         };
         (exact, Some(exact))
     }

     #[inline]
     fn advance_by(&mut self, n: usize) -> Result<(), NonZeroUsize> {
         let step_size = self.len().min(n);
-        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr as *mut T, step_size);
+        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), step_size);
         if T::IS_ZST {
             // See `next` for why we sub `end` here.
             self.end = self.end.wrapping_byte_sub(step_size);
@@ -259,7 +278,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
             // Safety: `len` indicates that this many elements are available and we just checked that
             // it fits into the array.
             unsafe {
-                ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, len);
+                ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, len);
                 self.forget_remaining_elements();
                 return Err(array::IntoIter::new_unchecked(raw_ary, 0..len));
             }
@@ -268,7 +287,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
         // Safety: `len` is larger than the array size. Copy a fixed amount here to fully initialize
         // the array.
         return unsafe {
-            ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, N);
+            ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, N);
             self.ptr = self.ptr.add(N);
             Ok(raw_ary.transpose().assume_init())
         };
@@ -286,26 +305,33 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
         // Also note the implementation of `Self: TrustedRandomAccess` requires
         // that `T: Copy` so reading elements from the buffer doesn't invalidate
         // them for `Drop`.
-        unsafe { if T::IS_ZST { mem::zeroed() } else { ptr::read(self.ptr.add(i)) } }
+        unsafe { if T::IS_ZST { mem::zeroed() } else { self.ptr.add(i).read() } }
     }
 }

 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
     #[inline]
     fn next_back(&mut self) -> Option<T> {
-        if self.end == self.ptr {
-            None
-        } else if T::IS_ZST {
-            // See above for why 'ptr.offset' isn't used
-            self.end = self.end.wrapping_byte_sub(1);
-
-            // Make up a value of this ZST.
-            Some(unsafe { mem::zeroed() })
+        if T::IS_ZST {
+            if self.end as *mut _ == self.ptr.as_ptr() {
+                None
+            } else {
+                // See above for why 'ptr.offset' isn't used
+                self.end = self.end.wrapping_byte_sub(1);
+
+                // Make up a value of this ZST.
+                Some(unsafe { mem::zeroed() })
+            }
         } else {
-            self.end = unsafe { self.end.sub(1) };
+            if non_null!(self.end, T) == self.ptr {
+                None
+            } else {
+                let new_end = unsafe { non_null!(self.end, T).sub(1) };
+                *non_null!(mut self.end, T) = new_end;

-            Some(unsafe { ptr::read(self.end) })
+                Some(unsafe { ptr::read(new_end.as_ptr()) })
+            }
         }
     }

@@ -331,7 +357,11 @@ impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
     fn is_empty(&self) -> bool {
-        self.ptr == self.end
+        if T::IS_ZST {
+            self.ptr.as_ptr() == self.end as *mut _
+        } else {
+            self.ptr == non_null!(self.end, T)
+        }
     }
 }
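Note (not part of the commit): a minimal, standalone sketch of the reinterpretation the new `non_null!` macro performs. The `Cursor` type and `end_non_null` helper below are illustrative names, not stdlib items. The address of a `*const T` place is cast to `*const NonNull<T>` and read, which is sound only while the stored pointer is known to be non-null, as `end` is for non-ZST `T` after this change.

use core::ptr::{self, NonNull};

// Hypothetical stand-in for the iterator's pointer pair; not the stdlib type.
struct Cursor<T> {
    ptr: NonNull<T>,
    end: *const T, // assumed non-null for non-ZST T, mirroring the commit's invariant
}

impl<T> Cursor<T> {
    // View `end` as `NonNull<T>` without changing the field's declared type,
    // the same in-place reinterpretation that `non_null!(self.end, T)` performs.
    fn end_non_null(&self) -> NonNull<T> {
        // SAFETY (assumed): `end` is derived from a valid allocation and is never null here.
        unsafe { *(ptr::addr_of!(self.end) as *const NonNull<T>) }
    }

    fn is_empty(&self) -> bool {
        self.ptr == self.end_non_null()
    }
}

fn main() {
    let data = [1u32, 2, 3];
    let cursor = Cursor { ptr: NonNull::from(&data[0]), end: data.as_ptr().wrapping_add(3) };
    assert!(!cursor.is_empty());
}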
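A second standalone sketch (also illustrative, not from the commit) of the ZST encoding described in the struct docs above: for a zero-sized `T`, `end` is not a real one-past-the-end pointer but `ptr` with the remaining length added byte-wise, so `ptr == end` still means "empty" and each step shrinks `end` rather than advancing `ptr`, which must stay aligned.

use core::ptr::NonNull;

fn main() {
    // Pretend we are iterating over 3 values of a zero-sized type.
    let ptr: *const () = NonNull::<()>::dangling().as_ptr();
    let len = 3usize;
    let mut end: *const () = ptr.wrapping_byte_add(len); // `end` encodes ptr + len

    let mut yielded = 0;
    while ptr != end {
        // Shrink from the back so `ptr` stays where it is (and stays aligned).
        end = end.wrapping_byte_sub(1);
        yielded += 1;
    }
    assert_eq!(yielded, len);
}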