@@ -17,6 +17,17 @@ use core::ops::Deref;
 use core::ptr::{self, NonNull};
 use core::slice::{self};
 
+macro non_null {
+    (mut $place:expr, $t:ident) => {{
+        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
+        unsafe { &mut *ptr::addr_of_mut!($place).cast::<NonNull<$t>>() }
+    }},
+    ($place:expr, $t:ident) => {{
+        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
+        unsafe { *ptr::addr_of!($place).cast::<NonNull<$t>>() }
+    }},
+}
+
 /// An iterator that moves out of a vector.
 ///
 /// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
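The `non_null!` macro reinterprets a `*const T` place as `NonNull<T>` in place, without changing the field's declared type. A minimal standalone sketch of the read arm (the `Raw` struct is hypothetical; the only assumptions are the documented guarantee that `NonNull<T>` has the same layout as `*const T`, and that the stored pointer really is non-null):

```rust
use core::ptr::{self, NonNull};

struct Raw {
    end: *const u8,
}

fn main() {
    let x = 5u8;
    let raw = Raw { end: &x };

    // Read the `*const u8` place back out as `NonNull<u8>`. Sound only
    // because NonNull<T> is layout-compatible with *const T and the
    // stored pointer is known to be non-null here.
    let nn: NonNull<u8> = unsafe { *ptr::addr_of!(raw.end).cast::<NonNull<u8>>() };
    assert_eq!(nn.as_ptr().cast_const(), raw.end);
}
```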
@@ -40,10 +51,12 @@ pub struct IntoIter<
     // the drop impl reconstructs a RawVec from buf, cap and alloc
     // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop
     pub(super) alloc: ManuallyDrop<A>,
-    pub(super) ptr: *const T,
-    pub(super) end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
-                              // ptr == end is a quick test for the Iterator being empty, that works
-                              // for both ZST and non-ZST.
+    pub(super) ptr: NonNull<T>,
+    /// If T is a ZST, this is actually ptr+len. This encoding is picked so that
+    /// ptr == end is a quick test for the Iterator being empty, that works
+    /// for both ZST and non-ZST.
+    /// For non-ZSTs the pointer is treated as `NonNull<T>`
+    pub(super) end: *const T,
 }
 
 #[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
@@ -67,7 +80,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
     /// ```
     #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
     pub fn as_slice(&self) -> &[T] {
-        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
+        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len()) }
     }
 
     /// Returns the remaining items of this iterator as a mutable slice.
@@ -96,7 +109,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
     }
 
     fn as_raw_mut_slice(&mut self) -> *mut [T] {
-        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
+        ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len())
     }
 
     /// Drops remaining elements and relinquishes the backing allocation.
@@ -123,7 +136,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
         // this creates less assembly
         self.cap = 0;
         self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) };
-        self.ptr = self.buf.as_ptr();
+        self.ptr = self.buf;
         self.end = self.buf.as_ptr();
 
         // Dropping the remaining elements can panic, so this needs to be
@@ -137,7 +150,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
     pub(crate) fn forget_remaining_elements(&mut self) {
         // For the ZST case, it is crucial that we mutate `end` here, not `ptr`.
         // `ptr` must stay aligned, while `end` may be unaligned.
-        self.end = self.ptr;
+        self.end = self.ptr.as_ptr();
     }
 
     #[cfg(not(no_global_oom_handling))]
@@ -159,7 +172,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
             // say that they're all at the beginning of the "allocation".
             0..this.len()
         } else {
-            this.ptr.sub_ptr(buf)..this.end.sub_ptr(buf)
+            this.ptr.sub_ptr(this.buf)..this.end.sub_ptr(buf)
         };
         let cap = this.cap;
         let alloc = ManuallyDrop::take(&mut this.alloc);
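`sub_ptr` here computes the element offsets of the still-live range inside the original buffer, so the allocation can be handed to `VecDeque` wholesale. The same computation sketched with the stable `offset_from` (`sub_ptr` is its unstable unsigned counterpart), on a hypothetical buffer:

```rust
fn main() {
    let buf = [10u8, 20, 30, 40, 50];
    let base = buf.as_ptr();
    // Suppose two elements were consumed from the front and one from the back.
    let ptr = unsafe { base.add(2) };
    let end = unsafe { base.add(4) };

    let start = unsafe { ptr.offset_from(base) } as usize;
    let stop = unsafe { end.offset_from(base) } as usize;
    assert_eq!(start..stop, 2..4); // the initialized subrange
}
```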
@@ -186,37 +199,43 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
 
     #[inline]
     fn next(&mut self) -> Option<T> {
-        if self.ptr == self.end {
-            None
-        } else if T::IS_ZST {
-            // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
-            // reducing the `end`.
-            self.end = self.end.wrapping_byte_sub(1);
-
-            // Make up a value of this ZST.
-            Some(unsafe { mem::zeroed() })
+        if T::IS_ZST {
+            if self.ptr.as_ptr().cast_const() == self.end {
+                None
+            } else {
+                // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
+                // reducing the `end`.
+                self.end = self.end.wrapping_byte_sub(1);
+
+                // Make up a value of this ZST.
+                Some(unsafe { mem::zeroed() })
+            }
         } else {
-            let old = self.ptr;
-            self.ptr = unsafe { self.ptr.add(1) };
+            if self.ptr == non_null!(self.end, T) {
+                None
+            } else {
+                let old = self.ptr;
+                self.ptr = unsafe { old.add(1) };
 
-            Some(unsafe { ptr::read(old) })
+                Some(unsafe { ptr::read(old.as_ptr()) })
+            }
         }
     }
 
     #[inline]
     fn size_hint(&self) -> (usize, Option<usize>) {
         let exact = if T::IS_ZST {
-            self.end.addr().wrapping_sub(self.ptr.addr())
+            self.end.addr().wrapping_sub(self.ptr.as_ptr().addr())
         } else {
-            unsafe { self.end.sub_ptr(self.ptr) }
+            unsafe { non_null!(self.end, T).sub_ptr(self.ptr) }
         };
         (exact, Some(exact))
     }
 
     #[inline]
     fn advance_by(&mut self, n: usize) -> Result<(), NonZeroUsize> {
         let step_size = self.len().min(n);
-        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr as *mut T, step_size);
+        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), step_size);
         if T::IS_ZST {
             // See `next` for why we sub `end` here.
             self.end = self.end.wrapping_byte_sub(step_size);
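For ZSTs the iterator never moves `ptr` (it must stay aligned); it counts down by shrinking `end` one byte at a time. That counting scheme in isolation, as a sketch:

```rust
fn main() {
    let zs = [(); 3];
    let ptr = zs.as_ptr();
    // Encode the remaining count in the address: end = ptr + 3 bytes.
    let mut end = ptr.wrapping_byte_add(zs.len());

    let mut yielded = 0;
    while ptr.addr() != end.addr() {
        end = end.wrapping_byte_sub(1); // "consume" one zero-sized element
        yielded += 1;
    }
    assert_eq!(yielded, 3);
}
```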
@@ -258,7 +277,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
             // Safety: `len` indicates that this many elements are available and we just checked that
             // it fits into the array.
             unsafe {
-                ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, len);
+                ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, len);
                 self.forget_remaining_elements();
                 return Err(array::IntoIter::new_unchecked(raw_ary, 0..len));
             }
@@ -267,7 +286,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
         // Safety: `len` is larger than the array size. Copy a fixed amount here to fully initialize
         // the array.
         return unsafe {
-            ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, N);
+            ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, N);
             self.ptr = self.ptr.add(N);
             Ok(raw_ary.transpose().assume_init())
         };
@@ -286,7 +305,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
         // that `T: Copy` so reading elements from the buffer doesn't invalidate
         // them for `Drop`.
         unsafe {
-            if T::IS_ZST { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
+            if T::IS_ZST { mem::zeroed() } else { ptr::read(self.ptr.add(i).as_ptr()) }
         }
     }
 }
@@ -295,18 +314,25 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
 impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
     #[inline]
     fn next_back(&mut self) -> Option<T> {
-        if self.end == self.ptr {
-            None
-        } else if T::IS_ZST {
-            // See above for why 'ptr.offset' isn't used
-            self.end = self.end.wrapping_byte_sub(1);
-
-            // Make up a value of this ZST.
-            Some(unsafe { mem::zeroed() })
+        if T::IS_ZST {
+            if self.end == self.ptr.as_ptr().cast_const() {
+                None
+            } else {
+                // See above for why 'ptr.offset' isn't used
+                self.end = self.end.wrapping_byte_sub(1);
+
+                // Make up a value of this ZST.
+                Some(unsafe { mem::zeroed() })
+            }
         } else {
-            self.end = unsafe { self.end.sub(1) };
+            if non_null!(self.end, T) == self.ptr {
+                None
+            } else {
+                let new_end = unsafe { non_null!(self.end, T).sub(1) };
+                *non_null!(mut self.end, T) = new_end;
 
-            Some(unsafe { ptr::read(self.end) })
+                Some(unsafe { ptr::read(new_end.as_ptr()) })
+            }
         }
     }
 
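The `mut` arm of `non_null!` yields a `&mut NonNull<T>` view of the `*const T` field, which is how `next_back` stores the decremented pointer. A sketch of that write path under the same layout assumption as above (hypothetical `Raw` struct again):

```rust
use core::ptr::{self, NonNull};

struct Raw {
    end: *const u32,
}

fn main() {
    let a = [1u32, 2];
    let mut raw = Raw { end: unsafe { a.as_ptr().add(2) } };

    // View the raw-pointer field as a mutable NonNull slot...
    let slot: &mut NonNull<u32> =
        unsafe { &mut *ptr::addr_of_mut!(raw.end).cast::<NonNull<u32>>() };
    // ...and step it back one element, as next_back does.
    *slot = unsafe { slot.sub(1) };
    assert_eq!(raw.end, unsafe { a.as_ptr().add(1) });
}
```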
@@ -332,7 +358,11 @@ impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
     fn is_empty(&self) -> bool {
-        self.ptr == self.end
+        if T::IS_ZST {
+            self.ptr.as_ptr().cast_const() == self.end
+        } else {
+            self.ptr == non_null!(self.end, T)
+        }
     }
 }
 
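Observable behaviour is unchanged by the `NonNull` migration; both the non-ZST and ZST paths still drain from either end:

```rust
fn main() {
    // Non-ZST path: `ptr` (now NonNull) advances, `end` retreats.
    let mut it = vec![1, 2, 3].into_iter();
    assert_eq!(it.next(), Some(1));
    assert_eq!(it.next_back(), Some(3));
    assert_eq!(it.len(), 1);

    // ZST path: the remaining count lives in `end`'s address.
    let mut zit = vec![(), (), ()].into_iter();
    assert_eq!(zit.size_hint(), (3, Some(3)));
    assert_eq!(zit.next(), Some(()));
    assert_eq!(zit.next_back(), Some(()));
    assert_eq!(zit.len(), 1);
}
```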