//! Defines the `IntoIter` owned iterator for arrays.

use crate::{
    fmt,
    iter::{ExactSizeIterator, FusedIterator, TrustedLen},
    mem::{self, MaybeUninit},
    ops::Range,
    ptr,
};
use super::LengthAtMost32;


/// A by-value [array] iterator.
///
/// [array]: ../../std/primitive.array.html
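///
/// # Examples
///
/// A minimal usage sketch. This assumes the iterator is re-exported from the
/// parent module as `core::array::IntoIter` and that the unstable
/// `array_value_iter` feature is enabled:
///
/// ```
/// #![feature(array_value_iter)]
///
/// // Assumed re-export path; the type is defined in `core::array`.
/// use core::array::IntoIter;
///
/// let mut sum = 0;
/// // Each element is moved out of the array by value.
/// for value in IntoIter::new([1, 2, 3]) {
///     sum += value;
/// }
/// assert_eq!(sum, 6);
/// ```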
#[unstable(feature = "array_value_iter", issue = "0")]
pub struct IntoIter<T, const N: usize>
where
    [T; N]: LengthAtMost32,
{
    /// This is the array we are iterating over.
    ///
    /// Elements with index `i` where `alive.start <= i < alive.end` have not
    /// been yielded yet and are valid array entries. Elements with indices `i
    /// < alive.start` or `i >= alive.end` have been yielded already and must
    /// not be accessed anymore! Those dead elements might even be in a
    /// completely uninitialized state!
    ///
    /// So the invariants are:
    /// - `data[alive]` is alive (i.e. contains valid elements)
    /// - `data[..alive.start]` and `data[alive.end..]` are dead (i.e. the
    ///   elements were already read and must not be touched anymore!)
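    ///
    /// For example, with `N == 5` and `alive == 2..4`, the elements `data[2]`
    /// and `data[3]` are alive, while `data[0]`, `data[1]`, and `data[4]` are
    /// dead and must not be touched.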
    data: [MaybeUninit<T>; N],

    /// The elements in `data` that have not been yielded yet.
    ///
    /// Invariants:
    /// - `alive.start <= alive.end`
    /// - `alive.end <= N`
    alive: Range<usize>,
}

impl<T, const N: usize> IntoIter<T, {N}>
where
    [T; N]: LengthAtMost32,
{
    /// Creates a new iterator over the given `array`.
    ///
    /// *Note*: this method might never be stabilized, or might be removed in
    /// the future, as there will likely be another, preferred way of obtaining
    /// this iterator (for example via an `IntoIterator` implementation for
    /// arrays).
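    ///
    /// # Examples
    ///
    /// A small sketch of the intended usage, assuming this type is
    /// re-exported from the parent module as `core::array::IntoIter`:
    ///
    /// ```
    /// #![feature(array_value_iter)]
    ///
    /// // Assumed re-export path; the type is defined in `core::array`.
    /// use core::array::IntoIter;
    ///
    /// let mut iter = IntoIter::new([1, 2, 3]);
    /// assert_eq!(iter.next(), Some(1));
    /// assert_eq!(iter.next_back(), Some(3));
    /// assert_eq!(iter.next(), Some(2));
    /// assert_eq!(iter.next(), None);
    /// ```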
    #[unstable(feature = "array_value_iter", issue = "0")]
    pub fn new(array: [T; N]) -> Self {
        // The transmute here is actually safe. The docs of `MaybeUninit`
        // promise:
        //
        // > `MaybeUninit<T>` is guaranteed to have the same size and alignment
        // > as `T`.
        //
        // The docs even show a transmute from an array of `MaybeUninit<T>` to
        // an array of `T`.
        //
        // With that, this initialization satisfies the invariants.

        // FIXME(LukasKalbertodt): actually use `mem::transmute` here, once it
        // works with const generics:
        //     `mem::transmute::<[T; {N}], [MaybeUninit<T>; {N}]>(array)`
        //
        // Until then, we do it manually here. We first create a bitwise copy
        // but cast the pointer so that it is treated as a different type. Then
        // we forget `array` so that it is not dropped.
        let data = unsafe {
            let data = ptr::read(&array as *const [T; N] as *const [MaybeUninit<T>; N]);
            mem::forget(array);
            data
        };

        Self {
            data,
            alive: 0..N,
        }
    }

    /// Returns an immutable slice of all elements that have not been yielded
    /// yet.
    fn as_slice(&self) -> &[T] {
        // This transmute is safe. As mentioned in `new`, `MaybeUninit` retains
        // the size and alignment of `T`. Furthermore, we know that all
        // elements within `alive` are properly initialized.
        let slice = &self.data[self.alive.clone()];
        unsafe {
            mem::transmute::<&[MaybeUninit<T>], &[T]>(slice)
        }
    }
}


#[stable(feature = "array_value_iter_impls", since = "1.38.0")]
impl<T, const N: usize> Iterator for IntoIter<T, {N}>
where
    [T; N]: LengthAtMost32,
{
    type Item = T;
    fn next(&mut self) -> Option<Self::Item> {
        if self.alive.start == self.alive.end {
            return None;
        }

        // Bump start index.
        //
        // From the check above we know that `alive.start != alive.end`.
        // Combining this with the invariant `alive.start <= alive.end`, we know
        // that `alive.start < alive.end`. Increasing `alive.start` by 1
        // maintains the invariant regarding `alive`. However, due to this
        // change, for a short time, the alive zone is not `data[alive]`
        // anymore, but `data[idx..alive.end]`.
        let idx = self.alive.start;
        self.alive.start += 1;

        // Read the element from the array. This is safe: `idx` is an index
        // into the "alive" region of the array. Reading this element means
        // that `data[idx]` is regarded as dead now (i.e. do not touch). As
        // `idx` was the start of the alive-zone, the alive zone is now
        // `data[alive]` again, restoring all invariants.
        let out = unsafe { self.data.get_unchecked(idx).read() };

        Some(out)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.len();
        (len, Some(len))
    }

    fn count(self) -> usize {
        self.len()
    }

    fn last(mut self) -> Option<Self::Item> {
        self.next_back()
    }
}

#[stable(feature = "array_value_iter_impls", since = "1.38.0")]
impl<T, const N: usize> DoubleEndedIterator for IntoIter<T, {N}>
where
    [T; N]: LengthAtMost32,
{
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.alive.start == self.alive.end {
            return None;
        }

        // Decrease end index.
        //
        // From the check above we know that `alive.start != alive.end`.
        // Combining this with the invariant `alive.start <= alive.end`, we know
        // that `alive.start < alive.end`. As `alive.start` cannot be negative,
        // `alive.end` is at least 1, meaning that we can safely decrement it
        // by one. This also maintains the invariant `alive.start <=
        // alive.end`. However, due to this change, for a short time, the alive
        // zone is not `data[alive]` anymore, but `data[alive.start..alive.end
        // + 1]`.
        self.alive.end -= 1;

        // Read the element from the array. This is safe: `alive.end` is an
        // index into the "alive" region of the array. Compare the previous
        // comment that states that the alive region is
        // `data[alive.start..alive.end + 1]`. Reading this element means that
        // `data[alive.end]` is regarded as dead now (i.e. do not touch). As
        // `alive.end` was the end of the alive-zone, the alive zone is now
        // `data[alive]` again, restoring all invariants.
        let out = unsafe { self.data.get_unchecked(self.alive.end).read() };

        Some(out)
    }
}

#[stable(feature = "array_value_iter_impls", since = "1.38.0")]
impl<T, const N: usize> Drop for IntoIter<T, {N}>
where
    [T; N]: LengthAtMost32,
{
    fn drop(&mut self) {
        // We simply drop each element via `for_each`. This should not incur
        // any significant runtime overhead and avoids adding another `unsafe`
        // block.
        self.by_ref().for_each(drop);
    }
}

#[stable(feature = "array_value_iter_impls", since = "1.38.0")]
impl<T, const N: usize> ExactSizeIterator for IntoIter<T, {N}>
where
    [T; N]: LengthAtMost32,
{
    fn len(&self) -> usize {
        // Will never underflow due to the invariant `alive.start <=
        // alive.end`.
        self.alive.end - self.alive.start
    }
    fn is_empty(&self) -> bool {
        self.alive.is_empty()
    }
}

#[stable(feature = "array_value_iter_impls", since = "1.38.0")]
impl<T, const N: usize> FusedIterator for IntoIter<T, {N}>
where
    [T; N]: LengthAtMost32,
{}

// The iterator indeed reports the correct length. The number of "alive"
// elements (that will still be yielded) is the length of the range `alive`.
// The length of this range is decreased by exactly 1 each time `next` or
// `next_back` returns `Some(_)`, and is never changed otherwise.
#[stable(feature = "array_value_iter_impls", since = "1.38.0")]
unsafe impl<T, const N: usize> TrustedLen for IntoIter<T, {N}>
where
    [T; N]: LengthAtMost32,
{}

#[stable(feature = "array_value_iter_impls", since = "1.38.0")]
impl<T: Clone, const N: usize> Clone for IntoIter<T, {N}>
where
    [T; N]: LengthAtMost32,
{
    fn clone(&self) -> Self {
        unsafe {
            // This creates a new uninitialized array. Note that the `assume_init`
            // refers to the array, not the individual elements. And it is Ok if
            // the array is in an uninitialized state as all elements may be
            // uninitialized (all bit patterns are valid). Compare the
            // `MaybeUninit` docs for more information.
            let mut new_data: [MaybeUninit<T>; N] = MaybeUninit::uninit().assume_init();

            // Clone all alive elements.
            for idx in self.alive.clone() {
                // The element at `idx` in the old array is alive, so we can
                // safely call `get_ref()`. We then clone it, and write the
                // clone into the new array.
                let clone = self.data.get_unchecked(idx).get_ref().clone();
                new_data.get_unchecked_mut(idx).write(clone);
            }

            Self {
                data: new_data,
                alive: self.alive.clone(),
            }
        }
    }
}

#[stable(feature = "array_value_iter_impls", since = "1.38.0")]
impl<T: fmt::Debug, const N: usize> fmt::Debug for IntoIter<T, {N}>
where
    [T; N]: LengthAtMost32,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Only print the elements that were not yielded yet: we cannot
        // access the yielded elements anymore.
        f.debug_tuple("IntoIter")
            .field(&self.as_slice())
            .finish()
    }
}