xref: /openbmc/linux/rust/alloc/vec/into_iter.rs (revision f59a3ee6)
// SPDX-License-Identifier: Apache-2.0 OR MIT

#[cfg(not(no_global_oom_handling))]
use super::AsVecIntoIter;
use crate::alloc::{Allocator, Global};
use crate::raw_vec::RawVec;
use core::fmt;
use core::intrinsics::arith_offset;
use core::iter::{
    FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccessNoCoerce,
};
use core::marker::PhantomData;
use core::mem::{self, ManuallyDrop};
#[cfg(not(no_global_oom_handling))]
use core::ops::Deref;
use core::ptr::{self, NonNull};
use core::slice::{self};

/// An iterator that moves out of a vector.
///
/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
/// (provided by the [`IntoIterator`] trait).
///
/// # Example
///
/// ```
/// let v = vec![0, 1, 2];
/// let iter: std::vec::IntoIter<_> = v.into_iter();
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    pub(super) buf: NonNull<T>,
    pub(super) phantom: PhantomData<T>,
    pub(super) cap: usize,
    // The drop impl reconstructs a RawVec from buf, cap and alloc;
    // to avoid dropping the allocator twice we need to wrap it in ManuallyDrop.
    pub(super) alloc: ManuallyDrop<A>,
    pub(super) ptr: *const T,
    pub(super) end: *const T,
}
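
// Illustration (added for exposition, not part of the original source): for a
// non-zero-sized `T`, the fields relate to the backing buffer like this.
// `ptr` advances as `next()` yields from the front and `end` retreats as
// `next_back()` yields from the back; the iterator is empty once they meet:
//
//     buf              ptr            end
//      |                |              |
//      v                v              v
//     [ yielded (front) | remaining... | yielded (back) / spare capacity ]
//      '----------------- cap allocated slots ---------------------------'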

#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

impl<T, A: Allocator> IntoIter<T, A> {
    /// Returns the remaining items of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// let _ = into_iter.next().unwrap();
    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_slice(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
    }

    /// Returns the remaining items of this iterator as a mutable slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// into_iter.as_mut_slice()[2] = 'z';
    /// assert_eq!(into_iter.next().unwrap(), 'a');
    /// assert_eq!(into_iter.next().unwrap(), 'b');
    /// assert_eq!(into_iter.next().unwrap(), 'z');
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { &mut *self.as_raw_mut_slice() }
    }

    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
    }

    /// Drops remaining elements and relinquishes the backing allocation.
    ///
    /// This is roughly equivalent to the following, but more efficient:
    ///
    /// ```
    /// # let mut into_iter = Vec::<u8>::with_capacity(10).into_iter();
    /// (&mut into_iter).for_each(core::mem::drop);
    /// unsafe { core::ptr::write(&mut into_iter, Vec::new().into_iter()); }
    /// ```
    ///
    /// This method is used by in-place iteration; refer to the vec::in_place_collect
    /// documentation for an overview.
    #[cfg(not(no_global_oom_handling))]
    pub(super) fn forget_allocation_drop_remaining(&mut self) {
        let remaining = self.as_raw_mut_slice();

        // Overwrite the individual fields instead of creating a new struct
        // and then overwriting `&mut self`; this produces less assembly.
        self.cap = 0;
        self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) };
        self.ptr = self.buf.as_ptr();
        self.end = self.buf.as_ptr();

        unsafe {
            ptr::drop_in_place(remaining);
        }
    }

    /// Forgets the remaining elements, skipping their `Drop` implementations,
    /// while still allowing the backing allocation to be freed.
    #[allow(dead_code)]
    pub(crate) fn forget_remaining_elements(&mut self) {
        self.ptr = self.end;
    }
}
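
// Illustration (added for exposition; `forget_remaining_elements` is
// `pub(crate)`, so this is a conceptual sketch rather than callable user
// code): after the call the iterator reports zero remaining items, so the
// elements' destructors never run, but dropping the iterator still returns
// the backing buffer to `RawVec`:
//
//     let mut it = vec![String::from("a"), String::from("b")].into_iter();
//     it.forget_remaining_elements(); // the two `String`s' heap data leaks
//     assert_eq!(it.len(), 0);
//     drop(it); // the `Vec`'s own allocation is still freed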

#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        if self.ptr as *const _ == self.end {
            None
        } else if mem::size_of::<T>() == 0 {
            // Purposefully don't use `ptr.offset` because for vectors with
            // zero-sized elements it would return the same pointer.
            self.ptr = unsafe { arith_offset(self.ptr as *const i8, 1) as *mut T };

            // Make up a value of this ZST.
            Some(unsafe { mem::zeroed() })
        } else {
            let old = self.ptr;
            self.ptr = unsafe { self.ptr.offset(1) };

            Some(unsafe { ptr::read(old) })
        }
    }
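
    // Illustration (added for exposition): how a caller observes the
    // zero-sized-element fast path above. A `Vec<()>` never allocates, so the
    // pointers act as a counter and `next()` manufactures each value:
    //
    //     let mut it = vec![(), (), ()].into_iter();
    //     assert_eq!(it.next(), Some(()));
    //     assert_eq!(it.len(), 2);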

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if mem::size_of::<T>() == 0 {
            self.end.addr().wrapping_sub(self.ptr.addr())
        } else {
            unsafe { self.end.sub_ptr(self.ptr) }
        };
        (exact, Some(exact))
    }
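
    // Worked example (added for exposition): for a `Vec<u32>` with three
    // elements remaining, `end` sits 12 bytes past `ptr`, and `sub_ptr`
    // divides the byte distance by `size_of::<u32>()` to give 3. For a ZST
    // the pointers are really a counter (see `next` above), so the raw
    // address difference itself is the remaining length.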

    #[inline]
    fn advance_by(&mut self, n: usize) -> Result<(), usize> {
        let step_size = self.len().min(n);
        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr as *mut T, step_size);
        if mem::size_of::<T>() == 0 {
            // SAFETY: due to unchecked casts of unsigned amounts to signed offsets the wraparound
            // effectively results in unsigned pointers representing positions 0..usize::MAX,
            // which is valid for ZSTs.
            self.ptr = unsafe { arith_offset(self.ptr as *const i8, step_size as isize) as *mut T }
        } else {
            // SAFETY: the min() above ensures that step_size is in bounds
            self.ptr = unsafe { self.ptr.add(step_size) };
        }
        // SAFETY: the min() above ensures that step_size is in bounds
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        if step_size < n {
            return Err(step_size);
        }
        Ok(())
    }
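
    // Illustration (added for exposition): `advance_by` drops the skipped
    // elements in place instead of reading them out, and reports how far it
    // got when the iterator is too short:
    //
    //     let mut it = vec![1, 2, 3, 4].into_iter();
    //     assert_eq!(it.advance_by(2), Ok(())); // 1 and 2 are dropped
    //     assert_eq!(it.next(), Some(3));
    //     assert_eq!(it.advance_by(5), Err(1)); // only 4 was left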

    #[inline]
    fn count(self) -> usize {
        self.len()
    }

    unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
    where
        Self: TrustedRandomAccessNoCoerce,
    {
        // SAFETY: the caller must guarantee that `i` is in bounds of the
        // `Vec<T>`, so `i` cannot overflow an `isize`, and `self.ptr.add(i)`
        // is guaranteed to point to an element of the `Vec<T>` and
        // thus guaranteed to be valid to dereference.
        //
        // Also note the implementation of `Self: TrustedRandomAccess` requires
        // that `T: Copy` so reading elements from the buffer doesn't invalidate
        // them for `Drop`.
        unsafe {
            if mem::size_of::<T>() == 0 { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        if self.end == self.ptr {
            None
        } else if mem::size_of::<T>() == 0 {
            // See above for why `ptr.offset` isn't used.
            self.end = unsafe { arith_offset(self.end as *const i8, -1) as *mut T };

            // Make up a value of this ZST.
            Some(unsafe { mem::zeroed() })
        } else {
            self.end = unsafe { self.end.offset(-1) };

            Some(unsafe { ptr::read(self.end) })
        }
    }

    #[inline]
    fn advance_back_by(&mut self, n: usize) -> Result<(), usize> {
        let step_size = self.len().min(n);
        if mem::size_of::<T>() == 0 {
            // SAFETY: same as for advance_by()
            self.end = unsafe {
                arith_offset(self.end as *const i8, step_size.wrapping_neg() as isize) as *mut T
            }
        } else {
            // SAFETY: same as for advance_by()
            self.end = unsafe { self.end.offset(step_size.wrapping_neg() as isize) };
        }
        let to_drop = ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size);
        // SAFETY: same as for advance_by()
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        if step_size < n {
            return Err(step_size);
        }
        Ok(())
    }
}
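
// Illustration (added for exposition): front and back iteration share the
// same `[ptr, end)` window, so interleaving the two never yields an element
// twice:
//
//     let mut it = vec![1, 2, 3].into_iter();
//     assert_eq!(it.next(), Some(1));      // advances `ptr`
//     assert_eq!(it.next_back(), Some(3)); // retreats `end`
//     assert_eq!(it.next(), Some(2));
//     assert_eq!(it.next(), None);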

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
    fn is_empty(&self) -> bool {
        self.ptr == self.end
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}

#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}

#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
#[rustc_unsafe_specialization_marker]
pub trait NonDrop {}

// `T: Copy` is used as an approximation for `!Drop`, since `get_unchecked`
// does not advance `self.ptr` and thus we can't implement drop handling.
#[unstable(issue = "none", feature = "std_internals")]
impl<T: Copy> NonDrop for T {}

#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
// `TrustedRandomAccess` (without `NoCoerce`) must not be implemented because
// subtypes/supertypes of `T` might not be `NonDrop`.
unsafe impl<T, A: Allocator> TrustedRandomAccessNoCoerce for IntoIter<T, A>
where
    T: NonDrop,
{
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}

#[cfg(not(no_global_oom_handling))]
#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
    #[cfg(not(test))]
    fn clone(&self) -> Self {
        self.as_slice().to_vec_in(self.alloc.deref().clone()).into_iter()
    }
    #[cfg(test)]
    fn clone(&self) -> Self {
        crate::slice::to_vec(self.as_slice(), self.alloc.deref().clone()).into_iter()
    }
}
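
// Illustration (added for exposition): cloning copies only the elements that
// have not been yielded yet into a freshly allocated buffer:
//
//     let mut it = vec![1, 2, 3].into_iter();
//     it.next();
//     let it2 = it.clone();
//     assert_eq!(it2.as_slice(), &[2, 3]);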

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);

        impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    // `IntoIter::alloc` is not used anymore after this and will be dropped by RawVec
                    let alloc = ManuallyDrop::take(&mut self.0.alloc);
                    // RawVec handles deallocation
                    let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc);
                }
            }
        }

        let guard = DropGuard(self);
        // destroy the remaining elements
        unsafe {
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
        // now `guard` will be dropped and do the rest
    }
}
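
// Note (added for exposition): the `DropGuard` above is a panic-safety
// pattern. If `drop_in_place` unwinds while destroying an element, the
// guard's own destructor still runs during unwinding and hands the buffer
// back to `RawVec`, so the allocation is freed exactly once either way.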

// In addition to the SAFETY invariants of the following three unsafe traits,
// also refer to the vec::in_place_collect module documentation for an overview.
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {}

#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    type Source = Self;

    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}

#[cfg(not(no_global_oom_handling))]
unsafe impl<T> AsVecIntoIter for IntoIter<T> {
    type Item = T;

    fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> {
        self
    }
}
367