1 // SPDX-License-Identifier: Apache-2.0 OR MIT 2 3 #[cfg(not(no_global_oom_handling))] 4 use super::AsVecIntoIter; 5 use crate::alloc::{Allocator, Global}; 6 #[cfg(not(no_global_oom_handling))] 7 use crate::collections::VecDeque; 8 use crate::raw_vec::RawVec; 9 use core::array; 10 use core::fmt; 11 use core::iter::{ 12 FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccessNoCoerce, 13 }; 14 use core::marker::PhantomData; 15 use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties}; 16 #[cfg(not(no_global_oom_handling))] 17 use core::ops::Deref; 18 use core::ptr::{self, NonNull}; 19 use core::slice::{self}; 20 21 /// An iterator that moves out of a vector. 22 /// 23 /// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec) 24 /// (provided by the [`IntoIterator`] trait). 25 /// 26 /// # Example 27 /// 28 /// ``` 29 /// let v = vec![0, 1, 2]; 30 /// let iter: std::vec::IntoIter<_> = v.into_iter(); 31 /// ``` 32 #[stable(feature = "rust1", since = "1.0.0")] 33 #[rustc_insignificant_dtor] 34 pub struct IntoIter< 35 T, 36 #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, 37 > { 38 pub(super) buf: NonNull<T>, 39 pub(super) phantom: PhantomData<T>, 40 pub(super) cap: usize, 41 // the drop impl reconstructs a RawVec from buf, cap and alloc 42 // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop 43 pub(super) alloc: ManuallyDrop<A>, 44 pub(super) ptr: *const T, 45 pub(super) end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that 46 // ptr == end is a quick test for the Iterator being empty, that works 47 // for both ZST and non-ZST. 48 } 49 50 #[stable(feature = "vec_intoiter_debug", since = "1.13.0")] 51 impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> { 52 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 53 f.debug_tuple("IntoIter").field(&self.as_slice()).finish() 54 } 55 } 56 57 impl<T, A: Allocator> IntoIter<T, A> { 58 /// Returns the remaining items of this iterator as a slice. 59 /// 60 /// # Examples 61 /// 62 /// ``` 63 /// let vec = vec!['a', 'b', 'c']; 64 /// let mut into_iter = vec.into_iter(); 65 /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']); 66 /// let _ = into_iter.next().unwrap(); 67 /// assert_eq!(into_iter.as_slice(), &['b', 'c']); 68 /// ``` 69 #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")] 70 pub fn as_slice(&self) -> &[T] { 71 unsafe { slice::from_raw_parts(self.ptr, self.len()) } 72 } 73 74 /// Returns the remaining items of this iterator as a mutable slice. 75 /// 76 /// # Examples 77 /// 78 /// ``` 79 /// let vec = vec!['a', 'b', 'c']; 80 /// let mut into_iter = vec.into_iter(); 81 /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']); 82 /// into_iter.as_mut_slice()[2] = 'z'; 83 /// assert_eq!(into_iter.next().unwrap(), 'a'); 84 /// assert_eq!(into_iter.next().unwrap(), 'b'); 85 /// assert_eq!(into_iter.next().unwrap(), 'z'); 86 /// ``` 87 #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")] 88 pub fn as_mut_slice(&mut self) -> &mut [T] { 89 unsafe { &mut *self.as_raw_mut_slice() } 90 } 91 92 /// Returns a reference to the underlying allocator. 93 #[unstable(feature = "allocator_api", issue = "32838")] 94 #[inline] 95 pub fn allocator(&self) -> &A { 96 &self.alloc 97 } 98 99 fn as_raw_mut_slice(&mut self) -> *mut [T] { 100 ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len()) 101 } 102 103 /// Drops remaining elements and relinquishes the backing allocation. 104 /// This method guarantees it won't panic before relinquishing 105 /// the backing allocation. 106 /// 107 /// This is roughly equivalent to the following, but more efficient 108 /// 109 /// ``` 110 /// # let mut into_iter = Vec::<u8>::with_capacity(10).into_iter(); 111 /// let mut into_iter = std::mem::replace(&mut into_iter, Vec::new().into_iter()); 112 /// (&mut into_iter).for_each(core::mem::drop); 113 /// std::mem::forget(into_iter); 114 /// ``` 115 /// 116 /// This method is used by in-place iteration, refer to the vec::in_place_collect 117 /// documentation for an overview. 118 #[cfg(not(no_global_oom_handling))] 119 pub(super) fn forget_allocation_drop_remaining(&mut self) { 120 let remaining = self.as_raw_mut_slice(); 121 122 // overwrite the individual fields instead of creating a new 123 // struct and then overwriting &mut self. 124 // this creates less assembly 125 self.cap = 0; 126 self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) }; 127 self.ptr = self.buf.as_ptr(); 128 self.end = self.buf.as_ptr(); 129 130 // Dropping the remaining elements can panic, so this needs to be 131 // done only after updating the other fields. 132 unsafe { 133 ptr::drop_in_place(remaining); 134 } 135 } 136 137 /// Forgets to Drop the remaining elements while still allowing the backing allocation to be freed. 138 pub(crate) fn forget_remaining_elements(&mut self) { 139 // For th ZST case, it is crucial that we mutate `end` here, not `ptr`. 140 // `ptr` must stay aligned, while `end` may be unaligned. 141 self.end = self.ptr; 142 } 143 144 #[cfg(not(no_global_oom_handling))] 145 #[inline] 146 pub(crate) fn into_vecdeque(self) -> VecDeque<T, A> { 147 // Keep our `Drop` impl from dropping the elements and the allocator 148 let mut this = ManuallyDrop::new(self); 149 150 // SAFETY: This allocation originally came from a `Vec`, so it passes 151 // all those checks. We have `this.buf` ≤ `this.ptr` ≤ `this.end`, 152 // so the `sub_ptr`s below cannot wrap, and will produce a well-formed 153 // range. `end` ≤ `buf + cap`, so the range will be in-bounds. 154 // Taking `alloc` is ok because nothing else is going to look at it, 155 // since our `Drop` impl isn't going to run so there's no more code. 156 unsafe { 157 let buf = this.buf.as_ptr(); 158 let initialized = if T::IS_ZST { 159 // All the pointers are the same for ZSTs, so it's fine to 160 // say that they're all at the beginning of the "allocation". 161 0..this.len() 162 } else { 163 this.ptr.sub_ptr(buf)..this.end.sub_ptr(buf) 164 }; 165 let cap = this.cap; 166 let alloc = ManuallyDrop::take(&mut this.alloc); 167 VecDeque::from_contiguous_raw_parts_in(buf, initialized, cap, alloc) 168 } 169 } 170 } 171 172 #[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")] 173 impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> { 174 fn as_ref(&self) -> &[T] { 175 self.as_slice() 176 } 177 } 178 179 #[stable(feature = "rust1", since = "1.0.0")] 180 unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {} 181 #[stable(feature = "rust1", since = "1.0.0")] 182 unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {} 183 184 #[stable(feature = "rust1", since = "1.0.0")] 185 impl<T, A: Allocator> Iterator for IntoIter<T, A> { 186 type Item = T; 187 188 #[inline] 189 fn next(&mut self) -> Option<T> { 190 if self.ptr == self.end { 191 None 192 } else if T::IS_ZST { 193 // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by 194 // reducing the `end`. 195 self.end = self.end.wrapping_byte_sub(1); 196 197 // Make up a value of this ZST. 198 Some(unsafe { mem::zeroed() }) 199 } else { 200 let old = self.ptr; 201 self.ptr = unsafe { self.ptr.add(1) }; 202 203 Some(unsafe { ptr::read(old) }) 204 } 205 } 206 207 #[inline] 208 fn size_hint(&self) -> (usize, Option<usize>) { 209 let exact = if T::IS_ZST { 210 self.end.addr().wrapping_sub(self.ptr.addr()) 211 } else { 212 unsafe { self.end.sub_ptr(self.ptr) } 213 }; 214 (exact, Some(exact)) 215 } 216 217 #[inline] 218 fn advance_by(&mut self, n: usize) -> Result<(), usize> { 219 let step_size = self.len().min(n); 220 let to_drop = ptr::slice_from_raw_parts_mut(self.ptr as *mut T, step_size); 221 if T::IS_ZST { 222 // See `next` for why we sub `end` here. 223 self.end = self.end.wrapping_byte_sub(step_size); 224 } else { 225 // SAFETY: the min() above ensures that step_size is in bounds 226 self.ptr = unsafe { self.ptr.add(step_size) }; 227 } 228 // SAFETY: the min() above ensures that step_size is in bounds 229 unsafe { 230 ptr::drop_in_place(to_drop); 231 } 232 if step_size < n { 233 return Err(step_size); 234 } 235 Ok(()) 236 } 237 238 #[inline] 239 fn count(self) -> usize { 240 self.len() 241 } 242 243 #[inline] 244 fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], core::array::IntoIter<T, N>> { 245 let mut raw_ary = MaybeUninit::uninit_array(); 246 247 let len = self.len(); 248 249 if T::IS_ZST { 250 if len < N { 251 self.forget_remaining_elements(); 252 // Safety: ZSTs can be conjured ex nihilo, only the amount has to be correct 253 return Err(unsafe { array::IntoIter::new_unchecked(raw_ary, 0..len) }); 254 } 255 256 self.end = self.end.wrapping_byte_sub(N); 257 // Safety: ditto 258 return Ok(unsafe { raw_ary.transpose().assume_init() }); 259 } 260 261 if len < N { 262 // Safety: `len` indicates that this many elements are available and we just checked that 263 // it fits into the array. 264 unsafe { 265 ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, len); 266 self.forget_remaining_elements(); 267 return Err(array::IntoIter::new_unchecked(raw_ary, 0..len)); 268 } 269 } 270 271 // Safety: `len` is larger than the array size. Copy a fixed amount here to fully initialize 272 // the array. 273 return unsafe { 274 ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, N); 275 self.ptr = self.ptr.add(N); 276 Ok(raw_ary.transpose().assume_init()) 277 }; 278 } 279 280 unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item 281 where 282 Self: TrustedRandomAccessNoCoerce, 283 { 284 // SAFETY: the caller must guarantee that `i` is in bounds of the 285 // `Vec<T>`, so `i` cannot overflow an `isize`, and the `self.ptr.add(i)` 286 // is guaranteed to pointer to an element of the `Vec<T>` and 287 // thus guaranteed to be valid to dereference. 288 // 289 // Also note the implementation of `Self: TrustedRandomAccess` requires 290 // that `T: Copy` so reading elements from the buffer doesn't invalidate 291 // them for `Drop`. 292 unsafe { 293 if T::IS_ZST { mem::zeroed() } else { ptr::read(self.ptr.add(i)) } 294 } 295 } 296 } 297 298 #[stable(feature = "rust1", since = "1.0.0")] 299 impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> { 300 #[inline] 301 fn next_back(&mut self) -> Option<T> { 302 if self.end == self.ptr { 303 None 304 } else if T::IS_ZST { 305 // See above for why 'ptr.offset' isn't used 306 self.end = self.end.wrapping_byte_sub(1); 307 308 // Make up a value of this ZST. 309 Some(unsafe { mem::zeroed() }) 310 } else { 311 self.end = unsafe { self.end.sub(1) }; 312 313 Some(unsafe { ptr::read(self.end) }) 314 } 315 } 316 317 #[inline] 318 fn advance_back_by(&mut self, n: usize) -> Result<(), usize> { 319 let step_size = self.len().min(n); 320 if T::IS_ZST { 321 // SAFETY: same as for advance_by() 322 self.end = self.end.wrapping_byte_sub(step_size); 323 } else { 324 // SAFETY: same as for advance_by() 325 self.end = unsafe { self.end.sub(step_size) }; 326 } 327 let to_drop = ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size); 328 // SAFETY: same as for advance_by() 329 unsafe { 330 ptr::drop_in_place(to_drop); 331 } 332 if step_size < n { 333 return Err(step_size); 334 } 335 Ok(()) 336 } 337 } 338 339 #[stable(feature = "rust1", since = "1.0.0")] 340 impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> { 341 fn is_empty(&self) -> bool { 342 self.ptr == self.end 343 } 344 } 345 346 #[stable(feature = "fused", since = "1.26.0")] 347 impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {} 348 349 #[unstable(feature = "trusted_len", issue = "37572")] 350 unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {} 351 352 #[doc(hidden)] 353 #[unstable(issue = "none", feature = "std_internals")] 354 #[rustc_unsafe_specialization_marker] 355 pub trait NonDrop {} 356 357 // T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr 358 // and thus we can't implement drop-handling 359 #[unstable(issue = "none", feature = "std_internals")] 360 impl<T: Copy> NonDrop for T {} 361 362 #[doc(hidden)] 363 #[unstable(issue = "none", feature = "std_internals")] 364 // TrustedRandomAccess (without NoCoerce) must not be implemented because 365 // subtypes/supertypes of `T` might not be `NonDrop` 366 unsafe impl<T, A: Allocator> TrustedRandomAccessNoCoerce for IntoIter<T, A> 367 where 368 T: NonDrop, 369 { 370 const MAY_HAVE_SIDE_EFFECT: bool = false; 371 } 372 373 #[cfg(not(no_global_oom_handling))] 374 #[stable(feature = "vec_into_iter_clone", since = "1.8.0")] 375 impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> { 376 #[cfg(not(test))] 377 fn clone(&self) -> Self { 378 self.as_slice().to_vec_in(self.alloc.deref().clone()).into_iter() 379 } 380 #[cfg(test)] 381 fn clone(&self) -> Self { 382 crate::slice::to_vec(self.as_slice(), self.alloc.deref().clone()).into_iter() 383 } 384 } 385 386 #[stable(feature = "rust1", since = "1.0.0")] 387 unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> { 388 fn drop(&mut self) { 389 struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>); 390 391 impl<T, A: Allocator> Drop for DropGuard<'_, T, A> { 392 fn drop(&mut self) { 393 unsafe { 394 // `IntoIter::alloc` is not used anymore after this and will be dropped by RawVec 395 let alloc = ManuallyDrop::take(&mut self.0.alloc); 396 // RawVec handles deallocation 397 let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc); 398 } 399 } 400 } 401 402 let guard = DropGuard(self); 403 // destroy the remaining elements 404 unsafe { 405 ptr::drop_in_place(guard.0.as_raw_mut_slice()); 406 } 407 // now `guard` will be dropped and do the rest 408 } 409 } 410 411 // In addition to the SAFETY invariants of the following three unsafe traits 412 // also refer to the vec::in_place_collect module documentation to get an overview 413 #[unstable(issue = "none", feature = "inplace_iteration")] 414 #[doc(hidden)] 415 unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {} 416 417 #[unstable(issue = "none", feature = "inplace_iteration")] 418 #[doc(hidden)] 419 unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> { 420 type Source = Self; 421 422 #[inline] 423 unsafe fn as_inner(&mut self) -> &mut Self::Source { 424 self 425 } 426 } 427 428 #[cfg(not(no_global_oom_handling))] 429 unsafe impl<T> AsVecIntoIter for IntoIter<T> { 430 type Item = T; 431 432 fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> { 433 self 434 } 435 } 436