Lines matching references to T:

    T,
pub(super) buf: NonNull<T>,
pub(super) phantom: PhantomData<T>,
pub(super) ptr: *const T,
pub(super) end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
                          // ptr == end is a quick test for the iterator being empty, one that works
                          // for both ZST and non-ZST T.
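
The comment on `end` is doing real work: a ZST occupies zero bytes, so advancing `ptr` could never make `ptr == end` become true. Instead, the remaining length is encoded into the address of `end` itself, and a single pointer comparison serves as the emptiness test in both cases. A minimal sketch of that encoding, with a hypothetical `RawIter` that ignores the owned buffer, allocator, and drop handling:

    use std::marker::PhantomData;
    use std::mem;

    struct RawIter<T> {
        ptr: *const T,
        end: *const T, // for ZSTs: base address plus the remaining length
        _marker: PhantomData<T>,
    }

    impl<T> Iterator for RawIter<T> {
        type Item = T;

        fn next(&mut self) -> Option<T> {
            if self.ptr == self.end {
                None // the same emptiness test for ZST and non-ZST
            } else if mem::size_of::<T>() == 0 {
                // A ZST pointer can't advance, so count down through `end`.
                self.end = (self.end as usize - 1) as *const T;
                // SAFETY: a ZST value carries no data (the real code also
                // uses mem::zeroed() for this case).
                Some(unsafe { mem::zeroed() })
            } else {
                let old = self.ptr;
                // SAFETY: in the real type, `old` points at an initialized
                // element that will never be read again.
                self.ptr = unsafe { self.ptr.add(1) };
                Some(unsafe { std::ptr::read(old) })
            }
        }
    }

    fn main() {
        // Drive the ZST path: dangling-but-aligned base address, length 3.
        let it = RawIter::<()> { ptr: 1 as *const (), end: 4 as *const (), _marker: PhantomData };
        assert_eq!(it.count(), 3);
    }
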
impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
impl<T, A: Allocator> IntoIter<T, A> {
    pub fn as_slice(&self) -> &[T] {
    pub fn as_mut_slice(&mut self) -> &mut [T] {
    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
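
Both slice views cover only the elements not yet yielded, which is why they are derived from the current `ptr` and the remaining `len()`. On stable Rust:

    fn main() {
        let mut it = vec![1, 2, 3].into_iter();
        assert_eq!(it.as_slice(), &[1, 2, 3]);

        it.next();
        // The view shrinks as the iterator advances...
        assert_eq!(it.as_slice(), &[2, 3]);

        // ...and the mutable view writes into the remaining buffer.
        it.as_mut_slice()[0] = 42;
        assert_eq!(it.next(), Some(42));
    }
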
    pub(crate) fn into_vecdeque(self) -> VecDeque<T, A> {
        let initialized = if T::IS_ZST {
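
`into_vecdeque` is crate-private; the public way to reach this code path is, to my understanding, the `From<Vec<T>> for VecDeque<T>` conversion, whose documentation promises O(1) behavior reusing the `Vec`'s buffer:

    use std::collections::VecDeque;

    fn main() {
        let v = vec![1, 2, 3];
        // Buffer handoff: no per-element copy at the Vec -> VecDeque boundary.
        let mut dq = VecDeque::from(v);
        dq.push_front(0);
        assert_eq!(dq, [0, 1, 2, 3]);
    }
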
impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
    fn as_ref(&self) -> &[T] {
unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {}
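
These unsafe impls forward thread-safety from `T` (and `A`): the iterator owns its elements, so it is `Send`/`Sync` exactly when they are. That is what allows, for example:

    use std::thread;

    fn main() {
        let it = vec![1, 2, 3].into_iter();
        // IntoIter<i32> is Send because i32 is Send, so it can move threads.
        let sum = thread::spawn(move || it.sum::<i32>()).join().unwrap();
        assert_eq!(sum, 6);
    }
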
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;
    fn next(&mut self) -> Option<T> {
        } else if T::IS_ZST { // in next()
        let exact = if T::IS_ZST { // in size_hint()
        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr as *mut T, step_size); // in advance_by()
        if T::IS_ZST { // in advance_by()
    fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], core::array::IntoIter<T, N>> {
        if T::IS_ZST {
            ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, len);
            ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, N);
        if T::IS_ZST { mem::zeroed() } else { ptr::read(self.ptr.add(i)) } // in __iterator_get_unchecked()
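
The `ptr::read` and `ptr::copy_nonoverlapping` calls are what make this an owning iterator: each element is moved out of the buffer by value rather than borrowed. For example:

    fn main() {
        let v = vec![String::from("a"), String::from("b"), String::from("c")];
        let mut it = v.into_iter();

        // next() moves the element out (the ptr::read above); the String is owned.
        let first: String = it.next().unwrap();
        assert_eq!(first, "a");

        // size_hint() is exact, computed from the remaining ptr..end range.
        assert_eq!(it.size_hint(), (2, Some(2)));
    }
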
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    fn next_back(&mut self) -> Option<T> {
        } else if T::IS_ZST { // in next_back()
        if T::IS_ZST { // in advance_back_by()
        let to_drop = ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size); // in advance_back_by()
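
`next_back` mirrors `next` from the other side: `end` walks down toward `ptr` (for ZSTs, the encoded length is decremented instead). The two cursors are independent:

    fn main() {
        let mut it = vec![1, 2, 3].into_iter();
        assert_eq!(it.next_back(), Some(3)); // `end` steps down
        assert_eq!(it.next(), Some(1));      // `ptr` steps up
        assert_eq!(it.next_back(), Some(2));
        assert_eq!(it.next_back(), None);    // cursors met: ptr == end
    }
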
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}
unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}
impl<T, A> Default for IntoIter<T, A>
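
The `Default` impl (stabilized in a relatively recent Rust release) produces an empty iterator without any allocation:

    fn main() {
        let mut it: std::vec::IntoIter<i32> = Default::default();
        assert_eq!(it.len(), 0);     // ExactSizeIterator::len
        assert_eq!(it.next(), None); // and, being fused, it stays empty
    }
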
impl<T: Copy> NonDrop for T {} // Copy types can't implement Drop, so re-reading an element can't double-drop
unsafe impl<T, A: Allocator> TrustedRandomAccessNoCoerce for IntoIter<T, A>
where
    T: NonDrop,
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
    struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>); // in drop()
    impl<T, A: Allocator> Drop for DropGuard<'_, T, A> { // in drop()
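
The `DropGuard` declared inside `drop` is the standard panic-safety pattern: drop the remaining elements first, and let the guard's own `Drop` release the backing allocation so deallocation happens even if an element's destructor panics. A compiling sketch with hypothetical names (`RawVecIter` stands in for the real type, and a `Vec` round-trip stands in for the real `RawVec` deallocation):

    use std::mem::ManuallyDrop;
    use std::ptr;

    struct RawVecIter<T> {
        start: *mut T,
        len: usize, // elements remaining; doubles as capacity in this toy model
    }

    impl<T> Drop for RawVecIter<T> {
        fn drop(&mut self) {
            struct DropGuard<'a, T>(&'a mut RawVecIter<T>);

            impl<T> Drop for DropGuard<'_, T> {
                fn drop(&mut self) {
                    // Rebuild a zero-length Vec over the buffer and drop it,
                    // which frees the allocation without re-dropping elements.
                    // Runs on normal exit *and* while unwinding from below.
                    unsafe { drop(Vec::from_raw_parts(self.0.start, 0, self.0.len)) };
                }
            }

            let guard = DropGuard(self);
            // Drop the not-yet-yielded elements. If one of them panics, the
            // unwind still runs `guard`'s Drop, so the buffer is not leaked.
            unsafe {
                ptr::drop_in_place(ptr::slice_from_raw_parts_mut(guard.0.start, guard.0.len));
            }
            // `guard` falls out of scope here: allocation released exactly once.
        }
    }

    fn main() {
        let mut v = ManuallyDrop::new(vec![String::from("a"), String::from("b")]);
        let it = RawVecIter { start: v.as_mut_ptr(), len: v.len() };
        drop(it); // drops both Strings, then frees the Vec's buffer
    }
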
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {}
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
unsafe impl<T> AsVecIntoIter for IntoIter<T> {
    type Item = T;
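
`InPlaceIterable`, `SourceIter`, and `AsVecIntoIter` are the unstable plumbing behind in-place collection: a `Vec` -> adapter -> `Vec` pipeline may write results back into the source allocation instead of allocating a new one. It is an internal optimization rather than a documented guarantee, so this example only observes the result, not the buffer reuse:

    fn main() {
        let v: Vec<u32> = (0..1024).collect();

        // With compatible element layouts, this chain can reuse v's buffer.
        let w: Vec<u32> = v.into_iter().map(|x| x + 1).collect();

        assert_eq!(w.len(), 1024);
        assert_eq!(w[0], 1);
    }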