alloc/vec/into_iter.rs

use core::iter::{
    FusedIterator, InPlaceIterable, SourceIter, TrustedFused, TrustedLen,
    TrustedRandomAccessNoCoerce,
};
use core::marker::PhantomData;
use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
use core::num::NonZero;
#[cfg(not(no_global_oom_handling))]
use core::ops::Deref;
use core::ptr::{self, NonNull};
use core::slice::{self};
use core::{array, fmt};

#[cfg(not(no_global_oom_handling))]
use super::AsVecIntoIter;
use crate::alloc::{Allocator, Global};
#[cfg(not(no_global_oom_handling))]
use crate::collections::VecDeque;
use crate::raw_vec::RawVec;

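// Added note: this helper macro reinterprets the given place (in practice the
// `end: *const T` field) as a `NonNull<T>`, either by value or as a mutable
// place. Callers must ensure the pointer is actually non-null; in this file the
// macro is only used on the non-ZST code paths, where that holds.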
macro non_null {
    (mut $place:expr, $t:ident) => {{
        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
        unsafe { &mut *((&raw mut $place) as *mut NonNull<$t>) }
    }},
    ($place:expr, $t:ident) => {{
        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
        unsafe { *((&raw const $place) as *const NonNull<$t>) }
    }},
}

/// An iterator that moves out of a vector.
///
/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
/// (provided by the [`IntoIterator`] trait).
///
/// # Example
///
/// ```
/// let v = vec![0, 1, 2];
/// let iter: std::vec::IntoIter<_> = v.into_iter();
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    pub(super) buf: NonNull<T>,
    pub(super) phantom: PhantomData<T>,
    pub(super) cap: usize,
    // the drop impl reconstructs a RawVec from buf, cap and alloc
    // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop
    pub(super) alloc: ManuallyDrop<A>,
    pub(super) ptr: NonNull<T>,
    /// If T is a ZST, this is actually ptr+len. This encoding is picked so that
    /// ptr == end is a quick test for the Iterator being empty, that works
    /// for both ZST and non-ZST.
    /// For non-ZSTs the pointer is treated as `NonNull<T>`
    pub(super) end: *const T,
}
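// Added note (informal invariant sketch, not normative): for non-ZST `T`,
// `buf <= ptr <= end <= buf + cap`, and the elements not yet yielded live in
// `ptr..end`. For ZSTs, `end`'s address is `ptr`'s address plus the remaining
// length, so `ptr == end` is still the "iterator is empty" test.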

#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

impl<T, A: Allocator> IntoIter<T, A> {
    /// Returns the remaining items of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// let _ = into_iter.next().unwrap();
    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_slice(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len()) }
    }

    /// Returns the remaining items of this iterator as a mutable slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// into_iter.as_mut_slice()[2] = 'z';
    /// assert_eq!(into_iter.next().unwrap(), 'a');
    /// assert_eq!(into_iter.next().unwrap(), 'b');
    /// assert_eq!(into_iter.next().unwrap(), 'z');
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { &mut *self.as_raw_mut_slice() }
    }

    /// Returns a reference to the underlying allocator.
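    ///
    /// # Examples
    ///
    /// A minimal sketch (nightly-only, since this method and `std::alloc::Global`
    /// are both behind the unstable `allocator_api` feature):
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::alloc::Global;
    ///
    /// let into_iter = vec![1, 2, 3].into_iter();
    /// // The default `Vec` allocator is `Global`.
    /// let _alloc: &Global = into_iter.allocator();
    /// ```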
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len())
    }

    /// Drops remaining elements and relinquishes the backing allocation.
    ///
    /// This method guarantees it won't panic before relinquishing the backing
    /// allocation.
    ///
    /// This is roughly equivalent to the following, but more efficient
    ///
    /// ```
    /// # let mut vec = Vec::<u8>::with_capacity(10);
    /// # let ptr = vec.as_mut_ptr();
    /// # let mut into_iter = vec.into_iter();
    /// let mut into_iter = std::mem::replace(&mut into_iter, Vec::new().into_iter());
    /// (&mut into_iter).for_each(drop);
    /// std::mem::forget(into_iter);
    /// # // FIXME(https://github.com/rust-lang/miri/issues/3670):
    /// # // use -Zmiri-disable-leak-check instead of unleaking in tests meant to leak.
    /// # drop(unsafe { Vec::<u8>::from_raw_parts(ptr, 0, 10) });
    /// ```
    ///
    /// This method is used by in-place iteration, refer to the vec::in_place_collect
    /// documentation for an overview.
    #[cfg(not(no_global_oom_handling))]
    pub(super) fn forget_allocation_drop_remaining(&mut self) {
        let remaining = self.as_raw_mut_slice();

        // overwrite the individual fields instead of creating a new
        // struct and then overwriting &mut self.
        // this creates less assembly
        self.cap = 0;
        self.buf = RawVec::new().non_null();
        self.ptr = self.buf;
        self.end = self.buf.as_ptr();

        // Dropping the remaining elements can panic, so this needs to be
        // done only after updating the other fields.
        unsafe {
            ptr::drop_in_place(remaining);
        }
    }

    /// Forgets the remaining elements without dropping them, while still allowing
    /// the backing allocation to be freed.
    pub(crate) fn forget_remaining_elements(&mut self) {
        // For the ZST case, it is crucial that we mutate `end` here, not `ptr`.
        // `ptr` must stay aligned, while `end` may be unaligned.
        self.end = self.ptr.as_ptr();
    }

    #[cfg(not(no_global_oom_handling))]
    #[inline]
    pub(crate) fn into_vecdeque(self) -> VecDeque<T, A> {
        // Keep our `Drop` impl from dropping the elements and the allocator
        let mut this = ManuallyDrop::new(self);

        // SAFETY: This allocation originally came from a `Vec`, so it passes
        // all those checks. We have `this.buf` ≤ `this.ptr` ≤ `this.end`,
        // so the `offset_from_unsigned` calls below cannot wrap, and will produce
        // a well-formed range. `end` ≤ `buf + cap`, so the range will be in-bounds.
        // Taking `alloc` is ok because nothing else is going to look at it,
        // since our `Drop` impl isn't going to run so there's no more code.
        unsafe {
            let buf = this.buf.as_ptr();
            let initialized = if T::IS_ZST {
                // All the pointers are the same for ZSTs, so it's fine to
                // say that they're all at the beginning of the "allocation".
                0..this.len()
            } else {
                this.ptr.offset_from_unsigned(this.buf)..this.end.offset_from_unsigned(buf)
            };
            let cap = this.cap;
            let alloc = ManuallyDrop::take(&mut this.alloc);
            VecDeque::from_contiguous_raw_parts_in(buf, initialized, cap, alloc)
        }
    }
}

#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        let ptr = if T::IS_ZST {
            if self.ptr.as_ptr() == self.end as *mut T {
                return None;
            }
            // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
            // reducing the `end`.
            self.end = self.end.wrapping_byte_sub(1);
            self.ptr
        } else {
            if self.ptr == non_null!(self.end, T) {
                return None;
            }
            let old = self.ptr;
            self.ptr = unsafe { old.add(1) };
            old
        };
        Some(unsafe { ptr.read() })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if T::IS_ZST {
            self.end.addr().wrapping_sub(self.ptr.as_ptr().addr())
        } else {
            unsafe { non_null!(self.end, T).offset_from_unsigned(self.ptr) }
        };
        (exact, Some(exact))
    }

    #[inline]
    fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        let step_size = self.len().min(n);
        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), step_size);
        if T::IS_ZST {
            // See `next` for why we sub `end` here.
            self.end = self.end.wrapping_byte_sub(step_size);
        } else {
            // SAFETY: the min() above ensures that step_size is in bounds
            self.ptr = unsafe { self.ptr.add(step_size) };
        }
        // SAFETY: the min() above ensures that step_size is in bounds
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        NonZero::new(n - step_size).map_or(Ok(()), Err)
    }

    #[inline]
    fn count(self) -> usize {
        self.len()
    }

    #[inline]
    fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], core::array::IntoIter<T, N>> {
        let mut raw_ary = [const { MaybeUninit::uninit() }; N];

        let len = self.len();

        if T::IS_ZST {
            if len < N {
                self.forget_remaining_elements();
                // Safety: ZSTs can be conjured ex nihilo, only the amount has to be correct
                return Err(unsafe { array::IntoIter::new_unchecked(raw_ary, 0..len) });
            }

            self.end = self.end.wrapping_byte_sub(N);
            // Safety: ditto
            return Ok(unsafe { raw_ary.transpose().assume_init() });
        }

        if len < N {
            // Safety: `len` indicates that this many elements are available and we just checked that
            // it fits into the array.
            unsafe {
                ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, len);
                self.forget_remaining_elements();
                return Err(array::IntoIter::new_unchecked(raw_ary, 0..len));
            }
        }

        // Safety: `len` is at least the array size. Copy a fixed amount here to fully initialize
        // the array.
        unsafe {
            ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, N);
            self.ptr = self.ptr.add(N);
            Ok(raw_ary.transpose().assume_init())
        }
    }

    fn fold<B, F>(mut self, mut accum: B, mut f: F) -> B
    where
        F: FnMut(B, Self::Item) -> B,
    {
        if T::IS_ZST {
            while self.ptr.as_ptr() != self.end.cast_mut() {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // See `next` for why we subtract from `end` here.
                self.end = self.end.wrapping_byte_sub(1);
                accum = f(accum, tmp);
            }
        } else {
            // SAFETY: `self.end` can only be null if `T` is a ZST.
            while self.ptr != non_null!(self.end, T) {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // SAFETY: the maximum this can be is `self.end`.
                // Increment `self.ptr` first to avoid double dropping in the event of a panic.
                self.ptr = unsafe { self.ptr.add(1) };
                accum = f(accum, tmp);
            }
        }
        accum
    }

    fn try_fold<B, F, R>(&mut self, mut accum: B, mut f: F) -> R
    where
        Self: Sized,
        F: FnMut(B, Self::Item) -> R,
        R: core::ops::Try<Output = B>,
    {
        if T::IS_ZST {
            while self.ptr.as_ptr() != self.end.cast_mut() {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // See `next` for why we subtract from `end` here.
                self.end = self.end.wrapping_byte_sub(1);
                accum = f(accum, tmp)?;
            }
        } else {
            // SAFETY: `self.end` can only be null if `T` is a ZST.
            while self.ptr != non_null!(self.end, T) {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // SAFETY: the maximum this can be is `self.end`.
                // Increment `self.ptr` first to avoid double dropping in the event of a panic.
                self.ptr = unsafe { self.ptr.add(1) };
                accum = f(accum, tmp)?;
            }
        }
        R::from_output(accum)
    }

    unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
    where
        Self: TrustedRandomAccessNoCoerce,
    {
        // SAFETY: the caller must guarantee that `i` is in bounds of the
        // `Vec<T>`, so `i` cannot overflow an `isize`, and `self.ptr.add(i)`
        // is guaranteed to point to an element of the `Vec<T>` and
        // thus guaranteed to be valid to dereference.
        //
        // Also note the implementation of `Self: TrustedRandomAccess` requires
        // that `T: Copy` so reading elements from the buffer doesn't invalidate
        // them for `Drop`.
        unsafe { self.ptr.add(i).read() }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        if T::IS_ZST {
            if self.ptr.as_ptr() == self.end as *mut _ {
                return None;
            }
            // See above for why 'ptr.offset' isn't used
            self.end = self.end.wrapping_byte_sub(1);
            // Note that even though this is next_back() we're reading from `self.ptr`, not
            // `self.end`. We track our length using the byte offset from `self.ptr` to `self.end`,
            // so the end pointer may not be suitably aligned for T.
            Some(unsafe { ptr::read(self.ptr.as_ptr()) })
        } else {
            if self.ptr == non_null!(self.end, T) {
                return None;
            }
            unsafe {
                self.end = self.end.sub(1);
                Some(ptr::read(self.end))
            }
        }
    }

    #[inline]
    fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        let step_size = self.len().min(n);
        if T::IS_ZST {
            // SAFETY: same as for advance_by()
            self.end = self.end.wrapping_byte_sub(step_size);
        } else {
            // SAFETY: same as for advance_by()
            self.end = unsafe { self.end.sub(step_size) };
        }
        let to_drop = ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size);
        // SAFETY: same as for advance_by()
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        NonZero::new(n - step_size).map_or(Ok(()), Err)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
    fn is_empty(&self) -> bool {
        if T::IS_ZST {
            self.ptr.as_ptr() == self.end as *mut _
        } else {
            self.ptr == non_null!(self.end, T)
        }
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}

#[doc(hidden)]
#[unstable(issue = "none", feature = "trusted_fused")]
unsafe impl<T, A: Allocator> TrustedFused for IntoIter<T, A> {}

#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}

#[stable(feature = "default_iters", since = "1.70.0")]
impl<T, A> Default for IntoIter<T, A>
where
    A: Allocator + Default,
{
    /// Creates an empty `vec::IntoIter`.
    ///
    /// ```
    /// # use std::vec;
    /// let iter: vec::IntoIter<u8> = Default::default();
    /// assert_eq!(iter.len(), 0);
    /// assert_eq!(iter.as_slice(), &[]);
    /// ```
    fn default() -> Self {
        super::Vec::new_in(Default::default()).into_iter()
    }
}

#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
#[rustc_unsafe_specialization_marker]
pub trait NonDrop {}

// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr
// and thus we can't implement drop-handling
#[unstable(issue = "none", feature = "std_internals")]
impl<T: Copy> NonDrop for T {}
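// Added note: for example, `vec![1u8, 2, 3].into_iter()` gets the specialized
// `TrustedRandomAccessNoCoerce` impl below because `u8: Copy`, while a
// `Vec<String>` iterator does not, since `String` is not `Copy`.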

#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
// TrustedRandomAccess (without NoCoerce) must not be implemented because
// subtypes/supertypes of `T` might not be `NonDrop`
unsafe impl<T, A: Allocator> TrustedRandomAccessNoCoerce for IntoIter<T, A>
where
    T: NonDrop,
{
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}

#[cfg(not(no_global_oom_handling))]
#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
    fn clone(&self) -> Self {
        self.as_slice().to_vec_in(self.alloc.deref().clone()).into_iter()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
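        // Use a guard so the backing allocation is still returned to the
        // allocator even if dropping one of the remaining elements panics.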
        struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);

        impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    // `IntoIter::alloc` is not used anymore after this and will be dropped by RawVec
                    let alloc = ManuallyDrop::take(&mut self.0.alloc);
                    // RawVec handles deallocation
                    let _ = RawVec::from_nonnull_in(self.0.buf, self.0.cap, alloc);
                }
            }
        }

        let guard = DropGuard(self);
        // destroy the remaining elements
        unsafe {
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
        // now `guard` will be dropped and do the rest
    }
}

// In addition to the SAFETY invariants of the following three unsafe traits,
// also refer to the vec::in_place_collect module documentation to get an overview
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {
    const EXPAND_BY: Option<NonZero<usize>> = NonZero::new(1);
    const MERGE_BY: Option<NonZero<usize>> = NonZero::new(1);
}

#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    type Source = Self;

    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}

#[cfg(not(no_global_oom_handling))]
unsafe impl<T> AsVecIntoIter for IntoIter<T> {
    type Item = T;

    fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> {
        self
    }
}