use core::iter::{
    FusedIterator, InPlaceIterable, SourceIter, TrustedFused, TrustedLen,
    TrustedRandomAccessNoCoerce,
};
use core::marker::PhantomData;
use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
use core::num::NonZero;
#[cfg(not(no_global_oom_handling))]
use core::ops::Deref;
use core::ptr::{self, NonNull};
use core::slice::{self};
use core::{array, fmt};

#[cfg(not(no_global_oom_handling))]
use super::AsVecIntoIter;
use crate::alloc::{Allocator, Global};
#[cfg(not(no_global_oom_handling))]
use crate::collections::VecDeque;
use crate::raw_vec::RawVec;

macro non_null {
    (mut $place:expr, $t:ident) => {{
        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
        unsafe { &mut *((&raw mut $place) as *mut NonNull<$t>) }
    }},
    ($place:expr, $t:ident) => {{
        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
        unsafe { *((&raw const $place) as *const NonNull<$t>) }
    }},
}
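
// A minimal sketch of the two forms, assuming a struct field `end: *const T`
// that is known to be non-null (both rely on `*const T` and `NonNull<T>`
// having the same layout):
//
//     let end = non_null!(self.end, T);                     // read the field as NonNull<T>
//     *non_null!(mut self.end, T) = unsafe { end.add(1) };  // write back through the alias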

/// An iterator that moves out of a vector.
///
/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
/// (provided by the [`IntoIterator`] trait).
///
/// # Example
///
/// ```
/// let v = vec![0, 1, 2];
/// let iter: std::vec::IntoIter<_> = v.into_iter();
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    pub(super) buf: NonNull<T>,
    pub(super) phantom: PhantomData<T>,
    pub(super) cap: usize,
    // The drop impl reconstructs a RawVec from buf, cap and alloc;
    // to avoid dropping the allocator twice we need to wrap it in ManuallyDrop.
    pub(super) alloc: ManuallyDrop<A>,
    pub(super) ptr: NonNull<T>,
    /// If T is a ZST, this is actually ptr+len. This encoding is picked so that
    /// ptr == end is a quick test for the iterator being empty, one that works
    /// for both ZST and non-ZST (see the length sketch after this struct).
    /// For non-ZSTs the pointer is treated as `NonNull<T>`.
    pub(super) end: *const T,
}
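
// A sketch of the length bookkeeping under the two encodings (this mirrors
// what `size_hint` computes below):
//
//     non-ZST: len = end.offset_from_unsigned(ptr)  // pointer distance, in elements
//     ZST:     len = end.addr() - ptr.addr()        // `end` encodes ptr + len
//
// Under both encodings, `ptr == end` exactly when the iterator is empty.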

#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

impl<T, A: Allocator> IntoIter<T, A> {
    /// Returns the remaining items of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// let _ = into_iter.next().unwrap();
    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_slice(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len()) }
    }

    /// Returns the remaining items of this iterator as a mutable slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// into_iter.as_mut_slice()[2] = 'z';
    /// assert_eq!(into_iter.next().unwrap(), 'a');
    /// assert_eq!(into_iter.next().unwrap(), 'b');
    /// assert_eq!(into_iter.next().unwrap(), 'z');
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { &mut *self.as_raw_mut_slice() }
    }

    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len())
    }

    /// Drops remaining elements and relinquishes the backing allocation.
    ///
    /// This method guarantees it won't panic before relinquishing the backing
    /// allocation.
    ///
    /// This is roughly equivalent to the following, but more efficient:
    ///
    /// ```
    /// # let mut vec = Vec::<u8>::with_capacity(10);
    /// # let ptr = vec.as_mut_ptr();
    /// # let mut into_iter = vec.into_iter();
    /// let mut into_iter = std::mem::replace(&mut into_iter, Vec::new().into_iter());
    /// (&mut into_iter).for_each(drop);
    /// std::mem::forget(into_iter);
    /// # // FIXME(https://github.com/rust-lang/miri/issues/3670):
    /// # // use -Zmiri-disable-leak-check instead of unleaking in tests meant to leak.
    /// # drop(unsafe { Vec::<u8>::from_raw_parts(ptr, 0, 10) });
    /// ```
    ///
    /// This method is used by in-place iteration; refer to the vec::in_place_collect
    /// documentation for an overview.
    #[cfg(not(no_global_oom_handling))]
    pub(super) fn forget_allocation_drop_remaining(&mut self) {
        let remaining = self.as_raw_mut_slice();

        // Overwrite the individual fields instead of creating a new
        // struct and then overwriting &mut self.
        // This creates less assembly.
        self.cap = 0;
        self.buf = RawVec::new().non_null();
        self.ptr = self.buf;
        self.end = self.buf.as_ptr();

        // Dropping the remaining elements can panic, so this needs to be
        // done only after updating the other fields.
        unsafe {
            ptr::drop_in_place(remaining);
        }
    }

    /// Forgets the remaining elements without dropping them, while still
    /// allowing the backing allocation to be freed.
    pub(crate) fn forget_remaining_elements(&mut self) {
        // For the ZST case, it is crucial that we mutate `end` here, not `ptr`.
        // `ptr` must stay aligned, while `end` may be unaligned.
        self.end = self.ptr.as_ptr();
    }
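
    // A sketch of the effect: after this call `ptr == end`, so `next()` returns
    // `None` immediately. The elements that were still pending are leaked (their
    // destructors never run), but the `Drop` impl below still reconstructs the
    // `RawVec` from `buf`/`cap`/`alloc` and frees the backing buffer.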

    #[cfg(not(no_global_oom_handling))]
    #[inline]
    pub(crate) fn into_vecdeque(self) -> VecDeque<T, A> {
        // Keep our `Drop` impl from dropping the elements and the allocator
        let mut this = ManuallyDrop::new(self);

        // SAFETY: This allocation originally came from a `Vec`, so it passes
        // all those checks. We have `this.buf` ≤ `this.ptr` ≤ `this.end`,
        // so the `offset_from_unsigned`s below cannot wrap, and will produce a well-formed
        // range. `end` ≤ `buf + cap`, so the range will be in-bounds.
        // Taking `alloc` is ok because nothing else is going to look at it,
        // since our `Drop` impl isn't going to run so there's no more code.
        unsafe {
            let buf = this.buf.as_ptr();
            let initialized = if T::IS_ZST {
                // All the pointers are the same for ZSTs, so it's fine to
                // say that they're all at the beginning of the "allocation".
                0..this.len()
            } else {
                this.ptr.offset_from_unsigned(this.buf)..this.end.offset_from_unsigned(buf)
            };
            let cap = this.cap;
            let alloc = ManuallyDrop::take(&mut this.alloc);
            VecDeque::from_contiguous_raw_parts_in(buf, initialized, cap, alloc)
        }
    }
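
    // A sketch of the conversion (hedged; assuming, as in the standard library,
    // that this backs `VecDeque::from(Vec<T>)`): the remaining elements stay
    // where they are and become the deque's initialized range, so no copy or
    // reallocation happens.
    //
    //     let mut it = vec![1, 2, 3].into_iter();
    //     it.next(); // the initialized range is now buf[1..3]
    //     let deque = it.into_vecdeque(); // contains [2, 3], same buffer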
}

#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        let ptr = if T::IS_ZST {
            if self.ptr.as_ptr() == self.end as *mut T {
                return None;
            }
            // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
            // reducing the `end`.
            self.end = self.end.wrapping_byte_sub(1);
            self.ptr
        } else {
            if self.ptr == non_null!(self.end, T) {
                return None;
            }
            let old = self.ptr;
            self.ptr = unsafe { old.add(1) };
            old
        };
        Some(unsafe { ptr.read() })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if T::IS_ZST {
            self.end.addr().wrapping_sub(self.ptr.as_ptr().addr())
        } else {
            unsafe { non_null!(self.end, T).offset_from_unsigned(self.ptr) }
        };
        (exact, Some(exact))
    }

    #[inline]
    fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        let step_size = self.len().min(n);
        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), step_size);
        if T::IS_ZST {
            // See `next` for why we sub `end` here.
            self.end = self.end.wrapping_byte_sub(step_size);
        } else {
            // SAFETY: the min() above ensures that step_size is in bounds
            self.ptr = unsafe { self.ptr.add(step_size) };
        }
        // SAFETY: the min() above ensures that step_size is in bounds
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        NonZero::new(n - step_size).map_or(Ok(()), Err)
    }
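
    // A sketch of the contract implemented above: the first min(n, len)
    // elements are dropped in place, and any shortfall is reported via `Err`:
    //
    //     let mut it = vec![1, 2, 3].into_iter();
    //     assert_eq!(it.advance_by(2), Ok(()));  // drops 1 and 2
    //
    //     let mut it = vec![1, 2, 3].into_iter();
    //     assert_eq!(it.advance_by(5), Err(NonZero::new(2).unwrap()));  // 5 requested, 3 available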

    #[inline]
    fn count(self) -> usize {
        self.len()
    }

    #[inline]
    fn last(mut self) -> Option<T> {
        self.next_back()
    }

    #[inline]
    fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], core::array::IntoIter<T, N>> {
        let mut raw_ary = [const { MaybeUninit::uninit() }; N];

        let len = self.len();

        if T::IS_ZST {
            if len < N {
                self.forget_remaining_elements();
                // SAFETY: ZSTs can be conjured ex nihilo; only the amount has to be correct.
                return Err(unsafe { array::IntoIter::new_unchecked(raw_ary, 0..len) });
            }

            self.end = self.end.wrapping_byte_sub(N);
            // SAFETY: ditto
            return Ok(unsafe { raw_ary.transpose().assume_init() });
        }

        if len < N {
            // SAFETY: `len` indicates that this many elements are available and we just checked
            // that it fits into the array.
            unsafe {
                ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, len);
                self.forget_remaining_elements();
                return Err(array::IntoIter::new_unchecked(raw_ary, 0..len));
            }
        }

        // SAFETY: `len` is at least the array size. Copy a fixed amount here to fully initialize
        // the array.
        unsafe {
            ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, N);
            self.ptr = self.ptr.add(N);
            Ok(raw_ary.transpose().assume_init())
        }
    }
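
    // A sketch of the behavior implemented above:
    //
    //     let mut it = vec![1, 2, 3, 4, 5].into_iter();
    //     assert_eq!(it.next_chunk::<2>(), Ok([1, 2]));
    //     assert_eq!(it.next_chunk::<2>(), Ok([3, 4]));
    //     let rest = it.next_chunk::<2>().unwrap_err(); // one element short
    //     assert_eq!(rest.as_slice(), &[5]);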

    fn fold<B, F>(mut self, mut accum: B, mut f: F) -> B
    where
        F: FnMut(B, Self::Item) -> B,
    {
        if T::IS_ZST {
            while self.ptr.as_ptr() != self.end.cast_mut() {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // See `next` for why we subtract from `end` here.
                self.end = self.end.wrapping_byte_sub(1);
                accum = f(accum, tmp);
            }
        } else {
            // SAFETY: `self.end` can only be null if `T` is a ZST.
            while self.ptr != non_null!(self.end, T) {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // SAFETY: the maximum this can be is `self.end`.
                // Increment `self.ptr` first to avoid double dropping in the event of a panic.
                self.ptr = unsafe { self.ptr.add(1) };
                accum = f(accum, tmp);
            }
        }
        accum
    }

    fn try_fold<B, F, R>(&mut self, mut accum: B, mut f: F) -> R
    where
        Self: Sized,
        F: FnMut(B, Self::Item) -> R,
        R: core::ops::Try<Output = B>,
    {
        if T::IS_ZST {
            while self.ptr.as_ptr() != self.end.cast_mut() {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // See `next` for why we subtract from `end` here.
                self.end = self.end.wrapping_byte_sub(1);
                accum = f(accum, tmp)?;
            }
        } else {
            // SAFETY: `self.end` can only be null if `T` is a ZST.
            while self.ptr != non_null!(self.end, T) {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // SAFETY: the maximum this can be is `self.end`.
                // Increment `self.ptr` first to avoid double dropping in the event of a panic.
                self.ptr = unsafe { self.ptr.add(1) };
                accum = f(accum, tmp)?;
            }
        }
        R::from_output(accum)
    }
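
    // A note on the loop ordering above: `self.ptr` is advanced *before*
    // calling `f`. If `f` panics, the element just read is already outside the
    // `ptr..end` window that `Drop` walks with `drop_in_place`, so it is not
    // dropped a second time; the moved-out value is cleaned up during unwinding
    // as `f`'s argument instead.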

    unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
    where
        Self: TrustedRandomAccessNoCoerce,
    {
        // SAFETY: the caller must guarantee that `i` is in bounds of the
        // `Vec<T>`, so `i` cannot overflow an `isize`, and `self.ptr.add(i)`
        // is guaranteed to point to an element of the `Vec<T>` and
        // thus guaranteed to be valid to dereference.
        //
        // Also note the implementation of `Self: TrustedRandomAccess` requires
        // that `T: Copy` so reading elements from the buffer doesn't invalidate
        // them for `Drop`.
        unsafe { self.ptr.add(i).read() }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        if T::IS_ZST {
            if self.ptr.as_ptr() == self.end as *mut _ {
                return None;
            }
            // See above for why 'ptr.offset' isn't used
            self.end = self.end.wrapping_byte_sub(1);
            // Note that even though this is next_back() we're reading from `self.ptr`, not
            // `self.end`. We track our length using the byte offset from `self.ptr` to `self.end`,
            // so the end pointer may not be suitably aligned for T.
            Some(unsafe { ptr::read(self.ptr.as_ptr()) })
        } else {
            if self.ptr == non_null!(self.end, T) {
                return None;
            }
            unsafe {
                self.end = self.end.sub(1);
                Some(ptr::read(self.end))
            }
        }
    }

    #[inline]
    fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        let step_size = self.len().min(n);
        if T::IS_ZST {
            // SAFETY: same as for advance_by()
            self.end = self.end.wrapping_byte_sub(step_size);
        } else {
            // SAFETY: same as for advance_by()
            self.end = unsafe { self.end.sub(step_size) };
        }
        let to_drop = ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size);
        // SAFETY: same as for advance_by()
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        NonZero::new(n - step_size).map_or(Ok(()), Err)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
    fn is_empty(&self) -> bool {
        if T::IS_ZST {
            self.ptr.as_ptr() == self.end as *mut _
        } else {
            self.ptr == non_null!(self.end, T)
        }
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}

#[doc(hidden)]
#[unstable(issue = "none", feature = "trusted_fused")]
unsafe impl<T, A: Allocator> TrustedFused for IntoIter<T, A> {}

#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}

#[stable(feature = "default_iters", since = "1.70.0")]
impl<T, A> Default for IntoIter<T, A>
where
    A: Allocator + Default,
{
    /// Creates an empty `vec::IntoIter`.
    ///
    /// ```
    /// # use std::vec;
    /// let iter: vec::IntoIter<u8> = Default::default();
    /// assert_eq!(iter.len(), 0);
    /// assert_eq!(iter.as_slice(), &[]);
    /// ```
    fn default() -> Self {
        super::Vec::new_in(Default::default()).into_iter()
    }
}

#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
#[rustc_unsafe_specialization_marker]
pub trait NonDrop {}

// T: Copy is used as an approximation for !Drop, since get_unchecked does not
// advance self.ptr and thus we can't implement drop-handling.
#[unstable(issue = "none", feature = "std_internals")]
impl<T: Copy> NonDrop for T {}

#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
// TrustedRandomAccess (without NoCoerce) must not be implemented because
// subtypes/supertypes of `T` might not be `NonDrop`
unsafe impl<T, A: Allocator> TrustedRandomAccessNoCoerce for IntoIter<T, A>
where
    T: NonDrop,
{
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}
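
// A sketch of why `T: Copy` is required: `__iterator_get_unchecked` reads an
// element without advancing `self.ptr`, so the same slot stays inside the
// `ptr..end` window that `Drop` later walks. For a `Copy` type that duplicate
// read is fine; for a type with a destructor it would be a double drop.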

#[cfg(not(no_global_oom_handling))]
#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
    fn clone(&self) -> Self {
        self.as_slice().to_vec_in(self.alloc.deref().clone()).into_iter()
    }
}
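
// A sketch of the behavior above: cloning copies only the elements that have
// not yet been yielded, into a fresh allocation:
//
//     let mut it = vec![1, 2, 3].into_iter();
//     it.next();
//     assert_eq!(it.clone().as_slice(), &[2, 3]);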

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);

        impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    // `IntoIter::alloc` is not used anymore after this and will be dropped by RawVec
                    let alloc: A = ManuallyDrop::take(&mut self.0.alloc);
                    // RawVec handles deallocation
                    let _ = RawVec::from_nonnull_in(self.0.buf, self.0.cap, alloc);
                }
            }
        }

        let guard = DropGuard(self);
        // destroy the remaining elements
        unsafe {
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
        // now `guard` will be dropped and do the rest
    }
}
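
// A sketch of why the guard exists: if `drop_in_place` panics while dropping a
// remaining element, `guard` is dropped during unwinding and its own `Drop`
// still hands the buffer back to `RawVec`, so the allocation is released
// exactly once on both the normal and the panic path.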

// In addition to the SAFETY invariants of the following three unsafe traits,
// also refer to the vec::in_place_collect module documentation for an overview.
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {
    const EXPAND_BY: Option<NonZero<usize>> = NonZero::new(1);
    const MERGE_BY: Option<NonZero<usize>> = NonZero::new(1);
}
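
// A hedged reading of the constants above (see the `InPlaceIterable` docs for
// the authoritative definition): EXPAND_BY = MERGE_BY = 1 says this iterator
// yields exactly one item per slot consumed from the source allocation, so an
// in-place collect never needs more room than the original `Vec` provided.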

#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    type Source = Self;

    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}

#[cfg(not(no_global_oom_handling))]
unsafe impl<T> AsVecIntoIter for IntoIter<T> {
    type Item = T;

    fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> {
        self
    }
}