//! Module containing the [`WeakVec`] API.

use alloc::{sync::Weak, vec::Vec};

/// An optimized container for `Weak` references of `T` that minimizes reallocations by
/// dropping older elements that no longer have strong references to them.
#[derive(Debug)]
pub(crate) struct WeakVec<T> {
    inner: Vec<Weak<T>>,
}

impl<T> Default for WeakVec<T> {
    fn default() -> Self {
        Self {
            inner: Default::default(),
        }
    }
}

impl<T> WeakVec<T> {
    pub(crate) fn new() -> Self {
        Self { inner: Vec::new() }
    }

    /// Pushes a new element to this collection.
    ///
    /// If the inner Vec needs to be reallocated, we will first drop older elements that
    /// no longer have strong references to them.
    pub(crate) fn push(&mut self, value: Weak<T>) {
        if self.inner.len() == self.inner.capacity() {
            // Iterating backwards means `swap_remove` only moves in elements we have
            // already checked, so no index is examined twice.
            for i in (0..self.inner.len()).rev() {
                if self.inner[i].strong_count() == 0 {
                    self.inner.swap_remove(i);
                }
            }
            // Make sure our capacity is twice the number of live elements.
            // Leaving some spare capacity ensures that we won't re-scan immediately.
            self.inner.reserve_exact(self.inner.len());
        }
        self.inner.push(value);
    }
}

pub(crate) struct WeakVecIter<T> {
    inner: alloc::vec::IntoIter<Weak<T>>,
}

impl<T> Iterator for WeakVecIter<T> {
    type Item = Weak<T>;

    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }
}

impl<T> IntoIterator for WeakVec<T> {
    type Item = Weak<T>;
    type IntoIter = WeakVecIter<T>;

    fn into_iter(self) -> Self::IntoIter {
        WeakVecIter {
            inner: self.inner.into_iter(),
        }
    }
}
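
// A minimal usage sketch, added here for illustration only: it exercises `push` and
// `into_iter` through the public(crate) API. It assumes the crate links `alloc`
// (as the `use alloc::...` import above implies) so that `alloc::sync::Arc` is
// available in tests; the test names and values are hypothetical.
#[cfg(test)]
mod tests {
    use super::WeakVec;
    use alloc::sync::Arc;

    #[test]
    fn dead_weaks_do_not_survive_as_upgradable_entries() {
        let mut weak_vec = WeakVec::new();

        // Push a weak reference whose strong reference is dropped immediately...
        let dead = Arc::new(1u32);
        weak_vec.push(Arc::downgrade(&dead));
        drop(dead);

        // ...and one whose strong reference stays alive.
        let live = Arc::new(2u32);
        weak_vec.push(Arc::downgrade(&live));

        // Push enough additional entries to hit the capacity check a few times;
        // `push` scans out dead entries whenever the inner Vec would reallocate.
        for _ in 0..16 {
            weak_vec.push(Arc::downgrade(&live));
        }

        // Only the entries backed by a live `Arc` can still be upgraded.
        let upgradable = weak_vec
            .into_iter()
            .filter(|weak| weak.upgrade().is_some())
            .count();
        assert_eq!(upgradable, 17);
    }
}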