@@ -1,6 +1,6 @@
 #![cfg(feature = "std")]
 
-use core::{any::type_name, fmt, ptr};
+use core::{any::type_name, fmt, ptr, slice};
 
 use crate::{Drain, WeakFixupFn};
 
@@ -77,104 +77,87 @@ impl<H, I: Iterator> Drop for Splice<'_, H, I> {
         // Which means we can replace the slice::Iter with pointers that won't point to deallocated
         // memory, so that Drain::drop is still allowed to call iter.len(), otherwise it would break
         // the ptr.sub_ptr contract.
-        self.drain.iter = [].iter();
-
-        // We will use the replace_with iterator to append elements in place on self.drain.vec.
-        // When this hits the tail, elements are moved from the tail to tmp_tail.
-        // Once the tail is empty (or becomes empty by this), the remaining elements can be extended onto the vec.
-        //
-        // Finally:
-        // We then have contiguous elements in the vec: |head|replace_with|(old_tail|)spare_capacity|.
-        // The old tail needs to be moved to its final destination,
-        // perhaps making space for the elements in the tmp_tail.
-        let mut tmp_tail = Vec::new();
 
         unsafe {
             let vec = self.drain.vec.as_mut();
-            loop {
-                if self.drain.tail_len() == 0 {
-                    // If the tail is empty, we can just extend the vector with the remaining elements,
-                    // but we may have stashed some tmp_tail away and should reserve for that.
-                    // PLANNED: should become 'extend_reserve()'
-                    vec.reserve_intern(
-                        self.replace_with.size_hint().0 + tmp_tail.len(),
-                        false,
-                        &mut self.weak_fixup,
-                    );
-                    vec.extend(self.replace_with.by_ref());
-                    // in case the size_hint was not exact (or returned 0) we need to reserve for the tmp_tail
-                    // in most cases this will not allocate. later we expect that we have this space reserved.
-                    vec.reserve_intern(tmp_tail.len(), false, &mut self.weak_fixup);
-                    break;
-                } else if let Some(next) = self.replace_with.next() {
-                    if vec.len_exact() >= self.drain.tail_start && self.drain.tail_len() > 0 {
-                        // move one element from the tail to the tmp_tail.
-                        // We reserve for as many elements as are hinted by replace_with or the remaining tail,
-                        // whichever is smaller.
-                        tmp_tail
-                            .reserve(self.replace_with.size_hint().0.min(self.drain.tail_len()));
-                        tmp_tail.push(ptr::read(vec.as_ptr().add(self.drain.tail_start)));
-                        self.drain.tail_start += 1;
-                    }
-
-                    // since we overwrite the old tail here this will never reallocate.
-                    // PLANNED: vec.push_within_capacity().unwrap_unchecked()
-                    vec.push(next);
-                } else {
-                    // replace_with is depleted
-                    break;
-                }
+
+            if self.drain.tail_len == 0 {
+                vec.extend(self.replace_with.by_ref());
+                return;
             }
 
-            let tail_len = self.drain.tail_len();
-            if tail_len > 0 {
-                // In case we need to shift the tail farther back we need to reserve space for that.
-                // Reserve needs to preserve the tail we have, thus we temporarily set the length to the
-                // tail_end and then restore it after the reserve.
-                let old_len = vec.len_exact();
-                vec.set_len(self.drain.tail_end);
-                vec.reserve_intern(tmp_tail.len(), false, &mut self.weak_fixup);
-                vec.set_len(old_len);
-
-                // now we can move the tail around
-                ptr::copy(
-                    vec.as_ptr().add(self.drain.tail_start),
-                    vec.as_mut_ptr().add(vec.len_exact() + tmp_tail.len()),
-                    tail_len,
-                );
-
-                // all elements are moved from the tail; ensure that Drain's drop does nothing.
-                // PLANNED: eventually we may not need to use Drain here
-                self.drain.tail_start = self.drain.tail_end;
+            // First fill the range left by drain().
+            if !self.drain.fill(&mut self.replace_with) {
+                return;
             }
 
-            let tmp_tail_len = tmp_tail.len();
-            if !tmp_tail.is_empty() {
-                // If we stashed tail elements in tmp_tail, fill the gap with them.
-                tmp_tail.set_len(0);
-                ptr::copy_nonoverlapping(
-                    tmp_tail.as_ptr(),
-                    vec.as_mut_ptr().add(vec.len_exact()),
-                    tmp_tail_len,
-                );
+            // There may be more elements. Use the lower bound as an estimate.
+            // FIXME: Is the upper bound a better guess? Or something else?
+            let (lower_bound, _upper_bound) = self.replace_with.size_hint();
+            if lower_bound > 0 {
+                self.drain.move_tail(lower_bound, &mut self.weak_fixup);
+                if !self.drain.fill(&mut self.replace_with) {
+                    return;
+                }
             }
 
-            // finally fix the vec length
-            let new_len = vec.len_exact() + tmp_tail_len + tail_len;
-            vec.set_len(new_len);
+            // Collect any remaining elements.
+            // If `lower_bound` was exact, this collects a zero-length vector which does not allocate.
+            let mut collected = self
+                .replace_with
+                .by_ref()
+                .collect::<Vec<I::Item>>()
+                .into_iter();
+            // Now we have an exact count.
+            if collected.len() > 0 {
+                self.drain.move_tail(collected.len(), &mut self.weak_fixup);
+                let filled = self.drain.fill(&mut collected);
+                debug_assert!(filled);
+                debug_assert_eq!(collected.len(), 0);
+            }
+        }
+    }
+}
+
+/// Private helper methods for `Splice::drop`
+impl<H, T> Drain<'_, H, T> {
+    /// The range from `self.vec.len` to `self.tail_start` contains elements
+    /// that have been moved out.
+    /// Fill that range as much as possible with new elements from the `replace_with` iterator.
+    /// Returns `true` if we filled the entire range. (`replace_with.next()` didn’t return `None`.)
+    unsafe fn fill<I: Iterator<Item = T>>(&mut self, replace_with: &mut I) -> bool {
+        let vec = unsafe { self.vec.as_mut() };
+        let range_start = vec.len_exact();
+        let range_end = self.tail_start;
+        let range_slice = unsafe {
+            slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start)
+        };
+
+        for place in range_slice {
+            if let Some(new_item) = replace_with.next() {
+                unsafe { ptr::write(place, new_item) };
+                let len = vec.len_exact();
+                vec.set_len(len + 1);
+            } else {
+                return false;
+            }
         }
+        true
+    }
 
-        // IDEA: implement and benchmark batched copying. This leaves a gap in front of the tail which
-        // needs to be filled before resizing.
-        // Batch size:
-        // Moving one element per iteration to the tmp_tail is not efficient to make space for
-        // an element from the replace_with. Thus we determine a number of elements that we
-        // transfer in a batch to the tmp_tail. We compute the batch size to be roughly 4kb
-        // (a common page size on many systems) (or the size of I::Item, whichever is larger) or the size of
-        // the tail when it is smaller. The latter ensures that we do a single reserve with the
-        // minimum space needed when the tail is smaller than a batch would be.
-        // let batch_size = (4096 / std::mem::size_of::<I::Item>())
-        //     .max(1)
-        //     .min(self.drain.tail_len);
+    /// Makes room for inserting more elements before the tail.
+    #[track_caller]
+    unsafe fn move_tail(&mut self, additional: usize, weak_fixup: &mut Option<WeakFixupFn<'_>>) {
+        let vec = unsafe { self.vec.as_mut() };
+        let len = self.tail_start + self.tail_len;
+        vec.reserve_intern(len + additional, false, weak_fixup);
+
+        let new_tail_start = self.tail_start + additional;
+        unsafe {
+            let src = vec.as_ptr().add(self.tail_start);
+            let dst = vec.as_mut_ptr().add(new_tail_start);
+            ptr::copy(src, dst, self.tail_len);
+        }
+        self.tail_start = new_tail_start;
     }
 }
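
For reference, the semantics this Drop implementation provides appear to mirror std's Vec::splice: the drained range is yielded to the caller, replace_with fills the gap, and the tail is shifted when the replacement is longer than the removed range. A minimal sketch of those semantics against std's Vec (not this crate's type):

fn main() {
    let mut v = vec![1, 2, 3, 4, 5];
    // Replace two elements with three; the tail [4, 5] must shift right by one.
    let removed: Vec<i32> = v.splice(1..3, [10, 20, 30]).collect();
    assert_eq!(removed, [2, 3]);
    assert_eq!(v, [1, 10, 20, 30, 4, 5]);
}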
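A small illustration of why the new code falls back to collecting the remainder: size_hint's lower bound may legally be 0 even when items remain, while the length of a collected Vec is exact. Sketch using a std iterator adapter:

fn main() {
    // `filter` cannot predict how many items pass, so its lower bound is 0.
    let iter = (1..=5).filter(|x| x % 2 == 0);
    assert_eq!(iter.size_hint(), (0, Some(5)));

    // Collecting yields an exact count; had the iterator already been
    // exhausted, this would be a zero-length Vec that performs no allocation.
    let collected: Vec<i32> = iter.collect();
    assert_eq!(collected.len(), 2);
}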
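The move_tail helper boils down to an overlap-safe ptr::copy into reserved spare capacity, after which the gap in front of the tail is filled and the length is published. A standalone sketch of the same mechanics on a plain std Vec (the hard-coded indices and values are illustrative only, not part of this crate):

use std::ptr;

fn main() {
    // v's tail [4, 5] starts at index 3; make room for two new elements in front of it.
    let mut v: Vec<i32> = vec![1, 2, 3, 4, 5];
    let (tail_start, tail_len, additional) = (3, 2, 2);

    v.reserve(additional); // the copy destination must lie within capacity
    unsafe {
        // Overlap-safe copy (memmove), as in `move_tail` above.
        ptr::copy(
            v.as_ptr().add(tail_start),
            v.as_mut_ptr().add(tail_start + additional),
            tail_len,
        );
        // Fill the gap in front of the shifted tail (as `fill` does), then
        // publish the new length. i32 is Copy, so overwriting the stale
        // duplicates left at indices 3..5 needs no drop handling.
        for (i, x) in [10, 20].into_iter().enumerate() {
            ptr::write(v.as_mut_ptr().add(tail_start + i), x);
        }
        v.set_len(5 + additional);
    }
    assert_eq!(v, [1, 2, 3, 10, 20, 4, 5]);
}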