pub trait LazyMapMonoid {
type Key;
type Agg: Clone;
type Act: Clone;
type AggMonoid: Monoid<T = Self::Agg>;
type ActMonoid: Monoid<T = Self::Act>;
type KeyAct: MonoidAct<Key = Self::Key, Act = Self::Act, ActMonoid = Self::ActMonoid>;
// Required methods
fn single_agg(key: &Self::Key) -> Self::Agg;
fn act_agg(x: &Self::Agg, a: &Self::Act) -> Option<Self::Agg>;
// Provided methods
fn toggle(_x: &mut Self::Agg) { ... }
fn act_key(x: &Self::Key, a: &Self::Act) -> Self::Key { ... }
fn agg_unit() -> Self::Agg { ... }
fn act_unit() -> Self::Act { ... }
fn agg_operate(x: &Self::Agg, y: &Self::Agg) -> Self::Agg { ... }
fn act_operate(x: &Self::Act, y: &Self::Act) -> Self::Act { ... }
fn agg_operate_assign(x: &mut Self::Agg, y: &Self::Agg) { ... }
fn act_operate_assign(x: &mut Self::Act, y: &Self::Act) { ... }
}
Required Associated Types§
type Key
type Agg: Clone
type Act: Clone
type AggMonoid: Monoid<T = Self::Agg>
type ActMonoid: Monoid<T = Self::Act>
type KeyAct: MonoidAct<Key = Self::Key, Act = Self::Act, ActMonoid = Self::ActMonoid>
Required Methods§
fn single_agg(key: &Self::Key) -> Self::Agg
fn act_agg(x: &Self::Agg, a: &Self::Act) -> Option<Self::Agg>
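The required methods describe how a single key becomes an aggregate (single_agg) and how a pending action is applied to a whole aggregate (act_agg); returning None from act_agg signals that the action cannot be applied to the aggregate wholesale and must be pushed further down (the lazy segment trees in the examples below panic if that happens at a leaf). The following is a standalone sketch of that algebra for a hypothetical range-add / range-sum instance. It does not implement this trait and omits the crate's Monoid and MonoidAct bounds; the free functions merely mirror the trait's method names.

// Standalone sketch (illustrative only): the agg/act algebra that
// single_agg and act_agg encode, using range-add over range-sum.
type Key = i64;
type Agg = (i64, usize); // hypothetical aggregate: (sum, number of keys)
type Act = i64;          // hypothetical action: add this value to every key

fn single_agg(key: &Key) -> Agg {
    (*key, 1)
}

fn agg_unit() -> Agg {
    (0, 0)
}

fn agg_operate(x: &Agg, y: &Agg) -> Agg {
    (x.0 + y.0, x.1 + y.1)
}

// Applying an action to an aggregate. Some(_) means the aggregate can be
// updated directly; None would force the caller to push the action down
// and recompute from the children instead.
fn act_agg(x: &Agg, a: &Act) -> Option<Agg> {
    Some((x.0 + *a * x.1 as i64, x.1))
}

fn main() {
    let keys: [Key; 3] = [1, 2, 3];
    let agg = keys
        .iter()
        .map(single_agg)
        .fold(agg_unit(), |acc, x| agg_operate(&acc, &x));
    assert_eq!(agg, (6, 3));
    // Adding 10 to every key in the range changes the sum by 10 * len.
    assert_eq!(act_agg(&agg, &10), Some((36, 3)));
}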
Provided Methods§
fn act_key(x: &Self::Key, a: &Self::Act) -> Self::Key
Examples found in repository
More examples
crates/competitive/src/data_structure/binary_search_tree/data.rs (line 149)
143 pub fn update_act<Spec>(mut node: BstDataMutRef<'_, Spec>, act: &L::Act)
144 where
145 Spec: BstSpec<Data: BstDataAccess<marker::LazyMap, Value = Self>>,
146 {
147 L::act_operate_assign(&mut node.data_mut().bst_data_mut().act, act);
148 node.data_mut().bst_data_mut().key =
149 L::act_key(&node.reborrow().into_data().bst_data().key, act);
150 if let Some(nxlazy) = L::act_agg(&node.reborrow().into_data().bst_data().agg, act) {
151 node.data_mut().bst_data_mut().agg = nxlazy;
152 } else {
153 Self::top_down(node.reborrow_datamut());
154 Self::bottom_up(node.reborrow_datamut());
155 }
156 }
fn agg_unit() -> Self::Agg
Examples found in repository
crates/competitive/src/data_structure/lazy_segment_tree.rs (line 45)
44 pub fn new(n: usize) -> Self {
45 let seg = vec![(M::agg_unit(), M::act_unit()); 2 * n];
46 Self { n, seg }
47 }
48 pub fn from_vec(v: Vec<M::Agg>) -> Self {
49 let n = v.len();
50 let mut seg = vec![(M::agg_unit(), M::act_unit()); 2 * n];
51 for (i, x) in v.into_iter().enumerate() {
52 seg[i + n].0 = x;
53 }
54 for i in (1..n).rev() {
55 seg[i].0 = M::agg_operate(&seg[2 * i].0, &seg[2 * i + 1].0);
56 }
57 Self { n, seg }
58 }
59 pub fn from_keys(keys: impl ExactSizeIterator<Item = M::Key>) -> Self {
60 let n = keys.len();
61 let mut seg = vec![(M::agg_unit(), M::act_unit()); 2 * n];
62 for (i, key) in keys.enumerate() {
63 seg[i + n].0 = M::single_agg(&key);
64 }
65 for i in (1..n).rev() {
66 seg[i].0 = M::agg_operate(&seg[2 * i].0, &seg[2 * i + 1].0);
67 }
68 Self { n, seg }
69 }
70 #[inline]
71 fn update_at(&mut self, k: usize, x: &M::Act) {
72 let nx = M::act_agg(&self.seg[k].0, x);
73 if k < self.n {
74 self.seg[k].1 = M::act_operate(&self.seg[k].1, x);
75 }
76 if let Some(nx) = nx {
77 self.seg[k].0 = nx;
78 } else if k < self.n {
79 self.propagate_at(k);
80 self.recalc_at(k);
81 } else {
82 panic!("act failed on leaf");
83 }
84 }
85 #[inline]
86 fn recalc_at(&mut self, k: usize) {
87 self.seg[k].0 = M::agg_operate(&self.seg[2 * k].0, &self.seg[2 * k + 1].0);
88 }
89 #[inline]
90 fn propagate_at(&mut self, k: usize) {
91 debug_assert!(k < self.n);
92 let x = replace(&mut self.seg[k].1, M::act_unit());
93 self.update_at(2 * k, &x);
94 self.update_at(2 * k + 1, &x);
95 }
96 #[inline]
97 fn propagate(&mut self, k: usize, right: bool, nofilt: bool) {
98 let right = right as usize;
99 for i in (1..(k + 1 - right).next_power_of_two().trailing_zeros()).rev() {
100 if nofilt || (k >> i) << i != k {
101 self.propagate_at((k - right) >> i);
102 }
103 }
104 }
105 #[inline]
106 fn recalc(&mut self, k: usize, right: bool, nofilt: bool) {
107 let right = right as usize;
108 for i in 1..(k + 1 - right).next_power_of_two().trailing_zeros() {
109 if nofilt || (k >> i) << i != k {
110 self.recalc_at((k - right) >> i);
111 }
112 }
113 }
114 pub fn update<R>(&mut self, range: R, x: M::Act)
115 where
116 R: RangeBounds<usize>,
117 {
118 let range = range.to_range_bounded(0, self.n).expect("invalid range");
119 let mut a = range.start + self.n;
120 let mut b = range.end + self.n;
121 self.propagate(a, false, false);
122 self.propagate(b, true, false);
123 while a < b {
124 if a & 1 != 0 {
125 self.update_at(a, &x);
126 a += 1;
127 }
128 if b & 1 != 0 {
129 b -= 1;
130 self.update_at(b, &x);
131 }
132 a /= 2;
133 b /= 2;
134 }
135 self.recalc(range.start + self.n, false, false);
136 self.recalc(range.end + self.n, true, false);
137 }
138 pub fn fold<R>(&mut self, range: R) -> M::Agg
139 where
140 R: RangeBounds<usize>,
141 {
142 let range = range.to_range_bounded(0, self.n).expect("invalid range");
143 let mut l = range.start + self.n;
144 let mut r = range.end + self.n;
145 self.propagate(l, false, true);
146 self.propagate(r, true, true);
147 let mut vl = M::agg_unit();
148 let mut vr = M::agg_unit();
149 while l < r {
150 if l & 1 != 0 {
151 vl = M::agg_operate(&vl, &self.seg[l].0);
152 l += 1;
153 }
154 if r & 1 != 0 {
155 r -= 1;
156 vr = M::agg_operate(&self.seg[r].0, &vr);
157 }
158 l /= 2;
159 r /= 2;
160 }
161 M::agg_operate(&vl, &vr)
162 }
163 pub fn set(&mut self, k: usize, x: M::Agg) {
164 let k = k + self.n;
165 self.propagate(k, false, true);
166 self.seg[k] = (x, M::act_unit());
167 self.recalc(k, false, true);
168 }
169 pub fn get(&mut self, k: usize) -> M::Agg {
170 self.fold(k..k + 1)
171 }
172 pub fn fold_all(&mut self) -> M::Agg {
173 self.fold(0..self.n)
174 }
175 fn bisect_perfect<P>(&mut self, mut pos: usize, mut acc: M::Agg, p: P) -> (usize, M::Agg)
176 where
177 P: Fn(&M::Agg) -> bool,
178 {
179 while pos < self.n {
180 self.propagate_at(pos);
181 pos <<= 1;
182 let nacc = M::agg_operate(&acc, &self.seg[pos].0);
183 if !p(&nacc) {
184 acc = nacc;
185 pos += 1;
186 }
187 }
188 (pos - self.n, acc)
189 }
190 fn rbisect_perfect<P>(&mut self, mut pos: usize, mut acc: M::Agg, p: P) -> (usize, M::Agg)
191 where
192 P: Fn(&M::Agg) -> bool,
193 {
194 while pos < self.n {
195 self.propagate_at(pos);
196 pos = pos * 2 + 1;
197 let nacc = M::agg_operate(&self.seg[pos].0, &acc);
198 if !p(&nacc) {
199 acc = nacc;
200 pos -= 1;
201 }
202 }
203 (pos - self.n, acc)
204 }
205 /// Returns the first index that satisfies an accumulative predicate.
206 pub fn position_acc<R, P>(&mut self, range: R, p: P) -> Option<usize>
207 where
208 R: RangeBounds<usize>,
209 P: Fn(&M::Agg) -> bool,
210 {
211 let range = range.to_range_bounded(0, self.n).expect("invalid range");
212 let mut l = range.start + self.n;
213 let r = range.end + self.n;
214 self.propagate(l, false, true);
215 self.propagate(r, true, true);
216 let mut k = 0usize;
217 let mut acc = M::agg_unit();
218 while l < r >> k {
219 if l & 1 != 0 {
220 let nacc = M::agg_operate(&acc, &self.seg[l].0);
221 if p(&nacc) {
222 return Some(self.bisect_perfect(l, acc, p).0);
223 }
224 acc = nacc;
225 l += 1;
226 }
227 l >>= 1;
228 k += 1;
229 }
230 for k in (0..k).rev() {
231 let r = r >> k;
232 if r & 1 != 0 {
233 let nacc = M::agg_operate(&acc, &self.seg[r - 1].0);
234 if p(&nacc) {
235 return Some(self.bisect_perfect(r - 1, acc, p).0);
236 }
237 acc = nacc;
238 }
239 }
240 None
241 }
242 /// Returns the last index that satisfies an accumulative predicate.
243 pub fn rposition_acc<R, P>(&mut self, range: R, p: P) -> Option<usize>
244 where
245 R: RangeBounds<usize>,
246 P: Fn(&M::Agg) -> bool,
247 {
248 let range = range.to_range_bounded(0, self.n).expect("invalid range");
249 let mut l = range.start + self.n;
250 let mut r = range.end + self.n;
251 self.propagate(l, false, true);
252 self.propagate(r, true, true);
253 let mut c = 0usize;
254 let mut k = 0usize;
255 let mut acc = M::agg_unit();
256 while l >> k < r {
257 c <<= 1;
258 if l & (1 << k) != 0 {
259 l += 1 << k;
260 c += 1;
261 }
262 if r & 1 != 0 {
263 r -= 1;
264 let nacc = M::agg_operate(&self.seg[r].0, &acc);
265 if p(&nacc) {
266 return Some(self.rbisect_perfect(r, acc, p).0);
267 }
268 acc = nacc;
269 }
270 r >>= 1;
271 k += 1;
272 }
273 for k in (0..k).rev() {
274 if c & 1 != 0 {
275 l -= 1 << k;
276 let l = l >> k;
277 let nacc = M::agg_operate(&self.seg[l].0, &acc);
278 if p(&nacc) {
279 return Some(self.rbisect_perfect(l, acc, p).0);
280 }
281 acc = nacc;
282 }
283 c >>= 1;
284 }
285 None
286 }
More examples
crates/competitive/src/data_structure/lazy_segment_tree_map.rs (line 53)
52 fn get_mut(&mut self, k: usize) -> &mut (M::Agg, M::Act) {
53 self.seg.entry(k).or_insert((M::agg_unit(), M::act_unit()))
54 }
55 #[inline]
56 fn update_at(&mut self, k: usize, x: &M::Act) {
57 let n = self.n;
58 let a = self.get_mut(k);
59 let nx = M::act_agg(&a.0, x);
60 if k < n {
61 a.1 = M::act_operate(&a.1, x);
62 }
63 if let Some(nx) = nx {
64 a.0 = nx;
65 } else if k < n {
66 self.propagate_at(k);
67 self.recalc_at(k);
68 } else {
69 panic!("act failed on leaf");
70 }
71 }
72 #[inline]
73 fn recalc_at(&mut self, k: usize) {
74 let x = match (self.seg.get(&(2 * k)), self.seg.get(&(2 * k + 1))) {
75 (None, None) => M::agg_unit(),
76 (None, Some((y, _))) => y.clone(),
77 (Some((x, _)), None) => x.clone(),
78 (Some((x, _)), Some((y, _))) => M::agg_operate(x, y),
79 };
80 self.get_mut(k).0 = x;
81 }
82 #[inline]
83 fn propagate_at(&mut self, k: usize) {
84 debug_assert!(k < self.n);
85 let x = match self.seg.get_mut(&k) {
86 Some((_, x)) => replace(x, M::act_unit()),
87 None => M::act_unit(),
88 };
89 self.update_at(2 * k, &x);
90 self.update_at(2 * k + 1, &x);
91 }
92 #[inline]
93 fn propagate(&mut self, k: usize, right: bool, nofilt: bool) {
94 let right = right as usize;
95 for i in (1..(k + 1 - right).next_power_of_two().trailing_zeros()).rev() {
96 if nofilt || (k >> i) << i != k {
97 self.propagate_at((k - right) >> i);
98 }
99 }
100 }
101 #[inline]
102 fn recalc(&mut self, k: usize, right: bool, nofilt: bool) {
103 let right = right as usize;
104 for i in 1..(k + 1 - right).next_power_of_two().trailing_zeros() {
105 if nofilt || (k >> i) << i != k {
106 self.recalc_at((k - right) >> i);
107 }
108 }
109 }
110 pub fn update<R>(&mut self, range: R, x: M::Act)
111 where
112 R: RangeBounds<usize>,
113 {
114 let range = range.to_range_bounded(0, self.n).expect("invalid range");
115 let mut a = range.start + self.n;
116 let mut b = range.end + self.n;
117 self.propagate(a, false, false);
118 self.propagate(b, true, false);
119 while a < b {
120 if a & 1 != 0 {
121 self.update_at(a, &x);
122 a += 1;
123 }
124 if b & 1 != 0 {
125 b -= 1;
126 self.update_at(b, &x);
127 }
128 a /= 2;
129 b /= 2;
130 }
131 self.recalc(range.start + self.n, false, false);
132 self.recalc(range.end + self.n, true, false);
133 }
134 pub fn fold<R>(&mut self, range: R) -> M::Agg
135 where
136 R: RangeBounds<usize>,
137 {
138 let range = range.to_range_bounded(0, self.n).expect("invalid range");
139 let mut l = range.start + self.n;
140 let mut r = range.end + self.n;
141 self.propagate(l, false, true);
142 self.propagate(r, true, true);
143 let mut vl = M::agg_unit();
144 let mut vr = M::agg_unit();
145 while l < r {
146 if l & 1 != 0 {
147 if let Some((x, _)) = self.seg.get(&l) {
148 vl = M::agg_operate(&vl, x);
149 }
150 l += 1;
151 }
152 if r & 1 != 0 {
153 r -= 1;
154 if let Some((x, _)) = self.seg.get(&r) {
155 vr = M::agg_operate(x, &vr);
156 }
157 }
158 l /= 2;
159 r /= 2;
160 }
161 M::agg_operate(&vl, &vr)
162 }
163 pub fn set(&mut self, k: usize, x: M::Agg) {
164 let k = k + self.n;
165 self.propagate(k, false, true);
166 *self.get_mut(k) = (x, M::act_unit());
167 self.recalc(k, false, true);
168 }
169 pub fn get(&mut self, k: usize) -> M::Agg {
170 self.fold(k..k + 1)
171 }
172 pub fn fold_all(&mut self) -> M::Agg {
173 self.fold(0..self.n)
174 }
175 fn bisect_perfect<P>(&mut self, mut pos: usize, mut acc: M::Agg, p: P) -> (usize, M::Agg)
176 where
177 P: Fn(&M::Agg) -> bool,
178 {
179 while pos < self.n {
180 self.propagate_at(pos);
181 pos <<= 1;
182 let nacc = match self.seg.get(&pos) {
183 Some((x, _)) => M::agg_operate(&acc, x),
184 None => acc.clone(),
185 };
186 if !p(&nacc) {
187 acc = nacc;
188 pos += 1;
189 }
190 }
191 (pos - self.n, acc)
192 }
193 fn rbisect_perfect<P>(&mut self, mut pos: usize, mut acc: M::Agg, p: P) -> (usize, M::Agg)
194 where
195 P: Fn(&M::Agg) -> bool,
196 {
197 while pos < self.n {
198 self.propagate_at(pos);
199 pos = pos * 2 + 1;
200 let nacc = match self.seg.get(&pos) {
201 Some((x, _)) => M::agg_operate(x, &acc),
202 None => acc.clone(),
203 };
204 if !p(&nacc) {
205 acc = nacc;
206 pos -= 1;
207 }
208 }
209 (pos - self.n, acc)
210 }
211 /// Returns the first index that satisfies an accumulative predicate.
212 pub fn position_acc<R, P>(&mut self, range: R, p: P) -> Option<usize>
213 where
214 R: RangeBounds<usize>,
215 P: Fn(&M::Agg) -> bool,
216 {
217 let range = range.to_range_bounded(0, self.n).expect("invalid range");
218 let mut l = range.start + self.n;
219 let r = range.end + self.n;
220 self.propagate(l, false, true);
221 self.propagate(r, true, true);
222 let mut k = 0usize;
223 let mut acc = M::agg_unit();
224 while l < r >> k {
225 if l & 1 != 0 {
226 let nacc = match self.seg.get(&l) {
227 Some((x, _)) => M::agg_operate(&acc, x),
228 None => acc.clone(),
229 };
230 if p(&nacc) {
231 return Some(self.bisect_perfect(l, acc, p).0);
232 }
233 acc = nacc;
234 l += 1;
235 }
236 l >>= 1;
237 k += 1;
238 }
239 for k in (0..k).rev() {
240 let r = r >> k;
241 if r & 1 != 0 {
242 let nacc = match self.seg.get(&(r - 1)) {
243 Some((x, _)) => M::agg_operate(&acc, x),
244 None => acc.clone(),
245 };
246 if p(&nacc) {
247 return Some(self.bisect_perfect(r - 1, acc, p).0);
248 }
249 acc = nacc;
250 }
251 }
252 None
253 }
254 /// Returns the last index that satisfies an accumulative predicate.
255 pub fn rposition_acc<R, P>(&mut self, range: R, p: P) -> Option<usize>
256 where
257 R: RangeBounds<usize>,
258 P: Fn(&M::Agg) -> bool,
259 {
260 let range = range.to_range_bounded(0, self.n).expect("invalid range");
261 let mut l = range.start + self.n;
262 let mut r = range.end + self.n;
263 self.propagate(l, false, true);
264 self.propagate(r, true, true);
265 let mut c = 0usize;
266 let mut k = 0usize;
267 let mut acc = M::agg_unit();
268 while l >> k < r {
269 c <<= 1;
270 if l & (1 << k) != 0 {
271 l += 1 << k;
272 c += 1;
273 }
274 if r & 1 != 0 {
275 r -= 1;
276 let nacc = match self.seg.get(&r) {
277 Some((x, _)) => M::agg_operate(x, &acc),
278 None => acc.clone(),
279 };
280 if p(&nacc) {
281 return Some(self.rbisect_perfect(r, acc, p).0);
282 }
283 acc = nacc;
284 }
285 r >>= 1;
286 k += 1;
287 }
288 for k in (0..k).rev() {
289 if c & 1 != 0 {
290 l -= 1 << k;
291 let l = l >> k;
292 let nacc = match self.seg.get(&l) {
293 Some((x, _)) => M::agg_operate(x, &acc),
294 None => acc.clone(),
295 };
296 if p(&nacc) {
297 return Some(self.rbisect_perfect(l, acc, p).0);
298 }
299 acc = nacc;
300 }
301 c >>= 1;
302 }
303 None
304 }
crates/competitive/src/data_structure/splay_tree/sequence.rs (line 156)
154 fn new(f: F) -> Self {
155 Self {
156 acc: T::agg_unit(),
157 f,
158 _marker: PhantomData,
159 }
160 }
161}
162impl<F, T> SplaySeeker for SeekByAccCond<F, T>
163where
164 F: FnMut(&T::Agg) -> bool,
165 T: LazyMapMonoid,
166{
167 type S = LazyAggSplay<T>;
168 fn splay_seek(&mut self, node: NodeRef<marker::Immut<'_>, Self::S>) -> Ordering {
169 if let Some(lagg) = node.left().map(|l| &l.data().agg) {
170 let nacc = T::agg_operate(&self.acc, lagg);
171 if (self.f)(&nacc) {
172 return Ordering::Less;
173 }
174 self.acc = nacc;
175 };
176 self.acc = T::agg_operate(&self.acc, &T::single_agg(&node.data().key));
177 if (self.f)(&self.acc) {
178 Ordering::Equal
179 } else {
180 Ordering::Greater
181 }
182 }
183}
184
185struct SeekByRaccCond<F, T>
186where
187 T: LazyMapMonoid,
188{
189 acc: T::Agg,
190 f: F,
191 _marker: PhantomData<fn() -> T>,
192}
193impl<F, T> SeekByRaccCond<F, T>
194where
195 T: LazyMapMonoid,
196{
197 fn new(f: F) -> Self {
198 Self {
199 acc: T::agg_unit(),
200 f,
201 _marker: PhantomData,
202 }
203 }
204}
205impl<F, T> SplaySeeker for SeekByRaccCond<F, T>
206where
207 F: FnMut(&T::Agg) -> bool,
208 T: LazyMapMonoid,
209{
210 type S = LazyAggSplay<T>;
211 fn splay_seek(&mut self, node: NodeRef<marker::Immut<'_>, Self::S>) -> Ordering {
212 if let Some(lagg) = node.right().map(|r| &r.data().agg) {
213 let nacc = T::agg_operate(lagg, &self.acc);
214 if (self.f)(&nacc) {
215 return Ordering::Greater;
216 }
217 self.acc = nacc;
218 };
219 self.acc = T::agg_operate(&T::single_agg(&node.data().key), &self.acc);
220 if (self.f)(&self.acc) {
221 Ordering::Equal
222 } else {
223 Ordering::Less
224 }
225 }
226}
227
228pub struct SplaySequence<T, A = MemoryPool<Node<LazyAggElement<T>>>>
229where
230 T: LazyMapMonoid,
231 A: Allocator<Node<LazyAggElement<T>>>,
232{
233 root: Root<LazyAggSplay<T>>,
234 length: usize,
235 alloc: ManuallyDrop<A>,
236}
237
238impl<T, A> Debug for SplaySequence<T, A>
239where
240 T: LazyMapMonoid<Key: Debug, Agg: Debug, Act: Debug>,
241 A: Allocator<Node<LazyAggElement<T>>>,
242{
243 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
244 f.debug_struct("SplayMap")
245 .field("root", &self.root)
246 .field("length", &self.length)
247 .finish()
248 }
249}
250
251impl<T, A> Drop for SplaySequence<T, A>
252where
253 T: LazyMapMonoid,
254 A: Allocator<Node<LazyAggElement<T>>>,
255{
256 fn drop(&mut self) {
257 unsafe {
258 while let Some(node) = self.root.take_first() {
259 self.alloc.deallocate(node.into_dying().into_inner());
260 }
261 ManuallyDrop::drop(&mut self.alloc);
262 }
263 }
264}
265
266impl<T, A> Default for SplaySequence<T, A>
267where
268 T: LazyMapMonoid,
269 A: Allocator<Node<LazyAggElement<T>>> + Default,
270{
271 fn default() -> Self {
272 Self {
273 root: Root::default(),
274 length: 0,
275 alloc: Default::default(),
276 }
277 }
278}
279
280impl<T> SplaySequence<T>
281where
282 T: LazyMapMonoid,
283{
284 pub fn new() -> Self {
285 Default::default()
286 }
287 pub fn with_capacity(capacity: usize) -> Self {
288 Self {
289 root: Root::default(),
290 length: 0,
291 alloc: ManuallyDrop::new(MemoryPool::with_capacity(capacity)),
292 }
293 }
294 pub fn len(&self) -> usize {
295 self.length
296 }
297 pub fn is_empty(&self) -> bool {
298 self.length == 0
299 }
300}
301impl<T, A> SplaySequence<T, A>
302where
303 T: LazyMapMonoid,
304 A: Allocator<Node<LazyAggElement<T>>>,
305{
306 fn range<R>(&mut self, range: R) -> NodeRange<'_, LazyAggSplay<T>>
307 where
308 R: RangeBounds<usize>,
309 {
310 let start = match range.start_bound() {
311 Bound::Included(&index) => Bound::Included(SeekBySize::new(index)),
312 Bound::Excluded(&index) => Bound::Excluded(SeekBySize::new(index)),
313 Bound::Unbounded => Bound::Unbounded,
314 };
315 let end = match range.end_bound() {
316 Bound::Included(&index) => Bound::Included(SeekBySize::new(index)),
317 Bound::Excluded(&index) => Bound::Excluded(SeekBySize::new(index)),
318 Bound::Unbounded => Bound::Unbounded,
319 };
320 self.root.range(start, end)
321 }
322 pub fn update<R>(&mut self, range: R, x: T::Act)
323 where
324 R: RangeBounds<usize>,
325 {
326 if let Some(root) = self.range(range).root_mut().root_data_mut() {
327 LazyAggSplay::<T>::update_lazy(root, &x);
328 }
329 }
330 pub fn fold<R>(&mut self, range: R) -> T::Agg
331 where
332 R: RangeBounds<usize>,
333 {
334 if let Some(root) = self.range(range).root().root() {
335 root.data().agg.clone()
336 } else {
337 T::agg_unit()
338 }
339 }
crates/competitive/src/data_structure/binary_search_tree/seeker.rs (line 151)
149 pub fn new(f: F) -> Self {
150 Self {
151 acc: L::agg_unit(),
152 f,
153 _marker: PhantomData,
154 }
155 }
156}
157
158impl<Spec, L, F> BstSeeker for SeekByAccCond<Spec, L, F>
159where
160 Spec: BstSpec<Data: BstDataAccess<data::marker::LazyMap, Value = LazyMapElement<L>>>,
161 L: LazyMapMonoid,
162 F: FnMut(&L::Agg) -> bool,
163{
164 type Spec = Spec;
165
166 fn bst_seek(&mut self, node: BstImmutRef<'_, Self::Spec>) -> Ordering {
167 if let Ok(left) = node.reborrow().left().descend() {
168 let left_agg = &left.into_data().bst_data().agg;
169 let nagg = L::agg_operate(&self.acc, left_agg);
170 if (self.f)(&nagg) {
171 return Ordering::Greater;
172 }
173 let nagg = L::agg_operate(
174 &nagg,
175 &L::single_agg(&node.reborrow().into_data().bst_data().key),
176 );
177 if (self.f)(&nagg) {
178 Ordering::Equal
179 } else {
180 self.acc = nagg;
181 Ordering::Less
182 }
183 } else {
184 let nagg = L::agg_operate(
185 &self.acc,
186 &L::single_agg(&node.reborrow().into_data().bst_data().key),
187 );
188 if (self.f)(&nagg) {
189 Ordering::Equal
190 } else {
191 self.acc = nagg;
192 Ordering::Less
193 }
194 }
195 }
196}
197
198pub struct SeekByRaccCond<Spec, L, F>
199where
200 L: LazyMapMonoid,
201{
202 acc: L::Agg,
203 f: F,
204 _marker: PhantomData<fn() -> (Spec, L)>,
205}
206
207impl<Spec, L, F> SeekByRaccCond<Spec, L, F>
208where
209 L: LazyMapMonoid,
210 F: FnMut(&L::Agg) -> bool,
211{
212 pub fn new(f: F) -> Self {
213 Self {
214 acc: L::agg_unit(),
215 f,
216 _marker: PhantomData,
217 }
218 }
fn act_unit() -> Self::Act
Examples found in repository
crates/competitive/src/data_structure/lazy_segment_tree.rs (line 45)
44 pub fn new(n: usize) -> Self {
45 let seg = vec![(M::agg_unit(), M::act_unit()); 2 * n];
46 Self { n, seg }
47 }
48 pub fn from_vec(v: Vec<M::Agg>) -> Self {
49 let n = v.len();
50 let mut seg = vec![(M::agg_unit(), M::act_unit()); 2 * n];
51 for (i, x) in v.into_iter().enumerate() {
52 seg[i + n].0 = x;
53 }
54 for i in (1..n).rev() {
55 seg[i].0 = M::agg_operate(&seg[2 * i].0, &seg[2 * i + 1].0);
56 }
57 Self { n, seg }
58 }
59 pub fn from_keys(keys: impl ExactSizeIterator<Item = M::Key>) -> Self {
60 let n = keys.len();
61 let mut seg = vec![(M::agg_unit(), M::act_unit()); 2 * n];
62 for (i, key) in keys.enumerate() {
63 seg[i + n].0 = M::single_agg(&key);
64 }
65 for i in (1..n).rev() {
66 seg[i].0 = M::agg_operate(&seg[2 * i].0, &seg[2 * i + 1].0);
67 }
68 Self { n, seg }
69 }
70 #[inline]
71 fn update_at(&mut self, k: usize, x: &M::Act) {
72 let nx = M::act_agg(&self.seg[k].0, x);
73 if k < self.n {
74 self.seg[k].1 = M::act_operate(&self.seg[k].1, x);
75 }
76 if let Some(nx) = nx {
77 self.seg[k].0 = nx;
78 } else if k < self.n {
79 self.propagate_at(k);
80 self.recalc_at(k);
81 } else {
82 panic!("act failed on leaf");
83 }
84 }
85 #[inline]
86 fn recalc_at(&mut self, k: usize) {
87 self.seg[k].0 = M::agg_operate(&self.seg[2 * k].0, &self.seg[2 * k + 1].0);
88 }
89 #[inline]
90 fn propagate_at(&mut self, k: usize) {
91 debug_assert!(k < self.n);
92 let x = replace(&mut self.seg[k].1, M::act_unit());
93 self.update_at(2 * k, &x);
94 self.update_at(2 * k + 1, &x);
95 }
96 #[inline]
97 fn propagate(&mut self, k: usize, right: bool, nofilt: bool) {
98 let right = right as usize;
99 for i in (1..(k + 1 - right).next_power_of_two().trailing_zeros()).rev() {
100 if nofilt || (k >> i) << i != k {
101 self.propagate_at((k - right) >> i);
102 }
103 }
104 }
105 #[inline]
106 fn recalc(&mut self, k: usize, right: bool, nofilt: bool) {
107 let right = right as usize;
108 for i in 1..(k + 1 - right).next_power_of_two().trailing_zeros() {
109 if nofilt || (k >> i) << i != k {
110 self.recalc_at((k - right) >> i);
111 }
112 }
113 }
114 pub fn update<R>(&mut self, range: R, x: M::Act)
115 where
116 R: RangeBounds<usize>,
117 {
118 let range = range.to_range_bounded(0, self.n).expect("invalid range");
119 let mut a = range.start + self.n;
120 let mut b = range.end + self.n;
121 self.propagate(a, false, false);
122 self.propagate(b, true, false);
123 while a < b {
124 if a & 1 != 0 {
125 self.update_at(a, &x);
126 a += 1;
127 }
128 if b & 1 != 0 {
129 b -= 1;
130 self.update_at(b, &x);
131 }
132 a /= 2;
133 b /= 2;
134 }
135 self.recalc(range.start + self.n, false, false);
136 self.recalc(range.end + self.n, true, false);
137 }
138 pub fn fold<R>(&mut self, range: R) -> M::Agg
139 where
140 R: RangeBounds<usize>,
141 {
142 let range = range.to_range_bounded(0, self.n).expect("invalid range");
143 let mut l = range.start + self.n;
144 let mut r = range.end + self.n;
145 self.propagate(l, false, true);
146 self.propagate(r, true, true);
147 let mut vl = M::agg_unit();
148 let mut vr = M::agg_unit();
149 while l < r {
150 if l & 1 != 0 {
151 vl = M::agg_operate(&vl, &self.seg[l].0);
152 l += 1;
153 }
154 if r & 1 != 0 {
155 r -= 1;
156 vr = M::agg_operate(&self.seg[r].0, &vr);
157 }
158 l /= 2;
159 r /= 2;
160 }
161 M::agg_operate(&vl, &vr)
162 }
163 pub fn set(&mut self, k: usize, x: M::Agg) {
164 let k = k + self.n;
165 self.propagate(k, false, true);
166 self.seg[k] = (x, M::act_unit());
167 self.recalc(k, false, true);
168 }
More examples
crates/competitive/src/data_structure/lazy_segment_tree_map.rs (line 53)
52 fn get_mut(&mut self, k: usize) -> &mut (M::Agg, M::Act) {
53 self.seg.entry(k).or_insert((M::agg_unit(), M::act_unit()))
54 }
55 #[inline]
56 fn update_at(&mut self, k: usize, x: &M::Act) {
57 let n = self.n;
58 let a = self.get_mut(k);
59 let nx = M::act_agg(&a.0, x);
60 if k < n {
61 a.1 = M::act_operate(&a.1, x);
62 }
63 if let Some(nx) = nx {
64 a.0 = nx;
65 } else if k < n {
66 self.propagate_at(k);
67 self.recalc_at(k);
68 } else {
69 panic!("act failed on leaf");
70 }
71 }
72 #[inline]
73 fn recalc_at(&mut self, k: usize) {
74 let x = match (self.seg.get(&(2 * k)), self.seg.get(&(2 * k + 1))) {
75 (None, None) => M::agg_unit(),
76 (None, Some((y, _))) => y.clone(),
77 (Some((x, _)), None) => x.clone(),
78 (Some((x, _)), Some((y, _))) => M::agg_operate(x, y),
79 };
80 self.get_mut(k).0 = x;
81 }
82 #[inline]
83 fn propagate_at(&mut self, k: usize) {
84 debug_assert!(k < self.n);
85 let x = match self.seg.get_mut(&k) {
86 Some((_, x)) => replace(x, M::act_unit()),
87 None => M::act_unit(),
88 };
89 self.update_at(2 * k, &x);
90 self.update_at(2 * k + 1, &x);
91 }
92 #[inline]
93 fn propagate(&mut self, k: usize, right: bool, nofilt: bool) {
94 let right = right as usize;
95 for i in (1..(k + 1 - right).next_power_of_two().trailing_zeros()).rev() {
96 if nofilt || (k >> i) << i != k {
97 self.propagate_at((k - right) >> i);
98 }
99 }
100 }
101 #[inline]
102 fn recalc(&mut self, k: usize, right: bool, nofilt: bool) {
103 let right = right as usize;
104 for i in 1..(k + 1 - right).next_power_of_two().trailing_zeros() {
105 if nofilt || (k >> i) << i != k {
106 self.recalc_at((k - right) >> i);
107 }
108 }
109 }
110 pub fn update<R>(&mut self, range: R, x: M::Act)
111 where
112 R: RangeBounds<usize>,
113 {
114 let range = range.to_range_bounded(0, self.n).expect("invalid range");
115 let mut a = range.start + self.n;
116 let mut b = range.end + self.n;
117 self.propagate(a, false, false);
118 self.propagate(b, true, false);
119 while a < b {
120 if a & 1 != 0 {
121 self.update_at(a, &x);
122 a += 1;
123 }
124 if b & 1 != 0 {
125 b -= 1;
126 self.update_at(b, &x);
127 }
128 a /= 2;
129 b /= 2;
130 }
131 self.recalc(range.start + self.n, false, false);
132 self.recalc(range.end + self.n, true, false);
133 }
134 pub fn fold<R>(&mut self, range: R) -> M::Agg
135 where
136 R: RangeBounds<usize>,
137 {
138 let range = range.to_range_bounded(0, self.n).expect("invalid range");
139 let mut l = range.start + self.n;
140 let mut r = range.end + self.n;
141 self.propagate(l, false, true);
142 self.propagate(r, true, true);
143 let mut vl = M::agg_unit();
144 let mut vr = M::agg_unit();
145 while l < r {
146 if l & 1 != 0 {
147 if let Some((x, _)) = self.seg.get(&l) {
148 vl = M::agg_operate(&vl, x);
149 }
150 l += 1;
151 }
152 if r & 1 != 0 {
153 r -= 1;
154 if let Some((x, _)) = self.seg.get(&r) {
155 vr = M::agg_operate(x, &vr);
156 }
157 }
158 l /= 2;
159 r /= 2;
160 }
161 M::agg_operate(&vl, &vr)
162 }
163 pub fn set(&mut self, k: usize, x: M::Agg) {
164 let k = k + self.n;
165 self.propagate(k, false, true);
166 *self.get_mut(k) = (x, M::act_unit());
167 self.recalc(k, false, true);
168 }
crates/competitive/src/data_structure/binary_search_tree/data.rs (line 139)
134 pub fn from_key(key: L::Key) -> Self {
135 let agg = L::single_agg(&key);
136 Self {
137 key,
138 agg,
139 act: L::act_unit(),
140 }
141 }
142
143 pub fn update_act<Spec>(mut node: BstDataMutRef<'_, Spec>, act: &L::Act)
144 where
145 Spec: BstSpec<Data: BstDataAccess<marker::LazyMap, Value = Self>>,
146 {
147 L::act_operate_assign(&mut node.data_mut().bst_data_mut().act, act);
148 node.data_mut().bst_data_mut().key =
149 L::act_key(&node.reborrow().into_data().bst_data().key, act);
150 if let Some(nxlazy) = L::act_agg(&node.reborrow().into_data().bst_data().agg, act) {
151 node.data_mut().bst_data_mut().agg = nxlazy;
152 } else {
153 Self::top_down(node.reborrow_datamut());
154 Self::bottom_up(node.reborrow_datamut());
155 }
156 }
157
158 pub fn top_down<Spec>(mut node: BstDataMutRef<'_, Spec>)
159 where
160 Spec: BstSpec<Data: BstDataAccess<marker::LazyMap, Value = Self>>,
161 {
162 let act = replace(&mut node.data_mut().bst_data_mut().act, L::act_unit());
163 if let Ok(left) = node.reborrow_datamut().left().descend() {
164 Self::update_act(left, &act);
165 }
166 if let Ok(right) = node.reborrow_datamut().right().descend() {
167 Self::update_act(right, &act);
168 }
169 }
crates/competitive/src/data_structure/splay_tree/sequence.rs (line 62)
61 fn propagate(node: NodeRef<marker::DataMut<'_>, Self>) -> NodeRef<marker::DataMut<'_>, Self> {
62 let lazy = replace(&mut node.data_mut().lazy, T::act_unit());
63 if let Some(left) = node.left() {
64 Self::update_lazy(left, &lazy);
65 }
66 if let Some(right) = node.right() {
67 Self::update_lazy(right, &lazy);
68 }
69 if replace(&mut node.data_mut().rev, false) {
70 if let Some(left) = node.left() {
71 Self::reverse(left);
72 }
73 if let Some(right) = node.right() {
74 Self::reverse(right);
75 }
76 }
77 node
78 }
79 fn recalc(node: NodeRef<marker::DataMut<'_>, Self>) -> NodeRef<marker::DataMut<'_>, Self> {
80 let mut agg = T::single_agg(&node.data().key);
81 let mut size = 1;
82 if let Some(left) = node.left() {
83 let data = left.data();
84 agg = T::agg_operate(&data.agg, &agg);
85 // agg = <T::AggMonoid as Magma>::operate(&data.agg, &agg);
86 size += data.size;
87 }
88 if let Some(right) = node.right() {
89 let data = right.data();
90 agg = T::agg_operate(&agg, &data.agg);
91 size += data.size;
92 }
93 let data = node.data_mut();
94 data.agg = agg;
95 data.size = size;
96 node
97 }
98}
99impl<T> SplaySpec for LazyAggSplay<T>
100where
101 T: LazyMapMonoid,
102{
103 type T = LazyAggElement<T>;
104 fn has_bottom_up() -> bool {
105 true
106 }
107 fn top_down(node: NodeRef<marker::DataMut<'_>, Self>) {
108 Self::propagate(node);
109 }
110 fn bottom_up(node: NodeRef<marker::DataMut<'_>, Self>) {
111 Self::recalc(node);
112 }
113}
114
115struct SeekBySize<T> {
116 index: usize,
117 _marker: PhantomData<fn() -> T>,
118}
119impl<T> SeekBySize<T> {
120 fn new(index: usize) -> Self {
121 Self {
122 index,
123 _marker: PhantomData,
124 }
125 }
126}
127impl<T> SplaySeeker for SeekBySize<T>
128where
129 T: LazyMapMonoid,
130{
131 type S = LazyAggSplay<T>;
132 fn splay_seek(&mut self, node: NodeRef<marker::Immut<'_>, Self::S>) -> Ordering {
133 let lsize = node.left().map(|l| l.data().size).unwrap_or_default();
134 let ord = self.index.cmp(&lsize);
135 if matches!(ord, Ordering::Greater) {
136 self.index -= lsize + 1;
137 }
138 ord
139 }
140}
141
142struct SeekByAccCond<F, T>
143where
144 T: LazyMapMonoid,
145{
146 acc: T::Agg,
147 f: F,
148 _marker: PhantomData<fn() -> T>,
149}
150impl<F, T> SeekByAccCond<F, T>
151where
152 T: LazyMapMonoid,
153{
154 fn new(f: F) -> Self {
155 Self {
156 acc: T::agg_unit(),
157 f,
158 _marker: PhantomData,
159 }
160 }
161}
162impl<F, T> SplaySeeker for SeekByAccCond<F, T>
163where
164 F: FnMut(&T::Agg) -> bool,
165 T: LazyMapMonoid,
166{
167 type S = LazyAggSplay<T>;
168 fn splay_seek(&mut self, node: NodeRef<marker::Immut<'_>, Self::S>) -> Ordering {
169 if let Some(lagg) = node.left().map(|l| &l.data().agg) {
170 let nacc = T::agg_operate(&self.acc, lagg);
171 if (self.f)(&nacc) {
172 return Ordering::Less;
173 }
174 self.acc = nacc;
175 };
176 self.acc = T::agg_operate(&self.acc, &T::single_agg(&node.data().key));
177 if (self.f)(&self.acc) {
178 Ordering::Equal
179 } else {
180 Ordering::Greater
181 }
182 }
183}
184
185struct SeekByRaccCond<F, T>
186where
187 T: LazyMapMonoid,
188{
189 acc: T::Agg,
190 f: F,
191 _marker: PhantomData<fn() -> T>,
192}
193impl<F, T> SeekByRaccCond<F, T>
194where
195 T: LazyMapMonoid,
196{
197 fn new(f: F) -> Self {
198 Self {
199 acc: T::agg_unit(),
200 f,
201 _marker: PhantomData,
202 }
203 }
204}
205impl<F, T> SplaySeeker for SeekByRaccCond<F, T>
206where
207 F: FnMut(&T::Agg) -> bool,
208 T: LazyMapMonoid,
209{
210 type S = LazyAggSplay<T>;
211 fn splay_seek(&mut self, node: NodeRef<marker::Immut<'_>, Self::S>) -> Ordering {
212 if let Some(lagg) = node.right().map(|r| &r.data().agg) {
213 let nacc = T::agg_operate(lagg, &self.acc);
214 if (self.f)(&nacc) {
215 return Ordering::Greater;
216 }
217 self.acc = nacc;
218 };
219 self.acc = T::agg_operate(&T::single_agg(&node.data().key), &self.acc);
220 if (self.f)(&self.acc) {
221 Ordering::Equal
222 } else {
223 Ordering::Less
224 }
225 }
226}
227
228pub struct SplaySequence<T, A = MemoryPool<Node<LazyAggElement<T>>>>
229where
230 T: LazyMapMonoid,
231 A: Allocator<Node<LazyAggElement<T>>>,
232{
233 root: Root<LazyAggSplay<T>>,
234 length: usize,
235 alloc: ManuallyDrop<A>,
236}
237
238impl<T, A> Debug for SplaySequence<T, A>
239where
240 T: LazyMapMonoid<Key: Debug, Agg: Debug, Act: Debug>,
241 A: Allocator<Node<LazyAggElement<T>>>,
242{
243 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
244 f.debug_struct("SplayMap")
245 .field("root", &self.root)
246 .field("length", &self.length)
247 .finish()
248 }
249}
250
251impl<T, A> Drop for SplaySequence<T, A>
252where
253 T: LazyMapMonoid,
254 A: Allocator<Node<LazyAggElement<T>>>,
255{
256 fn drop(&mut self) {
257 unsafe {
258 while let Some(node) = self.root.take_first() {
259 self.alloc.deallocate(node.into_dying().into_inner());
260 }
261 ManuallyDrop::drop(&mut self.alloc);
262 }
263 }
264}
265
266impl<T, A> Default for SplaySequence<T, A>
267where
268 T: LazyMapMonoid,
269 A: Allocator<Node<LazyAggElement<T>>> + Default,
270{
271 fn default() -> Self {
272 Self {
273 root: Root::default(),
274 length: 0,
275 alloc: Default::default(),
276 }
277 }
278}
279
280impl<T> SplaySequence<T>
281where
282 T: LazyMapMonoid,
283{
284 pub fn new() -> Self {
285 Default::default()
286 }
287 pub fn with_capacity(capacity: usize) -> Self {
288 Self {
289 root: Root::default(),
290 length: 0,
291 alloc: ManuallyDrop::new(MemoryPool::with_capacity(capacity)),
292 }
293 }
294 pub fn len(&self) -> usize {
295 self.length
296 }
297 pub fn is_empty(&self) -> bool {
298 self.length == 0
299 }
300}
301impl<T, A> SplaySequence<T, A>
302where
303 T: LazyMapMonoid,
304 A: Allocator<Node<LazyAggElement<T>>>,
305{
306 fn range<R>(&mut self, range: R) -> NodeRange<'_, LazyAggSplay<T>>
307 where
308 R: RangeBounds<usize>,
309 {
310 let start = match range.start_bound() {
311 Bound::Included(&index) => Bound::Included(SeekBySize::new(index)),
312 Bound::Excluded(&index) => Bound::Excluded(SeekBySize::new(index)),
313 Bound::Unbounded => Bound::Unbounded,
314 };
315 let end = match range.end_bound() {
316 Bound::Included(&index) => Bound::Included(SeekBySize::new(index)),
317 Bound::Excluded(&index) => Bound::Excluded(SeekBySize::new(index)),
318 Bound::Unbounded => Bound::Unbounded,
319 };
320 self.root.range(start, end)
321 }
322 pub fn update<R>(&mut self, range: R, x: T::Act)
323 where
324 R: RangeBounds<usize>,
325 {
326 if let Some(root) = self.range(range).root_mut().root_data_mut() {
327 LazyAggSplay::<T>::update_lazy(root, &x);
328 }
329 }
330 pub fn fold<R>(&mut self, range: R) -> T::Agg
331 where
332 R: RangeBounds<usize>,
333 {
334 if let Some(root) = self.range(range).root().root() {
335 root.data().agg.clone()
336 } else {
337 T::agg_unit()
338 }
339 }
340 pub fn reverse<R>(&mut self, range: R)
341 where
342 R: RangeBounds<usize>,
343 {
344 if let Some(root) = self.range(range).root_mut().root_data_mut() {
345 LazyAggSplay::<T>::reverse(root);
346 }
347 }
348 pub fn get(&mut self, index: usize) -> Option<&T::Key> {
349 self.root.splay_by(SeekBySize::new(index))?;
350 self.root.root().map(|root| &root.data().key)
351 }
352 pub fn modify<F>(&mut self, index: usize, f: F)
353 where
354 F: FnOnce(&T::Key) -> T::Key,
355 {
356 self.root.splay_by(SeekBySize::new(index)).unwrap();
357 let data = self.root.root_data_mut().unwrap().data_mut();
358 data.key = f(&data.key);
359 LazyAggSplay::<T>::bottom_up(self.root.root_data_mut().unwrap());
360 }
361 pub fn insert(&mut self, index: usize, x: T::Key) {
362 assert!(index <= self.length);
363 self.root.splay_by(SeekBySize::new(index));
364 let agg = T::single_agg(&x);
365 unsafe {
366 let node = NodeRef::from_data(
367 LazyAggElement {
368 key: x,
369 agg,
370 lazy: T::act_unit(),
371 size: 1,
372 rev: false,
373 },
374 self.alloc.deref_mut(),
375 );
376 if index == self.length {
377 self.root.insert_right(node);
378 } else {
379 self.root.insert_left(node);
380 }
381 }
382 self.length += 1;
383 }
384 pub fn remove(&mut self, index: usize) -> Option<T::Key> {
385 if index >= self.length {
386 return None;
387 }
388 self.root.splay_by(SeekBySize::new(index));
389 self.length -= 1;
390 let node = self.root.take_root().unwrap().into_dying();
391 unsafe { Some(node.into_data(self.alloc.deref_mut()).key) }
392 }
393 pub fn position_acc<R, F>(&mut self, range: R, f: F) -> Option<usize>
394 where
395 R: RangeBounds<usize>,
396 F: FnMut(&T::Agg) -> bool,
397 {
398 let mut range = self.range(range);
399 let ord = range.root_mut().splay_by(SeekByAccCond::new(f));
400 if !matches!(ord, Some(Ordering::Equal)) {
401 return None;
402 }
403 let front_size = range.front().size();
404 let left_size = range.root().left_size();
405 Some(front_size + left_size)
406 }
407 pub fn rposition_acc<R, F>(&mut self, range: R, f: F) -> Option<usize>
408 where
409 R: RangeBounds<usize>,
410 F: FnMut(&T::Agg) -> bool,
411 {
412 let mut range = self.range(range);
413 let ord = range.root_mut().splay_by(SeekByRaccCond::new(f));
414 if !matches!(ord, Some(Ordering::Equal)) {
415 return None;
416 }
417 let front_size = range.front().size();
418 let left_size = range.root().left_size();
419 Some(front_size + left_size)
420 }
421 pub fn rotate_left(&mut self, mid: usize) {
422 assert!(mid <= self.length);
423 if mid != 0 || mid != self.length {
424 self.range(mid..).drop_rotate_left()
425 }
426 }
427 pub fn rotate_right(&mut self, k: usize) {
428 assert!(k <= self.length);
429 self.rotate_left(self.length - k);
430 }
431}
432
433impl<T, A> Extend<T::Key> for SplaySequence<T, A>
434where
435 T: LazyMapMonoid,
436 A: Allocator<Node<LazyAggElement<T>>>,
437{
438 fn extend<I>(&mut self, iter: I)
439 where
440 I: IntoIterator<Item = T::Key>,
441 {
442 let nodes: Vec<_> = unsafe {
443 iter.into_iter()
444 .map(|key| {
445 let agg = T::single_agg(&key);
446 NodeRef::from_data(
447 LazyAggElement {
448 key,
449 agg,
450 lazy: T::act_unit(),
451 size: 1,
452 rev: false,
453 },
454 self.alloc.deref_mut(),
455 )
456 })
457 .collect()
458 };
459 self.length += nodes.len();
460 let mut root = unsafe { Root::from_single_nodes(nodes) };
461 self.root.append(&mut root);
462 }
fn agg_operate(x: &Self::Agg, y: &Self::Agg) -> Self::Agg
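Because AggMonoid: Monoid<T = Self::Agg>, agg_operate is expected to be associative with agg_unit as its identity; the folds in the examples below rely on this to combine partial aggregates in any grouping. Below is a standalone check of those laws for a hypothetical (sum, len) aggregate; it is illustrative only and does not use the crate's Monoid machinery.

// Standalone law check for a hypothetical aggregate, not the crate's types.
type Agg = (i64, usize); // (sum, len)

fn agg_unit() -> Agg {
    (0, 0)
}

fn agg_operate(x: &Agg, y: &Agg) -> Agg {
    (x.0 + y.0, x.1 + y.1)
}

fn main() {
    let (a, b, c) = ((1, 1), (2, 1), (3, 1));
    // Associativity: (a * b) * c == a * (b * c)
    assert_eq!(
        agg_operate(&agg_operate(&a, &b), &c),
        agg_operate(&a, &agg_operate(&b, &c))
    );
    // Identity: unit * a == a * unit == a
    assert_eq!(agg_operate(&agg_unit(), &a), a);
    assert_eq!(agg_operate(&a, &agg_unit()), a);
}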
Examples found in repository
crates/competitive/src/data_structure/lazy_segment_tree_map.rs (line 78)
73 fn recalc_at(&mut self, k: usize) {
74 let x = match (self.seg.get(&(2 * k)), self.seg.get(&(2 * k + 1))) {
75 (None, None) => M::agg_unit(),
76 (None, Some((y, _))) => y.clone(),
77 (Some((x, _)), None) => x.clone(),
78 (Some((x, _)), Some((y, _))) => M::agg_operate(x, y),
79 };
80 self.get_mut(k).0 = x;
81 }
82 #[inline]
83 fn propagate_at(&mut self, k: usize) {
84 debug_assert!(k < self.n);
85 let x = match self.seg.get_mut(&k) {
86 Some((_, x)) => replace(x, M::act_unit()),
87 None => M::act_unit(),
88 };
89 self.update_at(2 * k, &x);
90 self.update_at(2 * k + 1, &x);
91 }
92 #[inline]
93 fn propagate(&mut self, k: usize, right: bool, nofilt: bool) {
94 let right = right as usize;
95 for i in (1..(k + 1 - right).next_power_of_two().trailing_zeros()).rev() {
96 if nofilt || (k >> i) << i != k {
97 self.propagate_at((k - right) >> i);
98 }
99 }
100 }
101 #[inline]
102 fn recalc(&mut self, k: usize, right: bool, nofilt: bool) {
103 let right = right as usize;
104 for i in 1..(k + 1 - right).next_power_of_two().trailing_zeros() {
105 if nofilt || (k >> i) << i != k {
106 self.recalc_at((k - right) >> i);
107 }
108 }
109 }
110 pub fn update<R>(&mut self, range: R, x: M::Act)
111 where
112 R: RangeBounds<usize>,
113 {
114 let range = range.to_range_bounded(0, self.n).expect("invalid range");
115 let mut a = range.start + self.n;
116 let mut b = range.end + self.n;
117 self.propagate(a, false, false);
118 self.propagate(b, true, false);
119 while a < b {
120 if a & 1 != 0 {
121 self.update_at(a, &x);
122 a += 1;
123 }
124 if b & 1 != 0 {
125 b -= 1;
126 self.update_at(b, &x);
127 }
128 a /= 2;
129 b /= 2;
130 }
131 self.recalc(range.start + self.n, false, false);
132 self.recalc(range.end + self.n, true, false);
133 }
134 pub fn fold<R>(&mut self, range: R) -> M::Agg
135 where
136 R: RangeBounds<usize>,
137 {
138 let range = range.to_range_bounded(0, self.n).expect("invalid range");
139 let mut l = range.start + self.n;
140 let mut r = range.end + self.n;
141 self.propagate(l, false, true);
142 self.propagate(r, true, true);
143 let mut vl = M::agg_unit();
144 let mut vr = M::agg_unit();
145 while l < r {
146 if l & 1 != 0 {
147 if let Some((x, _)) = self.seg.get(&l) {
148 vl = M::agg_operate(&vl, x);
149 }
150 l += 1;
151 }
152 if r & 1 != 0 {
153 r -= 1;
154 if let Some((x, _)) = self.seg.get(&r) {
155 vr = M::agg_operate(x, &vr);
156 }
157 }
158 l /= 2;
159 r /= 2;
160 }
161 M::agg_operate(&vl, &vr)
162 }
163 pub fn set(&mut self, k: usize, x: M::Agg) {
164 let k = k + self.n;
165 self.propagate(k, false, true);
166 *self.get_mut(k) = (x, M::act_unit());
167 self.recalc(k, false, true);
168 }
169 pub fn get(&mut self, k: usize) -> M::Agg {
170 self.fold(k..k + 1)
171 }
172 pub fn fold_all(&mut self) -> M::Agg {
173 self.fold(0..self.n)
174 }
175 fn bisect_perfect<P>(&mut self, mut pos: usize, mut acc: M::Agg, p: P) -> (usize, M::Agg)
176 where
177 P: Fn(&M::Agg) -> bool,
178 {
179 while pos < self.n {
180 self.propagate_at(pos);
181 pos <<= 1;
182 let nacc = match self.seg.get(&pos) {
183 Some((x, _)) => M::agg_operate(&acc, x),
184 None => acc.clone(),
185 };
186 if !p(&nacc) {
187 acc = nacc;
188 pos += 1;
189 }
190 }
191 (pos - self.n, acc)
192 }
193 fn rbisect_perfect<P>(&mut self, mut pos: usize, mut acc: M::Agg, p: P) -> (usize, M::Agg)
194 where
195 P: Fn(&M::Agg) -> bool,
196 {
197 while pos < self.n {
198 self.propagate_at(pos);
199 pos = pos * 2 + 1;
200 let nacc = match self.seg.get(&pos) {
201 Some((x, _)) => M::agg_operate(x, &acc),
202 None => acc.clone(),
203 };
204 if !p(&nacc) {
205 acc = nacc;
206 pos -= 1;
207 }
208 }
209 (pos - self.n, acc)
210 }
211 /// Returns the first index that satisfies an accumulative predicate.
212 pub fn position_acc<R, P>(&mut self, range: R, p: P) -> Option<usize>
213 where
214 R: RangeBounds<usize>,
215 P: Fn(&M::Agg) -> bool,
216 {
217 let range = range.to_range_bounded(0, self.n).expect("invalid range");
218 let mut l = range.start + self.n;
219 let r = range.end + self.n;
220 self.propagate(l, false, true);
221 self.propagate(r, true, true);
222 let mut k = 0usize;
223 let mut acc = M::agg_unit();
224 while l < r >> k {
225 if l & 1 != 0 {
226 let nacc = match self.seg.get(&l) {
227 Some((x, _)) => M::agg_operate(&acc, x),
228 None => acc.clone(),
229 };
230 if p(&nacc) {
231 return Some(self.bisect_perfect(l, acc, p).0);
232 }
233 acc = nacc;
234 l += 1;
235 }
236 l >>= 1;
237 k += 1;
238 }
239 for k in (0..k).rev() {
240 let r = r >> k;
241 if r & 1 != 0 {
242 let nacc = match self.seg.get(&(r - 1)) {
243 Some((x, _)) => M::agg_operate(&acc, x),
244 None => acc.clone(),
245 };
246 if p(&nacc) {
247 return Some(self.bisect_perfect(r - 1, acc, p).0);
248 }
249 acc = nacc;
250 }
251 }
252 None
253 }
254 /// Returns the last index that satisfies an accumulative predicate.
255 pub fn rposition_acc<R, P>(&mut self, range: R, p: P) -> Option<usize>
256 where
257 R: RangeBounds<usize>,
258 P: Fn(&M::Agg) -> bool,
259 {
260 let range = range.to_range_bounded(0, self.n).expect("invalid range");
261 let mut l = range.start + self.n;
262 let mut r = range.end + self.n;
263 self.propagate(l, false, true);
264 self.propagate(r, true, true);
265 let mut c = 0usize;
266 let mut k = 0usize;
267 let mut acc = M::agg_unit();
268 while l >> k < r {
269 c <<= 1;
270 if l & (1 << k) != 0 {
271 l += 1 << k;
272 c += 1;
273 }
274 if r & 1 != 0 {
275 r -= 1;
276 let nacc = match self.seg.get(&r) {
277 Some((x, _)) => M::agg_operate(x, &acc),
278 None => acc.clone(),
279 };
280 if p(&nacc) {
281 return Some(self.rbisect_perfect(r, acc, p).0);
282 }
283 acc = nacc;
284 }
285 r >>= 1;
286 k += 1;
287 }
288 for k in (0..k).rev() {
289 if c & 1 != 0 {
290 l -= 1 << k;
291 let l = l >> k;
292 let nacc = match self.seg.get(&l) {
293 Some((x, _)) => M::agg_operate(x, &acc),
294 None => acc.clone(),
295 };
296 if p(&nacc) {
297 return Some(self.rbisect_perfect(l, acc, p).0);
298 }
299 acc = nacc;
300 }
301 c >>= 1;
302 }
303 None
304 }
More examples
crates/competitive/src/data_structure/lazy_segment_tree.rs (line 55)
48 pub fn from_vec(v: Vec<M::Agg>) -> Self {
49 let n = v.len();
50 let mut seg = vec![(M::agg_unit(), M::act_unit()); 2 * n];
51 for (i, x) in v.into_iter().enumerate() {
52 seg[i + n].0 = x;
53 }
54 for i in (1..n).rev() {
55 seg[i].0 = M::agg_operate(&seg[2 * i].0, &seg[2 * i + 1].0);
56 }
57 Self { n, seg }
58 }
59 pub fn from_keys(keys: impl ExactSizeIterator<Item = M::Key>) -> Self {
60 let n = keys.len();
61 let mut seg = vec![(M::agg_unit(), M::act_unit()); 2 * n];
62 for (i, key) in keys.enumerate() {
63 seg[i + n].0 = M::single_agg(&key);
64 }
65 for i in (1..n).rev() {
66 seg[i].0 = M::agg_operate(&seg[2 * i].0, &seg[2 * i + 1].0);
67 }
68 Self { n, seg }
69 }
70 #[inline]
71 fn update_at(&mut self, k: usize, x: &M::Act) {
72 let nx = M::act_agg(&self.seg[k].0, x);
73 if k < self.n {
74 self.seg[k].1 = M::act_operate(&self.seg[k].1, x);
75 }
76 if let Some(nx) = nx {
77 self.seg[k].0 = nx;
78 } else if k < self.n {
79 self.propagate_at(k);
80 self.recalc_at(k);
81 } else {
82 panic!("act failed on leaf");
83 }
84 }
85 #[inline]
86 fn recalc_at(&mut self, k: usize) {
87 self.seg[k].0 = M::agg_operate(&self.seg[2 * k].0, &self.seg[2 * k + 1].0);
88 }
89 #[inline]
90 fn propagate_at(&mut self, k: usize) {
91 debug_assert!(k < self.n);
92 let x = replace(&mut self.seg[k].1, M::act_unit());
93 self.update_at(2 * k, &x);
94 self.update_at(2 * k + 1, &x);
95 }
96 #[inline]
97 fn propagate(&mut self, k: usize, right: bool, nofilt: bool) {
98 let right = right as usize;
99 for i in (1..(k + 1 - right).next_power_of_two().trailing_zeros()).rev() {
100 if nofilt || (k >> i) << i != k {
101 self.propagate_at((k - right) >> i);
102 }
103 }
104 }
105 #[inline]
106 fn recalc(&mut self, k: usize, right: bool, nofilt: bool) {
107 let right = right as usize;
108 for i in 1..(k + 1 - right).next_power_of_two().trailing_zeros() {
109 if nofilt || (k >> i) << i != k {
110 self.recalc_at((k - right) >> i);
111 }
112 }
113 }
114 pub fn update<R>(&mut self, range: R, x: M::Act)
115 where
116 R: RangeBounds<usize>,
117 {
118 let range = range.to_range_bounded(0, self.n).expect("invalid range");
119 let mut a = range.start + self.n;
120 let mut b = range.end + self.n;
121 self.propagate(a, false, false);
122 self.propagate(b, true, false);
123 while a < b {
124 if a & 1 != 0 {
125 self.update_at(a, &x);
126 a += 1;
127 }
128 if b & 1 != 0 {
129 b -= 1;
130 self.update_at(b, &x);
131 }
132 a /= 2;
133 b /= 2;
134 }
135 self.recalc(range.start + self.n, false, false);
136 self.recalc(range.end + self.n, true, false);
137 }
138 pub fn fold<R>(&mut self, range: R) -> M::Agg
139 where
140 R: RangeBounds<usize>,
141 {
142 let range = range.to_range_bounded(0, self.n).expect("invalid range");
143 let mut l = range.start + self.n;
144 let mut r = range.end + self.n;
145 self.propagate(l, false, true);
146 self.propagate(r, true, true);
147 let mut vl = M::agg_unit();
148 let mut vr = M::agg_unit();
149 while l < r {
150 if l & 1 != 0 {
151 vl = M::agg_operate(&vl, &self.seg[l].0);
152 l += 1;
153 }
154 if r & 1 != 0 {
155 r -= 1;
156 vr = M::agg_operate(&self.seg[r].0, &vr);
157 }
158 l /= 2;
159 r /= 2;
160 }
161 M::agg_operate(&vl, &vr)
162 }
163 pub fn set(&mut self, k: usize, x: M::Agg) {
164 let k = k + self.n;
165 self.propagate(k, false, true);
166 self.seg[k] = (x, M::act_unit());
167 self.recalc(k, false, true);
168 }
169 pub fn get(&mut self, k: usize) -> M::Agg {
170 self.fold(k..k + 1)
171 }
172 pub fn fold_all(&mut self) -> M::Agg {
173 self.fold(0..self.n)
174 }
175 fn bisect_perfect<P>(&mut self, mut pos: usize, mut acc: M::Agg, p: P) -> (usize, M::Agg)
176 where
177 P: Fn(&M::Agg) -> bool,
178 {
179 while pos < self.n {
180 self.propagate_at(pos);
181 pos <<= 1;
182 let nacc = M::agg_operate(&acc, &self.seg[pos].0);
183 if !p(&nacc) {
184 acc = nacc;
185 pos += 1;
186 }
187 }
188 (pos - self.n, acc)
189 }
190 fn rbisect_perfect<P>(&mut self, mut pos: usize, mut acc: M::Agg, p: P) -> (usize, M::Agg)
191 where
192 P: Fn(&M::Agg) -> bool,
193 {
194 while pos < self.n {
195 self.propagate_at(pos);
196 pos = pos * 2 + 1;
197 let nacc = M::agg_operate(&self.seg[pos].0, &acc);
198 if !p(&nacc) {
199 acc = nacc;
200 pos -= 1;
201 }
202 }
203 (pos - self.n, acc)
204 }
205 /// Returns the first index that satisfies an accumulative predicate.
206 pub fn position_acc<R, P>(&mut self, range: R, p: P) -> Option<usize>
207 where
208 R: RangeBounds<usize>,
209 P: Fn(&M::Agg) -> bool,
210 {
211 let range = range.to_range_bounded(0, self.n).expect("invalid range");
212 let mut l = range.start + self.n;
213 let r = range.end + self.n;
214 self.propagate(l, false, true);
215 self.propagate(r, true, true);
216 let mut k = 0usize;
217 let mut acc = M::agg_unit();
218 while l < r >> k {
219 if l & 1 != 0 {
220 let nacc = M::agg_operate(&acc, &self.seg[l].0);
221 if p(&nacc) {
222 return Some(self.bisect_perfect(l, acc, p).0);
223 }
224 acc = nacc;
225 l += 1;
226 }
227 l >>= 1;
228 k += 1;
229 }
230 for k in (0..k).rev() {
231 let r = r >> k;
232 if r & 1 != 0 {
233 let nacc = M::agg_operate(&acc, &self.seg[r - 1].0);
234 if p(&nacc) {
235 return Some(self.bisect_perfect(r - 1, acc, p).0);
236 }
237 acc = nacc;
238 }
239 }
240 None
241 }
242 /// Returns the last index that satisfies an accumulative predicate.
243 pub fn rposition_acc<R, P>(&mut self, range: R, p: P) -> Option<usize>
244 where
245 R: RangeBounds<usize>,
246 P: Fn(&M::Agg) -> bool,
247 {
248 let range = range.to_range_bounded(0, self.n).expect("invalid range");
249 let mut l = range.start + self.n;
250 let mut r = range.end + self.n;
251 self.propagate(l, false, true);
252 self.propagate(r, true, true);
253 let mut c = 0usize;
254 let mut k = 0usize;
255 let mut acc = M::agg_unit();
256 while l >> k < r {
257 c <<= 1;
258 if l & (1 << k) != 0 {
259 l += 1 << k;
260 c += 1;
261 }
262 if r & 1 != 0 {
263 r -= 1;
264 let nacc = M::agg_operate(&self.seg[r].0, &acc);
265 if p(&nacc) {
266 return Some(self.rbisect_perfect(r, acc, p).0);
267 }
268 acc = nacc;
269 }
270 r >>= 1;
271 k += 1;
272 }
273 for k in (0..k).rev() {
274 if c & 1 != 0 {
275 l -= 1 << k;
276 let l = l >> k;
277 let nacc = M::agg_operate(&self.seg[l].0, &acc);
278 if p(&nacc) {
279 return Some(self.rbisect_perfect(l, acc, p).0);
280 }
281 acc = nacc;
282 }
283 c >>= 1;
284 }
285 None
286 }
crates/competitive/src/data_structure/binary_search_tree/data.rs (line 177)
171 pub fn bottom_up<Spec>(mut node: BstDataMutRef<'_, Spec>)
172 where
173 Spec: BstSpec<Data: BstDataAccess<marker::LazyMap, Value = Self>>,
174 {
175 let mut agg = L::single_agg(&node.reborrow().into_data().bst_data().key);
176 if let Ok(left) = node.reborrow().left().descend() {
177 agg = L::agg_operate(&left.into_data().bst_data().agg, &agg);
178 }
179 if let Ok(right) = node.reborrow().right().descend() {
180 agg = L::agg_operate(&agg, &right.into_data().bst_data().agg);
181 }
182 node.data_mut().bst_data_mut().agg = agg;
183 }
crates/competitive/src/data_structure/splay_tree/sequence.rs (line 84)
79 fn recalc(node: NodeRef<marker::DataMut<'_>, Self>) -> NodeRef<marker::DataMut<'_>, Self> {
80 let mut agg = T::single_agg(&node.data().key);
81 let mut size = 1;
82 if let Some(left) = node.left() {
83 let data = left.data();
84 agg = T::agg_operate(&data.agg, &agg);
85 // agg = <T::AggMonoid as Magma>::operate(&data.agg, &agg);
86 size += data.size;
87 }
88 if let Some(right) = node.right() {
89 let data = right.data();
90 agg = T::agg_operate(&agg, &data.agg);
91 size += data.size;
92 }
93 let data = node.data_mut();
94 data.agg = agg;
95 data.size = size;
96 node
97 }
98}
99impl<T> SplaySpec for LazyAggSplay<T>
100where
101 T: LazyMapMonoid,
102{
103 type T = LazyAggElement<T>;
104 fn has_bottom_up() -> bool {
105 true
106 }
107 fn top_down(node: NodeRef<marker::DataMut<'_>, Self>) {
108 Self::propagate(node);
109 }
110 fn bottom_up(node: NodeRef<marker::DataMut<'_>, Self>) {
111 Self::recalc(node);
112 }
113}
114
115struct SeekBySize<T> {
116 index: usize,
117 _marker: PhantomData<fn() -> T>,
118}
119impl<T> SeekBySize<T> {
120 fn new(index: usize) -> Self {
121 Self {
122 index,
123 _marker: PhantomData,
124 }
125 }
126}
127impl<T> SplaySeeker for SeekBySize<T>
128where
129 T: LazyMapMonoid,
130{
131 type S = LazyAggSplay<T>;
132 fn splay_seek(&mut self, node: NodeRef<marker::Immut<'_>, Self::S>) -> Ordering {
133 let lsize = node.left().map(|l| l.data().size).unwrap_or_default();
134 let ord = self.index.cmp(&lsize);
135 if matches!(ord, Ordering::Greater) {
136 self.index -= lsize + 1;
137 }
138 ord
139 }
140}
141
142struct SeekByAccCond<F, T>
143where
144 T: LazyMapMonoid,
145{
146 acc: T::Agg,
147 f: F,
148 _marker: PhantomData<fn() -> T>,
149}
150impl<F, T> SeekByAccCond<F, T>
151where
152 T: LazyMapMonoid,
153{
154 fn new(f: F) -> Self {
155 Self {
156 acc: T::agg_unit(),
157 f,
158 _marker: PhantomData,
159 }
160 }
161}
162impl<F, T> SplaySeeker for SeekByAccCond<F, T>
163where
164 F: FnMut(&T::Agg) -> bool,
165 T: LazyMapMonoid,
166{
167 type S = LazyAggSplay<T>;
168 fn splay_seek(&mut self, node: NodeRef<marker::Immut<'_>, Self::S>) -> Ordering {
169 if let Some(lagg) = node.left().map(|l| &l.data().agg) {
170 let nacc = T::agg_operate(&self.acc, lagg);
171 if (self.f)(&nacc) {
172 return Ordering::Less;
173 }
174 self.acc = nacc;
175 };
176 self.acc = T::agg_operate(&self.acc, &T::single_agg(&node.data().key));
177 if (self.f)(&self.acc) {
178 Ordering::Equal
179 } else {
180 Ordering::Greater
181 }
182 }
183}
184
185struct SeekByRaccCond<F, T>
186where
187 T: LazyMapMonoid,
188{
189 acc: T::Agg,
190 f: F,
191 _marker: PhantomData<fn() -> T>,
192}
193impl<F, T> SeekByRaccCond<F, T>
194where
195 T: LazyMapMonoid,
196{
197 fn new(f: F) -> Self {
198 Self {
199 acc: T::agg_unit(),
200 f,
201 _marker: PhantomData,
202 }
203 }
204}
205impl<F, T> SplaySeeker for SeekByRaccCond<F, T>
206where
207 F: FnMut(&T::Agg) -> bool,
208 T: LazyMapMonoid,
209{
210 type S = LazyAggSplay<T>;
211 fn splay_seek(&mut self, node: NodeRef<marker::Immut<'_>, Self::S>) -> Ordering {
212 if let Some(lagg) = node.right().map(|r| &r.data().agg) {
213 let nacc = T::agg_operate(lagg, &self.acc);
214 if (self.f)(&nacc) {
215 return Ordering::Greater;
216 }
217 self.acc = nacc;
218 };
219 self.acc = T::agg_operate(&T::single_agg(&node.data().key), &self.acc);
220 if (self.f)(&self.acc) {
221 Ordering::Equal
222 } else {
223 Ordering::Less
224 }
225 }
crates/competitive/src/data_structure/binary_search_tree/seeker.rs (line 169)
166 fn bst_seek(&mut self, node: BstImmutRef<'_, Self::Spec>) -> Ordering {
167 if let Ok(left) = node.reborrow().left().descend() {
168 let left_agg = &left.into_data().bst_data().agg;
169 let nagg = L::agg_operate(&self.acc, left_agg);
170 if (self.f)(&nagg) {
171 return Ordering::Greater;
172 }
173 let nagg = L::agg_operate(
174 &nagg,
175 &L::single_agg(&node.reborrow().into_data().bst_data().key),
176 );
177 if (self.f)(&nagg) {
178 Ordering::Equal
179 } else {
180 self.acc = nagg;
181 Ordering::Less
182 }
183 } else {
184 let nagg = L::agg_operate(
185 &self.acc,
186 &L::single_agg(&node.reborrow().into_data().bst_data().key),
187 );
188 if (self.f)(&nagg) {
189 Ordering::Equal
190 } else {
191 self.acc = nagg;
192 Ordering::Less
193 }
194 }
195 }
196}
197
198pub struct SeekByRaccCond<Spec, L, F>
199where
200 L: LazyMapMonoid,
201{
202 acc: L::Agg,
203 f: F,
204 _marker: PhantomData<fn() -> (Spec, L)>,
205}
206
207impl<Spec, L, F> SeekByRaccCond<Spec, L, F>
208where
209 L: LazyMapMonoid,
210 F: FnMut(&L::Agg) -> bool,
211{
212 pub fn new(f: F) -> Self {
213 Self {
214 acc: L::agg_unit(),
215 f,
216 _marker: PhantomData,
217 }
218 }
219}
220
221impl<Spec, L, F> BstSeeker for SeekByRaccCond<Spec, L, F>
222where
223 Spec: BstSpec<Data: BstDataAccess<data::marker::LazyMap, Value = LazyMapElement<L>>>,
224 L: LazyMapMonoid,
225 F: FnMut(&L::Agg) -> bool,
226{
227 type Spec = Spec;
228
229 fn bst_seek(&mut self, node: BstImmutRef<'_, Self::Spec>) -> Ordering {
230 if let Ok(right) = node.reborrow().right().descend() {
231 let right_agg = &right.into_data().bst_data().agg;
232 let nagg = L::agg_operate(right_agg, &self.acc);
233 if (self.f)(&nagg) {
234 return Ordering::Less;
235 }
236 let nagg = L::agg_operate(
237 &L::single_agg(&node.reborrow().into_data().bst_data().key),
238 &nagg,
239 );
240 if (self.f)(&nagg) {
241 Ordering::Equal
242 } else {
243 self.acc = nagg;
244 Ordering::Greater
245 }
246 } else {
247 let nagg = L::agg_operate(
248 &L::single_agg(&node.reborrow().into_data().bst_data().key),
249 &self.acc,
250 );
251 if (self.f)(&nagg) {
252 Ordering::Equal
253 } else {
254 self.acc = nagg;
255 Ordering::Greater
256 }
257 }
258 }
fn act_operate(x: &Self::Act, y: &Self::Act) -> Self::Act
Examples found in repository
crates/competitive/src/data_structure/lazy_segment_tree.rs (line 74)
71 fn update_at(&mut self, k: usize, x: &M::Act) {
72 let nx = M::act_agg(&self.seg[k].0, x);
73 if k < self.n {
74 self.seg[k].1 = M::act_operate(&self.seg[k].1, x);
75 }
76 if let Some(nx) = nx {
77 self.seg[k].0 = nx;
78 } else if k < self.n {
79 self.propagate_at(k);
80 self.recalc_at(k);
81 } else {
82 panic!("act failed on leaf");
83 }
84 }
More examples
crates/competitive/src/data_structure/lazy_segment_tree_map.rs (line 61)
56 fn update_at(&mut self, k: usize, x: &M::Act) {
57 let n = self.n;
58 let a = self.get_mut(k);
59 let nx = M::act_agg(&a.0, x);
60 if k < n {
61 a.1 = M::act_operate(&a.1, x);
62 }
63 if let Some(nx) = nx {
64 a.0 = nx;
65 } else if k < n {
66 self.propagate_at(k);
67 self.recalc_at(k);
68 } else {
69 panic!("act failed on leaf");
70 }
71 }
fn agg_operate_assign(x: &mut Self::Agg, y: &Self::Agg)
fn act_operate_assign(x: &mut Self::Act, y: &Self::Act)
Examples found in repository
More examples
crates/competitive/src/data_structure/binary_search_tree/data.rs (line 147)
143 pub fn update_act<Spec>(mut node: BstDataMutRef<'_, Spec>, act: &L::Act)
144 where
145 Spec: BstSpec<Data: BstDataAccess<marker::LazyMap, Value = Self>>,
146 {
147 L::act_operate_assign(&mut node.data_mut().bst_data_mut().act, act);
148 node.data_mut().bst_data_mut().key =
149 L::act_key(&node.reborrow().into_data().bst_data().key, act);
150 if let Some(nxlazy) = L::act_agg(&node.reborrow().into_data().bst_data().agg, act) {
151 node.data_mut().bst_data_mut().agg = nxlazy;
152 } else {
153 Self::top_down(node.reborrow_datamut());
154 Self::bottom_up(node.reborrow_datamut());
155 }
156 }
Dyn Compatibility§
This trait is not dyn compatible.
In older versions of Rust, dyn compatibility was called "object safety", so this trait is not object safe.
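Every method on this trait is an associated function without a self receiver, and the trait carries several associated types, so it is used as a generic bound rather than behind dyn. Below is a hedged sketch of such use, assuming the trait is imported from this crate; fold_keys is a hypothetical helper, not part of the API, and only calls methods documented on this page.

// Hypothetical helper showing generic use instead of `dyn LazyMapMonoid`.
fn fold_keys<M: LazyMapMonoid>(keys: &[M::Key]) -> M::Agg {
    keys.iter()
        .map(M::single_agg)
        .fold(M::agg_unit(), |acc, x| M::agg_operate(&acc, &x))
}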