1use super::{AbelianGroup, AbelianMonoid, Group, Monoid, RangeBoundsExt};
2use std::{
3 fmt::{self, Debug, Formatter},
4 iter::FromIterator,
5 ops::RangeBounds,
6};
7
/// Prefix-fold table over a monoid: entry `k` stores the fold of the first
/// `k` input elements, so the table is one entry longer than the input.
pub struct Accumulate<M>
where
    M: Monoid,
{
    /// `data[k] = x_0 * x_1 * ... * x_{k-1}`, with `data[0] = unit`.
    data: Vec<M::T>,
}
15
16impl<M> Debug for Accumulate<M>
17where
18 M: Monoid<T: Debug>,
19{
20 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
21 f.debug_struct("Accumulate")
22 .field("data", &self.data)
23 .finish()
24 }
25}
26
27impl<M> FromIterator<M::T> for Accumulate<M>
28where
29 M: Monoid,
30{
31 fn from_iter<T>(iter: T) -> Self
32 where
33 T: IntoIterator<Item = M::T>,
34 {
35 let iter = iter.into_iter();
36 let (lower, _) = iter.size_hint();
37 let mut data = Vec::with_capacity(lower.saturating_add(1));
38 let mut acc = M::unit();
39 for x in iter {
40 let y = M::operate(&acc, &x);
41 data.push(acc);
42 acc = y;
43 }
44 data.push(acc);
45 Self { data }
46 }
47}
48
impl<M> Accumulate<M>
where
    M: Monoid,
{
    /// Builds the prefix table from an owned `Vec`, reusing its buffer: each
    /// slot is overwritten with the fold of everything before it, and the
    /// fold of the whole input is appended at the end.
    pub fn from_vec(mut data: Vec<M::T>) -> Self {
        let mut acc = M::unit();
        for x in &mut data {
            // `y` is the fold including `x`; the slot itself keeps the fold
            // of the elements strictly before it.
            let y = M::operate(&acc, x);
            *x = acc;
            acc = y;
        }
        data.push(acc);
        Self { data }
    }

    /// Returns the fold of the first `k` elements; `k` may equal the input
    /// length, in which case the fold of the whole input is returned.
    ///
    /// # Panics
    /// Panics if `k` exceeds the input length.
    pub fn accumulate(&self, k: usize) -> M::T {
        assert!(
            k < self.data.len(),
            "index out of range: the len is {} but the index is {}",
            self.data.len(),
            k
        );
        // SAFETY: the assert above guarantees `k` is in bounds.
        unsafe { self.data.get_unchecked(k) }.clone()
    }
}
75
impl<M> Accumulate<M>
where
    M: Group,
{
    /// Folds the elements in `range` by prefix difference:
    /// `fold(l..r) = inverse(prefix[l]) * prefix[r]`. The operand order
    /// matters, so this is valid even for non-commutative groups, because the
    /// prefixes were folded left-to-right.
    ///
    /// # Panics
    /// Panics if the range is out of bounds or decreasing.
    pub fn fold<R>(&self, range: R) -> M::T
    where
        R: RangeBounds<usize>,
    {
        // `data` holds n + 1 prefix entries for an input of length n.
        let n = self.data.len() - 1;
        let range = range.to_range_bounded(0, n).expect("invalid range");
        let (l, r) = (range.start, range.end);
        assert!(l <= r, "bad range [{}, {})", l, r);
        // SAFETY: a successful `to_range_bounded(0, n)` yields l, r in
        // 0..=n, all valid indices into `data` (len n + 1).
        M::operate(&M::inverse(unsafe { self.data.get_unchecked(l) }), unsafe {
            self.data.get_unchecked(r)
        })
    }
}
94
/// 2-d prefix-fold table over a commutative monoid, stored row-major as
/// `(h + 1) * (w + 1)` entries with a leading row and column of units.
pub struct Accumulate2d<M>
where
    M: AbelianMonoid,
{
    /// Number of input rows.
    h: usize,
    /// Number of input columns.
    w: usize,
    /// Row-major table; entry `(x, y)` lives at `data[(w + 1) * x + y]`.
    data: Vec<M::T>,
}
104
105impl<M> Debug for Accumulate2d<M>
106where
107 M: AbelianMonoid<T: Debug>,
108{
109 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
110 f.debug_struct("Accumulate2d")
111 .field("h", &self.h)
112 .field("w", &self.w)
113 .field("data", &self.data)
114 .finish()
115 }
116}
117
impl<M> Accumulate2d<M>
where
    M: AbelianMonoid,
{
    /// Builds the table from a non-empty rectangular 2-d array.
    ///
    /// # Panics
    /// Panics if `arr2d` has no rows, its first row is empty, or any row's
    /// length differs from the first row's.
    pub fn new(arr2d: &[Vec<M::T>]) -> Self {
        let h = arr2d.len();
        assert!(h > 0);
        let w = arr2d[0].len();
        assert!(w > 0);
        let w1 = w + 1;
        let mut data = Vec::with_capacity((h + 1) * w1);
        // Table row 0 is all units.
        data.resize_with(w1, M::unit);
        for (i, arr) in arr2d.iter().enumerate() {
            assert_eq!(w, arr.len(), "expected 2d array");
            let mut acc = M::unit();
            for (j, x) in arr.iter().enumerate() {
                let y = M::operate(&acc, x);
                // prefix(i + 1, j) = (fold of row i over cols < j) combined
                // with prefix(i, j) directly above.
                // SAFETY: table row i (indices w1 * i .. w1 * (i + 1)) was
                // fully written before this push, so the index is in bounds.
                data.push(M::operate(&acc, unsafe { data.get_unchecked(w1 * i + j) }));
                acc = y;
            }
            // Last column of the new row uses the full fold of input row i.
            // SAFETY: as above, `w1 * i + w` indexes the completed row i.
            data.push(M::operate(&acc, unsafe { data.get_unchecked(w1 * i + w) }));
        }
        Self { h, w, data }
    }
    /// Builds the table from `f(i, j)` for `i < h`, `j < w`. Unlike `new`,
    /// zero-sized dimensions are accepted.
    pub fn from_fn<F>(h: usize, w: usize, mut f: F) -> Self
    where
        F: FnMut(usize, usize) -> M::T,
    {
        let w1 = w + 1;
        let mut data = Vec::with_capacity((h + 1) * w1);
        // Table row 0 is all units.
        data.resize_with(w1, M::unit);
        for i in 0..h {
            let mut acc = M::unit();
            for j in 0..w {
                let y = M::operate(&acc, &f(i, j));
                // Same recurrence as in `new`: combine the running row fold
                // with the entry directly above.
                // SAFETY: table row i was fully written before this push.
                data.push(M::operate(&acc, unsafe { data.get_unchecked(w1 * i + j) }));
                acc = y;
            }
            // SAFETY: as above, `w1 * i + w` indexes the completed row i.
            data.push(M::operate(&acc, unsafe { data.get_unchecked(w1 * i + w) }));
        }
        Self { h, w, data }
    }
    /// Returns the fold of the prefix rectangle `[0, x) x [0, y)`; both
    /// coordinates may equal their dimension.
    ///
    /// # Panics
    /// Panics if `x > h` or `y > w`.
    pub fn accumulate(&self, x: usize, y: usize) -> M::T {
        let h1 = self.h + 1;
        let w1 = self.w + 1;
        assert!(
            x < h1,
            "index out of range: the first len is {} but the index is {}",
            h1,
            x
        );
        assert!(
            y < w1,
            "index out of range: the second len is {} but the index is {}",
            w1,
            y
        );
        // SAFETY: the asserts above bound x <= h and y <= w, so the flat
        // index is within the (h + 1) * (w + 1) table.
        unsafe { self.data.get_unchecked(w1 * x + y) }.clone()
    }
}
179
impl<M> Accumulate2d<M>
where
    M: AbelianGroup,
{
    /// Folds the rectangle `range0 x range1` by 2-d inclusion-exclusion:
    /// the corners `(xl, yl)` and `(xr, yr)` are added, and `(xl, yr)` and
    /// `(xr, yl)` are removed via `rinv_operate`.
    ///
    /// # Panics
    /// Panics if either range is out of bounds or decreasing.
    pub fn fold<R0, R1>(&self, range0: R0, range1: R1) -> M::T
    where
        R0: RangeBounds<usize>,
        R1: RangeBounds<usize>,
    {
        let range0 = range0.to_range_bounded(0, self.h).expect("invalid range");
        let range1 = range1.to_range_bounded(0, self.w).expect("invalid range");
        let (xl, xr) = (range0.start, range0.end);
        let (yl, yr) = (range1.start, range1.end);
        assert!(xl <= xr, "bad range [{}, {})", xl, xr);
        assert!(yl <= yr, "bad range [{}, {})", yl, yr);
        let w1 = self.w + 1;
        // SAFETY: both ranges were validated against h and w above, so every
        // corner index lies within the (h + 1) * (w + 1) table.
        unsafe {
            M::rinv_operate(
                &M::operate(
                    self.data.get_unchecked(w1 * xl + yl),
                    self.data.get_unchecked(w1 * xr + yr),
                ),
                &M::operate(
                    self.data.get_unchecked(w1 * xl + yr),
                    self.data.get_unchecked(w1 * xr + yl),
                ),
            )
        }
    }
}
211
/// K-dimensional prefix-fold table over a commutative monoid; the table has
/// extent `dim[d] + 1` along axis `d`, with the zero hyperplanes left as unit.
pub struct AccumulateKd<const K: usize, M>
where
    M: AbelianMonoid,
{
    /// Extent of the input along each axis.
    dim: [usize; K],
    /// Row-major stride of each axis within `data`.
    offset: [usize; K],
    /// Flattened table of `(dim[0] + 1) * ... * (dim[K - 1] + 1)` entries.
    data: Vec<M::T>,
}
220
221impl<const K: usize, M> Debug for AccumulateKd<K, M>
222where
223 M: AbelianMonoid<T: Debug>,
224{
225 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
226 f.debug_struct("AccumulateKd")
227 .field("dim", &self.dim)
228 .field("offset", &self.offset)
229 .field("data", &self.data)
230 .finish()
231 }
232}
233
impl<const K: usize, M> AccumulateKd<K, M>
where
    M: AbelianMonoid,
{
    /// Builds a K-dimensional prefix-fold table; `f(index)` supplies the
    /// element at `index`, with `index[d] < dim[d]` for every axis.
    pub fn from_fn(dim: [usize; K], mut f: impl FnMut([usize; K]) -> M::T) -> Self {
        // Recursively enumerates every index tuple and writes `f(index)` into
        // its slot of the flat table, shifted by +1 along every axis so the
        // zero hyperplanes stay `unit`.
        fn fill<const K: usize, T>(
            dim: &[usize; K],
            offset: &[usize; K],
            data: &mut [T],
            f: &mut impl FnMut([usize; K]) -> T,
            mut index: [usize; K],
            pos: usize,
        ) {
            if pos < K {
                for i in 0..dim[pos] {
                    index[pos] = i;
                    fill(dim, offset, data, f, index, pos + 1);
                }
            } else {
                // Flat position of `index`, shifted by one on every axis.
                let i: usize = index.iter().zip(offset).map(|(x, y)| (x + 1) * y).sum();
                data[i] = f(index);
            }
        }

        // Row-major strides for a table of extent `dim[d] + 1` per axis;
        // the last axis is contiguous.
        let mut offset = [1; K];
        for d in (1..K).rev() {
            offset[d - 1] = offset[d] * (dim[d] + 1);
        }
        let size = offset[0] * (dim[0] + 1);
        let mut data = vec![M::unit(); size];
        fill(&dim, &offset, &mut data, &mut f, [0; K], 0);
        // Sweep one axis at a time, folding each entry with its predecessor
        // along that axis (skipping the axis's zero hyperplane).
        for d in 0..K {
            for i in 1..size {
                if i / offset[d] % (dim[d] + 1) != 0 {
                    data[i] = M::operate(&data[i], &data[i - offset[d]]);
                }
            }
        }
        Self { dim, offset, data }
    }
    /// Returns the fold of the prefix box `[0, x[0]) x ... x [0, x[K - 1])`;
    /// each coordinate may equal its dimension.
    ///
    /// # Panics
    /// Panics if any coordinate exceeds its dimension.
    pub fn accumulate(&self, x: [usize; K]) -> M::T {
        for (d, x) in x.into_iter().enumerate() {
            assert!(
                x <= self.dim[d],
                "index out of range: the len is {} but the index is {}",
                self.dim[d] + 1,
                x
            );
        }
        let p: usize = x.iter().zip(&self.offset).map(|(x, y)| x * y).sum();
        // SAFETY: every coordinate was checked against its axis extent above,
        // so `p` is a valid flat index into `data`.
        unsafe { self.data.get_unchecked(p) }.clone()
    }
}
287
impl<const K: usize, M> AccumulateKd<K, M>
where
    M: AbelianGroup,
{
    /// Folds the box `ranges[0] x ... x ranges[K - 1]` by inclusion-exclusion
    /// over the box's 2^K corner prefixes.
    ///
    /// # Panics
    /// Panics if any range is out of bounds or decreasing.
    pub fn fold<R>(&self, ranges: [R; K]) -> M::T
    where
        R: RangeBounds<usize>,
    {
        // Normalize each range to validated endpoints [l, r].
        let ranges: [_; K] = std::array::from_fn(|i| {
            let range = ranges[i]
                .to_range_bounded(0, self.dim[i])
                .expect("invalid range");
            let (l, r) = (range.start, range.end);
            assert!(l <= r, "bad range [{}, {})", l, r);
            [l, r]
        });
        let mut acc = M::unit();
        for bit in 0..1 << K {
            // Corner for this subset: axis d takes `r` when bit d is 0 and
            // `l` when bit d is 1 (`& 1 ^ 1` flips the selected bit).
            let p: usize = ranges
                .iter()
                .zip(&self.offset)
                .enumerate()
                .map(|(d, (range, offset))| range[(bit >> d) & 1 ^ 1] * offset)
                .sum();
            // SAFETY: l, r <= dim[d] on every axis, so `p` is in bounds.
            // An even number of `l` picks adds the corner; odd removes it.
            if bit.count_ones() & 1 == 0 {
                acc = M::operate(&acc, unsafe { self.data.get_unchecked(p) });
            } else {
                acc = M::rinv_operate(&acc, unsafe { self.data.get_unchecked(p) });
            }
        }
        acc
    }
}
321
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{
        algebra::{AdditiveOperation, LinearOperation, Magma, Unital},
        num::mint_basic::MInt1000000007,
        rand,
        tools::Xorshift,
    };
    // Non-commutative monoid, to exercise fold order in the 1-d case.
    type M = LinearOperation<MInt1000000007>;
    // Commutative group, required by the 2-d and K-d tables.
    type A = AdditiveOperation<MInt1000000007>;

    // Fixed typo in test names: "accumlate" -> "accumulate".
    #[test]
    fn test_accumulate() {
        let mut rng = Xorshift::default();
        const Q: usize = 1_000;
        const N: usize = 50;
        for n in 0..Q {
            let n = n % N;
            rand!(rng, v: [(.., ..); n], t: 0..2);
            // Alternate between the two construction paths.
            let acc: Accumulate<M> = if t == 0 {
                v.iter().cloned().collect()
            } else {
                Accumulate::from_vec(v.clone())
            };
            // Check every prefix and every subrange against a naive fold.
            for r in 0..=n {
                assert_eq!(
                    v[..r].iter().fold(M::unit(), |x, y| M::operate(&x, y)),
                    acc.accumulate(r)
                );
                for l in 0..=r {
                    assert_eq!(
                        v[l..r].iter().fold(M::unit(), |x, y| M::operate(&x, y)),
                        acc.fold(l..r)
                    );
                }
            }
        }
    }

    #[test]
    fn test_accumulate2d() {
        let mut rng = Xorshift::default();
        const Q: usize = 1_000;
        const N: usize = 10;
        for i in 0..Q {
            // `new` requires non-empty dimensions.
            let h = i % N + 1;
            let w = i / N % N + 1;
            rand!(rng, v: [[..; w]; h]);
            let acc2d = Accumulate2d::<A>::new(&v);
            for xr in 0..=h {
                for yr in 0..=w {
                    assert_eq!(
                        v[..xr]
                            .iter()
                            .flat_map(|v| v[..yr].iter())
                            .fold(A::unit(), |x, y| A::operate(&x, y)),
                        acc2d.accumulate(xr, yr)
                    );
                    for xl in 0..=xr {
                        for yl in 0..=yr {
                            assert_eq!(
                                v[xl..xr]
                                    .iter()
                                    .flat_map(|v| v[yl..yr].iter())
                                    .fold(A::unit(), |x, y| A::operate(&x, y)),
                                acc2d.fold(xl..xr, yl..yr)
                            );
                        }
                    }
                }
            }
        }
    }

    #[test]
    fn test_accumulate2d_from_fn() {
        let mut rng = Xorshift::default();
        const Q: usize = 1_000;
        const N: usize = 10;
        for i in 0..Q {
            // Unlike `new`, `from_fn` also covers zero-sized dimensions.
            let h = i % N;
            let w = i / N % N;
            rand!(rng, v: [[..; w]; h]);
            let acc2d = Accumulate2d::<A>::from_fn(h, w, |i, j| v[i][j]);
            for xr in 0..=h {
                for yr in 0..=w {
                    assert_eq!(
                        v[..xr]
                            .iter()
                            .flat_map(|v| v[..yr].iter())
                            .fold(A::unit(), |x, y| A::operate(&x, y)),
                        acc2d.accumulate(xr, yr)
                    );
                    for xl in 0..=xr {
                        for yl in 0..=yr {
                            assert_eq!(
                                v[xl..xr]
                                    .iter()
                                    .flat_map(|v| v[yl..yr].iter())
                                    .fold(A::unit(), |x, y| A::operate(&x, y)),
                                acc2d.fold(xl..xr, yl..yr)
                            );
                        }
                    }
                }
            }
        }
    }

    #[test]
    fn test_accumulatekd_from_fn_3d() {
        let mut rng = Xorshift::default();
        const N: usize = 5;
        for i in 0..N * N * N {
            let dim = [i % N, i / N % N, i / N / N % N];
            rand!(rng, v: [[[..; dim[2]]; dim[1]]; dim[0]]);
            let acc = AccumulateKd::<3, A>::from_fn(dim, |[i, j, k]| v[i][j][k]);
            for xr in 0..=dim[0] {
                for yr in 0..=dim[1] {
                    for zr in 0..=dim[2] {
                        assert_eq!(
                            v[..xr]
                                .iter()
                                .flat_map(|v| v[..yr].iter().flat_map(|v| v[..zr].iter()))
                                .fold(A::unit(), |x, y| A::operate(&x, y)),
                            acc.accumulate([xr, yr, zr])
                        );
                        for xl in 0..=xr {
                            for yl in 0..=yr {
                                for zl in 0..=zr {
                                    assert_eq!(
                                        v[xl..xr]
                                            .iter()
                                            .flat_map(|v| v[yl..yr]
                                                .iter()
                                                .flat_map(|v| v[zl..zr].iter()))
                                            .fold(A::unit(), |x, y| A::operate(&x, y)),
                                        acc.fold([xl..xr, yl..yr, zl..zr])
                                    );
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    #[test]
    fn test_accumulatekd_from_fn_4d() {
        let mut rng = Xorshift::default();
        const N: usize = 4;
        for i in 0..N * N * N * N {
            let dim = [i % N, i / N % N, i / N / N % N, i / N / N / N % N];
            rand!(rng, v: [[[[..; dim[3]]; dim[2]]; dim[1]]; dim[0]]);
            let acc = AccumulateKd::<4, A>::from_fn(dim, |[i, j, k, l]| v[i][j][k][l]);
            for xr in 0..=dim[0] {
                for yr in 0..=dim[1] {
                    for zr in 0..=dim[2] {
                        for wr in 0..=dim[3] {
                            assert_eq!(
                                v[..xr]
                                    .iter()
                                    .flat_map(|v| v[..yr]
                                        .iter()
                                        .flat_map(|v| v[..zr].iter().flat_map(|v| v[..wr].iter())))
                                    .fold(A::unit(), |x, y| A::operate(&x, y)),
                                acc.accumulate([xr, yr, zr, wr])
                            );
                            for xl in 0..=xr {
                                for yl in 0..=yr {
                                    for zl in 0..=zr {
                                        for wl in 0..=wr {
                                            assert_eq!(
                                                v[xl..xr]
                                                    .iter()
                                                    .flat_map(|v| v[yl..yr]
                                                        .iter()
                                                        .flat_map(|v| v[zl..zr]
                                                            .iter()
                                                            .flat_map(|v| v[wl..wr].iter())))
                                                    .fold(A::unit(), |x, y| A::operate(&x, y)),
                                                acc.fold([xl..xr, yl..yr, zl..zr, wl..wr])
                                            );
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}