Struct SegmentTreeMap

pub struct SegmentTreeMap<M>
where
    M: Monoid,
{
    n: usize,
    seg: HashMap<usize, M::T>,
    u: M::T,
}

Fields

n: usize
seg: HashMap<usize, M::T>
u: M::T

Implementations


impl<M> SegmentTreeMap<M>
where M: Monoid,


pub fn new(n: usize) -> Self
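
A minimal usage sketch, not taken from the crate's documentation. The import paths and the AdditiveOperation monoid are assumptions about how this library is organized; the point being illustrated is that the tree is backed by a HashMap, so n can be very large and only positions that were actually written occupy memory.

    // Hypothetical paths; adjust to the crate's actual module layout.
    use competitive::algebra::AdditiveOperation;
    use competitive::data_structure::SegmentTreeMap;

    // Sparse segment tree over a huge index space: untouched positions
    // implicitly hold the monoid unit (0 for an additive monoid).
    let mut seg = SegmentTreeMap::<AdditiveOperation<i64>>::new(1_000_000_000);
    seg.set(3, 5);
    assert_eq!(seg.get(3), 5);
    assert_eq!(seg.get(999_999_999), 0); // never written, folds as the unit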


fn get_ref(&self, k: usize) -> &M::T

Examples found in repository
crates/competitive/src/data_structure/segment_tree_map.rs (line 66)
59    pub fn set(&mut self, k: usize, x: M::T) {
60        debug_assert!(k < self.n);
61        let mut k = k + self.n;
62        *self.seg.entry(k).or_insert(M::unit()) = x;
63        k /= 2;
64        while k > 0 {
65            *self.seg.entry(k).or_insert(M::unit()) =
66                M::operate(self.get_ref(2 * k), self.get_ref(2 * k + 1));
67            k /= 2;
68        }
69    }
70    pub fn update(&mut self, k: usize, x: M::T) {
71        debug_assert!(k < self.n);
72        let mut k = k + self.n;
73        let t = self.seg.entry(k).or_insert(M::unit());
74        *t = M::operate(t, &x);
75        k /= 2;
76        while k > 0 {
77            *self.seg.entry(k).or_insert(M::unit()) =
78                M::operate(self.get_ref(2 * k), self.get_ref(2 * k + 1));
79            k /= 2;
80        }
81    }
82    pub fn get(&self, k: usize) -> M::T {
83        debug_assert!(k < self.n);
84        self.seg.get(&(k + self.n)).cloned().unwrap_or_else(M::unit)
85    }
86    pub fn fold<R>(&self, range: R) -> M::T
87    where
88        R: RangeBounds<usize>,
89    {
90        let range = range.to_range();
91        debug_assert!(range.end <= self.n);
92        let mut l = range.start + self.n;
93        let mut r = range.end + self.n;
94        let mut vl = M::unit();
95        let mut vr = M::unit();
96        while l < r {
97            if l & 1 != 0 {
98                vl = M::operate(&vl, self.get_ref(l));
99                l += 1;
100            }
101            if r & 1 != 0 {
102                r -= 1;
103                vr = M::operate(self.get_ref(r), &vr);
104            }
105            l /= 2;
106            r /= 2;
107        }
108        M::operate(&vl, &vr)
109    }
110    fn bisect_perfect<F>(&self, mut pos: usize, mut acc: M::T, f: F) -> (usize, M::T)
111    where
112        F: Fn(&M::T) -> bool,
113    {
114        while pos < self.n {
115            pos <<= 1;
116            let nacc = M::operate(&acc, self.get_ref(pos));
117            if !f(&nacc) {
118                acc = nacc;
119                pos += 1;
120            }
121        }
122        (pos - self.n, acc)
123    }
124    fn rbisect_perfect<F>(&self, mut pos: usize, mut acc: M::T, f: F) -> (usize, M::T)
125    where
126        F: Fn(&M::T) -> bool,
127    {
128        while pos < self.n {
129            pos = pos * 2 + 1;
130            let nacc = M::operate(self.get_ref(pos), &acc);
131            if !f(&nacc) {
132                acc = nacc;
133                pos -= 1;
134            }
135        }
136        (pos - self.n, acc)
137    }
138    /// Returns the first index that satisfies an accumulative predicate.
139    pub fn position_acc<R, F>(&self, range: R, f: F) -> Option<usize>
140    where
141        R: RangeBounds<usize>,
142        F: Fn(&M::T) -> bool,
143    {
144        let range = range.to_range();
145        debug_assert!(range.end <= self.n);
146        let mut l = range.start + self.n;
147        let r = range.end + self.n;
148        let mut k = 0usize;
149        let mut acc = M::unit();
150        while l < r >> k {
151            if l & 1 != 0 {
152                let nacc = M::operate(&acc, self.get_ref(l));
153                if f(&nacc) {
154                    return Some(self.bisect_perfect(l, acc, f).0);
155                }
156                acc = nacc;
157                l += 1;
158            }
159            l >>= 1;
160            k += 1;
161        }
162        for k in (0..k).rev() {
163            let r = r >> k;
164            if r & 1 != 0 {
165                let nacc = M::operate(&acc, self.get_ref(r - 1));
166                if f(&nacc) {
167                    return Some(self.bisect_perfect(r - 1, acc, f).0);
168                }
169                acc = nacc;
170            }
171        }
172        None
173    }
174    /// Returns the last index that satisfies an accumulative predicate.
175    pub fn rposition_acc<R, F>(&self, range: R, f: F) -> Option<usize>
176    where
177        R: RangeBounds<usize>,
178        F: Fn(&M::T) -> bool,
179    {
180        let range = range.to_range();
181        debug_assert!(range.end <= self.n);
182        let mut l = range.start + self.n;
183        let mut r = range.end + self.n;
184        let mut c = 0usize;
185        let mut k = 0usize;
186        let mut acc = M::unit();
187        while l >> k < r {
188            c <<= 1;
189            if l & (1 << k) != 0 {
190                l += 1 << k;
191                c += 1;
192            }
193            if r & 1 != 0 {
194                r -= 1;
195                let nacc = M::operate(self.get_ref(r), &acc);
196                if f(&nacc) {
197                    return Some(self.rbisect_perfect(r, acc, f).0);
198                }
199                acc = nacc;
200            }
201            r >>= 1;
202            k += 1;
203        }
204        for k in (0..k).rev() {
205            if c & 1 != 0 {
206                l -= 1 << k;
207                let l = l >> k;
208                let nacc = M::operate(self.get_ref(l), &acc);
209                if f(&nacc) {
210                    return Some(self.rbisect_perfect(l, acc, f).0);
211                }
212                acc = nacc;
213            }
214            c >>= 1;
215        }
216        None
217    }

pub fn set(&mut self, k: usize, x: M::T)


pub fn update(&mut self, k: usize, x: M::T)


pub fn get(&self, k: usize) -> M::T
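
A short sketch of how set and update differ, under the same additive-monoid assumption as the sketch for new: set overwrites the stored value, while update combines the new value into the old one with the monoid operation.

    // Imports as in the sketch for new (assumed paths).
    let mut seg = SegmentTreeMap::<AdditiveOperation<i64>>::new(100);
    seg.set(4, 10);   // position 4 now holds 10
    seg.update(4, 3); // combined with the old value: 10 + 3
    seg.set(7, 1);    // overwritten regardless of what was there before
    assert_eq!(seg.get(4), 13);
    assert_eq!(seg.get(7), 1);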


pub fn fold<R>(&self, range: R) -> M::T
where R: RangeBounds<usize>,
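
fold combines the values over an index range; any RangeBounds<usize> is accepted. Continuing the seg built in the sketch just above (additive monoid assumed):

    assert_eq!(seg.fold(0..5), 13); // indices 0..=4
    assert_eq!(seg.fold(5..7), 0);  // an untouched range folds to the unit
    assert_eq!(seg.fold(..), 14);   // the whole tree: 13 + 1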


fn bisect_perfect<F>(&self, pos: usize, acc: M::T, f: F) -> (usize, M::T)
where F: Fn(&M::T) -> bool,

Examples found in repository
crates/competitive/src/data_structure/segment_tree_map.rs (line 154): called from position_acc, whose full source appears in the example above.

fn rbisect_perfect<F>(&self, pos: usize, acc: M::T, f: F) -> (usize, M::T)
where F: Fn(&M::T) -> bool,

Examples found in repository
crates/competitive/src/data_structure/segment_tree_map.rs (line 197): called from rposition_acc, whose full source appears in the example above.

pub fn position_acc<R, F>(&self, range: R, f: F) -> Option<usize>
where R: RangeBounds<usize>, F: Fn(&M::T) -> bool,

Returns the first index that satisfies an accumulative predicate.
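
Judging from the scraped source above, this returns the first index i in the range such that the fold over range.start..=i satisfies f; the predicate should be monotone along the accumulation for the result to be meaningful. A sketch under the same assumed additive monoid as the earlier examples:

    let mut seg = SegmentTreeMap::<AdditiveOperation<i64>>::new(1_000_000);
    seg.set(2, 4);
    seg.set(5, 7);
    // First index i with fold(0..=i) >= 10: the prefix sum reaches 11 at i = 5.
    assert_eq!(seg.position_acc(0..1_000_000, |&s| s >= 10), Some(5));
    // No prefix ever reaches 100.
    assert_eq!(seg.position_acc(0..1_000_000, |&s| s >= 100), None);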


pub fn rposition_acc<R, F>(&self, range: R, f: F) -> Option<usize>
where R: RangeBounds<usize>, F: Fn(&M::T) -> bool,

Returns the last index that satisfies an accumulative predicate.
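
The mirror of position_acc: values are accumulated from the right end of the range, so this finds the last index i such that the fold over i..range.end satisfies f. Reusing the seg from the position_acc sketch above:

    // Suffix folds: fold(2..) = 11, fold(3..) = 7, fold(6..) = 0.
    assert_eq!(seg.rposition_acc(0..1_000_000, |&s| s >= 10), Some(2));
    assert_eq!(seg.rposition_acc(0..1_000_000, |&s| s >= 5), Some(5));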


impl<M> SegmentTreeMap<M>
where M: AbelianMonoid,


pub fn fold_all(&self) -> M::T
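
Only available when the monoid is commutative, which lets the stored values be combined in any order; the result should agree with folding the entire index range. Continuing the earlier additive-monoid sketches:

    assert_eq!(seg.fold_all(), seg.fold(..));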

Trait Implementations

impl<M> Clone for SegmentTreeMap<M>
where M: Monoid,

fn clone(&self) -> Self

Returns a duplicate of the value.

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source.

impl<M> Debug for SegmentTreeMap<M>
where M: Monoid<T: Debug>,

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter.

Auto Trait Implementations

impl<M> Freeze for SegmentTreeMap<M>
where <M as Magma>::T: Freeze,

impl<M> RefUnwindSafe for SegmentTreeMap<M>
where <M as Magma>::T: RefUnwindSafe,

impl<M> Send for SegmentTreeMap<M>
where <M as Magma>::T: Send,

impl<M> Sync for SegmentTreeMap<M>
where <M as Magma>::T: Sync,

impl<M> Unpin for SegmentTreeMap<M>
where <M as Magma>::T: Unpin,

impl<M> UnwindSafe for SegmentTreeMap<M>
where <M as Magma>::T: UnwindSafe,

Blanket Implementations

impl<T> Any for T
where T: 'static + ?Sized,

fn type_id(&self) -> TypeId

Gets the TypeId of self.

impl<T> Borrow<T> for T
where T: ?Sized,

fn borrow(&self) -> &T

Immutably borrows from an owned value.

impl<T> BorrowMut<T> for T
where T: ?Sized,

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value.

impl<T> CloneToUninit for T
where T: Clone,

unsafe fn clone_to_uninit(&self, dest: *mut u8)

This is a nightly-only experimental API (clone_to_uninit). Performs copy-assignment from self to dest.

impl<T> From<T> for T

fn from(t: T) -> T

Returns the argument unchanged.

impl<T, U> Into<U> for T
where U: From<T>,

fn into(self) -> U

Calls U::from(self). That is, this conversion is whatever the implementation of From<T> for U chooses to do.

impl<T> ToOwned for T
where T: Clone,

type Owned = T

The resulting type after obtaining ownership.

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning.

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning.

impl<T, U> TryFrom<U> for T
where U: Into<T>,

type Error = Infallible

The type returned in the event of a conversion error.

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.