competitive/data_structure/allocator.rs

use std::{
    marker::PhantomData,
    mem::{replace, size_of, take},
    ptr::{self, NonNull, read, write},
};

/// An allocator that hands out stable pointers to individually stored
/// values and moves the values back out on deallocation.
pub trait Allocator<T> {
    /// Stores `value` and returns a pointer that stays valid until it is
    /// passed to [`Allocator::deallocate`].
    fn allocate(&mut self, value: T) -> NonNull<T>;
    /// Moves the value out of the slot behind `ptr` and recycles the slot.
    /// Passing the same pointer twice is undefined behavior.
    fn deallocate(&mut self, ptr: NonNull<T>) -> T;
}

/// A pool allocator that groups allocations into chunks and recycles
/// deallocated slots through a free list.
#[derive(Debug)]
pub struct MemoryPool<T> {
    /// The chunk currently being filled; it is never reallocated, so
    /// pointers into it remain stable.
    pool: Vec<T>,
    /// Previously filled chunks, kept alive so their pointers stay valid.
    chunks: Vec<Vec<T>>,
    /// Free list of deallocated slots, reused LIFO by `allocate`.
    unused: Vec<NonNull<T>>,
}

impl<T> Default for MemoryPool<T> {
    fn default() -> Self {
        // Size the first chunk to roughly `CAP` bytes; the `max` guards
        // against division by zero for zero-sized types.
        Self::with_capacity(CAP / 1usize.max(size_of::<T>()))
    }
}

/// Byte budget for the default first chunk.
const CAP: usize = 1024;

impl<T> MemoryPool<T> {
    pub fn new() -> Self {
        Default::default()
    }
    pub fn with_capacity(capacity: usize) -> Self {
        // Capacity of at least 1 so that doubling in `allocate` makes progress.
        let pool = Vec::with_capacity(capacity.max(1));
        Self {
            pool,
            chunks: Vec::new(),
            unused: Vec::new(),
        }
    }
}

impl<T> Drop for MemoryPool<T> {
    fn drop(&mut self) {
        self.chunks.push(take(&mut self.pool));
        // Group the deallocated slots by the chunk that contains them.
        let mut removed = vec![vec![]; self.chunks.len()];
        for p in self.unused.iter() {
            let p = p.as_ptr();
            for (chunk, removed) in self.chunks.iter().zip(&mut removed).rev() {
                let ptr = chunk.as_ptr() as *mut T;
                let len = chunk.len();
                if ptr <= p && p < ptr.wrapping_add(len) {
                    removed.push(p);
                    // Chunks are disjoint allocations, so the first match wins.
                    break;
                }
            }
        }
        // Deallocated slots hold moved-out values that must not be dropped
        // again, so swap-remove each of them before the chunk is dropped.
        // The slots must be processed in decreasing address order: that way
        // the last element is never itself a stale slot, which ascending
        // order would copy back into the live range and double-drop.
        for (chunk, removed) in self.chunks.iter_mut().zip(&mut removed) {
            removed.sort_unstable();
            for &p in removed.iter().rev() {
                unsafe {
                    let len = chunk.len();
                    let base_ptr = chunk.as_mut_ptr();
                    ptr::copy(base_ptr.add(len - 1), p, 1);
                    chunk.set_len(len - 1);
                }
            }
        }
    }
}
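
// A worked example of the cleanup above (added for illustration; the values
// are hypothetical): suppose a chunk holds [a, b, c, d] and the slots of `b`
// and `d` were deallocated. In decreasing address order, `d`'s stale slot is
// trimmed off the end first ([a, b, c]), then `b`'s slot is overwritten with
// the last live element ([a, c]), so exactly `a` and `c` are dropped with
// the chunk. Ascending order would instead copy `d`'s stale bits over `b`
// and drop them a second time.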

impl<T> Allocator<T> for MemoryPool<T> {
    fn allocate(&mut self, value: T) -> NonNull<T> {
        if let Some(mut ptr) = self.unused.pop() {
            // Reuse a deallocated slot; its old value was already moved out,
            // so overwriting without dropping is correct.
            unsafe { write(ptr.as_mut(), value) };
            ptr
        } else {
            if self.pool.len() >= self.pool.capacity() {
                // The current chunk is full. Retire it (keeping it alive so
                // its pointers stay valid) and start a chunk twice as large.
                let new_capacity = self.pool.capacity() * 2;
                let new_pool = Vec::with_capacity(new_capacity);
                self.chunks.push(replace(&mut self.pool, new_pool));
            }
            let len = self.pool.len();
            // `push` never reallocates here, so the returned pointer is stable.
            debug_assert!(len < self.pool.capacity());
            self.pool.push(value);
            unsafe { NonNull::new_unchecked(self.pool.as_mut_ptr().add(len)) }
        }
    }

    fn deallocate(&mut self, ptr: NonNull<T>) -> T {
        // Record the slot for reuse and move the value out; the caller must
        // not pass the same pointer twice.
        self.unused.push(ptr);
        unsafe { read(ptr.as_ptr()) }
    }
}

/// An allocator that forwards each allocation to the global heap via `Box`.
#[derive(Debug, Default)]
pub struct BoxAllocator<T>(PhantomData<fn() -> T>);

impl<T> Allocator<T> for BoxAllocator<T> {
    fn allocate(&mut self, value: T) -> NonNull<T> {
        // `Box::leak` yields a `&'static mut T`, which converts to `NonNull`
        // without any unsafe code.
        NonNull::from(Box::leak(Box::new(value)))
    }
    fn deallocate(&mut self, ptr: NonNull<T>) -> T {
        // Rebuild the box so the heap allocation is freed once the value has
        // been moved out.
        unsafe { *Box::from_raw(ptr.as_ptr()) }
    }
}
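
// A minimal usage sketch (not part of the original API; the function name is
// illustrative only): both allocators return stable pointers that remain
// valid until they are deallocated or, for `MemoryPool`, the pool is dropped.
#[allow(dead_code)]
fn usage_sketch() {
    let mut pool = MemoryPool::<u32>::new();
    let p = pool.allocate(42);
    // Further allocations never move existing slots: full chunks are
    // retained rather than reallocated.
    let _q = pool.allocate(7);
    assert_eq!(unsafe { *p.as_ref() }, 42);
    // `deallocate` moves the value back out and recycles the slot.
    assert_eq!(pool.deallocate(p), 42);

    let mut boxed = BoxAllocator::<String>::default();
    let s = boxed.allocate(String::from("hi"));
    assert_eq!(boxed.deallocate(s), "hi");
}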

#[cfg(test)]
mod tests {
    use super::*;
    use crate::tools::Xorshift;
    use std::cell::RefCell;

    #[test]
    fn test_alloc() {
        let mut pool = MemoryPool::<usize>::with_capacity(2);
        let mut a = vec![];
        for i in 0..100 {
            let p = pool.allocate(i);
            a.push(p);
            // Every pointer handed out so far must still read back its
            // value, even after the pool has grown into new chunks.
            for (i, &p) in a.iter().enumerate() {
                assert_eq!(unsafe { *p.as_ref() }, i);
            }
        }
    }
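
    // An extra check (not in the original suite): `allocate` pops the free
    // list first, so a freshly deallocated slot is handed out again.
    #[test]
    fn test_reuse() {
        let mut pool = MemoryPool::<usize>::with_capacity(4);
        let p = pool.allocate(1);
        assert_eq!(pool.deallocate(p), 1);
        let q = pool.allocate(2);
        // The recycled slot is the one just freed.
        assert_eq!(p, q);
        assert_eq!(unsafe { *q.as_ref() }, 2);
    }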

    #[test]
    fn test_drop() {
        #[derive(Debug)]
        struct CheckDrop<T>(T);
        thread_local! {
            static CNT: RefCell<usize> = const { RefCell::new(0) };
        }
        impl<T> Drop for CheckDrop<T> {
            fn drop(&mut self) {
                CNT.with(|cnt| *cnt.borrow_mut() += 1);
            }
        }
        const Q: usize = 100;
        let mut cnt = 0usize;
        let mut rng = Xorshift::default();
        for _ in 0..10 {
            let mut pool = MemoryPool::new();
            let mut a = vec![];
            for _ in 0..Q {
                if a.is_empty() || rng.gen_bool(0.8) {
                    let k = rng.rand(!0);
                    a.push(pool.allocate(CheckDrop(k)));
                } else {
                    let i = rng.rand(a.len() as _) as usize;
                    let p = a.swap_remove(i);
                    // The value returned by `deallocate` is dropped here,
                    // bumping `CNT` by exactly one.
                    pool.deallocate(p);
                    cnt += 1;
                }
                assert_eq!(cnt, CNT.with(|cnt| *cnt.borrow()));
            }
            // Dropping the pool must drop each still-allocated value once.
            cnt += a.len();
        }
        assert_eq!(cnt, CNT.with(|cnt| *cnt.borrow()));
    }
}
158}