competitive/data_structure/binary_search_tree/
use super::{BstNode, BstNodePtr, BstNodeRef, BstRoot, BstSpec, node};
use std::{
    collections::HashMap,
    hash::{Hash, Hasher},
    ptr::NonNull,
    sync::atomic::{self, AtomicU64},
};

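/// Stable identity for a tree node: the raw node pointer paired with a
/// generation stamp. Equality and hashing use both fields, so an id issued
/// for a node that has since been freed will not collide with a later node
/// that happens to be allocated at the same address.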
pub struct BstNodeId<Spec>
where
    Spec: BstSpec,
{
    node: NonNull<BstNode<Spec::Data, Spec::Parent>>,
    generation: u64,
}

impl<Spec> Clone for BstNodeId<Spec>
where
    Spec: BstSpec,
{
    fn clone(&self) -> Self {
        *self
    }
}

impl<Spec> Copy for BstNodeId<Spec> where Spec: BstSpec {}

impl<Spec> PartialEq for BstNodeId<Spec>
where
    Spec: BstSpec,
{
    fn eq(&self, other: &Self) -> bool {
        self.node == other.node && self.generation == other.generation
    }
}

impl<Spec> Eq for BstNodeId<Spec> where Spec: BstSpec {}

impl<Spec> Hash for BstNodeId<Spec>
where
    Spec: BstSpec,
{
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.node.hash(state);
        self.generation.hash(state);
    }
}

impl<Spec> BstNodeId<Spec>
where
    Spec: BstSpec,
{
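    /// # Safety
    ///
    /// The node this id refers to must still be alive and owned by the tree
    /// behind `_root`; the root is borrowed only to bound the lifetime of the
    /// returned shared reference.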
    pub unsafe fn reborrow<'a>(
        self,
        _root: &'a Option<BstRoot<Spec>>,
    ) -> BstNodeRef<node::marker::Immut<'a>, Spec> {
        unsafe { BstNodeRef::new_unchecked(self.node) }
    }

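    /// # Safety
    ///
    /// Same requirement as [`Self::reborrow`], with the root borrowed
    /// exclusively so the returned reference may mutate the node's data.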
    pub unsafe fn reborrow_datamut<'a>(
        self,
        _root: &'a mut Option<BstRoot<Spec>>,
    ) -> BstNodeRef<node::marker::DataMut<'a>, Spec> {
        unsafe { BstNodeRef::new_unchecked(self.node) }
    }

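    /// # Safety
    ///
    /// Same requirement as [`Self::reborrow`]; grants full mutable access to
    /// the node, including its structural links.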
    pub unsafe fn reborrow_mut<'a>(
        self,
        _root: &'a mut Option<BstRoot<Spec>>,
    ) -> BstNodeRef<node::marker::Mut<'a>, Spec> {
        unsafe { BstNodeRef::new_unchecked(self.node) }
    }
}

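/// Process-wide counter backing generation stamps; each registration takes a
/// distinct value, so ids are never accidentally reused across trees.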
static GENERATION: AtomicU64 = AtomicU64::new(0);

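/// Registry of currently valid node ids, mapping each registered node pointer
/// to the generation stamp it was issued with.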
pub struct BstNodeIdManager<Spec>
where
    Spec: BstSpec,
{
    node_ids: HashMap<BstNodePtr<Spec::Data, Spec::Parent>, u64>,
}

impl<Spec> Default for BstNodeIdManager<Spec>
where
    Spec: BstSpec,
{
    fn default() -> Self {
        Self {
            node_ids: Default::default(),
        }
    }
}

impl<Spec> BstNodeIdManager<Spec>
where
    Spec: BstSpec,
{
    pub fn new() -> Self {
        Self::default()
    }

    pub fn is_empty(&self) -> bool {
        self.node_ids.is_empty()
    }

    pub fn len(&self) -> usize {
        self.node_ids.len()
    }

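    /// Returns `true` if `node_id` is still registered: the pointer must be
    /// present and its stored generation must match.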
    pub fn contains(&self, node_id: &BstNodeId<Spec>) -> bool {
        self.node_ids
            .get(&node_id.node)
            .is_some_and(|&g| g == node_id.generation)
    }

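    /// Recovers the id previously issued for `node`, if the node is still
    /// registered.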
    pub fn registered_node_id(
        &self,
        node: BstNodeRef<node::marker::Immut<'_>, Spec>,
    ) -> Option<BstNodeId<Spec>> {
        self.node_ids.get(&node.node).map(|&generation| BstNodeId {
            node: node.node,
            generation,
        })
    }

    pub fn clear(&mut self) {
        self.node_ids.clear();
    }

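    /// Issues and records a fresh id for the root node. `fetch_update` with
    /// `checked_add` makes the counter panic on `u64` overflow rather than
    /// silently wrap and reuse a generation.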
    pub fn register(&mut self, node: &BstRoot<Spec>) -> BstNodeId<Spec> {
        let node_id = BstNodeId {
            node: node.node,
            generation: GENERATION
                .fetch_update(atomic::Ordering::Relaxed, atomic::Ordering::Relaxed, |x| {
                    x.checked_add(1)
                })
                .expect("Generation counter overflow"),
        };
        self.node_ids.insert(node_id.node, node_id.generation);
        node_id
    }

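    /// Drops the node from the registry; `contains` returns `false` for any
    /// id previously issued for it.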
    pub fn unregister(&mut self, node_id: BstNodeId<Spec>) {
        self.node_ids.remove(&node_id.node);
    }
}
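
// Expected lifecycle, sketched as a comment because constructing a tree needs
// a concrete `Spec: BstSpec` from elsewhere in the crate (`root` below is an
// assumed, illustrative `BstRoot<Spec>` value, not something defined here):
//
//     let mut manager = BstNodeIdManager::<Spec>::new();
//     let id = manager.register(&root);   // fresh generation stamp
//     assert!(manager.contains(&id));
//     manager.unregister(id);             // id is now stale
//     assert!(!manager.contains(&id));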