use rustc_hash::FxBuildHasher;
use std::collections::BTreeMap;
use std::hash::{BuildHasher, Hash, Hasher};
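// Private, sealed traits abstracting over the strong/weak halves of a
// reference-counted pointer (`Rc`/`rc::Weak`, `Arc`/`sync::Weak`), so the
// interner can be generic over both without letting downstream crates add
// their own pointer types.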
mod private {

    pub trait Sealed {}
    pub trait Weak: Sealed {
        type A: ?Sized;
        type Strong: Ptr<A = Self::A, Weak = Self>;
        fn upgrade_ptr(&self) -> Option<Self::Strong>;
        fn value(&self) -> *const Self::A;
        fn is_alive(&self) -> bool;
    }
    pub trait Ptr: Sealed {
        type A: ?Sized;
        type Weak: Weak<A = Self::A, Strong = Self>;
        fn inner(&self) -> &Self::A;
        fn value(&self) -> *const Self::A;
        fn weak(&self) -> Self::Weak;
    }
    impl<A: ?Sized> Sealed for std::rc::Rc<A> {}
    impl<A: ?Sized> Sealed for std::sync::Arc<A> {}
    impl<A: ?Sized> Sealed for std::rc::Weak<A> {}
    impl<A: ?Sized> Sealed for std::sync::Weak<A> {}
    impl<A: ?Sized> Ptr for std::rc::Rc<A> {
        type A = A;
        type Weak = std::rc::Weak<A>;
        #[inline]
        fn inner(&self) -> &A {
            self
        }
        #[inline]
        fn value(&self) -> *const A {
            Self::as_ptr(self)
        }
        #[inline]
        fn weak(&self) -> Self::Weak {
            Self::downgrade(self)
        }
    }
    impl<A: ?Sized> Weak for std::rc::Weak<A> {
        type A = A;
        type Strong = std::rc::Rc<A>;
        #[inline]
        fn upgrade_ptr(&self) -> Option<Self::Strong> {
            Self::upgrade(self)
        }
        #[inline]
        fn value(&self) -> *const A {
            self.as_ptr()
        }
        #[inline]
        fn is_alive(&self) -> bool {
            self.strong_count() > 0
        }
    }
    impl<A: ?Sized> Ptr for std::sync::Arc<A> {
        type A = A;
        type Weak = std::sync::Weak<A>;
        #[inline]
        fn inner(&self) -> &A {
            self
        }
        #[inline]
        fn value(&self) -> *const A {
            Self::as_ptr(self)
        }
        #[inline]
        fn weak(&self) -> Self::Weak {
            Self::downgrade(self)
        }
    }
    impl<A: ?Sized> Weak for std::sync::Weak<A> {
        type A = A;
        type Strong = std::sync::Arc<A>;
        #[inline]
        fn upgrade_ptr(&self) -> Option<Self::Strong> {
            Self::upgrade(self)
        }
        #[inline]
        fn value(&self) -> *const A {
            self.as_ptr()
        }
        #[inline]
        fn is_alive(&self) -> bool {
            self.strong_count() > 0
        }
    }
}
pub use private::{Ptr, Weak};
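/// Handle to an interned value, wrapping the strong pointer type `P`.
///
/// `Hash`, `Eq`, and `Ord` are implemented on the pointer address rather than
/// the pointed-to value, so comparisons between handles are O(1).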
#[derive(Clone, Debug)]
pub struct Interned<P>(P);
impl<P: Ptr> AsRef<P::A> for Interned<P> {
    #[inline]
    fn as_ref(&self) -> &P::A {
        self.0.inner()
    }
}
impl<P: Ptr> Hash for Interned<P> {
    #[inline]
    #[allow(clippy::ptr_as_ptr)]
    fn hash<H: Hasher>(&self, state: &mut H) {
        (self.0.value() as *const () as usize).hash(state);
    }
}

impl<P: Ptr> PartialOrd for Interned<P> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl<P: Ptr> Ord for Interned<P> {
    #[inline]
    #[allow(clippy::ptr_as_ptr)]
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        (self.0.value() as *const () as usize).cmp(&(other.0.value() as *const () as usize))
    }
}

impl<P: Ptr> PartialEq for Interned<P> {
    #[inline]
    #[allow(clippy::ptr_as_ptr)]
    #[allow(clippy::ptr_eq)]
    fn eq(&self, other: &Self) -> bool {
        (self.0.value() as *const () as usize) == (other.0.value() as *const () as usize)
    }
}
impl<P: Ptr> Eq for Interned<P> {}
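// `triomphe::Arc` does not implement the sealed `Ptr`/`Weak` traits (it has no
// weak-pointer counterpart), so `Interned<triomphe::Arc<A>>` gets dedicated
// pointer-identity impls below.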
impl<A: ?Sized> AsRef<A> for Interned<triomphe::Arc<A>> {
    #[inline]
    fn as_ref(&self) -> &A {
        self.0.as_ref()
    }
}
impl<A: ?Sized> Hash for Interned<triomphe::Arc<A>> {
    #[inline]
    #[allow(clippy::ptr_as_ptr)]
    #[allow(clippy::ref_as_ptr)]
    fn hash<H: Hasher>(&self, state: &mut H) {
        (self.as_ref() as *const _ as *const () as usize).hash(state);
    }
}

impl<A: ?Sized> PartialEq for Interned<triomphe::Arc<A>> {
    #[inline]
    #[allow(clippy::ref_as_ptr)]
    #[allow(clippy::ptr_as_ptr)]
    #[allow(clippy::ptr_eq)]
    fn eq(&self, other: &Self) -> bool {
        (self.as_ref() as *const _ as *const () as usize)
            == (other.as_ref() as *const _ as *const () as usize)
    }
}
impl<A: ?Sized> Eq for Interned<triomphe::Arc<A>> {}
impl<A: ?Sized> PartialOrd for Interned<triomphe::Arc<A>> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl<A: ?Sized> Ord for Interned<triomphe::Arc<A>> {
    #[inline]
    #[allow(clippy::ptr_as_ptr)]
    #[allow(clippy::ref_as_ptr)]
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        (self.as_ref() as *const _ as *const () as usize)
            .cmp(&(other.as_ref() as *const _ as *const () as usize))
    }
}

pub type RcInterned<A> = Interned<std::rc::Rc<A>>;
pub type ArcInterned<A> = Interned<std::sync::Arc<A>>;
pub type TArcInterned<A> = Interned<triomphe::Arc<A>>;
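/// A garbage-collected interner keyed by value hash.
///
/// Values are bucketed in a `BTreeMap` under their `FxHasher` hash; each
/// bucket keeps up to `N` pointers inline in a `SmallVec`. For the `Rc`/`Arc`
/// interners the buckets hold weak pointers, so the interner alone never keeps
/// a value alive. When `GC > 0`, `get_or_intern` triggers `gc` once the bucket
/// count exceeds `GC`.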
pub struct GCInterner<W, const N: usize = 4, const GC: usize = 0> {
    store: BTreeMap<u64, smallvec::SmallVec<W, N>>,
}
impl<W, const N: usize, const GC: usize> Default for GCInterner<W, N, GC> {
    fn default() -> Self {
        Self {
            store: BTreeMap::default(),
        }
    }
}
impl<W, const N: usize, const GC: usize> GCInterner<W, N, GC> {
    #[inline]
    fn hash<V: Hash + ?Sized>(v: &V) -> u64 {
        FxBuildHasher.hash_one(v)
    }
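    /// Number of hash buckets currently tracked, not the number of interned
    /// values: dead entries are only dropped by `gc`, and colliding values
    /// share a bucket.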
    #[inline]
    #[must_use]
    pub fn len(&self) -> usize {
        self.store.len()
    }
    #[inline]
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.store.is_empty()
    }
}
impl<W: Weak, const N: usize, const GC: usize> GCInterner<W, N, GC> {
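    /// Drops weak pointers whose values have been freed and removes buckets
    /// that end up empty.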
    #[inline]
    pub fn gc(&mut self) {
        self.store.retain(|_, v| {
            v.retain(|weak| weak.is_alive());
            !v.is_empty()
        });
    }
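    /// Returns a handle to an already-interned value equal to `v`, or interns
    /// `v` by converting it into the strong pointer type. Runs `gc` afterwards
    /// when `GC > 0` and the bucket count exceeds `GC`.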
    pub fn get_or_intern<V>(&mut self, v: V) -> Interned<W::Strong>
    where
        for<'a> &'a W::A: PartialEq<V>,
        V: Hash + PartialEq + Eq + Into<W::Strong>,
    {
        let hash = Self::hash(&v);
        let ret = match self.store.entry(hash) {
            std::collections::btree_map::Entry::Occupied(mut e) => {
                let vec = e.get_mut();
                let r = vec.iter().find_map(|weak| {
                    // Skip entries whose value has already been dropped.
                    if !weak.is_alive() {
                        return None;
                    }
                    // SAFETY: `is_alive` confirmed a non-zero strong count, so
                    // the pointee has not been dropped.
                    let p = unsafe { weak.value().as_ref() }?;
                    if p == v {
                        weak.upgrade_ptr()
                    } else {
                        None
                    }
                });
                r.map_or_else(
                    || {
                        let p = v.into();
                        vec.push(p.weak());
                        p
                    },
                    |r| r,
                )
            }
            std::collections::btree_map::Entry::Vacant(e) => {
                let p = v.into();
                e.insert(smallvec::smallvec![p.weak()]);
                p
            }
        };
        if GC > 0 && self.store.len() > GC {
            self.gc();
        }
        Interned(ret)
    }
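    /// Looks up an interned value equal to `v` without interning it.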
    pub fn get<V>(&self, v: &V) -> Option<Interned<W::Strong>>
    where
        for<'a> &'a W::A: PartialEq<V>,
        V: Hash + PartialEq + Eq,
    {
        let hash = Self::hash(v);
        self.store
            .get(&hash)
            .and_then(|vec| {
                vec.iter().find_map(|weak| {
                    if !weak.is_alive() {
                        return None;
                    }
                    // SAFETY: `is_alive` confirmed a non-zero strong count, so
                    // the pointee has not been dropped.
                    let p = unsafe { weak.value().as_ref() }?;
                    if p == *v {
                        weak.upgrade_ptr()
                    } else {
                        None
                    }
                })
            })
            .map(Interned)
    }
}
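// `triomphe::Arc` has no weak pointers, so this interner stores strong `Arc`s;
// `gc` drops entries that only the interner itself still references
// (`Arc::is_unique`).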
impl<T: ?Sized + Hash, const N: usize, const GC: usize> GCInterner<triomphe::Arc<T>, N, GC> {
    #[inline]
    pub fn gc(&mut self) {
        self.store.retain(|_, v| {
            v.retain(|arc| !arc.is_unique());
            !v.is_empty()
        });
    }
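    /// Returns a handle to an already-interned value equal to `v`, or interns
    /// `v` by converting it into a `triomphe::Arc`.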
    pub fn get_or_intern<V>(&mut self, v: V) -> Interned<triomphe::Arc<T>>
    where
        for<'a> &'a T: PartialEq<V>,
        V: Hash + PartialEq + Eq + Into<triomphe::Arc<T>>,
    {
        let hash = Self::hash(&v);
        let ret = match self.store.entry(hash) {
            std::collections::btree_map::Entry::Occupied(mut e) => {
                let vec = e.get_mut();
                let r = vec.iter().find_map(|arc| {
                    if arc.as_ref() == v {
                        Some(arc.clone())
                    } else {
                        None
                    }
                });
                r.map_or_else(
                    || {
                        let p = v.into();
                        vec.push(p.clone());
                        p
                    },
                    |r| r,
                )
            }
            std::collections::btree_map::Entry::Vacant(e) => {
                let p = v.into();
                e.insert(smallvec::smallvec![p.clone()]);
                p
            }
        };
        // `r` holds a clone of the new entry, so it is not unique and survives
        // the collection below.
        let r = Interned(ret);
        if GC > 0 && self.store.len() > GC {
            self.gc();
        }
        r
    }
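    /// Looks up an interned value equal to `v` without interning it.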
    pub fn get<V>(&self, v: &V) -> Option<Interned<triomphe::Arc<T>>>
    where
        for<'a> &'a T: PartialEq<V>,
        V: Hash + PartialEq + Eq,
    {
        let hash = Self::hash(v);
        self.store
            .get(&hash)
            .and_then(|vec| {
                vec.iter().find_map(|arc| {
                    if arc.as_ref() == *v {
                        Some(arc.clone())
                    } else {
                        None
                    }
                })
            })
            .map(Interned)
    }
}
pub type RcInterner<T, const N: usize = 4, const GC: usize = 0> =
    GCInterner<std::rc::Weak<T>, N, GC>;
pub type ArcInterner<T, const N: usize = 4, const GC: usize = 0> =
    GCInterner<std::sync::Weak<T>, N, GC>;
pub type TArcInterner<T, const N: usize = 4, const GC: usize = 0> =
    GCInterner<triomphe::Arc<T>, N, GC>;
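#[cfg(test)]
mod tests {
    // Minimal usage sketch for the `Rc`-backed interner. It relies only on the
    // API defined above: equal inputs intern to pointer-identical handles, and
    // `gc` clears entries once every strong handle is gone.
    use super::*;

    #[test]
    fn interns_equal_values_to_the_same_pointer() {
        let mut interner = RcInterner::<str>::default();

        let a: RcInterned<str> = interner.get_or_intern("hello".to_string());
        let b = interner.get_or_intern("hello".to_string());
        // `Interned` compares by pointer identity, so both handles refer to
        // the same `Rc<str>` allocation.
        assert_eq!(a, b);
        assert_eq!(a.as_ref(), "hello");

        // A different value gets its own allocation.
        let c = interner.get_or_intern("world".to_string());
        assert_ne!(a, c);

        // Once all strong handles are dropped, `gc` removes the dead weak
        // pointers and their now-empty buckets.
        drop((a, b, c));
        interner.gc();
        assert!(interner.is_empty());
    }
}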