#![feature(rustc_private)]

extern crate rustc_abi;
extern crate rustc_serialize;

use std::{
    fmt::{self, Debug, Display},
    hash::{BuildHasherDefault, Hash, Hasher},
    ops::Deref,
    sync::{Arc, OnceLock},
};

use dashmap::{DashMap, SharedValue};
use hashbrown::{HashMap, hash_map::RawEntryMut};
use rustc_hash::FxHasher;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};

type InternMap<T> = DashMap<Arc<T>, (), BuildHasherDefault<FxHasher>>;
type Guard<T> = dashmap::RwLockWriteGuard<
    'static,
    HashMap<Arc<T>, SharedValue<()>, BuildHasherDefault<FxHasher>>,
>;

/// A handle to a value stored in the global, per-type intern map.
///
/// Equal values share a single `Arc` allocation, so equality and hashing of
/// `Interned` reduce to pointer comparisons.
pub struct Interned<T: Internable + ?Sized> {
    arc: Arc<T>,
}

/// An interned slice.
pub type List<T> = Interned<[T]>;

impl<T> Default for List<T>
where
    [T]: Internable,
{
    fn default() -> Self {
        Self::from_arr([])
    }
}

impl<T: Internable> Interned<T> {
    pub fn new(obj: T) -> Self {
        let (mut shard, hash) = Self::select(&obj);
        match shard.raw_entry_mut().from_key_hashed_nocheck(hash, &obj) {
            // The value is already interned: hand out another clone of the
            // existing `Arc`.
            RawEntryMut::Occupied(occ) => Self { arc: occ.key().clone() },
            // First time this value is seen: allocate it and store the `Arc`
            // as the map key.
            RawEntryMut::Vacant(vac) => Self {
                arc: vac
                    .insert_hashed_nocheck(hash, Arc::new(obj), SharedValue::new(()))
                    .0
                    .clone(),
            },
        }
    }
}

impl<T> List<T>
where
    [T]: Internable,
{
    /// Shared interning path for slices: `to_arc` is only called when the
    /// slice is not interned yet, so the `Arc<[T]>` allocation is skipped on
    /// the hit path.
    fn list_with<S>(obj: S, to_arc: impl FnOnce(S) -> Arc<[T]>) -> List<T>
    where
        S: std::borrow::Borrow<[T]>,
    {
        let (mut shard, hash) = Self::select(obj.borrow());
        match shard
            .raw_entry_mut()
            .from_key_hashed_nocheck(hash, obj.borrow())
        {
            RawEntryMut::Occupied(occ) => Self { arc: occ.key().clone() },
            RawEntryMut::Vacant(vac) => Self {
                arc: vac
                    .insert_hashed_nocheck(hash, to_arc(obj), SharedValue::new(()))
                    .0
                    .clone(),
            },
        }
    }

    pub fn from_vec(vec: Vec<T>) -> List<T> {
        List::list_with(vec, Arc::from)
    }

    pub fn from_arr<const N: usize>(arr: [T; N]) -> List<T> {
        List::list_with(arr, |arr| Arc::new(arr))
    }

    pub fn empty() -> List<T> {
        Self::from_arr([])
    }

    pub fn singleton(x: T) -> List<T> {
        Self::from_arr([x])
    }
}

impl<T> List<T>
where
    T: Clone,
    [T]: Internable,
{
    pub fn from_slice(slice: &[T]) -> List<T> {
        List::list_with(slice, Arc::from)
    }
}

impl<T> FromIterator<T> for List<T>
where
    [T]: Internable,
{
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        Self::from_vec(iter.into_iter().collect())
    }
}

impl<T> From<&[T]> for Interned<[T]>
where
    [T]: Internable,
    T: Clone,
{
    fn from(slice: &[T]) -> Self {
        List::from_slice(slice)
    }
}

impl<T> From<Vec<T>> for Interned<[T]>
where
    [T]: Internable,
{
    fn from(vec: Vec<T>) -> Self {
        List::from_vec(vec)
    }
}

impl<T: Internable + ?Sized> Interned<T> {
    /// Hash `obj` with the map's hasher and lock the shard it belongs to,
    /// returning the write guard together with the precomputed hash.
    #[inline]
    fn select(obj: &T) -> (Guard<T>, u64) {
        let storage = T::storage().get();
        let hash = {
            let mut hasher = std::hash::BuildHasher::build_hasher(storage.hasher());
            obj.hash(&mut hasher);
            hasher.finish()
        };
        let shard_idx = storage.determine_shard(hash as usize);
        let shard = &storage.shards()[shard_idx];
        (shard.write(), hash)
    }
}

impl<T: Internable + ?Sized> Drop for Interned<T> {
    #[inline]
    fn drop(&mut self) {
        // The map itself holds one reference, so a strong count of 2 means
        // this is the last handle outside the map and the entry can go.
        if Arc::strong_count(&self.arc) == 2 {
            self.drop_slow();
        }
    }
}

impl<T: Internable + ?Sized> Interned<T> {
    #[cold]
    fn drop_slow(&mut self) {
        let (mut shard, hash) = Self::select(&self.arc);

        // Another thread may have interned the same value again between the
        // check in `drop` and acquiring the shard lock; if so, keep the entry.
        if Arc::strong_count(&self.arc) != 2 {
            return;
        }

        match shard
            .raw_entry_mut()
            .from_key_hashed_nocheck(hash, &self.arc)
        {
            RawEntryMut::Occupied(occ) => occ.remove(),
            RawEntryMut::Vacant(_) => unreachable!(),
        };

        // Give memory back if the shard is now mostly empty.
        if shard.len() * 2 < shard.capacity() {
            shard.shrink_to_fit();
        }
    }
}

impl<T: Internable> PartialEq for Interned<T> {
    // Interning guarantees a single allocation per distinct value, so pointer
    // equality is value equality.
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        Arc::ptr_eq(&self.arc, &other.arc)
    }
}

impl<T: Internable> Eq for Interned<T> {}

impl<T> PartialEq for Interned<[T]>
where
    [T]: Internable,
{
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        Arc::ptr_eq(&self.arc, &other.arc)
    }
}

impl<T> Eq for Interned<[T]> where [T]: Internable {}

impl<T: Internable + ?Sized> Hash for Interned<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Hashing the pointer is sound for the same reason: there is one
        // allocation per value.
        state.write_usize(Arc::as_ptr(&self.arc) as *const () as usize);
    }
}

impl<T: PartialOrd + Internable> PartialOrd for Interned<T> {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        <T as PartialOrd>::partial_cmp(&self.arc, &other.arc)
    }
}

impl<T: Ord + Internable> Ord for Interned<T> {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        <T as Ord>::cmp(&self.arc, &other.arc)
    }
}

impl<T> PartialOrd for List<T>
where
    T: PartialOrd,
    [T]: Internable,
{
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        <[T] as PartialOrd>::partial_cmp(&self.arc, &other.arc)
    }
}

impl<T> Ord for List<T>
where
    T: Ord,
    [T]: Internable,
{
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        <[T] as Ord>::cmp(&self.arc, &other.arc)
    }
}

impl<T: Internable + ?Sized> AsRef<T> for Interned<T> {
    #[inline]
    fn as_ref(&self) -> &T {
        &self.arc
    }
}

impl<T: Internable + ?Sized> Deref for Interned<T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.arc
    }
}

impl<T: Internable + ?Sized> Clone for Interned<T> {
    fn clone(&self) -> Self {
        Self { arc: self.arc.clone() }
    }
}

impl<T: Debug + Internable + ?Sized> Debug for Interned<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        (*self.arc).fmt(f)
    }
}

impl<T: Display + Internable + ?Sized> Display for Interned<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        (*self.arc).fmt(f)
    }
}

impl<'a, T> IntoIterator for &'a Interned<[T]>
where
    [T]: Internable,
{
    type Item = &'a T;

    type IntoIter = std::slice::Iter<'a, T>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<E, T> Encodable<E> for Interned<T>
where
    E: Encoder,
    T: Encodable<E> + Internable,
{
    fn encode(&self, s: &mut E) {
        (**self).encode(s);
    }
}

impl<D, T> Decodable<D> for Interned<T>
where
    D: Decoder,
    T: Decodable<D> + Internable,
{
    fn decode(d: &mut D) -> Self {
        Interned::new(T::decode(d))
    }
}

impl<E, T> Encodable<E> for Interned<[T]>
where
    E: Encoder,
    T: Encodable<E>,
    [T]: Internable,
{
    fn encode(&self, s: &mut E) {
        (**self).encode(s);
    }
}

impl<D, T> Decodable<D> for Interned<[T]>
where
    D: Decoder,
    T: Decodable<D>,
    [T]: Internable,
{
    fn decode(d: &mut D) -> Self {
        Interned::from_vec(Decodable::decode(d))
    }
}

/// Lazily initialized storage for one intern map; declared as a `static` by
/// the `impl_internable!`/`impl_slice_internable!` macros below.
pub struct InternStorage<T: ?Sized> {
    map: OnceLock<InternMap<T>>,
}

impl<T: ?Sized> InternStorage<T> {
    pub const fn new() -> Self {
        Self { map: OnceLock::new() }
    }
}

impl<T: Internable + ?Sized> InternStorage<T> {
    fn get(&self) -> &InternMap<T> {
        self.map.get_or_init(DashMap::default)
    }
}

/// Types that can be interned through [`Interned::new`]. Each implementor
/// supplies its own global [`InternStorage`].
pub trait Internable: Hash + Eq + 'static {
    fn storage() -> &'static InternStorage<Self>;
}

/// Element types whose slices `[Self]` can be interned as a [`List`].
pub trait SliceInternable: Hash + Eq + 'static + Sized {
    fn storage() -> &'static InternStorage<[Self]>;
}

impl<T> Internable for [T]
where
    T: SliceInternable,
{
    fn storage() -> &'static InternStorage<Self> {
        <T as SliceInternable>::storage()
    }
}

#[macro_export]
#[doc(hidden)]
macro_rules! _impl_internable {
    ( $($t:ty),+ $(,)? ) => { $(
        impl $crate::Internable for $t {
            fn storage() -> &'static $crate::InternStorage<Self> {
                static STORAGE: $crate::InternStorage<$t> = $crate::InternStorage::new();
                &STORAGE
            }
        }
    )+ };
}
pub use crate::_impl_internable as impl_internable;
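
// Usage sketch (illustrative only, not from the original source): a downstream
// crate opts a type into interning by invoking the macro; the `Symbol` type and
// the crate name `interner` below are hypothetical.
//
//     #[derive(Hash, PartialEq, Eq)]
//     struct Symbol(String);
//
//     interner::impl_internable!(Symbol);
//
//     let a = interner::Interned::new(Symbol("foo".into()));
//     let b = interner::Interned::new(Symbol("foo".into()));
//     // `a` and `b` point at the same allocation.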

#[macro_export]
#[doc(hidden)]
macro_rules! _impl_slice_internable {
    ( $($t:ty),+ $(,)? ) => { $(
        impl $crate::SliceInternable for $t {
            fn storage() -> &'static $crate::InternStorage<[Self]> {
                static STORAGE: $crate::InternStorage<[$t]> = $crate::InternStorage::new();
                &STORAGE
            }
        }
    )+ };
}
pub use crate::_impl_slice_internable as impl_slice_internable;

impl_slice_internable!(rustc_abi::FieldIdx);
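
// A minimal test sketch added for illustration (not part of the original
// file). It assumes `rustc_abi::FieldIdx::from_u32` is available and uses the
// hypothetical `Name` type below; the point is that interning equal values
// yields pointer-identical handles.
#[cfg(test)]
mod tests {
    use super::*;

    #[derive(Debug, Hash, PartialEq, Eq)]
    struct Name(String);

    crate::impl_internable!(Name);

    #[test]
    fn interning_deduplicates() {
        let a = Interned::new(Name("x".to_owned()));
        let b = Interned::new(Name("x".to_owned()));
        // `PartialEq` on `Interned` is `Arc::ptr_eq`, so this only passes
        // because both calls returned the same shared allocation.
        assert_eq!(a, b);

        // Slices go through `List`; `[FieldIdx]` is internable via the
        // `impl_slice_internable!` invocation above.
        let l1 = List::from_vec(vec![rustc_abi::FieldIdx::from_u32(0)]);
        let l2 = List::singleton(rustc_abi::FieldIdx::from_u32(0));
        assert_eq!(l1, l2);
    }
}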