use std::{cmp::Ordering, fmt, hash, marker::PhantomData, num::NonZeroU32, ops};
type Index = NonZeroU32;
use crate::{FastIndexSet, Span};
/// An error returned when a [`Handle`] does not refer to an element that is
/// present (or accessible yet) in its arena.
#[derive(Clone, Copy, Debug, thiserror::Error, PartialEq)]
#[error("Handle {index} of {kind} is either not present, or inaccessible yet")]
pub struct BadHandle {
    /// Type name of the arena's element type.
    pub kind: &'static str,
    /// The handle's zero-based index.
    pub index: usize,
}
impl BadHandle {
    /// Build a `BadHandle` error describing `handle`, recording the element
    /// type's name and the handle's zero-based index.
    fn new<T>(handle: Handle<T>) -> Self {
        Self {
            kind: std::any::type_name::<T>(),
            index: handle.index(),
        }
    }
}
/// A strongly typed reference to an arena item.
///
/// A `Handle` value can be used as an index into an [`Arena`] or a
/// [`UniqueArena`].
#[cfg_attr(feature = "serialize", derive(serde::Serialize))]
#[cfg_attr(feature = "deserialize", derive(serde::Deserialize))]
#[cfg_attr(
    any(feature = "serialize", feature = "deserialize"),
    serde(transparent)
)]
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
pub struct Handle<T> {
    // One-based index (see `Handle::index`, which subtracts 1). Using
    // `NonZeroU32` gives `Option<Handle<T>>` a free niche.
    index: Index,
    // Ties the handle to its element type without storing a `T`.
    #[cfg_attr(any(feature = "serialize", feature = "deserialize"), serde(skip))]
    marker: PhantomData<T>,
}
// Implemented by hand (rather than derived) so that `Handle<T>` is `Clone`
// for every `T`, without requiring `T: Clone`.
impl<T> Clone for Handle<T> {
    fn clone(&self) -> Self {
        *self
    }
}
// A handle is just a `NonZeroU32` plus a marker, so it is freely copyable
// regardless of `T`.
impl<T> Copy for Handle<T> {}
// Manual impl: handles compare by index only, with no `T: PartialEq` bound.
impl<T> PartialEq for Handle<T> {
    fn eq(&self, other: &Self) -> bool {
        self.index == other.index
    }
}
// Index comparison is a total equivalence relation, so `Eq` holds.
impl<T> Eq for Handle<T> {}
// Delegates to `Ord` so the two orderings can never disagree.
impl<T> PartialOrd for Handle<T> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
// Handles order by their index, i.e. by position in the arena.
impl<T> Ord for Handle<T> {
    fn cmp(&self, other: &Self) -> Ordering {
        self.index.cmp(&other.index)
    }
}
impl<T> fmt::Debug for Handle<T> {
    /// Format as `[index]`, printing the raw (one-based) stored index.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "[{}]", self.index)
    }
}
// Hash must agree with `PartialEq`: only the index participates.
impl<T> hash::Hash for Handle<T> {
    fn hash<H: hash::Hasher>(&self, hasher: &mut H) {
        self.index.hash(hasher)
    }
}
impl<T> Handle<T> {
    /// A placeholder handle for use in tests only; unlikely to be valid in
    /// any real arena.
    #[cfg(test)]
    pub const DUMMY: Self = Handle {
        // SAFETY: `u32::MAX` is not zero.
        index: unsafe { NonZeroU32::new_unchecked(u32::MAX) },
        marker: PhantomData,
    };

    /// Wrap a one-based `Index` in a typed handle.
    pub(crate) const fn new(index: Index) -> Self {
        Handle {
            index,
            marker: PhantomData,
        }
    }

    /// Return the zero-based index of this handle.
    ///
    /// The stored `Index` is one-based (so the `NonZeroU32` niche is usable);
    /// subtracting one converts back to a plain vector index.
    pub const fn index(self) -> usize {
        let index = self.index.get() - 1;
        index as usize
    }

    /// Build a handle from a zero-based `usize` index.
    ///
    /// # Panics
    ///
    /// Panics if `index + 1` does not fit in a `u32`.
    fn from_usize(index: usize) -> Self {
        let handle_index = u32::try_from(index + 1)
            .ok()
            .and_then(Index::new)
            .expect("Failed to insert into arena. Handle overflows");
        Handle::new(handle_index)
    }

    /// Build a handle from a zero-based `usize` index without checking for
    /// overflow.
    ///
    /// # Safety
    ///
    /// The caller must ensure `index + 1` fits in a `u32` and does not wrap
    /// to zero (i.e. `index < u32::MAX as usize`); otherwise the
    /// `NonZeroU32` invariant is violated.
    const unsafe fn from_usize_unchecked(index: usize) -> Self {
        Handle::new(Index::new_unchecked((index + 1) as u32))
    }
}
/// A strongly typed range of handles, analogous to `ops::Range<Handle<T>>`.
#[cfg_attr(feature = "serialize", derive(serde::Serialize))]
#[cfg_attr(feature = "deserialize", derive(serde::Deserialize))]
#[cfg_attr(
    any(feature = "serialize", feature = "deserialize"),
    serde(transparent)
)]
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
pub struct Range<T> {
    // Half-open range of zero-based indices (see `zero_based_index_range`).
    inner: ops::Range<u32>,
    // Ties the range to its element type without storing a `T`.
    #[cfg_attr(any(feature = "serialize", feature = "deserialize"), serde(skip))]
    marker: PhantomData<T>,
}
impl<T> Range<T> {
    /// Convert this `Range<T>` into an untyped `Range<()>`, preserving the
    /// bounds. Used by [`BadRangeError`] to store ranges of any element type.
    pub(crate) const fn erase_type(self) -> Range<()> {
        let Self { inner, marker: _ } = self;
        Range {
            inner,
            marker: PhantomData,
        }
    }
}
/// An error returned when a [`Range`] refers to elements that are not all
/// present (or accessible yet) in the arena.
#[derive(Clone, Debug, thiserror::Error)]
#[error("Handle range {range:?} of {kind} is either not present, or inaccessible yet")]
pub struct BadRangeError {
    /// Type name of the range's element type.
    kind: &'static str,
    /// The offending range, with its element type erased.
    range: Range<()>,
}
impl BadRangeError {
    /// Build a `BadRangeError` describing `range`, recording the element
    /// type's name and erasing the range's type parameter.
    pub fn new<T>(range: Range<T>) -> Self {
        Self {
            kind: std::any::type_name::<T>(),
            range: range.erase_type(),
        }
    }
}
impl<T> Clone for Range<T> {
fn clone(&self) -> Self {
Range {
inner: self.inner.clone(),
marker: self.marker,
}
}
}
impl<T> fmt::Debug for Range<T> {
    /// Format as `[first..last]` using one-based, inclusive bounds (hence the
    /// `start + 1`), matching `Handle`'s `Debug` output.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "[{}..{}]", self.inner.start + 1, self.inner.end)
    }
}
impl<T> Iterator for Range<T> {
    type Item = Handle<T>;

    /// Yield handles for the zero-based indices `inner.start..inner.end`.
    fn next(&mut self) -> Option<Self::Item> {
        if self.inner.start < self.inner.end {
            // Advance first: the incremented value is the one-based handle
            // index, and being at least 1 it is guaranteed nonzero, so the
            // `NonZeroU32` conversion cannot fail.
            self.inner.start += 1;
            Some(Handle {
                index: NonZeroU32::new(self.inner.start).unwrap(),
                marker: self.marker,
            })
        } else {
            None
        }
    }

    /// Report the exact number of handles remaining, letting `collect` and
    /// similar adapters pre-allocate instead of growing incrementally.
    /// `saturating_sub` guards against a malformed range with `start > end`,
    /// which `next` already treats as empty.
    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.inner.end.saturating_sub(self.inner.start) as usize;
        (len, Some(len))
    }
}
impl<T> Range<T> {
    /// Construct a range covering `first` through `last`, inclusive.
    ///
    /// Assumes `first <= last`; otherwise the resulting range is malformed.
    pub fn new_from_bounds(first: Handle<T>, last: Handle<T>) -> Self {
        Self {
            // Store zero-based indices; `+ 1` makes `last` inclusive.
            inner: (first.index() as u32)..(last.index() as u32 + 1),
            marker: Default::default(),
        }
    }

    /// Return the first and last handles covered by `self`, or `None` if the
    /// range is empty.
    pub fn first_and_last(&self) -> Option<(Handle<T>, Handle<T>)> {
        if self.inner.start < self.inner.end {
            Some((
                // `start + 1` converts the zero-based start into a one-based
                // handle index; `end` is already one past the zero-based last
                // element, i.e. the last element's one-based index.
                Handle::new(Index::new(self.inner.start + 1).unwrap()),
                Handle::new(Index::new(self.inner.end).unwrap()),
            ))
        } else {
            None
        }
    }

    /// Return the zero-based index range this `Range` covers.
    pub fn zero_based_index_range(&self) -> ops::Range<u32> {
        self.inner.clone()
    }

    /// Construct a `Range` from a zero-based index range, validated against
    /// `arena`.
    ///
    /// # Panics
    ///
    /// Panics if `inner` is malformed (`start > end`) or extends past the end
    /// of `arena`.
    pub fn from_zero_based_index_range(inner: ops::Range<u32>, arena: &Arena<T>) -> Self {
        assert!(inner.start <= inner.end);
        assert!(inner.end as usize <= arena.len());
        Self {
            inner,
            marker: Default::default(),
        }
    }
}
/// An arena of values addressed by typed [`Handle`]s, with a source [`Span`]
/// recorded for each element.
#[cfg_attr(feature = "clone", derive(Clone))]
#[cfg_attr(feature = "serialize", derive(serde::Serialize))]
#[cfg_attr(feature = "serialize", serde(transparent))]
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
#[cfg_attr(test, derive(PartialEq))]
pub struct Arena<T> {
    /// The elements, indexed by a handle's zero-based index.
    data: Vec<T>,
    /// One span per element, parallel to `data`; intentionally not serialized.
    #[cfg_attr(feature = "serialize", serde(skip))]
    span_info: Vec<Span>,
}
impl<T> Default for Arena<T> {
    /// Equivalent to [`Arena::new`]: an empty arena.
    fn default() -> Self {
        Self::new()
    }
}
impl<T: fmt::Debug> fmt::Debug for Arena<T> {
    /// Render the arena as a map from handles to elements.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_map().entries(self.iter()).finish()
    }
}
impl<T> Arena<T> {
    /// Create a new, empty arena (no allocation until the first append).
    pub const fn new() -> Self {
        Arena {
            data: Vec::new(),
            span_info: Vec::new(),
        }
    }

    /// Extract the inner vector of elements, discarding span information.
    #[allow(clippy::missing_const_for_fn)]
    pub fn into_inner(self) -> Vec<T> {
        self.data
    }

    /// Return the current number of elements stored in this arena.
    pub fn len(&self) -> usize {
        self.data.len()
    }

    /// Return `true` if the arena contains no elements.
    pub fn is_empty(&self) -> bool {
        self.data.is_empty()
    }

    /// Return an iterator over the elements, yielding each element's handle
    /// together with a shared reference to it.
    pub fn iter(&self) -> impl DoubleEndedIterator<Item = (Handle<T>, &T)> {
        self.data
            .iter()
            .enumerate()
            // SAFETY: `i` enumerates `data`, and `append` builds handles via
            // the checked `Handle::from_usize`, so every existing index is
            // within handle range.
            .map(|(i, v)| unsafe { (Handle::from_usize_unchecked(i), v) })
    }

    /// Like [`Arena::iter`], but yielding mutable references to the elements.
    pub fn iter_mut(&mut self) -> impl DoubleEndedIterator<Item = (Handle<T>, &mut T)> {
        self.data
            .iter_mut()
            .enumerate()
            // SAFETY: same argument as in `iter` above.
            .map(|(i, v)| unsafe { (Handle::from_usize_unchecked(i), v) })
    }

    /// Append `value` with source span `span`, returning its handle.
    ///
    /// # Panics
    ///
    /// Panics (via `Handle::from_usize`) if the new index would overflow
    /// the handle's `u32` representation.
    pub fn append(&mut self, value: T, span: Span) -> Handle<T> {
        let index = self.data.len();
        self.data.push(value);
        self.span_info.push(span);
        Handle::from_usize(index)
    }

    /// Return the handle of the first element for which `fun` returns `true`,
    /// if any.
    pub fn fetch_if<F: Fn(&T) -> bool>(&self, fun: F) -> Option<Handle<T>> {
        self.data
            .iter()
            .position(fun)
            // SAFETY: `position` returns an index of an existing element,
            // which `append` keeps within handle range.
            .map(|index| unsafe { Handle::from_usize_unchecked(index) })
    }

    /// Append `value` unless `fun` reports an existing element as equivalent,
    /// in which case that element's handle is returned and `span` is unused.
    pub fn fetch_if_or_append<F: Fn(&T, &T) -> bool>(
        &mut self,
        value: T,
        span: Span,
        fun: F,
    ) -> Handle<T> {
        if let Some(index) = self.data.iter().position(|d| fun(d, &value)) {
            // SAFETY: `index` refers to an existing element; see `fetch_if`.
            unsafe { Handle::from_usize_unchecked(index) }
        } else {
            self.append(value, span)
        }
    }

    /// [`fetch_if_or_append`](Self::fetch_if_or_append) with plain equality
    /// as the uniqueness check.
    pub fn fetch_or_append(&mut self, value: T, span: Span) -> Handle<T>
    where
        T: PartialEq,
    {
        self.fetch_if_or_append(value, span, T::eq)
    }

    /// Look up an element, returning `Err(BadHandle)` if `handle` is out of
    /// range instead of panicking.
    pub fn try_get(&self, handle: Handle<T>) -> Result<&T, BadHandle> {
        self.data
            .get(handle.index())
            .ok_or_else(|| BadHandle::new(handle))
    }

    /// Return a mutable reference to the element `handle` refers to.
    ///
    /// # Panics
    ///
    /// Panics if `handle` is out of range.
    pub fn get_mut(&mut self, handle: Handle<T>) -> &mut T {
        self.data.get_mut(handle.index()).unwrap()
    }

    /// Return the range of handles covering everything appended since the
    /// arena had `old_length` elements.
    pub fn range_from(&self, old_length: usize) -> Range<T> {
        Range {
            inner: old_length as u32..self.data.len() as u32,
            marker: PhantomData,
        }
    }

    /// Clear the arena's elements.
    ///
    /// NOTE(review): unlike `UniqueArena::clear`, `span_info` is left
    /// untouched here; `get_span` falls back to a default span for missing
    /// entries, but confirm callers repopulate both vectors in lockstep.
    pub fn clear(&mut self) {
        self.data.clear()
    }

    /// Return the span recorded for `handle`'s element, or a default span if
    /// none was recorded (e.g. after deserialization, which drops spans).
    pub fn get_span(&self, handle: Handle<T>) -> Span {
        *self
            .span_info
            .get(handle.index())
            .unwrap_or(&Span::default())
    }

    /// Check that `handle` refers to an element currently in this arena.
    pub fn check_contains_handle(&self, handle: Handle<T>) -> Result<(), BadHandle> {
        if handle.index() < self.data.len() {
            Ok(())
        } else {
            Err(BadHandle::new(handle))
        }
    }

    /// Check that every handle in `range` refers to an element in this arena.
    pub fn check_contains_range(&self, range: &Range<T>) -> Result<(), BadRangeError> {
        // A malformed range (start past end) is always an error.
        if range.inner.start > range.inner.end {
            return Err(BadRangeError::new(range.clone()));
        }
        // An empty range is valid regardless of arena contents.
        if range.inner.start == range.inner.end {
            return Ok(());
        }
        // Here `end > start >= 0`, so `end` is nonzero and is exactly the
        // one-based index of the range's last element; checking it covers
        // every smaller index too.
        let last_handle = Handle::new(range.inner.end.try_into().unwrap());
        if self.check_contains_handle(last_handle).is_err() {
            return Err(BadRangeError::new(range.clone()));
        }
        Ok(())
    }

    /// Keep only the elements for which `predicate` returns `true`,
    /// compacting `span_info` in lockstep with `data`.
    #[cfg(feature = "compact")]
    pub(crate) fn retain_mut<P>(&mut self, mut predicate: P)
    where
        P: FnMut(Handle<T>, &mut T) -> bool,
    {
        // `index` walks the original positions; `retained` is the compacted
        // slot where the next surviving element's span belongs.
        let mut index = 0;
        let mut retained = 0;
        self.data.retain_mut(|elt| {
            let handle = Handle::new(Index::new(index as u32 + 1).unwrap());
            let keep = predicate(handle, elt);
            if keep {
                // Shift the surviving span down to its compacted slot.
                self.span_info[retained] = self.span_info[index];
                retained += 1;
            }
            index += 1;
            keep
        });
        self.span_info.truncate(retained);
    }
}
#[cfg(feature = "deserialize")]
impl<'de, T> serde::Deserialize<'de> for Arena<T>
where
    T: serde::Deserialize<'de>,
{
    /// Deserialize just the element vector (matching the `transparent`
    /// serialization); spans are not serialized, so every element gets a
    /// default span.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let data: Vec<T> = Vec::deserialize(deserializer)?;
        let span_info = vec![Span::default(); data.len()];
        Ok(Self { data, span_info })
    }
}
impl<T> ops::Index<Handle<T>> for Arena<T> {
    type Output = T;
    /// Index by handle; panics if `handle` is out of range.
    fn index(&self, handle: Handle<T>) -> &T {
        &self.data[handle.index()]
    }
}
impl<T> ops::IndexMut<Handle<T>> for Arena<T> {
    /// Mutably index by handle; panics if `handle` is out of range.
    fn index_mut(&mut self, handle: Handle<T>) -> &mut T {
        &mut self.data[handle.index()]
    }
}
impl<T> ops::Index<Range<T>> for Arena<T> {
    type Output = [T];
    /// Return the slice of elements the range covers; panics if the range
    /// extends past the end of the arena.
    fn index(&self, range: Range<T>) -> &[T] {
        &self.data[range.inner.start as usize..range.inner.end as usize]
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Appending two equal values still yields two distinct handles.
    #[test]
    fn append_non_unique() {
        let mut arena: Arena<u8> = Arena::new();
        let first = arena.append(0, Default::default());
        let second = arena.append(0, Default::default());
        assert_ne!(first, second);
        assert_eq!(arena[first], arena[second]);
    }

    /// Appending different values yields distinct handles and elements.
    #[test]
    fn append_unique() {
        let mut arena: Arena<u8> = Arena::new();
        let first = arena.append(0, Default::default());
        let second = arena.append(1, Default::default());
        assert_ne!(first, second);
        assert_ne!(arena[first], arena[second]);
    }

    /// `fetch_or_append` deduplicates equal values into a single handle.
    #[test]
    fn fetch_or_append_non_unique() {
        let mut arena: Arena<u8> = Arena::new();
        let first = arena.fetch_or_append(0, Default::default());
        let second = arena.fetch_or_append(0, Default::default());
        assert_eq!(first, second);
        assert_eq!(arena[first], arena[second]);
    }

    /// `fetch_or_append` keeps distinct values under distinct handles.
    #[test]
    fn fetch_or_append_unique() {
        let mut arena: Arena<u8> = Arena::new();
        let first = arena.fetch_or_append(0, Default::default());
        let second = arena.fetch_or_append(1, Default::default());
        assert_ne!(first, second);
        assert_ne!(arena[first], arena[second]);
    }
}
/// An arena of deduplicated values: each distinct value is stored once, and
/// inserting an equal value returns the existing element's handle.
#[cfg_attr(feature = "clone", derive(Clone))]
pub struct UniqueArena<T> {
    /// Insertion-ordered set, providing lookup both by value and by index.
    set: FastIndexSet<T>,
    /// One span per element, parallel to `set`'s insertion order.
    span_info: Vec<Span>,
}
impl<T> UniqueArena<T> {
    /// Create a new, empty unique arena.
    pub fn new() -> Self {
        UniqueArena {
            set: FastIndexSet::default(),
            span_info: Vec::new(),
        }
    }

    /// Return the current number of elements stored in this arena.
    pub fn len(&self) -> usize {
        self.set.len()
    }

    /// Return `true` if the arena contains no elements.
    pub fn is_empty(&self) -> bool {
        self.set.is_empty()
    }

    /// Remove all elements and their spans, keeping allocations.
    pub fn clear(&mut self) {
        self.set.clear();
        self.span_info.clear();
    }

    /// Return the span recorded for `handle`'s element, or a default span if
    /// none was recorded (e.g. after deserialization, which drops spans).
    pub fn get_span(&self, handle: Handle<T>) -> Span {
        *self
            .span_info
            .get(handle.index())
            .unwrap_or(&Span::default())
    }

    /// Drain every element, yielding `(handle, element, span)` triples in
    /// insertion order.
    #[cfg(feature = "compact")]
    pub(crate) fn drain_all(&mut self) -> UniqueArenaDrain<T> {
        UniqueArenaDrain {
            inner_elts: self.set.drain(..),
            inner_spans: self.span_info.drain(..),
            // Handles are one-based, so the first drained element is 1.
            index: Index::new(1).unwrap(),
        }
    }
}
/// Iterator returned by [`UniqueArena::drain_all`], yielding each drained
/// element together with its handle and span.
#[cfg(feature = "compact")]
pub(crate) struct UniqueArenaDrain<'a, T> {
    inner_elts: indexmap::set::Drain<'a, T>,
    inner_spans: std::vec::Drain<'a, Span>,
    /// One-based index of the next handle to yield.
    index: Index,
}
#[cfg(feature = "compact")]
impl<'a, T> Iterator for UniqueArenaDrain<'a, T> {
    type Item = (Handle<T>, T, Span);

    /// Yield the next `(handle, element, span)` triple, advancing the
    /// one-based handle counter.
    fn next(&mut self) -> Option<Self::Item> {
        let elt = self.inner_elts.next()?;
        let handle = Handle::new(self.index);
        // The counter would only overflow past `u32::MAX` elements; `unwrap`
        // records that invariant.
        self.index = self.index.checked_add(1).unwrap();
        // Spans drain in lockstep with elements, so one must be present.
        let span = self.inner_spans.next().unwrap();
        Some((handle, elt, span))
    }
}
impl<T: Eq + hash::Hash> UniqueArena<T> {
    /// Return an iterator over the elements, yielding each element's handle
    /// together with a shared reference to it.
    pub fn iter(&self) -> impl DoubleEndedIterator<Item = (Handle<T>, &T)> {
        self.set.iter().enumerate().map(|(i, v)| {
            let position = i + 1;
            // SAFETY: `position >= 1`, so it is a valid `NonZeroU32`;
            // `insert` goes through the checked `Handle::from_usize`, which
            // keeps indices within `u32` range.
            let index = unsafe { Index::new_unchecked(position as u32) };
            (Handle::new(index), v)
        })
    }

    /// Insert `value`, returning its handle.
    ///
    /// If an equal value is already present, its existing handle is returned
    /// and `span` is ignored.
    pub fn insert(&mut self, value: T, span: Span) -> Handle<T> {
        let (index, added) = self.set.insert_full(value);
        if added {
            // A newly added element always lands at the end of the index set.
            debug_assert!(index == self.span_info.len());
            self.span_info.push(span);
        }
        debug_assert!(self.set.len() == self.span_info.len());
        Handle::from_usize(index)
    }

    /// Replace the value `old` refers to with `new`, keeping `old`'s slot:
    /// `new` is appended to the set and then swapped into `old`'s position
    /// by `swap_remove_index`.
    ///
    /// NOTE(review): the span recorded at `old`'s slot is left unchanged —
    /// confirm that is the intended behavior for replacements.
    ///
    /// # Panics
    ///
    /// Panics if `new` is already present, or if `old` is out of range.
    pub fn replace(&mut self, old: Handle<T>, new: T) {
        let (index, added) = self.set.insert_full(new);
        assert!(added && index == self.set.len() - 1);
        self.set.swap_remove_index(old.index()).unwrap();
    }

    /// Return the handle of an existing element equal to `value`, if any.
    pub fn get(&self, value: &T) -> Option<Handle<T>> {
        self.set
            .get_index_of(value)
            // SAFETY: the set only hands back indices of existing elements,
            // which `insert` keeps within handle range.
            .map(|index| unsafe { Handle::from_usize_unchecked(index) })
    }

    /// Return a reference to the element `handle` refers to, or a
    /// `BadHandle` error if it is out of range.
    pub fn get_handle(&self, handle: Handle<T>) -> Result<&T, BadHandle> {
        self.set
            .get_index(handle.index())
            .ok_or_else(|| BadHandle::new(handle))
    }

    /// Check that `handle` refers to an element currently in this arena.
    pub fn check_contains_handle(&self, handle: Handle<T>) -> Result<(), BadHandle> {
        if handle.index() < self.set.len() {
            Ok(())
        } else {
            Err(BadHandle::new(handle))
        }
    }
}
impl<T> Default for UniqueArena<T> {
    /// Equivalent to [`UniqueArena::new`]: an empty arena.
    fn default() -> Self {
        Self::new()
    }
}
impl<T: fmt::Debug + Eq + hash::Hash> fmt::Debug for UniqueArena<T> {
    /// Render the arena as a map from handles to elements.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_map().entries(self.iter()).finish()
    }
}
impl<T> ops::Index<Handle<T>> for UniqueArena<T> {
    type Output = T;
    /// Index by handle; panics if `handle` is out of range.
    fn index(&self, handle: Handle<T>) -> &T {
        &self.set[handle.index()]
    }
}
#[cfg(feature = "serialize")]
impl<T> serde::Serialize for UniqueArena<T>
where
    T: Eq + hash::Hash + serde::Serialize,
{
    /// Serialize only the element set; spans are intentionally dropped
    /// (mirrored by `Deserialize`, which restores default spans).
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        self.set.serialize(serializer)
    }
}
#[cfg(feature = "deserialize")]
impl<'de, T> serde::Deserialize<'de> for UniqueArena<T>
where
    T: Eq + hash::Hash + serde::Deserialize<'de>,
{
    /// Deserialize just the element set; spans are not serialized, so every
    /// element gets a default span.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let set = FastIndexSet::deserialize(deserializer)?;
        let span_info = vec![Span::default(); set.len()];
        Ok(Self { set, span_info })
    }
}
#[cfg(feature = "arbitrary")]
impl<'a, T> arbitrary::Arbitrary<'a> for UniqueArena<T>
where
    T: Eq + hash::Hash + arbitrary::Arbitrary<'a>,
{
    /// Build an arena from arbitrary elements, giving each an undefined span.
    ///
    /// NOTE(review): duplicate elements collapse into one set entry but a
    /// span is pushed unconditionally, so `span_info` can end up longer than
    /// `set` — confirm this mismatch is acceptable for fuzzing inputs.
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        let mut arena = Self::default();
        for elem in u.arbitrary_iter()? {
            arena.set.insert(elem?);
            arena.span_info.push(Span::UNDEFINED);
        }
        Ok(arena)
    }

    /// Like [`arbitrary`](Self::arbitrary), but consumes the remainder of
    /// the unstructured input.
    fn arbitrary_take_rest(u: arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        let mut arena = Self::default();
        for elem in u.arbitrary_take_rest_iter()? {
            arena.set.insert(elem?);
            arena.span_info.push(Span::UNDEFINED);
        }
        Ok(arena)
    }

    #[inline]
    fn size_hint(depth: usize) -> (usize, Option<usize>) {
        // Combine the depth cost with an unbounded element count.
        let depth_hint = <usize as arbitrary::Arbitrary>::size_hint(depth);
        arbitrary::size_hint::and(depth_hint, (0, None))
    }
}