Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
pola-rs
GitHub Repository: pola-rs/polars
Path: blob/main/crates/polars-utils/src/arena.rs
6939 views
1
#[cfg(feature = "ir_serde")]
2
use serde::{Deserialize, Serialize};
3
4
use crate::error::*;
5
use crate::relaxed_cell::RelaxedCell;
6
7
/// Computes the index of `item` within `slice` purely from pointer arithmetic.
///
/// # Safety
/// `item` must point into `slice`'s backing storage; otherwise the address
/// subtraction wraps/misbehaves and the result is meaningless.
unsafe fn index_of_unchecked<T>(slice: &[T], item: &T) -> usize {
    let item_addr = item as *const T as usize;
    let base_addr = slice.as_ptr() as usize;
    (item_addr - base_addr) / size_of::<T>()
}
10
11
fn index_of<T>(slice: &[T], item: &T) -> Option<usize> {
12
debug_assert!(size_of::<T>() > 0);
13
let ptr = item as *const T;
14
unsafe {
15
if slice.as_ptr() < ptr && slice.as_ptr().add(slice.len()) > ptr {
16
Some(index_of_unchecked(slice, item))
17
} else {
18
None
19
}
20
}
21
}
22
23
/// Index-based handle to an item stored in an `Arena`.
///
/// Wraps the item's position in the arena's backing `Vec`; it is only
/// meaningful for the arena it was obtained from (see `Arena::version`).
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Ord, PartialOrd)]
#[repr(transparent)]
#[cfg_attr(feature = "ir_serde", derive(Serialize, Deserialize))]
pub struct Node(pub usize);
27
28
impl Default for Node {
29
fn default() -> Self {
30
Node(usize::MAX)
31
}
32
}
33
34
// Process-wide counter handing out a distinct `version` to every newly
// created (or cleared) `Arena`. NOTE(review): presumably callers compare
// versions to detect stale `Node`s after a `clear` — confirm at call sites.
static ARENA_VERSION: RelaxedCell<u32> = RelaxedCell::new_u32(0);
35
36
#[derive(Debug, Clone)]
#[cfg_attr(feature = "ir_serde", derive(Serialize, Deserialize))]
pub struct Arena<T> {
    // Stamp drawn from `ARENA_VERSION` in `new`/`with_capacity` and bumped
    // again by `clear`; exposed via `version()`.
    version: u32,
    // Backing storage; `Node(i)` indexes into this Vec.
    items: Vec<T>,
}
42
43
impl<T> Default for Arena<T> {
44
fn default() -> Self {
45
Self::new()
46
}
47
}
48
49
/// Simple Arena implementation.
/// Allocates memory and stores items in a Vec. Only deallocates when being
/// dropped itself; `Node` handles are plain indices into that Vec.
impl<T> Arena<T> {
    /// Version stamp assigned when this arena was created or last cleared.
    #[inline]
    pub fn version(&self) -> u32 {
        self.version
    }

    /// Pushes `val` into the arena and returns its handle.
    pub fn add(&mut self, val: T) -> Node {
        let idx = self.items.len();
        self.items.push(val);
        Node(idx)
    }

    /// Removes and returns the most recently added item, if any.
    /// The popped item's `Node` becomes dangling.
    pub fn pop(&mut self) -> Option<T> {
        self.items.pop()
    }

    /// Handle of the most recently added item, or `None` when empty.
    pub fn last_node(&mut self) -> Option<Node> {
        if self.is_empty() {
            None
        } else {
            Some(Node(self.items.len() - 1))
        }
    }

    /// Number of items currently stored.
    pub fn len(&self) -> usize {
        self.items.len()
    }

    /// `true` when the arena holds no items.
    pub fn is_empty(&self) -> bool {
        self.items.is_empty()
    }

    /// Creates an empty arena with a fresh version stamp.
    pub fn new() -> Self {
        Arena {
            items: vec![],
            version: ARENA_VERSION.fetch_add(1),
        }
    }

    /// Creates an empty arena with room for `cap` items and a fresh version.
    pub fn with_capacity(cap: usize) -> Self {
        Arena {
            items: Vec::with_capacity(cap),
            version: ARENA_VERSION.fetch_add(1),
        }
    }

    /// Address-identity lookup: returns `val`'s handle if `val` is a
    /// reference into this arena's own storage. This is NOT a value-equality
    /// search over the items.
    pub fn get_node(&self, val: &T) -> Option<Node> {
        index_of(&self.items, val).map(Node)
    }

    /// Swaps the items behind the two handles.
    ///
    /// # Panics
    /// Panics if either index is out of bounds.
    pub fn swap(&mut self, idx_a: Node, idx_b: Node) {
        self.items.swap(idx_a.0, idx_b.0)
    }

    /// Returns a reference to the item at `idx`.
    ///
    /// # Panics
    /// Panics if `idx` is out of bounds (e.g. a `Node` from another arena).
    #[inline]
    pub fn get(&self, idx: Node) -> &T {
        self.items.get(idx.0).unwrap()
    }

    #[inline]
    /// # Safety
    /// Doesn't do any bound checks; `idx` must be a valid index into this
    /// arena.
    pub unsafe fn get_unchecked(&self, idx: Node) -> &T {
        unsafe { self.items.get_unchecked(idx.0) }
    }

    /// Returns a mutable reference to the item at `idx`.
    ///
    /// # Panics
    /// Panics if `idx` is out of bounds.
    #[inline]
    pub fn get_mut(&mut self, idx: Node) -> &mut T {
        self.items.get_mut(idx.0).unwrap()
    }

    #[inline]
    /// Get mutable references to several items of the Arena
    ///
    /// The `idxs` is asserted to contain unique `Node` elements which are preferably (not
    /// necessarily) in order.
    ///
    /// # Panics
    /// Panics if any index is out of bounds or any two indices are equal.
    pub fn get_many_mut<const N: usize>(&mut self, indices: [Node; N]) -> [&mut T; N] {
        // @NOTE: This implementation is adapted from the Rust Nightly Standard Library. When
        // `get_many_mut` gets stabilized we should use that.

        let len = self.items.len();

        // NB: The optimizer should inline the loops into a sequence
        // of instructions without additional branching.
        let mut valid = true;
        for (i, &idx) in indices.iter().enumerate() {
            // Every index must be in bounds...
            valid &= idx.0 < len;
            // ...and pairwise distinct, so the returned `&mut`s are disjoint.
            for &idx2 in &indices[..i] {
                valid &= idx != idx2;
            }
        }

        assert!(valid, "Duplicate index or out-of-bounds index");

        // NB: This implementation is written as it is because any variation of
        // `indices.map(|i| self.get_unchecked_mut(i))` would make miri unhappy,
        // or generate worse code otherwise. This is also why we need to go
        // through a raw pointer here.
        let slice: *mut [T] = &mut self.items[..] as *mut _;
        let mut arr: std::mem::MaybeUninit<[&mut T; N]> = std::mem::MaybeUninit::uninit();
        let arr_ptr = arr.as_mut_ptr();

        // SAFETY: We expect `indices` to contain disjunct values that are
        // in bounds of `self`.
        unsafe {
            for i in 0..N {
                let idx = *indices.get_unchecked(i);
                let slice_ref: &mut [T] = &mut *slice;
                *(*arr_ptr).get_unchecked_mut(i) = slice_ref.get_unchecked_mut(idx.0);
            }
            // All N slots were initialized by the loop above.
            arr.assume_init()
        }
    }

    /// Replaces the item at `idx` with `val`, returning the old item.
    ///
    /// # Panics
    /// Panics if `idx` is out of bounds.
    #[inline]
    pub fn replace(&mut self, idx: Node, val: T) -> T {
        let x = self.get_mut(idx);
        std::mem::replace(x, val)
    }

    /// Removes all items and bumps the version stamp (so previously
    /// handed-out `Node`s can be recognized as stale by version-aware
    /// callers).
    pub fn clear(&mut self) {
        self.items.clear();
        self.version = ARENA_VERSION.fetch_add(1);
    }
}
176
177
impl<T: Clone> Arena<T> {
178
pub fn duplicate(&mut self, node: Node) -> Node {
179
let item = self.items[node.0].clone();
180
self.add(item)
181
}
182
}
183
184
impl<T: Default> Arena<T> {
185
#[inline]
186
pub fn take(&mut self, idx: Node) -> T {
187
std::mem::take(self.get_mut(idx))
188
}
189
190
pub fn replace_with<F>(&mut self, idx: Node, f: F)
191
where
192
F: FnOnce(T) -> T,
193
{
194
let val = self.take(idx);
195
self.replace(idx, f(val));
196
}
197
198
pub fn try_replace_with<F>(&mut self, idx: Node, mut f: F) -> Result<()>
199
where
200
F: FnMut(T) -> Result<T>,
201
{
202
let val = self.take(idx);
203
self.replace(idx, f(val)?);
204
Ok(())
205
}
206
}
207
208