use std::{
    alloc::{self, handle_alloc_error, Layout, LayoutError},
    ffi::c_void,
    ptr::NonNull,
};

use super::layout::*;
8
/// A low-level, untyped memory allocation that holds a dynamic number of
/// items which all share the same [`Layout`].
///
/// The allocation starts empty (no heap memory) and is grown or shrunk with
/// `resize`; items are addressed by index at `layout.size()`-byte strides.
#[derive(Debug)]
pub struct ResizableAlloc {
    /// Pointer to the start of the buffer. When `cap == 0` or the item layout
    /// is zero-sized this is a well-aligned dangling pointer, not a real
    /// allocation.
    ptr: NonNull<c_void>,
    /// The layout of a single item. Construction asserts this is padded to
    /// its alignment, so `size` is also the array stride.
    layout: Layout,
    /// The number of items the buffer currently has room for.
    cap: usize,
}
26
27impl Clone for ResizableAlloc {
28 fn clone(&self) -> Self {
29 let mut copy = ResizableAlloc::new(self.layout);
31 copy.resize(self.cap).unwrap();
33
34 if self.layout.size() > 0 {
36 unsafe {
38 copy.ptr
41 .as_ptr()
42 .copy_from_nonoverlapping(self.ptr.as_ptr(), self.capacity());
43 }
44 }
45
46 copy
48 }
49}
50
impl ResizableAlloc {
    /// Create an empty allocation for items matching `layout`.
    ///
    /// No heap memory is allocated until [`resize`][Self::resize] is called
    /// with a non-zero capacity.
    ///
    /// # Panics
    /// Panics if `layout` is not padded to its alignment, i.e. if
    /// `layout != layout.pad_to_align()`. This invariant makes `layout.size()`
    /// usable directly as the array stride.
    #[inline]
    pub fn new(layout: Layout) -> Self {
        assert_eq!(
            layout,
            layout.pad_to_align(),
            "Layout must be padded to it's alignment"
        );
        Self {
            ptr: Self::dangling(&layout),
            layout,
            cap: 0,
        }
    }

    /// Create an allocation with room for `capacity` items up front.
    ///
    /// # Errors
    /// Returns a [`LayoutError`] if the repeated layout would overflow.
    #[inline]
    pub fn with_capacity(layout: Layout, capacity: usize) -> Result<Self, LayoutError> {
        let mut a = Self::new(layout);
        a.resize(capacity)?;
        Ok(a)
    }

    /// Resize the buffer to hold exactly `new_capacity` items, allocating,
    /// reallocating, or deallocating as needed. Existing bytes are preserved
    /// up to the smaller of the old and new capacities (realloc semantics).
    ///
    /// # Errors
    /// Returns a [`LayoutError`] if computing the repeated array layout fails.
    pub fn resize(&mut self, new_capacity: usize) -> Result<(), LayoutError> {
        // Nothing to do when the capacity is unchanged.
        if self.cap == new_capacity {
            return Ok(());
        }

        // Zero-sized items never need backing memory: just record the
        // capacity and keep the dangling pointer.
        if self.layout.size() == 0 {
            self.cap = new_capacity;
            return Ok(());
        }

        let old_capacity = self.cap;

        // Record the new capacity before branching; the alloc/realloc paths
        // below rely on `old_capacity` for the previous buffer's layout.
        self.cap = new_capacity;

        if new_capacity == 0 {
            // Shrinking to zero: free the old buffer (if any) and restore the
            // aligned dangling pointer.
            if old_capacity > 0 {
                // NOTE: `repeat` here comes from the `super::layout` import —
                // presumably a stable mirror of the unstable `Layout::repeat`,
                // yielding the layout of an `old_capacity`-element array.
                let old_alloc_layout = self.layout.repeat(old_capacity)?.0;

                // SAFETY: `old_capacity > 0` and the item size is non-zero, so
                // `ptr` was returned by `alloc`/`realloc` with this same
                // repeated layout.
                unsafe { alloc::dealloc(self.ptr.as_ptr() as *mut u8, old_alloc_layout) }
            }

            self.ptr = Self::dangling(&self.layout);

        } else {
            if old_capacity > 0 {
                // Grow or shrink an existing buffer in place when possible.
                let old_alloc_layout = self.layout.repeat(old_capacity).unwrap().0;
                let new_alloc_layout = self.layout.repeat(new_capacity).unwrap().0;
                // SAFETY: `ptr` was allocated with `old_alloc_layout`, and the
                // new size comes from a valid layout for the same item type.
                self.ptr = NonNull::new(unsafe {
                    alloc::realloc(
                        self.ptr.as_ptr() as *mut u8,
                        old_alloc_layout,
                        new_alloc_layout.size(),
                    ) as *mut c_void
                })
                // A null return means the allocator failed; abort via the
                // standard allocation-error hook.
                .unwrap_or_else(|| handle_alloc_error(new_alloc_layout));

            } else {
                // First real allocation: the item size and capacity are both
                // non-zero, so the layout has non-zero size as `alloc` requires.
                let alloc_layout = self.layout.repeat(new_capacity).unwrap().0;
                // SAFETY: `alloc_layout` has non-zero size (checked above).
                self.ptr = NonNull::new(unsafe { alloc::alloc(alloc_layout) } as *mut c_void)
                    .unwrap_or_else(|| handle_alloc_error(alloc_layout));
            }
        }

        Ok(())
    }

    /// Get the layout of the items stored in the buffer.
    #[inline]
    pub fn layout(&self) -> Layout {
        self.layout
    }

    /// Get the current capacity, in items (not bytes).
    #[inline]
    pub fn capacity(&self) -> usize {
        self.cap
    }

    /// Get a raw pointer to the start of the buffer.
    ///
    /// The pointer is dangling (but well-aligned) when nothing has been
    /// allocated, so it must not be dereferenced in that case.
    pub fn as_ptr(&self) -> *mut c_void {
        self.ptr.as_ptr()
    }

    /// Iterate over const pointers to each item slot in the buffer.
    pub fn iter(&self) -> ResizableAllocIter<'_> {
        ResizableAllocIter {
            alloc: self,
            idx: 0,
        }
    }

    /// Iterate over mutable pointers to each item slot in the buffer.
    pub fn iter_mut(&mut self) -> ResizableAllocIterMut<'_> {
        ResizableAllocIterMut {
            alloc: self,
            idx: 0,
        }
    }

    /// Get a pointer to the item slot at `idx` without any bounds checking.
    ///
    /// # Safety
    /// `idx` must be less than the current capacity; otherwise the returned
    /// pointer is out of bounds of the allocation.
    #[inline]
    pub unsafe fn unchecked_idx(&self, idx: usize) -> *mut c_void {
        // Stride is `layout.size()` because the layout is padded to its
        // alignment (asserted in `new`).
        self.ptr.as_ptr().add(self.layout.size() * idx)
    }

    /// Create a well-aligned dangling pointer for the given layout, used as
    /// the placeholder when no memory is allocated.
    #[inline]
    fn dangling(layout: &Layout) -> NonNull<c_void> {
        // SAFETY: `Layout` guarantees a non-zero alignment, so the address is
        // never null. `sptr::invalid_mut` creates the pointer without exposing
        // provenance.
        unsafe { NonNull::new_unchecked(sptr::invalid_mut(layout.align())) }
    }
}
194
195impl Drop for ResizableAlloc {
196 fn drop(&mut self) {
197 if self.cap > 0 && self.layout.size() > 0 {
198 unsafe {
199 alloc::dealloc(
200 self.ptr.as_ptr() as *mut u8,
201 self.layout.repeat(self.cap).unwrap().0,
202 )
203 }
204 }
205 }
206}
207
/// Iterator over const pointers to the item slots of a [`ResizableAlloc`].
pub struct ResizableAllocIter<'a> {
    /// The allocation being iterated.
    alloc: &'a ResizableAlloc,
    /// Index of the next slot to yield.
    idx: usize,
}
213impl<'a> Iterator for ResizableAllocIter<'a> {
214 type Item = *const c_void;
215
216 fn next(&mut self) -> Option<Self::Item> {
217 if self.idx < self.alloc.cap {
218 let r = unsafe { self.alloc.unchecked_idx(self.idx) };
220 self.idx += 1;
221 Some(r)
222 } else {
223 None
224 }
225 }
226}
227
/// Iterator over mutable pointers to the item slots of a [`ResizableAlloc`].
pub struct ResizableAllocIterMut<'a> {
    /// The allocation being iterated; held mutably so slots may be written.
    alloc: &'a mut ResizableAlloc,
    /// Index of the next slot to yield.
    idx: usize,
}
233impl<'a> Iterator for ResizableAllocIterMut<'a> {
234 type Item = *mut c_void;
235
236 fn next(&mut self) -> Option<Self::Item> {
237 if self.idx < self.alloc.cap {
238 let r = unsafe { self.alloc.unchecked_idx(self.idx) };
241 self.idx += 1;
242 Some(r)
243 } else {
244 None
245 }
246 }
247}
248
#[cfg(test)]
mod test {
    use std::alloc::Layout;

    use crate::alloc::ResizableAlloc;

    /// Resize an allocation of a larger, glam-based struct through several
    /// capacities; exercises the alloc -> realloc growth path without
    /// crashing. Only built when the `glam` feature is enabled.
    #[test]
    #[cfg(feature = "glam")]
    fn realloc_transform() {
        use crate as bones_schema;
        use bones_schema_macros::HasSchema;
        use glam::*;

        #[derive(HasSchema, Clone, Default)]
        #[repr(C)]
        pub struct Transform {
            pub translation: Vec3,
            pub rotation: Quat,
            pub scale: Vec3,
        }

        let layout = Layout::new::<Transform>();

        let mut a = ResizableAlloc::new(layout);

        // Each step grows the buffer, hitting the first-alloc then realloc paths.
        a.resize(1).unwrap();
        a.resize(2).unwrap();
        a.resize(6).unwrap();
    }

    /// End-to-end check of grow, shrink, and free behavior using raw
    /// pointer reads/writes.
    #[test]
    fn resizable_allocation() {
        // (u32, u8) has tail padding, so stride > sum of field sizes.
        type Ty = (u32, u8);
        let layout = Layout::new::<Ty>();

        let mut a = ResizableAlloc::new(layout);

        a.resize(3).unwrap();

        // Write three items then read them back through the raw pointer.
        for i in 0..3 {
            unsafe {
                a.as_ptr().cast::<Ty>().add(i).write((i as _, i as _));
            }
        }
        unsafe {
            assert_eq!((0, 0), (a.as_ptr() as *mut Ty).read());
            assert_eq!((1, 1), (a.as_ptr() as *mut Ty).add(1).read());
            assert_eq!((2, 2), (a.as_ptr() as *mut Ty).add(2).read());
        }

        // Growing must preserve the existing items (realloc semantics).
        a.resize(4).unwrap();

        unsafe {
            a.as_ptr().cast::<Ty>().add(3).write((3, 3));

            assert_eq!((0, 0), (a.as_ptr() as *mut Ty).read());
            assert_eq!((1, 1), (a.as_ptr() as *mut Ty).add(1).read());
            assert_eq!((2, 2), (a.as_ptr() as *mut Ty).add(2).read());
            assert_eq!((3, 3), (a.as_ptr() as *mut Ty).add(3).read());
        }

        // Shrinking keeps the leading items.
        a.resize(1).unwrap();
        unsafe {
            assert_eq!((0, 0), (a.as_ptr() as *mut Ty).read());
        }

        // Resizing to zero frees the buffer; the pointer becomes the aligned
        // dangling placeholder, whose address equals the layout's alignment.
        a.resize(0).unwrap();

        assert_eq!(a.as_ptr() as usize, layout.align());
    }
}