diff --git a/Cargo.toml b/Cargo.toml
index f7efd5d5..845801ba 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -33,7 +33,6 @@ glam = {version = "0.27", features = ["scalar-math"] }
 image = { version = "0.24", default-features = false, features = ["png", "tga"] }
 macroquad_macro = { version = "0.1.8", path = "macroquad_macro" }
 fontdue = "0.7"
-bumpalo = "3.4"
 backtrace = { version = "0.3.60", optional = true, default-features = false, features = [ "std", "libbacktrace" ] }
 log = { version = "0.4", optional = true }
 quad-snd = { version = "0.2", optional = true }
diff --git a/src/experimental/scene.rs b/src/experimental/scene.rs
index 82eb7bcd..bd8368dd 100644
--- a/src/experimental/scene.rs
+++ b/src/experimental/scene.rs
@@ -4,6 +4,8 @@
 use crate::camera::Camera2D;
 pub use macroquad_macro::CapabilityTrait;
 
+mod arena;
+
 #[rustfmt::skip]
 pub trait Node {
     fn ready(_node: RefMut<Self>) where Self: Sized {}
@@ -12,10 +14,6 @@ pub trait Node {
     fn draw(_node: RefMut<Self>) where Self: Sized {}
 }
 
-trait NodeTyped<T> {
-    fn self_node(&self) -> &T;
-}
-
 trait NodeAny: Any + Node {
     fn as_any(&self) -> &dyn Any;
     fn as_any_mut(&mut self) -> &mut dyn Any;
@@ -345,7 +343,7 @@ struct Scene {
     dense: Vec<Id>,
     dense_ongoing: Vec<Result<Id, Id>>,
     nodes: Vec<Option<Cell>>,
-    arena: bumpalo::Bump,
+    arena: arena::Arena,
 
     camera: [Option<Camera2D>; 4],
     camera_pos: crate::Vec2,
@@ -363,7 +361,7 @@ impl Scene {
             dense: vec![],
             dense_ongoing: vec![],
             nodes: Vec::new(),
-            arena: bumpalo::Bump::new(),
+            arena: arena::Arena::new(),
             free_nodes: Vec::new(),
             camera: [Some(Camera2D::default()), None, None, None],
             camera_pos: crate::vec2(0., 0.),
@@ -461,15 +459,19 @@ impl Scene {
             let trait_obj = &data as &dyn NodeAny;
             let (_, vtable) = unsafe { std::mem::transmute::<_, (*mut (), *mut ())>(trait_obj) };
 
-            let data = self.arena.alloc(data) as *mut _ as *mut _;
-            let used = self.arena.alloc(false) as *mut _ as *mut _;
+            let ptr = self.arena.alloc(std::mem::size_of::<T>()) as *mut _ as *mut T;
+            unsafe { std::ptr::write(ptr, data); }
+            let ptr = ptr as *mut ();
+            let used = self.arena.alloc(1) as *mut _ as *mut bool;
+            unsafe { std::ptr::write(used, false); }
+            let used = used as *mut _ as *mut bool;
 
             id = Id {
                 id: self.nodes.len(),
                 generation: 0,
             };
 
             self.nodes
-                .push(Some(Cell::new::<T>(id, data, vtable, used)));
+                .push(Some(Cell::new::<T>(id, ptr, vtable, used)));
         }
 
         self.dense.push(id);
@@ -616,7 +618,7 @@ unsafe fn get_scene() -> &'static mut Scene {
 }
 
 pub(crate) fn allocated_memory() -> usize {
-    unsafe { get_scene() }.arena.allocated_bytes()
+    unsafe { get_scene().arena.offset() }
 }
 
 pub fn clear() {
diff --git a/src/experimental/scene/arena.rs b/src/experimental/scene/arena.rs
new file mode 100644
index 00000000..b2c042ac
--- /dev/null
+++ b/src/experimental/scene/arena.rs
@@ -0,0 +1,91 @@
+//! Gleaned from https://github.com/ratel-rust/toolshed/blob/master/src/arena.rs
+//! and then modified a lot.
+//!
+//! Module containing the `Arena` struct used by the scene graph to allocate
+//! node storage from preallocated pages.
+
+use std::cell::Cell;
+use std::mem::size_of;
+
+const ARENA_BLOCK: usize = 64 * 1024;
+
+/// An arena implementation that uses preallocated 64KiB pages for all allocations.
+/// If a new allocation would push past the boundary of the current page, a new
+/// page is allocated internally first, so this version of the arena can never
+/// run out of memory unless the process runs out of heap altogether.
+///
+/// Allocating anything larger than the page size results in a dedicated heap
+/// allocation for that value, separate from the page mechanism.
+pub struct Arena {
+    store: Cell<Vec<Vec<u8>>>,
+    ptr: Cell<*mut u8>,
+    offset: Cell<usize>,
+}
+
+impl Arena {
+    /// Create a new arena with a single preallocated 64KiB page.
+    pub fn new() -> Self {
+        let mut store = vec![Vec::with_capacity(ARENA_BLOCK)];
+        let ptr = store[0].as_mut_ptr();
+
+        Arena {
+            store: Cell::new(store),
+            ptr: Cell::new(ptr),
+            offset: Cell::new(0),
+        }
+    }
+
+    pub fn alloc(&self, size: usize) -> *mut u8 {
+        // This should be optimized away for sizes known at compile time.
+        if size > ARENA_BLOCK {
+            return self.alloc_bytes(size);
+        }
+
+        let size = match size % size_of::<usize>() {
+            0 => size,
+            n => size + (size_of::<usize>() - n),
+        };
+
+        let offset = self.offset.get();
+        let cap = offset + size;
+
+        if cap > ARENA_BLOCK {
+            self.grow();
+
+            self.offset.set(size);
+            self.ptr.get()
+        } else {
+            self.offset.set(cap);
+            unsafe { self.ptr.get().add(offset) }
+        }
+    }
+
+    #[inline]
+    fn alloc_byte_vec(&self, mut val: Vec<u8>) -> *mut u8 {
+        let ptr = val.as_mut_ptr();
+
+        let mut temp = self.store.replace(Vec::new());
+        temp.push(val);
+        self.store.replace(temp);
+
+        ptr
+    }
+
+    pub fn grow(&self) {
+        let ptr = self.alloc_byte_vec(Vec::with_capacity(ARENA_BLOCK));
+        self.ptr.set(ptr);
+    }
+
+    fn alloc_bytes(&self, size: usize) -> *mut u8 {
+        self.alloc_byte_vec(Vec::with_capacity(size))
+    }
+
+    #[doc(hidden)]
+    #[inline]
+    pub unsafe fn offset(&self) -> usize {
+        self.offset.get()
+    }
+}
+
+/// Akin to `CopyCell`: `Sync` is unsafe but `Send` is totally fine!
+unsafe impl Send for Arena {}
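
Usage note (not part of the patch): a minimal sketch of how scene.rs drives this arena after the change, assuming the new `Arena` type is in scope. The `Payload` struct and the standalone `main` are hypothetical; they only mirror the alloc-then-`ptr::write` pattern that `Scene::add_node` now uses for node data.

    use std::mem::size_of;

    // Hypothetical example type; any 'static value is stored the same way.
    struct Payload {
        hp: u32,
        speed: f32,
    }

    fn main() {
        let arena = Arena::new();

        // Reserve size_of::<Payload>() bytes, then move the value into place,
        // mirroring the new add_node path: alloc() followed by ptr::write().
        // Like the scene code, this relies on the page's heap buffer being
        // sufficiently aligned in practice; the arena only rounds sizes up
        // to a multiple of the word size.
        let slot = arena.alloc(size_of::<Payload>()) as *mut Payload;
        unsafe { std::ptr::write(slot, Payload { hp: 100, speed: 1.5 }) };

        // The pointer stays valid for the arena's lifetime: pages are only
        // ever appended to `store`, never reallocated or freed until drop.
        assert_eq!(unsafe { (*slot).hp }, 100);

        // allocated_memory() now reports the bump offset in the current page.
        assert!(unsafe { arena.offset() } >= size_of::<Payload>());
    }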