diff --git a/src/arc.rs b/src/arc.rs
index 3488312..6e8fd92 100644
--- a/src/arc.rs
+++ b/src/arc.rs
@@ -244,7 +244,7 @@ impl Hash for Arc {
 pub(crate) struct HeaderSlice<H, T: ?Sized> {
     pub(crate) header: H,
     length: usize,
-    slice: T,
+    pub(crate) slice: T,
 }
 
 impl<H, T> HeaderSlice<H, [T]> {
diff --git a/src/cursor.rs b/src/cursor.rs
index 39b0c96..2575acc 100644
--- a/src/cursor.rs
+++ b/src/cursor.rs
@@ -83,19 +83,23 @@
 use std::{
     borrow::Cow,
-    cell::Cell,
+    cell::{Cell, RefCell},
     fmt,
     hash::{Hash, Hasher},
     iter,
+    marker::PhantomData,
     mem::{self, ManuallyDrop},
     ops::Range,
-    ptr, slice,
+    ptr,
+    rc::Rc,
+    slice,
 };
 
 use countme::Count;
 
 use crate::{
     green::{GreenChild, GreenElementRef, GreenNodeData, GreenTokenData, SyntaxKind},
+    pool::Pool,
     sll,
     utility_types::Delta,
     Direction, GreenNode, GreenToken, NodeOrToken, SyntaxText, TextRange, TextSize, TokenAtOffset,
 };
@@ -186,12 +190,27 @@ impl Drop for SyntaxToken {
     }
 }
 
+// RAII guard: returns the pointed-to NodeData's slot to the thread-local pool
+// when dropped, replacing the old `Box::from_raw` free path.
+struct NodeDataDeallocator {
+    data: ptr::NonNull<NodeData>,
+}
+
+impl Drop for NodeDataDeallocator {
+    fn drop(&mut self) {
+        // NOTE(review): `Pool::deallocate` is declared as a *safe* fn, so this
+        // `unsafe` block triggers `unused_unsafe`; given that it runs
+        // `drop_in_place` on a raw pointer, `deallocate` should itself be
+        // `unsafe fn` and this block would then be justified.
+        unsafe {
+            NodeData::POOL.with(|pool| {
+                pool.borrow_mut().deallocate(self.data);
+            });
+        }
+    }
+}
+
 #[inline(never)]
 unsafe fn free(mut data: ptr::NonNull<NodeData>) {
     loop {
         debug_assert_eq!(data.as_ref().rc.get(), 0);
         debug_assert!(data.as_ref().first.get().is_null());
-        let node = Box::from_raw(data.as_ptr());
+        // Deallocation is deferred to the guard's Drop so the fields read
+        // below stay valid for the rest of this iteration.
+        let _to_drop = NodeDataDeallocator { data };
+        let node = data.as_ref();
         match node.parent.take() {
             Some(parent) => {
                 debug_assert!(parent.as_ref().rc.get() > 0);
@@ -220,6 +239,10 @@ unsafe fn free(mut data: ptr::NonNull<NodeData>) {
 }
 
 impl NodeData {
+    thread_local!
+    {
+        // Per-thread slab pool from which all NodeData allocations are served.
+        static POOL: RefCell<Pool<NodeData>> = RefCell::new(Pool::default());
+    }
+
     #[inline]
     fn new(
         parent: Option<SyntaxNode>,
@@ -269,13 +292,13 @@ impl NodeData {
                     return ptr::NonNull::new_unchecked(res);
                 }
                 it => {
-                    let res = Box::into_raw(Box::new(res));
-                    it.add_to_sll(res);
-                    return ptr::NonNull::new_unchecked(res);
+                    let res = Self::POOL.with(move |pool| pool.borrow_mut().allocate(res));
+                    it.add_to_sll(res.as_ptr());
+                    return res;
                 }
             }
         }
 
-        ptr::NonNull::new_unchecked(Box::into_raw(Box::new(res)))
+        Self::POOL.with(move |pool| pool.borrow_mut().allocate(res))
     }
 }
@@ -388,6 +411,7 @@ impl NodeData {
             })
         })
     }
+
     fn prev_sibling(&self) -> Option<SyntaxNode> {
         let mut rev_siblings = self.green_siblings().enumerate().rev();
         let index = rev_siblings.len() - (self.index() as usize);
@@ -647,6 +671,7 @@ impl SyntaxNode {
             })
         })
     }
+
     pub fn last_child(&self) -> Option<SyntaxNode> {
         self.green_ref().children().raw.enumerate().rev().find_map(|(index, child)| {
             child.as_ref().into_node().map(|green| {
@@ -679,6 +704,7 @@ impl SyntaxNode {
     pub fn next_sibling(&self) -> Option<SyntaxNode> {
         self.data().next_sibling()
     }
+
     pub fn prev_sibling(&self) -> Option<SyntaxNode> {
         self.data().prev_sibling()
     }
diff --git a/src/lib.rs b/src/lib.rs
index bab6cd9..e806cc9 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -10,6 +10,10 @@
 #[allow(unsafe_code)]
 mod green;
+
+#[allow(unsafe_code)]
+pub mod pool;
+
 #[allow(unsafe_code)]
 pub mod cursor;
diff --git a/src/pool.rs b/src/pool.rs
new file mode 100644
index 0000000..8bcb583
--- /dev/null
+++ b/src/pool.rs
@@ -0,0 +1,342 @@
+use crate::arc::HeaderSlice;
+use std::{
+    alloc::{self, Layout},
+    default::Default,
+    marker::PhantomData,
+    mem::{self, MaybeUninit},
+    ptr,
+};
+
+use memoffset::offset_of;
+
+// One slot of a page: the (possibly uninitialized) payload, an intrusive
+// free-list link, and a back-pointer to the owning page.
+struct Chunk<T> {
+    data: MaybeUninit<T>,
+    next: Option<ptr::NonNull<Chunk<T>>>,
+    page: PagePtr<T>,
+}
+
+// Recover the Chunk from a pointer to its `data` field.
+#[inline]
+fn ptr_to_chunk<T>(item: ptr::NonNull<T>) -> ptr::NonNull<Chunk<T>> {
+    let data_offset = offset_of!(Chunk<T>, data);
+    unsafe {
+        ptr::NonNull::new_unchecked(((item.as_ptr() as usize) - data_offset) as *mut Chunk<T>)
+    }
+}
+
+#[inline]
+fn
+chunk_to_ptr<T>(mut chunk: ptr::NonNull<Chunk<T>>) -> ptr::NonNull<T> {
+    unsafe { ptr::NonNull::new_unchecked(chunk.as_mut().data.as_mut_ptr()) }
+}
+
+impl<T> Default for Chunk<T> {
+    fn default() -> Self {
+        Self {
+            data: MaybeUninit::uninit(),
+            next: None,
+            page: PagePtr { ptr: ptr::NonNull::dangling() },
+        }
+    }
+}
+
+// Header of a page: doubly-linked list membership, head of the intrusive
+// free-chunk list, and the number of chunks currently allocated.
+struct PageHeader<T> {
+    prev_page: Option<PagePtr<T>>,
+    next_page: Option<PagePtr<T>>,
+    free_chunk: Option<ptr::NonNull<Chunk<T>>>,
+    num_chunks: usize,
+    _p: PhantomData<T>,
+}
+
+type Page<T> = HeaderSlice<PageHeader<T>, [Chunk<T>; 0]>;
+
+struct PagePtr<T> {
+    ptr: ptr::NonNull<Page<T>>,
+}
+
+impl<T> Clone for PagePtr<T> {
+    fn clone(&self) -> Self {
+        Self { ptr: self.ptr.clone() }
+    }
+}
+
+impl<T> Copy for PagePtr<T> {}
+
+impl<T> PagePtr<T> {
+    #[inline]
+    fn next_page(&self) -> Option<PagePtr<T>> {
+        unsafe { self.ptr.as_ref().header.next_page }
+    }
+
+    #[inline]
+    fn prev_page(&self) -> Option<PagePtr<T>> {
+        unsafe { self.ptr.as_ref().header.prev_page }
+    }
+
+    #[inline]
+    fn set_next_page(&mut self, next: Option<PagePtr<T>>) {
+        unsafe {
+            self.ptr.as_mut().header.next_page = next;
+        }
+    }
+
+    #[inline]
+    fn set_prev_page(&mut self, prev: Option<PagePtr<T>>) {
+        unsafe {
+            self.ptr.as_mut().header.prev_page = prev;
+        }
+    }
+
+    // Set our next pointer and fix up the neighbour's back-pointer.
+    #[inline]
+    fn link_next_page(&mut self, next: Option<PagePtr<T>>) {
+        self.set_next_page(next);
+        if let Some(mut ptr) = next {
+            ptr.set_prev_page(Some(self.clone()));
+        }
+    }
+
+    #[inline]
+    fn link_prev_page(&mut self, prev: Option<PagePtr<T>>) {
+        self.set_prev_page(prev);
+        if let Some(mut ptr) = prev {
+            ptr.set_next_page(Some(self.clone()));
+        }
+    }
+
+    fn num_chunks(&self) -> usize {
+        unsafe { self.ptr.as_ref().header.num_chunks }
+    }
+
+    fn remove_from_list(&self) {
+        if let Some(mut prev) = self.prev_page() {
+            prev.set_next_page(self.next_page());
+        }
+
+        if let Some(mut next) = self.next_page() {
+            next.set_prev_page(self.prev_page());
+        }
+    }
+
+    fn free(&self) {
+        self.remove_from_list();
+        unsafe {
+            // NOTE(review): this page was created by `alloc::alloc` with a
+            // layout covering the header *plus* `capacity` chunks (see
+            // `PagePtr::new`), but `Box::from_raw` deallocates with
+            // `Layout::new::<Page<T>>()` — header only. Mismatched layouts in
+            // dealloc are UB; the page capacity should be stored in the
+            // header and the buffer released with `alloc::dealloc` using the
+            // same layout computation as `new`.
+            let _to_free = Box::from_raw(self.ptr.as_ptr());
+        }
+    }
+
+    fn new(capacity: usize, prev_page: Option<PagePtr<T>>, next_page: Option<PagePtr<T>>) -> Self {
+        // Implementation mostly based on arc.rs
+        assert!(capacity > 0);
+
+        // Find size of the HeaderSlice
+        let slice_offset = offset_of!(Page<T>, slice);
+        let slice_size = mem::size_of::<Chunk<T>>().checked_mul(capacity).expect("size overflow");
+        let usable_size = slice_offset.checked_add(slice_size).expect("size overflows");
+
+        // Round size up to alignment
+        let align = mem::align_of::<Page<T>>();
+        let size = usable_size.wrapping_add(align - 1) & !(align - 1);
+        assert!(size >= usable_size, "size overflows");
+
+        let layout = Layout::from_size_align(size, align).expect("invalid layout");
+
+        unsafe {
+            let buffer = alloc::alloc(layout);
+            if buffer.is_null() {
+                alloc::handle_alloc_error(layout);
+            }
+
+            let ptr = buffer as *mut Page<T>;
+            let result = Self { ptr: ptr::NonNull::new_unchecked(ptr) };
+
+            let mut current = ptr::addr_of_mut!((*ptr).slice) as *mut Chunk<T>;
+
+            // All chunks start free: the free list threads through every slot.
+            let header = PageHeader {
+                prev_page,
+                next_page,
+                free_chunk: Some(ptr::NonNull::new_unchecked(current)),
+                num_chunks: 0,
+                _p: PhantomData,
+            };
+
+            ptr::write(ptr::addr_of_mut!((*ptr).header), header);
+            for idx in 0..capacity {
+                let chunk = Chunk {
+                    data: MaybeUninit::uninit(),
+                    next: if idx == capacity - 1 {
+                        None
+                    } else {
+                        Some(ptr::NonNull::new_unchecked(current.offset(1)))
+                    },
+                    page: result,
+                };
+                ptr::write(current, chunk);
+                current = current.offset(1);
+            }
+
+            result
+        }
+    }
+
+    // Pop the head of the free-chunk list and move `item` into it.
+    // Precondition: the page is not full (`free_chunk` is `Some`).
+    #[inline]
+    fn allocate(&mut self, item: T) -> ptr::NonNull<T> {
+        let header = unsafe { &mut self.ptr.as_mut().header };
+        unsafe {
+            header.free_chunk.unwrap().as_mut().data.write(item);
+        }
+
+        header.num_chunks += 1;
+        let result = header.free_chunk.unwrap();
+        header.free_chunk = unsafe { header.free_chunk.unwrap().as_ref().next };
+
+        chunk_to_ptr(result)
+    }
+}
+
+/// A paged slab allocator: objects are placed into fixed-capacity pages,
+/// tracked on two intrusive lists (pages with free slots / full pages).
+/// At most `max_empty_pages` fully-empty pages are retained for reuse.
+pub struct Pool<T> {
+    free_pages: Option<PagePtr<T>>,
+    full_pages: Option<PagePtr<T>>,
+
+    num_empty_pages: usize,
+
+    max_empty_pages: usize,
+
+    page_capacity: usize,
+}
+
+impl<T> Default for Pool<T> {
+    fn default() -> Self {
+        Self::new(1024, 4)
+    }
+}
+
+impl<T> Drop for Pool<T> {
+    fn drop(&mut self) {
+        while let Some(ptr)
+            = self.free_pages {
+            self.free_pages = ptr.next_page();
+            ptr.free();
+        }
+
+        while let Some(ptr) = self.full_pages {
+            self.full_pages = ptr.next_page();
+            ptr.free();
+        }
+    }
+}
+
+impl<T> Pool<T> {
+    pub fn new(page_capacity: usize, max_empty_pages: usize) -> Self {
+        debug_assert!(page_capacity > 0);
+        debug_assert!(max_empty_pages > 0);
+
+        Self {
+            free_pages: None,
+            full_pages: None,
+            num_empty_pages: 0,
+            max_empty_pages,
+            page_capacity,
+        }
+    }
+
+    /// Place `item` into a page with a free slot (creating a page if none
+    /// exists) and return a pointer to it. Pages that become full are moved
+    /// to the full-pages list.
+    #[inline]
+    pub fn allocate(&mut self, item: T) -> ptr::NonNull<T> {
+        if self.free_pages.is_none() {
+            self.free_pages = Some(PagePtr::new(self.page_capacity, None, None));
+            self.num_empty_pages += 1;
+        }
+
+        let mut free_page = self.free_pages.unwrap();
+        if free_page.num_chunks() == 0 {
+            // This page is about to stop being empty.
+            self.num_empty_pages -= 1;
+        }
+
+        let result = free_page.allocate(item);
+        if self.page_capacity == free_page.num_chunks() {
+            self.free_pages = free_page.next_page();
+            free_page.remove_from_list();
+            free_page.link_next_page(self.full_pages);
+            free_page.set_prev_page(None);
+            self.full_pages = Some(free_page);
+        }
+
+        result
+    }
+
+    /// Drop the pointed-to value and return its slot to the owning page's
+    /// free list; empty pages beyond `max_empty_pages` are released.
+    // NOTE(review): this is a safe `pub fn` that runs `drop_in_place` on a
+    // caller-supplied raw pointer — it should be `unsafe fn` with a
+    // `# Safety` contract (pointer must come from `allocate` of this pool
+    // and not be deallocated twice).
+    #[inline]
+    pub fn deallocate(&mut self, mut item: ptr::NonNull<T>) {
+        let mut chunk_ptr = ptr_to_chunk(item);
+        let mut page = unsafe { chunk_ptr.as_ref().page };
+        let was_full_page = unsafe { page.ptr.as_ref().header.num_chunks == self.page_capacity };
+        let empty_page = unsafe { page.ptr.as_ref().header.num_chunks - 1 == 0 };
+
+        unsafe {
+            ptr::drop_in_place(item.as_mut());
+            chunk_ptr.as_mut().next = page.ptr.as_ref().header.free_chunk;
+
+            page.ptr.as_mut().header.free_chunk = Some(chunk_ptr);
+            page.ptr.as_mut().header.num_chunks -= 1;
+
+            if was_full_page {
+                // NOTE(review): with page_capacity == 1 a page can be both
+                // `was_full_page` and `empty_page`; this branch moves it to
+                // the free list without bumping `num_empty_pages`, so the
+                // empty-page accounting drifts for capacity-1 pools.
+                if page.prev_page().is_none() {
+                    // head of list
+                    self.full_pages = page.next_page();
+                }
+                page.remove_from_list();
+                page.link_next_page(self.free_pages);
+                page.set_prev_page(None);
+                self.free_pages = Some(page);
+            } else if empty_page && self.max_empty_pages <= self.num_empty_pages {
+                if page.prev_page().is_none() {
+                    // head
of list + self.free_pages = page.next_page(); + } + page.free(); + } else if empty_page { + self.num_empty_pages += 1; + } + } + } +} + +mod tests { + use super::*; + + struct TestStruct { + id: usize, + } + + impl Drop for TestStruct { + fn drop(&mut self) { + println!("{} dropped", self.id); + } + } + + #[test] + fn test_pool() { + let mut pool = Pool::new(2, 10); + let mut allocated = Vec::new(); + for id in 0..100 { + allocated.push(pool.allocate(TestStruct { id })); + } + + for id in 0..100 { + unsafe { + assert_eq!(allocated[id].as_ref().id, id); + } + } + + for it in allocated.iter() { + pool.deallocate(*it); + } + } + + #[test] + fn test_chunk_ptr_conversion() { + let mut chunk: Chunk = Chunk::default(); + chunk.data.write(TestStruct { id: 0 }); + let chunk_ptr = unsafe { ptr::NonNull::new_unchecked(&mut chunk) }; + let test_struct = chunk_to_ptr(chunk_ptr); + unsafe { + assert_eq!(test_struct.as_ref().id, 0); + assert_eq!(ptr_to_chunk(test_struct), chunk_ptr); + } + } +}