From 933f61b75dee417443d10c300cf76166184d87fa Mon Sep 17 00:00:00 2001 From: Arjan Mels <43108771+arjanmels@users.noreply.github.com> Date: Sun, 3 May 2020 15:01:14 +0200 Subject: [PATCH 1/5] Added used and free functions --- src/lib.rs | 49 ++++++++++++++++++++++++++++++++++--------------- 1 file changed, 34 insertions(+), 15 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index 44daae3..dca2742 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -14,6 +14,7 @@ extern crate alloc; use alloc::alloc::Layout; #[cfg(feature = "alloc_ref")] use alloc::alloc::{AllocErr, AllocInit, AllocRef, MemoryBlock}; +#[cfg(feature = "use_spin")] use core::alloc::GlobalAlloc; use core::mem; #[cfg(feature = "use_spin")] @@ -31,6 +32,7 @@ mod test; pub struct Heap { bottom: usize, size: usize, + used: usize, holes: HoleList, } @@ -40,6 +42,7 @@ impl Heap { Heap { bottom: 0, size: 0, + used: 0, holes: HoleList::empty(), } } @@ -53,6 +56,7 @@ impl Heap { pub unsafe fn init(&mut self, heap_bottom: usize, heap_size: usize) { self.bottom = heap_bottom; self.size = heap_size; + self.used = 0; self.holes = HoleList::new(heap_bottom, heap_size); } @@ -67,17 +71,13 @@ impl Heap { Heap { bottom: heap_bottom, size: heap_size, + used: 0, holes: HoleList::new(heap_bottom, heap_size), } } } - /// Allocates a chunk of the given size with the given alignment. Returns a pointer to the - /// beginning of that chunk if it was successful. Else it returns `None`. - /// This function scans the list of free memory blocks and uses the first block that is big - /// enough. The runtime is in O(n) where n is the number of free blocks, but it should be - /// reasonably fast for small allocations. 
- pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<u8>, ()> { + pub fn align_layout(&self, layout: Layout) -> Layout { let mut size = layout.size(); if size < HoleList::min_size() { size = HoleList::min_size(); @@ -85,7 +85,21 @@ impl Heap { let size = align_up(size, mem::align_of::<Hole>()); let layout = Layout::from_size_align(size, layout.align()).unwrap(); - self.holes.allocate_first_fit(layout) + layout + } + + /// Allocates a chunk of the given size with the given alignment. Returns a pointer to the + /// beginning of that chunk if it was successful. Else it returns `None`. + /// This function scans the list of free memory blocks and uses the first block that is big + /// enough. The runtime is in O(n) where n is the number of free blocks, but it should be + /// reasonably fast for small allocations. + pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<u8>, ()> { + let aligned_layout = self.align_layout(layout); + let res = self.holes.allocate_first_fit(aligned_layout); + if res.is_ok() { + self.used += aligned_layout.size(); + } + res } /// Frees the given allocation. `ptr` must be a pointer returned @@ -96,14 +110,9 @@ impl Heap { /// correct place. If the freed block is adjacent to another free block, the blocks are merged /// again. This operation is in `O(n)` since the list needs to be sorted by address. pub unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) { - let mut size = layout.size(); - if size < HoleList::min_size() { - size = HoleList::min_size(); - } - let size = align_up(size, mem::align_of::<Hole>()); - let layout = Layout::from_size_align(size, layout.align()).unwrap(); - - self.holes.deallocate(ptr, layout); + let aligned_layout = self.align_layout(layout); + self.holes.deallocate(ptr, aligned_layout); + self.used -= aligned_layout.size(); } /// Returns the bottom address of the heap. 
@@ -121,6 +130,16 @@ impl Heap { self.bottom + self.size } + /// Returns the size of the used part of the heap + pub fn used(&self) -> usize { + self.used + } + + /// Returns the size of the free part of the heap + pub fn free(&self) -> usize { + self.size - self.used + } + /// Extends the size of the heap by creating a new hole at the end /// /// # Unsafety From 464081fd2bd37be2048a3852717b9d52f6a22916 Mon Sep 17 00:00:00 2001 From: Arjan Mels <43108771+arjanmels@users.noreply.github.com> Date: Sun, 3 May 2020 15:06:18 +0200 Subject: [PATCH 2/5] Updated version to 0.8.4 and updated Changelog.md --- Cargo.toml | 2 +- Changelog.md | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 6f61909..ef46e61 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "linked_list_allocator" -version = "0.8.3" +version = "0.8.4" authors = ["Philipp Oppermann <dev@phil-opp.com>"] license = "Apache-2.0/MIT" diff --git a/Changelog.md b/Changelog.md index 80a21d9..6e1b2d3 100644 --- a/Changelog.md +++ b/Changelog.md @@ -1,3 +1,7 @@ +# 0.8.4 + +- Add function to get used and free heap size ([#32](https://github.com/phil-opp/linked-list-allocator/pull/32)) + # 0.8.3 - Prevent writing to heap memory range when size too small ([#31](https://github.com/phil-opp/linked-list-allocator/pull/31)) From b2e3337e14704760274d0b302c95a8c15fdbbee9 Mon Sep 17 00:00:00 2001 From: Arjan Mels <43108771+arjanmels@users.noreply.github.com> Date: Sun, 3 May 2020 15:09:28 +0200 Subject: [PATCH 3/5] Fixed used field initialization for LockedHeap --- src/lib.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/lib.rs b/src/lib.rs index dca2742..07516ed 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -201,6 +201,7 @@ impl LockedHeap { LockedHeap(Spinlock::new(Heap { bottom: heap_bottom, size: heap_size, + used: 0, holes: HoleList::new(heap_bottom, heap_size), })) } From f71eec2c78db3dcd1463e7f37013df5bb9767843 Mon Sep 17 00:00:00 2001 From: Arjan Mels 
<43108771+arjanmels@users.noreply.github.com> Date: Sun, 3 May 2020 21:46:18 +0200 Subject: [PATCH 4/5] Undo version increments --- Cargo.toml | 2 +- Changelog.md | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index ef46e61..6f61909 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "linked_list_allocator" -version = "0.8.4" +version = "0.8.3" authors = ["Philipp Oppermann <dev@phil-opp.com>"] license = "Apache-2.0/MIT" diff --git a/Changelog.md b/Changelog.md index 6e1b2d3..7a45452 100644 --- a/Changelog.md +++ b/Changelog.md @@ -1,4 +1,3 @@ -# 0.8.4 - Add function to get used and free heap size ([#32](https://github.com/phil-opp/linked-list-allocator/pull/32)) From 6daea0874006f5fc05ae453b050668b0a9043aa4 Mon Sep 17 00:00:00 2001 From: Arjan Mels <43108771+arjanmels@users.noreply.github.com> Date: Sun, 3 May 2020 21:50:15 +0200 Subject: [PATCH 5/5] Changed align_layout to private and added documentation --- src/lib.rs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index 07516ed..01bd637 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -77,7 +77,9 @@ impl Heap { } } - pub fn align_layout(&self, layout: Layout) -> Layout { + /// Align layout. Returns a layout with size increased to + /// fit at least `HoleList::min_size` and proper alignment of a `Hole`. + fn align_layout(layout: Layout) -> Layout { let mut size = layout.size(); if size < HoleList::min_size() { size = HoleList::min_size(); } @@ -94,7 +96,7 @@ impl Heap { /// enough. The runtime is in O(n) where n is the number of free blocks, but it should be /// reasonably fast for small allocations. 
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<u8>, ()> { - let aligned_layout = self.align_layout(layout); + let aligned_layout = Self::align_layout(layout); let res = self.holes.allocate_first_fit(aligned_layout); if res.is_ok() { self.used += aligned_layout.size(); } @@ -110,7 +112,7 @@ impl Heap { /// correct place. If the freed block is adjacent to another free block, the blocks are merged /// again. This operation is in `O(n)` since the list needs to be sorted by address. pub unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) { - let aligned_layout = self.align_layout(layout); + let aligned_layout = Self::align_layout(layout); self.holes.deallocate(ptr, aligned_layout); self.used -= aligned_layout.size(); }