Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
### Added

- Added an `init` macro to make initialization easier.
- Added `Heap::free` and `Heap::used` for the TLSF heap.

### Changed

Expand Down
2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ llff = ["linked_list_allocator"]
[dependencies]
critical-section = "1.0"
linked_list_allocator = { version = "0.10.5", default-features = false, optional = true }
rlsf = { version = "0.2.1", default-features = false, optional = true }
rlsf = { version = "0.2.1", default-features = false, features = ["unstable"], optional = true }
const-default = { version = "1.0.0", default-features = false, optional = true }

[dev-dependencies]
Expand Down
2 changes: 1 addition & 1 deletion examples/allocator_api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -34,5 +34,5 @@ fn main() -> ! {
#[panic_handler]
fn panic(info: &PanicInfo) -> ! {
defmt::error!("{}", info);
semihosting::process::exit(0);
semihosting::process::exit(-1);
}
38 changes: 38 additions & 0 deletions examples/exhaustion.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
//! Example which shows behavior on pool exhaustion. It simply panics.
#![no_std]
#![no_main]

extern crate alloc;

use cortex_m as _;
use cortex_m_rt::entry;
use defmt::Debug2Format;
use defmt_semihosting as _;

use core::panic::PanicInfo;
use embedded_alloc::TlsfHeap as Heap;

#[global_allocator]
static HEAP: Heap = Heap::empty();

/// Entry point: set up a deliberately tiny heap and trigger exhaustion.
#[entry]
fn main() -> ! {
    // The allocator must be set up before the first allocation happens.
    unsafe {
        embedded_alloc::init!(HEAP, 16);
    }

    // Sixteen (default-width) integers cannot fit into the 16-byte pool,
    // so this allocation is expected to panic inside the allocator.
    let _exhausting = alloc::vec![0; 16];

    // Reaching this point means exhaustion did not occur — report failure.
    defmt::error!("unexpected vector allocation success");

    semihosting::process::exit(-1);
}

/// For this example a panic is the expected, successful outcome.
#[panic_handler]
fn panic(panic_info: &PanicInfo) -> ! {
    defmt::warn!("received expected heap exhaustion panic");
    defmt::warn!("{}: {}", panic_info, Debug2Format(&panic_info.message()));
    semihosting::process::exit(0);
}
2 changes: 1 addition & 1 deletion examples/global_alloc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,5 +37,5 @@ fn main() -> ! {
#[panic_handler]
fn panic(info: &PanicInfo) -> ! {
defmt::error!("{}", info);
semihosting::process::exit(0);
semihosting::process::exit(-1);
}
54 changes: 54 additions & 0 deletions examples/track_usage.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
#![no_std]
#![no_main]

extern crate alloc;

use cortex_m as _;
use cortex_m_rt::entry;
use defmt::Debug2Format;
use defmt_semihosting as _;

use core::{mem::MaybeUninit, panic::PanicInfo};
use embedded_alloc::TlsfHeap as Heap;
//use embedded_alloc::LlffHeap as Heap;

#[global_allocator]
static HEAP: Heap = Heap::empty();

/// Entry point: repeatedly allocate and watch the heap's free-byte counter.
#[entry]
fn main() -> ! {
    // The allocator must be initialized before the first allocation.
    const HEAP_SIZE: usize = 4096;
    static mut HEAP_MEM: [MaybeUninit<u8>; HEAP_SIZE] = [MaybeUninit::uninit(); HEAP_SIZE];
    unsafe { HEAP.init(&raw mut HEAP_MEM as usize, HEAP_SIZE) }

    let mut allocations = alloc::vec::Vec::new();
    let mut remaining = HEAP_SIZE;
    // Grab 64-byte chunks until the heap reports fewer than 512 free bytes.
    // This is expected to finish without triggering a panic.
    loop {
        if remaining <= 512 {
            break;
        }
        defmt::info!(
            "{} of {} heap memory allocated so far...",
            HEAP_SIZE - remaining,
            HEAP_SIZE
        );
        allocations.push(alloc::vec![1_u8; 64]);
        remaining = HEAP.free();
    }

    drop(allocations);

    // After dropping everything, the usage counter should be back near zero.
    defmt::info!(
        "{} of {} heap memory are allocated after drop",
        HEAP_SIZE - HEAP.free(),
        HEAP_SIZE
    );

    semihosting::process::exit(0);
}

#[panic_handler]
fn panic(info: &PanicInfo) -> ! {
defmt::error!("{}: {}", info, Debug2Format(&info.message()));
semihosting::process::exit(-1);
}
61 changes: 54 additions & 7 deletions src/tlsf.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,20 @@ use rlsf::Tlsf;

type TlsfHeap = Tlsf<'static, usize, usize, { usize::BITS as usize }, { usize::BITS as usize }>;

/// Allocator state shared behind the [Mutex] in [Heap].
struct Inner {
    // The underlying TLSF allocator.
    tlsf: TlsfHeap,
    // Set once by `init`; used to reject double initialization.
    initialized: bool,
    // The memory block passed to `insert_free_block_ptr` during `init`;
    // `None` until then. Kept so `iter_blocks` can walk the pool later.
    raw_block: Option<NonNull<[u8]>>,
    // Size in bytes of `raw_block`; 0 until initialized.
    raw_block_size: usize,
}

// Safety: `Inner` is not auto-Send/Sync because of the raw `NonNull` it
// stores. All access to `Inner` goes through the [Mutex] in [Heap], i.e. is
// serialized inside a critical section, and the pointer is never handed out,
// so cross-thread use is sound.
unsafe impl Sync for Inner {}
unsafe impl Send for Inner {}

/// A two-Level segregated fit heap.
pub struct Heap {
heap: Mutex<RefCell<(TlsfHeap, bool)>>,
heap: Mutex<RefCell<Inner>>,
}

impl Heap {
Expand All @@ -20,7 +31,12 @@ impl Heap {
/// [`init`](Self::init) method before using the allocator.
pub const fn empty() -> Heap {
Heap {
heap: Mutex::new(RefCell::new((ConstDefault::DEFAULT, false))),
heap: Mutex::new(RefCell::new(Inner {
tlsf: ConstDefault::DEFAULT,
initialized: false,
raw_block: None,
raw_block_size: 0,
})),
}
}

Expand Down Expand Up @@ -59,26 +75,57 @@ impl Heap {
assert!(size > 0);
critical_section::with(|cs| {
let mut heap = self.heap.borrow_ref_mut(cs);
assert!(!heap.1);
heap.1 = true;
assert!(!heap.initialized);
heap.initialized = true;
let block: NonNull<[u8]> =
NonNull::slice_from_raw_parts(NonNull::new_unchecked(start_addr as *mut u8), size);
heap.0.insert_free_block_ptr(block);
heap.tlsf.insert_free_block_ptr(block);
heap.raw_block = Some(block);
heap.raw_block_size = size;
});
}

fn alloc(&self, layout: Layout) -> Option<NonNull<u8>> {
critical_section::with(|cs| self.heap.borrow_ref_mut(cs).0.allocate(layout))
critical_section::with(|cs| self.heap.borrow_ref_mut(cs).tlsf.allocate(layout))
}

unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
critical_section::with(|cs| {
self.heap
.borrow_ref_mut(cs)
.0
.tlsf
.deallocate(NonNull::new_unchecked(ptr), layout.align())
})
}

/// Get the amount of bytes used by the allocator.
///
/// Returns 0 if the heap has not been initialized yet.
pub fn used(&self) -> usize {
    critical_section::with(|cs| {
        // Copy the pool size out in its own statement so the `RefMut`
        // guard returned by `borrow_ref_mut` is dropped *before* calling
        // `free_with_cs`, which re-borrows the inner `RefCell`. Keeping
        // the guard alive across that call (as a temporary in a single
        // expression would) panics with a `BorrowMutError` at runtime.
        let raw_block_size = self.heap.borrow_ref_mut(cs).raw_block_size;
        raw_block_size - self.free_with_cs(cs)
    })
}

/// Get the amount of free bytes in the allocator.
///
/// Returns 0 if the heap has not been initialized yet.
/// Enters a critical section and delegates to [`free_with_cs`](Self::free_with_cs).
pub fn free(&self) -> usize {
    critical_section::with(|cs| self.free_with_cs(cs))
}

/// Compute the number of free bytes while already inside a critical section.
///
/// Returns 0 for an uninitialized heap.
fn free_with_cs(&self, cs: critical_section::CriticalSection) -> usize {
    let inner = self.heap.borrow_ref_mut(cs);
    if !inner.initialized {
        return 0;
    }
    // Safety: `raw_block` is exactly the memory block that was handed to
    // `insert_free_block_ptr` during `init`, which is what `iter_blocks`
    // requires.
    unsafe {
        let mut total = 0usize;
        for block in inner.tlsf.iter_blocks(inner.raw_block.unwrap()) {
            if !block.is_occupied() {
                total += block.max_payload_size();
            }
        }
        total
    }
}
}

unsafe impl GlobalAlloc for Heap {
Expand Down