use crate::bpf_intf::scx_userspace_arena_alloc_pages_args;
7use crate::bpf_intf::scx_userspace_arena_free_pages_args;
8
9use anyhow::Result;
10use buddy_system_allocator::Heap;
11use libbpf_rs::ProgramInput;
12
13use std::alloc::Layout;
14use std::ptr::NonNull;
15use std::sync::Mutex;
16
17pub unsafe trait Allocator {
26 fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, anyhow::Error>;
27
28 fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, anyhow::Error> {
29 let ptr = self.allocate(layout)?;
30 let slice: &mut [u8] = unsafe { &mut *ptr.as_ptr() };
32 slice.fill(0);
33 Ok(ptr)
34 }
35
36 unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout);
40}
41
// Backing regions obtained from the underlying allocator, recorded so they
// can be returned when the `HeapAllocator` is dropped.
type FreeList = Vec<(NonNull<[u8]>, Layout)>;
43
/// A mutex-guarded buddy-system heap that obtains large backing regions from
/// another [`Allocator`] and sub-allocates from them.
pub struct HeapAllocator<T>
where
    T: Allocator,
{
    // Source of the large backing regions; only used when the heap grows
    // and when it is dropped.
    backing_allocator: T,
    // The buddy heap together with the backing regions it was seeded with
    // (the latter are freed in `Drop`). Kept in one mutex so growth and
    // bookkeeping stay consistent.
    alloc: Mutex<(Heap<31>, FreeList)>,
}
51
52impl<T> HeapAllocator<T>
53where
54 T: Allocator,
55{
56 pub fn new(backing_allocator: T) -> Self {
57 Self {
58 backing_allocator,
59 alloc: Mutex::new((Heap::empty(), Vec::new())),
60 }
61 }
62}
63
64impl<T> Drop for HeapAllocator<T>
65where
66 T: Allocator,
67{
68 fn drop(&mut self) {
69 for a in self.alloc.get_mut().unwrap().1.iter() {
70 let first_byte_pointer = unsafe {
71 NonNull::new_unchecked(a.0.as_ptr() as *mut u8)
73 };
74 unsafe {
75 self.backing_allocator.deallocate(first_byte_pointer, a.1);
77 }
78 }
79 }
80}
81
unsafe impl<T> Allocator for HeapAllocator<T>
where
    T: Allocator,
{
    /// Serves `layout` from the buddy heap, growing the heap with a fresh
    /// region from the backing allocator when the current regions cannot
    /// satisfy the request.
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, anyhow::Error> {
        let mut guard = self.alloc.lock().unwrap();
        let (alloc, free_list) = &mut *guard;

        // Fast path: an existing region already has room.
        if let Ok(a) = alloc.alloc(layout) {
            return Ok(NonNull::slice_from_raw_parts(a, layout.size()));
        }

        // Grow geometrically with the heap's current total size, clamped to
        // [16 KiB, 1 MiB] per backing request.
        let next_allocation_size = alloc
            .stats_total_bytes()
            .next_power_of_two()
            .clamp(16 * 1024, 1024 * 1024);
        // An oversized request gets a dedicated region of its own layout.
        // NOTE(review): the regular growth path requests alignment 1 —
        // presumably the backing allocator returns page-aligned memory;
        // confirm, since the buddy heap cannot realign a region after init.
        let backing_layout = if layout.size() > next_allocation_size {
            layout
        } else {
            Layout::from_size_align(next_allocation_size, 1)?
        };
        let ptr = self.backing_allocator.allocate(backing_layout)?;

        // Record the region so `Drop` can hand it back to the backing
        // allocator later.
        free_list.push((ptr, backing_layout));

        // Seed the heap with the new region.
        unsafe {
            alloc.init(ptr.cast::<u8>().as_ptr() as usize, backing_layout.size())
        };

        // Retry now that the heap has grown; a second failure is reported
        // to the caller.
        alloc
            .alloc(layout)
            .map(|a| NonNull::slice_from_raw_parts(a, layout.size()))
            .map_err(|_| anyhow::anyhow!("failed to allocate"))
    }

    /// Returns `ptr` to the buddy heap. Backing regions themselves are only
    /// released when the `HeapAllocator` is dropped.
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        self.alloc.lock().unwrap().0.dealloc(ptr, layout)
    }
}
127
/// Runs the BPF "allocate pages" program to reserve `layout.size()` bytes of
/// arena memory, returning a pointer to the new allocation or an error if
/// the program reported failure.
///
/// # Safety
///
/// `prog` must be the arena allocation program whose context type is
/// `scx_userspace_arena_alloc_pages_args`; the program is expected to write
/// the resulting pointer into `args.ret`.
pub unsafe fn call_allocate_program(
    prog: &libbpf_rs::ProgramMut<'_>,
    layout: Layout,
) -> anyhow::Result<NonNull<[u8]>> {
    // NOTE(review): `layout.align()` is not forwarded — presumably the
    // arena returns page-aligned memory; confirm against the BPF side.
    let mut args = scx_userspace_arena_alloc_pages_args {
        sz: u32::try_from(layout.size())?,
        ret: std::ptr::null_mut(),
    };
    let input = ProgramInput {
        // Pass `args` as the program's context so the BPF side can read the
        // requested size and write the result pointer back into it.
        context_in: Some(unsafe {
            std::slice::from_raw_parts_mut(
                &mut args as *mut _ as *mut u8,
                std::mem::size_of_val(&args),
            )
        }),
        ..Default::default()
    };
    prog.test_run(input)?;

    // A null `ret` means the BPF program could not satisfy the request.
    let base = NonNull::new(args.ret as *mut u8)
        .ok_or_else(|| anyhow::anyhow!("arena allocation failed"))?;

    Ok(NonNull::slice_from_raw_parts(base, args.sz as usize))
}
159
/// Runs the BPF "free pages" program to release `layout.size()` bytes of
/// arena memory at `addr`.
///
/// # Safety
///
/// `prog` must be the arena free program whose context type is
/// `scx_userspace_arena_free_pages_args`, and `addr`/`layout` must describe
/// a live allocation previously returned by the allocate program.
pub unsafe fn call_deallocate_program(
    prog: &libbpf_rs::ProgramMut<'_>,
    addr: NonNull<u8>,
    layout: Layout,
) {
    let mut args = scx_userspace_arena_free_pages_args {
        addr: addr.as_ptr() as *mut std::ffi::c_void,
        sz: u32::try_from(layout.size())
            .expect("memory allocated in the arena must fit in 32-bits"),
    };
    let input = ProgramInput {
        // Pass `args` as the program's context so the BPF side knows which
        // range to free.
        context_in: Some(unsafe {
            std::slice::from_raw_parts_mut(
                &mut args as *mut _ as *mut u8,
                std::mem::size_of_val(&args),
            )
        }),
        ..Default::default()
    };
    // NOTE(review): a deallocation path has no way to propagate an error,
    // hence the unwrap; a failed `test_run` here aborts the process.
    prog.test_run(input).unwrap();
}