1818#include <sof/trace/trace.h>
1919#include <rtos/symbol.h>
2020#include <rtos/wait.h>
#if CONFIG_VIRTUAL_HEAP
#include <sof/lib/regions_mm.h>

/* One virtual heap per core; an entry may be NULL when heap creation
 * failed for that core (the allocator then falls back to the system heap).
 */
struct vmh_heap *virtual_buffers_heap[CONFIG_MP_MAX_NUM_CPUS];
/* Lock intended to guard virtual-heap operations; initialized at boot.
 * NOTE(review): the K_SPINLOCK uses in the alloc/free paths are currently
 * commented out — confirm whether cross-core access needs it.
 */
struct k_spinlock vmh_lock;

#undef HEAPMEM_SIZE
/* Buffers are allocated from virtual space so we can safely reduce the heap size.
 */
#define HEAPMEM_SIZE 0x40000
#endif /* CONFIG_VIRTUAL_HEAP */
32+
2133
2234/* Zephyr includes */
2335#include <zephyr/init.h>
@@ -193,6 +205,98 @@ static void l3_heap_free(struct k_heap *h, void *mem)
193205
194206#endif
195207
208+ #if CONFIG_VIRTUAL_HEAP
209+ static void * virtual_heap_alloc (struct vmh_heap * heap , uint32_t flags , uint32_t caps , size_t bytes ,
210+ uint32_t align )
211+ {
212+ void * mem ;
213+
214+ //K_SPINLOCK(&vmh_lock) {
215+ // heap->core_id = cpu_get_id();
216+ mem = vmh_alloc (heap , bytes );
217+ //}
218+
219+ if (!mem )
220+ return NULL ;
221+
222+ assert (IS_ALIGNED (mem , align ));
223+
224+ if (flags & SOF_MEM_FLAG_COHERENT )
225+ return sys_cache_uncached_ptr_get ((__sparse_force void __sparse_cache * )mem );
226+
227+ return mem ;
228+ }
229+
230+ /**
231+ * Checks whether pointer is from virtual memory range.
232+ * @param ptr Pointer to memory being checked.
233+ * @return True if pointer falls into virtual memory region, false otherwise.
234+ */
235+ static bool is_virtual_heap_pointer (void * ptr )
236+ {
237+ uintptr_t virtual_heap_start = POINTER_TO_UINT (sys_cache_cached_ptr_get (& heapmem )) +
238+ HEAPMEM_SIZE ;
239+ uintptr_t virtual_heap_end = CONFIG_KERNEL_VM_BASE + CONFIG_KERNEL_VM_SIZE ;
240+
241+ if (!is_cached (ptr ))
242+ ptr = (__sparse_force void * )sys_cache_cached_ptr_get (ptr );
243+
244+ return ((POINTER_TO_UINT (ptr ) >= virtual_heap_start ) &&
245+ (POINTER_TO_UINT (ptr ) < virtual_heap_end ));
246+ }
247+
248+ static void virtual_heap_free (void * ptr )
249+ {
250+ struct vmh_heap * const heap = virtual_buffers_heap [cpu_get_id ()];
251+ int ret ;
252+
253+ ptr = (__sparse_force void * )sys_cache_cached_ptr_get (ptr );
254+
255+ //K_SPINLOCK(&vmh_lock) {
256+ //virtual_buffers_heap->core_id = cpu_get_id();
257+ ret = vmh_free (heap , ptr );
258+ //}
259+
260+ if (ret )
261+ tr_err (& zephyr_tr , "Unable to free %p! %d" , ptr , ret );
262+ }
263+
/* Block-size configuration shared by every per-core virtual heap.
 * Presumably each entry is a { block size in bytes, block count } pair —
 * TODO(review): confirm against the vmh_heap_config definition in
 * regions_mm.h.
 */
static const struct vmh_heap_config static_hp_buffers = {
	{
		{ 128, 32 },
		{ 512, 8 },
		{ 1024, 44 },
		{ 2048, 8 },
		{ 4096, 11 },
		{ 8192, 10 },
		{ 65536, 3 },
		{ 131072, 1 },
		{ 524288, 1 } /* buffer for kpb */
	},
};
277+
278+ static int virtual_heap_init (void )
279+ {
280+ int core ;
281+
282+ k_spinlock_init (& vmh_lock );
283+
284+ for (core = 0 ; core < CONFIG_MP_MAX_NUM_CPUS ; core ++ ) {
285+ struct vmh_heap * heap = vmh_init_heap (& static_hp_buffers , MEM_REG_ATTR_CORE_HEAP ,
286+ core , false);
287+ if (!heap )
288+ tr_err (& zephyr_tr , "Unable to init virtual heap for core %d!" , core );
289+
290+ virtual_buffers_heap [core ] = heap ;
291+ }
292+
293+ return 0 ;
294+ }
295+
296+ SYS_INIT (virtual_heap_init , POST_KERNEL , 1 );
297+
298+ #endif /* CONFIG_VIRTUAL_HEAP */
299+
196300static void * heap_alloc_aligned (struct k_heap * h , size_t min_align , size_t bytes )
197301{
198302 k_spinlock_key_t key ;
@@ -384,6 +488,9 @@ EXPORT_SYMBOL(rzalloc);
384488void * rballoc_align (uint32_t flags , uint32_t caps , size_t bytes ,
385489 uint32_t align )
386490{
491+ #if CONFIG_VIRTUAL_HEAP
492+ struct vmh_heap * virtual_heap ;
493+ #endif
387494 struct k_heap * heap ;
388495
389496 /* choose a heap */
@@ -399,6 +506,14 @@ void *rballoc_align(uint32_t flags, uint32_t caps, size_t bytes,
399506 heap = & sof_heap ;
400507 }
401508
509+ #if CONFIG_VIRTUAL_HEAP
510+ /* Use virtual heap if it is available */
511+ virtual_heap = virtual_buffers_heap [cpu_get_id ()];
512+ if (virtual_heap ) {
513+ return virtual_heap_alloc (virtual_heap , flags , caps , bytes , align );
514+ }
515+ #endif /* CONFIG_VIRTUAL_HEAP */
516+
402517 if (flags & SOF_MEM_FLAG_COHERENT )
403518 return heap_alloc_aligned (heap , align , bytes );
404519
@@ -421,6 +536,13 @@ void rfree(void *ptr)
421536 }
422537#endif
423538
539+ #if CONFIG_VIRTUAL_HEAP
540+ if (is_virtual_heap_pointer (ptr )) {
541+ virtual_heap_free (ptr );
542+ return ;
543+ }
544+ #endif
545+
424546 heap_free (& sof_heap , ptr );
425547}
426548EXPORT_SYMBOL (rfree );
@@ -432,7 +554,6 @@ static int heap_init(void)
432554#if CONFIG_L3_HEAP
433555 sys_heap_init (& l3_heap .heap , UINT_TO_POINTER (get_l3_heap_start ()), get_l3_heap_size ());
434556#endif
435-
436557 return 0 ;
437558}
438559
0 commit comments