#include <assert.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdlib.h>

#include "../offload/offload_library.h"
#include "../offload/offload_runtime.h"
#include "dbm_mempool.h"
#include "dbm_mpi.h"

#if defined(__OFFLOAD) && !defined(__NO_OFFLOAD_DBM)
#define DBM_MEMPOOL_OFFLOAD_ENABLED 1
#else
#define DBM_MEMPOOL_OFFLOAD_ENABLED 0
#endif

#define DBM_MEMPOOL_DEVICE_ENABLED                                             \
  (DBM_MEMPOOL_DEVICE && DBM_MEMPOOL_OFFLOAD_ENABLED)
#define DBM_MEMPOOL_HOST_ENABLED                                               \
  ((DBM_MEMPOOL_HOST && DBM_ALLOC_OFFLOAD && DBM_MEMPOOL_OFFLOAD_ENABLED) ||   \
   (1 < DBM_MEMPOOL_HOST))
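/* Summary of the configuration macros above: DBM_MEMPOOL_DEVICE_ENABLED
 * activates the device-side pool only when DBM_MEMPOOL_DEVICE is requested
 * and offload support is compiled in. DBM_MEMPOOL_HOST_ENABLED activates the
 * host-side pool when host pooling is requested together with offload-based
 * host allocation (DBM_ALLOC_OFFLOAD), or unconditionally when
 * DBM_MEMPOOL_HOST is set to a value greater than 1. */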
/* Private single-linked lists of memory chunks available and allocated. */
#if DBM_MEMPOOL_DEVICE_ENABLED
static dbm_memchunk_t *mempool_device_available_head = NULL;
static dbm_memchunk_t *mempool_device_allocated_head = NULL;
#endif

#if DBM_MEMPOOL_HOST_ENABLED
static dbm_memchunk_t *mempool_host_available_head = NULL;
static dbm_memchunk_t *mempool_host_allocated_head = NULL;
#endif
/* actual_malloc(): private routine for actually allocating system memory. */
#if DBM_MEMPOOL_OFFLOAD_ENABLED
  if (on_device) {
    offload_activate_chosen_device();
    offloadMalloc(&memory, size); // device memory
  } else {
#if DBM_ALLOC_OFFLOAD
    offload_activate_chosen_device();
    offloadMallocHost(&memory, size); // pinned host memory
#else
    memory = dbm_mpi_alloc_mem(size);
#endif
  }
#else
  memory = dbm_mpi_alloc_mem(size);
#endif
  assert(memory != NULL);
/* Private routine for actually freeing system memory. */
static void actual_free(const void *memory, bool on_device) {
  if (NULL != memory) {
    void *mem = (void *)(uintptr_t)memory;
#if DBM_MEMPOOL_OFFLOAD_ENABLED
    if (on_device) {
      offload_activate_chosen_device();
      offloadFree(mem); // device memory
    } else {
#if DBM_ALLOC_OFFLOAD
      offload_activate_chosen_device();
      offloadFreeHost(mem); // pinned host memory
#else
      dbm_mpi_free_mem(mem);
#endif
    }
#else
    (void)on_device; // mark used
    dbm_mpi_free_mem(mem);
#endif
  }
}
/* internal_mempool_malloc(): private routine for allocating host or device
 * memory from the pool. */
#if DBM_MEMPOOL_DEVICE_ENABLED || DBM_MEMPOOL_HOST_ENABLED
static void *internal_mempool_malloc(dbm_memchunk_t **available_head,
                                     dbm_memchunk_t **allocated_head,
                                     size_t size) {
  dbm_memchunk_t *chunk = NULL;

#if DBM_MEMPOOL_DEVICE_ENABLED
  const bool on_device = (&mempool_device_available_head == available_head);
#else
  const bool on_device = false;
#endif
#if DBM_MEMPOOL_HOST_ENABLED
  assert(on_device || &mempool_host_available_head == available_head);
  assert(on_device || &mempool_host_allocated_head == allocated_head);
#endif

#pragma omp critical(dbm_mempool_modify)
  {
    // Search the available list: reuse the smallest chunk that is large
    // enough; otherwise remember the largest chunk as reclaim candidate.
    dbm_memchunk_t **reuse = NULL, **reclaim = NULL;
    for (; NULL != *available_head;
         available_head = &(*available_head)->next) {
      const size_t s = (*available_head)->size;
      if (size <= s && (NULL == reuse || s < (*reuse)->size)) {
        reuse = available_head;
        if (size == (*reuse)->size) {
          break; // exact match
        }
      } else if (NULL == reclaim || s > (*reclaim)->size) {
        reclaim = available_head;
      }
    }
    if (NULL == reuse) {
      reuse = reclaim;
    }

    // Unlink the chosen chunk from the available list, or create a new one.
    if (NULL != reuse) {
      chunk = *reuse;
      *reuse = chunk->next;
    } else {
      chunk = calloc(1, sizeof(dbm_memchunk_t));
      assert(chunk != NULL);
    }

    // Insert the chunk into the allocated list.
    chunk->next = *allocated_head;
    *allocated_head = chunk;
  }

  // Grow the chunk's buffer if it is too small for the request.
  if (chunk->size < size) {
    actual_free(chunk->mem, on_device);
    chunk->mem = actual_malloc(size, on_device);
    chunk->size = size;
  }

  void *memory = chunk->mem;
  return memory;
}
#endif
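/* Note: the malloc, free, clear, and statistics routines in this file all
 * serialize on the same named OpenMP critical section, dbm_mempool_modify,
 * so the pool can be used concurrently from multiple OpenMP threads. */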
/* Internal routines for allocating host and device memory from the pool. */
void *dbm_mempool_host_malloc(size_t size) {
#if DBM_MEMPOOL_HOST_ENABLED
  return internal_mempool_malloc(&mempool_host_available_head,
                                 &mempool_host_allocated_head, size);
#else
  return actual_malloc(size, false); // no host pool: allocate directly
#endif
}

void *dbm_mempool_device_malloc(size_t size) {
#if DBM_MEMPOOL_DEVICE_ENABLED
  return internal_mempool_malloc(&mempool_device_available_head,
                                 &mempool_device_allocated_head, size);
#elif DBM_MEMPOOL_DEVICE
  return dbm_mempool_host_malloc(size); // without offload, device memory is host memory
#else
  return actual_malloc(size, true); // no device pool: allocate directly
#endif
}
/* internal_mempool_free(): private routine for releasing memory back to the
 * pool. */
#if DBM_MEMPOOL_DEVICE_ENABLED || DBM_MEMPOOL_HOST_ENABLED
static void internal_mempool_free(dbm_memchunk_t **available_head,
                                  dbm_memchunk_t **allocated_head,
                                  const void *mem) {
  if (NULL != mem) {
#pragma omp critical(dbm_mempool_modify)
    {
      // Find the chunk that owns this pointer in the allocated list.
      while (NULL != *allocated_head && (*allocated_head)->mem != mem) {
        allocated_head = &(*allocated_head)->next;
      }
      dbm_memchunk_t *chunk = *allocated_head;
      assert(NULL != chunk && chunk->mem == mem);

      // Unlink the chunk from the allocated list ...
      *allocated_head = chunk->next;

      // ... and prepend it to the available list.
      chunk->next = *available_head;
      *available_head = chunk;
    }
  }
}
#endif
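/* Note: internal_mempool_free() does not return memory to the system. It only
 * moves the chunk from the allocated list back to the available list; the
 * underlying buffers are released for real by dbm_mempool_clear(). */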
/* Internal routines for releasing memory back to the pool. */
void dbm_mempool_host_free(const void *memory) {
#if DBM_MEMPOOL_HOST_ENABLED
  internal_mempool_free(&mempool_host_available_head,
                        &mempool_host_allocated_head, memory);
#else
  actual_free(memory, false);
#endif
}

void dbm_mempool_device_free(const void *memory) {
#if DBM_MEMPOOL_DEVICE_ENABLED
  internal_mempool_free(&mempool_device_available_head,
                        &mempool_device_allocated_head, memory);
#elif DBM_MEMPOOL_DEVICE
  dbm_mempool_host_free(memory); // without offload, device memory is host memory
#else
  actual_free(memory, true);
#endif
}
/* internal_mempool_clear(): private routine for freeing all memory in the
 * pool. */
#if DBM_MEMPOOL_DEVICE_ENABLED || DBM_MEMPOOL_HOST_ENABLED
static void internal_mempool_clear(dbm_memchunk_t **available_head) {
#if DBM_MEMPOOL_DEVICE_ENABLED
  const bool on_device = (&mempool_device_available_head == available_head);
#else
  const bool on_device = false;
#endif
#if DBM_MEMPOOL_HOST_ENABLED
  assert(on_device || &mempool_host_available_head == available_head);
#endif

  // Release every chunk in the available list together with its buffer.
  while (NULL != *available_head) {
    dbm_memchunk_t *chunk = *available_head;
    *available_head = chunk->next; // unlink
    actual_free(chunk->mem, on_device);
    free(chunk);
  }
}
#endif
/* Internal routine for freeing all memory in the pool. */
void dbm_mempool_clear(void) {
#pragma omp critical(dbm_mempool_modify)
  {
#if DBM_MEMPOOL_DEVICE_ENABLED
    assert(mempool_device_allocated_head == NULL); // no leaked device chunks
    internal_mempool_clear(&mempool_device_available_head);
#endif
#if DBM_MEMPOOL_HOST_ENABLED
    assert(mempool_host_allocated_head == NULL); // no leaked host chunks
    internal_mempool_clear(&mempool_host_available_head);
#endif
  }
}
/* Internal routine to query statistics. */
void dbm_mempool_statistics(dbm_memstats_t *memstats) {
  assert(NULL != memstats);
#pragma omp critical(dbm_mempool_modify)
  {
#if DBM_MEMPOOL_DEVICE_ENABLED
    for (dbm_memchunk_t *chunk = mempool_device_available_head; NULL != chunk;
         chunk = chunk->next) {
      // accumulate this chunk's size into the device statistics
    }
    for (dbm_memchunk_t *chunk = mempool_device_allocated_head; NULL != chunk;
         chunk = chunk->next) {
      // accumulate this chunk's size into the device statistics
    }
#endif
#if DBM_MEMPOOL_HOST_ENABLED
    for (dbm_memchunk_t *chunk = mempool_host_available_head; NULL != chunk;
         chunk = chunk->next) {
      // accumulate this chunk's size into the host statistics
    }
    for (dbm_memchunk_t *chunk = mempool_host_allocated_head; NULL != chunk;
         chunk = chunk->next) {
      // accumulate this chunk's size into the host statistics
    }
#endif
  }
}
Symbols referenced in this listing:

static void *actual_malloc(size_t size, bool on_device)
    Private routine for actually allocating system memory.
static void actual_free(const void *memory, bool on_device)
    Private routine for actually freeing system memory.
void *dbm_mempool_host_malloc(size_t size)
    Internal routine for allocating host memory from the pool.
void *dbm_mempool_device_malloc(size_t size)
    Internal routine for allocating device memory from the pool.
void dbm_mempool_host_free(const void *memory)
    Internal routine for releasing memory back to the pool.
void dbm_mempool_device_free(const void *memory)
    Internal routine for releasing memory back to the pool.
void dbm_mempool_clear(void)
    Internal routine for freeing all memory in the pool.
void dbm_mempool_statistics(dbm_memstats_t *memstats)
    Internal routine to query statistics.
void *dbm_mpi_alloc_mem(size_t size)
    Wrapper around MPI_Alloc_mem.
void dbm_mpi_free_mem(void *mem)
    Wrapper around MPI_Free_mem.
struct dbm_memchunk (typedef dbm_memchunk_t), with member struct dbm_memchunk *next
    Private struct for storing a chunk of memory.
static dbm_memstats_t mempool_stats
    Private statistics.
dbm_memstats_t
    Internal struct for pool statistics.
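For orientation, here is a minimal usage sketch of the pool interface listed
above. Only the dbm_mempool_* functions and the dbm_memstats_t type are taken
from this listing; the surrounding example function, the buffer size, and the
assumption that the interface is declared in "dbm_mempool.h" are illustrative.

#include <assert.h>
#include <stddef.h>
#include "dbm_mempool.h" // assumed location of the dbm_mempool_* declarations

void example_buffer_roundtrip(void) {
  // Obtain a host buffer and a device buffer from the pools.
  double *host_buf = dbm_mempool_host_malloc(1024 * sizeof(double));
  void *dev_buf = dbm_mempool_device_malloc(1024 * sizeof(double));
  assert(NULL != host_buf && NULL != dev_buf);

  // ... fill host_buf and transfer it to dev_buf; dev_buf may point to
  // device memory and must not be dereferenced on the host ...

  // Return both buffers; the chunks stay cached in the pool for reuse.
  dbm_mempool_device_free(dev_buf);
  dbm_mempool_host_free(host_buf);

  // Query the accumulated pool statistics.
  dbm_memstats_t stats;
  dbm_mempool_statistics(&stats);

  // At shutdown, with no outstanding allocations, release all cached chunks.
  dbm_mempool_clear();
}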