16#include "../offload/offload_library.h"
17#include "../offload/offload_runtime.h"
22#if defined(__OFFLOAD) && !defined(__NO_OFFLOAD_DBM)
23#define DBM_MEMPOOL_OFFLOAD_ENABLED 1
25#define DBM_MEMPOOL_OFFLOAD_ENABLED 0
27#define DBM_MEMPOOL_DEVICE_ENABLED \
28 (DBM_MEMPOOL_DEVICE && DBM_MEMPOOL_OFFLOAD_ENABLED)
29#define DBM_MEMPOOL_HOST_ENABLED \
30 ((DBM_MEMPOOL_HOST && DBM_ALLOC_OFFLOAD && DBM_MEMPOOL_OFFLOAD_ENABLED) || \
31 (1 < DBM_MEMPOOL_HOST))
47#if DBM_MEMPOOL_DEVICE_ENABLED
56#if DBM_MEMPOOL_HOST_ENABLED
75#if DBM_MEMPOOL_OFFLOAD_ENABLED
77 offload_activate_chosen_device();
78 offloadMalloc(&memory,
size);
81 offload_activate_chosen_device();
82 offloadMallocHost(&memory,
size);
91 assert(memory != NULL);
111 if (NULL != memory) {
112 void *
mem = (
void *)(uintptr_t)memory;
113#if DBM_MEMPOOL_OFFLOAD_ENABLED
115 offload_activate_chosen_device();
119 offload_activate_chosen_device();
120 offloadFreeHost(
mem);
136#if DBM_MEMPOOL_DEVICE_ENABLED || DBM_MEMPOOL_HOST_ENABLED
137static void *internal_mempool_malloc(
dbm_memchunk_t **available_head,
145#if DBM_MEMPOOL_DEVICE_ENABLED
146 const bool on_device = (&mempool_device_available_head == available_head);
148 const bool on_device =
false;
150#if DBM_MEMPOOL_HOST_ENABLED
151 assert(on_device || &mempool_host_available_head == available_head);
152 assert(on_device || &mempool_host_allocated_head == allocated_head);
155#pragma omp critical(dbm_mempool_modify)
159 for (; NULL != *available_head; available_head = &(*available_head)->
next) {
160 const size_t s = (*available_head)->
size;
161 if (
size <= s && (NULL == reuse || s < (*reuse)->size)) {
162 reuse = available_head;
163 if (
size == (*reuse)->size) {
166 }
else if (NULL == reclaim || s > (*reclaim)->size) {
167 reclaim = available_head;
177 *reuse = chunk->
next;
180 assert(chunk != NULL);
184 chunk->
next = *allocated_head;
185 *allocated_head = chunk;
190 void *memory = chunk->
mem;
207#if DBM_MEMPOOL_HOST_ENABLED
208 return internal_mempool_malloc(&mempool_host_available_head,
209 &mempool_host_allocated_head,
size);
220#if DBM_MEMPOOL_DEVICE_ENABLED
221 return internal_mempool_malloc(&mempool_device_available_head,
222 &mempool_device_allocated_head,
size);
223#elif DBM_MEMPOOL_DEVICE
234#if DBM_MEMPOOL_DEVICE_ENABLED || DBM_MEMPOOL_HOST_ENABLED
239#pragma omp critical(dbm_mempool_modify)
242 while (NULL != *allocated_head && (*allocated_head)->mem !=
mem) {
243 allocated_head = &(*allocated_head)->
next;
246 assert(NULL != chunk && chunk->
mem ==
mem);
249 *allocated_head = chunk->
next;
252 chunk->
next = *available_head;
253 *available_head = chunk;
264#if DBM_MEMPOOL_HOST_ENABLED
265 internal_mempool_free(&mempool_host_available_head,
266 &mempool_host_allocated_head, memory);
277#if DBM_MEMPOOL_DEVICE_ENABLED
278 internal_mempool_free(&mempool_device_available_head,
279 &mempool_device_allocated_head, memory);
280#elif DBM_MEMPOOL_DEVICE
291#if DBM_MEMPOOL_DEVICE_ENABLED || DBM_MEMPOOL_HOST_ENABLED
292static void internal_mempool_clear(
dbm_memchunk_t **available_head) {
293#if DBM_MEMPOOL_DEVICE_ENABLED
294 const bool on_device = (&mempool_device_available_head == available_head);
296 const bool on_device =
false;
298#if DBM_MEMPOOL_HOST_ENABLED
299 assert(on_device || &mempool_host_available_head == available_head);
303 while (NULL != *available_head) {
305 *available_head = chunk->
next;
317#pragma omp critical(dbm_mempool_modify)
319#if DBM_MEMPOOL_DEVICE_ENABLED
320 assert(mempool_device_allocated_head == NULL);
321 internal_mempool_clear(&mempool_device_available_head);
323#if DBM_MEMPOOL_HOST_ENABLED
324 assert(mempool_host_allocated_head == NULL);
325 internal_mempool_clear(&mempool_host_available_head);
335 assert(NULL != memstats);
337#pragma omp critical(dbm_mempool_modify)
339#if DBM_MEMPOOL_DEVICE_ENABLED
340 for (
dbm_memchunk_t *chunk = mempool_device_available_head; NULL != chunk;
341 chunk = chunk->
next) {
345 for (
dbm_memchunk_t *chunk = mempool_device_allocated_head; NULL != chunk;
346 chunk = chunk->
next) {
357#if DBM_MEMPOOL_HOST_ENABLED
358 for (
dbm_memchunk_t *chunk = mempool_host_available_head; NULL != chunk;
359 chunk = chunk->
next) {
363 for (
dbm_memchunk_t *chunk = mempool_host_allocated_head; NULL != chunk;
364 chunk = chunk->
next) {
static void * actual_malloc(size_t size, bool on_device)
Private routine for actually allocating system memory.
void dbm_mempool_device_free(const void *memory)
Internal routine for releasing memory back to the pool.
void dbm_mempool_statistics(dbm_memstats_t *memstats)
Internal routine to query statistics.
void dbm_mempool_host_free(const void *memory)
Internal routine for releasing memory back to the pool.
void dbm_mempool_clear(void)
Internal routine for freeing all memory in the pool.
struct dbm_memchunk dbm_memchunk_t
Private struct for storing a chunk of memory.
static dbm_memstats_t mempool_stats
Private statistics of the memory pool.
void * dbm_mempool_host_malloc(size_t size)
Internal routine for allocating host memory from the pool.
static void actual_free(const void *memory, bool on_device)
Private routine for actually freeing system memory.
void * dbm_mempool_device_malloc(size_t size)
Internal routine for allocating device memory from the pool.
void dbm_mpi_free_mem(void *mem)
Wrapper around MPI_Free_mem.
void * dbm_mpi_alloc_mem(size_t size)
Wrapper around MPI_Alloc_mem.
Private struct for storing a chunk of memory.
struct dbm_memchunk * next
Internal struct for pool statistics.