#include <cstddef>
#include <cstdint>
#include <memory>
#include <optional>

#if __has_include(<numa.h>)

#include <numa.h>
#include <sched.h>

using node_t = int;

// True when libnuma reports that NUMA support is usable on this system.
inline bool enabled() {
  return numa_available() >= 0;
}

// RAII wrapper around libnuma's node bitmask
// (numa_allocate_nodemask() / numa_free_nodemask()).
class node_bitmask {
public:
  node_bitmask() : bitmask_(alloc_node_bitmask()) {}

  // Valid only when NUMA is enabled, i.e., when bitmask_ holds a value.
  struct bitmask* get() const { return bitmask_->get(); }

  void setbit(node_t node) {
    numa_bitmask_setbit(get(), node);
  }

  void clear() {
    numa_bitmask_clearall(get());
  }

private:
  using bitmask_holder_t = std::unique_ptr<struct bitmask, void(*)(struct bitmask*)>;

  std::optional<bitmask_holder_t> alloc_node_bitmask() const {
    if (!enabled()) return std::nullopt;
    return bitmask_holder_t{numa_allocate_nodemask(), numa_free_nodemask};
  }

  std::optional<bitmask_holder_t> bitmask_;
};

// NUMA node of the CPU the calling thread is currently running on.
inline node_t get_current_node() {
  return numa_node_of_cpu(sched_getcpu());
}

// Binds [addr, addr + size) to the given node; addr must be page-aligned.
// ITYR_CHECK() is the assertion macro defined in util.hpp.
inline void bind_to(void* addr, std::size_t size, node_t node) {
  ITYR_CHECK(reinterpret_cast<uintptr_t>(addr) % numa_pagesize() == 0);
  numa_tonode_memory(addr, size, node);
}

// Interleaves the pages of [addr, addr + size) across the nodes set in nodemask.
inline void interleave(void* addr, std::size_t size, const node_bitmask& nodemask) {
  ITYR_CHECK(reinterpret_cast<uintptr_t>(addr) % numa_pagesize() == 0);
  numa_interleave_memory(addr, size, nodemask.get());
}
#else

// Fallback definitions used when <numa.h> is not available: NUMA support is
// reported as disabled and every operation is a no-op stub.

using node_t = int;

class node_bitmask {
public:
  node_bitmask() {}
  void* get() const { return nullptr; }
  void setbit(node_t) {}
  void clear() {}
};

inline bool enabled() { return false; }
inline node_t get_current_node() { return 0; }
inline void bind_to(void*, std::size_t, node_t) {}
inline void interleave(void*, std::size_t, const node_bitmask&) {}

#endif
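A minimal usage sketch, not part of numa.hpp: it assumes the header above is included and its helpers are visible unqualified (the enclosing namespace is not shown in this listing). mmap() is used only as one convenient way to obtain the page-aligned memory that bind_to() and interleave() require, and numa_max_node() is a standard libnuma call brought in solely for this example.

// Usage sketch (illustrative only): place one region on the caller's node and
// interleave another across all nodes reported by libnuma.
#include <sys/mman.h>

int main() {
  if (!enabled()) return 0;                  // libnuma reports NUMA as unusable

  std::size_t len = 16 * numa_pagesize();    // mmap() returns page-aligned memory,
  void* local = mmap(nullptr, len,           // satisfying the alignment checks
                     PROT_READ | PROT_WRITE, // in bind_to() / interleave()
                     MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  if (local == MAP_FAILED) return 1;
  bind_to(local, len, get_current_node());   // bind to the calling thread's node

  void* spread = mmap(nullptr, len, PROT_READ | PROT_WRITE,
                      MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  if (spread == MAP_FAILED) return 1;

  node_bitmask mask;                         // select every node known to libnuma
  mask.clear();
  for (node_t n = 0; n <= numa_max_node(); n++) {
    mask.setbit(n);
  }
  interleave(spread, len, mask);             // round-robin pages across the mask

  munmap(local, len);
  munmap(spread, len);
  return 0;
}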