7#include <botan/internal/mem_pool.h>
9#include <botan/mem_ops.h>
10#include <botan/internal/target_info.h>
13#if defined(BOTAN_HAS_VALGRIND) || defined(BOTAN_ENABLE_DEBUG_ASSERTS)
27 #define BOTAN_MEM_POOL_USE_MMU_PROTECTIONS
30#if defined(BOTAN_MEM_POOL_USE_MMU_PROTECTIONS) && defined(BOTAN_HAS_OS_UTILS)
31 #include <botan/internal/os_utils.h>
// Maps a requested allocation size n to the pool's bucket size class.
// Sizes outside [MINIMUM_ALLOCATION, MAXIMUM_ALLOCATION] are not served
// from the pool (the early-out branch below; its return value is not
// visible in this extract -- presumably 0, meaning "no bucket").
// NOTE(review): this extract is incomplete -- the bucket size table
// entries and the return statements are missing from the visible source.
110size_t choose_bucket(
size_t n) {
111   const size_t MINIMUM_ALLOCATION = 16;
112   const size_t MAXIMUM_ALLOCATION = 256;
114   if(n < MINIMUM_ALLOCATION || n > MAXIMUM_ALLOCATION) {
120   const size_t buckets[] = {
// The table is zero-terminated: the loop stops at the first 0 entry.
136   for(
size_t i = 0; buckets[i]; ++i) {
// First bucket large enough to hold n wins (entries presumably ascend --
// TODO confirm; the table values themselves are not visible here).
137      if(n <= buckets[i]) {
/*
* Returns true iff the byte range [buf_ptr, buf_ptr + bufsize) lies
* entirely inside the pool range [pool_ptr, pool_ptr + poolsize).
* The addresses are compared via their integer representations, since
* relational comparison of unrelated raw pointers is not specified.
*/
inline bool ptr_in_pool(const void* pool_ptr, size_t poolsize, const void* buf_ptr, size_t bufsize) {
   const auto pool_start = reinterpret_cast<uintptr_t>(pool_ptr);
   const auto pool_end = pool_start + poolsize;

   const auto buf_start = reinterpret_cast<uintptr_t>(buf_ptr);
   const auto buf_end = buf_start + bufsize;

   return pool_start <= buf_start && buf_end <= pool_end;
}
// Locates the index of a set bit in b by binary search over the bit width.
// NOTE(review): the template header (type parameter T) and the rest of the
// search loop are missing from this extract; presumably s is halved each
// round and b shifted down whenever its low half is empty.
153size_t find_set_bit(T b) {
// Start at half of T's width in bits (sizeof(T) * 8 / 2).
154   size_t s = 8 *
sizeof(T) / 2;
// Mask covering the low s bits of b.
159      const T mask = (
static_cast<T
>(1) << s) - 1;
// If no set bit lives in the low half, the answer is in the high half.
160      if((b & mask) == 0) {
// Tracks which item slots of a bucket are in use, one bit per slot.
// Constructor: sizes the backing word vector for `bits` bits and computes
// the comparison masks used by find_free().
172   explicit BitMap(
size_t bits) : m_len(bits) {
// Round up to whole bitmask words.
173      m_bits.resize((bits + BITMASK_BITS - 1) / BITMASK_BITS);
// m_main_mask covers every bit of a full word ...
175      m_main_mask =
static_cast<bitmask_type
>(~0);
176      m_last_mask = m_main_mask;
// ... while m_last_mask covers only the bits actually in use in the
// final, possibly partial, word.
178      if(bits % BITMASK_BITS != 0) {
179         m_last_mask = (
static_cast<bitmask_type
>(1) << (bits % BITMASK_BITS)) - 1;
// Finds a clear bit, claims it, and reports its index via *bit;
// defined out of line below.
183   bool find_free(
size_t* bit);
// Marks a previously-claimed bit as free again (clears it).
185   void free(
size_t bit) {
// Word index holding this bit.
187      const size_t w = bit / BITMASK_BITS;
// Single-bit mask for the bit's position within that word.
189      const bitmask_type mask =
static_cast<bitmask_type
>(1) << (bit % BITMASK_BITS);
190      m_bits[w] = m_bits[w] & (~mask);
// NOTE(review): the loop below appears to belong to empty() (checking no
// word has any bit set); its enclosing signature is missing from this
// extract. Iterating by value copies each word, which is cheap here.
194      for(
auto bitset : m_bits) {
// In debug-assert builds use a deliberately tiny 8-bit word so the
// multi-word code paths are exercised; otherwise use the machine word.
204#if defined(BOTAN_ENABLE_DEBUG_ASSERTS)
205      using bitmask_type = uint8_t;
207      using bitmask_type =
word;
210      static const size_t BITMASK_BITS =
sizeof(bitmask_type) * 8;
// Masks for full words / for the trailing partial word (see constructor).
213      bitmask_type m_main_mask;
214      bitmask_type m_last_mask;
// One bit per item slot, packed into words.
215      std::vector<bitmask_type> m_bits;
// Scans word by word for a word with at least one clear (free) bit, using
// m_last_mask on the final word so unused padding bits never look free.
// On success the bit is claimed and its global index written to *bit.
// NOTE(review): the claiming store (presumably `m_bits[i] |= bmask;`) and
// the return statements are missing from this extract.
218bool BitMap::find_free(
size_t* bit) {
219   for(
size_t i = 0; i != m_bits.size(); ++i) {
220      const bitmask_type mask = (i == m_bits.size() - 1) ? m_last_mask : m_main_mask;
221      if((m_bits[i] & mask) != mask) {
// A clear bit in m_bits[i] is a set bit in its complement.
222         const size_t free_bit = find_set_bit(~m_bits[i]);
223         const bitmask_type bmask =
static_cast<bitmask_type
>(1) << (free_bit % BITMASK_BITS);
226         *bit = BITMASK_BITS * i + free_bit;
// A Bucket carves one page into fixed-size items and tracks their use
// with a BitMap. Constructor fragment: the initialization of m_range (and
// any remaining members) is missing from this extract.
238   Bucket(uint8_t* mem,
size_t mem_size,
size_t item_size) :
239         m_item_size(item_size),
240         m_page_size(mem_size),
// One bitmap bit per item that fits in the page.
242         m_bitmap(mem_size / item_size),
// alloc() fragment: ask the bitmap for a free slot; on failure the bucket
// is full (the surrounding signature and early return are not visible).
252      if(!m_bitmap.find_free(&offset)) {
258      BOTAN_ASSERT(offset * m_item_size < m_page_size,
"Offset is in range");
// Convert the claimed slot index back into a pointer within the page.
259      return m_range + m_item_size * offset;
// free() fragment: reject pointers that do not belong to this bucket's
// page, scrub the item, then release its bitmap slot.
263      if(!in_this_bucket(p)) {
// Zero the freed item so stale data does not linger in the pool.
// NOTE(review): plain memset may be elided by optimizers in some
// contexts -- confirm a secure-scrub helper is not needed here.
271      std::memset(p, 0, m_item_size);
// Recover the slot index from the pointer offset within the page.
273      const size_t offset = (
reinterpret_cast<uintptr_t
>(p) -
reinterpret_cast<uintptr_t
>(m_range)) / m_item_size;
275      m_bitmap.free(offset);
// True iff p lies within this bucket's page with room for a whole item.
281   bool in_this_bucket(
void* p)
const {
return ptr_in_pool(m_range, m_page_size, p, m_item_size); }
// True iff no item of this bucket is currently allocated.
283   bool empty()
const {
return m_bitmap.empty(); }
// Start of the backing page.
285   uint8_t* ptr()
const {
return m_range; }
// Memory_Pool constructor fragment: records the page address span
// [min, max) used by deallocate() for a fast "is this ours?" test, and
// queues every page as free. With MMU protections enabled, the guarded
// lines (not visible here) presumably prohibit access to each page until
// it is handed out.
296   m_min_page_ptr = ~static_cast<uintptr_t>(0);
299   for(
auto page : pages) {
300      const uintptr_t p =
reinterpret_cast<uintptr_t
>(page);
302      m_min_page_ptr = std::min(p, m_min_page_ptr);
303      m_max_page_ptr = std::max(p, m_max_page_ptr);
306#if defined(BOTAN_MEM_POOL_USE_MMU_PROTECTIONS)
309      m_free_pages.push_back(
static_cast<uint8_t*
>(page));
// Make the recorded maximum exclusive: one past the end of the last page.
316   m_max_page_ptr += page_size;
// Destructor fragment: with MMU protections on, walk the free pages --
// presumably to re-enable access before the pages are handed back to the
// OS (the loop body is not visible in this extract).
321#if defined(BOTAN_MEM_POOL_USE_MMU_PROTECTIONS)
322   for(
size_t i = 0; i != m_free_pages.size(); ++i) {
// allocate() fragment: requests larger than a page cannot be pooled
// (early out; the return is not visible). Otherwise pick the size class,
// try the existing buckets for that class, and if all are full seed a new
// bucket from a free page.
329   if(n > m_page_size) {
333   const size_t n_bucket = choose_bucket(n);
// NOTE(review): a lock presumably guards m_buckets_for/m_free_pages here;
// it is not visible in this extract.
338      std::deque<Bucket>& buckets = m_buckets_for[n_bucket];
// First-fit over the buckets already serving this size class.
346      for(
auto& bucket : buckets) {
347         if(uint8_t* p = bucket.alloc()) {
// Every existing bucket was full: take a free page, (under MMU
// protections) re-allow access to it, and front-insert a fresh bucket so
// it is found first by the next allocation.
355      if(!m_free_pages.empty()) {
356         uint8_t* ptr = m_free_pages[0];
357         m_free_pages.pop_front();
358#if defined(BOTAN_MEM_POOL_USE_MMU_PROTECTIONS)
361         buckets.push_front(Bucket(ptr, m_page_size, n_bucket));
362         void* p = buckets[0].alloc();
// deallocate() fragment: cheap range check first -- a pointer outside the
// pool's page span was never ours, so return false (not visible) and let
// the caller free it elsewhere. Otherwise return the item to its bucket;
// a bucket that becomes empty gives its page back to the free list (under
// MMU protections, presumably re-protected first).
374   const uintptr_t p_val =
reinterpret_cast<uintptr_t
>(p);
375   if(p_val < m_min_page_ptr || p_val > m_max_page_ptr) {
// len must map to the same size class the allocation used.
379   const size_t n_bucket = choose_bucket(len);
385      std::deque<Bucket>& buckets = m_buckets_for[n_bucket];
387      for(
size_t i = 0; i != buckets.size(); ++i) {
388         Bucket& bucket = buckets[i];
391#if defined(BOTAN_MEM_POOL_USE_MMU_PROTECTIONS)
394            m_free_pages.push_back(bucket.ptr());
// Keep the deque compact: swap the now-empty bucket to the back before it
// is (presumably) popped -- the pop itself is not visible in this extract.
396            if(i != buckets.size() - 1) {
397               std::swap(buckets.back(), buckets[i]);
#define BOTAN_ASSERT_NOMSG(expr)
#define BOTAN_ASSERT(expr, assertion_made)
Memory_Pool(const std::vector< void * > &pages, size_t page_size)
bool deallocate(void *p, size_t size) noexcept
void * allocate(size_t size)
void page_allow_access(void *page)
void page_prohibit_access(void *page)
secure_vector< T > lock(const std::vector< T > &in)
constexpr void clear_bytes(void *ptr, size_t bytes)
lock_guard< T > lock_guard_type
std::conditional_t< HasNative64BitRegisters, std::uint64_t, uint32_t > word