// From Allocate(uint64_t size, void *header, unsigned header_size):
assert(header_size <= size);
uint64_t rounded_size = RoundUp8(size);
int64_t real_size = rounded_size + sizeof(Tag);
if (gauge_ + real_size > capacity_)
  return NULL;

unsigned char *new_block = heap_ + gauge_;
new (new_block) Tag(rounded_size);       // place the size tag in front of the payload
new_block += sizeof(Tag);
memcpy(new_block, header, header_size);  // pre-fill the new block with the caller's header
gauge_ += real_size;
stored_ += rounded_size;
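The Tag header itself is not part of this excerpt. Judging only from the calls that appear above and below (Tag(rounded_size), IsFree(), GetSize(), GetBlock(), and the signed size field), a block header along the following lines would fit; the exact definition, and the JumpToNext() helper in particular, are assumptions, not the actual code.

// Sketch of a plausible Tag, inferred from its uses in this excerpt
// (requires <stdint.h> for int64_t).  A negative size marks a free block;
// the payload follows the tag directly.
struct Tag {
  explicit Tag(int64_t s) : size(s) { }
  bool IsFree() const { return size < 0; }
  int64_t GetSize() const { return (size < 0) ? -size : size; }
  unsigned char *GetBlock() {
    return reinterpret_cast<unsigned char *>(this) + sizeof(Tag);
  }
  Tag *JumpToNext() {  // assumed helper: skip over this tag and its payload
    return reinterpret_cast<Tag *>(GetBlock() + GetSize());
  }
  int64_t size;  // payload size in bytes, negated while the block is free
};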
// Compaction pass: slide a window of two tags over the heap and move
// reserved blocks down over free blocks.
Tag *heap_top = reinterpret_cast<Tag *>(heap_ + gauge_);  // top address, not a real tag
Tag *current_tag = reinterpret_cast<Tag *>(heap_);
// ... (next_tag starts at the tag right behind current_tag)
while (next_tag < heap_top) {
  if (current_tag->IsFree()) {
    // ...
    int64_t free_space = current_tag->size;
    // ... (the reserved block behind it is moved down over the free space and
    // the owner is notified of the new location through callback_ptr_; the
    // freed space is re-tagged behind the moved block)
    next_tag->size = free_space;
    // ...
  } else {
    current_tag = next_tag;
    // ...
  }
}

gauge_ = (reinterpret_cast<unsigned char *>(current_tag) - heap_);
if (!current_tag->IsFree())
  gauge_ += sizeof(Tag) + current_tag->GetSize();
// From Expand(void *block, uint64_t new_size):
uint64_t old_size = GetSize(block);
assert(old_size <= new_size);
// Allocate() copies the old block's contents into the new, larger block
void *new_block = Allocate(new_size, block, old_size);
if (new_block != NULL)
  MarkFree(block);
// From HasSpaceFor(uint64_t nbytes):
return RoundUp8(gauge_ + nbytes + sizeof(Tag)) <= capacity_;
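RoundUp8() is used by Allocate() and HasSpaceFor(), but only its signature appears in the list below; its body is not part of this excerpt. The usual bit trick for rounding up to the next multiple of eight would match that signature:

// Assumed body: round up to the next multiple of 8 (keeps blocks 8-byte aligned)
static uint64_t RoundUp8(const uint64_t size) {
  return (size + 7) & ~static_cast<uint64_t>(7);
}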
// Both GetSize(void *block) and MarkFree(void *block) first recover the tag
// that sits immediately in front of the payload:
Tag *tag = reinterpret_cast<Tag *>(block) - 1;
// From the constructor MallocHeap(uint64_t capacity, CallbackPtr callback_ptr):
: callback_ptr_(callback_ptr)
, capacity_(capacity)
// ...
heap_ = reinterpret_cast<unsigned char *>(sxmmap(capacity));  // the arena is mapped in one piece
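sxmmap() is not defined in this excerpt. Given its use here and the out-of-memory assertion listed below, it behaves like an mmap() wrapper that must not fail; a stand-in for experimenting with the code (name and behaviour assumed) could be:

#include <sys/mman.h>
#include <cassert>
#include <cstddef>

// Assumed stand-in for sxmmap(): anonymous read/write mapping, abort on failure
static void *sxmmap(size_t size) {
  void *mem = mmap(NULL, size, PROT_READ | PROT_WRITE,
                   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  assert((mem != MAP_FAILED) && "Out Of Memory");
  return mem;
}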
Declarations and code referenced by the fragments above:

static const unsigned kMinCapacity
void *Allocate(uint64_t size, void *header, unsigned header_size)
MallocHeap(uint64_t capacity, CallbackPtr callback_ptr)
assert((mem || (size == 0)) && "Out Of Memory")
void *Expand(void *block, uint64_t new_size)
uint64_t GetSize(void *block)
Callbackable<BlockPtr>::CallbackTN *CallbackPtr
void MarkFree(void *block)
static uint64_t RoundUp8(const uint64_t size)
bool HasSpaceFor(uint64_t nbytes)
unsigned char *GetBlock()
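To show how the members listed above fit together, here is a small, hypothetical usage sketch. Building the CallbackPtr depends on the Callbackable<> machinery, which is outside this excerpt, so the callback is taken as a parameter; Compact() is the name assumed here for the compaction pass excerpted further up.

#include <cassert>
// plus the header that declares MallocHeap (not named in this excerpt)

// Hypothetical walk-through of the allocation lifecycle.  The callback is
// invoked with the new BlockPtr of every block that the compaction pass moves.
void Demo(MallocHeap::CallbackPtr callback) {
  MallocHeap heap(1024 * 1024, callback);   // 1 MiB arena, mapped up front

  unsigned char header[8] = {0};
  if (!heap.HasSpaceFor(100))
    return;

  void *block = heap.Allocate(100, header, sizeof(header));
  assert(heap.GetSize(block) >= 100);       // sizes are rounded up to 8 bytes

  // Expand() copies the old payload into a larger block and, on success,
  // marks the old block free
  void *bigger = heap.Expand(block, 200);
  if (bigger != NULL)
    block = bigger;

  heap.MarkFree(block);  // leaves a hole in the arena ...
  heap.Compact();        // ... which the compaction pass squeezes out
}

The design visible in the fragments is a simple bump allocator: MarkFree() only tags a block as free, and actual space is reclaimed by the compaction pass, which is why the owner must be able to re-point its references through the callback.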