mirror of https://github.com/neovim/neovim.git
fix(memory): fix memory alignment for dynamic allocation

All pointers returned by arena_alloc that reside in an arena block should be
properly aligned. To meet neovim's alignment requirements while keeping it
simple, settle on ARENA_ALIGN = MAX(sizeof(void *), sizeof(double)).

(cherry picked from commit 0240fd6d0f)
commit 9c32331904
parent b994deb0aa
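Note (illustrative, not part of the patch): the constant the message settles on can be checked in isolation. A minimal sketch, with a local stand-in for the MAX macro:

#include <stdio.h>

#define MAX(a, b) ((a) > (b) ? (a) : (b))   // stand-in for nvim's MAX
#define ARENA_ALIGN MAX(sizeof(void *), sizeof(double))

int main(void)
{
  // on a typical 64-bit target all three print 8; taking the MAX keeps the
  // value at 8 even where pointers are only 4 bytes wide
  printf("sizeof(void *) = %zu\n", sizeof(void *));
  printf("sizeof(double) = %zu\n", sizeof(double));
  printf("ARENA_ALIGN    = %zu\n", ARENA_ALIGN);
  return 0;
}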
@@ -6,6 +6,7 @@
 #include <assert.h>
 #include <inttypes.h>
 #include <stdbool.h>
+#include <stdint.h>
 #include <string.h>

 #include "nvim/api/extmark.h"
@@ -576,6 +577,12 @@ void alloc_block(Arena *arena)
   blk->prev = prev_blk;
 }

+static size_t arena_align_offset(void *ptr, size_t alignment)
+{
+  uintptr_t uptr = (uintptr_t)ptr;
+  return ((uptr + (alignment - 1)) & ~(alignment - 1)) - uptr;
+}
+
 /// @param arena if NULL, do a global allocation. caller must then free the value!
 /// @param size if zero, will still return a non-null pointer, but not a unique one
 void *arena_alloc(Arena *arena, size_t size, bool align)
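Note (illustrative, not part of the patch): arena_align_offset returns the number of padding bytes needed to bring a pointer up to `alignment` (zero if it is already aligned), assuming `alignment` is a power of two. A small self-contained check of that contract, copying the helper as it appears above; the buffer and loop are only for demonstration:

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

static size_t arena_align_offset(void *ptr, size_t alignment)
{
  uintptr_t uptr = (uintptr_t)ptr;
  return ((uptr + (alignment - 1)) & ~(alignment - 1)) - uptr;
}

int main(void)
{
  char buf[64];
  for (size_t i = 0; i < 16; i++) {
    size_t pad = arena_align_offset(buf + i, 8);
    // the padded address is 8-byte aligned and at most 7 bytes past the input
    assert((uintptr_t)(buf + i + pad) % 8 == 0);
    assert(pad < 8);
  }
  return 0;
}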
@@ -583,34 +590,33 @@ void *arena_alloc(Arena *arena, size_t size, bool align)
   if (!arena) {
     return xmalloc(size);
   }
-  if (align) {
-    arena->pos = (arena->pos + (ARENA_ALIGN - 1)) & ~(ARENA_ALIGN - 1);
+  if (!arena->cur_blk) {
+    alloc_block(arena);
   }
-  if (arena->pos + size > arena->size || !arena->cur_blk) {
-    if (size > (ARENA_BLOCK_SIZE - sizeof(struct consumed_blk)) >> 1) {
+  size_t align_pos = align ? arena_align_offset(arena->cur_blk + arena->pos, ARENA_ALIGN) : 0;
+  if (arena->pos + align_pos + size > arena->size) {
+    if (size + (align ? (ARENA_ALIGN - 1) : 0) > (ARENA_BLOCK_SIZE - sizeof(struct consumed_blk))
+        >> 1) {
       // if allocation is too big, allocate a large block with the requested
       // size, but still with block pointer head. We do this even for
       // arena->size / 2, as there likely is space left for the next
       // small allocation in the current block.
       if (!arena->cur_blk) {
         // to simplify free-list management, arena->cur_blk must
         // always be a normal, ARENA_BLOCK_SIZE sized, block
         alloc_block(arena);
       }
       arena_alloc_count++;
-      char *alloc = xmalloc(size + sizeof(struct consumed_blk));
+      char *alloc = xmalloc(size + sizeof(struct consumed_blk) + (align ? (ARENA_ALIGN - 1) : 0));
       struct consumed_blk *cur_blk = (struct consumed_blk *)arena->cur_blk;
       struct consumed_blk *fix_blk = (struct consumed_blk *)alloc;
       fix_blk->prev = cur_blk->prev;
       cur_blk->prev = fix_blk;
-      return (alloc + sizeof(struct consumed_blk));
+      char *mem = (alloc + sizeof(struct consumed_blk));
+      return mem + (align ? arena_align_offset(mem, ARENA_ALIGN) : 0);
     } else {
       alloc_block(arena);
+      align_pos = align ? arena_align_offset(arena->cur_blk + arena->pos, ARENA_ALIGN) : 0;
     }
   }

-  char *mem = arena->cur_blk + arena->pos;
-  arena->pos += size;
+  char *mem = arena->cur_blk + arena->pos + align_pos;
+  arena->pos += (size + align_pos);
   return mem;
 }
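Note (illustrative, not part of the patch): in the oversized path above, the extra (ARENA_ALIGN - 1) bytes passed to xmalloc are what guarantee that the payload behind the struct consumed_blk header can be padded to an aligned address without overrunning the allocation. A simplified standalone sketch of that idea, using hypothetical names (big_alloc_aligned, align_offset) and plain malloc; unlike the patch, it pushes the new header onto the front of the list instead of threading it in under cur_blk:

#include <assert.h>
#include <stdint.h>
#include <stdlib.h>

#define MAX(a, b) ((a) > (b) ? (a) : (b))
#define ARENA_ALIGN MAX(sizeof(void *), sizeof(double))

struct consumed_blk { struct consumed_blk *prev; };

static size_t align_offset(void *ptr, size_t alignment)
{
  uintptr_t uptr = (uintptr_t)ptr;
  return ((uptr + (alignment - 1)) & ~(alignment - 1)) - uptr;
}

// reserve header + payload + worst-case padding, then align the payload
// (error handling omitted for brevity)
static void *big_alloc_aligned(size_t size, struct consumed_blk **head)
{
  char *alloc = malloc(size + sizeof(struct consumed_blk) + (ARENA_ALIGN - 1));
  struct consumed_blk *blk = (struct consumed_blk *)alloc;
  blk->prev = *head;  // chain the block so it can be freed later
  *head = blk;
  char *mem = alloc + sizeof(struct consumed_blk);
  return mem + align_offset(mem, ARENA_ALIGN);
}

int main(void)
{
  struct consumed_blk *head = NULL;
  void *p = big_alloc_aligned(4096 + 3, &head);
  assert((uintptr_t)p % ARENA_ALIGN == 0);  // payload is properly aligned
  free(head);  // the header is also the base of the malloc'd block
  return 0;
}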
@@ -45,7 +45,7 @@ typedef struct consumed_blk {
   struct consumed_blk *prev;
 } *ArenaMem;

-#define ARENA_ALIGN sizeof(void *)
+#define ARENA_ALIGN MAX(sizeof(void *), sizeof(double))

 typedef struct {
   char *cur_blk;