
Fix copy space compilation in debug mode

Also add copy_space_should_promote
Andy Wingo 2025-01-10 16:02:37 +01:00
parent 0318770266
commit e65c81518d


@@ -82,8 +82,7 @@ struct copy_space_slab {
 STATIC_ASSERT_EQ(sizeof(struct copy_space_slab), COPY_SPACE_SLAB_SIZE);
 
 static inline struct copy_space_block*
-copy_space_block_header(struct copy_space_block_payload *payload) {
-  uintptr_t addr = (uintptr_t) payload;
+copy_space_block_for_addr(uintptr_t addr) {
   uintptr_t base = align_down(addr, COPY_SPACE_SLAB_SIZE);
   struct copy_space_slab *slab = (struct copy_space_slab*) base;
   uintptr_t block_idx =
@@ -91,6 +90,11 @@ copy_space_block_header(struct copy_space_block_payload *payload) {
   return &slab->headers[block_idx - COPY_SPACE_HEADER_BLOCKS_PER_SLAB];
 }
 
+static inline struct copy_space_block*
+copy_space_block_header(struct copy_space_block_payload *payload) {
+  return copy_space_block_for_addr((uintptr_t) payload);
+}
+
 static inline struct copy_space_block_payload*
 copy_space_block_payload(struct copy_space_block *block) {
   uintptr_t addr = (uintptr_t) block;
@@ -469,12 +473,34 @@ copy_space_finish_gc(struct copy_space *space) {
   space->in_gc = 0;
 }
 
+static int
+copy_space_can_allocate(struct copy_space *space, size_t bytes) {
+  // With lock!
+  for (struct copy_space_block *empties = space->empty.list.head;
+       empties;
+       empties = empties->next) {
+    if (bytes <= COPY_SPACE_REGION_SIZE) return 1;
+    bytes -= COPY_SPACE_REGION_SIZE;
+  }
+  return 0;
+}
+
 static void
 copy_space_add_to_allocation_counter(struct copy_space *space,
                                      uintptr_t *counter) {
   *counter += space->allocated_bytes - space->allocated_bytes_at_last_gc;
 }
 
+static inline int
+copy_space_contains_address(struct copy_space *space, uintptr_t addr) {
+  return extents_contain_addr(space->extents, addr);
+}
+
+static inline int
+copy_space_contains(struct copy_space *space, struct gc_ref ref) {
+  return copy_space_contains_address(space, gc_ref_value(ref));
+}
+
 static void
 copy_space_gc_during_evacuation(void *data) {
   // If space is really tight and reordering of objects during
@@ -617,11 +643,6 @@ copy_space_forward_if_traced(struct copy_space *space, struct gc_edge edge,
   return copy_space_forward_if_traced_nonatomic(space, edge, old_ref);
 }
 
-static inline int
-copy_space_contains(struct copy_space *space, struct gc_ref ref) {
-  return extents_contain_addr(space->extents, gc_ref_value(ref));
-}
-
 static int
 copy_space_is_aligned(struct copy_space *space) {
   return space->flags & COPY_SPACE_ALIGNED;
@@ -655,6 +676,12 @@ copy_space_contains_address_aligned(struct copy_space *space, uintptr_t addr) {
   return (addr - low_addr) < size;
 }
 
+static inline int
+copy_space_contains_edge_aligned(struct copy_space *space,
+                                 struct gc_edge edge) {
+  return copy_space_contains_address_aligned(space, gc_edge_address(edge));
+}
+
 static uint8_t*
 copy_space_field_logged_byte(struct gc_edge edge) {
   uintptr_t addr = gc_edge_address(edge);
@@ -672,6 +699,20 @@ copy_space_field_logged_bit(struct gc_edge edge) {
   return 1 << (field % 8);
 }
 
+static inline int
+copy_space_should_promote(struct copy_space *space, struct gc_ref ref) {
+  GC_ASSERT(copy_space_contains(space, ref));
+  uintptr_t addr = gc_ref_value(ref);
+  struct copy_space_block *block = copy_space_block_for_addr(gc_ref_value(ref));
+  GC_ASSERT_EQ(copy_space_object_region(ref), space->active_region ^ 1);
+  return block->is_survivor[space->active_region ^ 1];
+}
+
+static int
+copy_space_contains_edge(struct copy_space *space, struct gc_edge edge) {
+  return copy_space_contains_address(space, gc_edge_address(edge));
+}
+
 static int
 copy_space_remember_edge(struct copy_space *space, struct gc_edge edge) {
   GC_ASSERT(copy_space_contains_edge(space, edge));
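
The survivor test that the new copy_space_should_promote predicate encodes is small enough to sketch in isolation. The program below is a minimal, self-contained illustration of the same idea: a per-block is_survivor flag indexed by semispace region, consulted for the region currently being evacuated (active_region ^ 1). The struct layouts, the name should_promote, and the main() driver are simplified stand-ins for illustration only, not whippet's actual definitions.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for struct copy_space_block: one survivor flag per
   semispace region, indexed by region id (0 or 1). */
struct block {
  uint8_t is_survivor[2];
};

/* Hypothetical stand-in for struct copy_space: just the region currently
   being allocated into. */
struct space {
  int active_region;
};

/* An object visited during GC lives in the other region (the one being
   evacuated), so consult that region's survivor flag. */
static int should_promote(struct space *space, struct block *block) {
  return block->is_survivor[space->active_region ^ 1];
}

int main(void) {
  struct space space = { .active_region = 0 };
  struct block fresh = { .is_survivor = { 0, 0 } };  /* never survived a GC */
  struct block aged  = { .is_survivor = { 0, 1 } };  /* survived in region 1 */

  printf("fresh block: promote? %d\n", should_promote(&space, &fresh));  /* 0 */
  printf("aged block:  promote? %d\n", should_promote(&space, &aged));   /* 1 */
  return 0;
}

Under this reading, an object is promoted only when the block holding it already survived a previous collection, which is consistent with the assertion in the diff that the object's region is the non-active one.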