1
Fork 0
mirror of https://git.savannah.gnu.org/git/guile.git synced 2025-07-05 09:10:18 +02:00

Allow conservative mmc configs to trace some objects precisely

* api/mmc-attrs.h (gc_allocator_alloc_table_begin_pattern): Unify the
strategies between conservative and precise configurations.  Notably
this means that TAGGED allocations are traced precisely in
heap-conservative configurations.  Don't pin untagged pointerless
allocations; that is the mutator's responsibility.
* src/mmc.c (compute_trace_kind): Trace tagged objects precisely in all
cases.
(gc_init): Update expectations for alloc table bytes.
This commit is contained in:
Andy Wingo 2025-07-02 10:44:05 +02:00
parent dda1522ab0
commit b874ad5da6
3 changed files with 33 additions and 68 deletions

View file

@ -35,30 +35,18 @@ static inline uint8_t gc_allocator_alloc_table_begin_pattern(enum gc_allocation_
uint8_t trace_precisely = 0; uint8_t trace_precisely = 0;
uint8_t trace_none = 8; uint8_t trace_none = 8;
uint8_t trace_conservatively = 16; uint8_t trace_conservatively = 16;
uint8_t pinned = 16;
if (GC_CONSERVATIVE_TRACE) {
switch (kind) { switch (kind) {
case GC_ALLOCATION_TAGGED: case GC_ALLOCATION_TAGGED:
return young | trace_precisely;
case GC_ALLOCATION_UNTAGGED_CONSERVATIVE: case GC_ALLOCATION_UNTAGGED_CONSERVATIVE:
if (!GC_CONSERVATIVE_TRACE)
GC_CRASH ();
return young | trace_conservatively; return young | trace_conservatively;
case GC_ALLOCATION_TAGGED_POINTERLESS: case GC_ALLOCATION_TAGGED_POINTERLESS:
case GC_ALLOCATION_UNTAGGED_POINTERLESS: case GC_ALLOCATION_UNTAGGED_POINTERLESS:
return young | trace_none; return young | trace_none;
default: default:
GC_CRASH(); GC_CRASH();
};
} else {
switch (kind) {
case GC_ALLOCATION_TAGGED:
return young | trace_precisely;
case GC_ALLOCATION_TAGGED_POINTERLESS:
return young | trace_none;
case GC_ALLOCATION_UNTAGGED_POINTERLESS:
return young | trace_none | pinned;
case GC_ALLOCATION_UNTAGGED_CONSERVATIVE:
default:
GC_CRASH();
};
} }
} }
static inline uint8_t gc_allocator_alloc_table_end_pattern(void) { static inline uint8_t gc_allocator_alloc_table_end_pattern(void) {

View file

@ -12,13 +12,12 @@ static inline size_t gc_finalizer_priority_count(void) { return 2; }
static inline int static inline int
gc_is_valid_conservative_ref_displacement(uintptr_t displacement) { gc_is_valid_conservative_ref_displacement(uintptr_t displacement) {
#if GC_CONSERVATIVE_ROOTS || GC_CONSERVATIVE_TRACE if (GC_CONSERVATIVE_ROOTS || GC_CONSERVATIVE_TRACE)
// Here is where you would allow tagged heap object references. // Here is where you would allow tagged heap object references.
return displacement == 0; return displacement == 0;
#else
// Shouldn't get here. // Shouldn't get here.
GC_CRASH(); GC_CRASH();
#endif
} }
// No external objects in simple benchmarks. // No external objects in simple benchmarks.
@ -40,10 +39,6 @@ static inline void gc_trace_object(struct gc_ref ref,
struct gc_heap *heap, struct gc_heap *heap,
void *trace_data, void *trace_data,
size_t *size) { size_t *size) {
#if GC_CONSERVATIVE_TRACE
// Shouldn't get here.
GC_CRASH();
#else
switch (tag_live_alloc_kind(*tag_word(ref))) { switch (tag_live_alloc_kind(*tag_word(ref))) {
#define SCAN_OBJECT(name, Name, NAME) \ #define SCAN_OBJECT(name, Name, NAME) \
case ALLOC_KIND_##NAME: \ case ALLOC_KIND_##NAME: \
@ -58,7 +53,6 @@ static inline void gc_trace_object(struct gc_ref ref,
default: default:
GC_CRASH(); GC_CRASH();
} }
#endif
} }
static inline void visit_roots(struct handle *roots, static inline void visit_roots(struct handle *roots,

View file

@ -1031,28 +1031,18 @@ void gc_safepoint_signal_reallow(struct gc_mutator *mut) { GC_CRASH(); }
static enum gc_trace_kind static enum gc_trace_kind
compute_trace_kind(enum gc_allocation_kind kind) { compute_trace_kind(enum gc_allocation_kind kind) {
if (GC_CONSERVATIVE_TRACE) {
switch (kind) { switch (kind) {
case GC_ALLOCATION_TAGGED: case GC_ALLOCATION_TAGGED:
return GC_TRACE_PRECISELY;
case GC_ALLOCATION_UNTAGGED_CONSERVATIVE: case GC_ALLOCATION_UNTAGGED_CONSERVATIVE:
if (!GC_CONSERVATIVE_TRACE)
GC_CRASH ();
return GC_TRACE_CONSERVATIVELY; return GC_TRACE_CONSERVATIVELY;
case GC_ALLOCATION_TAGGED_POINTERLESS: case GC_ALLOCATION_TAGGED_POINTERLESS:
case GC_ALLOCATION_UNTAGGED_POINTERLESS: case GC_ALLOCATION_UNTAGGED_POINTERLESS:
return GC_TRACE_NONE; return GC_TRACE_NONE;
default: default:
GC_CRASH(); GC_CRASH();
};
} else {
switch (kind) {
case GC_ALLOCATION_TAGGED:
return GC_TRACE_PRECISELY;
case GC_ALLOCATION_TAGGED_POINTERLESS:
case GC_ALLOCATION_UNTAGGED_POINTERLESS:
return GC_TRACE_NONE;
case GC_ALLOCATION_UNTAGGED_CONSERVATIVE:
default:
GC_CRASH();
};
} }
} }
@ -1367,20 +1357,13 @@ gc_init(const struct gc_options *options, struct gc_stack_addr stack_base,
GC_ASSERT_EQ(gc_allocator_alloc_table_alignment(), NOFL_SLAB_SIZE); GC_ASSERT_EQ(gc_allocator_alloc_table_alignment(), NOFL_SLAB_SIZE);
GC_ASSERT_EQ(gc_allocator_alloc_table_begin_pattern(GC_ALLOCATION_TAGGED_POINTERLESS), GC_ASSERT_EQ(gc_allocator_alloc_table_begin_pattern(GC_ALLOCATION_TAGGED_POINTERLESS),
NOFL_METADATA_BYTE_YOUNG | NOFL_METADATA_BYTE_TRACE_NONE); NOFL_METADATA_BYTE_YOUNG | NOFL_METADATA_BYTE_TRACE_NONE);
if (GC_CONSERVATIVE_TRACE) {
GC_ASSERT_EQ(gc_allocator_alloc_table_begin_pattern(GC_ALLOCATION_TAGGED), GC_ASSERT_EQ(gc_allocator_alloc_table_begin_pattern(GC_ALLOCATION_TAGGED),
NOFL_METADATA_BYTE_YOUNG | NOFL_METADATA_BYTE_TRACE_CONSERVATIVELY); NOFL_METADATA_BYTE_YOUNG | NOFL_METADATA_BYTE_TRACE_PRECISELY);
if (GC_CONSERVATIVE_TRACE)
GC_ASSERT_EQ(gc_allocator_alloc_table_begin_pattern(GC_ALLOCATION_UNTAGGED_CONSERVATIVE), GC_ASSERT_EQ(gc_allocator_alloc_table_begin_pattern(GC_ALLOCATION_UNTAGGED_CONSERVATIVE),
NOFL_METADATA_BYTE_YOUNG | NOFL_METADATA_BYTE_TRACE_CONSERVATIVELY); NOFL_METADATA_BYTE_YOUNG | NOFL_METADATA_BYTE_TRACE_CONSERVATIVELY);
GC_ASSERT_EQ(gc_allocator_alloc_table_begin_pattern(GC_ALLOCATION_UNTAGGED_POINTERLESS), GC_ASSERT_EQ(gc_allocator_alloc_table_begin_pattern(GC_ALLOCATION_UNTAGGED_POINTERLESS),
NOFL_METADATA_BYTE_YOUNG | NOFL_METADATA_BYTE_TRACE_NONE); NOFL_METADATA_BYTE_YOUNG | NOFL_METADATA_BYTE_TRACE_NONE);
} else {
GC_ASSERT_EQ(gc_allocator_alloc_table_begin_pattern(GC_ALLOCATION_TAGGED),
NOFL_METADATA_BYTE_YOUNG | NOFL_METADATA_BYTE_TRACE_PRECISELY);
GC_ASSERT_EQ(gc_allocator_alloc_table_begin_pattern(GC_ALLOCATION_UNTAGGED_POINTERLESS),
NOFL_METADATA_BYTE_YOUNG | NOFL_METADATA_BYTE_TRACE_NONE |
NOFL_METADATA_BYTE_PINNED);
}
GC_ASSERT_EQ(gc_allocator_alloc_table_end_pattern(), NOFL_METADATA_BYTE_END); GC_ASSERT_EQ(gc_allocator_alloc_table_end_pattern(), NOFL_METADATA_BYTE_END);
if (GC_GENERATIONAL) { if (GC_GENERATIONAL) {
GC_ASSERT_EQ(gc_write_barrier_field_table_alignment(), NOFL_SLAB_SIZE); GC_ASSERT_EQ(gc_write_barrier_field_table_alignment(), NOFL_SLAB_SIZE);