
Remove embedder requirement for per-object remset bits

Since we now have a field-logging write barrier, we don't need
per-object log bits.
Andy Wingo 2025-01-06 15:47:20 +01:00
parent 4be3e69ac1
commit 8e631ca3f3
5 changed files with 3 additions and 57 deletions
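For context, a field-logging write barrier records the exact fields that were written, instead of setting a single per-object bit and later rescanning the whole object. Below is a minimal sketch of the idea; the `edge_buffer` structure and `record_edge` helper are illustrative assumptions, not Whippet's actual API.

#include <stdint.h>
#include <stdlib.h>

// Illustrative only: a per-mutator buffer of logged field addresses.
// Whippet's real barrier is more involved; this just shows why a
// per-object remembered bit becomes redundant once individual fields
// are logged.
struct edge_buffer {
  uintptr_t **edges;  // fields written since the last minor GC
  size_t len, cap;
};

static void record_edge(struct edge_buffer *buf, uintptr_t *field) {
  if (buf->len == buf->cap) {
    buf->cap = buf->cap ? buf->cap * 2 : 256;
    buf->edges = realloc(buf->edges, buf->cap * sizeof buf->edges[0]);
  }
  buf->edges[buf->len++] = field;
}

// The barrier: when storing a pointer to a young object into an old
// object, log the field itself.  A minor GC then scans exactly these
// locations, so no state in the object header is needed.
static void write_field(struct edge_buffer *buf, uintptr_t *field,
                        uintptr_t new_value, int old_to_young) {
  if (old_to_young)
    record_edge(buf, field);
  *field = new_value;
}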

@@ -50,14 +50,6 @@ GC_EMBEDDER_API inline void gc_trace_heap_roots(struct gc_heap_roots *roots,
                                                 struct gc_heap *heap,
                                                 void *trace_data);
 
-// Some heap objects have space for a "remembered" bit, indicating they
-// are in the remembered set.  Large or potentially large objects
-// (e.g. a vector whose size is a run-time property) must have a
-// remembered set bit.  Small objects may or may not have such a bit.
-GC_EMBEDDER_API inline int gc_object_set_remembered(struct gc_ref ref);
-GC_EMBEDDER_API inline int gc_object_is_remembered_nonatomic(struct gc_ref ref);
-GC_EMBEDDER_API inline void gc_object_clear_remembered_nonatomic(struct gc_ref ref);
-
 GC_EMBEDDER_API inline uintptr_t gc_object_forwarded_nonatomic(struct gc_ref ref);
 GC_EMBEDDER_API inline void gc_object_forward_nonatomic(struct gc_ref ref,
                                                         struct gc_ref new_ref);

@@ -102,32 +102,6 @@ static inline void gc_object_forward_nonatomic(struct gc_ref ref,
   *tag_word(ref) = gc_ref_value(new_ref);
 }
 
-static inline int gc_object_set_remembered(struct gc_ref ref) {
-  uintptr_t *loc = tag_word(ref);
-  uintptr_t tag = atomic_load_explicit(loc, memory_order_relaxed);
-  while (1) {
-    if (tag & gcobj_remembered_bit)
-      return 0;
-    if (atomic_compare_exchange_weak_explicit(loc, &tag,
-                                              tag | gcobj_remembered_bit,
-                                              memory_order_acq_rel,
-                                              memory_order_acquire))
-      return 1;
-  }
-}
-
-static inline int gc_object_is_remembered_nonatomic(struct gc_ref ref) {
-  uintptr_t *loc = tag_word(ref);
-  uintptr_t tag = *loc;
-  return tag & gcobj_remembered_bit;
-}
-
-static inline void gc_object_clear_remembered_nonatomic(struct gc_ref ref) {
-  uintptr_t *loc = tag_word(ref);
-  uintptr_t tag = *loc;
-  *loc = tag & ~(uintptr_t)gcobj_remembered_bit;
-}
-
 static inline struct gc_atomic_forward
 gc_atomic_forward_begin(struct gc_ref ref) {
   uintptr_t tag = atomic_load_explicit(tag_word(ref), memory_order_acquire);
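The CAS loop removed above made `gc_object_set_remembered` idempotent under races: any number of threads could attempt the set, but only the winner saw a return value of 1. A write-barrier slow path built on it would have looked roughly like the following sketch, where `remember_object` stands in for whatever enqueue-into-remembered-set hook the collector provides (a hypothetical name, not an API from this repository).

// Hypothetical slow path of a generational write barrier using the
// per-object bit: racing threads all call gc_object_set_remembered,
// but only the one whose CAS succeeds (return value 1) enqueues the
// object, so it lands in the remembered set at most once per cycle.
static void write_barrier_slow(struct gc_heap *heap, struct gc_ref obj) {
  if (gc_object_set_remembered(obj))
    remember_object(heap, obj);  // hypothetical enqueue hook
}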

@@ -7,11 +7,9 @@ struct gc_header {
   uintptr_t tag;
 };
 
-// Alloc kind is in bits 2-7, for live objects.
-static const uintptr_t gcobj_alloc_kind_mask = 0x3f;
-static const uintptr_t gcobj_alloc_kind_shift = 2;
-static const uintptr_t gcobj_remembered_mask = 0x2;
-static const uintptr_t gcobj_remembered_bit = 0x2;
+// Alloc kind is in bits 1-7, for live objects.
+static const uintptr_t gcobj_alloc_kind_mask = 0x7f;
+static const uintptr_t gcobj_alloc_kind_shift = 1;
 static const uintptr_t gcobj_forwarded_mask = 0x1;
 static const uintptr_t gcobj_not_forwarded_bit = 0x1;
 static const uintptr_t gcobj_busy = 0;
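With the remembered bit gone, bit 1 of the tag word is folded into the alloc kind, widening it from six bits to seven. A sketch of how a kind would be read under the new layout; the `tag_alloc_kind` helper is illustrative, not part of the file above.

#include <stdint.h>

static const uintptr_t gcobj_alloc_kind_mask = 0x7f;
static const uintptr_t gcobj_alloc_kind_shift = 1;

// Bit 0 of the tag word is the not-forwarded flag; bits 1-7 now hold
// the alloc kind, so a kind of e.g. 5 is stored as (5 << 1) | flags.
static inline uint8_t tag_alloc_kind(uintptr_t tag) {
  return (tag >> gcobj_alloc_kind_shift) & gcobj_alloc_kind_mask;
}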

@@ -87,22 +87,6 @@ in the `gc_trace_object` function by calling `gc_trace_ephemeron` from
 allocates finalizers, it should trace them by calling
 `gc_trace_finalizer` from [`gc-finalizer.h`](../api/gc-finalizer.h).
 
-### Remembered-set bits
-
-When built to support generational garbage collection, Whippet requires
-that all "large" or potentially large objects have a flag bit reserved
-for use of the garbage collector.  A large object is one whose size
-exceeds the `gc_allocator_large_threshold()` (see
-[`gc-attrs.h`](../api/gc-attrs.h)), which is a collector-specific value.
-Currently the only generational collector is the in-place `mmc`
-collector, whose large object threshold is 4096 bytes.  The
-`gc_object_set_remembered`, `gc_object_is_remembered_nonatomic`, and
-`gc_object_clear_remembered_nonatomic` embedder functions manage the
-remembered bit.  Setting the remembered bit should be idempotent;
-multiple threads can race to call `gc_object_set_remembered` and do not
-synchronize.  The query and clear functions are called without
-concurrent accessors and so don't have to be atomic.
-
 ### Forwarding objects
 
 When built with a collector that moves objects, the embedder must also
@@ -113,8 +113,6 @@ static int large_object_space_copy(struct large_object_space *space,
   address_set_remove(&space->from_space, addr);
   address_set_add(GC_GENERATIONAL ? &space->survivor_space : &space->to_space,
                   addr);
-  if (GC_GENERATIONAL && gc_object_is_remembered_nonatomic(ref))
-    gc_object_clear_remembered_nonatomic(ref);
   // Object is grey; place it on mark stack to visit its fields.
   copied = 1;
 done: