diff --git a/src/parallel-tracer.h b/src/parallel-tracer.h
index 10bb207c5..80638603a 100644
--- a/src/parallel-tracer.h
+++ b/src/parallel-tracer.h
@@ -36,6 +36,7 @@ struct trace_worker {
 #define TRACE_WORKERS_MAX_COUNT 8
 
 struct gc_tracer {
+  struct gc_heap *heap;
   atomic_size_t active_tracers;
   size_t worker_count;
   long epoch;
@@ -92,8 +93,9 @@ trace_worker_spawn(struct trace_worker *worker) {
 }
 
 static int
-gc_tracer_init(struct gc_heap *heap, size_t parallelism) {
-  struct gc_tracer *tracer = heap_tracer(heap);
+gc_tracer_init(struct gc_tracer *tracer, struct gc_heap *heap,
+               size_t parallelism) {
+  tracer->heap = heap;
   atomic_init(&tracer->active_tracers, 0);
   tracer->epoch = 0;
   pthread_mutex_init(&tracer->lock, NULL);
@@ -116,13 +118,11 @@ gc_tracer_init(struct gc_heap *heap, size_t parallelism) {
   return 1;
 }
 
-static void gc_tracer_prepare(struct gc_heap *heap) {
-  struct gc_tracer *tracer = heap_tracer(heap);
+static void gc_tracer_prepare(struct gc_tracer *tracer) {
   for (size_t i = 0; i < tracer->worker_count; i++)
     tracer->workers[i].steal_id = 0;
 }
-static void gc_tracer_release(struct gc_heap *heap) {
-  struct gc_tracer *tracer = heap_tracer(heap);
+static void gc_tracer_release(struct gc_tracer *tracer) {
   for (size_t i = 0; i < tracer->worker_count; i++)
     shared_worklist_release(&tracer->workers[i].deque);
 }
@@ -159,7 +159,8 @@ tracer_share(struct local_tracer *trace) {
 }
 
 static inline void
-gc_tracer_enqueue(struct gc_ref ref, struct gc_heap *heap, void *trace_data) {
+gc_tracer_enqueue(struct gc_tracer *tracer, struct gc_ref ref,
+                  void *trace_data) {
   struct local_tracer *trace = trace_data;
   if (local_worklist_full(&trace->local))
     tracer_share(trace);
@@ -316,9 +317,7 @@ gc_tracer_enqueue_roots(struct gc_tracer *tracer, struct gc_ref *objv,
 }
 
 static inline void
-gc_tracer_trace(struct gc_heap *heap) {
-  struct gc_tracer *tracer = heap_tracer(heap);
-
+gc_tracer_trace(struct gc_tracer *tracer) {
   DEBUG("starting trace; %zu workers\n", tracer->worker_count);
 
   ssize_t parallel_threshold =
diff --git a/src/serial-tracer.h b/src/serial-tracer.h
index d353162d9..c208e10e5 100644
--- a/src/serial-tracer.h
+++ b/src/serial-tracer.h
@@ -10,16 +10,19 @@
 #include "tracer.h"
 
 struct gc_tracer {
+  struct gc_heap *heap;
   struct simple_worklist worklist;
 };
 
 static int
-gc_tracer_init(struct gc_heap *heap, size_t parallelism) {
+gc_tracer_init(struct gc_tracer *tracer, struct gc_heap *heap,
+               size_t parallelism) {
+  tracer->heap = heap;
   return simple_worklist_init(&heap_tracer(heap)->worklist);
 }
-static void gc_tracer_prepare(struct gc_heap *heap) {}
-static void gc_tracer_release(struct gc_heap *heap) {
-  simple_worklist_release(&heap_tracer(heap)->worklist);
+static void gc_tracer_prepare(struct gc_tracer *tracer) {}
+static void gc_tracer_release(struct gc_tracer *tracer) {
+  simple_worklist_release(&tracer->worklist);
 }
 
 static inline void
@@ -32,16 +35,17 @@ gc_tracer_enqueue_roots(struct gc_tracer *tracer, struct gc_ref *objs,
   simple_worklist_push_many(&tracer->worklist, objs, count);
 }
 static inline void
-gc_tracer_enqueue(struct gc_ref ref, struct gc_heap *heap, void *trace_data) {
-  gc_tracer_enqueue_root(heap_tracer(heap), ref);
+gc_tracer_enqueue(struct gc_tracer *tracer, struct gc_ref ref,
+                  void *trace_data) {
+  gc_tracer_enqueue_root(tracer, ref);
 }
 static inline void
-gc_tracer_trace(struct gc_heap *heap) {
+gc_tracer_trace(struct gc_tracer *tracer) {
   do {
-    struct gc_ref obj = simple_worklist_pop(&heap_tracer(heap)->worklist);
+    struct gc_ref obj = simple_worklist_pop(&tracer->worklist);
     if (!gc_ref_is_heap_object(obj))
       break;
-    trace_one(obj, heap, NULL);
+    trace_one(obj, tracer->heap, NULL);
   } while (1);
 }
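Both tracer implementations now stash the owning heap in the tracer at init time, so the hot enqueue path and the per-cycle entry points take only a struct gc_tracer pointer, and the serial trace loop recovers the heap from tracer->heap. Per-cycle usage then looks roughly like the sketch below; gc_tracer_init(&heap->tracer, heap, parallelism) runs once at heap creation, and example_trace_cycle is a hypothetical driver written for illustration, not code from this patch.

static void
example_trace_cycle(struct gc_tracer *tracer, struct gc_ref *roots,
                    size_t count) {
  gc_tracer_prepare(tracer);                     /* reset per-cycle state */
  gc_tracer_enqueue_roots(tracer, roots, count); /* seed the worklist */
  gc_tracer_trace(tracer);                       /* drain until empty */
  gc_tracer_release(tracer);                     /* drop trace-time resources */
}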
diff --git a/src/tracer.h b/src/tracer.h
index d0f99938d..a49d25887 100644
--- a/src/tracer.h
+++ b/src/tracer.h
@@ -17,10 +17,6 @@ static inline struct gc_tracer* heap_tracer(struct gc_heap *heap);
 static inline void trace_one(struct gc_ref ref, struct gc_heap *heap,
                              void *trace_data) GC_ALWAYS_INLINE;
 
-// Visit one edge.  Return nonzero if this call shaded the object grey.
-static inline int trace_edge(struct gc_heap *heap,
-                             struct gc_edge edge) GC_ALWAYS_INLINE;
-
 ////////////////////////////////////////////////////////////////////////
 /// To be implemented by tracer.
 ////////////////////////////////////////////////////////////////////////
@@ -30,13 +26,14 @@ static inline int trace_edge(struct gc_heap *heap,
 struct gc_tracer;
 
 // Initialize the tracer when the heap is created.
-static int gc_tracer_init(struct gc_heap *heap, size_t parallelism);
+static int gc_tracer_init(struct gc_tracer *tracer, struct gc_heap *heap,
+                          size_t parallelism);
 
 // Initialize the tracer for a new GC cycle.
-static void gc_tracer_prepare(struct gc_heap *heap);
+static void gc_tracer_prepare(struct gc_tracer *tracer);
 
 // Release any resources allocated during the trace.
-static void gc_tracer_release(struct gc_heap *heap);
+static void gc_tracer_release(struct gc_tracer *tracer);
 
 // Add root objects to the trace.  Call before tracer_trace.
 static inline void gc_tracer_enqueue_root(struct gc_tracer *tracer,
@@ -46,24 +43,11 @@ static inline void gc_tracer_enqueue_roots(struct gc_tracer *tracer,
                                            size_t count);
 
 // Given that an object has been shaded grey, enqueue for tracing.
-static inline void gc_tracer_enqueue(struct gc_ref ref, struct gc_heap *heap,
+static inline void gc_tracer_enqueue(struct gc_tracer *tracer,
+                                     struct gc_ref ref,
                                      void *trace_data) GC_ALWAYS_INLINE;
 
 // Run the full trace.
-static inline void gc_tracer_trace(struct gc_heap *heap);
-
-////////////////////////////////////////////////////////////////////////
-/// Procedures that work with any tracer.
-////////////////////////////////////////////////////////////////////////
-
-// Visit one edge.  If we shade the edge target grey, enqueue it for
-// tracing.
-static inline void tracer_visit(struct gc_edge edge, struct gc_heap *heap,
-                                void *trace_data) GC_ALWAYS_INLINE;
-static inline void
-tracer_visit(struct gc_edge edge, struct gc_heap *heap, void *trace_data) {
-  if (trace_edge(heap, edge))
-    gc_tracer_enqueue(gc_edge_ref(edge), heap, trace_data);
-}
+static inline void gc_tracer_trace(struct gc_tracer *tracer);
 
 #endif // TRACER_H
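With trace_edge and tracer_visit removed, tracer.h now reads as a two-sided contract: the collector supplies heap_tracer() and trace_one(), the tracer supplies the gc_tracer_* entry points, and no edge-visiting logic sits between them. A sketch of the collector-side half, with illustrative bodies only (whippet.c's real definitions follow):

static inline struct gc_tracer*
heap_tracer(struct gc_heap *heap) {
  return &heap->tracer;  /* assumes the tracer is embedded in the heap */
}

static inline void
trace_one(struct gc_ref ref, struct gc_heap *heap, void *trace_data) {
  /* Visit each outgoing edge of ref, shading targets grey and enqueueing
     newly-grey objects, e.g. via a collector-private tracer_visit as
     whippet.c does below. */
}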
diff --git a/src/whippet.c b/src/whippet.c
index f111dd8fd..cf1f6d6e9 100644
--- a/src/whippet.c
+++ b/src/whippet.c
@@ -674,6 +674,9 @@ static inline int do_trace(struct gc_heap *heap, struct gc_edge edge,
   return gc_extern_space_visit(heap_extern_space(heap), edge, ref);
 }
 
+static inline int trace_edge(struct gc_heap *heap,
+                             struct gc_edge edge) GC_ALWAYS_INLINE;
+
 static inline int trace_edge(struct gc_heap *heap, struct gc_edge edge) {
   struct gc_ref ref = gc_edge_ref(edge);
   int is_new = do_trace(heap, edge, ref);
@@ -1094,6 +1097,14 @@ void gc_heap_set_extern_space(struct gc_heap *heap,
   heap->extern_space = space;
 }
 
+static inline void tracer_visit(struct gc_edge edge, struct gc_heap *heap,
+                                void *trace_data) GC_ALWAYS_INLINE;
+static inline void
+tracer_visit(struct gc_edge edge, struct gc_heap *heap, void *trace_data) {
+  if (trace_edge(heap, edge))
+    gc_tracer_enqueue(&heap->tracer, gc_edge_ref(edge), trace_data);
+}
+
 static void
 trace_and_enqueue_locally(struct gc_edge edge, struct gc_heap *heap,
                           void *data) {
@@ -1177,7 +1188,7 @@ static inline void tracer_trace_conservative_ref(struct gc_conservative_ref ref,
   int possibly_interior = 0;
   struct gc_ref resolved = trace_conservative_ref(heap, ref, possibly_interior);
   if (gc_ref_is_heap_object(resolved))
-    gc_tracer_enqueue(resolved, heap, data);
+    gc_tracer_enqueue(&heap->tracer, resolved, data);
 }
 
 static inline void trace_one_conservatively(struct gc_ref ref,
@@ -1889,7 +1900,7 @@ static void collect(struct gc_mutator *mut,
   large_object_space_start_gc(lospace, is_minor);
   gc_extern_space_start_gc(exspace, is_minor);
   resolve_ephemerons_lazily(heap);
-  gc_tracer_prepare(heap);
+  gc_tracer_prepare(&heap->tracer);
   HEAP_EVENT(heap, requesting_stop);
   request_mutators_to_stop(heap);
   trace_mutator_roots_with_lock_before_stop(mut);
@@ -1906,14 +1917,14 @@ static void collect(struct gc_mutator *mut,
     prepare_for_evacuation(heap);
   trace_roots_after_stop(heap);
   HEAP_EVENT(heap, roots_traced);
-  gc_tracer_trace(heap);
+  gc_tracer_trace(&heap->tracer);
   HEAP_EVENT(heap, heap_traced);
   resolve_ephemerons_eagerly(heap);
   while (enqueue_resolved_ephemerons(heap))
-    gc_tracer_trace(heap);
+    gc_tracer_trace(&heap->tracer);
   HEAP_EVENT(heap, ephemerons_traced);
   sweep_ephemerons(heap);
-  gc_tracer_release(heap);
+  gc_tracer_release(&heap->tracer);
   mark_space_finish_gc(space, gc_kind);
   large_object_space_finish_gc(lospace, is_minor);
   gc_extern_space_finish_gc(exspace, is_minor);
@@ -2366,7 +2377,7 @@ static int heap_init(struct gc_heap *heap, const struct gc_options *options) {
   pthread_cond_init(&heap->collector_cond, NULL);
   heap->size = options->common.heap_size;
 
-  if (!gc_tracer_init(heap, options->common.parallelism))
+  if (!gc_tracer_init(&heap->tracer, heap, options->common.parallelism))
     GC_CRASH();
 
   heap->pending_ephemerons_size_factor = 0.005;
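Because tracer.h no longer declares them, whippet.c now carries its own forward declaration of trace_edge and a private tracer_visit that names the tracer explicitly as &heap->tracer. For orientation, the pieces compose roughly as in this sketch, a simplification of whippet.c's actual trace_one (which also has a conservative-tracing path):

static inline void
trace_one(struct gc_ref ref, struct gc_heap *heap, void *trace_data) {
  /* gc_trace_object, supplied by the embedder, visits each field of ref
     with tracer_visit, which shades through trace_edge and enqueues
     newly-grey objects on the heap's embedded tracer. */
  gc_trace_object(ref, tracer_visit, heap, trace_data, NULL);
}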