8350898: Shenandoah: Eliminate final roots safepoint

Reviewed-by: rkennke, kdnilsen, cslucas
William Kemper 2025-03-19 16:56:53 +00:00
parent 8e999b83a4
commit 8a1c85eaa9
14 changed files with 248 additions and 97 deletions

View File

@@ -36,6 +36,7 @@ class ShenandoahBarrierSet;
 class ShenandoahHeap;
 class ShenandoahMarkingContext;
 class ShenandoahReferenceProcessor;
+class SATBMarkQueueSet;
 
 //
 // ========= Super
@@ -55,6 +56,14 @@ public:
 // ========= Marking
 //
+class ShenandoahFlushSATBHandshakeClosure : public HandshakeClosure {
+private:
+  SATBMarkQueueSet& _qset;
+public:
+  inline explicit ShenandoahFlushSATBHandshakeClosure(SATBMarkQueueSet& qset);
+  inline void do_thread(Thread* thread) override;
+};
+
 class ShenandoahMarkRefsSuperClosure : public ShenandoahSuperClosure {
 private:
   ShenandoahObjToScanQueue* _queue;

View File

@@ -59,6 +59,13 @@ void ShenandoahSuperClosure::do_nmethod(nmethod* nm) {
 //
 // ========= Marking
 //
+ShenandoahFlushSATBHandshakeClosure::ShenandoahFlushSATBHandshakeClosure(SATBMarkQueueSet& qset) :
+  HandshakeClosure("Shenandoah Flush SATB"),
+  _qset(qset) {}
+
+void ShenandoahFlushSATBHandshakeClosure::do_thread(Thread* thread) {
+  _qset.flush_queue(ShenandoahThreadLocalData::satb_mark_queue(thread));
+}
+
 ShenandoahMarkRefsSuperClosure::ShenandoahMarkRefsSuperClosure(ShenandoahObjToScanQueue* q,
                                                                ShenandoahReferenceProcessor* rp,
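
Note: the closure above is only the per-thread callback; a handshake drives it. A minimal usage sketch (illustrative, not part of the patch; all names are taken from this commit):

  // Ask every Java thread to retire its thread-local SATB buffer.
  SATBMarkQueueSet& qset = ShenandoahBarrierSet::satb_mark_queue_set();
  ShenandoahFlushSATBHandshakeClosure flush_satb(qset);
  Handshake::execute(&flush_satb);   // each handshaken thread runs do_thread(),
                                     // which flushes its own SATB queue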

View File

@@ -102,6 +102,16 @@ ShenandoahGC::ShenandoahDegenPoint ShenandoahConcurrentGC::degen_point() const {
   return _degen_point;
 }
 
+void ShenandoahConcurrentGC::entry_concurrent_update_refs_prepare(ShenandoahHeap* const heap) {
+  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
+  const char* msg = conc_init_update_refs_event_message();
+  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_update_refs_prepare);
+  EventMark em("%s", msg);
+
+  // Evacuation is complete, retire gc labs and change gc state
+  heap->concurrent_prepare_for_update_refs();
+}
+
 bool ShenandoahConcurrentGC::collect(GCCause::Cause cause) {
   ShenandoahHeap* const heap = ShenandoahHeap::heap();
@@ -192,8 +202,7 @@ bool ShenandoahConcurrentGC::collect(GCCause::Cause cause) {
     return false;
   }
 
-  // Evacuation is complete, retire gc labs
-  heap->concurrent_prepare_for_update_refs();
+  entry_concurrent_update_refs_prepare(heap);
 
   // Perform update-refs phase.
   if (ShenandoahVerify || ShenandoahPacing) {
@@ -216,24 +225,14 @@ bool ShenandoahConcurrentGC::collect(GCCause::Cause cause) {
     // Update references freed up collection set, kick the cleanup to reclaim the space.
     entry_cleanup_complete();
   } else {
-    // We chose not to evacuate because we found sufficient immediate garbage.
-    // However, there may still be regions to promote in place, so do that now.
-    if (has_in_place_promotions(heap)) {
-      entry_promote_in_place();
-
-      // If the promote-in-place operation was cancelled, we can have the degenerated
-      // cycle complete the operation. It will see that no evacuations are in progress,
-      // and that there are regions wanting promotion. The risk with not handling the
-      // cancellation would be failing to restore top for these regions and leaving
-      // them unable to serve allocations for the old generation.
-      if (check_cancellation_and_abort(ShenandoahDegenPoint::_degenerated_evac)) {
-        return false;
-      }
+    if (!entry_final_roots()) {
+      assert(_degen_point != _degenerated_unset, "Need to know where to start degenerated cycle");
+      return false;
     }
 
-    // At this point, the cycle is effectively complete. If the cycle has been cancelled here,
-    // the control thread will detect it on its next iteration and run a degenerated young cycle.
-    vmop_entry_final_roots();
+    if (VerifyAfterGC) {
+      vmop_entry_verify_final_roots();
+    }
     _abbreviated = true;
   }
@@ -251,6 +250,52 @@ bool ShenandoahConcurrentGC::collect(GCCause::Cause cause) {
   return true;
 }
 
+bool ShenandoahConcurrentGC::complete_abbreviated_cycle() {
+  shenandoah_assert_generational();
+
+  ShenandoahGenerationalHeap* const heap = ShenandoahGenerationalHeap::heap();
+
+  // We chose not to evacuate because we found sufficient immediate garbage.
+  // However, there may still be regions to promote in place, so do that now.
+  if (heap->old_generation()->has_in_place_promotions()) {
+    entry_promote_in_place();
+
+    // If the promote-in-place operation was cancelled, we can have the degenerated
+    // cycle complete the operation. It will see that no evacuations are in progress,
+    // and that there are regions wanting promotion. The risk with not handling the
+    // cancellation would be failing to restore top for these regions and leaving
+    // them unable to serve allocations for the old generation. This will leave the weak
+    // roots flag set (the degenerated cycle will unset it).
+    if (check_cancellation_and_abort(ShenandoahDegenPoint::_degenerated_evac)) {
+      return false;
+    }
+  }
+
+  // At this point, the cycle is effectively complete. If the cycle has been cancelled here,
+  // the control thread will detect it on its next iteration and run a degenerated young cycle.
+  if (!_generation->is_old()) {
+    heap->update_region_ages(_generation->complete_marking_context());
+  }
+
+  if (!heap->is_concurrent_old_mark_in_progress()) {
+    heap->concurrent_final_roots();
+  } else {
+    // Since the cycle was shortened for having enough immediate garbage, this will be
+    // the last phase before concurrent marking of old resumes. We must be sure
+    // that old mark threads don't see any pointers to garbage in the SATB queues. Even
+    // though nothing was evacuated, overwriting unreachable weak roots with null may still
+    // put pointers to regions that become trash in the SATB queues. The following will
+    // piggyback flushing the thread local SATB queues on the same handshake that propagates
+    // the gc state change.
+    ShenandoahSATBMarkQueueSet& satb_queues = ShenandoahBarrierSet::satb_mark_queue_set();
+    ShenandoahFlushSATBHandshakeClosure complete_thread_local_satb_buffers(satb_queues);
+    heap->concurrent_final_roots(&complete_thread_local_satb_buffers);
+    heap->old_generation()->concurrent_transfer_pointers_from_satb();
+  }
+
+  return true;
+}
+
 void ShenandoahConcurrentGC::vmop_entry_init_mark() {
   ShenandoahHeap* const heap = ShenandoahHeap::heap();
   TraceCollectorStats tcs(heap->monitoring_support()->stw_collection_counters());
@@ -291,7 +336,7 @@ void ShenandoahConcurrentGC::vmop_entry_final_update_refs() {
   VMThread::execute(&op);
 }
 
-void ShenandoahConcurrentGC::vmop_entry_final_roots() {
+void ShenandoahConcurrentGC::vmop_entry_verify_final_roots() {
   ShenandoahHeap* const heap = ShenandoahHeap::heap();
   TraceCollectorStats tcs(heap->monitoring_support()->stw_collection_counters());
   ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::final_roots_gross);
@@ -347,12 +392,12 @@ void ShenandoahConcurrentGC::entry_final_update_refs() {
   op_final_update_refs();
 }
 
-void ShenandoahConcurrentGC::entry_final_roots() {
-  const char* msg = final_roots_event_message();
+void ShenandoahConcurrentGC::entry_verify_final_roots() {
+  const char* msg = verify_final_roots_event_message();
   ShenandoahPausePhase gc_phase(msg, ShenandoahPhaseTimings::final_roots);
   EventMark em("%s", msg);
-  op_final_roots();
+  op_verify_final_roots();
 }
 
 void ShenandoahConcurrentGC::entry_reset() {
@@ -526,19 +571,12 @@ void ShenandoahConcurrentGC::entry_evacuate() {
   op_evacuate();
 }
 
-void ShenandoahConcurrentGC::entry_promote_in_place() {
+void ShenandoahConcurrentGC::entry_promote_in_place() const {
   shenandoah_assert_generational();
 
-  ShenandoahHeap* const heap = ShenandoahHeap::heap();
-  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
-
-  static const char* msg = "Promote in place";
-  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::promote_in_place);
-  EventMark em("%s", msg);
-
-  ShenandoahWorkerScope scope(heap->workers(),
-                              ShenandoahWorkerPolicy::calc_workers_for_conc_evac(),
-                              "promote in place");
+  ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::promote_in_place);
+  ShenandoahGCWorkerPhase worker_phase(ShenandoahPhaseTimings::promote_in_place);
+  EventMark em("%s", "Promote in place");
 
   ShenandoahGenerationalHeap::heap()->promote_regions_in_place(true);
 }
@@ -663,6 +701,7 @@ void ShenandoahConcurrentGC::op_init_mark() {
   }
 
   if (ShenandoahVerify) {
+    ShenandoahTimingsTracker v(ShenandoahPhaseTimings::init_mark_verify);
     heap->verifier()->verify_before_concmark();
   }
@@ -751,6 +790,7 @@ void ShenandoahConcurrentGC::op_final_mark() {
   }
 
   if (ShenandoahVerify) {
+    ShenandoahTimingsTracker v(ShenandoahPhaseTimings::final_mark_verify);
     heap->verifier()->verify_before_evacuation();
   }
@@ -767,6 +807,7 @@ void ShenandoahConcurrentGC::op_final_mark() {
       }
     }
   } else {
     if (ShenandoahVerify) {
+      ShenandoahTimingsTracker v(ShenandoahPhaseTimings::final_mark_verify);
       if (has_in_place_promotions(heap)) {
         heap->verifier()->verify_after_concmark_with_promotions();
       } else {
@@ -1088,6 +1129,7 @@ void ShenandoahConcurrentGC::op_evacuate() {
 void ShenandoahConcurrentGC::op_init_update_refs() {
   ShenandoahHeap* const heap = ShenandoahHeap::heap();
   if (ShenandoahVerify) {
+    ShenandoahTimingsTracker v(ShenandoahPhaseTimings::init_update_refs_verify);
     heap->verifier()->verify_before_update_refs();
   }
   if (ShenandoahPacing) {
@@ -1175,6 +1217,7 @@ void ShenandoahConcurrentGC::op_final_update_refs() {
   }
 
   if (ShenandoahVerify) {
+    ShenandoahTimingsTracker v(ShenandoahPhaseTimings::final_update_refs_verify);
     heap->verifier()->verify_after_update_refs();
   }
@@ -1190,33 +1233,32 @@ void ShenandoahConcurrentGC::op_final_update_refs() {
   }
 }
 
-void ShenandoahConcurrentGC::op_final_roots() {
-  ShenandoahHeap *heap = ShenandoahHeap::heap();
-  heap->set_concurrent_weak_root_in_progress(false);
-  heap->set_evacuation_in_progress(false);
+bool ShenandoahConcurrentGC::entry_final_roots() {
+  ShenandoahHeap* const heap = ShenandoahHeap::heap();
+  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
 
-  if (heap->mode()->is_generational()) {
-    // If the cycle was shortened for having enough immediate garbage, this could be
-    // the last GC safepoint before concurrent marking of old resumes. We must be sure
-    // that old mark threads don't see any pointers to garbage in the SATB buffers.
-    if (heap->is_concurrent_old_mark_in_progress()) {
-      heap->old_generation()->transfer_pointers_from_satb();
-    }
+  const char* msg = conc_final_roots_event_message();
+  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_final_roots);
+  EventMark em("%s", msg);
+  ShenandoahWorkerScope scope(heap->workers(),
+                              ShenandoahWorkerPolicy::calc_workers_for_conc_evac(),
+                              msg);
 
-    if (!_generation->is_old()) {
-      ShenandoahGenerationalHeap::heap()->update_region_ages(_generation->complete_marking_context());
+  if (!heap->mode()->is_generational()) {
+    heap->concurrent_final_roots();
+  } else {
+    if (!complete_abbreviated_cycle()) {
+      return false;
     }
   }
+  return true;
+}
 
+void ShenandoahConcurrentGC::op_verify_final_roots() {
   if (VerifyAfterGC) {
     Universe::verify();
   }
-
-  {
-    ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::final_roots_propagate_gc_state);
-    heap->propagate_gc_state_to_all_threads();
-  }
 }
 
 void ShenandoahConcurrentGC::op_cleanup_complete() {
@@ -1301,11 +1343,19 @@ const char* ShenandoahConcurrentGC::conc_reset_after_collect_event_message() const {
   }
 }
 
-const char* ShenandoahConcurrentGC::final_roots_event_message() const {
+const char* ShenandoahConcurrentGC::verify_final_roots_event_message() const {
   if (ShenandoahHeap::heap()->unload_classes()) {
-    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Pause Final Roots", " (unload classes)");
+    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Pause Verify Final Roots", " (unload classes)");
   } else {
-    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Pause Final Roots", "");
+    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Pause Verify Final Roots", "");
+  }
+}
+
+const char* ShenandoahConcurrentGC::conc_final_roots_event_message() const {
+  if (ShenandoahHeap::heap()->unload_classes()) {
+    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Concurrent Final Roots", " (unload classes)");
+  } else {
+    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Concurrent Final Roots", "");
   }
 }
@@ -1332,3 +1382,11 @@ const char* ShenandoahConcurrentGC::conc_cleanup_event_message() const {
     SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Concurrent cleanup", "");
   }
 }
+
+const char* ShenandoahConcurrentGC::conc_init_update_refs_event_message() const {
+  if (ShenandoahHeap::heap()->unload_classes()) {
+    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Concurrent Init Update Refs", " (unload classes)");
+  } else {
+    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Concurrent Init Update Refs", "");
+  }
+}
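
Note: taken together, the changes in this file replace the final-roots safepoint with a concurrent phase for abbreviated cycles; a pause remains only for optional verification. A condensed sketch of the resulting control flow (names as introduced above; simplified, not a verbatim excerpt):

  // In ShenandoahConcurrentGC::collect(), when evacuation is skipped:
  if (!entry_final_roots()) {            // concurrent phase, no safepoint
    return false;                        // cancelled: degenerated cycle finishes up
  }
  if (VerifyAfterGC) {
    vmop_entry_verify_final_roots();     // the only remaining pause, for verification
  }
  _abbreviated = true;

  // In generational mode, entry_final_roots() calls complete_abbreviated_cycle(),
  // which promotes regions in place if needed, then calls heap->concurrent_final_roots(),
  // piggybacking a SATB flush on the gc-state handshake when old marking is active.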

View File

@@ -56,9 +56,12 @@ private:
 public:
   ShenandoahConcurrentGC(ShenandoahGeneration* generation, bool do_old_gc_bootstrap);
   bool collect(GCCause::Cause cause) override;
   ShenandoahDegenPoint degen_point() const;
+
+  void entry_concurrent_update_refs_prepare(ShenandoahHeap* heap);
+
   // Return true if this cycle found enough immediate garbage to skip evacuation
   bool abbreviated() const { return _abbreviated; }
@@ -69,7 +72,7 @@ protected:
   void vmop_entry_final_mark();
   void vmop_entry_init_update_refs();
   void vmop_entry_final_update_refs();
-  void vmop_entry_final_roots();
+  void vmop_entry_verify_final_roots();
 
   // Entry methods to normally STW GC operations. These set up logging, monitoring
   // and workers for next VM operation
@@ -77,7 +80,7 @@ protected:
   void entry_final_mark();
   void entry_init_update_refs();
   void entry_final_update_refs();
-  void entry_final_roots();
+  void entry_verify_final_roots();
 
   // Entry methods to normally concurrent GC operations. These set up logging, monitoring
   // for concurrent operation.
@@ -96,8 +99,11 @@ protected:
   void entry_update_refs();
   void entry_cleanup_complete();
 
+  // This is the last phase of a cycle which performs no evacuations
+  bool entry_final_roots();
+
   // Called when the collection set is empty, but the generational mode has regions to promote in place
-  void entry_promote_in_place();
+  void entry_promote_in_place() const;
 
   // Actual work for the phases
   void op_reset();
@@ -116,7 +122,8 @@ protected:
   void op_update_refs();
   void op_update_thread_roots();
   void op_final_update_refs();
-  void op_final_roots();
+
+  void op_verify_final_roots();
   void op_cleanup_complete();
   void op_reset_after_collect();
@@ -129,19 +136,23 @@ protected:
 private:
   void start_mark();
 
-  static bool has_in_place_promotions(ShenandoahHeap* heap) ;
+  bool complete_abbreviated_cycle();
+
+  static bool has_in_place_promotions(ShenandoahHeap* heap);
 
   // Messages for GC trace events, they have to be immortal for
   // passing around the logging/tracing systems
   const char* init_mark_event_message() const;
   const char* final_mark_event_message() const;
-  const char* final_roots_event_message() const;
+  const char* verify_final_roots_event_message() const;
+  const char* conc_final_roots_event_message() const;
   const char* conc_mark_event_message() const;
   const char* conc_reset_event_message() const;
   const char* conc_reset_after_collect_event_message() const;
   const char* conc_weak_refs_event_message() const;
   const char* conc_weak_roots_event_message() const;
   const char* conc_cleanup_event_message() const;
+  const char* conc_init_update_refs_event_message() const;
 };
 
 #endif // SHARE_GC_SHENANDOAH_SHENANDOAHCONCURRENTGC_HPP

View File

@@ -212,19 +212,6 @@ void ShenandoahConcurrentMark::mark_concurrent_roots() {
   }
 }
 
-class ShenandoahFlushSATBHandshakeClosure : public HandshakeClosure {
-private:
-  SATBMarkQueueSet& _qset;
-public:
-  ShenandoahFlushSATBHandshakeClosure(SATBMarkQueueSet& qset) :
-    HandshakeClosure("Shenandoah Flush SATB"),
-    _qset(qset) {}
-
-  void do_thread(Thread* thread) {
-    _qset.flush_queue(ShenandoahThreadLocalData::satb_mark_queue(thread));
-  }
-};
-
 void ShenandoahConcurrentMark::concurrent_mark() {
   ShenandoahHeap* const heap = ShenandoahHeap::heap();
   WorkerThreads* workers = heap->workers();

View File

@@ -409,7 +409,7 @@ void ShenandoahDegenGC::op_evacuate() {
 
 void ShenandoahDegenGC::op_init_update_refs() {
   // Evacuation has completed
   ShenandoahHeap* const heap = ShenandoahHeap::heap();
-  heap->prepare_update_heap_references(false /*concurrent*/);
+  heap->prepare_update_heap_references();
   heap->set_update_refs_in_progress(true);
 }

View File

@@ -1247,9 +1247,11 @@ public:
   }
 };
 
-class ShenandoahGCStatePropagator : public ThreadClosure {
+class ShenandoahGCStatePropagator : public HandshakeClosure {
 public:
-  explicit ShenandoahGCStatePropagator(char gc_state) : _gc_state(gc_state) {}
+  explicit ShenandoahGCStatePropagator(char gc_state) :
+    HandshakeClosure("Shenandoah GC State Change"),
+    _gc_state(gc_state) {}
 
   void do_thread(Thread* thread) override {
     ShenandoahThreadLocalData::set_gc_state(thread, _gc_state);
@@ -1306,6 +1308,37 @@ void ShenandoahHeap::concurrent_prepare_for_update_refs() {
   _update_refs_iterator.reset();
 }
 
+class ShenandoahCompositeHandshakeClosure : public HandshakeClosure {
+  HandshakeClosure* _handshake_1;
+  HandshakeClosure* _handshake_2;
+public:
+  ShenandoahCompositeHandshakeClosure(HandshakeClosure* handshake_1, HandshakeClosure* handshake_2) :
+    HandshakeClosure(handshake_2->name()),
+    _handshake_1(handshake_1), _handshake_2(handshake_2) {}
+
+  void do_thread(Thread* thread) override {
+    _handshake_1->do_thread(thread);
+    _handshake_2->do_thread(thread);
+  }
+};
+
+void ShenandoahHeap::concurrent_final_roots(HandshakeClosure* handshake_closure) {
+  {
+    assert(!is_evacuation_in_progress(), "Should not evacuate for abbreviated or old cycles");
+    MutexLocker lock(Threads_lock);
+    set_gc_state_concurrent(WEAK_ROOTS, false);
+  }
+
+  ShenandoahGCStatePropagator propagator(_gc_state.raw_value());
+  Threads::non_java_threads_do(&propagator);
+
+  if (handshake_closure == nullptr) {
+    Handshake::execute(&propagator);
+  } else {
+    ShenandoahCompositeHandshakeClosure composite(&propagator, handshake_closure);
+    Handshake::execute(&composite);
+  }
+}
+
 oop ShenandoahHeap::evacuate_object(oop p, Thread* thread) {
   assert(thread == Thread::current(), "Expected thread parameter to be current thread.");
   if (ShenandoahThreadLocalData::is_oom_during_evac(thread)) {
@@ -2019,16 +2052,14 @@ void ShenandoahHeap::stw_weak_refs(bool full_gc) {
   gc_generation()->ref_processor()->process_references(phase, workers(), false /* concurrent */);
 }
 
-void ShenandoahHeap::prepare_update_heap_references(bool concurrent) {
+void ShenandoahHeap::prepare_update_heap_references() {
   assert(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "must be at safepoint");
   // Evacuation is over, no GCLABs are needed anymore. GCLABs are under URWM, so we need to
   // make them parsable for update code to work correctly. Plus, we can compute new sizes
   // for future GCLABs here.
   if (UseTLAB) {
-    ShenandoahGCPhase phase(concurrent ?
-            ShenandoahPhaseTimings::init_update_refs_manage_gclabs :
-            ShenandoahPhaseTimings::degen_gc_init_update_refs_manage_gclabs);
+    ShenandoahGCPhase phase(ShenandoahPhaseTimings::degen_gc_init_update_refs_manage_gclabs);
     gclabs_retire(ResizeTLAB);
   }
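
Note: a short sketch of how concurrent_final_roots() combines the two per-thread actions into a single handshake (illustrative only; the actual call site with a non-null closure is ShenandoahConcurrentGC::complete_abbreviated_cycle, and all names come from this commit):

  // One handshake, two per-thread actions: publish the new gc state and
  // flush the thread-local SATB buffer, instead of stopping each thread twice.
  ShenandoahGCStatePropagator propagator(_gc_state.raw_value());
  ShenandoahFlushSATBHandshakeClosure flush_satb(ShenandoahBarrierSet::satb_mark_queue_set());
  ShenandoahCompositeHandshakeClosure composite(&propagator, &flush_satb);
  Handshake::execute(&composite);   // each thread runs both do_thread() bodies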

View File

@@ -481,11 +481,14 @@ private:
   // Concurrent class unloading support
   void do_class_unloading();
 
   // Reference updating
-  void prepare_update_heap_references(bool concurrent);
+  void prepare_update_heap_references();
 
   // Retires LABs used for evacuation
   void concurrent_prepare_for_update_refs();
+
+  // Turn off weak roots flag, purge old satb buffers in generational mode
+  void concurrent_final_roots(HandshakeClosure* handshake_closure = nullptr);
+
   virtual void update_heap_references(bool concurrent);
 
   // Final update region states
   void update_heap_region_states(bool concurrent);

View File

@@ -142,7 +142,7 @@ bool ShenandoahOldGC::collect(GCCause::Cause cause) {
   // return from here with weak roots in progress. This is not a valid gc state
   // for any young collections (or allocation failures) that interrupt the old
   // collection.
-  vmop_entry_final_roots();
+  heap->concurrent_final_roots();
 
   // We do not rebuild_free following increments of old marking because memory has not been reclaimed. However, we may
   // need to transfer memory to OLD in order to efficiently support the mixed evacuations that might immediately follow.

View File

@@ -23,7 +23,6 @@
  *
  */
 
-#include "gc/shared/strongRootsScope.hpp"
 #include "gc/shenandoah/heuristics/shenandoahOldHeuristics.hpp"
 #include "gc/shenandoah/shenandoahAsserts.hpp"
 #include "gc/shenandoah/shenandoahCardTable.hpp"
@@ -94,6 +93,8 @@ public:
 class ShenandoahPurgeSATBTask : public WorkerTask {
 private:
   ShenandoahObjToScanQueueSet* _mark_queues;
+  // Keep track of the number of oops that are not transferred to mark queues.
+  // This is volatile because workers update it, but the vm thread reads it.
   volatile size_t _trashed_oops;
 
 public:
@@ -124,6 +125,35 @@ public:
   }
 };
 
+class ShenandoahTransferOldSATBTask : public WorkerTask {
+  ShenandoahSATBMarkQueueSet& _satb_queues;
+  ShenandoahObjToScanQueueSet* _mark_queues;
+  // Keep track of the number of oops that are not transferred to mark queues.
+  // This is volatile because workers update it, but the control thread reads it.
+  volatile size_t _trashed_oops;
+
+public:
+  explicit ShenandoahTransferOldSATBTask(ShenandoahSATBMarkQueueSet& satb_queues, ShenandoahObjToScanQueueSet* mark_queues) :
+    WorkerTask("Transfer SATB"),
+    _satb_queues(satb_queues),
+    _mark_queues(mark_queues),
+    _trashed_oops(0) {}
+
+  ~ShenandoahTransferOldSATBTask() {
+    if (_trashed_oops > 0) {
+      log_debug(gc)("Purged %zu oops from old generation SATB buffers", _trashed_oops);
+    }
+  }
+
+  void work(uint worker_id) override {
+    ShenandoahObjToScanQueue* mark_queue = _mark_queues->queue(worker_id);
+    ShenandoahProcessOldSATB processor(mark_queue);
+
+    while (_satb_queues.apply_closure_to_completed_buffer(&processor)) {}
+
+    Atomic::add(&_trashed_oops, processor.trashed_oops());
+  }
+};
+
 class ShenandoahConcurrentCoalesceAndFillTask : public WorkerTask {
 private:
   uint _nworkers;
@@ -423,14 +453,25 @@ bool ShenandoahOldGeneration::coalesce_and_fill() {
   }
 }
 
-void ShenandoahOldGeneration::transfer_pointers_from_satb() {
-  ShenandoahHeap* heap = ShenandoahHeap::heap();
-  shenandoah_assert_safepoint();
+void ShenandoahOldGeneration::concurrent_transfer_pointers_from_satb() const {
+  const ShenandoahHeap* heap = ShenandoahHeap::heap();
   assert(heap->is_concurrent_old_mark_in_progress(), "Only necessary during old marking.");
   log_debug(gc)("Transfer SATB buffers");
-  uint nworkers = heap->workers()->active_workers();
-  StrongRootsScope scope(nworkers);
 
+  // Step 1. All threads need to 'complete' partially filled, thread local SATB buffers. This
+  // is accomplished in ShenandoahConcurrentGC::complete_abbreviated_cycle using a Handshake
+  // operation.
+  // Step 2. Use worker threads to transfer oops from old, active regions in the completed
+  // SATB buffers to old generation mark queues.
+  ShenandoahSATBMarkQueueSet& satb_queues = ShenandoahBarrierSet::satb_mark_queue_set();
+  ShenandoahTransferOldSATBTask transfer_task(satb_queues, task_queues());
+  heap->workers()->run_task(&transfer_task);
+}
+
+void ShenandoahOldGeneration::transfer_pointers_from_satb() const {
+  const ShenandoahHeap* heap = ShenandoahHeap::heap();
+  assert(heap->is_concurrent_old_mark_in_progress(), "Only necessary during old marking.");
+  log_debug(gc)("Transfer SATB buffers");
   ShenandoahPurgeSATBTask purge_satb_task(task_queues());
   heap->workers()->run_task(&purge_satb_task);
 }
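
Note: the "Step 1 / Step 2" comments above describe a protocol whose halves live in different files. A combined sketch of the sequence (names from this commit; illustrative ordering, not a verbatim excerpt):

  // Step 1: handshake so every Java thread retires its partially filled,
  // thread-local SATB buffer into the completed-buffer list. This is issued from
  // ShenandoahConcurrentGC::complete_abbreviated_cycle via concurrent_final_roots().
  ShenandoahSATBMarkQueueSet& satb_queues = ShenandoahBarrierSet::satb_mark_queue_set();
  ShenandoahFlushSATBHandshakeClosure flush_satb(satb_queues);
  heap->concurrent_final_roots(&flush_satb);

  // Step 2: worker threads drain the completed buffers into old-generation mark
  // queues, counting (and dropping) oops that do not point into old, active regions.
  heap->old_generation()->concurrent_transfer_pointers_from_satb();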

View File

@@ -232,7 +232,8 @@ public:
   // Alternatively, we could inspect the state of the heap and the age of the
   // object at the barrier, but we reject this approach because it is likely
   // the performance impact would be too severe.
-  void transfer_pointers_from_satb();
+  void transfer_pointers_from_satb() const;
+  void concurrent_transfer_pointers_from_satb() const;
 
   // True if there are old regions waiting to be selected for a mixed collection
   bool has_unprocessed_collection_candidates();

View File

@@ -121,6 +121,7 @@ bool ShenandoahPhaseTimings::is_worker_phase(Phase phase) {
     case conc_weak_refs:
     case conc_strong_roots:
     case conc_coalesce_and_fill:
+    case promote_in_place:
       return true;
     default:
       return false;

View File

@@ -54,6 +54,7 @@ class outputStream;
   f(conc_reset_old,                                "Concurrent Reset (OLD)") \
   f(init_mark_gross,                               "Pause Init Mark (G)") \
   f(init_mark,                                     "Pause Init Mark (N)") \
+  f(init_mark_verify,                              " Verify") \
   f(init_manage_tlabs,                             " Manage TLABs") \
   f(init_swap_rset,                                " Swap Remembered Set") \
   f(init_transfer_satb,                            " Transfer Old From SATB") \
@@ -71,6 +72,7 @@ class outputStream;
                                                    \
   f(final_mark_gross,                              "Pause Final Mark (G)") \
   f(final_mark,                                    "Pause Final Mark (N)") \
+  f(final_mark_verify,                             " Verify") \
   f(finish_mark,                                   " Finish Mark") \
   f(final_mark_propagate_gc_state,                 " Propagate GC State") \
   SHENANDOAH_PAR_PHASE_DO(finish_mark_,            " FM: ", f) \
@@ -107,21 +109,22 @@ class outputStream;
   f(conc_strong_roots,                             "Concurrent Strong Roots") \
   SHENANDOAH_PAR_PHASE_DO(conc_strong_roots_,      " CSR: ", f) \
   f(conc_evac,                                     "Concurrent Evacuation") \
-  f(promote_in_place,                              "Concurrent Promote Regions") \
-  f(final_roots_gross,                             "Pause Final Roots (G)") \
-  f(final_roots,                                   "Pause Final Roots (N)") \
-  f(final_roots_propagate_gc_state,                " Propagate GC State") \
+  f(conc_final_roots,                              "Concurrent Final Roots") \
+  f(promote_in_place,                              " Promote Regions") \
+  f(final_roots_gross,                             "Pause Verify Final Roots (G)") \
+  f(final_roots,                                   "Pause Verify Final Roots (N)") \
                                                    \
   f(init_update_refs_gross,                        "Pause Init Update Refs (G)") \
   f(init_update_refs,                              "Pause Init Update Refs (N)") \
-  f(init_update_refs_manage_gclabs,                " Manage GCLABs") \
+  f(init_update_refs_verify,                       " Verify") \
                                                    \
+  f(conc_update_refs_prepare,                      "Concurrent Update Refs Prepare") \
   f(conc_update_refs,                              "Concurrent Update Refs") \
   f(conc_update_thread_roots,                      "Concurrent Update Thread Roots") \
                                                    \
   f(final_update_refs_gross,                       "Pause Final Update Refs (G)") \
   f(final_update_refs,                             "Pause Final Update Refs (N)") \
-  f(final_update_refs_finish_work,                 " Finish Work") \
+  f(final_update_refs_verify,                      " Verify") \
   f(final_update_refs_update_region_states,        " Update Region States") \
   f(final_update_refs_trash_cset,                  " Trash Collection Set") \
   f(final_update_refs_rebuild_freeset,             " Rebuild Free Set") \
@@ -152,7 +155,6 @@ class outputStream;
   f(degen_gc_stw_evac,                             " Evacuation") \
   f(degen_gc_init_update_refs_manage_gclabs,       " Manage GCLABs") \
   f(degen_gc_update_refs,                          " Update References") \
-  f(degen_gc_final_update_refs_finish_work,        " Finish Work") \
   f(degen_gc_final_update_refs_update_region_states," Update Region States") \
   f(degen_gc_final_update_refs_trash_cset,         " Trash Collection Set") \
   f(degen_gc_final_update_refs_rebuild_freeset,    " Rebuild Free Set") \

View File

@@ -116,5 +116,5 @@ void VM_ShenandoahFinalUpdateRefs::doit() {
 void VM_ShenandoahFinalRoots::doit() {
   ShenandoahGCPauseMark mark(_gc_id, "Final Roots", SvcGCMarker::CONCURRENT);
   set_active_generation();
-  _gc->entry_final_roots();
+  _gc->entry_verify_final_roots();
 }