Path: blob/master/src/hotspot/share/gc/shenandoah/shenandoahConcurrentGC.cpp
/*
 * Copyright (c) 2021, Red Hat, Inc. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"

#include "gc/shared/barrierSetNMethod.hpp"
#include "gc/shared/collectorCounters.hpp"
#include "gc/shenandoah/shenandoahBreakpoint.hpp"
#include "gc/shenandoah/shenandoahCollectorPolicy.hpp"
#include "gc/shenandoah/shenandoahConcurrentGC.hpp"
#include "gc/shenandoah/shenandoahFreeSet.hpp"
#include "gc/shenandoah/shenandoahLock.hpp"
#include "gc/shenandoah/shenandoahMark.inline.hpp"
#include "gc/shenandoah/shenandoahMonitoringSupport.hpp"
#include "gc/shenandoah/shenandoahOopClosures.inline.hpp"
#include "gc/shenandoah/shenandoahPhaseTimings.hpp"
#include "gc/shenandoah/shenandoahReferenceProcessor.hpp"
#include "gc/shenandoah/shenandoahRootProcessor.inline.hpp"
#include "gc/shenandoah/shenandoahStackWatermark.hpp"
#include "gc/shenandoah/shenandoahUtils.hpp"
#include "gc/shenandoah/shenandoahVerifier.hpp"
#include "gc/shenandoah/shenandoahVMOperations.hpp"
#include "gc/shenandoah/shenandoahWorkGroup.hpp"
#include "gc/shenandoah/shenandoahWorkerPolicy.hpp"
#include "memory/allocation.hpp"
#include "prims/jvmtiTagMap.hpp"
#include "runtime/vmThread.hpp"
#include "utilities/events.hpp"

// Breakpoint support
class ShenandoahBreakpointGCScope : public StackObj {
private:
  const GCCause::Cause _cause;
public:
  ShenandoahBreakpointGCScope(GCCause::Cause cause) : _cause(cause) {
    if (cause == GCCause::_wb_breakpoint) {
      ShenandoahBreakpoint::start_gc();
      ShenandoahBreakpoint::at_before_gc();
    }
  }

  ~ShenandoahBreakpointGCScope() {
    if (_cause == GCCause::_wb_breakpoint) {
      ShenandoahBreakpoint::at_after_gc();
    }
  }
};

class ShenandoahBreakpointMarkScope : public StackObj {
private:
  const GCCause::Cause _cause;
public:
  ShenandoahBreakpointMarkScope(GCCause::Cause cause) : _cause(cause) {
    if (_cause == GCCause::_wb_breakpoint) {
      ShenandoahBreakpoint::at_after_marking_started();
    }
  }

  ~ShenandoahBreakpointMarkScope() {
    if (_cause == GCCause::_wb_breakpoint) {
      ShenandoahBreakpoint::at_before_marking_completed();
    }
  }
};

ShenandoahConcurrentGC::ShenandoahConcurrentGC() :
  _mark(),
  _degen_point(ShenandoahDegenPoint::_degenerated_unset) {
}

ShenandoahGC::ShenandoahDegenPoint ShenandoahConcurrentGC::degen_point() const {
  return _degen_point;
}

void ShenandoahConcurrentGC::cancel() {
  ShenandoahConcurrentMark::cancel();
}

bool ShenandoahConcurrentGC::collect(GCCause::Cause cause) {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  ShenandoahBreakpointGCScope breakpoint_gc_scope(cause);

  // Reset for upcoming marking
  entry_reset();

  // Start initial mark under STW
  vmop_entry_init_mark();

  {
    ShenandoahBreakpointMarkScope breakpoint_mark_scope(cause);
    // Concurrent mark roots
    entry_mark_roots();
    if (check_cancellation_and_abort(ShenandoahDegenPoint::_degenerated_outside_cycle)) return false;

    // Continue concurrent mark
    entry_mark();
    if (check_cancellation_and_abort(ShenandoahDegenPoint::_degenerated_mark)) return false;
  }

  // Complete marking under STW, and start evacuation
  vmop_entry_final_mark();

  // Concurrent stack processing
  if (heap->is_evacuation_in_progress()) {
    entry_thread_roots();
  }

  // Process weak roots that might still point to regions that would be broken by cleanup
  if (heap->is_concurrent_weak_root_in_progress()) {
    entry_weak_refs();
    entry_weak_roots();
  }

  // Final mark might have reclaimed some immediate garbage; kick cleanup to reclaim
  // the space. This would be the last action if there is nothing to evacuate.
  entry_cleanup_early();

  {
    ShenandoahHeapLocker locker(heap->lock());
    heap->free_set()->log_status();
  }

  // Perform concurrent class unloading
  if (heap->unload_classes() &&
      heap->is_concurrent_weak_root_in_progress()) {
    entry_class_unloading();
  }

  // Processing strong roots
  // This may be skipped if there is nothing to update/evacuate.
  // If so, strong_root_in_progress would be unset.
  if (heap->is_concurrent_strong_root_in_progress()) {
    entry_strong_roots();
  }

  // Continue the cycle with evacuation and optional update-refs.
  // This may be skipped if there is nothing to evacuate.
  // If so, evac_in_progress would be unset by collection set preparation code.
  if (heap->is_evacuation_in_progress()) {
    // Concurrently evacuate
    entry_evacuate();
    if (check_cancellation_and_abort(ShenandoahDegenPoint::_degenerated_evac)) return false;

    // Perform update-refs phase.
    vmop_entry_init_updaterefs();
    entry_updaterefs();
    if (check_cancellation_and_abort(ShenandoahDegenPoint::_degenerated_updaterefs)) return false;

    // Concurrent update thread roots
    entry_update_thread_roots();
    if (check_cancellation_and_abort(ShenandoahDegenPoint::_degenerated_updaterefs)) return false;

    vmop_entry_final_updaterefs();

    // Update references freed up the collection set; kick the cleanup to reclaim the space.
    entry_cleanup_complete();
  } else {
    vmop_entry_final_roots();
  }

  return true;
}

void ShenandoahConcurrentGC::vmop_entry_init_mark() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  TraceCollectorStats tcs(heap->monitoring_support()->stw_collection_counters());
  ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::init_mark_gross);

  heap->try_inject_alloc_failure();
  VM_ShenandoahInitMark op(this);
  VMThread::execute(&op); // jump to entry_init_mark() under safepoint
}

void ShenandoahConcurrentGC::vmop_entry_final_mark() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  TraceCollectorStats tcs(heap->monitoring_support()->stw_collection_counters());
  ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::final_mark_gross);

  heap->try_inject_alloc_failure();
  VM_ShenandoahFinalMarkStartEvac op(this);
  VMThread::execute(&op); // jump to entry_final_mark under safepoint
}

void ShenandoahConcurrentGC::vmop_entry_init_updaterefs() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  TraceCollectorStats tcs(heap->monitoring_support()->stw_collection_counters());
  ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::init_update_refs_gross);

  heap->try_inject_alloc_failure();
  VM_ShenandoahInitUpdateRefs op(this);
  VMThread::execute(&op);
}

void ShenandoahConcurrentGC::vmop_entry_final_updaterefs() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  TraceCollectorStats tcs(heap->monitoring_support()->stw_collection_counters());
  ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::final_update_refs_gross);

  heap->try_inject_alloc_failure();
  VM_ShenandoahFinalUpdateRefs op(this);
  VMThread::execute(&op);
}

void ShenandoahConcurrentGC::vmop_entry_final_roots() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  TraceCollectorStats tcs(heap->monitoring_support()->stw_collection_counters());
  ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::final_roots_gross);

  // This phase does not use workers, no need for setup
  heap->try_inject_alloc_failure();
  VM_ShenandoahFinalRoots op(this);
  VMThread::execute(&op);
}

void ShenandoahConcurrentGC::entry_init_mark() {
  const char* msg = init_mark_event_message();
  ShenandoahPausePhase gc_phase(msg, ShenandoahPhaseTimings::init_mark);
  EventMark em("%s", msg);

  ShenandoahWorkerScope scope(ShenandoahHeap::heap()->workers(),
                              ShenandoahWorkerPolicy::calc_workers_for_init_marking(),
                              "init marking");

  op_init_mark();
}

void ShenandoahConcurrentGC::entry_final_mark() {
  const char* msg = final_mark_event_message();
  ShenandoahPausePhase gc_phase(msg, ShenandoahPhaseTimings::final_mark);
  EventMark em("%s", msg);

  ShenandoahWorkerScope scope(ShenandoahHeap::heap()->workers(),
                              ShenandoahWorkerPolicy::calc_workers_for_final_marking(),
                              "final marking");

  op_final_mark();
}

void ShenandoahConcurrentGC::entry_init_updaterefs() {
  static const char* msg = "Pause Init Update Refs";
  ShenandoahPausePhase gc_phase(msg, ShenandoahPhaseTimings::init_update_refs);
  EventMark em("%s", msg);

  // No workers used in this phase, no setup required
  op_init_updaterefs();
}

void ShenandoahConcurrentGC::entry_final_updaterefs() {
  static const char* msg = "Pause Final Update Refs";
  ShenandoahPausePhase gc_phase(msg, ShenandoahPhaseTimings::final_update_refs);
  EventMark em("%s", msg);

  ShenandoahWorkerScope scope(ShenandoahHeap::heap()->workers(),
                              ShenandoahWorkerPolicy::calc_workers_for_final_update_ref(),
                              "final reference update");

  op_final_updaterefs();
}

void ShenandoahConcurrentGC::entry_final_roots() {
  static const char* msg = "Pause Final Roots";
  ShenandoahPausePhase gc_phase(msg, ShenandoahPhaseTimings::final_roots);
  EventMark em("%s", msg);

  op_final_roots();
}

void ShenandoahConcurrentGC::entry_reset() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
  static const char* msg = "Concurrent reset";
  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_reset);
  EventMark em("%s", msg);

  ShenandoahWorkerScope scope(heap->workers(),
                              ShenandoahWorkerPolicy::calc_workers_for_conc_reset(),
                              "concurrent reset");

  heap->try_inject_alloc_failure();
  op_reset();
}

void ShenandoahConcurrentGC::entry_mark_roots() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
  const char* msg = "Concurrent marking roots";
  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_mark_roots);
  EventMark em("%s", msg);

  ShenandoahWorkerScope scope(heap->workers(),
                              ShenandoahWorkerPolicy::calc_workers_for_conc_marking(),
                              "concurrent marking roots");

  heap->try_inject_alloc_failure();
  op_mark_roots();
}

void ShenandoahConcurrentGC::entry_mark() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
  const char* msg = conc_mark_event_message();
  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_mark);
  EventMark em("%s", msg);

  ShenandoahWorkerScope scope(heap->workers(),
                              ShenandoahWorkerPolicy::calc_workers_for_conc_marking(),
                              "concurrent marking");

  heap->try_inject_alloc_failure();
  op_mark();
}

void ShenandoahConcurrentGC::entry_thread_roots() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  static const char* msg = "Concurrent thread roots";
  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_thread_roots);
  EventMark em("%s", msg);

  ShenandoahWorkerScope scope(heap->workers(),
                              ShenandoahWorkerPolicy::calc_workers_for_conc_root_processing(),
                              msg);

  heap->try_inject_alloc_failure();
  op_thread_roots();
}

void ShenandoahConcurrentGC::entry_weak_refs() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  static const char* msg = "Concurrent weak references";
  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_weak_refs);
  EventMark em("%s", msg);

  ShenandoahWorkerScope scope(heap->workers(),
                              ShenandoahWorkerPolicy::calc_workers_for_conc_refs_processing(),
                              "concurrent weak references");

  heap->try_inject_alloc_failure();
  op_weak_refs();
}

void ShenandoahConcurrentGC::entry_weak_roots() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
  static const char* msg = "Concurrent weak roots";
  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_weak_roots);
  EventMark em("%s", msg);

  ShenandoahWorkerScope scope(heap->workers(),
                              ShenandoahWorkerPolicy::calc_workers_for_conc_root_processing(),
                              "concurrent weak root");

  heap->try_inject_alloc_failure();
  op_weak_roots();
}

void ShenandoahConcurrentGC::entry_class_unloading() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
  static const char* msg = "Concurrent class unloading";
  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_class_unload);
  EventMark em("%s", msg);

  ShenandoahWorkerScope scope(heap->workers(),
                              ShenandoahWorkerPolicy::calc_workers_for_conc_root_processing(),
                              "concurrent class unloading");

  heap->try_inject_alloc_failure();
  op_class_unloading();
}

void ShenandoahConcurrentGC::entry_strong_roots() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
  static const char* msg = "Concurrent strong roots";
  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_strong_roots);
  EventMark em("%s", msg);

  ShenandoahGCWorkerPhase worker_phase(ShenandoahPhaseTimings::conc_strong_roots);

  ShenandoahWorkerScope scope(heap->workers(),
                              ShenandoahWorkerPolicy::calc_workers_for_conc_root_processing(),
                              "concurrent strong root");

  heap->try_inject_alloc_failure();
  op_strong_roots();
}

void ShenandoahConcurrentGC::entry_cleanup_early() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
  static const char* msg = "Concurrent cleanup";
  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_cleanup_early, true /* log_heap_usage */);
  EventMark em("%s", msg);

  // This phase does not use workers, no need for setup
  heap->try_inject_alloc_failure();
  op_cleanup_early();
}

void ShenandoahConcurrentGC::entry_evacuate() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());

  static const char* msg = "Concurrent evacuation";
  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_evac);
  EventMark em("%s", msg);

  ShenandoahWorkerScope scope(heap->workers(),
                              ShenandoahWorkerPolicy::calc_workers_for_conc_evac(),
                              "concurrent evacuation");

  heap->try_inject_alloc_failure();
  op_evacuate();
}

void ShenandoahConcurrentGC::entry_update_thread_roots() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());

  static const char* msg = "Concurrent update thread roots";
  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_update_thread_roots);
  EventMark em("%s", msg);

  // No workers used in this phase, no setup required
  heap->try_inject_alloc_failure();
  op_update_thread_roots();
}

void ShenandoahConcurrentGC::entry_updaterefs() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
  static const char* msg = "Concurrent update references";
  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_update_refs);
  EventMark em("%s", msg);

  ShenandoahWorkerScope scope(heap->workers(),
                              ShenandoahWorkerPolicy::calc_workers_for_conc_update_ref(),
                              "concurrent reference update");

  heap->try_inject_alloc_failure();
  op_updaterefs();
}

void ShenandoahConcurrentGC::entry_cleanup_complete() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
  static const char* msg = "Concurrent cleanup";
  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_cleanup_complete, true /* log_heap_usage */);
  EventMark em("%s", msg);

  // This phase does not use workers, no need for setup
  heap->try_inject_alloc_failure();
  op_cleanup_complete();
}

void ShenandoahConcurrentGC::op_reset() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  if (ShenandoahPacing) {
    heap->pacer()->setup_for_reset();
  }

  heap->prepare_gc();
}

class ShenandoahInitMarkUpdateRegionStateClosure : public ShenandoahHeapRegionClosure {
private:
  ShenandoahMarkingContext* const _ctx;
public:
  ShenandoahInitMarkUpdateRegionStateClosure() : _ctx(ShenandoahHeap::heap()->marking_context()) {}

  void heap_region_do(ShenandoahHeapRegion* r) {
    assert(!r->has_live(), "Region " SIZE_FORMAT " should have no live data", r->index());
    if (r->is_active()) {
      // Check if the region needs its TAMS updated. We have updated it already during the
      // concurrent reset, so it is very likely we don't need to do another write here.
      if (_ctx->top_at_mark_start(r) != r->top()) {
        _ctx->capture_top_at_mark_start(r);
      }
    } else {
      assert(_ctx->top_at_mark_start(r) == r->top(),
             "Region " SIZE_FORMAT " should already have correct TAMS", r->index());
    }
  }

  bool is_thread_safe() { return true; }
};

void ShenandoahConcurrentGC::op_init_mark() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  assert(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "Should be at safepoint");
  assert(Thread::current()->is_VM_thread(), "can only do this in VMThread");

  assert(heap->marking_context()->is_bitmap_clear(), "need clear marking bitmap");
  assert(!heap->marking_context()->is_complete(), "should not be complete");
  assert(!heap->has_forwarded_objects(), "No forwarded objects on this path");

  if (ShenandoahVerify) {
    heap->verifier()->verify_before_concmark();
  }

  if (VerifyBeforeGC) {
    Universe::verify();
  }

  heap->set_concurrent_mark_in_progress(true);

  {
    ShenandoahGCPhase phase(ShenandoahPhaseTimings::init_update_region_states);
    ShenandoahInitMarkUpdateRegionStateClosure cl;
    heap->parallel_heap_region_iterate(&cl);
  }

  // Weak reference processing
  ShenandoahReferenceProcessor* rp = heap->ref_processor();
  rp->reset_thread_locals();
  rp->set_soft_reference_policy(heap->soft_ref_policy()->should_clear_all_soft_refs());

  // Make above changes visible to worker threads
  OrderAccess::fence();
  // Arm nmethods for concurrent marking. When an nmethod is about to be executed,
  // we need to make sure that all its metadata are marked. The alternative is to remark
  // thread roots at the final mark pause, but that can be a potential latency killer.
  if (heap->unload_classes()) {
    ShenandoahCodeRoots::arm_nmethods();
  }

  ShenandoahStackWatermark::change_epoch_id();
  if (ShenandoahPacing) {
    heap->pacer()->setup_for_mark();
  }
}

void ShenandoahConcurrentGC::op_mark_roots() {
  _mark.mark_concurrent_roots();
}

void ShenandoahConcurrentGC::op_mark() {
  _mark.concurrent_mark();
}

void ShenandoahConcurrentGC::op_final_mark() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  assert(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "Should be at safepoint");
  assert(!heap->has_forwarded_objects(), "No forwarded objects on this path");

  if (ShenandoahVerify) {
    heap->verifier()->verify_roots_no_forwarded();
  }

  if (!heap->cancelled_gc()) {
    _mark.finish_mark();
    assert(!heap->cancelled_gc(), "STW mark cannot OOM");

    // Notify JVMTI that the tagmap table will need cleaning.
    JvmtiTagMap::set_needs_cleaning();

    heap->prepare_regions_and_collection_set(true /*concurrent*/);

    // Has to be done after cset selection
    heap->prepare_concurrent_roots();

    if (!heap->collection_set()->is_empty()) {
      if (ShenandoahVerify) {
        heap->verifier()->verify_before_evacuation();
      }

      heap->set_evacuation_in_progress(true);
      // From here on, we need to update references.
      heap->set_has_forwarded_objects(true);

      // Verify before arming for concurrent processing.
      // Otherwise, verification can trigger stack processing.
      if (ShenandoahVerify) {
        heap->verifier()->verify_during_evacuation();
      }

      // Arm nmethods/stack for concurrent processing
      ShenandoahCodeRoots::arm_nmethods();
      ShenandoahStackWatermark::change_epoch_id();

      // Notify JVMTI that oops have changed.
      JvmtiTagMap::set_needs_rehashing();

      if (ShenandoahPacing) {
        heap->pacer()->setup_for_evac();
      }
    } else {
      if (ShenandoahVerify) {
        heap->verifier()->verify_after_concmark();
      }

      if (VerifyAfterGC) {
        Universe::verify();
      }
    }
  }
}

class ShenandoahConcurrentEvacThreadClosure : public ThreadClosure {
private:
  OopClosure* const _oops;

public:
  ShenandoahConcurrentEvacThreadClosure(OopClosure* oops);
  void do_thread(Thread* thread);
};

ShenandoahConcurrentEvacThreadClosure::ShenandoahConcurrentEvacThreadClosure(OopClosure* oops) :
  _oops(oops) {
}

void ShenandoahConcurrentEvacThreadClosure::do_thread(Thread* thread) {
  JavaThread* const jt = thread->as_Java_thread();
  StackWatermarkSet::finish_processing(jt, _oops, StackWatermarkKind::gc);
}

class ShenandoahConcurrentEvacUpdateThreadTask : public AbstractGangTask {
private:
  ShenandoahJavaThreadsIterator _java_threads;

public:
  ShenandoahConcurrentEvacUpdateThreadTask(uint n_workers) :
    AbstractGangTask("Shenandoah Evacuate/Update Concurrent Thread Roots"),
    _java_threads(ShenandoahPhaseTimings::conc_thread_roots, n_workers) {
  }

  void work(uint worker_id) {
    // ShenandoahEvacOOMScope has to be set up by ShenandoahContextEvacuateUpdateRootsClosure.
    // Otherwise, it may deadlock with the watermark lock.
    ShenandoahContextEvacuateUpdateRootsClosure oops_cl;
    ShenandoahConcurrentEvacThreadClosure thr_cl(&oops_cl);
    _java_threads.threads_do(&thr_cl, worker_id);
  }
};

void ShenandoahConcurrentGC::op_thread_roots() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  assert(heap->is_evacuation_in_progress(), "Checked by caller");
  ShenandoahGCWorkerPhase worker_phase(ShenandoahPhaseTimings::conc_thread_roots);
  ShenandoahConcurrentEvacUpdateThreadTask task(heap->workers()->active_workers());
  heap->workers()->run_task(&task);
}

void ShenandoahConcurrentGC::op_weak_refs() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  assert(heap->is_concurrent_weak_root_in_progress(), "Only during this phase");
  // Concurrent weak refs processing
  ShenandoahGCWorkerPhase worker_phase(ShenandoahPhaseTimings::conc_weak_refs);
  if (heap->gc_cause() == GCCause::_wb_breakpoint) {
    ShenandoahBreakpoint::at_after_reference_processing_started();
  }
  heap->ref_processor()->process_references(ShenandoahPhaseTimings::conc_weak_refs, heap->workers(), true /* concurrent */);
}

class ShenandoahEvacUpdateCleanupOopStorageRootsClosure : public BasicOopIterateClosure {
private:
  ShenandoahHeap* const _heap;
  ShenandoahMarkingContext* const _mark_context;
  bool _evac_in_progress;
  Thread* const _thread;

public:
  ShenandoahEvacUpdateCleanupOopStorageRootsClosure();
  void do_oop(oop* p);
  void do_oop(narrowOop* p);
};

ShenandoahEvacUpdateCleanupOopStorageRootsClosure::ShenandoahEvacUpdateCleanupOopStorageRootsClosure() :
  _heap(ShenandoahHeap::heap()),
  _mark_context(ShenandoahHeap::heap()->marking_context()),
  _evac_in_progress(ShenandoahHeap::heap()->is_evacuation_in_progress()),
  _thread(Thread::current()) {
}

void ShenandoahEvacUpdateCleanupOopStorageRootsClosure::do_oop(oop* p) {
  const oop obj = RawAccess<>::oop_load(p);
  if (!CompressedOops::is_null(obj)) {
    if (!_mark_context->is_marked(obj)) {
      shenandoah_assert_correct(p, obj);
      Atomic::cmpxchg(p, obj, oop(NULL));
    } else if (_evac_in_progress && _heap->in_collection_set(obj)) {
      oop resolved = ShenandoahBarrierSet::resolve_forwarded_not_null(obj);
      if (resolved == obj) {
        resolved = _heap->evacuate_object(obj, _thread);
      }
      Atomic::cmpxchg(p, obj, resolved);
      assert(_heap->cancelled_gc() ||
             _mark_context->is_marked(resolved) && !_heap->in_collection_set(resolved),
             "Sanity");
    }
  }
}

void ShenandoahEvacUpdateCleanupOopStorageRootsClosure::do_oop(narrowOop* p) {
  ShouldNotReachHere();
}

class ShenandoahIsCLDAliveClosure : public CLDClosure {
public:
  void do_cld(ClassLoaderData* cld) {
    cld->is_alive();
  }
};

class ShenandoahIsNMethodAliveClosure: public NMethodClosure {
public:
  void do_nmethod(nmethod* n) {
    n->is_unloading();
  }
};

// This task not only evacuates/updates marked weak roots, but also "NULLs"
// dead weak roots.
class ShenandoahConcurrentWeakRootsEvacUpdateTask : public AbstractGangTask {
private:
  ShenandoahVMWeakRoots<true /*concurrent*/> _vm_roots;

  // Roots related to concurrent class unloading
  ShenandoahClassLoaderDataRoots<true /* concurrent */, true /* single thread*/>
                                       _cld_roots;
  ShenandoahConcurrentNMethodIterator  _nmethod_itr;
  ShenandoahPhaseTimings::Phase        _phase;

public:
  ShenandoahConcurrentWeakRootsEvacUpdateTask(ShenandoahPhaseTimings::Phase phase) :
    AbstractGangTask("Shenandoah Evacuate/Update Concurrent Weak Roots"),
    _vm_roots(phase),
    _cld_roots(phase, ShenandoahHeap::heap()->workers()->active_workers()),
    _nmethod_itr(ShenandoahCodeRoots::table()),
    _phase(phase) {
    if (ShenandoahHeap::heap()->unload_classes()) {
      MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
      _nmethod_itr.nmethods_do_begin();
    }
  }

  ~ShenandoahConcurrentWeakRootsEvacUpdateTask() {
    if (ShenandoahHeap::heap()->unload_classes()) {
      MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
      _nmethod_itr.nmethods_do_end();
    }
    // Notify runtime data structures of potentially dead oops
    _vm_roots.report_num_dead();
  }

  void work(uint worker_id) {
    ShenandoahConcurrentWorkerSession worker_session(worker_id);
    {
      ShenandoahEvacOOMScope oom;
      // jni_roots and weak_roots are OopStorage backed roots; concurrent iteration
      // may race against OopStorage::release() calls.
      ShenandoahEvacUpdateCleanupOopStorageRootsClosure cl;
      _vm_roots.oops_do(&cl, worker_id);
    }

    // If we are going to perform concurrent class unloading later on, we need to
    // clean up the weak oops in CLD and determine the nmethod's unloading state, so that we
    // can clean up immediate garbage sooner.
    if (ShenandoahHeap::heap()->unload_classes()) {
      // Applies ShenandoahIsCLDAliveClosure to CLDs; the native barrier will either NULL the
      // CLD's holder or evacuate it.
      {
        ShenandoahIsCLDAliveClosure is_cld_alive;
        _cld_roots.cld_do(&is_cld_alive, worker_id);
      }

      // Applies ShenandoahIsNMethodAliveClosure to registered nmethods.
      // The closure calls nmethod->is_unloading(). The is_unloading
      // state is cached; therefore, during the concurrent class unloading phase,
      // we will not touch the metadata of unloading nmethods.
      {
        ShenandoahWorkerTimingsTracker timer(_phase, ShenandoahPhaseTimings::CodeCacheRoots, worker_id);
        ShenandoahIsNMethodAliveClosure is_nmethod_alive;
        _nmethod_itr.nmethods_do(&is_nmethod_alive);
      }
    }
  }
};

void ShenandoahConcurrentGC::op_weak_roots() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  assert(heap->is_concurrent_weak_root_in_progress(), "Only during this phase");
  // Concurrent weak root processing
  {
    ShenandoahTimingsTracker t(ShenandoahPhaseTimings::conc_weak_roots_work);
    ShenandoahGCWorkerPhase worker_phase(ShenandoahPhaseTimings::conc_weak_roots_work);
    ShenandoahConcurrentWeakRootsEvacUpdateTask task(ShenandoahPhaseTimings::conc_weak_roots_work);
    heap->workers()->run_task(&task);
  }

  // Perform handshake to flush out dead oops
  {
    ShenandoahTimingsTracker t(ShenandoahPhaseTimings::conc_weak_roots_rendezvous);
    heap->rendezvous_threads();
  }
}

void ShenandoahConcurrentGC::op_class_unloading() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  assert (heap->is_concurrent_weak_root_in_progress() &&
          heap->unload_classes(),
          "Checked by caller");
  heap->do_class_unloading();
}

class ShenandoahEvacUpdateCodeCacheClosure : public NMethodClosure {
private:
  BarrierSetNMethod* const                  _bs;
  ShenandoahEvacuateUpdateMetadataClosure<> _cl;

public:
  ShenandoahEvacUpdateCodeCacheClosure() :
    _bs(BarrierSet::barrier_set()->barrier_set_nmethod()),
    _cl() {
  }

  void do_nmethod(nmethod* n) {
    ShenandoahNMethod* data = ShenandoahNMethod::gc_data(n);
    ShenandoahReentrantLocker locker(data->lock());
    // Set up the EvacOOM scope below the reentrant lock to avoid a deadlock with
    // nmethod_entry_barrier.
    ShenandoahEvacOOMScope oom;
    data->oops_do(&_cl, true/*fix relocation*/);
    _bs->disarm(n);
  }
};

class ShenandoahConcurrentRootsEvacUpdateTask : public AbstractGangTask {
private:
  ShenandoahPhaseTimings::Phase _phase;
  ShenandoahVMRoots<true /*concurrent*/> _vm_roots;
  ShenandoahClassLoaderDataRoots<true /*concurrent*/, false /*single threaded*/> _cld_roots;
  ShenandoahConcurrentNMethodIterator _nmethod_itr;

public:
  ShenandoahConcurrentRootsEvacUpdateTask(ShenandoahPhaseTimings::Phase phase) :
    AbstractGangTask("Shenandoah Evacuate/Update Concurrent Strong Roots"),
    _phase(phase),
    _vm_roots(phase),
    _cld_roots(phase, ShenandoahHeap::heap()->workers()->active_workers()),
    _nmethod_itr(ShenandoahCodeRoots::table()) {
    if (!ShenandoahHeap::heap()->unload_classes()) {
      MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
      _nmethod_itr.nmethods_do_begin();
    }
  }

  ~ShenandoahConcurrentRootsEvacUpdateTask() {
    if (!ShenandoahHeap::heap()->unload_classes()) {
      MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
      _nmethod_itr.nmethods_do_end();
    }
  }

  void work(uint worker_id) {
    ShenandoahConcurrentWorkerSession worker_session(worker_id);
    {
      ShenandoahEvacOOMScope oom;
      {
        // vm_roots and weak_roots are OopStorage backed roots; concurrent iteration
        // may race against OopStorage::release() calls.
        ShenandoahContextEvacuateUpdateRootsClosure cl;
        _vm_roots.oops_do<ShenandoahContextEvacuateUpdateRootsClosure>(&cl, worker_id);
      }

      {
        ShenandoahEvacuateUpdateMetadataClosure<> cl;
        CLDToOopClosure clds(&cl, ClassLoaderData::_claim_strong);
        _cld_roots.cld_do(&clds, worker_id);
      }
    }

    // Cannot set up ShenandoahEvacOOMScope here, due to a potential deadlock with nmethod_entry_barrier.
    if (!ShenandoahHeap::heap()->unload_classes()) {
      ShenandoahWorkerTimingsTracker timer(_phase, ShenandoahPhaseTimings::CodeCacheRoots, worker_id);
      ShenandoahEvacUpdateCodeCacheClosure cl;
      _nmethod_itr.nmethods_do(&cl);
    }
  }
};

void ShenandoahConcurrentGC::op_strong_roots() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  assert(heap->is_concurrent_strong_root_in_progress(), "Checked by caller");
  ShenandoahConcurrentRootsEvacUpdateTask task(ShenandoahPhaseTimings::conc_strong_roots);
  heap->workers()->run_task(&task);
  heap->set_concurrent_strong_root_in_progress(false);
}

void ShenandoahConcurrentGC::op_cleanup_early() {
  ShenandoahHeap::heap()->free_set()->recycle_trash();
}

void ShenandoahConcurrentGC::op_evacuate() {
  ShenandoahHeap::heap()->evacuate_collection_set(true /*concurrent*/);
}

void ShenandoahConcurrentGC::op_init_updaterefs() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  heap->set_evacuation_in_progress(false);
  heap->set_concurrent_weak_root_in_progress(false);
  heap->prepare_update_heap_references(true /*concurrent*/);
  heap->set_update_refs_in_progress(true);

  if (ShenandoahPacing) {
    heap->pacer()->setup_for_updaterefs();
  }
}

void ShenandoahConcurrentGC::op_updaterefs() {
  ShenandoahHeap::heap()->update_heap_references(true /*concurrent*/);
}

class ShenandoahUpdateThreadClosure : public HandshakeClosure {
private:
  ShenandoahUpdateRefsClosure _cl;
public:
  ShenandoahUpdateThreadClosure();
  void do_thread(Thread* thread);
};

ShenandoahUpdateThreadClosure::ShenandoahUpdateThreadClosure() :
  HandshakeClosure("Shenandoah Update Thread Roots") {
}

void ShenandoahUpdateThreadClosure::do_thread(Thread* thread) {
  if (thread->is_Java_thread()) {
    JavaThread* jt = thread->as_Java_thread();
    ResourceMark rm;
    jt->oops_do(&_cl, NULL);
  }
}

void ShenandoahConcurrentGC::op_update_thread_roots() {
  ShenandoahUpdateThreadClosure cl;
  Handshake::execute(&cl);
}

void ShenandoahConcurrentGC::op_final_updaterefs() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  assert(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "must be at safepoint");
  assert(!heap->_update_refs_iterator.has_next(), "Should have finished update references");

  heap->finish_concurrent_roots();

  // Clear cancelled GC, if set. On the cancellation path, the block before would handle
  // everything.
  if (heap->cancelled_gc()) {
    heap->clear_cancelled_gc();
  }

  // Has to be done before cset is clear
  if (ShenandoahVerify) {
    heap->verifier()->verify_roots_in_to_space();
  }

  heap->update_heap_region_states(true /*concurrent*/);

  heap->set_update_refs_in_progress(false);
  heap->set_has_forwarded_objects(false);

  if (ShenandoahVerify) {
    heap->verifier()->verify_after_updaterefs();
  }

  if (VerifyAfterGC) {
    Universe::verify();
  }

  heap->rebuild_free_set(true /*concurrent*/);
}

void ShenandoahConcurrentGC::op_final_roots() {
  ShenandoahHeap::heap()->set_concurrent_weak_root_in_progress(false);
}

void ShenandoahConcurrentGC::op_cleanup_complete() {
  ShenandoahHeap::heap()->free_set()->recycle_trash();
}

bool ShenandoahConcurrentGC::check_cancellation_and_abort(ShenandoahDegenPoint point) {
  if (ShenandoahHeap::heap()->cancelled_gc()) {
    _degen_point = point;
    return true;
  }
  return false;
}

const char* ShenandoahConcurrentGC::init_mark_event_message() const {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  assert(!heap->has_forwarded_objects(), "Should not have forwarded objects here");
  if (heap->unload_classes()) {
    return "Pause Init Mark (unload classes)";
  } else {
    return "Pause Init Mark";
  }
}

const char* ShenandoahConcurrentGC::final_mark_event_message() const {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  assert(!heap->has_forwarded_objects(), "Should not have forwarded objects here");
  if (heap->unload_classes()) {
    return "Pause Final Mark (unload classes)";
  } else {
    return "Pause Final Mark";
  }
}

const char* ShenandoahConcurrentGC::conc_mark_event_message() const {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  assert(!heap->has_forwarded_objects(), "Should not have forwarded objects here");
  if (heap->unload_classes()) {
    return "Concurrent marking (unload classes)";
  } else {
    return "Concurrent marking";
  }
}