diff --git a/src/hotspot/share/gc/shenandoah/heuristics/shenandoahOldHeuristics.cpp b/src/hotspot/share/gc/shenandoah/heuristics/shenandoahOldHeuristics.cpp
index 824dd3b492c..da2cd0e7712 100644
--- a/src/hotspot/share/gc/shenandoah/heuristics/shenandoahOldHeuristics.cpp
+++ b/src/hotspot/share/gc/shenandoah/heuristics/shenandoahOldHeuristics.cpp
@@ -66,7 +66,11 @@ ShenandoahOldHeuristics::ShenandoahOldHeuristics(ShenandoahOldGeneration* genera
   _old_generation(generation),
   _cannot_expand_trigger(false),
   _fragmentation_trigger(false),
-  _growth_trigger(false) {
+  _growth_trigger(false),
+  _fragmentation_density(0.0),
+  _fragmentation_first_old_region(0),
+  _fragmentation_last_old_region(0)
+{
 }
 
 bool ShenandoahOldHeuristics::prime_collection_set(ShenandoahCollectionSet* collection_set) {
@@ -203,11 +207,8 @@ bool ShenandoahOldHeuristics::prime_collection_set(ShenandoahCollectionSet* coll
     // We have added the last of our collection candidates to a mixed collection.
     // Any triggers that occurred during mixed evacuations may no longer be valid. They can retrigger if appropriate.
     clear_triggers();
-    if (has_coalesce_and_fill_candidates()) {
-      _old_generation->transition_to(ShenandoahOldGeneration::FILLING);
-    } else {
-      _old_generation->transition_to(ShenandoahOldGeneration::WAITING_FOR_BOOTSTRAP);
-    }
+
+    _old_generation->complete_mixed_evacuations();
   } else if (included_old_regions == 0) {
     // We have candidates, but none were included for evacuation - are they all pinned?
     // or did we just not have enough room for any of them in this collection set?
@@ -216,7 +217,7 @@ bool ShenandoahOldHeuristics::prime_collection_set(ShenandoahCollectionSet* coll
     // (pinned) regions parsable.
     if (all_candidates_are_pinned()) {
       log_info(gc)("All candidate regions " UINT32_FORMAT " are pinned", unprocessed_old_collection_candidates());
-      _old_generation->transition_to(ShenandoahOldGeneration::FILLING);
+      _old_generation->abandon_mixed_evacuations();
     } else {
       log_info(gc)("No regions selected for mixed collection. "
                    "Old evacuation budget: " PROPERFMT ", Remaining evacuation budget: " PROPERFMT
diff --git a/src/hotspot/share/gc/shenandoah/heuristics/shenandoahOldHeuristics.hpp b/src/hotspot/share/gc/shenandoah/heuristics/shenandoahOldHeuristics.hpp
index 158f959ac17..25c01116528 100644
--- a/src/hotspot/share/gc/shenandoah/heuristics/shenandoahOldHeuristics.hpp
+++ b/src/hotspot/share/gc/shenandoah/heuristics/shenandoahOldHeuristics.hpp
@@ -110,11 +110,10 @@ class ShenandoahOldHeuristics : public ShenandoahHeuristics {
   static int compare_by_index(RegionData a, RegionData b);
 
 protected:
-  virtual void choose_collection_set_from_regiondata(ShenandoahCollectionSet* set, RegionData* data, size_t data_size,
-                                                     size_t free) override;
+  void choose_collection_set_from_regiondata(ShenandoahCollectionSet* set, RegionData* data, size_t data_size, size_t free) override;
 
 public:
-  ShenandoahOldHeuristics(ShenandoahOldGeneration* generation);
+  explicit ShenandoahOldHeuristics(ShenandoahOldGeneration* generation);
 
   // Prepare for evacuation of old-gen regions by capturing the mark results of a recently completed concurrent mark pass.
   void prepare_for_old_collections();
@@ -193,7 +192,7 @@ class ShenandoahOldHeuristics : public ShenandoahHeuristics {
 
   bool is_experimental() override;
 
- private:
+private:
   void slide_pinned_regions_to_front();
   bool all_candidates_are_pinned();
 };
diff --git a/src/hotspot/share/gc/shenandoah/shenandoahConcurrentGC.cpp b/src/hotspot/share/gc/shenandoah/shenandoahConcurrentGC.cpp
index 3cbe4a78c74..9c658e9160d 100644
--- a/src/hotspot/share/gc/shenandoah/shenandoahConcurrentGC.cpp
+++ b/src/hotspot/share/gc/shenandoah/shenandoahConcurrentGC.cpp
@@ -231,6 +231,16 @@ bool ShenandoahConcurrentGC::collect(GCCause::Cause cause) {
 
   // We defer generation resizing actions until after cset regions have been recycled. We do this even following an
   // abbreviated cycle.
   if (heap->mode()->is_generational()) {
+    if (!heap->old_generation()->is_parseable()) {
+      // Class unloading may render the card offsets unusable, so we must rebuild them before
+      // the next remembered set scan. We _could_ let the control thread do this sometime after
+      // the global cycle has completed and before the next young collection, but under memory
+      // pressure the control thread may not have the time (that is, because it's running back
+      // to back GCs). In that scenario, we would have to make the old regions parsable before
+      // we could start a young collection. This could delay the start of the young cycle and
+      // throw off the heuristics.
+      entry_global_coalesce_and_fill();
+    }
     ShenandoahGenerationalHeap::TransferResult result;
     {
@@ -1273,6 +1283,25 @@ void ShenandoahConcurrentGC::op_final_roots() {
   }
 }
 
+void ShenandoahConcurrentGC::entry_global_coalesce_and_fill() {
+  ShenandoahHeap* const heap = ShenandoahHeap::heap();
+
+  const char* msg = "Coalescing and filling old regions in global collect";
+  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_coalesce_and_fill);
+
+  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
+  EventMark em("%s", msg);
+  ShenandoahWorkerScope scope(heap->workers(),
+                              ShenandoahWorkerPolicy::calc_workers_for_conc_marking(),
+                              "concurrent coalesce and fill");
+
+  op_global_coalesce_and_fill();
+}
+
+void ShenandoahConcurrentGC::op_global_coalesce_and_fill() {
+  ShenandoahGenerationalHeap::heap()->coalesce_and_fill_old_regions(true);
+}
+
 void ShenandoahConcurrentGC::op_cleanup_complete() {
   ShenandoahHeap::heap()->free_set()->recycle_trash();
 }
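Why the guard above matters: a remembered set scan walks an old region object by object, computing each object's size from its Klass. Class unloading can free the Klass of a dead object, so every dead range must first be replaced by a filler object with a valid header. A minimal sketch of the dependence (illustrative only, not the actual ShenandoahScanRemembered code; scan_dirty_card and its parameters are invented for the example):

// Illustrative only: walking a dirty card requires every object header on the
// path to be valid, because the size of each object comes from its Klass.
HeapWord* scan_dirty_card(HeapWord* first_obj_in_card, HeapWord* card_end) {
  HeapWord* p = first_obj_in_card;
  while (p < card_end) {
    oop obj = cast_to_oop(p);
    // ...scan the interesting fields of obj here...
    p += obj->size();  // reads obj->klass(); freed by class unloading if obj is dead
  }
  return p;
}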
diff --git a/src/hotspot/share/gc/shenandoah/shenandoahConcurrentGC.hpp b/src/hotspot/share/gc/shenandoah/shenandoahConcurrentGC.hpp
index e5857b83b24..0604879e7c3 100644
--- a/src/hotspot/share/gc/shenandoah/shenandoahConcurrentGC.hpp
+++ b/src/hotspot/share/gc/shenandoah/shenandoahConcurrentGC.hpp
@@ -58,7 +58,7 @@ class ShenandoahConcurrentGC : public ShenandoahGC {
 public:
   ShenandoahConcurrentGC(ShenandoahGeneration* generation, bool do_old_gc_bootstrap);
-  bool collect(GCCause::Cause cause);
+  bool collect(GCCause::Cause cause) override;
   ShenandoahDegenPoint degen_point() const;
 
   // Return true if this cycle found enough immediate garbage to skip evacuation
 
@@ -104,6 +104,7 @@ class ShenandoahConcurrentGC : public ShenandoahGC {
   void entry_evacuate();
   void entry_update_thread_roots();
   void entry_updaterefs();
+  void entry_global_coalesce_and_fill();
   void entry_cleanup_complete();
 
   // Actual work for the phases
@@ -123,6 +124,7 @@ class ShenandoahConcurrentGC : public ShenandoahGC {
   void op_update_thread_roots();
   void op_final_updaterefs();
   void op_final_roots();
+  void op_global_coalesce_and_fill();
   void op_cleanup_complete();
 
 protected:
diff --git a/src/hotspot/share/gc/shenandoah/shenandoahDegeneratedGC.cpp b/src/hotspot/share/gc/shenandoah/shenandoahDegeneratedGC.cpp
index fc0cc88527a..33406109e96 100644
--- a/src/hotspot/share/gc/shenandoah/shenandoahDegeneratedGC.cpp
+++ b/src/hotspot/share/gc/shenandoah/shenandoahDegeneratedGC.cpp
@@ -58,7 +58,7 @@ bool ShenandoahDegenGC::collect(GCCause::Cause cause) {
   vmop_degenerated();
   ShenandoahHeap* heap = ShenandoahHeap::heap();
   if (heap->mode()->is_generational()) {
-    bool is_bootstrap_gc = heap->old_generation()->state() == ShenandoahOldGeneration::BOOTSTRAPPING;
+    bool is_bootstrap_gc = heap->old_generation()->is_bootstrapping();
     heap->mmu_tracker()->record_degenerated(GCId::current(), is_bootstrap_gc);
     const char* msg = is_bootstrap_gc? "At end of Degenerated Bootstrap Old GC": "At end of Degenerated Young GC";
     heap->log_heap_status(msg);
@@ -105,10 +105,9 @@ void ShenandoahDegenGC::op_degenerated() {
       if (_generation->is_global()) {
         // If we are in a global cycle, the old generation should not be marking. It is, however,
         // allowed to be holding regions for evacuation or coalescing.
-        ShenandoahOldGeneration::State state = old_generation->state();
-        assert(state == ShenandoahOldGeneration::WAITING_FOR_BOOTSTRAP
-               || state == ShenandoahOldGeneration::EVACUATING
-               || state == ShenandoahOldGeneration::FILLING,
+        assert(old_generation->is_idle()
+               || old_generation->is_doing_mixed_evacuations()
+               || old_generation->is_preparing_for_mark(),
                "Old generation cannot be in state: %s", old_generation->state_name());
       }
     }
@@ -307,6 +306,10 @@ void ShenandoahDegenGC::op_degenerated() {
       // In case degeneration interrupted concurrent evacuation or update references, we need to clean up transient state.
      // Otherwise, these actions have no effect.
       ShenandoahGenerationalHeap::heap()->reset_generation_reserves();
+
+      if (!ShenandoahGenerationalHeap::heap()->old_generation()->is_parseable()) {
+        op_global_coalesce_and_fill();
+      }
     }
 
     if (ShenandoahVerify) {
@@ -400,6 +403,11 @@ void ShenandoahDegenGC::op_cleanup_early() {
   ShenandoahHeap::heap()->recycle_trash();
 }
 
+void ShenandoahDegenGC::op_global_coalesce_and_fill() {
+  ShenandoahGCPhase phase(ShenandoahPhaseTimings::degen_gc_coalesce_and_fill);
+  ShenandoahGenerationalHeap::heap()->coalesce_and_fill_old_regions(false);
+}
+
 void ShenandoahDegenGC::op_evacuate() {
   ShenandoahGCPhase phase(ShenandoahPhaseTimings::degen_gc_stw_evac);
   ShenandoahHeap::heap()->evacuate_collection_set(false /* concurrent*/);
diff --git a/src/hotspot/share/gc/shenandoah/shenandoahDegeneratedGC.hpp b/src/hotspot/share/gc/shenandoah/shenandoahDegeneratedGC.hpp
index 68f9a3f0ba2..652e2ee2f1e 100644
--- a/src/hotspot/share/gc/shenandoah/shenandoahDegeneratedGC.hpp
+++ b/src/hotspot/share/gc/shenandoah/shenandoahDegeneratedGC.hpp
@@ -58,6 +58,9 @@ class ShenandoahDegenGC : public ShenandoahGC {
   void op_update_roots();
   void op_cleanup_complete();
 
+  // This will rebuild card offsets, which is necessary if classes were unloaded
+  void op_global_coalesce_and_fill();
+
   // Fail handling
   void op_degenerated_futile();
   void op_degenerated_fail();
diff --git a/src/hotspot/share/gc/shenandoah/shenandoahGenerationalControlThread.cpp b/src/hotspot/share/gc/shenandoah/shenandoahGenerationalControlThread.cpp
index 8ac6de0eb51..d620db705aa 100644
--- a/src/hotspot/share/gc/shenandoah/shenandoahGenerationalControlThread.cpp
+++ b/src/hotspot/share/gc/shenandoah/shenandoahGenerationalControlThread.cpp
@@ -709,7 +709,7 @@ void ShenandoahGenerationalControlThread::service_stw_degenerated_cycle(GCCause:
   } else {
     assert(_degen_generation->is_young(), "Expected degenerated young cycle, if not global.");
     ShenandoahOldGeneration* old = heap->old_generation();
-    if (old->state() == ShenandoahOldGeneration::BOOTSTRAPPING) {
+    if (old->is_bootstrapping()) {
       old->transition_to(ShenandoahOldGeneration::MARKING);
     }
   }
diff --git a/src/hotspot/share/gc/shenandoah/shenandoahGenerationalEvacuationTask.cpp b/src/hotspot/share/gc/shenandoah/shenandoahGenerationalEvacuationTask.cpp
index 93442843d92..c95cf973a41 100644
--- a/src/hotspot/share/gc/shenandoah/shenandoahGenerationalEvacuationTask.cpp
+++ b/src/hotspot/share/gc/shenandoah/shenandoahGenerationalEvacuationTask.cpp
@@ -80,7 +80,6 @@ void ShenandoahGenerationalEvacuationTask::work(uint worker_id) {
 void ShenandoahGenerationalEvacuationTask::do_work() {
   ShenandoahConcurrentEvacuator cl(_heap);
   ShenandoahHeapRegion* r;
-  ShenandoahMarkingContext* const ctx = _heap->marking_context();
 
   while ((r = _regions->next()) != nullptr) {
     log_debug(gc)("GenerationalEvacuationTask do_work(), looking at %s region " SIZE_FORMAT ", (age: %d) [%s, %s, %s]",
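The concurrent path and the degenerated path above funnel into the same helper; the boolean only selects which timing phase the work is attributed to, and in both cases the per-region walk runs with cancellable = false. A condensed, hypothetical view of the two call sites (ensure_old_parseable is not part of the patch):

// Sketch: how the two GC paths decide to rebuild old-region parsability.
// The degenerated caller runs at a safepoint and passes concurrent = false;
// the concurrent caller is timed under conc_coalesce_and_fill. Neither yields.
void ensure_old_parseable(ShenandoahGenerationalHeap* heap, bool at_safepoint) {
  if (!heap->old_generation()->is_parseable()) {
    heap->coalesce_and_fill_old_regions(!at_safepoint /* concurrent */);
  }
}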
diff --git a/src/hotspot/share/gc/shenandoah/shenandoahGenerationalFullGC.cpp b/src/hotspot/share/gc/shenandoah/shenandoahGenerationalFullGC.cpp
index ef21e77b6eb..d75f4d08902 100644
--- a/src/hotspot/share/gc/shenandoah/shenandoahGenerationalFullGC.cpp
+++ b/src/hotspot/share/gc/shenandoah/shenandoahGenerationalFullGC.cpp
@@ -72,8 +72,7 @@ void ShenandoahGenerationalFullGC::handle_completion(ShenandoahHeap* heap) {
   heap->mmu_tracker()->record_full(GCId::current());
   heap->log_heap_status("At end of Full GC");
 
-  assert(old->state() == ShenandoahOldGeneration::WAITING_FOR_BOOTSTRAP,
-         "After full GC, old generation should be waiting for bootstrap.");
+  assert(old->is_idle(), "After full GC, old generation should be idle.");
 
   // Since we allow temporary violation of these constraints during Full GC, we want to enforce that the assertions are
   // made valid by the time Full GC completes.
@@ -91,6 +90,12 @@ void ShenandoahGenerationalFullGC::rebuild_remembered_set(ShenandoahHeap* heap)
   ShenandoahRegionIterator regions;
   ShenandoahReconstructRememberedSetTask task(&regions);
   heap->workers()->run_task(&task);
+
+  // Rebuilding the remembered set recomputes all the card offsets for objects.
+  // The adjust pointers phase coalesces and fills all necessary regions. In case
+  // we came to the full GC from an incomplete global cycle, we need to indicate
+  // that the old regions are parseable.
+  heap->old_generation()->set_parseable(true);
 }
 
 void ShenandoahGenerationalFullGC::balance_generations_after_gc(ShenandoahHeap* heap) {
@@ -165,7 +170,7 @@ void ShenandoahGenerationalFullGC::account_for_region(ShenandoahHeapRegion* r, s
 
 void ShenandoahGenerationalFullGC::maybe_coalesce_and_fill_region(ShenandoahHeapRegion* r) {
   if (r->is_pinned() && r->is_old() && r->is_active() && !r->is_humongous()) {
     r->begin_preemptible_coalesce_and_fill();
-    r->oop_fill_and_coalesce_without_cancel();
+    r->oop_coalesce_and_fill(false);
   }
 }
diff --git a/src/hotspot/share/gc/shenandoah/shenandoahGenerationalHeap.cpp b/src/hotspot/share/gc/shenandoah/shenandoahGenerationalHeap.cpp
index 00476afde0b..8e049881ffb 100644
--- a/src/hotspot/share/gc/shenandoah/shenandoahGenerationalHeap.cpp
+++ b/src/hotspot/share/gc/shenandoah/shenandoahGenerationalHeap.cpp
@@ -34,6 +34,7 @@
 #include "gc/shenandoah/shenandoahInitLogger.hpp"
 #include "gc/shenandoah/shenandoahMemoryPool.hpp"
 #include "gc/shenandoah/shenandoahOldGeneration.hpp"
+#include "gc/shenandoah/shenandoahPhaseTimings.hpp"
 #include "gc/shenandoah/shenandoahRegulatorThread.hpp"
 #include "gc/shenandoah/shenandoahScanRemembered.inline.hpp"
 #include "gc/shenandoah/shenandoahYoungGeneration.hpp"
@@ -675,3 +676,43 @@ void ShenandoahGenerationalHeap::TransferResult::print_on(const char* when, outp
                 success? "successfully transferred": "failed to transfer", region_count, region_destination,
                 PROPERFMTARGS(old_available), PROPERFMTARGS(young_available));
 }
+
+void ShenandoahGenerationalHeap::coalesce_and_fill_old_regions(bool concurrent) {
+  class ShenandoahGlobalCoalesceAndFill : public WorkerTask {
+  private:
+    ShenandoahPhaseTimings::Phase _phase;
+    ShenandoahRegionIterator _regions;
+  public:
+    explicit ShenandoahGlobalCoalesceAndFill(ShenandoahPhaseTimings::Phase phase) :
+      WorkerTask("Shenandoah Global Coalesce"),
+      _phase(phase) {}
+
+    void work(uint worker_id) override {
+      ShenandoahWorkerTimingsTracker timer(_phase,
+                                           ShenandoahPhaseTimings::ScanClusters,
+                                           worker_id, true);
+      ShenandoahHeapRegion* region;
+      while ((region = _regions.next()) != nullptr) {
+        // old region is not in the collection set and was not immediately trashed
+        if (region->is_old() && region->is_active() && !region->is_humongous()) {
+          // Reset the coalesce and fill boundary because this is a global collect
+          // and cannot be preempted by young collects. We want to be sure the entire
+          // region is coalesced here and does not resume from a previously interrupted
+          // or completed coalescing.
+          region->begin_preemptible_coalesce_and_fill();
+          region->oop_coalesce_and_fill(false);
+        }
+      }
+    }
+  };
+
+  ShenandoahPhaseTimings::Phase phase = concurrent ?
+    ShenandoahPhaseTimings::conc_coalesce_and_fill :
+    ShenandoahPhaseTimings::degen_gc_coalesce_and_fill;
+
+  // This is not cancellable
+  ShenandoahGlobalCoalesceAndFill coalesce(phase);
+  workers()->run_task(&coalesce);
+  old_generation()->set_parseable(true);
+}
+
diff --git a/src/hotspot/share/gc/shenandoah/shenandoahGenerationalHeap.hpp b/src/hotspot/share/gc/shenandoah/shenandoahGenerationalHeap.hpp
index 98693a7f90c..ff34e44fb71 100644
--- a/src/hotspot/share/gc/shenandoah/shenandoahGenerationalHeap.hpp
+++ b/src/hotspot/share/gc/shenandoah/shenandoahGenerationalHeap.hpp
@@ -91,6 +91,7 @@ class ShenandoahGenerationalHeap : public ShenandoahHeap {
 
   // Transfers surplus old regions to young, or takes regions from young to satisfy old region deficit
   TransferResult balance_generations();
+  void coalesce_and_fill_old_regions(bool concurrent);
 
 private:
   void initialize_controller() override;
diff --git a/src/hotspot/share/gc/shenandoah/shenandoahHeap.cpp b/src/hotspot/share/gc/shenandoah/shenandoahHeap.cpp
index 56102951c97..07add31dadd 100644
--- a/src/hotspot/share/gc/shenandoah/shenandoahHeap.cpp
+++ b/src/hotspot/share/gc/shenandoah/shenandoahHeap.cpp
@@ -973,7 +973,7 @@ HeapWord* ShenandoahHeap::allocate_from_gclab_slow(Thread* thread, size_t size)
 void ShenandoahHeap::cancel_old_gc() {
   shenandoah_assert_safepoint();
   assert(old_generation() != nullptr, "Should only have mixed collections in generation mode.");
-  if (old_generation()->state() == ShenandoahOldGeneration::WAITING_FOR_BOOTSTRAP) {
+  if (old_generation()->is_idle()) {
 #ifdef ASSERT
     old_generation()->validate_waiting_for_bootstrap();
 #endif
@@ -2095,6 +2095,9 @@ void ShenandoahHeap::recycle_trash() {
 
 void ShenandoahHeap::do_class_unloading() {
   _unloader.unload();
+  if (mode()->is_generational()) {
+    old_generation()->set_parseable(false);
+  }
 }
 
 void ShenandoahHeap::stw_weak_refs(bool full_gc) {
@@ -2174,7 +2177,7 @@ void ShenandoahHeap::set_concurrent_old_mark_in_progress(bool in_progress) {
 }
 
 bool ShenandoahHeap::is_prepare_for_old_mark_in_progress() const {
-  return old_generation()->state() == ShenandoahOldGeneration::FILLING;
+  return old_generation()->is_preparing_for_mark();
 }
 
 void ShenandoahHeap::set_aging_cycle(bool in_progress) {
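The nested WorkerTask just added to ShenandoahGenerationalHeap uses a claim-based ShenandoahRegionIterator, while the pre-existing ShenandoahConcurrentCoalesceAndFillTask (further down, in shenandoahOldGeneration.cpp) stripes a fixed candidate array by worker id. The two idioms side by side (schematic sketch; per-region bodies elided, function names invented):

// Idiom 1: shared iterator; each next() atomically claims one region, so any
// number of workers cooperate without partitioning the regions up front.
void work_with_iterator(ShenandoahRegionIterator& regions) {
  ShenandoahHeapRegion* r;
  while ((r = regions.next()) != nullptr) {
    // ...process r; next() never hands out the same region twice...
  }
}

// Idiom 2: fixed array striped by worker id; worker w takes w, w+n, w+2n, ...
void work_with_stride(ShenandoahHeapRegion** array, uint count, uint worker_id, uint nworkers) {
  for (uint i = worker_id; i < count; i += nworkers) {
    // ...process array[i]...
  }
}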
diff --git a/src/hotspot/share/gc/shenandoah/shenandoahHeapRegion.cpp b/src/hotspot/share/gc/shenandoah/shenandoahHeapRegion.cpp
index bd2c835e186..3a3176b89b6 100644
--- a/src/hotspot/share/gc/shenandoah/shenandoahHeapRegion.cpp
+++ b/src/hotspot/share/gc/shenandoah/shenandoahHeapRegion.cpp
@@ -452,9 +452,11 @@ void ShenandoahHeapRegion::print_on(outputStream* st) const {
 #undef SHR_PTR_FORMAT
 }
 
-// oop_iterate without closure and without cancellation. always return true.
-bool ShenandoahHeapRegion::oop_fill_and_coalesce_without_cancel() {
-  HeapWord* obj_addr = resume_coalesce_and_fill();
+// oop_iterate without closure, return true if completed without cancellation
+bool ShenandoahHeapRegion::oop_coalesce_and_fill(bool cancellable) {
+
+  // Consider yielding to cancel/preemption request after this many coalesce operations (skip marked, or coalesce free).
+  const size_t preemption_stride = 128;
 
   assert(!is_humongous(), "No need to fill or coalesce humongous regions");
   if (!is_active()) {
@@ -464,50 +466,10 @@ bool ShenandoahHeapRegion::oop_fill_and_coalesce_without_cancel() {
 
   ShenandoahHeap* heap = ShenandoahHeap::heap();
   ShenandoahMarkingContext* marking_context = heap->marking_context();
-  // All objects above TAMS are considered live even though their mark bits will not be set. Note that young-
-  // gen evacuations that interrupt a long-running old-gen concurrent mark may promote objects into old-gen
-  // while the old-gen concurrent marking is ongoing. These newly promoted objects will reside above TAMS
-  // and will be treated as live during the current old-gen marking pass, even though they will not be
-  // explicitly marked.
-  HeapWord* t = marking_context->top_at_mark_start(this);
 
   // Expect marking to be completed before these threads invoke this service.
   assert(heap->active_generation()->is_mark_complete(), "sanity");
-  while (obj_addr < t) {
-    oop obj = cast_to_oop(obj_addr);
-    if (marking_context->is_marked(obj)) {
-      assert(obj->klass() != nullptr, "klass should not be nullptr");
-      obj_addr += obj->size();
-    } else {
-      // Object is not marked. Coalesce and fill dead object with dead neighbors.
-      HeapWord* next_marked_obj = marking_context->get_next_marked_addr(obj_addr, t);
-      assert(next_marked_obj <= t, "next marked object cannot exceed top");
-      size_t fill_size = next_marked_obj - obj_addr;
-      assert(fill_size >= ShenandoahHeap::min_fill_size(), "previously allocated objects known to be larger than min_size");
-      ShenandoahHeap::fill_with_object(obj_addr, fill_size);
-      heap->card_scan()->coalesce_objects(obj_addr, fill_size);
-      obj_addr = next_marked_obj;
-    }
-  }
-  // Mark that this region has been coalesced and filled
-  end_preemptible_coalesce_and_fill();
-  return true;
-}
-
-// oop_iterate without closure, return true if completed without cancellation
-bool ShenandoahHeapRegion::oop_fill_and_coalesce() {
-  HeapWord* obj_addr = resume_coalesce_and_fill();
-  // Consider yielding to cancel/preemption request after this many coalesce operations (skip marked, or coalesce free).
-  const size_t preemption_stride = 128;
-
-  assert(!is_humongous(), "No need to fill or coalesce humongous regions");
-  if (!is_active()) {
-    end_preemptible_coalesce_and_fill();
-    return true;
-  }
-
-  ShenandoahHeap* heap = ShenandoahHeap::heap();
-  ShenandoahMarkingContext* marking_context = heap->marking_context();
 
   // All objects above TAMS are considered live even though their mark bits will not be set. Note that young-
   // gen evacuations that interrupt a long-running old-gen concurrent mark may promote objects into old-gen
   // while the old-gen concurrent marking is ongoing. These newly promoted objects will reside above TAMS
@@ -515,8 +477,8 @@ bool ShenandoahHeapRegion::oop_fill_and_coalesce() {
   // explicitly marked.
   HeapWord* t = marking_context->top_at_mark_start(this);
 
-  // Expect marking to be completed before these threads invoke this service.
-  assert(heap->active_generation()->is_mark_complete(), "sanity");
+  // Resume coalesce and fill from this address
+  HeapWord* obj_addr = resume_coalesce_and_fill();
 
   size_t ops_before_preempt_check = preemption_stride;
   while (obj_addr < t) {
@@ -534,7 +496,7 @@ bool ShenandoahHeapRegion::oop_fill_and_coalesce() {
       heap->card_scan()->coalesce_objects(obj_addr, fill_size);
       obj_addr = next_marked_obj;
     }
-    if (ops_before_preempt_check-- == 0) {
+    if (cancellable && ops_before_preempt_check-- == 0) {
       if (heap->cancelled_gc()) {
         suspend_coalesce_and_fill(obj_addr);
         return false;
diff --git a/src/hotspot/share/gc/shenandoah/shenandoahHeapRegion.hpp b/src/hotspot/share/gc/shenandoah/shenandoahHeapRegion.hpp
index e494612932f..ec32c8388b2 100644
--- a/src/hotspot/share/gc/shenandoah/shenandoahHeapRegion.hpp
+++ b/src/hotspot/share/gc/shenandoah/shenandoahHeapRegion.hpp
@@ -402,10 +402,7 @@ class ShenandoahHeapRegion {
   // Coalesce contiguous spans of garbage objects by filling header and reregistering start locations with remembered set.
   // This is used by old-gen GC following concurrent marking to make old-gen HeapRegions parsable. Return true iff
   // region is completely coalesced and filled. Returns false if cancelled before task is complete.
-  bool oop_fill_and_coalesce();
-
-  // Like oop_fill_and_coalesce(), but without honoring cancellation requests.
-  bool oop_fill_and_coalesce_without_cancel();
+  bool oop_coalesce_and_fill(bool cancellable);
 
   // Invoke closure on every reference contained within the humongous object that spans this humongous
   // region if the reference is contained within a DIRTY card and the reference is no more than words following
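The unified walk that replaces the two removed methods is short enough to restate. Reduced to its core (simplified from the hunks above; the asserts, the card re-registration, and the suspend/resume and preemption-stride bookkeeping are omitted):

// Core of oop_coalesce_and_fill(): make [resume point, TAMS) parsable by
// turning every run of dead objects into a single filler object. Objects at
// or above TAMS are implicitly live and already parsable.
HeapWord* p = region->resume_coalesce_and_fill();   // bottom(), or where a prior pass suspended
HeapWord* tams = ctx->top_at_mark_start(region);    // ctx: the completed old marking context
while (p < tams) {
  oop obj = cast_to_oop(p);
  if (ctx->is_marked(obj)) {
    p += obj->size();                               // live: step over it
  } else {
    HeapWord* next_live = ctx->get_next_marked_addr(p, tams);
    ShenandoahHeap::fill_with_object(p, next_live - p);  // one filler covers the whole dead run
    p = next_live;                                  // (real code also updates the card scanner)
  }
}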
diff --git a/src/hotspot/share/gc/shenandoah/shenandoahOldGeneration.cpp b/src/hotspot/share/gc/shenandoah/shenandoahOldGeneration.cpp
index 81bc48259a5..f8d86e1a60a 100644
--- a/src/hotspot/share/gc/shenandoah/shenandoahOldGeneration.cpp
+++ b/src/hotspot/share/gc/shenandoah/shenandoahOldGeneration.cpp
@@ -88,7 +88,7 @@ class ShenandoahProcessOldSATB : public SATBBufferClosure {
     }
   }
 
-  size_t trashed_oops() {
+  size_t trashed_oops() const {
     return _trashed_oops;
   }
 };
@@ -145,6 +145,7 @@ class ShenandoahConcurrentCoalesceAndFillTask : public WorkerTask {
   }
 
   void work(uint worker_id) override {
+    ShenandoahWorkerTimingsTracker timer(ShenandoahPhaseTimings::conc_coalesce_and_fill, ShenandoahPhaseTimings::ScanClusters, worker_id);
     for (uint region_idx = worker_id; region_idx < _coalesce_and_fill_region_count; region_idx += _nworkers) {
       ShenandoahHeapRegion* r = _coalesce_and_fill_region_array[region_idx];
       if (r->is_humongous()) {
@@ -153,7 +154,7 @@ class ShenandoahConcurrentCoalesceAndFillTask : public WorkerTask {
         continue;
       }
 
-      if (!r->oop_fill_and_coalesce()) {
+      if (!r->oop_coalesce_and_fill(true)) {
         // Coalesce and fill has been preempted
         Atomic::store(&_is_preempted, true);
         return;
@@ -178,6 +179,7 @@ ShenandoahOldGeneration::ShenandoahOldGeneration(uint max_queues, size_t max_cap
   _pad_for_promote_in_place(0),
   _promotable_humongous_regions(0),
   _promotable_regular_regions(0),
+  _is_parseable(true),
   _state(WAITING_FOR_BOOTSTRAP),
   _growth_before_compaction(INITIAL_GROWTH_BEFORE_COMPACTION),
   _min_growth_before_compaction ((ShenandoahMinOldGenGrowthPercent * FRACTIONAL_DENOMINATOR) / 100)
@@ -284,9 +286,8 @@ bool ShenandoahOldGeneration::entry_coalesce_and_fill() {
   ShenandoahHeap* const heap = ShenandoahHeap::heap();
 
   static const char* msg = "Coalescing and filling (OLD)";
-  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::coalesce_and_fill);
+  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_coalesce_and_fill);
 
-  // TODO: I don't think we're using these concurrent collection counters correctly.
   TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
   EventMark em("%s", msg);
   ShenandoahWorkerScope scope(heap->workers(),
@@ -299,22 +300,28 @@ bool ShenandoahOldGeneration::entry_coalesce_and_fill() {
 // Make the old generation regions parsable, so they can be safely
 // scanned when looking for objects in memory indicated by dirty cards.
 bool ShenandoahOldGeneration::coalesce_and_fill() {
-  ShenandoahHeap* const heap = ShenandoahHeap::heap();
   transition_to(FILLING);
 
-  WorkerThreads* workers = heap->workers();
-  uint nworkers = workers->active_workers();
-
   // This code will see the same set of regions to fill on each resumption as it did
   // on the initial run. That's okay because each region keeps track of its own coalesce
   // and fill state. Regions that were filled on a prior attempt will not try to fill again.
   uint coalesce_and_fill_regions_count = heuristics()->get_coalesce_and_fill_candidates(_coalesce_and_fill_region_array);
-  assert(coalesce_and_fill_regions_count <= heap->num_regions(), "Sanity");
+  assert(coalesce_and_fill_regions_count <= ShenandoahHeap::heap()->num_regions(), "Sanity");
+  if (coalesce_and_fill_regions_count == 0) {
+    // No regions need to be filled.
+    abandon_collection_candidates();
+    return true;
+  }
+
+  ShenandoahHeap* const heap = ShenandoahHeap::heap();
+  WorkerThreads* workers = heap->workers();
+  uint nworkers = workers->active_workers();
   ShenandoahConcurrentCoalesceAndFillTask task(nworkers, _coalesce_and_fill_region_array, coalesce_and_fill_regions_count);
 
   log_info(gc)("Starting (or resuming) coalesce-and-fill of " UINT32_FORMAT " old heap regions", coalesce_and_fill_regions_count);
   workers->run_task(&task);
   if (task.is_completed()) {
+    // We no longer need to track regions that need to be coalesced and filled.
     abandon_collection_candidates();
     return true;
   } else {
@@ -384,11 +391,12 @@ void ShenandoahOldGeneration::prepare_regions_and_collection_set(bool concurrent
 
 const char* ShenandoahOldGeneration::state_name(State state) {
   switch (state) {
-    case WAITING_FOR_BOOTSTRAP: return "Waiting for Bootstrap";
-    case FILLING:               return "Coalescing";
-    case BOOTSTRAPPING:         return "Bootstrapping";
-    case MARKING:               return "Marking";
-    case EVACUATING:            return "Evacuating";
+    case WAITING_FOR_BOOTSTRAP:   return "Waiting for Bootstrap";
+    case FILLING:                 return "Coalescing";
+    case BOOTSTRAPPING:           return "Bootstrapping";
+    case MARKING:                 return "Marking";
+    case EVACUATING:              return "Evacuating";
+    case EVACUATING_AFTER_GLOBAL: return "Evacuating (G)";
     default:
       ShouldNotReachHere();
      return "Unknown";
@@ -432,38 +440,44 @@ void ShenandoahOldGeneration::transition_to(State new_state) {
 //                 |   |            | Filling Complete    |  <-> A global collection may
 //                 |   |            v                     |      move the old generation
 //                 |   |          +-----------------+     |      directly from waiting for
-//                 |   +--------> |     WAITING     |     |      bootstrap to filling or
-//                 |   |    +---- |  FOR BOOTSTRAP  | ----+      evacuating.
-//                 |   |    |     +-----------------+
-//                 |   |    |       |
-//                 |   |    |       | Reset Bitmap
-//                 |   |    |       v
-//                 |   |    |     +-----------------+     +----------------------+
-//                 |   |    |     |    BOOTSTRAP    | <-> |       YOUNG GC       |
-//                 |   |    |     |                 |     | (RSet Parses Region) |
-//                 |   |    |     +-----------------+     +----------------------+
-//                 |   |    |       |
-//                 |   |    |       | Old Marking
-//                 |   |    |       v
-//                 |   |    |     +-----------------+     +----------------------+
-//                 |   |    |     |     MARKING     | <-> |       YOUNG GC       |
-//                 |   +--------- |                 |     | (RSet Parses Region) |
-//                 |        |     +-----------------+     +----------------------+
-//                 |        |       |
-//                 |        |       | Has Evacuation Candidates
-//                 |        |       v
-//                 |        |     +-----------------+     +--------------------+
-//                 |        +---> |   EVACUATING    | <-> |      YOUNG GC      |
-//                 +------------- |                 |     | (RSet Uses Bitmap) |
+//             +-- |-- |--------> |     WAITING     |     |      bootstrap to filling or
+//             |   |   |    +---- |  FOR BOOTSTRAP  | ----+      evacuating. It may also
+//             |   |   |    |     +-----------------+            move from filling to waiting
+//             |   |   |    |       |                            for bootstrap.
+//             |   |   |    |       | Reset Bitmap
+//             |   |   |    |       v
+//             |   |   |    |     +-----------------+     +----------------------+
+//             |   |   |    |     |    BOOTSTRAP    | <-> |       YOUNG GC       |
+//             |   |   |    |     |                 |     | (RSet Parses Region) |
+//             |   |   |    |     +-----------------+     +----------------------+
+//             |   |   |    |       |
+//             |   |   |    |       | Old Marking
+//             |   |   |    |       v
+//             |   |   |    |     +-----------------+     +----------------------+
+//             |   |   |    |     |     MARKING     | <-> |       YOUNG GC       |
+//             |   |   +--------- |                 |     | (RSet Parses Region) |
+//             |   |        |     +-----------------+     +----------------------+
+//             |   |        |       |
+//             |   |        |       | Has Evacuation Candidates
+//             |   |        |       v
+//             |   |        |     +-----------------+     +--------------------+
+//             |   |        +---> |   EVACUATING    | <-> |      YOUNG GC      |
+//             |   +------------- |                 |     | (RSet Uses Bitmap) |
+//             |                  +-----------------+     +--------------------+
+//             |                    |
+//             |                    | Global Cycle Coalesces and Fills Old Regions
+//             |                    v
+//             |                  +-----------------+     +--------------------+
+//             +----------------- |   EVACUATING    | <-> |      YOUNG GC      |
+//                                |  AFTER GLOBAL   |     | (RSet Uses Bitmap) |
 //                                +-----------------+     +--------------------+
 //
 //
 
 void ShenandoahOldGeneration::validate_transition(State new_state) {
   ShenandoahHeap* heap = ShenandoahHeap::heap();
   switch (new_state) {
     case FILLING:
-      assert(_state != BOOTSTRAPPING, "Cannot beging making old regions parsable after bootstrapping");
+      assert(_state != BOOTSTRAPPING, "Cannot begin making old regions parsable after bootstrapping");
       assert(is_mark_complete(), "Cannot begin filling without first completing marking, state is '%s'", state_name(_state));
       assert(_old_heuristics->has_coalesce_and_fill_candidates(), "Cannot begin filling without something to fill.");
       break;
@@ -481,6 +495,9 @@ void ShenandoahOldGeneration::validate_transition(State new_state) {
       assert(heap->young_generation()->old_gen_task_queues() != nullptr, "Young generation needs old mark queues.");
       assert(heap->is_concurrent_old_mark_in_progress(), "Should be marking old now.");
       break;
+    case EVACUATING_AFTER_GLOBAL:
+      assert(_state == EVACUATING, "Must have been evacuating, state is '%s'", state_name(_state));
+      break;
     case EVACUATING:
       assert(_state == WAITING_FOR_BOOTSTRAP || _state == MARKING, "Cannot have old collection candidates without first marking, state is '%s'", state_name(_state));
       assert(_old_heuristics->unprocessed_old_collection_candidates() > 0, "Must have collection candidates here.");
@@ -651,3 +668,73 @@ void ShenandoahOldGeneration::parallel_region_iterate_free(ShenandoahHeapRegionC
   ShenandoahExcludeRegionClosure<YOUNG_GENERATION> exclude_cl(cl);
   ShenandoahGeneration::parallel_region_iterate_free(&exclude_cl);
 }
+
+void ShenandoahOldGeneration::set_parseable(bool parseable) {
+  _is_parseable = parseable;
+  if (_is_parseable) {
+    // The current state would have been chosen during final mark of the global
+    // collection, _before_ any decisions about class unloading have been made.
+    //
+    // After unloading classes, we have made the old generation regions parseable.
+    // We can skip filling or transition to a state that knows everything has
+    // already been filled.
+    switch (state()) {
+      case ShenandoahOldGeneration::EVACUATING:
+        transition_to(ShenandoahOldGeneration::EVACUATING_AFTER_GLOBAL);
+        break;
+      case ShenandoahOldGeneration::FILLING:
+        assert(_old_heuristics->unprocessed_old_collection_candidates() == 0, "Expected no mixed collection candidates");
+        assert(_old_heuristics->coalesce_and_fill_candidates_count() > 0, "Expected coalesce and fill candidates");
+        // When the heuristic put the old generation in this state, it didn't know
+        // that we would unload classes and make everything parseable. But, we know
+        // that now so we can override this state.
+        // TODO: It would be nicer if we didn't have to 'correct' this situation.
+        abandon_collection_candidates();
+        transition_to(ShenandoahOldGeneration::WAITING_FOR_BOOTSTRAP);
+        break;
+      default:
+        // We can get here during a full GC. The full GC will cancel anything
+        // happening in the old generation and return it to the waiting for bootstrap
+        // state. The full GC will then record that the old regions are parseable
+        // after rebuilding the remembered set.
+        assert(is_idle(), "Unexpected state %s at end of global GC", state_name());
+        break;
+    }
+  }
+}
+
+void ShenandoahOldGeneration::complete_mixed_evacuations() {
+  assert(is_doing_mixed_evacuations(), "Mixed evacuations should be in progress");
+  if (!_old_heuristics->has_coalesce_and_fill_candidates()) {
+    // No candidate regions to coalesce and fill
+    transition_to(ShenandoahOldGeneration::WAITING_FOR_BOOTSTRAP);
+    return;
+  }
+
+  if (state() == ShenandoahOldGeneration::EVACUATING) {
+    transition_to(ShenandoahOldGeneration::FILLING);
+    return;
+  }
+
+  // Here, we have no more candidates for mixed collections. The candidates for coalescing
+  // and filling have already been processed during the global cycle, so there is nothing
+  // more to do.
+  assert(state() == ShenandoahOldGeneration::EVACUATING_AFTER_GLOBAL, "Should be evacuating after a global cycle");
+  abandon_collection_candidates();
+  transition_to(ShenandoahOldGeneration::WAITING_FOR_BOOTSTRAP);
+}
+
+void ShenandoahOldGeneration::abandon_mixed_evacuations() {
+  switch (state()) {
+    case ShenandoahOldGeneration::EVACUATING:
+      transition_to(ShenandoahOldGeneration::FILLING);
+      break;
+    case ShenandoahOldGeneration::EVACUATING_AFTER_GLOBAL:
+      abandon_collection_candidates();
+      transition_to(ShenandoahOldGeneration::WAITING_FOR_BOOTSTRAP);
+      break;
+    default:
+      ShouldNotReachHere();
+      break;
+  }
+}
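The new state is easiest to read as a small decision table. A paraphrase of complete_mixed_evacuations() and abandon_mixed_evacuations() above (next_state_after_mixed_evacs is invented for the illustration, not part of the patch):

// Where the old generation goes when mixed evacuations finish or are abandoned:
//  - EVACUATING with fill candidates -> FILLING (regions still need parsability work)
//  - EVACUATING without candidates   -> WAITING_FOR_BOOTSTRAP
//  - EVACUATING_AFTER_GLOBAL         -> WAITING_FOR_BOOTSTRAP (global already filled)
ShenandoahOldGeneration::State next_state_after_mixed_evacs(ShenandoahOldGeneration::State s,
                                                            bool has_fill_candidates) {
  if (s == ShenandoahOldGeneration::EVACUATING && has_fill_candidates) {
    return ShenandoahOldGeneration::FILLING;
  }
  return ShenandoahOldGeneration::WAITING_FOR_BOOTSTRAP;
}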
diff --git a/src/hotspot/share/gc/shenandoah/shenandoahOldGeneration.hpp b/src/hotspot/share/gc/shenandoah/shenandoahOldGeneration.hpp
index e00de6ae76b..480d1db8a9a 100644
--- a/src/hotspot/share/gc/shenandoah/shenandoahOldGeneration.hpp
+++ b/src/hotspot/share/gc/shenandoah/shenandoahOldGeneration.hpp
@@ -76,6 +76,9 @@ class ShenandoahOldGeneration : public ShenandoahGeneration {
   size_t _promotable_humongous_regions;
   size_t _promotable_regular_regions;
 
+  // True if old regions may be safely traversed by the remembered set scan.
+  bool _is_parseable;
+
   bool coalesce_and_fill();
 
 public:
@@ -126,6 +129,10 @@ class ShenandoahOldGeneration : public ShenandoahGeneration {
   void set_expected_regular_region_promotions(size_t region_count) { _promotable_regular_regions = region_count; }
   bool has_in_place_promotions() const { return (_promotable_humongous_regions + _promotable_regular_regions) > 0; }
 
+  // Class unloading may render the card table offsets unusable, if they refer to unmarked objects
+  bool is_parseable() const { return _is_parseable; }
+  void set_parseable(bool parseable);
+
   // This will signal the heuristic to trigger an old generation collection
   void handle_failed_transfer();
 
@@ -143,6 +150,13 @@ class ShenandoahOldGeneration : public ShenandoahGeneration {
     return _failed_evacuation.try_unset();
   }
 
+  // Transition to the next state after mixed evacuations have completed
+  void complete_mixed_evacuations();
+
+  // Abandon any future mixed collections. This is invoked when all old regions eligible for
+  // inclusion in a mixed evacuation are pinned. This should be rare.
+  void abandon_mixed_evacuations();
+
   void parallel_heap_region_iterate(ShenandoahHeapRegionClosure* cl) override;
 
   void parallel_region_iterate_free(ShenandoahHeapRegionClosure* cl) override;
@@ -186,13 +200,21 @@ class ShenandoahOldGeneration : public ShenandoahGeneration {
   bool has_unprocessed_collection_candidates();
 
   bool is_doing_mixed_evacuations() const {
-    return state() == EVACUATING;
+    return state() == EVACUATING || state() == EVACUATING_AFTER_GLOBAL;
   }
 
   bool is_preparing_for_mark() const {
     return state() == FILLING;
   }
 
+  bool is_idle() const {
+    return state() == WAITING_FOR_BOOTSTRAP;
+  }
+
+  bool is_bootstrapping() const {
+    return state() == BOOTSTRAPPING;
+  }
+
   // Amount of live memory (bytes) in regions waiting for mixed collections
   size_t unprocessed_collection_candidates_live_memory();
 
@@ -202,7 +224,7 @@ class ShenandoahOldGeneration : public ShenandoahGeneration {
   void maybe_trigger_collection(size_t first_old_region, size_t last_old_region, size_t old_region_count);
 
 public:
   enum State {
-    FILLING, WAITING_FOR_BOOTSTRAP, BOOTSTRAPPING, MARKING, EVACUATING
+    FILLING, WAITING_FOR_BOOTSTRAP, BOOTSTRAPPING, MARKING, EVACUATING, EVACUATING_AFTER_GLOBAL
   };
 
 #ifdef ASSERT
diff --git a/src/hotspot/share/gc/shenandoah/shenandoahPhaseTimings.cpp b/src/hotspot/share/gc/shenandoah/shenandoahPhaseTimings.cpp
index 95531890a02..612b870ef46 100644
--- a/src/hotspot/share/gc/shenandoah/shenandoahPhaseTimings.cpp
+++ b/src/hotspot/share/gc/shenandoah/shenandoahPhaseTimings.cpp
@@ -110,6 +110,7 @@ bool ShenandoahPhaseTimings::is_worker_phase(Phase phase) {
     case full_gc_weakrefs:
     case full_gc_purge_class_unload:
     case full_gc_purge_weak_par:
+    case degen_gc_coalesce_and_fill:
     case degen_gc_weakrefs:
     case degen_gc_purge_class_unload:
     case degen_gc_purge_weak_par:
@@ -120,6 +121,7 @@ bool ShenandoahPhaseTimings::is_worker_phase(Phase phase) {
     case conc_weak_roots_work:
     case conc_weak_refs:
     case conc_strong_roots:
+    case conc_coalesce_and_fill:
       return true;
     default:
       return false;
SHENANDOAH_PAR_PHASE_DO(conc_strong_roots_, " CSR: ", f) \ - f(coalesce_and_fill, "Coalesce and Fill Old Dead") \ - SHENANDOAH_PAR_PHASE_DO(coalesce_and_fill_, " CFOD: ", f) \ f(conc_evac, "Concurrent Evacuation") \ \ f(final_roots_gross, "Pause Final Roots (G)") \ @@ -124,6 +122,8 @@ class outputStream; f(final_update_refs_rebuild_freeset, " Rebuild Free Set") \ \ f(conc_cleanup_complete, "Concurrent Cleanup") \ + f(conc_coalesce_and_fill, "Concurrent Coalesce and Fill") \ + SHENANDOAH_PAR_PHASE_DO(conc_coalesce_, " CC&F: ", f) \ \ f(degen_gc_gross, "Pause Degenerated GC (G)") \ f(degen_gc, "Pause Degenerated GC (N)") \ @@ -153,6 +153,8 @@ class outputStream; f(degen_gc_update_roots, " Degen Update Roots") \ SHENANDOAH_PAR_PHASE_DO(degen_gc_update_, " DU: ", f) \ f(degen_gc_cleanup_complete, " Cleanup") \ + f(degen_gc_coalesce_and_fill, " Degen Coalesce and Fill") \ + SHENANDOAH_PAR_PHASE_DO(degen_coalesce_, " DC&F", f) \ \ f(full_gc_gross, "Pause Full GC (G)") \ f(full_gc, "Pause Full GC (N)") \ diff --git a/src/hotspot/share/gc/shenandoah/shenandoahScanRemembered.inline.hpp b/src/hotspot/share/gc/shenandoah/shenandoahScanRemembered.inline.hpp index 50a56910191..655563d7eff 100644 --- a/src/hotspot/share/gc/shenandoah/shenandoahScanRemembered.inline.hpp +++ b/src/hotspot/share/gc/shenandoah/shenandoahScanRemembered.inline.hpp @@ -551,6 +551,7 @@ template <typename ClosureType> void ShenandoahScanRemembered<RememberedSet>::process_clusters(size_t first_cluster, size_t count, HeapWord* end_of_range, ClosureType* cl, bool use_write_table, uint worker_id) { + assert(ShenandoahHeap::heap()->old_generation()->is_parseable(), "Old generation regions must be parseable for remembered set scan"); // If old-gen evacuation is active, then MarkingContext for old-gen heap regions is valid. We use the MarkingContext // bits to determine which objects within a DIRTY card need to be scanned. This is necessary because old-gen heap // regions that are in the candidate collection set have not been coalesced and filled. Thus, these heap regions