Skip to content

Commit 4da6fd4

Browse files
author
Xiaolong Peng
committedDec 12, 2024
8345423: Shenandoah: Parallelize concurrent cleanup
Reviewed-by: ysr, kdnilsen, wkemper
1 parent ec219ae commit 4da6fd4

13 files changed

+171
-148
lines changed
 

‎src/hotspot/share/gc/shenandoah/shenandoahConcurrentGC.cpp

+8-2
Original file line numberDiff line numberDiff line change
@@ -1046,7 +1046,10 @@ void ShenandoahConcurrentGC::op_strong_roots() {
10461046
}
10471047

10481048
void ShenandoahConcurrentGC::op_cleanup_early() {
1049-
ShenandoahHeap::heap()->free_set()->recycle_trash();
1049+
ShenandoahWorkerScope scope(ShenandoahHeap::heap()->workers(),
1050+
ShenandoahWorkerPolicy::calc_workers_for_conc_cleanup(),
1051+
"cleanup early.");
1052+
ShenandoahHeap::heap()->recycle_trash();
10501053
}
10511054

10521055
void ShenandoahConcurrentGC::op_evacuate() {
@@ -1178,7 +1181,10 @@ void ShenandoahConcurrentGC::op_final_roots() {
11781181
}
11791182

11801183
void ShenandoahConcurrentGC::op_cleanup_complete() {
1181-
ShenandoahHeap::heap()->free_set()->recycle_trash();
1184+
ShenandoahWorkerScope scope(ShenandoahHeap::heap()->workers(),
1185+
ShenandoahWorkerPolicy::calc_workers_for_conc_cleanup(),
1186+
"cleanup complete.");
1187+
ShenandoahHeap::heap()->recycle_trash();
11821188
}
11831189

11841190
bool ShenandoahConcurrentGC::check_cancellation_and_abort(ShenandoahDegenPoint point) {

‎src/hotspot/share/gc/shenandoah/shenandoahFreeSet.cpp

+18-54
Original file line numberDiff line numberDiff line change
@@ -729,7 +729,6 @@ void ShenandoahRegionPartitions::assert_bounds() {
729729
ShenandoahFreeSet::ShenandoahFreeSet(ShenandoahHeap* heap, size_t max_regions) :
730730
_heap(heap),
731731
_partitions(max_regions, this),
732-
_trash_regions(NEW_C_HEAP_ARRAY(ShenandoahHeapRegion*, max_regions, mtGC)),
733732
_alloc_bias_weight(0)
734733
{
735734
clear_internal();
@@ -1002,7 +1001,7 @@ HeapWord* ShenandoahFreeSet::try_allocate_in(ShenandoahHeapRegion* r, Shenandoah
10021001
return nullptr;
10031002
}
10041003
HeapWord* result = nullptr;
1005-
try_recycle_trashed(r);
1004+
r->try_recycle_under_lock();
10061005
in_new_region = r->is_empty();
10071006

10081007
if (in_new_region) {
@@ -1213,7 +1212,7 @@ HeapWord* ShenandoahFreeSet::allocate_contiguous(ShenandoahAllocRequest& req) {
12131212
// Initialize regions:
12141213
for (idx_t i = beg; i <= end; i++) {
12151214
ShenandoahHeapRegion* r = _heap->get_region(i);
1216-
try_recycle_trashed(r);
1215+
r->try_recycle_under_lock();
12171216

12181217
assert(i == beg || _heap->get_region(i - 1)->index() + 1 == r->index(), "Should be contiguous");
12191218
assert(r->is_empty(), "Should be empty");
@@ -1255,63 +1254,28 @@ HeapWord* ShenandoahFreeSet::allocate_contiguous(ShenandoahAllocRequest& req) {
12551254
return _heap->get_region(beg)->bottom();
12561255
}
12571256

1258-
void ShenandoahFreeSet::try_recycle_trashed(ShenandoahHeapRegion* r) {
1259-
if (r->is_trash()) {
1260-
r->recycle();
1257+
class ShenandoahRecycleTrashedRegionClosure final : public ShenandoahHeapRegionClosure {
1258+
public:
1259+
ShenandoahRecycleTrashedRegionClosure(): ShenandoahHeapRegionClosure() {}
1260+
1261+
void heap_region_do(ShenandoahHeapRegion* r) {
1262+
r->try_recycle();
12611263
}
1262-
}
1264+
1265+
bool is_thread_safe() {
1266+
return true;
1267+
}
1268+
};
12631269

12641270
void ShenandoahFreeSet::recycle_trash() {
1265-
// lock is not reentrable, check we don't have it
1271+
// lock is non-reentrant, check we don't have it
12661272
shenandoah_assert_not_heaplocked();
1267-
size_t count = 0;
1268-
for (size_t i = 0; i < _heap->num_regions(); i++) {
1269-
ShenandoahHeapRegion* r = _heap->get_region(i);
1270-
if (r->is_trash()) {
1271-
_trash_regions[count++] = r;
1272-
}
1273-
}
12741273

1275-
size_t total_batches = 0;
1276-
jlong batch_start_time = 0;
1277-
jlong recycle_trash_start_time = os::javaTimeNanos(); // This value will be treated as the initial batch_start_time
1278-
jlong batch_end_time = recycle_trash_start_time;
1279-
// Process as many batches as can be processed within 10 us.
1280-
static constexpr jlong deadline_ns = 10000; // 10 us
1281-
size_t idx = 0;
1282-
jlong predicted_next_batch_end_time;
1283-
jlong batch_process_time_estimate = 0;
1284-
while (idx < count) {
1285-
if (idx > 0) {
1286-
os::naked_yield(); // Yield to allow allocators to take the lock, except on the first iteration
1287-
}
1288-
// Avoid another call to javaTimeNanos() if we already know time at which last batch ended
1289-
batch_start_time = batch_end_time;
1290-
const jlong deadline = batch_start_time + deadline_ns;
1274+
ShenandoahHeap* heap = ShenandoahHeap::heap();
1275+
heap->assert_gc_workers(heap->workers()->active_workers());
12911276

1292-
ShenandoahHeapLocker locker(_heap->lock());
1293-
do {
1294-
// Measurements on typical 2024 hardware suggest it typically requires between 1400 and 2000 ns to process a batch of
1295-
// 32 regions, assuming low contention with other threads. Sometimes this goes higher, when mutator threads
1296-
// are contending for CPU cores and/or the heap lock. On this hardware with a 10 us deadline, we expect 3-6 batches
1297-
// to be processed between yields most of the time.
1298-
//
1299-
// Note that deadline is enforced since the end of previous batch. In the case that yield() or acquisition of heap lock
1300-
// takes a "long time", we will have less time to process regions, but we will always process at least one batch between
1301-
// yields. Yielding more frequently when there is heavy contention for the heap lock or for CPU cores is considered the
1302-
// right thing to do.
1303-
const size_t REGIONS_PER_BATCH = 32;
1304-
size_t max_idx = MIN2(count, idx + REGIONS_PER_BATCH);
1305-
while (idx < max_idx) {
1306-
try_recycle_trashed(_trash_regions[idx++]);
1307-
}
1308-
total_batches++;
1309-
batch_end_time = os::javaTimeNanos();
1310-
// Estimate includes historic combination of yield times and heap lock acquisition times.
1311-
batch_process_time_estimate = (batch_end_time - recycle_trash_start_time) / total_batches;
1312-
predicted_next_batch_end_time = batch_end_time + batch_process_time_estimate;
1313-
} while ((idx < count) && (predicted_next_batch_end_time < deadline));
1314-
}
1277+
ShenandoahRecycleTrashedRegionClosure closure;
1278+
heap->parallel_heap_region_iterate(&closure);
13151279
}
13161280

13171281
void ShenandoahFreeSet::flip_to_old_gc(ShenandoahHeapRegion* r) {

‎src/hotspot/share/gc/shenandoah/shenandoahFreeSet.hpp

-2
Original file line numberDiff line numberDiff line change
@@ -286,7 +286,6 @@ class ShenandoahFreeSet : public CHeapObj<mtGC> {
286286
private:
287287
ShenandoahHeap* const _heap;
288288
ShenandoahRegionPartitions _partitions;
289-
ShenandoahHeapRegion** _trash_regions;
290289

291290
HeapWord* allocate_aligned_plab(size_t size, ShenandoahAllocRequest& req, ShenandoahHeapRegion* r);
292291

@@ -352,7 +351,6 @@ class ShenandoahFreeSet : public CHeapObj<mtGC> {
352351
HeapWord* try_allocate_from_mutator(ShenandoahAllocRequest& req, bool& in_new_region);
353352

354353
void clear_internal();
355-
void try_recycle_trashed(ShenandoahHeapRegion *r);
356354

357355
// Returns true iff this region is entirely available, either because it is empty() or because it has been found to represent
358356
// immediate trash and we'll be able to immediately recycle it. Note that we cannot recycle immediate trash if

‎src/hotspot/share/gc/shenandoah/shenandoahFullGC.cpp

+2-2
Original file line numberDiff line numberDiff line change
@@ -541,7 +541,7 @@ class ShenandoahEnsureHeapActiveClosure: public ShenandoahHeapRegionClosure {
541541
ShenandoahEnsureHeapActiveClosure() : _heap(ShenandoahHeap::heap()) {}
542542
void heap_region_do(ShenandoahHeapRegion* r) {
543543
if (r->is_trash()) {
544-
r->recycle();
544+
r->try_recycle_under_lock();
545545
}
546546
if (r->is_cset()) {
547547
// Leave affiliation unchanged
@@ -990,7 +990,7 @@ class ShenandoahPostCompactClosure : public ShenandoahHeapRegionClosure {
990990
// Recycle all trash regions
991991
if (r->is_trash()) {
992992
live = 0;
993-
r->recycle();
993+
r->try_recycle_under_lock();
994994
} else {
995995
if (r->is_old()) {
996996
ShenandoahGenerationalFullGC::account_for_region(r, _old_regions, _old_usage, _old_humongous_waste);

‎src/hotspot/share/gc/shenandoah/shenandoahGeneration.cpp

+23-20
Original file line numberDiff line numberDiff line change
@@ -883,43 +883,45 @@ size_t ShenandoahGeneration::increment_affiliated_region_count() {
883883
// During full gc, multiple GC worker threads may change region affiliations without a lock. No lock is enforced
884884
// on read and write of _affiliated_region_count. At the end of full gc, a single thread overwrites the count with
885885
// a coherent value.
886-
_affiliated_region_count++;
887-
return _affiliated_region_count;
886+
return Atomic::add(&_affiliated_region_count, (size_t) 1);
888887
}
889888

890889
size_t ShenandoahGeneration::decrement_affiliated_region_count() {
891890
shenandoah_assert_heaplocked_or_safepoint();
892891
// During full gc, multiple GC worker threads may change region affiliations without a lock. No lock is enforced
893892
// on read and write of _affiliated_region_count. At the end of full gc, a single thread overwrites the count with
894893
// a coherent value.
895-
_affiliated_region_count--;
894+
auto affiliated_region_count = Atomic::sub(&_affiliated_region_count, (size_t) 1);
896895
assert(ShenandoahHeap::heap()->is_full_gc_in_progress() ||
897-
(_used + _humongous_waste <= _affiliated_region_count * ShenandoahHeapRegion::region_size_bytes()),
896+
(used() + _humongous_waste <= affiliated_region_count * ShenandoahHeapRegion::region_size_bytes()),
898897
"used + humongous cannot exceed regions");
899-
return _affiliated_region_count;
898+
return affiliated_region_count;
899+
}
900+
901+
size_t ShenandoahGeneration::decrement_affiliated_region_count_without_lock() {
902+
return Atomic::sub(&_affiliated_region_count, (size_t) 1);
900903
}
901904

902905
size_t ShenandoahGeneration::increase_affiliated_region_count(size_t delta) {
903906
shenandoah_assert_heaplocked_or_safepoint();
904-
_affiliated_region_count += delta;
905-
return _affiliated_region_count;
907+
return Atomic::add(&_affiliated_region_count, delta);
906908
}
907909

908910
size_t ShenandoahGeneration::decrease_affiliated_region_count(size_t delta) {
909911
shenandoah_assert_heaplocked_or_safepoint();
910-
assert(_affiliated_region_count >= delta, "Affiliated region count cannot be negative");
912+
assert(Atomic::load(&_affiliated_region_count) >= delta, "Affiliated region count cannot be negative");
911913

912-
_affiliated_region_count -= delta;
914+
auto const affiliated_region_count = Atomic::sub(&_affiliated_region_count, delta);
913915
assert(ShenandoahHeap::heap()->is_full_gc_in_progress() ||
914-
(_used + _humongous_waste <= _affiliated_region_count * ShenandoahHeapRegion::region_size_bytes()),
916+
(_used + _humongous_waste <= affiliated_region_count * ShenandoahHeapRegion::region_size_bytes()),
915917
"used + humongous cannot exceed regions");
916-
return _affiliated_region_count;
918+
return affiliated_region_count;
917919
}
918920

919921
void ShenandoahGeneration::establish_usage(size_t num_regions, size_t num_bytes, size_t humongous_waste) {
920922
assert(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "must be at a safepoint");
921-
_affiliated_region_count = num_regions;
922-
_used = num_bytes;
923+
Atomic::store(&_affiliated_region_count, num_regions);
924+
Atomic::store(&_used, num_bytes);
923925
_humongous_waste = humongous_waste;
924926
}
925927

@@ -948,21 +950,22 @@ void ShenandoahGeneration::decrease_used(size_t bytes) {
948950
}
949951

950952
size_t ShenandoahGeneration::used_regions() const {
951-
return _affiliated_region_count;
953+
return Atomic::load(&_affiliated_region_count);
952954
}
953955

954956
size_t ShenandoahGeneration::free_unaffiliated_regions() const {
955957
size_t result = max_capacity() / ShenandoahHeapRegion::region_size_bytes();
956-
if (_affiliated_region_count > result) {
958+
auto const used_regions = this->used_regions();
959+
if (used_regions > result) {
957960
result = 0;
958961
} else {
959-
result -= _affiliated_region_count;
962+
result -= used_regions;
960963
}
961964
return result;
962965
}
963966

964967
size_t ShenandoahGeneration::used_regions_size() const {
965-
return _affiliated_region_count * ShenandoahHeapRegion::region_size_bytes();
968+
return used_regions() * ShenandoahHeapRegion::region_size_bytes();
966969
}
967970

968971
size_t ShenandoahGeneration::available() const {
@@ -996,7 +999,7 @@ size_t ShenandoahGeneration::increase_capacity(size_t increment) {
996999

9971000
// This detects arithmetic wraparound on _used
9981001
assert(ShenandoahHeap::heap()->is_full_gc_in_progress() ||
999-
(_affiliated_region_count * ShenandoahHeapRegion::region_size_bytes() >= _used),
1002+
(used_regions_size() >= used()),
10001003
"Affiliated regions must hold more than what is currently used");
10011004
return _max_capacity;
10021005
}
@@ -1020,12 +1023,12 @@ size_t ShenandoahGeneration::decrease_capacity(size_t decrement) {
10201023

10211024
// This detects arithmetic wraparound on _used
10221025
assert(ShenandoahHeap::heap()->is_full_gc_in_progress() ||
1023-
(_affiliated_region_count * ShenandoahHeapRegion::region_size_bytes() >= _used),
1026+
(used_regions_size() >= used()),
10241027
"Affiliated regions must hold more than what is currently used");
10251028
assert(ShenandoahHeap::heap()->is_full_gc_in_progress() ||
10261029
(_used <= _max_capacity), "Cannot use more than capacity");
10271030
assert(ShenandoahHeap::heap()->is_full_gc_in_progress() ||
1028-
(_affiliated_region_count * ShenandoahHeapRegion::region_size_bytes() <= _max_capacity),
1031+
(used_regions_size() <= _max_capacity),
10291032
"Cannot use more than capacity");
10301033
return _max_capacity;
10311034
}

‎src/hotspot/share/gc/shenandoah/shenandoahGeneration.hpp

+4-2
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ class ShenandoahGeneration : public CHeapObj<mtGC>, public ShenandoahSpaceInfo {
5252

5353
ShenandoahReferenceProcessor* const _ref_processor;
5454

55-
size_t _affiliated_region_count;
55+
volatile size_t _affiliated_region_count;
5656

5757
// How much free memory is left in the last region of humongous objects.
5858
// This is _not_ included in used, but it _is_ deducted from available,
@@ -131,7 +131,7 @@ class ShenandoahGeneration : public CHeapObj<mtGC>, public ShenandoahSpaceInfo {
131131
virtual size_t used_regions() const;
132132
virtual size_t used_regions_size() const;
133133
virtual size_t free_unaffiliated_regions() const;
134-
size_t used() const override { return _used; }
134+
size_t used() const override { return Atomic::load(&_used); }
135135
size_t available() const override;
136136
size_t available_with_reserve() const;
137137
size_t used_including_humongous_waste() const {
@@ -219,6 +219,8 @@ class ShenandoahGeneration : public CHeapObj<mtGC>, public ShenandoahSpaceInfo {
219219

220220
// Return the updated value of affiliated_region_count
221221
size_t decrement_affiliated_region_count();
222+
// Same as decrement_affiliated_region_count, but w/o the need to hold heap lock before being called.
223+
size_t decrement_affiliated_region_count_without_lock();
222224

223225
// Return the updated value of affiliated_region_count
224226
size_t increase_affiliated_region_count(size_t delta);

‎src/hotspot/share/gc/shenandoah/shenandoahHeap.hpp

+2-2
Original file line numberDiff line numberDiff line change
@@ -605,10 +605,10 @@ class ShenandoahHeap : public CollectedHeap {
605605
// such an object as unreachable.
606606
inline bool is_in_old_during_young_collection(oop obj) const;
607607

608-
inline ShenandoahAffiliation region_affiliation(const ShenandoahHeapRegion* r);
608+
inline ShenandoahAffiliation region_affiliation(const ShenandoahHeapRegion* r) const;
609609
inline void set_affiliation(ShenandoahHeapRegion* r, ShenandoahAffiliation new_affiliation);
610610

611-
inline ShenandoahAffiliation region_affiliation(size_t index);
611+
inline ShenandoahAffiliation region_affiliation(size_t index) const;
612612

613613
bool requires_barriers(stackChunkOop obj) const override;
614614

‎src/hotspot/share/gc/shenandoah/shenandoahHeap.inline.hpp

+10-10
Original file line numberDiff line numberDiff line change
@@ -371,7 +371,7 @@ inline bool ShenandoahHeap::is_in_active_generation(oop obj) const {
371371
// No flickering!
372372
assert(gen == active_generation(), "Race?");
373373

374-
switch (_affiliations[index]) {
374+
switch (region_affiliation(index)) {
375375
case ShenandoahAffiliation::FREE:
376376
// Free regions are in old, young, and global collections
377377
return true;
@@ -382,25 +382,25 @@ inline bool ShenandoahHeap::is_in_active_generation(oop obj) const {
382382
// Old regions are in old and global collections, not in young collections
383383
return !gen->is_young();
384384
default:
385-
assert(false, "Bad affiliation (%d) for region " SIZE_FORMAT, _affiliations[index], index);
385+
assert(false, "Bad affiliation (%d) for region " SIZE_FORMAT, region_affiliation(index), index);
386386
return false;
387387
}
388388
}
389389

390390
inline bool ShenandoahHeap::is_in_young(const void* p) const {
391-
return is_in_reserved(p) && (_affiliations[heap_region_index_containing(p)] == ShenandoahAffiliation::YOUNG_GENERATION);
391+
return is_in_reserved(p) && (region_affiliation(heap_region_index_containing(p)) == ShenandoahAffiliation::YOUNG_GENERATION);
392392
}
393393

394394
inline bool ShenandoahHeap::is_in_old(const void* p) const {
395-
return is_in_reserved(p) && (_affiliations[heap_region_index_containing(p)] == ShenandoahAffiliation::OLD_GENERATION);
395+
return is_in_reserved(p) && (region_affiliation(heap_region_index_containing(p)) == ShenandoahAffiliation::OLD_GENERATION);
396396
}
397397

398398
inline bool ShenandoahHeap::is_in_old_during_young_collection(oop obj) const {
399399
return active_generation()->is_young() && is_in_old(obj);
400400
}
401401

402-
inline ShenandoahAffiliation ShenandoahHeap::region_affiliation(const ShenandoahHeapRegion *r) {
403-
return (ShenandoahAffiliation) _affiliations[r->index()];
402+
inline ShenandoahAffiliation ShenandoahHeap::region_affiliation(const ShenandoahHeapRegion *r) const {
403+
return region_affiliation(r->index());
404404
}
405405

406406
inline void ShenandoahHeap::assert_lock_for_affiliation(ShenandoahAffiliation orig_affiliation,
@@ -419,7 +419,7 @@ inline void ShenandoahHeap::assert_lock_for_affiliation(ShenandoahAffiliation or
419419
//
420420
// Note: during full GC, all transitions between states are possible. During Full GC, we should be in a safepoint.
421421

422-
if ((orig_affiliation == ShenandoahAffiliation::FREE) || (new_affiliation == ShenandoahAffiliation::FREE)) {
422+
if (orig_affiliation == ShenandoahAffiliation::FREE) {
423423
shenandoah_assert_heaplocked_or_safepoint();
424424
}
425425
}
@@ -428,11 +428,11 @@ inline void ShenandoahHeap::set_affiliation(ShenandoahHeapRegion* r, ShenandoahA
428428
#ifdef ASSERT
429429
assert_lock_for_affiliation(region_affiliation(r), new_affiliation);
430430
#endif
431-
_affiliations[r->index()] = (uint8_t) new_affiliation;
431+
Atomic::store(_affiliations + r->index(), (uint8_t) new_affiliation);
432432
}
433433

434-
inline ShenandoahAffiliation ShenandoahHeap::region_affiliation(size_t index) {
435-
return (ShenandoahAffiliation) _affiliations[index];
434+
inline ShenandoahAffiliation ShenandoahHeap::region_affiliation(size_t index) const {
435+
return (ShenandoahAffiliation) Atomic::load(_affiliations + index);
436436
}
437437

438438
inline bool ShenandoahHeap::requires_marking(const void* entry) const {

‎src/hotspot/share/gc/shenandoah/shenandoahHeapRegion.cpp

+68-32
Original file line numberDiff line numberDiff line change
@@ -88,19 +88,20 @@ ShenandoahHeapRegion::ShenandoahHeapRegion(HeapWord* start, size_t index, bool c
8888
if (ZapUnusedHeapArea && committed) {
8989
SpaceMangler::mangle_region(MemRegion(_bottom, _end));
9090
}
91+
_recycling.unset();
9192
}
9293

9394
void ShenandoahHeapRegion::report_illegal_transition(const char *method) {
9495
stringStream ss;
95-
ss.print("Illegal region state transition from \"%s\", at %s\n ", region_state_to_string(_state), method);
96+
ss.print("Illegal region state transition from \"%s\", at %s\n ", region_state_to_string(state()), method);
9697
print_on(&ss);
9798
fatal("%s", ss.freeze());
9899
}
99100

100101
void ShenandoahHeapRegion::make_regular_allocation(ShenandoahAffiliation affiliation) {
101102
shenandoah_assert_heaplocked();
102103
reset_age();
103-
switch (_state) {
104+
switch (state()) {
104105
case _empty_uncommitted:
105106
do_commit();
106107
case _empty_committed:
@@ -120,7 +121,7 @@ void ShenandoahHeapRegion::make_regular_allocation(ShenandoahAffiliation affilia
120121
void ShenandoahHeapRegion::make_affiliated_maybe() {
121122
shenandoah_assert_heaplocked();
122123
assert(!ShenandoahHeap::heap()->mode()->is_generational(), "Only call if non-generational");
123-
switch (_state) {
124+
switch (state()) {
124125
case _empty_uncommitted:
125126
case _empty_committed:
126127
case _cset:
@@ -146,14 +147,15 @@ void ShenandoahHeapRegion::make_regular_bypass() {
146147
ShenandoahHeap::heap()->is_degenerated_gc_in_progress(),
147148
"Only for STW GC or when Universe is initializing (CDS)");
148149
reset_age();
149-
switch (_state) {
150+
auto cur_state = state();
151+
switch (cur_state) {
150152
case _empty_uncommitted:
151153
do_commit();
152154
case _empty_committed:
153155
case _cset:
154156
case _humongous_start:
155157
case _humongous_cont:
156-
if (_state == _humongous_start || _state == _humongous_cont) {
158+
if (cur_state == _humongous_start || cur_state == _humongous_cont) {
157159
// CDS allocates chunks of the heap to fill with regular objects. The allocator
158160
// will dutifully track any waste in the unused portion of the last region. Once
159161
// CDS has finished initializing the objects, it will convert these regions to
@@ -177,7 +179,7 @@ void ShenandoahHeapRegion::make_regular_bypass() {
177179
void ShenandoahHeapRegion::make_humongous_start() {
178180
shenandoah_assert_heaplocked();
179181
reset_age();
180-
switch (_state) {
182+
switch (state()) {
181183
case _empty_uncommitted:
182184
do_commit();
183185
case _empty_committed:
@@ -194,7 +196,7 @@ void ShenandoahHeapRegion::make_humongous_start_bypass(ShenandoahAffiliation aff
194196
// Don't bother to account for affiliated regions during Full GC. We recompute totals at end.
195197
set_affiliation(affiliation);
196198
reset_age();
197-
switch (_state) {
199+
switch (state()) {
198200
case _empty_committed:
199201
case _regular:
200202
case _humongous_start:
@@ -209,7 +211,7 @@ void ShenandoahHeapRegion::make_humongous_start_bypass(ShenandoahAffiliation aff
209211
void ShenandoahHeapRegion::make_humongous_cont() {
210212
shenandoah_assert_heaplocked();
211213
reset_age();
212-
switch (_state) {
214+
switch (state()) {
213215
case _empty_uncommitted:
214216
do_commit();
215217
case _empty_committed:
@@ -226,7 +228,7 @@ void ShenandoahHeapRegion::make_humongous_cont_bypass(ShenandoahAffiliation affi
226228
set_affiliation(affiliation);
227229
// Don't bother to account for affiliated regions during Full GC. We recompute totals at end.
228230
reset_age();
229-
switch (_state) {
231+
switch (state()) {
230232
case _empty_committed:
231233
case _regular:
232234
case _humongous_start:
@@ -242,7 +244,7 @@ void ShenandoahHeapRegion::make_pinned() {
242244
shenandoah_assert_heaplocked();
243245
assert(pin_count() > 0, "Should have pins: " SIZE_FORMAT, pin_count());
244246

245-
switch (_state) {
247+
switch (state()) {
246248
case _regular:
247249
set_state(_pinned);
248250
case _pinned_cset:
@@ -253,7 +255,7 @@ void ShenandoahHeapRegion::make_pinned() {
253255
case _pinned_humongous_start:
254256
return;
255257
case _cset:
256-
_state = _pinned_cset;
258+
set_state(_pinned_cset);
257259
return;
258260
default:
259261
report_illegal_transition("pinning");
@@ -264,7 +266,7 @@ void ShenandoahHeapRegion::make_unpinned() {
264266
shenandoah_assert_heaplocked();
265267
assert(pin_count() == 0, "Should not have pins: " SIZE_FORMAT, pin_count());
266268

267-
switch (_state) {
269+
switch (state()) {
268270
case _pinned:
269271
assert(is_affiliated(), "Pinned region should be affiliated");
270272
set_state(_regular);
@@ -286,7 +288,7 @@ void ShenandoahHeapRegion::make_unpinned() {
286288
void ShenandoahHeapRegion::make_cset() {
287289
shenandoah_assert_heaplocked();
288290
// Leave age untouched. We need to consult the age when we are deciding whether to promote evacuated objects.
289-
switch (_state) {
291+
switch (state()) {
290292
case _regular:
291293
set_state(_cset);
292294
case _cset:
@@ -299,7 +301,7 @@ void ShenandoahHeapRegion::make_cset() {
299301
void ShenandoahHeapRegion::make_trash() {
300302
shenandoah_assert_heaplocked();
301303
reset_age();
302-
switch (_state) {
304+
switch (state()) {
303305
case _humongous_start:
304306
case _humongous_cont:
305307
{
@@ -329,10 +331,9 @@ void ShenandoahHeapRegion::make_trash_immediate() {
329331
}
330332

331333
void ShenandoahHeapRegion::make_empty() {
332-
shenandoah_assert_heaplocked();
333334
reset_age();
334335
CENSUS_NOISE(clear_youth();)
335-
switch (_state) {
336+
switch (state()) {
336337
case _trash:
337338
set_state(_empty_committed);
338339
_empty_time = os::elapsedTime();
@@ -344,7 +345,7 @@ void ShenandoahHeapRegion::make_empty() {
344345

345346
void ShenandoahHeapRegion::make_uncommitted() {
346347
shenandoah_assert_heaplocked();
347-
switch (_state) {
348+
switch (state()) {
348349
case _empty_committed:
349350
do_uncommit();
350351
set_state(_empty_uncommitted);
@@ -358,7 +359,7 @@ void ShenandoahHeapRegion::make_committed_bypass() {
358359
shenandoah_assert_heaplocked();
359360
assert (ShenandoahHeap::heap()->is_full_gc_in_progress(), "only for full GC");
360361

361-
switch (_state) {
362+
switch (state()) {
362363
case _empty_uncommitted:
363364
do_commit();
364365
set_state(_empty_committed);
@@ -399,7 +400,7 @@ void ShenandoahHeapRegion::print_on(outputStream* st) const {
399400
st->print("|");
400401
st->print(SIZE_FORMAT_W(5), this->_index);
401402

402-
switch (_state) {
403+
switch (state()) {
403404
case _empty_uncommitted:
404405
st->print("|EU ");
405406
break;
@@ -569,27 +570,62 @@ ShenandoahHeapRegion* ShenandoahHeapRegion::humongous_start_region() const {
569570
return r;
570571
}
571572

572-
void ShenandoahHeapRegion::recycle() {
573-
shenandoah_assert_heaplocked();
574-
ShenandoahHeap* heap = ShenandoahHeap::heap();
575-
ShenandoahGeneration* generation = heap->generation_for(affiliation());
576573

577-
heap->decrease_used(generation, used());
578-
generation->decrement_affiliated_region_count();
574+
void ShenandoahHeapRegion::recycle_internal() {
575+
assert(_recycling.is_set() && is_trash(), "Wrong state");
576+
ShenandoahHeap* heap = ShenandoahHeap::heap();
579577

580578
set_top(bottom());
581579
clear_live_data();
582580
reset_alloc_metadata();
583-
584581
heap->marking_context()->reset_top_at_mark_start(this);
585-
586582
set_update_watermark(bottom());
583+
if (ZapUnusedHeapArea) {
584+
SpaceMangler::mangle_region(MemRegion(bottom(), end()));
585+
}
587586

588587
make_empty();
589-
590588
set_affiliation(FREE);
591-
if (ZapUnusedHeapArea) {
592-
SpaceMangler::mangle_region(MemRegion(bottom(), end()));
589+
}
590+
591+
void ShenandoahHeapRegion::try_recycle_under_lock() {
592+
shenandoah_assert_heaplocked();
593+
if (is_trash() && _recycling.try_set()) {
594+
if (is_trash()) {
595+
ShenandoahHeap* heap = ShenandoahHeap::heap();
596+
ShenandoahGeneration* generation = heap->generation_for(affiliation());
597+
598+
heap->decrease_used(generation, used());
599+
generation->decrement_affiliated_region_count();
600+
601+
recycle_internal();
602+
}
603+
_recycling.unset();
604+
} else {
605+
// Ensure recycling is unset before returning to mutator to continue memory allocation.
606+
while (_recycling.is_set()) {
607+
if (os::is_MP()) {
608+
SpinPause();
609+
} else {
610+
os::naked_yield();
611+
}
612+
}
613+
}
614+
}
615+
616+
void ShenandoahHeapRegion::try_recycle() {
617+
shenandoah_assert_not_heaplocked();
618+
if (is_trash() && _recycling.try_set()) {
619+
// Double-check region state after winning the race to set the recycling flag
620+
if (is_trash()) {
621+
ShenandoahHeap* heap = ShenandoahHeap::heap();
622+
ShenandoahGeneration* generation = heap->generation_for(affiliation());
623+
heap->decrease_used(generation, used());
624+
generation->decrement_affiliated_region_count_without_lock();
625+
626+
recycle_internal();
627+
}
628+
_recycling.unset();
593629
}
594630
}
595631

@@ -795,11 +831,11 @@ void ShenandoahHeapRegion::set_state(RegionState to) {
795831
evt.set_index((unsigned) index());
796832
evt.set_start((uintptr_t)bottom());
797833
evt.set_used(used());
798-
evt.set_from(_state);
834+
evt.set_from(state());
799835
evt.set_to(to);
800836
evt.commit();
801837
}
802-
_state = to;
838+
Atomic::store(&_state, to);
803839
}
804840

805841
void ShenandoahHeapRegion::record_pin() {

‎src/hotspot/share/gc/shenandoah/shenandoahHeapRegion.hpp

+28-21
Original file line numberDiff line numberDiff line change
@@ -165,6 +165,7 @@ class ShenandoahHeapRegion {
165165
}
166166

167167
void report_illegal_transition(const char* method);
168+
void recycle_internal();
168169

169170
public:
170171
static int region_states_num() {
@@ -188,33 +189,35 @@ class ShenandoahHeapRegion {
188189
void make_uncommitted();
189190
void make_committed_bypass();
190191

191-
// Individual states:
192-
bool is_empty_uncommitted() const { return _state == _empty_uncommitted; }
193-
bool is_empty_committed() const { return _state == _empty_committed; }
194-
bool is_regular() const { return _state == _regular; }
195-
bool is_humongous_continuation() const { return _state == _humongous_cont; }
196-
197-
// Participation in logical groups:
198-
bool is_empty() const { return is_empty_committed() || is_empty_uncommitted(); }
199-
bool is_active() const { return !is_empty() && !is_trash(); }
200-
bool is_trash() const { return _state == _trash; }
201-
bool is_humongous_start() const { return _state == _humongous_start || _state == _pinned_humongous_start; }
202-
bool is_humongous() const { return is_humongous_start() || is_humongous_continuation(); }
192+
// Primitive state predicates
193+
bool is_empty_uncommitted() const { return state() == _empty_uncommitted; }
194+
bool is_empty_committed() const { return state() == _empty_committed; }
195+
bool is_regular() const { return state() == _regular; }
196+
bool is_humongous_continuation() const { return state() == _humongous_cont; }
197+
bool is_regular_pinned() const { return state() == _pinned; }
198+
bool is_trash() const { return state() == _trash; }
199+
200+
// Derived state predicates (boolean combinations of individual states)
201+
bool static is_empty_state(RegionState state) { return state == _empty_committed || state == _empty_uncommitted; }
202+
bool static is_humongous_start_state(RegionState state) { return state == _humongous_start || state == _pinned_humongous_start; }
203+
bool is_empty() const { return is_empty_state(this->state()); }
204+
bool is_active() const { auto cur_state = state(); return !is_empty_state(cur_state) && cur_state != _trash; }
205+
bool is_humongous_start() const { return is_humongous_start_state(state()); }
206+
bool is_humongous() const { auto cur_state = state(); return is_humongous_start_state(cur_state) || cur_state == _humongous_cont; }
203207
bool is_committed() const { return !is_empty_uncommitted(); }
204-
bool is_cset() const { return _state == _cset || _state == _pinned_cset; }
205-
bool is_pinned() const { return _state == _pinned || _state == _pinned_cset || _state == _pinned_humongous_start; }
206-
bool is_regular_pinned() const { return _state == _pinned; }
208+
bool is_cset() const { auto cur_state = state(); return cur_state == _cset || cur_state == _pinned_cset; }
209+
bool is_pinned() const { auto cur_state = state(); return cur_state == _pinned || cur_state == _pinned_cset || cur_state == _pinned_humongous_start; }
207210

208211
inline bool is_young() const;
209212
inline bool is_old() const;
210213
inline bool is_affiliated() const;
211214

212215
// Macro-properties:
213-
bool is_alloc_allowed() const { return is_empty() || is_regular() || _state == _pinned; }
214-
bool is_stw_move_allowed() const { return is_regular() || _state == _cset || (ShenandoahHumongousMoves && _state == _humongous_start); }
216+
bool is_alloc_allowed() const { auto cur_state = state(); return is_empty_state(cur_state) || cur_state == _regular || cur_state == _pinned; }
217+
bool is_stw_move_allowed() const { auto cur_state = state(); return cur_state == _regular || cur_state == _cset || (ShenandoahHumongousMoves && cur_state == _humongous_start); }
215218

216-
RegionState state() const { return _state; }
217-
int state_ordinal() const { return region_state_to_ordinal(_state); }
219+
RegionState state() const { return Atomic::load(&_state); }
220+
int state_ordinal() const { return region_state_to_ordinal(state()); }
218221

219222
void record_pin();
220223
void record_unpin();
@@ -243,7 +246,7 @@ class ShenandoahHeapRegion {
243246
HeapWord* _top_before_promoted;
244247

245248
// Seldom updated fields
246-
RegionState _state;
249+
volatile RegionState _state;
247250
HeapWord* _coalesce_and_fill_boundary; // for old regions not selected as collection set candidates.
248251

249252
// Frequently updated fields
@@ -261,6 +264,8 @@ class ShenandoahHeapRegion {
261264
uint _age;
262265
CENSUS_NOISE(uint _youth;) // tracks epochs of retrograde ageing (rejuvenation)
263266

267+
ShenandoahSharedFlag _recycling; // Used to indicate that the region is being recycled; see try_recycle*().
268+
264269
public:
265270
ShenandoahHeapRegion(HeapWord* start, size_t index, bool committed);
266271

@@ -376,7 +381,9 @@ class ShenandoahHeapRegion {
376381

377382
void print_on(outputStream* st) const;
378383

379-
void recycle();
384+
void try_recycle_under_lock();
385+
386+
void try_recycle();
380387

381388
inline void begin_preemptible_coalesce_and_fill() {
382389
_coalesce_and_fill_boundary = _bottom;

‎src/hotspot/share/gc/shenandoah/shenandoahWorkerPolicy.cpp

+4
Original file line numberDiff line numberDiff line change
@@ -74,3 +74,7 @@ uint ShenandoahWorkerPolicy::calc_workers_for_final_update_ref() {
7474
uint ShenandoahWorkerPolicy::calc_workers_for_conc_reset() {
7575
return ConcGCThreads;
7676
}
77+
78+
uint ShenandoahWorkerPolicy::calc_workers_for_conc_cleanup() {
79+
return ConcGCThreads;
80+
}

‎src/hotspot/share/gc/shenandoah/shenandoahWorkerPolicy.hpp

+3
Original file line numberDiff line numberDiff line change
@@ -64,6 +64,9 @@ class ShenandoahWorkerPolicy : AllStatic {
6464

6565
// Calculate workers for concurrent reset
6666
static uint calc_workers_for_conc_reset();
67+
68+
// Calculate workers for concurrent cleanup
69+
static uint calc_workers_for_conc_cleanup();
6770
};
6871

6972
#endif // SHARE_GC_SHENANDOAH_SHENANDOAHWORKERPOLICY_HPP

‎src/hotspot/share/gc/shenandoah/vmStructs_shenandoah.hpp

+1-1
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@
3939
volatile_nonstatic_field(ShenandoahGeneration, _used, size_t) \
4040
static_field(ShenandoahHeapRegion, RegionSizeBytes, size_t) \
4141
static_field(ShenandoahHeapRegion, RegionSizeBytesShift, size_t) \
42-
nonstatic_field(ShenandoahHeapRegion, _state, ShenandoahHeapRegion::RegionState) \
42+
volatile_nonstatic_field(ShenandoahHeapRegion, _state, ShenandoahHeapRegion::RegionState) \
4343
nonstatic_field(ShenandoahHeapRegion, _index, size_t const) \
4444
nonstatic_field(ShenandoahHeapRegion, _bottom, HeapWord* const) \
4545
nonstatic_field(ShenandoahHeapRegion, _top, HeapWord*) \

0 commit comments

Comments
 (0)
Please sign in to comment.