@@ -577,6 +577,7 @@ void ShenandoahRegionPartitions::assert_bounds() {
577
577
ShenandoahFreeSet::ShenandoahFreeSet (ShenandoahHeap* heap, size_t max_regions) :
578
578
_heap(heap),
579
579
_partitions(max_regions, this ),
580
+ _trash_regions(NEW_C_HEAP_ARRAY(ShenandoahHeapRegion*, max_regions, mtGC)),
580
581
_right_to_left_bias(false ),
581
582
_alloc_bias_weight(0 )
582
583
{
@@ -899,7 +900,7 @@ HeapWord* ShenandoahFreeSet::allocate_contiguous(ShenandoahAllocRequest& req) {
899
900
return _heap->get_region (beg)->bottom ();
900
901
}
901
902
902
- void ShenandoahFreeSet::try_recycle_trashed (ShenandoahHeapRegion * r) {
903
+ void ShenandoahFreeSet::try_recycle_trashed (ShenandoahHeapRegion* r) {
903
904
if (r->is_trash ()) {
904
905
_heap->decrease_used (r->used ());
905
906
r->recycle ();
void ShenandoahFreeSet::recycle_trash() {
  // lock is not reentrable, check we don't have it
  shenandoah_assert_not_heaplocked();

  // Pass 1 (lock-free): scan every region and stash the trash ones into the
  // pre-allocated _trash_regions side array. count tracks how many were found.
  // NOTE(review): this scan runs without the heap lock -- presumably no region
  // transitions into/out of the trash state concurrently with this phase;
  // confirm against the GC phase in which recycle_trash() is invoked.
  size_t count = 0;
  for (size_t i = 0; i < _heap->num_regions(); i++) {
    ShenandoahHeapRegion* r = _heap->get_region(i);
    if (r->is_trash()) {
      _trash_regions[count++] = r;
    }
  }

  // Pass 2: recycle the collected regions under the heap lock, but only in
  // bounded time slices so mutator allocation paths are not starved waiting
  // for the lock. Each outer iteration: yield, take the lock once, then
  // recycle regions until the time slice expires; the locker releases the
  // lock at the end of the iteration's scope.
  // Relinquish the lock after this much time passed.
  static constexpr jlong deadline_ns = 30000; // 30 us
  size_t idx = 0;
  while (idx < count) {
    os::naked_yield(); // Yield to allow allocators to take the lock
    ShenandoahHeapLocker locker(_heap->lock());
    const jlong deadline = os::javaTimeNanos() + deadline_ns;
    while (idx < count && os::javaTimeNanos() < deadline) {
      try_recycle_trashed(_trash_regions[idx++]);
    }
  }
}
922
934
0 commit comments