@@ -70,6 +70,7 @@ JfrThreadLocal::JfrThreadLocal() :
   _wallclock_time(os::javaTimeNanos()),
   _stackdepth(0),
   _entering_suspend_flag(0),
+  _non_reentrant_nesting(0),
   _vthread_epoch(0),
   _vthread_excluded(false),
   _jvm_thread_excluded(false),
@@ -365,28 +366,32 @@ typedef JfrOopTraceId<ThreadIdAccess> AccessThreadTraceId;
 void JfrThreadLocal::set_vthread_epoch(const JavaThread* jt, traceid tid, u2 epoch) {
   assert(jt != nullptr, "invariant");
   assert(is_vthread(jt), "invariant");
-  // To support event recursion, we update the native side first,
-  // this provides the terminating case.
+  assert(!is_non_reentrant(), "invariant");
+
   Atomic::store(&jt->jfr_thread_local()->_vthread_epoch, epoch);
-  /*
-   * The java side, i.e. the vthread object, can now be updated.
-   * Accessing the vthread object itself is a recursive case,
-   * because it can trigger additional events, e.g.
-   * loading the oop through load barriers.
-   * Note there is a potential problem with this solution:
-   * The recursive write hitting the terminating case will
-   * use the thread id _before_ the checkpoint is committed.
-   * Hence, the periodic thread can possibly flush that event
-   * to a segment that does not include an associated checkpoint.
-   * Considered rare and quite benign for now. The worst case is
-   * that thread information for that event is not resolvable, i.e. null.
-   */
+
   oop vthread = jt->vthread();
   assert(vthread != nullptr, "invariant");
+
   AccessThreadTraceId::set_epoch(vthread, epoch);
   JfrCheckpointManager::write_checkpoint(const_cast<JavaThread*>(jt), tid, vthread);
 }
 
+void JfrThreadLocal::set_vthread_epoch_checked(const JavaThread* jt, traceid tid, u2 epoch) {
+  assert(jt != nullptr, "invariant");
+  assert(is_vthread(jt), "invariant");
+
+  // If the event is marked as non reentrant, write only a simplified version of the vthread info.
+  // Essentially all the same info except the vthread name, because we cannot touch the oop.
+  // Since we cannot touch the oop, we also cannot update its vthread epoch.
+  if (is_non_reentrant()) {
+    JfrCheckpointManager::write_simplified_vthread_checkpoint(tid);
+    return;
+  }
+
+  set_vthread_epoch(jt, tid, epoch);
+}
+
 traceid JfrThreadLocal::vthread_id(const Thread* t) {
   assert(t != nullptr, "invariant");
   return Atomic::load(&t->jfr_thread_local()->_vthread_id);
@@ -416,7 +421,7 @@ traceid JfrThreadLocal::thread_id(const Thread* t) {
   if (!tl->is_vthread_excluded()) {
     const u2 current_epoch = AccessThreadTraceId::current_epoch();
     if (vthread_epoch(jt) != current_epoch) {
-      set_vthread_epoch(jt, tid, current_epoch);
+      set_vthread_epoch_checked(jt, tid, current_epoch);
     }
   }
   return tid;
@@ -480,6 +485,26 @@ bool JfrThreadLocal::is_vthread(const JavaThread* jt) {
   return Atomic::load_acquire(&jt->jfr_thread_local()->_vthread) && jt->last_continuation() != nullptr;
 }
 
+int32_t JfrThreadLocal::make_non_reentrant(Thread* t) {
+  assert(t != nullptr, "invariant");
+  if (!t->is_Java_thread() || !is_vthread(JavaThread::cast(t))) {
+    return -1;
+  }
+  return t->jfr_thread_local()->_non_reentrant_nesting++;
+}
+
+void JfrThreadLocal::make_reentrant(Thread* t, int32_t previous_nesting) {
+  assert(t->is_Java_thread() && is_vthread(JavaThread::cast(t)), "invariant");
+  assert(previous_nesting >= 0, "invariant");
+  t->jfr_thread_local()->_non_reentrant_nesting = previous_nesting;
+}
+
+bool JfrThreadLocal::is_non_reentrant() {
+  Thread* const current_thread = Thread::current();
+  assert(current_thread != nullptr, "invariant");
+  return current_thread->jfr_thread_local()->_non_reentrant_nesting > 0;
+}
+
 inline bool is_virtual(const JavaThread* jt, oop thread) {
   assert(jt != nullptr, "invariant");
   return thread != jt->threadObj();
@@ -493,6 +518,7 @@ void JfrThreadLocal::on_set_current_thread(JavaThread* jt, oop thread) {
     Atomic::release_store(&tl->_vthread, false);
     return;
   }
+  assert(tl->_non_reentrant_nesting == 0, "invariant");
   Atomic::store(&tl->_vthread_id, AccessThreadTraceId::id(thread));
   const u2 epoch_raw = AccessThreadTraceId::epoch(thread);
   const bool excluded = epoch_raw & excluded_bit;
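
Note: this diff introduces the _non_reentrant_nesting counter and its accessors, but the call sites that drive it are outside these hunks. The sketch below is a hypothetical illustration only, not code from this commit. It assumes callers bracket a sensitive region by saving the value returned from make_non_reentrant() and handing it back to make_reentrant(), which is what the -1 sentinel and the previous_nesting parameter suggest. The class name NonReentrantScope and the include set are invented for the example.

// Hypothetical RAII-style guard, assuming callers pair make_non_reentrant()
// with make_reentrant(). Include paths follow the usual HotSpot layout.
#include "jfr/support/jfrThreadLocal.hpp"
#include "memory/allocation.hpp"

class NonReentrantScope : public StackObj {
 private:
  Thread* const _thread;
  const int32_t _previous_nesting;  // -1 means: not a mounted virtual thread, nothing to restore
 public:
  NonReentrantScope(Thread* thread) :
    _thread(thread),
    _previous_nesting(JfrThreadLocal::make_non_reentrant(thread)) {}
  ~NonReentrantScope() {
    // make_non_reentrant() returns -1 for non-virtual threads and make_reentrant()
    // asserts previous_nesting >= 0, so only restore when a level was recorded.
    if (_previous_nesting >= 0) {
      JfrThreadLocal::make_reentrant(_thread, _previous_nesting);
    }
  }
};

// Illustrative use:
//   NonReentrantScope scope(Thread::current());
//   // emit an event here without risking a recursive touch of the vthread oop

Inside such a scope, is_non_reentrant() reports true, so set_vthread_epoch_checked() takes the write_simplified_vthread_checkpoint() path instead of touching the vthread oop.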