 #include "profiler.h"
 #include "asyncSampleMutex.h"
-#include "common.h"
 #include "context.h"
 #include "counters.h"
 #include "ctimer.h"
@@ -278,7 +277,7 @@ int Profiler::getNativeTrace(void *ucontext, ASGCT_CallFrame *frames,
         PerfEvents::walkKernel(tid, callchain + native_frames,
                                MAX_NATIVE_FRAMES - native_frames, java_ctx);
   }
-  if (_cstack == CSTACK_VM) {
+  if (_cstack >= CSTACK_VM) {
     return 0;
   } else if (_cstack == CSTACK_DWARF) {
     native_frames += StackWalker::walkDwarf(ucontext, callchain + native_frames,
@@ -395,7 +394,7 @@ int Profiler::getJavaTraceAsync(void *ucontext, ASGCT_CallFrame *frames,
   if (in_java && java_ctx->sp != 0) {
     // skip ahead to the Java frames before calling AGCT
     frame.restore((uintptr_t)java_ctx->pc, java_ctx->sp, java_ctx->fp);
-  } else if (state != 0 && vm_thread->lastJavaSP() == 0) {
+  } else if (state != 0 && (vm_thread->anchor() == nullptr || vm_thread->anchor()->lastJavaSP() == 0)) {
     // we haven't found the top Java frame ourselves, and the lastJavaSP wasn't
     // recorded either when not in the Java state, lastJava ucontext will be
     // used by AGCT
@@ -474,14 +473,16 @@ int Profiler::getJavaTraceAsync(void *ucontext, ASGCT_CallFrame *frames,
     }
   } else if (trace.num_frames == ticks_unknown_not_Java &&
              !(_safe_mode & LAST_JAVA_PC)) {
-    uintptr_t &sp = vm_thread->lastJavaSP();
-    uintptr_t &pc = vm_thread->lastJavaPC();
-    if (sp != 0 && pc == 0) {
+    JavaFrameAnchor *anchor = vm_thread->anchor();
+    uintptr_t sp = anchor->lastJavaSP();
+    const void *pc = anchor->lastJavaPC();
+    if (sp != 0 && pc == NULL) {
       // We have the last Java frame anchor, but it is not marked as walkable.
       // Make it walkable here
-      pc = ((uintptr_t *)sp)[-1];
+      pc = ((const void **)sp)[-1];
+      anchor->setLastJavaPC(pc);

-      NMethod *m = CodeHeap::findNMethod((const void *)pc);
+      NMethod *m = CodeHeap::findNMethod(pc);
       if (m != NULL) {
         // AGCT fails if the last Java frame is a Runtime Stub with an invalid
         // _frame_complete_offset. In this case we patch _frame_complete_offset
@@ -491,28 +492,29 @@ int Profiler::getJavaTraceAsync(void *ucontext, ASGCT_CallFrame *frames,
           m->setFrameCompleteOffset(0);
         }
         VM::_asyncGetCallTrace(&trace, max_depth, ucontext);
-      } else if (_libs->findLibraryByAddress((const void *)pc) != NULL) {
+      } else if (_libs->findLibraryByAddress(pc) != NULL) {
         VM::_asyncGetCallTrace(&trace, max_depth, ucontext);
       }

-      pc = 0;
+      anchor->setLastJavaPC(nullptr);
     }
   } else if (trace.num_frames == ticks_not_walkable_not_Java &&
              !(_safe_mode & LAST_JAVA_PC)) {
-    uintptr_t &sp = vm_thread->lastJavaSP();
-    uintptr_t &pc = vm_thread->lastJavaPC();
-    if (sp != 0 && pc != 0) {
+    JavaFrameAnchor *anchor = vm_thread->anchor();
+    uintptr_t sp = anchor->lastJavaSP();
+    const void *pc = anchor->lastJavaPC();
+    if (sp != 0 && pc != NULL) {
       // Similar to the above: last Java frame is set,
       // but points to a Runtime Stub with an invalid _frame_complete_offset
-      NMethod *m = CodeHeap::findNMethod((const void *)pc);
+      NMethod *m = CodeHeap::findNMethod(pc);
       if (m != NULL && !m->isNMethod() && m->frameSize() > 0 &&
           m->frameCompleteOffset() == -1) {
         m->setFrameCompleteOffset(0);
         VM::_asyncGetCallTrace(&trace, max_depth, ucontext);
       }
     }
   } else if (trace.num_frames == ticks_GC_active && !(_safe_mode & GC_TRACES)) {
-    if (vm_thread->lastJavaSP() == 0) {
+    if (vm_thread->anchor()->lastJavaSP() == 0) {
       // Do not add 'GC_active' for threads with no Java frames, e.g. Compiler
       // threads
       frame.restore(saved_pc, saved_sp, saved_fp);
@@ -671,28 +673,32 @@ void Profiler::recordSample(void *ucontext, u64 counter, int tid,
   ASGCT_CallFrame *native_stop = frames + num_frames;
   num_frames += getNativeTrace(ucontext, native_stop, event_type, tid,
                                &java_ctx, &truncated);
-  if (_cstack == CSTACK_VM) {
-    num_frames +=
-        StackWalker::walkVM(ucontext, frames + num_frames, _max_stack_depth,
-                            _call_stub_begin, _call_stub_end);
+  if (_cstack == CSTACK_VMX) {
+    num_frames += StackWalker::walkVM(ucontext, frames + num_frames, _max_stack_depth, VM_EXPERT, &truncated);
   } else if (event_type == BCI_CPU || event_type == BCI_WALL) {
-    int java_frames = 0;
-    {
+    if (_cstack == CSTACK_VM) {
+      num_frames += StackWalker::walkVM(ucontext, frames + num_frames, _max_stack_depth, VM_NORMAL, &truncated);
+    } else {
       // Async events
       AsyncSampleMutex mutex(ProfiledThread::current());
+      int java_frames = 0;
       if (mutex.acquired()) {
-        java_frames =
-            getJavaTraceAsync(ucontext, frames + num_frames, _max_stack_depth,
-                              &java_ctx, &truncated);
+        java_frames = getJavaTraceAsync(ucontext, frames + num_frames, _max_stack_depth, &java_ctx, &truncated);
+        if (java_frames > 0 && java_ctx.pc != NULL && VMStructs::hasMethodStructs()) {
+          NMethod *nmethod = CodeHeap::findNMethod(java_ctx.pc);
+          if (nmethod != NULL) {
+            fillFrameTypes(frames + num_frames, java_frames, nmethod);
+          }
+        }
       }
-    }
-    if (java_frames > 0 && java_ctx.pc != NULL) {
-      NMethod *nmethod = CodeHeap::findNMethod(java_ctx.pc);
-      if (nmethod != NULL) {
-        fillFrameTypes(frames + num_frames, java_frames, nmethod);
+      if (java_frames > 0 && java_ctx.pc != NULL) {
+        NMethod *nmethod = CodeHeap::findNMethod(java_ctx.pc);
+        if (nmethod != NULL) {
+          fillFrameTypes(frames + num_frames, java_frames, nmethod);
+        }
       }
+      num_frames += java_frames;
     }
-    num_frames += java_frames;
   }

   if (num_frames == 0) {
@@ -1058,7 +1064,7 @@ Error Profiler::checkJvmCapabilities() {
     }
   }

-  if (!VMStructs::hasDebugSymbols() && !VM::isOpenJ9()) {
+  if (!VMStructs::libjvm()->hasDebugSymbols() && !VM::isOpenJ9()) {
     Log::warn("Install JVM debug symbols to improve profile accuracy");
   }

@@ -1151,7 +1157,6 @@ Error Profiler::start(Arguments &args, bool reset) {
       return Error(
           "VMStructs stack walking is not supported on this JVM/platform");
     }
-    Log::info("cstack=vm is an experimental option, use with care");
   }

   // Kernel symbols are useful only for perf_events without --all-user
@@ -1281,7 +1286,7 @@ Error Profiler::check(Arguments &args) {
     return Error("DWARF unwinding is not supported on this platform");
   } else if (args._cstack == CSTACK_LBR && _cpu_engine != &perf_events) {
     return Error("Branch stack is supported only with PMU events");
-  } else if (args._cstack == CSTACK_VM && !VMStructs::hasStackStructs()) {
+  } else if (_cstack >= CSTACK_VM && !(VMStructs::hasStackStructs() && OS::isLinux())) {
     return Error(
         "VMStructs stack walking is not supported on this JVM/platform");
   }
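
Not part of the patch: a minimal sketch of the frame-anchor pattern the change moves to, where the last Java SP/PC are read through the thread's JavaFrameAnchor, a recovered return address is published temporarily so AsyncGetCallTrace can walk the frame, and the PC is cleared again afterwards. The JavaFrameAnchor layout and the walk callback below are illustrative assumptions, not the real VMStructs wrappers.

// Illustrative sketch only; simplified stand-ins with an assumed field layout.
#include <stdint.h>

struct JavaFrameAnchor {
  uintptr_t _last_java_sp;    // assumed: SP of the last Java frame, 0 if none
  const void *_last_java_pc;  // assumed: PC of that frame, nullptr if unset

  uintptr_t lastJavaSP() const { return _last_java_sp; }
  const void *lastJavaPC() const { return _last_java_pc; }
  void setLastJavaPC(const void *pc) { _last_java_pc = pc; }
};

// Mirrors the patched flow: if the anchor has an SP but no PC, recover the
// return address stored just below SP, publish it for the duration of the
// walk, then reset it so the anchor is left exactly as it was found.
template <typename WalkFn>
void walkWithTemporaryPC(JavaFrameAnchor *anchor, WalkFn walk) {
  uintptr_t sp = anchor->lastJavaSP();
  if (sp != 0 && anchor->lastJavaPC() == nullptr) {
    const void *pc = ((const void **)sp)[-1];  // saved return address
    anchor->setLastJavaPC(pc);
    walk(pc);
    anchor->setLastJavaPC(nullptr);
  }
}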