diff --git a/bindings/profiler.cc b/bindings/profiler.cc
index cab84fc7..587e9546 100644
--- a/bindings/profiler.cc
+++ b/bindings/profiler.cc
@@ -89,8 +89,18 @@ NAN_METHOD(StopSamplingHeapProfiler) {
 // Signature:
 // getAllocationProfile(): AllocationProfileNode
 NAN_METHOD(GetAllocationProfile) {
-  std::unique_ptr<v8::AllocationProfile> profile(
-      info.GetIsolate()->GetHeapProfiler()->GetAllocationProfile());
+  const int kMaxPollCycles = 100;
+  const int kPollIntervalUs = 50000;  // 50 ms
+
+  std::unique_ptr<v8::AllocationProfile> profile;
+  for (int i = 0; i < kMaxPollCycles; ++i) {
+    profile.reset(info.GetIsolate()->GetHeapProfiler()->GetAllocationProfile());
+    if (profile->GetRootNode()->allocations.size() > 0) {
+      break;
+    }
+    uv_sleep(kPollIntervalUs / 1000);
+  }
+
   AllocationProfile::Node* root = profile->GetRootNode();
   info.GetReturnValue().Set(TranslateAllocationProfile(root));
 }
diff --git a/ts/src/heap-profiler.ts b/ts/src/heap-profiler.ts
index 475836d9..162e57e7 100644
--- a/ts/src/heap-profiler.ts
+++ b/ts/src/heap-profiler.ts
@@ -86,7 +86,10 @@ export function profile(
  * @param intervalBytes - average number of bytes between samples.
  * @param stackDepth - maximum stack depth for samples collected.
  */
-export function start(intervalBytes: number, stackDepth: number) {
+export async function start(
+  intervalBytes: number,
+  stackDepth: number
+): Promise<void> {
   if (enabled) {
     throw new Error(
       `Heap profiler is already started with intervalBytes ${heapIntervalBytes} and stackDepth ${stackDepth}`
@@ -96,6 +99,10 @@ export function start(intervalBytes: number, stackDepth: number) {
   heapStackDepth = stackDepth;
   startSamplingHeapProfiler(heapIntervalBytes, heapStackDepth);
   enabled = true;
+  // Wait for 100ms to give the profiler time to initialize.
+  // This is a workaround for a race condition where the profiler is not
+  // ready to collect samples immediately after it is started.
+  await new Promise(resolve => setTimeout(resolve, 100));
 }
 
 // Stops heap profiling. If heap profiling has not been started, does nothing.
diff --git a/ts/test/test-heap-profiler.ts b/ts/test/test-heap-profiler.ts
index 85e36970..676ade97 100644
--- a/ts/test/test-heap-profiler.ts
+++ b/ts/test/test-heap-profiler.ts
@@ -65,7 +65,7 @@ describe('HeapProfiler', () => {
       });
       const intervalBytes = 1024 * 512;
       const stackDepth = 32;
-      heapProfiler.start(intervalBytes, stackDepth);
+      await heapProfiler.start(intervalBytes, stackDepth);
       const profile = heapProfiler.profile();
       assert.deepEqual(heapProfileWithExternal, profile);
     });
@@ -83,7 +83,7 @@ describe('HeapProfiler', () => {
       });
       const intervalBytes = 1024 * 512;
       const stackDepth = 32;
-      heapProfiler.start(intervalBytes, stackDepth);
+      await heapProfiler.start(intervalBytes, stackDepth);
       const profile = heapProfiler.profile();
       assert.deepEqual(heapProfileIncludePath, profile);
     });
@@ -101,7 +101,7 @@ describe('HeapProfiler', () => {
       });
       const intervalBytes = 1024 * 512;
       const stackDepth = 32;
-      heapProfiler.start(intervalBytes, stackDepth);
+      await heapProfiler.start(intervalBytes, stackDepth);
       const profile = heapProfiler.profile('@google-cloud/profiler');
       assert.deepEqual(heapProfileExcludePath, profile);
     });
@@ -117,10 +117,10 @@ describe('HeapProfiler', () => {
      );
    });
 
-    it('should throw error when started then stopped', () => {
+    it('should throw error when started then stopped', async () => {
      const intervalBytes = 1024 * 512;
      const stackDepth = 32;
-      heapProfiler.start(intervalBytes, stackDepth);
+      await heapProfiler.start(intervalBytes, stackDepth);
      heapProfiler.stop();
      assert.throws(
        () => {
@@ -131,22 +131,39 @@ describe('HeapProfiler', () => {
        }
      );
    });
+    it('should return a non-empty profile when profiling immediately after starting', async () => {
+      profileStub = sinon
+        .stub(v8HeapProfiler, 'getAllocationProfile')
+        .returns(copy(v8HeapProfile));
+      memoryUsageStub = sinon.stub(process, 'memoryUsage').returns({
+        external: 0,
+        rss: 2048,
+        heapTotal: 4096,
+        heapUsed: 2048,
+        arrayBuffers: 512,
+      });
+      const intervalBytes = 1024 * 512;
+      const stackDepth = 32;
+      await heapProfiler.start(intervalBytes, stackDepth);
+      const profile = heapProfiler.profile();
+      assert.notDeepEqual({sample: []}, profile);
+    });
  });
 
  describe('start', () => {
-    it('should call startSamplingHeapProfiler', () => {
+    it('should call startSamplingHeapProfiler', async () => {
      const intervalBytes1 = 1024 * 512;
      const stackDepth1 = 32;
-      heapProfiler.start(intervalBytes1, stackDepth1);
+      await heapProfiler.start(intervalBytes1, stackDepth1);
      assert.ok(
        startStub.calledWith(intervalBytes1, stackDepth1),
        'expected startSamplingHeapProfiler to be called'
      );
    });
-    it('should throw error when enabled and started with different parameters', () => {
+    it('should throw error when enabled and started with different parameters', async () => {
      const intervalBytes1 = 1024 * 512;
      const stackDepth1 = 32;
-      heapProfiler.start(intervalBytes1, stackDepth1);
+      await heapProfiler.start(intervalBytes1, stackDepth1);
      assert.ok(
        startStub.calledWith(intervalBytes1, stackDepth1),
        'expected startSamplingHeapProfiler to be called'
@@ -155,7 +172,7 @@ describe('HeapProfiler', () => {
      const intervalBytes2 = 1024 * 128;
      const stackDepth2 = 64;
      try {
-        heapProfiler.start(intervalBytes2, stackDepth2);
+        await heapProfiler.start(intervalBytes2, stackDepth2);
      } catch (e) {
        assert.strictEqual(
          (e as Error).message,
@@ -175,8 +192,8 @@ describe('HeapProfiler', () => {
      heapProfiler.stop();
      assert.ok(!stopStub.called, 'stop() should have been no-op.');
    });
-    it('should call stopSamplingHeapProfiler if profiler started', () => {
-      heapProfiler.start(1024 * 512, 32);
+    it('should call stopSamplingHeapProfiler if profiler started', async () => {
+      await heapProfiler.start(1024 * 512, 32);
      heapProfiler.stop();
      assert.ok(
        stopStub.called,