diff --git a/packages/dd-trace/src/profiling/profiler.js b/packages/dd-trace/src/profiling/profiler.js
index e9f1ced1313..8725c06e2a0 100644
--- a/packages/dd-trace/src/profiling/profiler.js
+++ b/packages/dd-trace/src/profiling/profiler.js
@@ -9,18 +9,10 @@ const dc = require('dc-polyfill')
 const crashtracker = require('../crashtracking')
 const { promisify } = require('util')
-const zlib = require('zlib')
 
 const profileSubmittedChannel = dc.channel('datadog:profiling:profile-submitted')
 const spanFinishedChannel = dc.channel('dd-trace:span:finish')
 
-function maybeSourceMap (sourceMap, SourceMapper, debug) {
-  if (!sourceMap) return
-  return SourceMapper.create([
-    process.cwd()
-  ], debug)
-}
-
 function logError (logger, ...args) {
   if (logger) {
     logger.error(...args)
@@ -50,22 +42,19 @@ class Profiler extends EventEmitter {
     this._timer = undefined
     this._lastStart = undefined
     this._timeoutInterval = undefined
+    this._sourceMapsLoaded = undefined
     this.endpointCounts = new Map()
   }
 
-  start (options) {
-    return this._start(options).catch((err) => {
-      logError(options.logger, 'Error starting profiler. For troubleshooting tips, see ' +
-        '', err)
-      return false
-    })
-  }
-
   _logError (err) {
     logError(this._logger, err)
   }
 
-  async _start (options) {
+  sourceMapsLoaded () {
+    return this._sourceMapsLoaded
+  }
+
+  start (options) {
     if (this._enabled) return true
 
     const config = this._config = new Config(options)
@@ -77,48 +66,41 @@ class Profiler extends EventEmitter {
     // Log errors if the source map finder fails, but don't prevent the rest
     // of the profiler from running without source maps.
     let mapper
-    try {
-      const { setLogger, SourceMapper } = require('@datadog/pprof')
-      setLogger(config.logger)
-
-      mapper = await maybeSourceMap(config.sourceMap, SourceMapper, config.debugSourceMaps)
-      if (config.sourceMap && config.debugSourceMaps) {
-        this._logger.debug(() => {
-          return mapper.infoMap.size === 0
-            ? 'Found no source maps'
-            : `Found source maps for following files: [${[...mapper.infoMap.keys()].join(', ')}]`
-        })
-      }
-
-      const clevel = config.uploadCompression.level
-      switch (config.uploadCompression.method) {
-        case 'gzip':
-          this._compressionFn = promisify(zlib.gzip)
-          if (clevel !== undefined) {
-            this._compressionOptions = {
-              level: clevel
-            }
+    if (config.sourceMap) {
+      try {
+        const { setLogger, SourceMapper } = require('@datadog/pprof')
+        setLogger(config.logger)
+
+        const maybeMapperLoadingPromise = SourceMapper.create([process.cwd()], config.debugSourceMaps)
+        if (maybeMapperLoadingPromise instanceof Promise) {
+          mapper = {
+            hasMappingInfo: () => false,
+            mappingInfo: (l) => l
           }
-          break
-        case 'zstd':
-          if (typeof zlib.zstdCompress === 'function') {
-            this._compressionFn = promisify(zlib.zstdCompress)
-            if (clevel !== undefined) {
-              this._compressionOptions = {
-                params: {
-                  [zlib.constants.ZSTD_c_compressionLevel]: clevel
-                }
-              }
+          this._sourceMapsLoaded = maybeMapperLoadingPromise.then((sourceMap) => {
+            if (config.debugSourceMaps) {
+              this._logger.debug(() => {
+                return sourceMap.infoMap.size === 0
+                  ? 'Found no source maps'
+                  : `Found source maps for following files: [${[...sourceMap.infoMap.keys()].join(', ')}]`
+              })
             }
-          } else {
-            const zstdCompress = require('@datadog/libdatadog').load('datadog-js-zstd').zstd_compress
-            const level = clevel ?? 0 // 0 is zstd default compression level
-            this._compressionFn = (buffer) => Promise.resolve(Buffer.from(zstdCompress(buffer, level)))
-          }
-          break
+            mapper.hasMappingInfo = sourceMap.hasMappingInfo.bind(sourceMap)
+            mapper.mappingInfo = sourceMap.mappingInfo.bind(sourceMap)
+          }).catch((err) => {
+            this._logError(err)
+          })
+        } else {
+          // If the result of SourceMapper.create is not a promise, it is already loaded
+          mapper = maybeMapperLoadingPromise
+        }
+      } catch (err) {
+        this._logError(err)
       }
-    } catch (err) {
-      this._logError(err)
+    }
+
+    if (this._sourceMapsLoaded === undefined) {
+      this._sourceMapsLoaded = Promise.resolve()
     }
 
     try {
@@ -217,6 +199,45 @@
   }
 
+  _getCompressionFn () {
+    if (this._compressionFn === undefined) {
+      try {
+        const method = this._config.uploadCompression.method
+        if (method === 'off') {
+          return
+        }
+        const zlib = require('zlib')
+        const clevel = this._config.uploadCompression.level
+        if (method === 'gzip') {
+          this._compressionFn = promisify(zlib.gzip)
+          if (clevel !== undefined) {
+            this._compressionOptions = {
+              level: clevel
+            }
+          }
+        } else if (method === 'zstd') {
+          if (typeof zlib.zstdCompress === 'function') {
+            this._compressionFn = promisify(zlib.zstdCompress)
+            if (clevel !== undefined) {
+              this._compressionOptions = {
+                params: {
+                  [zlib.constants.ZSTD_c_compressionLevel]: clevel
+                }
+              }
+            }
+          } else {
+            const zstdCompress = require('@datadog/libdatadog').load('datadog-js-zstd').zstd_compress
+            const level = clevel ?? 0 // 0 is zstd default compression level
+            this._compressionFn = (buffer) => Promise.resolve(Buffer.from(zstdCompress(buffer, level)))
+          }
+        }
+      } catch (err) {
+        this._logError(err)
+      }
+    }
+    return this._compressionFn
+  }
+
   async _collect (snapshotKind, restart = true) {
     if (!this._enabled) return
 
@@ -252,9 +273,13 @@
     await Promise.all(profiles.map(async ({ profiler, profile }) => {
       try {
         const encoded = await profiler.encode(profile)
-        const compressed = encoded instanceof Buffer && this._compressionFn !== undefined
-          ? await this._compressionFn(encoded, this._compressionOptions)
-          : encoded
+        let compressed = encoded
+        if (encoded instanceof Buffer) {
+          const compressionFn = this._getCompressionFn()
+          if (compressionFn !== undefined) {
+            compressed = await compressionFn(encoded, this._compressionOptions)
+          }
+        }
         encodedProfiles[profiler.type] = compressed
         this._logger.debug(() => {
           const profileJson = JSON.stringify(profile, (key, value) => {
diff --git a/packages/dd-trace/src/profiling/profilers/wall.js b/packages/dd-trace/src/profiling/profilers/wall.js
index c39550fc654..1758d4a0221 100644
--- a/packages/dd-trace/src/profiling/profilers/wall.js
+++ b/packages/dd-trace/src/profiling/profilers/wall.js
@@ -86,7 +86,6 @@ class NativeWallProfiler {
     // cpu profiling is enabled.
     this._withContexts = this._captureSpanData || this._timelineEnabled || this._cpuProfilingEnabled
     this._v8ProfilerBugWorkaroundEnabled = !!options.v8ProfilerBugWorkaroundEnabled
-    this._mapper = undefined
     this._pprof = undefined
 
     // Bind these to this so they can be used as callbacks
@@ -111,7 +110,6 @@ class NativeWallProfiler {
   start ({ mapper } = {}) {
     if (this._started) return
 
-    this._mapper = mapper
     this._pprof = require('@datadog/pprof')
     kSampleCount = this._pprof.time.constants.kSampleCount
 
@@ -126,7 +124,7 @@ class NativeWallProfiler {
     this._pprof.time.start({
       intervalMicros: this._samplingIntervalMicros,
       durationMillis: this._flushIntervalMillis,
-      sourceMapper: this._mapper,
+      sourceMapper: mapper,
       withContexts: this._withContexts,
       lineNumbers: false,
       workaroundV8Bug: this._v8ProfilerBugWorkaroundEnabled,
diff --git a/packages/dd-trace/src/proxy.js b/packages/dd-trace/src/proxy.js
index aa678773c53..e312867b2c1 100644
--- a/packages/dd-trace/src/proxy.js
+++ b/packages/dd-trace/src/proxy.js
@@ -159,7 +159,8 @@ class Tracer extends NoopProxy {
         ssiHeuristics.start()
         let mockProfiler = null
         if (config.profiling.enabled === 'true') {
-          this._profilerStarted = this._startProfiler(config)
+          this._startProfiler(config)
+          this._profilerStarted = Promise.resolve(true)
         } else if (ssiHeuristics.emitsTelemetry) {
           // Start a mock profiler that emits mock profile-submitted events for the telemetry.
           // It will be stopped if the real profiler is started by the heuristics.
diff --git a/packages/dd-trace/test/profiling/profiler.spec.js b/packages/dd-trace/test/profiling/profiler.spec.js
index afdfb86a85d..8810d4b8851 100644
--- a/packages/dd-trace/test/profiling/profiler.spec.js
+++ b/packages/dd-trace/test/profiling/profiler.spec.js
@@ -106,15 +106,15 @@ describe('profiler', function () {
     })
 
     it('should start the internal time profilers', async () => {
-      await profiler._start({ profilers, exporters })
+      profiler.start({ profilers, exporters })
 
       sinon.assert.calledOnce(wallProfiler.start)
       sinon.assert.calledOnce(spaceProfiler.start)
     })
 
     it('should start only once', async () => {
-      await profiler._start({ profilers, exporters })
-      await profiler._start({ profilers, exporters })
+      profiler.start({ profilers, exporters })
+      profiler.start({ profilers, exporters })
 
       sinon.assert.calledOnce(wallProfiler.start)
       sinon.assert.calledOnce(spaceProfiler.start)
@@ -127,7 +127,7 @@
       ]
 
       for (const exporters of checks) {
-        await profiler._start({
+        profiler.start({
          sourceMap: false,
           exporters
         })
@@ -149,7 +149,7 @@
      ].map(profilers => profilers.filter(profiler => samplingContextsAvailable || profiler !== EventsProfiler))
 
      for (const [profilers, ...expected] of checks) {
-        await profiler._start({
+        profiler.start({
          sourceMap: false,
          profilers
        })
@@ -164,7 +164,7 @@
     })
 
     it('should stop the internal profilers', async () => {
-      await profiler._start({ profilers, exporters })
+      profiler.start({ profilers, exporters })
       profiler.stop()
 
       sinon.assert.calledOnce(wallProfiler.stop)
@@ -174,7 +174,7 @@
     it('should stop when starting failed', async () => {
       wallProfiler.start.throws()
 
-      await profiler._start({ profilers, exporters, logger })
+      profiler.start({ profilers, exporters, logger })
 
       sinon.assert.calledOnce(wallProfiler.stop)
       sinon.assert.calledOnce(spaceProfiler.stop)
@@ -184,7 +184,7 @@
     it('should stop when capturing failed', async () => {
       wallProfiler.profile.throws(new Error('boom'))
 
-      await profiler._start({ profilers, exporters, logger })
+      profiler.start({ profilers, exporters, logger })
 
       clock.tick(interval)
 
@@ -200,7 +200,7 @@
       const rejected = Promise.reject(new Error('boom'))
       wallProfiler.encode.returns(rejected)
 
-      await profiler._start({ profilers, exporters, logger })
+      profiler.start({ profilers, exporters, logger })
 
       clock.tick(interval)
 
@@ -217,7 +217,7 @@
       const rejected = Promise.reject(new Error('boom'))
       exporter.export.returns(rejected)
 
-      await profiler._start({ profilers, exporters, logger })
+      profiler.start({ profilers, exporters, logger })
 
       clock.tick(interval)
 
@@ -230,7 +230,7 @@
     })
 
     it('should flush when the interval is reached', async () => {
-      await profiler._start({ profilers, exporters })
+      profiler.start({ profilers, exporters })
 
       clock.tick(interval)
 
@@ -240,7 +240,7 @@
     it('should flush when the profiler is stopped', async () => {
-      await profiler._start({ profilers, exporters })
+      profiler.start({ profilers, exporters })
 
       profiler.stop()
 
@@ -268,7 +268,7 @@
       process.env = {
         DD_PROFILING_DEBUG_UPLOAD_COMPRESSION: compression
       }
-      await profiler._start({ profilers, exporters, tags: { foo: 'foo' } })
+      profiler.start({ profilers, exporters, tags: { foo: 'foo' } })
       process.env = env
 
       clock.tick(interval)
@@ -310,7 +310,7 @@
     it('should log exporter errors', async () => {
       exporter.export.rejects(new Error('boom'))
 
-      await profiler._start({ profilers, exporters, logger })
+      profiler.start({ profilers, exporters, logger })
 
       clock.tick(interval)
 
@@ -322,7 +322,7 @@
     it('should log encoded profile', async () => {
       exporter.export.rejects(new Error('boom'))
 
-      await profiler._start({ profilers, exporters, logger })
+      profiler.start({ profilers, exporters, logger })
 
       clock.tick(interval)
 
@@ -347,7 +347,7 @@
     })
 
     it('should have a new start time for each capture', async () => {
-      await profiler._start({ profilers, exporters })
+      profiler.start({ profilers, exporters })
 
       clock.tick(interval)
       await waitForExport()
@@ -374,7 +374,7 @@
     })
 
     it('should not pass source mapper to profilers when disabled', async () => {
-      await profiler._start({ profilers, exporters, sourceMap: false })
+      profiler.start({ profilers, exporters, sourceMap: false })
 
       const options = profilers[0].start.args[0][0]
       expect(options).to.have.property('mapper', undefined)
@@ -383,7 +383,7 @@
     it('should pass source mapper to profilers when enabled', async () => {
       const mapper = {}
       sourceMapCreate.returns(mapper)
-      await profiler._start({ profilers, exporters, sourceMap: true })
+      profiler.start({ profilers, exporters, sourceMap: true })
 
       const options = profilers[0].start.args[0][0]
       expect(options).to.have.property('mapper')
@@ -393,7 +393,8 @@
     it('should work with a root working dir and source maps on', async () => {
       const error = new Error('fail')
       sourceMapCreate.rejects(error)
-      await profiler._start({ profilers, exporters, logger, sourceMap: true })
+      profiler.start({ profilers, exporters, logger, sourceMap: true })
+      await profiler.sourceMapsLoaded()
       expect(consoleLogger.error.args[0][0]).to.equal(error)
       expect(profiler._enabled).to.equal(true)
     })
@@ -429,7 +430,7 @@ describe('profiler', function () {
     })
 
     it('should increment profiled intervals after one interval elapses', async () => {
-      await profiler._start({ profilers, exporters })
+      profiler.start({ profilers, exporters })
       expect(profiler._profiledIntervals).to.equal(0)
 
       clock.tick(interval)
@@ -439,7 +440,7 @@ describe('profiler', function () {
     })
 
     it('should flush when flush after intervals is reached', async () => {
-      await profiler._start({ profilers, exporters })
+      profiler.start({ profilers, exporters })
 
       // flushAfterIntervals + 1 becauses flushes after last interval
       for (let i = 0; i < flushAfterIntervals + 1; i++) {
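
Editorial sketch, not part of the patch: the profiler.js hunk above relies on SourceMapper.create from @datadog/pprof returning either a ready source mapper or a promise of one, and bridges the gap with a pass-through stub whose hasMappingInfo/mappingInfo methods are swapped once loading finishes, exposing the completion promise through sourceMapsLoaded(). The snippet below restates that pattern in isolation; the helper name deferredSourceMapper and its return shape are illustrative only and do not exist in dd-trace.

// Illustrative sketch of the deferred source-map pattern used above.
function deferredSourceMapper (maybeMapperOrPromise, onError) {
  if (!(maybeMapperOrPromise instanceof Promise)) {
    // Loaded synchronously: use it as-is, nothing left to wait for.
    return { mapper: maybeMapperOrPromise, loaded: Promise.resolve() }
  }
  const mapper = {
    hasMappingInfo: () => false, // no mappings known yet
    mappingInfo: (location) => location // identity mapping until loaded
  }
  const loaded = maybeMapperOrPromise.then((real) => {
    // Swap in the real lookups; callers already holding `mapper` see them immediately.
    mapper.hasMappingInfo = real.hasMappingInfo.bind(real)
    mapper.mappingInfo = real.mappingInfo.bind(real)
  }).catch(onError)
  return { mapper, loaded }
}

Handing mapper to the profilers right away keeps start() synchronous, while awaiting loaded is the analogue of awaiting profiler.sourceMapsLoaded() in the updated test.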