From e06c376f21b5d5995b7c4467055eee683c0cd918 Mon Sep 17 00:00:00 2001 From: Thomas Watson Date: Tue, 8 Jul 2025 14:31:50 +0200 Subject: [PATCH 01/53] perf: improve algorithm for getting callsites in AppSec (#6044) Use the `Error.captureStackTrace` API when triggering `Error.prepareStackTrace` in order to be able to provide a constructor function. This is used to reduce the number of unnecessary frames being generated. --- .../src/appsec/iast/vulnerability-reporter.js | 2 +- packages/dd-trace/src/appsec/rasp/utils.js | 2 +- packages/dd-trace/src/appsec/stack_trace.js | 22 +++++++++---------- .../test/appsec/iast/path-line.spec.js | 2 +- .../dd-trace/test/appsec/stack_trace.spec.js | 18 +++++++-------- 5 files changed, 23 insertions(+), 23 deletions(-) diff --git a/packages/dd-trace/src/appsec/iast/vulnerability-reporter.js b/packages/dd-trace/src/appsec/iast/vulnerability-reporter.js index ce03870aaab..a4d300f7616 100644 --- a/packages/dd-trace/src/appsec/iast/vulnerability-reporter.js +++ b/packages/dd-trace/src/appsec/iast/vulnerability-reporter.js @@ -114,7 +114,7 @@ function isDuplicatedVulnerability (vulnerability) { } function getVulnerabilityCallSiteFrames () { - return getCallsiteFrames(stackTraceMaxDepth) + return getCallsiteFrames(stackTraceMaxDepth, getVulnerabilityCallSiteFrames) } function replaceCallSiteFromSourceMap (callsite) { diff --git a/packages/dd-trace/src/appsec/rasp/utils.js b/packages/dd-trace/src/appsec/rasp/utils.js index 4fc891cf1a9..f3b3cea6ba9 100644 --- a/packages/dd-trace/src/appsec/rasp/utils.js +++ b/packages/dd-trace/src/appsec/rasp/utils.js @@ -41,7 +41,7 @@ function handleResult (result, req, res, abortController, config, raspRule) { const ruleTriggered = !!result?.events?.length if (generateStackTraceAction && enabled && canReportStackTrace(rootSpan, maxStackTraces)) { - const frames = getCallsiteFrames(maxDepth) + const frames = getCallsiteFrames(maxDepth, handleResult) reportStackTrace( rootSpan, diff --git a/packages/dd-trace/src/appsec/stack_trace.js b/packages/dd-trace/src/appsec/stack_trace.js index 448b19c9d53..c40d1124d19 100644 --- a/packages/dd-trace/src/appsec/stack_trace.js +++ b/packages/dd-trace/src/appsec/stack_trace.js @@ -9,36 +9,36 @@ const STACK_TRACE_NAMESPACES = { IAST: 'vulnerability' } -function getCallSiteList (maxDepth = 100) { +function prepareStackTrace (_, callsites) { + return callsites +} + +function getCallSiteList (maxDepth = 100, constructorOpt) { const previousPrepareStackTrace = Error.prepareStackTrace const previousStackTraceLimit = Error.stackTraceLimit - let callsiteList // Since some frames will be discarded because they come from tracer codebase, a buffer is added // to the limit in order to get as close as `maxDepth` number of frames. 
Error.stackTraceLimit = maxDepth + LIBRARY_FRAMES_BUFFER try { - Error.prepareStackTrace = function (_, callsites) { - callsiteList = callsites - } - const e = new Error('message') - e.stack + Error.prepareStackTrace = prepareStackTrace + const obj = {} + Error.captureStackTrace(obj, constructorOpt) + return obj.stack } finally { Error.prepareStackTrace = previousPrepareStackTrace Error.stackTraceLimit = previousStackTraceLimit } - - return callsiteList } function filterOutFramesFromLibrary (callSiteList) { return callSiteList.filter(callSite => !callSite.getFileName()?.startsWith(ddBasePath)) } -function getCallsiteFrames (maxDepth = 32, callSiteListGetter = getCallSiteList) { +function getCallsiteFrames (maxDepth = 32, constructorOpt = getCallsiteFrames, callSiteListGetter = getCallSiteList) { if (maxDepth < 1) maxDepth = Infinity - const callSiteList = callSiteListGetter(maxDepth) + const callSiteList = callSiteListGetter(maxDepth, constructorOpt) const filteredFrames = filterOutFramesFromLibrary(callSiteList) const half = filteredFrames.length > maxDepth ? Math.round(maxDepth / 2) : Infinity diff --git a/packages/dd-trace/test/appsec/iast/path-line.spec.js b/packages/dd-trace/test/appsec/iast/path-line.spec.js index 592ceb9f821..ae669b37b24 100644 --- a/packages/dd-trace/test/appsec/iast/path-line.spec.js +++ b/packages/dd-trace/test/appsec/iast/path-line.spec.js @@ -199,7 +199,7 @@ describe('path-line', function () { const basePath = pathLine.ddBasePath pathLine.ddBasePath = path.join('test', 'base', 'path') - const list = getCallsiteFrames(32, getCallSiteInfo) + const list = getCallsiteFrames(32, getCallSiteInfo, getCallSiteInfo) const firstNonDDPath = pathLine.getNonDDCallSiteFrames(list)[0] const expectedPath = path.join('node_modules', firstNonDDPath.path) diff --git a/packages/dd-trace/test/appsec/stack_trace.spec.js b/packages/dd-trace/test/appsec/stack_trace.spec.js index 406944c0381..6599d0caa66 100644 --- a/packages/dd-trace/test/appsec/stack_trace.spec.js +++ b/packages/dd-trace/test/appsec/stack_trace.spec.js @@ -66,7 +66,7 @@ describe('Stack trace reporter', () => { const rootSpan = {} const stackId = 'test_stack_id' const maxDepth = 32 - const frames = getCallsiteFrames(maxDepth, () => callSiteList) + const frames = getCallsiteFrames(maxDepth, getCallsiteFrames, () => callSiteList) reportStackTrace(rootSpan, stackId, frames) @@ -112,7 +112,7 @@ describe('Stack trace reporter', () => { } )) - const frames = getCallsiteFrames(maxDepth, () => callSiteList) + const frames = getCallsiteFrames(maxDepth, getCallsiteFrames, () => callSiteList) reportStackTrace(rootSpan, stackId, frames) @@ -141,7 +141,7 @@ describe('Stack trace reporter', () => { } )) - const frames = getCallsiteFrames(maxDepth, () => callSiteList) + const frames = getCallsiteFrames(maxDepth, getCallsiteFrames, () => callSiteList) reportStackTrace(rootSpan, stackId, frames) @@ -174,7 +174,7 @@ describe('Stack trace reporter', () => { } )) - const frames = getCallsiteFrames(maxDepth, () => callSiteList) + const frames = getCallsiteFrames(maxDepth, getCallsiteFrames, () => callSiteList) reportStackTrace(rootSpan, stackId, frames) @@ -216,7 +216,7 @@ describe('Stack trace reporter', () => { const stackId = 'test_stack_id' const maxDepth = 32 - const frames = getCallsiteFrames(maxDepth, () => callSiteList) + const frames = getCallsiteFrames(maxDepth, getCallsiteFrames, () => callSiteList) reportStackTrace(rootSpan, stackId, frames) @@ -265,7 +265,7 @@ describe('Stack trace reporter', () => { } )) - const frames = 
getCallsiteFrames(maxDepth, () => callSiteList) + const frames = getCallsiteFrames(maxDepth, getCallsiteFrames, () => callSiteList) reportStackTrace(rootSpan, stackId, frames) @@ -316,7 +316,7 @@ describe('Stack trace reporter', () => { } )) - const frames = getCallsiteFrames(maxDepth, () => callSiteListWithLibraryFrames) + const frames = getCallsiteFrames(maxDepth, getCallsiteFrames, () => callSiteListWithLibraryFrames) reportStackTrace(rootSpan, stackId, frames) @@ -339,7 +339,7 @@ describe('Stack trace reporter', () => { } )) - const frames = getCallsiteFrames(maxDepth, () => callSiteList) + const frames = getCallsiteFrames(maxDepth, getCallsiteFrames, () => callSiteList) reportStackTrace(rootSpan, stackId, frames) @@ -362,7 +362,7 @@ describe('Stack trace reporter', () => { } )) - const frames = getCallsiteFrames(maxDepth, () => callSiteList) + const frames = getCallsiteFrames(maxDepth, getCallsiteFrames, () => callSiteList) reportStackTrace(rootSpan, stackId, frames) From 97bc0c15d332ec5d0980fc2f9d9c5f34e00747cd Mon Sep 17 00:00:00 2001 From: Thomas Watson Date: Tue, 8 Jul 2025 14:54:18 +0200 Subject: [PATCH 02/53] Enable recommended rules for eslint-plugin-n (#5216) Enables all recommended rules from the `eslint-plugin-n` ESLint plugin that can reasonably be enabled without too much work. The following rules have been disabled as they cause too many errors: - `n/hashbang` - `n/no-process-exit` - `n/no-missing-require` (only disabled in benchmarks and tests) All `eslint-plugin-n` rules are also disabled for `**/*.mjs` files as these for some reason resulting in parsing errors. Finally a select list of experimental Node.js APIs have been allowed in `n/no-unsupported-features/node-builtins`, so that we can use them without having to add ESLint comments all over the place: - `Response` - `async_hooks.createHook` - `async_hooks.executionAsyncId` - `async_hooks.executionAsyncResource` - `fetch` - `fs/promises.cp` --- LICENSE-3rdparty.csv | 1 + esbuild.js | 2 + eslint.config.mjs | 49 +++++++++++++++++++ index.js | 2 + init.js | 2 + initialize.mjs | 2 + integration-tests/appsec/esm-app/index.mjs | 1 + .../ci-visibility/subproject/package.json | 5 +- .../subproject/subproject-test.js | 2 + integration-tests/esbuild/index.spec.js | 2 + package.json | 1 + .../datadog-instrumentations/src/fetch.js | 1 + .../datadog-plugin-openai/test/index.spec.js | 6 +-- .../appsec/iast/taint-tracking/rewriter.js | 1 + .../src/debugger/devtools_client/index.js | 16 ++++-- .../inspector_promises_polyfill.js | 1 + .../profiling/exporters/event_serializer.js | 3 ++ .../src/runtime_metrics/runtime_metrics.js | 1 + register.js | 2 + scripts/verify-ci-config.js | 1 + yarn.lock | 6 +-- 21 files changed, 96 insertions(+), 11 deletions(-) diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index accec9c62df..9b2483261c4 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -68,6 +68,7 @@ dev,sinon,BSD-3-Clause,Copyright 2010-2017 Christian Johansen dev,sinon-chai,WTFPL and BSD-2-Clause,Copyright 2004 Sam Hocevar 2012–2017 Domenic Denicola dev,tap,ISC,Copyright 2011-2022 Isaac Z. Schlueter and Contributors dev,tiktoken,MIT,Copyright (c) 2022 OpenAI, Shantanu Jain +dev,workerpool,Apache license 2.0,Copyright (C) 2014-2024 Jos de Jong wjosdejong@gmail.com dev,yaml,ISC,Copyright Eemeli Aro dev,yarn-deduplicate,Apache license 2.0,Copyright [yyyy] [name of copyright owner] file,aws-lambda-nodejs-runtime-interface-client,Apache 2.0,Copyright 2019 Amazon.com Inc. or its affiliates. All Rights Reserved. 
diff --git a/esbuild.js b/esbuild.js index 424ba5cb908..5c80493515a 100644 --- a/esbuild.js +++ b/esbuild.js @@ -1,3 +1,5 @@ 'use strict' +// TODO: It shouldn't be necessary to disable n/no-unpublished-require - Research +// eslint-disable-next-line n/no-unpublished-require module.exports = require('./packages/datadog-esbuild/index.js') diff --git a/eslint.config.mjs b/eslint.config.mjs index 475cde84ac6..622745fdf11 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -286,6 +286,19 @@ export default [ yoda: ['error', 'never'] } }, + { + ...eslintPluginN.configs['flat/recommended'], + ignores: [ + 'integration-tests/debugger/target-app/re-evaluation/index.js', + 'integration-tests/debugger/target-app/re-evaluation/unique-filename.js', + 'packages/dd-trace/test/appsec/next/app-dir/**/*.js', + 'packages/dd-trace/test/appsec/next/pages-dir/**/*.js', + 'packages/datadog-plugin-next/test/app/**/*.js', + 'packages/datadog-plugin-next/test/**/pages/**/*.js', + 'packages/datadog-plugin-next/test/middleware.js', + '**/*.mjs' // TODO: This shoudln't be required, research why it is + ] + }, { name: 'dd-trace/defaults', @@ -328,6 +341,18 @@ export default [ }], 'import/no-extraneous-dependencies': 'error', 'n/no-restricted-require': ['error', ['diagnostics_channel']], + 'n/hashbang': 'off', // TODO: Enable this rule once we have a plan to address it + 'n/no-process-exit': 'off', // TODO: Enable this rule once we have a plan to address it + 'n/no-unsupported-features/node-builtins': ['error', { + ignores: [ + 'Response', + 'async_hooks.createHook', + 'async_hooks.executionAsyncId', + 'async_hooks.executionAsyncResource', + 'fetch', + 'fs/promises.cp' + ] + }], 'no-console': 'error', 'no-prototype-builtins': 'off', // Override (turned on by @eslint/js/recommended) 'no-var': 'error', @@ -421,6 +446,26 @@ export default [ ...eslintPluginMocha.configs.flat.recommended, files: TEST_FILES }, + { + name: 'dd-trace/benchmarks', + files: [ + 'benchmark/**/*' + ], + rules: { + 'n/no-missing-require': 'off' + } + }, + { + name: 'dd-trace/scripts', + files: [ + 'scripts/**/*' + ], + rules: { + 'n/no-unsupported-features/node-builtins': ['error', { + allowExperimental: true + }] + } + }, { name: 'dd-trace/tests/all', files: TEST_FILES, @@ -447,6 +492,10 @@ export default [ 'mocha/no-skipped-tests': 'off', 'mocha/no-top-level-hooks': 'off', 'n/handle-callback-err': 'off', + 'n/no-missing-require': 'off', + 'n/no-unsupported-features/node-builtins': ['error', { + allowExperimental: true + }], 'require-await': 'off' } }, diff --git a/index.js b/index.js index a8c61274ad8..ed48ff2fea5 100644 --- a/index.js +++ b/index.js @@ -1,3 +1,5 @@ 'use strict' +// TODO: It shouldn't be necessary to disable n/no-unpublished-require - Research +// eslint-disable-next-line n/no-unpublished-require module.exports = require('./packages/dd-trace') diff --git a/init.js b/init.js index 625d493b3b1..63fa9ba96be 100644 --- a/init.js +++ b/init.js @@ -2,6 +2,8 @@ /* eslint-disable no-var */ +// TODO: It shouldn't be necessary to disable n/no-unpublished-require - Research +// eslint-disable-next-line n/no-unpublished-require var guard = require('./packages/dd-trace/src/guardrails') module.exports = guard(function () { diff --git a/initialize.mjs b/initialize.mjs index 42787ce3d02..f60a544be7d 100644 --- a/initialize.mjs +++ b/initialize.mjs @@ -10,6 +10,8 @@ * hook will always be active for ESM support. 
*/ +/* eslint n/no-unsupported-features/node-builtins: ['error', { ignores: ['module.register'] }] */ + import { isMainThread } from 'worker_threads' import * as Module from 'node:module' diff --git a/integration-tests/appsec/esm-app/index.mjs b/integration-tests/appsec/esm-app/index.mjs index e0285273f99..4fa4e23570e 100644 --- a/integration-tests/appsec/esm-app/index.mjs +++ b/integration-tests/appsec/esm-app/index.mjs @@ -1,4 +1,5 @@ 'use strict' +/* eslint n/no-unsupported-features/node-builtins: ['error', { ignores: ['module.register'] }] */ import childProcess from 'node:child_process' import express from 'express' diff --git a/integration-tests/ci-visibility/subproject/package.json b/integration-tests/ci-visibility/subproject/package.json index dc1d9050f8e..c3fded33440 100644 --- a/integration-tests/ci-visibility/subproject/package.json +++ b/integration-tests/ci-visibility/subproject/package.json @@ -2,5 +2,8 @@ "name": "subproject", "private": true, "version": "1.0.0", - "description": "app within repo" + "description": "app within repo", + "dependencies": { + "dd-trace": "file:../../.." + } } diff --git a/integration-tests/ci-visibility/subproject/subproject-test.js b/integration-tests/ci-visibility/subproject/subproject-test.js index 1545789c108..64cdd384939 100644 --- a/integration-tests/ci-visibility/subproject/subproject-test.js +++ b/integration-tests/ci-visibility/subproject/subproject-test.js @@ -1,3 +1,5 @@ +// TODO: It shouldn't be necessary to disable n/no-extraneous-require - Research +// eslint-disable-next-line n/no-extraneous-require const { expect } = require('chai') const dependency = require('./dependency') diff --git a/integration-tests/esbuild/index.spec.js b/integration-tests/esbuild/index.spec.js index 4a1dee5b788..01fe1e6cef1 100755 --- a/integration-tests/esbuild/index.spec.js +++ b/integration-tests/esbuild/index.spec.js @@ -7,6 +7,8 @@ const chproc = require('child_process') const path = require('path') const fs = require('fs') +// TODO: It shouldn't be necessary to disable n/no-extraneous-require - Research +// eslint-disable-next-line n/no-extraneous-require const { assert } = require('chai') const TEST_DIR = path.join(__dirname, '.') diff --git a/package.json b/package.json index 887ac2f0ef1..3f3234c49b9 100644 --- a/package.json +++ b/package.json @@ -156,6 +156,7 @@ "sinon-chai": "^3.7.0", "tap": "^16.3.10", "tiktoken": "^1.0.21", + "workerpool": "^9.2.0", "yaml": "^2.8.0", "yarn-deduplicate": "^6.0.2" } diff --git a/packages/datadog-instrumentations/src/fetch.js b/packages/datadog-instrumentations/src/fetch.js index 8a1c855790e..9a3fe148f23 100644 --- a/packages/datadog-instrumentations/src/fetch.js +++ b/packages/datadog-instrumentations/src/fetch.js @@ -1,4 +1,5 @@ 'use strict' +/* eslint n/no-unsupported-features/node-builtins: ['error', { ignores: ['fetch', 'Request'] }] */ const { isInServerlessEnvironment } = require('../../dd-trace/src/serverless') diff --git a/packages/datadog-plugin-openai/test/index.spec.js b/packages/datadog-plugin-openai/test/index.spec.js index 6035af1b6bf..501b9b06e18 100644 --- a/packages/datadog-plugin-openai/test/index.spec.js +++ b/packages/datadog-plugin-openai/test/index.spec.js @@ -42,7 +42,7 @@ describe('Plugin', () => { after(() => { if (semver.satisfies(realVersion, '>=5.0.0') && NODE_MAJOR < 20) { - global.File = globalFile + global.File = globalFile // eslint-disable-line n/no-unsupported-features/node-builtins } return agent.close({ ritmReset: false }) @@ -62,8 +62,8 @@ describe('Plugin', () => { * Error: 
`File` is not defined as a global, which is required for file uploads. * Update to Node 20 LTS or newer, or set `globalThis.File` to `import('node:buffer').File`. */ - globalFile = global.File - global.File = require('node:buffer').File + globalFile = global.File // eslint-disable-line n/no-unsupported-features/node-builtins + global.File = require('node:buffer').File // eslint-disable-line n/no-unsupported-features/node-builtins } if (semver.satisfies(realVersion, '>=4.0.0')) { diff --git a/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js b/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js index ab0b5ec878f..3415e13bc22 100644 --- a/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js +++ b/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js @@ -1,4 +1,5 @@ 'use strict' +/* eslint n/no-unsupported-features/node-builtins: ['error', { ignores: ['module.register'] }] */ const Module = require('module') const { pathToFileURL } = require('url') diff --git a/packages/dd-trace/src/debugger/devtools_client/index.js b/packages/dd-trace/src/debugger/devtools_client/index.js index 321c655c67d..10ba81032fd 100644 --- a/packages/dd-trace/src/debugger/devtools_client/index.js +++ b/packages/dd-trace/src/debugger/devtools_client/index.js @@ -40,11 +40,19 @@ const SUPPORT_ARRAY_BUFFER_RESIZE = NODE_MAJOR >= 20 const oneSecondNs = 1_000_000_000n let globalSnapshotSamplingRateWindowStart = 0n let snapshotsSampledWithinTheLastSecond = 0 -// TODO: Is a limit of 256 snapshots ever going to be a problem? -const snapshotProbeIndexBuffer = new ArrayBuffer(1, { maxByteLength: 256 }) -// TODO: Is a limit of 256 probes ever going to be a problem? + // TODO: Change to const once we drop support for Node.js 18 -let snapshotProbeIndex = new Uint8Array(snapshotProbeIndexBuffer) +let snapshotProbeIndexBuffer, snapshotProbeIndex + +if (SUPPORT_ARRAY_BUFFER_RESIZE) { + // TODO: Is a limit of 256 snapshots ever going to be a problem? + // eslint-disable-next-line n/no-unsupported-features/es-syntax + snapshotProbeIndexBuffer = new ArrayBuffer(1, { maxByteLength: 256 }) + // TODO: Is a limit of 256 probes ever going to be a problem? + snapshotProbeIndex = new Uint8Array(snapshotProbeIndexBuffer) +} else { + snapshotProbeIndex = new Uint8Array(1) +} // WARNING: The code above the line `await session.post('Debugger.resume')` is highly optimized. Please edit with care! 
session.on('Debugger.paused', async ({ params }) => { diff --git a/packages/dd-trace/src/debugger/devtools_client/inspector_promises_polyfill.js b/packages/dd-trace/src/debugger/devtools_client/inspector_promises_polyfill.js index bb4b0340be6..a940065a62d 100644 --- a/packages/dd-trace/src/debugger/devtools_client/inspector_promises_polyfill.js +++ b/packages/dd-trace/src/debugger/devtools_client/inspector_promises_polyfill.js @@ -1,4 +1,5 @@ 'use strict' +/* eslint n/no-unsupported-features/node-builtins: ['error', { ignores: ['inspector/promises'] }] */ const { builtinModules } = require('node:module') diff --git a/packages/dd-trace/src/profiling/exporters/event_serializer.js b/packages/dd-trace/src/profiling/exporters/event_serializer.js index 4a3e591d97e..199482b6661 100644 --- a/packages/dd-trace/src/profiling/exporters/event_serializer.js +++ b/packages/dd-trace/src/profiling/exporters/event_serializer.js @@ -1,3 +1,6 @@ +'use strict' +/* eslint n/no-unsupported-features/node-builtins: ['error', { ignores: ['os.availableParallelism'] }] */ + const os = require('os') const perf = require('perf_hooks').performance const version = require('../../../../../package.json').version diff --git a/packages/dd-trace/src/runtime_metrics/runtime_metrics.js b/packages/dd-trace/src/runtime_metrics/runtime_metrics.js index ab7865093a9..1417851427b 100644 --- a/packages/dd-trace/src/runtime_metrics/runtime_metrics.js +++ b/packages/dd-trace/src/runtime_metrics/runtime_metrics.js @@ -1,4 +1,5 @@ 'use strict' +/* eslint n/no-unsupported-features/node-builtins: ['error', { ignores: ['v8.GCProfiler'] }] */ // TODO: capture every second and flush every 10 seconds diff --git a/register.js b/register.js index 5ce6d6dec06..5189a0ffede 100644 --- a/register.js +++ b/register.js @@ -1,3 +1,5 @@ +/* eslint n/no-unsupported-features/node-builtins: ['error', { version: '>=20.6.0', allowExperimental: true }] */ + const { register } = require('node:module') const { pathToFileURL } = require('node:url') diff --git a/scripts/verify-ci-config.js b/scripts/verify-ci-config.js index 486fa282626..5f83f4cae88 100644 --- a/scripts/verify-ci-config.js +++ b/scripts/verify-ci-config.js @@ -1,5 +1,6 @@ 'use strict' /* eslint-disable no-console */ +/* eslint n/no-unsupported-features/node-builtins: ['error', { version: '>=22.0.0' }] */ const fs = require('fs') const path = require('path') diff --git a/yarn.lock b/yarn.lock index 5f3e61dab2c..846fed4c811 100644 --- a/yarn.lock +++ b/yarn.lock @@ -5023,9 +5023,9 @@ word-wrap@^1.2.5: integrity sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA== workerpool@^9.2.0: - version "9.3.2" - resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-9.3.2.tgz#4c045a8b437ae1bc70c646af11929a8b4d238656" - integrity sha512-Xz4Nm9c+LiBHhDR5bDLnNzmj6+5F+cyEAWPMkbs2awq/dYazR/efelZzUAjB/y3kNHL+uzkHvxVVpaOfGCPV7A== + version "9.3.3" + resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-9.3.3.tgz#e75281fe62e851afb21cdeef8fa85f6a62ec3583" + integrity sha512-slxCaKbYjEdFT/o2rH9xS1hf4uRDch1w7Uo+apxhZ+sf/1d9e0ZVkn42kPNGP2dgjIx6YFvSevj0zHvbWe2jdw== "wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": version "7.0.0" From 2901fbb008e3184a72c7ad6d182d546bfa8f9096 Mon Sep 17 00:00:00 2001 From: Ruben Bridgewater Date: Tue, 8 Jul 2025 15:34:34 +0200 Subject: [PATCH 03/53] chore: update dependencies & dependabot & less cache lookups (#6041) * chore: update lru-cache, less lookups, dependabot ignore update This updates the lru-cache to the latest version that 
supports Node.js 18. The version above is ignored in dependabot. It also ignores path-to-regexp, since that has to stay aligned with the one used in express.js v4. Express itself is updated as dev dependencies. That way we should notice earlier in case more things break. The cache lookups got a bit optimized to not have to check multiple times if an entry exists or not. * chore: outcommend alternative in docker-compose Only one should be used per port. --- .github/dependabot.yml | 6 + .github/workflows/platform.yml | 1 + docker-compose.yml | 22 +- package.json | 10 +- .../src/appsec/iast/overhead-controller.js | 2 +- .../src/appsec/iast/vulnerability-reporter.js | 4 +- packages/dd-trace/src/datastreams/pathway.js | 18 +- .../dd-trace/src/datastreams/processor.js | 13 +- .../src/datastreams/schemas/schema_builder.js | 10 +- packages/dd-trace/src/rate_limiter.js | 2 +- .../test/remote_config/resources/index.js | 5 +- yarn.lock | 366 ++++++++---------- 12 files changed, 205 insertions(+), 254 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index d23fc2b31e5..891a2bba0e7 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -31,6 +31,12 @@ updates: - dependency-name: "jest-docblock" # 30.0.0 onwards only supports Node.js 18.14.x and above update-types: ["version-update:semver-major"] + # The path-to-regexp version has to be the same as used in express v4. + # Consider vendoring it instead. + - dependency-name: "path-to-regexp" + - dependency-name: "lru-cache" + # 11.0.0 onwards only supports Node.js 20 and above + update-types: ["version-update:semver-major"] groups: dev-minor-and-patch-dependencies: dependency-type: "development" diff --git a/.github/workflows/platform.yml b/.github/workflows/platform.yml index df72c1cc9c6..37e7675732a 100644 --- a/.github/workflows/platform.yml +++ b/.github/workflows/platform.yml @@ -296,6 +296,7 @@ jobs: version: ${{ matrix.version }} - uses: ./.github/actions/install - run: yarn add --ignore-scripts mocha@10 # Use older mocha to support old Node.js versions + - run: yarn add --ignore-scripts express@4 # Use older express to support old Node.js versions - run: node node_modules/.bin/mocha --colors --timeout 30000 integration-tests/init.spec.js integration-guardrails-unsupported: diff --git a/docker-compose.yml b/docker-compose.yml index e70dcc6c0de..56fa9340fcf 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -165,14 +165,14 @@ services: # Use this for local development when making new VCR cassettes to persist to the test agent # Do not use the above testagent service if using this one. 
- testagent-vcr: - image: ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:v1.24.1 - ports: - - "127.0.0.1:9126:9126" - environment: - - LOG_LEVEL=DEBUG - - TRACE_LANGUAGE=javascript - - ENABLED_CHECKS=trace_stall,meta_tracer_version_header,trace_count_header,trace_peer_service - - PORT=9126 - volumes: - - ${VCR_CASSETTES_PATH:-/tmp/empty-vcr}:/vcr-cassettes:delegated + # testagent-vcr: + # image: ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:v1.24.1 + # ports: + # - "127.0.0.1:9126:9126" + # environment: + # - LOG_LEVEL=DEBUG + # - TRACE_LANGUAGE=javascript + # - ENABLED_CHECKS=trace_stall,meta_tracer_version_header,trace_count_header,trace_peer_service + # - PORT=9126 + # volumes: + # - ${VCR_CASSETTES_PATH:-/tmp/empty-vcr}:/vcr-cassettes:delegated diff --git a/package.json b/package.json index 3f3234c49b9..10066b36f83 100644 --- a/package.json +++ b/package.json @@ -97,18 +97,18 @@ "@opentelemetry/core": "^1.14.0", "crypto-randomuuid": "^1.0.0", "dc-polyfill": "^0.1.9", - "ignore": "^5.2.4", + "ignore": "^7.0.5", "import-in-the-middle": "^1.14.2", "istanbul-lib-coverage": "^3.2.2", "jest-docblock": "^29.7.0", "jsonpath-plus": "^10.3.0", "koalas": "^1.0.2", - "limiter": "^1.1.5", + "limiter": "^3.0.0", "lodash.sortby": "^4.7.0", - "lru-cache": "^7.18.3", + "lru-cache": "^10.4.3", "module-details-from-path": "^1.0.4", "mutexify": "^1.4.0", - "opentracing": ">=0.12.1", + "opentracing": ">=0.14.7", "path-to-regexp": "^0.1.12", "pprof-format": "^2.1.0", "protobufjs": "^7.5.3", @@ -138,7 +138,7 @@ "eslint-plugin-n": "^17.20.0", "eslint-plugin-promise": "^7.2.1", "eslint-plugin-unicorn": "^59.0.1", - "express": "^4.21.2", + "express": "^5.1.0", "get-port": "^5.1.1", "glob": "^7.2.3", "globals": "^15.15.0", diff --git a/packages/dd-trace/src/appsec/iast/overhead-controller.js b/packages/dd-trace/src/appsec/iast/overhead-controller.js index 3aa7bb651d5..ad5f005fd84 100644 --- a/packages/dd-trace/src/appsec/iast/overhead-controller.js +++ b/packages/dd-trace/src/appsec/iast/overhead-controller.js @@ -1,6 +1,6 @@ 'use strict' -const LRUCache = require('lru-cache') +const { LRUCache } = require('lru-cache') const web = require('../../plugins/util/web') const vulnerabilities = require('./vulnerabilities') diff --git a/packages/dd-trace/src/appsec/iast/vulnerability-reporter.js b/packages/dd-trace/src/appsec/iast/vulnerability-reporter.js index a4d300f7616..201a945b25e 100644 --- a/packages/dd-trace/src/appsec/iast/vulnerability-reporter.js +++ b/packages/dd-trace/src/appsec/iast/vulnerability-reporter.js @@ -1,6 +1,6 @@ 'use strict' -const LRU = require('lru-cache') +const { LRUCache } = require('lru-cache') const vulnerabilitiesFormatter = require('./vulnerabilities-formatter') const { IAST_ENABLED_TAG_KEY, IAST_JSON_TAG_KEY } = require('./tags') const { keepTrace } = require('../../priority_sampler') @@ -10,7 +10,7 @@ const { ASM } = require('../../standalone/product') const VULNERABILITIES_KEY = 'vulnerabilities' const VULNERABILITY_HASHES_MAX_SIZE = 1000 -const VULNERABILITY_HASHES = new LRU({ max: VULNERABILITY_HASHES_MAX_SIZE }) +const VULNERABILITY_HASHES = new LRUCache({ max: VULNERABILITY_HASHES_MAX_SIZE }) const RESET_VULNERABILITY_CACHE_INTERVAL = 60 * 60 * 1000 // 1 hour let tracer diff --git a/packages/dd-trace/src/datastreams/pathway.js b/packages/dd-trace/src/datastreams/pathway.js index ddcd6ae9876..27ea3dac2a1 100644 --- a/packages/dd-trace/src/datastreams/pathway.js +++ b/packages/dd-trace/src/datastreams/pathway.js @@ -3,12 +3,11 @@ // this inconsistency is ok because 
hashes do not need to be consistent across services const crypto = require('crypto') const { encodeVarint, decodeVarint } = require('./encoding') -const LRUCache = require('lru-cache') +const { LRUCache } = require('lru-cache') const log = require('../log') const pick = require('../../../datadog-core/src/utils/src/pick') -const options = { max: 500 } -const cache = new LRUCache(options) +const cache = new LRUCache({ max: 500 }) const CONTEXT_PROPAGATION_KEY = 'dd-pathway-ctx' const CONTEXT_PROPAGATION_KEY_BASE64 = 'dd-pathway-ctx-base64' @@ -24,15 +23,16 @@ function computeHash (service, env, edgeTags, parentHash) { edgeTags.sort() const hashableEdgeTags = edgeTags.filter(item => item !== 'manual_checkpoint:true') - const key = `${service}${env}` + hashableEdgeTags.join('') + parentHash.toString() - if (cache.get(key)) { - return cache.get(key) + const key = `${service}${env}${hashableEdgeTags.join('')}${parentHash}` + let value = cache.get(key) + if (value) { + return value } const currentHash = shaHash(`${service}${env}` + hashableEdgeTags.join('')) const buf = Buffer.concat([currentHash, parentHash], 16) - const val = shaHash(buf.toString()) - cache.set(key, val) - return val + value = shaHash(buf.toString()) + cache.set(key, value) + return value } function encodePathwayContext (dataStreamsContext) { diff --git a/packages/dd-trace/src/datastreams/processor.js b/packages/dd-trace/src/datastreams/processor.js index f65e28fa64b..c0e3ff2a18c 100644 --- a/packages/dd-trace/src/datastreams/processor.js +++ b/packages/dd-trace/src/datastreams/processor.js @@ -78,15 +78,14 @@ class StatsBucket { return this._backlogs } - forCheckpoint (checkpoint) { - const key = checkpoint.hash - if (!this._checkpoints.has(key)) { - this._checkpoints.set( - key, new StatsPoint(checkpoint.hash, checkpoint.parentHash, checkpoint.edgeTags) - ) + forCheckpoint ({ hash, parentHash, edgeTags }) { + let checkpoint = this._checkpoints.get(hash) + if (!checkpoint) { + checkpoint = new StatsPoint(hash, parentHash, edgeTags) + this._checkpoints.set(hash, checkpoint) } - return this._checkpoints.get(key) + return checkpoint } /** diff --git a/packages/dd-trace/src/datastreams/schemas/schema_builder.js b/packages/dd-trace/src/datastreams/schemas/schema_builder.js index 6e4fee424e1..6db3660d23e 100644 --- a/packages/dd-trace/src/datastreams/schemas/schema_builder.js +++ b/packages/dd-trace/src/datastreams/schemas/schema_builder.js @@ -1,4 +1,4 @@ -const LRUCache = require('lru-cache') +const { LRUCache } = require('lru-cache') const { fnv64 } = require('../fnv') const { Schema } = require('./schema') @@ -25,10 +25,12 @@ class SchemaBuilder { } static getSchema (schemaName, iterator, builder) { - if (!CACHE.has(schemaName)) { - CACHE.set(schemaName, (builder ?? new SchemaBuilder(iterator)).build()) + let entry = CACHE.get(schemaName) + if (!entry) { + entry = (builder ?? 
new SchemaBuilder(iterator)).build() + CACHE.set(schemaName, entry) } - return CACHE.get(schemaName) + return entry } build () { diff --git a/packages/dd-trace/src/rate_limiter.js b/packages/dd-trace/src/rate_limiter.js index 3789ffaeb72..a584216335f 100644 --- a/packages/dd-trace/src/rate_limiter.js +++ b/packages/dd-trace/src/rate_limiter.js @@ -5,7 +5,7 @@ const limiter = require('limiter') class RateLimiter { constructor (rateLimit, interval = 'second') { this._rateLimit = Number.parseInt(rateLimit) - this._limiter = new limiter.RateLimiter(this._rateLimit, interval) + this._limiter = new limiter.RateLimiter({ tokensPerInterval: this._rateLimit, interval }) this._tokensRequested = 0 this._prevIntervalTokens = 0 this._prevTokensRequested = 0 diff --git a/packages/dd-trace/test/remote_config/resources/index.js b/packages/dd-trace/test/remote_config/resources/index.js index a9e0bacaa8b..b76e302f019 100644 --- a/packages/dd-trace/test/remote_config/resources/index.js +++ b/packages/dd-trace/test/remote_config/resources/index.js @@ -13,6 +13,9 @@ app.get('/', async (req, res) => { res.end('OK') }) -const server = app.listen(process.env.APP_PORT || 0, () => { +const server = app.listen(process.env.APP_PORT || 0, (error) => { + if (error) { + throw error + } process.send?.({ port: server.address().port }) }) diff --git a/yarn.lock b/yarn.lock index 846fed4c811..8edfc60c376 100644 --- a/yarn.lock +++ b/yarn.lock @@ -874,13 +874,13 @@ resolved "https://registry.yarnpkg.com/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz#e77a97fbd345b76d83245edcd17d393b1b41fb31" integrity sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ== -accepts@~1.3.8: - version "1.3.8" - resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" - integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== +accepts@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/accepts/-/accepts-2.0.0.tgz#bbcf4ba5075467f3f2131eab3cffc73c2f5d7895" + integrity sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng== dependencies: - mime-types "~2.1.34" - negotiator "0.6.3" + mime-types "^3.0.0" + negotiator "^1.0.0" acorn-import-attributes@^1.9.5: version "1.9.5" @@ -994,11 +994,6 @@ array-buffer-byte-length@^1.0.1, array-buffer-byte-length@^1.0.2: call-bound "^1.0.3" is-array-buffer "^3.0.5" -array-flatten@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" - integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== - array-includes@^3.1.9: version "3.1.9" resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.9.tgz#1f0ccaa08e90cdbc3eb433210f903ad0f17c3f3a" @@ -1133,24 +1128,6 @@ bind-obj-methods@^3.0.0: resolved "https://registry.yarnpkg.com/bind-obj-methods/-/bind-obj-methods-3.0.0.tgz#65b66544d9d668d80dfefe2089dd347ad1dbcaed" integrity sha512-nLEaaz3/sEzNSyPWRsN9HNsqwk1AUyECtGj+XwGdIi3xABnEqecvXtIJ0wehQXuuER5uZ/5fTs2usONgYjG+iw== -body-parser@1.20.3: - version "1.20.3" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.3.tgz#1953431221c6fb5cd63c4b36d53fab0928e548c6" - integrity sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g== - dependencies: - bytes "3.1.2" - content-type "~1.0.5" - debug "2.6.9" - depd "2.0.0" - destroy 
"1.2.0" - http-errors "2.0.0" - iconv-lite "0.4.24" - on-finished "2.4.1" - qs "6.13.0" - raw-body "2.5.2" - type-is "~1.6.18" - unpipe "1.0.0" - body-parser@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-2.2.0.tgz#f7a9656de305249a715b549b7b8fd1ab9dfddcfa" @@ -1496,14 +1473,14 @@ concat-stream@^2.0.0: readable-stream "^3.0.2" typedarray "^0.0.6" -content-disposition@0.5.4: - version "0.5.4" - resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" - integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== +content-disposition@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-1.0.0.tgz#844426cb398f934caefcbb172200126bc7ceace2" + integrity sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg== dependencies: safe-buffer "5.2.1" -content-type@^1.0.5, content-type@~1.0.4, content-type@~1.0.5: +content-type@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.5.tgz#8b773162656d1d1086784c8f23a54ce6d73d7918" integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA== @@ -1523,15 +1500,15 @@ convert-to-spaces@^1.0.1: resolved "https://registry.yarnpkg.com/convert-to-spaces/-/convert-to-spaces-1.0.2.tgz#7e3e48bbe6d997b1417ddca2868204b4d3d85715" integrity sha512-cj09EBuObp9gZNQCzc7hByQyrs6jVGE+o9kSJmeUoj+GiPiJvi5LYqEH/Hmme4+MTLHM+Ejtq+FChpjjEnsPdQ== -cookie-signature@1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" - integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== +cookie-signature@^1.2.1: + version "1.2.2" + resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.2.2.tgz#57c7fc3cc293acab9fec54d73e15690ebe4a1793" + integrity sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg== -cookie@0.7.1: - version "0.7.1" - resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.7.1.tgz#2f73c42142d5d5cf71310a74fc4ae61670e5dbc9" - integrity sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w== +cookie@^0.7.1: + version "0.7.2" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.7.2.tgz#556369c472a2ba910f2979891b526b3436237ed7" + integrity sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w== core-js-compat@^3.41.0: version "3.43.0" @@ -1596,13 +1573,6 @@ dc-polyfill@^0.1.9: resolved "https://registry.yarnpkg.com/dc-polyfill/-/dc-polyfill-0.1.9.tgz#ee594f4366a6dcf006db1c1f9d3672f57a720856" integrity sha512-D5mJThEEk9hf+CJPwTf9JFsrWdlWp8Pccjxkhf7uUT/E/cU9Mx3ebWe2Bz2OawRmJ6WS9eaDPBkeBE4uOKq9uw== -debug@2.6.9: - version "2.6.9" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" - integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== - dependencies: - ms "2.0.0" - debug@^3.2.7: version "3.2.7" resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" @@ -1674,16 +1644,11 @@ delayed-stream@~1.0.0: resolved 
"https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== -depd@2.0.0: +depd@2.0.0, depd@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== -destroy@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" - integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== - detect-newline@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" @@ -1745,12 +1710,7 @@ emoji-regex@^9.2.2: resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== -encodeurl@~1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" - integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== - -encodeurl@~2.0.0: +encodeurl@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-2.0.0.tgz#7b8ea898077d7e409d3ac45474ea38eaf0857a58" integrity sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg== @@ -1876,7 +1836,7 @@ escalade@^3.1.1, escalade@^3.2.0: resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.2.0.tgz#011a3f69856ba189dffa7dc8fcce99d2a87903e5" integrity sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA== -escape-html@~1.0.3: +escape-html@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== @@ -2123,7 +2083,7 @@ esutils@^2.0.2: resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== -etag@~1.8.1: +etag@^1.8.1: version "1.8.1" resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== @@ -2133,42 +2093,38 @@ events-to-array@^1.0.1: resolved "https://registry.yarnpkg.com/events-to-array/-/events-to-array-1.1.2.tgz#2d41f563e1fe400ed4962fe1a4d5c6a7539df7f6" integrity sha512-inRWzRY7nG+aXZxBzEqYKB3HPgwflZRopAjDCHv0whhRx+MTUr1ei0ICZUypdyE0HRm4L2d5VEcIqLD6yl+BFA== -express@^4.21.2: - version "4.21.2" - resolved "https://registry.yarnpkg.com/express/-/express-4.21.2.tgz#cf250e48362174ead6cea4a566abef0162c1ec32" - integrity sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA== - dependencies: - accepts "~1.3.8" - array-flatten "1.1.1" - body-parser "1.20.3" - content-disposition "0.5.4" - content-type "~1.0.4" - cookie "0.7.1" - cookie-signature "1.0.6" - debug "2.6.9" - depd "2.0.0" - encodeurl "~2.0.0" - escape-html "~1.0.3" - 
etag "~1.8.1" - finalhandler "1.3.1" - fresh "0.5.2" - http-errors "2.0.0" - merge-descriptors "1.0.3" - methods "~1.1.2" - on-finished "2.4.1" - parseurl "~1.3.3" - path-to-regexp "0.1.12" - proxy-addr "~2.0.7" - qs "6.13.0" - range-parser "~1.2.1" - safe-buffer "5.2.1" - send "0.19.0" - serve-static "1.16.2" - setprototypeof "1.2.0" - statuses "2.0.1" - type-is "~1.6.18" - utils-merge "1.0.1" - vary "~1.1.2" +express@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/express/-/express-5.1.0.tgz#d31beaf715a0016f0d53f47d3b4d7acf28c75cc9" + integrity sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA== + dependencies: + accepts "^2.0.0" + body-parser "^2.2.0" + content-disposition "^1.0.0" + content-type "^1.0.5" + cookie "^0.7.1" + cookie-signature "^1.2.1" + debug "^4.4.0" + encodeurl "^2.0.0" + escape-html "^1.0.3" + etag "^1.8.1" + finalhandler "^2.1.0" + fresh "^2.0.0" + http-errors "^2.0.0" + merge-descriptors "^2.0.0" + mime-types "^3.0.0" + on-finished "^2.4.1" + once "^1.4.0" + parseurl "^1.3.3" + proxy-addr "^2.0.7" + qs "^6.14.0" + range-parser "^1.2.1" + router "^2.2.0" + send "^1.1.0" + serve-static "^2.2.0" + statuses "^2.0.1" + type-is "^2.0.1" + vary "^1.1.2" fast-content-type-parse@^3.0.0: version "3.0.0" @@ -2217,18 +2173,17 @@ fill-range@^7.1.1: dependencies: to-regex-range "^5.0.1" -finalhandler@1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.3.1.tgz#0c575f1d1d324ddd1da35ad7ece3df7d19088019" - integrity sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ== - dependencies: - debug "2.6.9" - encodeurl "~2.0.0" - escape-html "~1.0.3" - on-finished "2.4.1" - parseurl "~1.3.3" - statuses "2.0.1" - unpipe "~1.0.0" +finalhandler@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-2.1.0.tgz#72306373aa89d05a8242ed569ed86a1bff7c561f" + integrity sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q== + dependencies: + debug "^4.4.0" + encodeurl "^2.0.0" + escape-html "^1.0.3" + on-finished "^2.4.1" + parseurl "^1.3.3" + statuses "^2.0.1" find-cache-dir@^3.2.0: version "3.3.2" @@ -2326,10 +2281,10 @@ forwarded@0.2.0: resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== -fresh@0.5.2: - version "0.5.2" - resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" - integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== +fresh@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/fresh/-/fresh-2.0.0.tgz#8dd7df6a1b3a1b3a5cf186c05a5dd267622635a4" + integrity sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A== fromentries@^1.2.0: version "1.3.2" @@ -2603,13 +2558,6 @@ http-errors@2.0.0, http-errors@^2.0.0: statuses "2.0.1" toidentifier "1.0.1" -iconv-lite@0.4.24: - version "0.4.24" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" - integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== - dependencies: - safer-buffer ">= 2.1.2 < 3" - iconv-lite@0.6.3, iconv-lite@^0.6.3: version "0.6.3" resolved 
"https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" @@ -2617,11 +2565,16 @@ iconv-lite@0.6.3, iconv-lite@^0.6.3: dependencies: safer-buffer ">= 2.1.2 < 3.0.0" -ignore@^5.2.0, ignore@^5.2.4, ignore@^5.3.2: +ignore@^5.2.0, ignore@^5.3.2: version "5.3.2" resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.2.tgz#3cd40e729f3643fd87cb04e50bf0eb722bc596f5" integrity sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g== +ignore@^7.0.5: + version "7.0.5" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-7.0.5.tgz#4cb5f6cd7d4c7ab0365738c7aea888baa6d7efd9" + integrity sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg== + immediate@~3.0.5: version "3.0.6" resolved "https://registry.yarnpkg.com/immediate/-/immediate-3.0.6.tgz#9db1dbd0faf8de6fbe0f5dd5e56bb606280de69b" @@ -2868,6 +2821,11 @@ is-plain-obj@^2.1.0: resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz#45e42e37fccf1f40da8e5f76ee21515840c09287" integrity sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA== +is-promise@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-4.0.0.tgz#42ff9f84206c1991d26debf520dd5c01042dd2f3" + integrity sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ== + is-regex@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.2.1.tgz#76d70a3ed10ef9be48eb577887d74205bf0cad22" @@ -3189,10 +3147,10 @@ lie@~3.3.0: dependencies: immediate "~3.0.5" -limiter@^1.1.5: - version "1.1.5" - resolved "https://registry.yarnpkg.com/limiter/-/limiter-1.1.5.tgz#8f92a25b3b16c6131293a0cc834b4a838a2aa7c2" - integrity sha512-FWWMIEOxz3GwUI4Ts/IvgVy6LPvoMPgjMdQ185nN6psJyBJ4yOpzqm695/h5umdLJg2vW3GR5iG11MAkR2AzJA== +limiter@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/limiter/-/limiter-3.0.0.tgz#03556b76d1a81f547caeecc6b83ecc6f24495715" + integrity sha512-hev7DuXojsTFl2YwyzUJMDnZ/qBDd3yZQLSH3aD4tdL1cqfc3TMnoecEJtWFaQFdErZsKoFMBTxF/FBSkgDbEg== locate-path@^5.0.0: version "5.0.0" @@ -3260,7 +3218,7 @@ loupe@^2.3.6: dependencies: get-func-name "^2.0.1" -lru-cache@^10.2.0: +lru-cache@^10.2.0, lru-cache@^10.4.3: version "10.4.3" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.4.3.tgz#410fc8a17b70e598013df257c2446b7f3383f119" integrity sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ== @@ -3272,7 +3230,7 @@ lru-cache@^5.1.1: dependencies: yallist "^3.0.2" -lru-cache@^7.14.0, lru-cache@^7.18.3: +lru-cache@^7.14.0: version "7.18.3" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.18.3.tgz#f793896e0fd0e954a59dfdd82f0773808df6aa89" integrity sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA== @@ -3306,16 +3264,16 @@ media-typer@^1.1.0: resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-1.1.0.tgz#6ab74b8f2d3320f2064b2a87a38e7931ff3a5561" integrity sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw== -merge-descriptors@1.0.3, merge-descriptors@~1.0.0: +merge-descriptors@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-2.0.0.tgz#ea922f660635a2249ee565e0449f951e6b603808" + integrity sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g== + 
+merge-descriptors@~1.0.0: version "1.0.3" resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.3.tgz#d80319a65f3c7935351e5cfdac8f9318504dbed5" integrity sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ== -methods@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" - integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== - mime-db@1.52.0: version "1.52.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" @@ -3326,25 +3284,20 @@ mime-db@^1.54.0: resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.54.0.tgz#cddb3ee4f9c64530dff640236661d42cb6a314f5" integrity sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ== -mime-types@^2.1.12, mime-types@~2.1.24, mime-types@~2.1.34: +mime-types@^2.1.12, mime-types@~2.1.24: version "2.1.35" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== dependencies: mime-db "1.52.0" -mime-types@^3.0.0: +mime-types@^3.0.0, mime-types@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-3.0.1.tgz#b1d94d6997a9b32fd69ebaed0db73de8acb519ce" integrity sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA== dependencies: mime-db "^1.54.0" -mime@1.6.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" - integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== - mimic-fn@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" @@ -3434,12 +3387,7 @@ module-not-found-error@^1.0.0: resolved "https://registry.yarnpkg.com/module-not-found-error/-/module-not-found-error-1.0.1.tgz#cf8b4ff4f29640674d6cdd02b0e3bc523c2bbdc0" integrity sha512-pEk4ECWQXV6z2zjhRZUongnLJNUeGQJ3w6OQ5ctGwD+i5o93qjRQUk2Rt6VdNeu3sEP0AB4LcfvdebpxBRVr4g== -ms@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" - integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== - -ms@2.1.3, ms@^2.1.1, ms@^2.1.2, ms@^2.1.3: +ms@^2.1.1, ms@^2.1.2, ms@^2.1.3: version "2.1.3" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== @@ -3469,10 +3417,10 @@ natural-compare@^1.4.0: resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== -negotiator@0.6.3: - version "0.6.3" - resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" - integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== +negotiator@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/negotiator/-/negotiator-1.0.0.tgz#b6c91bb47172d69f93cfd7c357bbb529019b5f6a" + integrity sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg== nise@^6.0.0: version "6.1.1" @@ -3634,14 +3582,14 @@ octokit@^5.0.3: "@octokit/types" "^14.0.0" "@octokit/webhooks" "^14.0.0" -on-finished@2.4.1, on-finished@^2.4.1: +on-finished@^2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== dependencies: ee-first "1.1.1" -once@^1.3.0: +once@^1.3.0, once@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== @@ -3660,7 +3608,7 @@ opener@^1.5.1: resolved "https://registry.yarnpkg.com/opener/-/opener-1.5.2.tgz#5d37e1f35077b9dcac4301372271afdeb2a13598" integrity sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A== -opentracing@>=0.12.1: +opentracing@>=0.14.7: version "0.14.7" resolved "https://registry.yarnpkg.com/opentracing/-/opentracing-0.14.7.tgz#25d472bd0296dc0b64d7b94cbc995219031428f5" integrity sha512-vz9iS7MJ5+Bp1URw8Khvdyw1H/hGvzHWlKQ7eRrQojSCDL1/SrWfrY9QebLw97n2deyRtzHRC3MkQfVNUCo91Q== @@ -3765,7 +3713,7 @@ parent-module@^1.0.0: dependencies: callsites "^3.0.0" -parseurl@~1.3.3: +parseurl@^1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== @@ -3803,12 +3751,12 @@ path-scurry@^1.11.1: lru-cache "^10.2.0" minipass "^5.0.0 || ^6.0.2 || ^7.0.0" -path-to-regexp@0.1.12, path-to-regexp@^0.1.12: +path-to-regexp@^0.1.12: version "0.1.12" resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.12.tgz#d5e1a12e478a976d432ef3c58d534b9923164bb7" integrity sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ== -path-to-regexp@^8.1.0: +path-to-regexp@^8.0.0, path-to-regexp@^8.1.0: version "8.2.0" resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-8.2.0.tgz#73990cc29e57a3ff2a0d914095156df5db79e8b4" integrity sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ== @@ -3900,7 +3848,7 @@ protobufjs@^7.5.3: "@types/node" ">=13.7.0" long "^5.0.0" -proxy-addr@~2.0.7: +proxy-addr@^2.0.7: version "2.0.7" resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== @@ -3927,13 +3875,6 @@ punycode@^2.0.0, punycode@^2.1.0: resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5" integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== -qs@6.13.0: - version "6.13.0" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.13.0.tgz#6ca3bd58439f7e245655798997787b0d88a51906" - integrity sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg== - dependencies: - side-channel "^1.0.6" - qs@^6.14.0: version "6.14.0" resolved 
"https://registry.yarnpkg.com/qs/-/qs-6.14.0.tgz#c63fa40680d2c5c941412a0e899c89af60c0a930" @@ -3958,21 +3899,11 @@ randombytes@^2.1.0: dependencies: safe-buffer "^5.1.0" -range-parser@~1.2.1: +range-parser@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== -raw-body@2.5.2: - version "2.5.2" - resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.2.tgz#99febd83b90e08975087e8f1f9419a149366b68a" - integrity sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA== - dependencies: - bytes "3.1.2" - http-errors "2.0.0" - iconv-lite "0.4.24" - unpipe "1.0.0" - raw-body@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-3.0.0.tgz#25b3476f07a51600619dae3fe82ddc28a36e5e0f" @@ -4163,6 +4094,17 @@ rimraf@^3.0.0, rimraf@^3.0.2: dependencies: glob "^7.1.3" +router@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/router/-/router-2.2.0.tgz#019be620b711c87641167cc79b99090f00b146ef" + integrity sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ== + dependencies: + debug "^4.4.0" + depd "^2.0.0" + is-promise "^4.0.0" + parseurl "^1.3.3" + path-to-regexp "^8.0.0" + safe-array-concat@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/safe-array-concat/-/safe-array-concat-1.1.3.tgz#c9e54ec4f603b0bbb8e7e5007a5ee7aecd1538c3" @@ -4201,7 +4143,7 @@ safe-regex-test@^1.1.0: es-errors "^1.3.0" is-regex "^1.2.1" -"safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0": +"safer-buffer@>= 2.1.2 < 3.0.0": version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== @@ -4229,24 +4171,22 @@ semver@^7.5.0, semver@^7.5.3, semver@^7.5.4, semver@^7.6.3, semver@^7.7.1, semve resolved "https://registry.yarnpkg.com/semver/-/semver-7.7.2.tgz#67d99fdcd35cec21e6f8b87a7fd515a33f982b58" integrity sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA== -send@0.19.0: - version "0.19.0" - resolved "https://registry.yarnpkg.com/send/-/send-0.19.0.tgz#bbc5a388c8ea6c048967049dbeac0e4a3f09d7f8" - integrity sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw== +send@^1.1.0, send@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/send/-/send-1.2.0.tgz#32a7554fb777b831dfa828370f773a3808d37212" + integrity sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw== dependencies: - debug "2.6.9" - depd "2.0.0" - destroy "1.2.0" - encodeurl "~1.0.2" - escape-html "~1.0.3" - etag "~1.8.1" - fresh "0.5.2" - http-errors "2.0.0" - mime "1.6.0" - ms "2.1.3" - on-finished "2.4.1" - range-parser "~1.2.1" - statuses "2.0.1" + debug "^4.3.5" + encodeurl "^2.0.0" + escape-html "^1.0.3" + etag "^1.8.1" + fresh "^2.0.0" + http-errors "^2.0.0" + mime-types "^3.0.1" + ms "^2.1.3" + on-finished "^2.4.1" + range-parser "^1.2.1" + statuses "^2.0.1" serialize-javascript@^6.0.2: version "6.0.2" @@ -4255,15 +4195,15 @@ serialize-javascript@^6.0.2: dependencies: randombytes "^2.1.0" -serve-static@1.16.2: - version "1.16.2" - resolved 
"https://registry.yarnpkg.com/serve-static/-/serve-static-1.16.2.tgz#b6a5343da47f6bdd2673848bf45754941e803296" - integrity sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw== +serve-static@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-2.2.0.tgz#9c02564ee259bdd2251b82d659a2e7e1938d66f9" + integrity sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ== dependencies: - encodeurl "~2.0.0" - escape-html "~1.0.3" - parseurl "~1.3.3" - send "0.19.0" + encodeurl "^2.0.0" + escape-html "^1.0.3" + parseurl "^1.3.3" + send "^1.2.0" set-blocking@^2.0.0: version "2.0.0" @@ -4357,7 +4297,7 @@ side-channel-weakmap@^1.0.2: object-inspect "^1.13.3" side-channel-map "^1.0.1" -side-channel@^1.0.6, side-channel@^1.1.0: +side-channel@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.1.0.tgz#c3fcff9c4da932784873335ec9765fa94ff66bc9" integrity sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw== @@ -4451,6 +4391,11 @@ statuses@2.0.1: resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== +statuses@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.2.tgz#8f75eecef765b5e1cfcdc080da59409ed424e382" + integrity sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw== + stop-iteration-iterator@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz#f481ff70a548f6124d0312c3aa14cbfa7aa542ad" @@ -4794,7 +4739,7 @@ type-fest@^0.8.0: resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d" integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA== -type-is@^1.6.18, type-is@~1.6.18: +type-is@^1.6.18: version "1.6.18" resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== @@ -4802,7 +4747,7 @@ type-is@^1.6.18, type-is@~1.6.18: media-typer "0.3.0" mime-types "~2.1.24" -type-is@^2.0.0: +type-is@^2.0.0, type-is@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/type-is/-/type-is-2.0.1.tgz#64f6cf03f92fce4015c2b224793f6bdd4b068c97" integrity sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw== @@ -4905,7 +4850,7 @@ universal-user-agent@^7.0.0, universal-user-agent@^7.0.2: resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-7.0.3.tgz#c05870a58125a2dc00431f2df815a77fe69736be" integrity sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A== -unpipe@1.0.0, unpipe@~1.0.0: +unpipe@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== @@ -4930,17 +4875,12 @@ util-deprecate@^1.0.1, util-deprecate@~1.0.1: resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity 
sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== -utils-merge@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" - integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== - uuid@^8.3.2: version "8.3.2" resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== -vary@~1.1.2: +vary@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== From 40d56184ed811aa4052d53e47d94132f970f159c Mon Sep 17 00:00:00 2001 From: Thomas Watson Date: Tue, 8 Jul 2025 18:33:57 +0200 Subject: [PATCH 04/53] ESLint: Require 'use strict' header (#5334) * Fix issues related to adding strict-mode These issues are either just a different behavior in strict-mode, changes to tests that expect specific lines, or hiding real errors. --- benchmark/sirun/appsec-iast/insecure-bank.js | 2 ++ benchmark/sirun/appsec/insecure-bank.js | 2 ++ benchmark/sirun/encoding/index.js | 2 ++ benchmark/sirun/gc.js | 2 ++ benchmark/sirun/means.js | 2 ++ benchmark/sirun/scope/index.js | 2 ++ benchmark/sirun/shimmer-runtime/index.js | 2 ++ benchmark/sirun/shimmer-startup/index.js | 2 ++ benchmark/sirun/spans/spans.js | 2 ++ benchmark/sirun/strip-unwanted-results.js | 1 + ci/cypress/after-run.js | 2 ++ ci/cypress/after-spec.js | 2 ++ ci/cypress/plugin.js | 2 ++ ci/cypress/polyfills.js | 2 ++ ci/cypress/support.js | 2 ++ ci/init.js | 2 ++ eslint.config.mjs | 3 ++- .../appsec/esm-app/custom-noop-hooks.mjs | 2 -- integration-tests/appsec/esm-app/index.mjs | 1 - integration-tests/appsec/esm-app/worker-dep.mjs | 2 -- .../appsec/esm-security-controls/index.mjs | 2 -- .../esm-security-controls/sanitizer-default.mjs | 2 -- .../appsec/esm-security-controls/sanitizer.mjs | 2 -- .../appsec/esm-security-controls/validator.mjs | 2 -- integration-tests/ci-visibility-intake.js | 2 ++ .../support/logger.js | 2 ++ .../support/steps.js | 2 ++ .../support/sum.js | 2 ++ .../automatic-log-submission-test.js | 2 ++ .../logger.js | 2 ++ .../automatic-log-submission-playwright/sum.js | 2 ++ .../automatic-log-submission-test.js | 2 ++ .../automatic-log-submission/config-jest.js | 2 ++ .../automatic-log-submission/logger.js | 2 ++ .../automatic-log-submission/sum.js | 2 ++ .../dynamic-instrumentation/dependency.js | 2 ++ .../test-hit-breakpoint.js | 2 ++ .../test-not-hit-breakpoint.js | 2 ++ .../ci-visibility/features-di/support/steps.js | 2 ++ .../ci-visibility/features-di/support/sum.js | 2 ++ .../features-flaky/support/steps.js | 2 ++ .../features-impacted-test/support/steps.js | 2 ++ .../features-retry/support/steps.js | 2 ++ .../features-selenium/support/steps.js | 2 ++ .../features-test-management/support/steps.js | 2 ++ .../ci-visibility/features/support/steps.js | 2 ++ .../ci-visibility/jest-flaky/flaky-fails.js | 2 ++ .../ci-visibility/jest-flaky/flaky-passes.js | 2 ++ .../ci-visibility/jest/failing-test.js | 2 ++ .../ci-visibility/jest/mocked-test.js | 2 ++ .../ci-visibility/jestEnvironmentBadInit.js | 2 ++ .../office-addin-mock/dependency.js | 2 ++ .../ci-visibility/office-addin-mock/test.js | 2 ++ .../active-test-span-custom-span-test.js | 2 ++ 
.../active-test-span-tags-test.js | 2 ++ .../automatic-retry-test.js | 2 ++ .../before-all-timeout-test.js | 2 ++ .../exit-code-test.js | 2 ++ .../impacted-test.js | 2 ++ .../failing-test-and-another-test.js | 2 ++ .../active-test-span-rum-test.js | 2 ++ .../passing-test.js | 2 ++ .../attempt-to-fix-test.js | 2 ++ .../disabled-test.js | 2 ++ .../quarantine-test.js | 2 ++ .../playwright-tests/landing-page-test.js | 2 ++ .../playwright-tests/skipped-suite-test.js | 2 ++ .../playwright-tests/todo-list-page-test.js | 2 ++ .../ci-visibility/run-jest-bad-init.js | 2 ++ integration-tests/ci-visibility/run-jest.js | 2 ++ integration-tests/ci-visibility/run-mocha.js | 2 ++ .../ci-visibility/run-workerpool.js | 2 ++ .../sharding-test/sharding-test-1.js | 2 ++ .../sharding-test/sharding-test-2.js | 2 ++ .../sharding-test/sharding-test-3.js | 2 ++ .../sharding-test/sharding-test-4.js | 2 ++ .../sharding-test/sharding-test-5.js | 2 ++ .../ci-visibility/subproject/cypress.config.js | 2 ++ .../subproject/cypress/plugins-old/index.js | 2 ++ .../subproject/cypress/support/e2e.js | 2 ++ .../ci-visibility/subproject/dependency.js | 2 ++ .../subproject/features/support/steps.js | 2 ++ .../playwright-tests/landing-page-test.js | 2 ++ .../subproject/playwright.config.js | 2 ++ .../ci-visibility/subproject/subproject-test.js | 2 ++ .../test-custom-tags/custom-tags.js | 2 ++ .../test-early-flake-detection/jest-snapshot.js | 2 ++ .../mocha-parameterized.js | 2 ++ .../occasionally-failing-test.js | 2 ++ .../skipped-and-todo-test.js | 2 ++ .../test-parameterized.js | 2 ++ .../test-early-flake-detection/test.js | 2 ++ .../weird-test-names.js | 2 ++ .../eventually-passing-test.js | 2 ++ .../test-impacted-test/test-impacted-1.js | 2 ++ .../test-impacted-test/test-impacted-2.js | 2 ++ .../test-management/test-attempt-to-fix-1.js | 2 ++ .../test-management/test-attempt-to-fix-2.js | 2 ++ .../test-management/test-disabled-1.js | 2 ++ .../test-management/test-disabled-2.js | 2 ++ .../test-management/test-quarantine-1.js | 2 ++ .../test-management/test-quarantine-2.js | 2 ++ .../test-nested-hooks/test-nested-hooks.js | 2 ++ .../test-parsing-error/parsing-error-2.js | 2 ++ .../test-parsing-error/parsing-error.js | 2 ++ .../test-total-code-coverage/test-run.js | 2 ++ .../test-total-code-coverage/test-skipped.js | 2 ++ .../unused-dependency.js | 2 ++ .../test-total-code-coverage/used-dependency.js | 2 ++ .../ci-visibility/test/ci-visibility-test-2.js | 2 ++ .../ci-visibility/test/ci-visibility-test.js | 2 ++ .../ci-visibility/test/fail-test.js | 2 ++ .../ci-visibility/test/selenium-no-framework.js | 2 ++ .../ci-visibility/test/selenium-test.js | 2 ++ integration-tests/ci-visibility/test/sum.js | 2 ++ .../unskippable-test/test-to-run.js | 2 ++ .../unskippable-test/test-to-skip.js | 2 ++ .../unskippable-test/test-unskippable.js | 1 + .../web-app-server-with-redirect.js | 2 ++ .../ci-visibility/web-app-server.js | 2 ++ integration-tests/config-jest-multiproject.js | 2 ++ integration-tests/config-jest.js | 2 ++ integration-tests/cucumber/cucumber.spec.js | 4 ++-- integration-tests/cypress.config.js | 2 ++ integration-tests/cypress/plugins-old/index.js | 2 ++ .../debugger/source-map-support.spec.js | 2 +- .../target-app/source-map-support/minify.js | 2 ++ .../target-app/source-map-support/minify.min.js | 2 +- .../source-map-support/minify.min.js.map | 2 +- integration-tests/esbuild/aws-sdk.js | 2 ++ integration-tests/esbuild/basic-test.js | 1 + .../esbuild/build-and-test-aws-sdk.js | 2 ++ .../esbuild/build-and-test-openai.js | 2 ++ 
.../esbuild/build-and-test-skip-external.js | 2 ++ .../esbuild/build.esm.common-config.js | 3 +++ integration-tests/esbuild/build.js | 1 + integration-tests/esbuild/complex-app.js | 1 + integration-tests/esbuild/openai.js | 2 ++ integration-tests/esbuild/skip-external.js | 2 ++ integration-tests/graphql/index.js | 2 ++ integration-tests/init.spec.js | 2 ++ integration-tests/init/instrument.js | 2 ++ integration-tests/init/trace.js | 2 ++ integration-tests/jest/jest.spec.js | 6 +++--- integration-tests/memory-leak/index.js | 2 ++ integration-tests/mocha/mocha.spec.js | 4 ++-- integration-tests/my-nyc.config.js | 2 ++ .../opentelemetry/auto-instrumentation.js | 2 ++ integration-tests/package-guardrails.spec.js | 2 ++ integration-tests/playwright.config.js | 2 ++ integration-tests/profiler/dnstest.js | 2 ++ integration-tests/profiler/fstest.js | 2 ++ integration-tests/profiler/nettest.js | 2 ++ integration-tests/selenium/selenium.spec.js | 2 ++ packages/datadog-instrumentations/src/apollo.js | 17 +++++++---------- packages/datadog-instrumentations/src/avsc.js | 2 ++ .../datadog-instrumentations/src/cypress.js | 2 ++ packages/datadog-instrumentations/src/fetch.js | 1 + packages/datadog-instrumentations/src/mocha.js | 2 ++ .../src/mocha/common.js | 2 ++ packages/datadog-instrumentations/src/nyc.js | 2 ++ .../src/orchestrion-config/index.js | 2 ++ .../datadog-instrumentations/src/playwright.js | 2 ++ .../datadog-instrumentations/src/protobufjs.js | 2 ++ .../datadog-instrumentations/src/selenium.js | 2 ++ packages/datadog-instrumentations/src/vitest.js | 2 ++ .../helpers/check-require-cache/bad-order.js | 1 + .../helpers/check-require-cache/good-order.js | 1 + .../datadog-plugin-aerospike/test/naming.js | 2 ++ packages/datadog-plugin-amqp10/test/naming.js | 2 ++ packages/datadog-plugin-amqplib/test/naming.js | 2 ++ packages/datadog-plugin-apollo/test/fixtures.js | 2 ++ packages/datadog-plugin-apollo/test/naming.js | 2 ++ packages/datadog-plugin-avsc/src/index.js | 2 ++ .../datadog-plugin-avsc/src/schema_iterator.js | 2 ++ packages/datadog-plugin-avsc/test/helpers.js | 2 ++ .../src/services/bedrockruntime/index.js | 2 ++ .../test/kinesis-naming.js | 2 ++ .../test/lambda-naming.js | 2 ++ .../datadog-plugin-aws-sdk/test/s3-naming.js | 2 ++ .../datadog-plugin-aws-sdk/test/sns-naming.js | 2 ++ .../datadog-plugin-aws-sdk/test/sqs-naming.js | 2 ++ .../datadog-plugin-aws-sdk/test/util.spec.js | 2 ++ .../test/naming.js | 2 ++ .../test/naming.js | 2 ++ .../datadog-plugin-couchbase/test/naming.js | 2 ++ packages/datadog-plugin-couchbase/test/suite.js | 1 + .../test/features/simple.js | 2 ++ .../datadog-plugin-cypress/src/after-run.js | 2 ++ .../datadog-plugin-cypress/src/after-spec.js | 2 ++ .../src/cypress-plugin.js | 2 ++ packages/datadog-plugin-cypress/src/index.js | 2 ++ packages/datadog-plugin-cypress/src/plugin.js | 2 ++ packages/datadog-plugin-cypress/src/support.js | 2 ++ .../test/app-10/app-server.js | 2 ++ .../test/app-10/cypress.config.js | 2 ++ .../test/app-10/cypress/plugins/index.js | 2 ++ .../test/app-10/cypress/support/index.js | 2 ++ .../test/app/app-server.js | 2 ++ .../test/app/cypress/plugins/index.js | 2 ++ .../test/app/cypress/support/index.js | 2 ++ .../datadog-plugin-elasticsearch/test/naming.js | 2 ++ packages/datadog-plugin-fetch/test/naming.js | 2 ++ .../test/naming.js | 2 ++ .../src/utils.js | 2 ++ packages/datadog-plugin-graphql/src/utils.js | 2 ++ packages/datadog-plugin-graphql/test/naming.js | 2 ++ packages/datadog-plugin-grpc/test/naming.js | 2 ++ 
packages/datadog-plugin-http/test/naming.js | 2 ++ packages/datadog-plugin-http2/test/naming.js | 2 ++ packages/datadog-plugin-ioredis/test/naming.js | 2 ++ packages/datadog-plugin-iovalkey/test/naming.js | 2 ++ packages/datadog-plugin-jest/src/index.js | 2 ++ packages/datadog-plugin-jest/src/util.js | 2 ++ packages/datadog-plugin-jest/test/env.js | 2 ++ .../test/fixtures/test-to-run.js | 2 ++ .../test/fixtures/test-to-skip.js | 2 ++ .../test/fixtures/test-unskippable.js | 1 + packages/datadog-plugin-jest/test/jest-focus.js | 2 ++ .../test/jest-hook-failure.js | 2 ++ .../test/jest-inject-globals.js | 2 ++ .../datadog-plugin-jest/test/jest-test-suite.js | 2 ++ packages/datadog-plugin-jest/test/jest-test.js | 2 ++ packages/datadog-plugin-jest/test/util.spec.js | 2 ++ .../src/batch-consumer.js | 2 ++ packages/datadog-plugin-kafkajs/test/naming.js | 2 ++ packages/datadog-plugin-mariadb/test/naming.js | 2 ++ .../datadog-plugin-memcached/test/naming.js | 2 ++ .../test/proxy.js | 2 ++ .../test/mocha-active-span-in-hooks.js | 2 ++ .../test/mocha-fail-hook-async.js | 2 ++ .../test/mocha-fail-hook-sync.js | 2 ++ .../test/mocha-test-async-fail.js | 2 ++ .../test/mocha-test-async-pass.js | 2 ++ .../test/mocha-test-code-coverage.js | 2 ++ .../test/mocha-test-done-fail-badly.js | 2 ++ .../test/mocha-test-done-fail.js | 2 ++ .../test/mocha-test-done-pass.js | 2 ++ .../test/mocha-test-fail.js | 2 ++ .../test/mocha-test-integration.js | 2 ++ .../test/mocha-test-itr-1.js | 2 ++ .../test/mocha-test-itr-2.js | 2 ++ .../test/mocha-test-parameterized.js | 2 ++ .../test/mocha-test-pass.js | 2 ++ .../test/mocha-test-promise-fail.js | 2 ++ .../test/mocha-test-promise-pass.js | 2 ++ .../test/mocha-test-retries.js | 2 ++ .../test/mocha-test-skip-describe.js | 2 ++ .../test/mocha-test-skip.js | 2 ++ .../mocha-test-suite-level-fail-after-each.js | 2 ++ ...mocha-test-suite-level-fail-skip-describe.js | 2 ++ .../test/mocha-test-suite-level-fail-test.js | 2 ++ .../test/mocha-test-suite-level-pass.js | 2 ++ .../test/mocha-test-timeout-fail.js | 2 ++ .../test/mocha-test-timeout-pass.js | 2 ++ .../datadog-plugin-moleculer/test/naming.js | 2 ++ .../datadog-plugin-mongodb-core/test/naming.js | 2 ++ packages/datadog-plugin-mysql/test/naming.js | 2 ++ packages/datadog-plugin-mysql2/test/naming.js | 2 ++ packages/datadog-plugin-next/test/datadog.js | 2 ++ packages/datadog-plugin-next/test/naming.js | 2 ++ .../datadog-plugin-next/test/next.config.js | 2 ++ packages/datadog-plugin-nyc/src/index.js | 2 ++ packages/datadog-plugin-openai/test/no-init.js | 1 + .../datadog-plugin-opensearch/test/naming.js | 2 ++ .../src/connection-parser.js | 2 ++ packages/datadog-plugin-oracledb/test/naming.js | 2 ++ packages/datadog-plugin-pg/test/naming.js | 2 ++ packages/datadog-plugin-prisma/test/naming.js | 2 ++ packages/datadog-plugin-protobufjs/src/index.js | 2 ++ .../src/schema_iterator.js | 2 ++ .../datadog-plugin-protobufjs/test/helpers.js | 2 ++ packages/datadog-plugin-redis/test/naming.js | 2 ++ packages/datadog-plugin-rhea/test/naming.js | 2 ++ packages/datadog-plugin-selenium/src/index.js | 2 ++ packages/datadog-plugin-tedious/test/naming.js | 2 ++ packages/datadog-plugin-undici/test/naming.js | 2 ++ packages/datadog-plugin-vitest/src/index.js | 2 ++ .../dd-trace/src/appsec/iast/iast-context.js | 6 +++++- packages/dd-trace/src/appsec/iast/index.js | 2 ++ .../appsec/iast/taint-tracking/rewriter-esm.mjs | 2 -- .../src/appsec/iast/taint-tracking/rewriter.js | 1 + .../evidence-redaction/sensitive-regex.js | 2 ++ 
.../dd-trace/src/appsec/iast/vulnerabilities.js | 2 ++ .../dd-trace/src/appsec/telemetry/common.js | 2 +- .../early-flake-detection/get-known-tests.js | 2 ++ .../ci-visibility/exporters/git/git_metadata.js | 2 ++ .../get-skippable-suites.js | 2 ++ .../log-submission/log-submission-plugin.js | 2 ++ .../requests/get-library-configuration.js | 2 ++ .../dd-trace/src/ci-visibility/telemetry.js | 2 ++ .../test-api-manual/test-api-manual-plugin.js | 2 ++ .../get-test-management-tests.js | 2 ++ packages/dd-trace/src/config_stable.js | 2 ++ .../dd-trace/src/datastreams/checkpointer.js | 2 ++ packages/dd-trace/src/datastreams/context.js | 2 ++ packages/dd-trace/src/datastreams/encoding.js | 2 ++ packages/dd-trace/src/datastreams/fnv.js | 2 ++ packages/dd-trace/src/datastreams/pathway.js | 2 ++ packages/dd-trace/src/datastreams/processor.js | 2 ++ .../dd-trace/src/datastreams/schemas/schema.js | 2 ++ .../src/datastreams/schemas/schema_builder.js | 2 ++ .../src/datastreams/schemas/schema_sampler.js | 2 ++ packages/dd-trace/src/datastreams/writer.js | 2 ++ .../inspector_promises_polyfill.js | 1 + .../devtools_client/snapshot/symbols.js | 2 +- packages/dd-trace/src/encode/tags-processors.js | 2 ++ .../src/exporters/common/agent-info-exporter.js | 2 ++ packages/dd-trace/src/exporters/common/util.js | 2 ++ .../dd-trace/src/exporters/span-stats/index.js | 2 ++ .../dd-trace/src/exporters/span-stats/writer.js | 2 ++ .../dd-trace/src/external-logger/src/index.js | 2 ++ packages/dd-trace/src/git_metadata_tagger.js | 2 ++ packages/dd-trace/src/git_properties.js | 2 ++ .../src/llmobs/plugins/bedrockruntime.js | 2 ++ packages/dd-trace/src/noop/dogstatsd.js | 2 ++ .../src/opentracing/propagation/text_map_dsm.js | 2 ++ .../src/payload-tagging/config/index.js | 2 ++ packages/dd-trace/src/payload-tagging/index.js | 2 ++ .../dd-trace/src/payload-tagging/tagging.js | 2 ++ packages/dd-trace/src/plugins/apollo.js | 2 ++ packages/dd-trace/src/plugins/ci_plugin.js | 2 ++ packages/dd-trace/src/plugins/util/ci.js | 2 ++ packages/dd-trace/src/plugins/util/env.js | 2 ++ packages/dd-trace/src/plugins/util/git.js | 2 ++ .../dd-trace/src/plugins/util/inferred_proxy.js | 2 ++ packages/dd-trace/src/plugins/util/llm.js | 2 ++ .../dd-trace/src/plugins/util/serverless.js | 2 ++ packages/dd-trace/src/plugins/util/tags.js | 2 ++ packages/dd-trace/src/plugins/util/test.js | 2 ++ packages/dd-trace/src/plugins/util/url.js | 2 ++ .../src/plugins/util/user-provided-git.js | 2 ++ .../src/profiling/exporters/event_serializer.js | 1 + .../profiling/profilers/event_plugins/dns.js | 2 ++ .../profilers/event_plugins/dns_lookup.js | 2 ++ .../event_plugins/dns_lookupservice.js | 2 ++ .../profilers/event_plugins/dns_resolve.js | 2 ++ .../profilers/event_plugins/dns_reverse.js | 2 ++ .../profiling/profilers/event_plugins/event.js | 2 ++ .../src/profiling/profilers/event_plugins/fs.js | 2 ++ .../profiling/profilers/event_plugins/net.js | 2 ++ .../dd-trace/src/profiling/profilers/events.js | 2 ++ .../dd-trace/src/profiling/webspan-utils.js | 2 ++ .../src/runtime_metrics/runtime_metrics.js | 1 + packages/dd-trace/src/service-naming/index.js | 2 ++ .../src/service-naming/schemas/definition.js | 2 ++ .../dd-trace/src/service-naming/schemas/util.js | 2 ++ .../src/service-naming/schemas/v0/graphql.js | 2 ++ .../src/service-naming/schemas/v0/index.js | 2 ++ .../src/service-naming/schemas/v0/messaging.js | 2 ++ .../src/service-naming/schemas/v0/serverless.js | 2 ++ .../src/service-naming/schemas/v0/storage.js | 2 ++ .../src/service-naming/schemas/v0/web.js | 2 
++ .../src/service-naming/schemas/v1/graphql.js | 2 ++ .../src/service-naming/schemas/v1/index.js | 2 ++ .../src/service-naming/schemas/v1/messaging.js | 2 ++ .../src/service-naming/schemas/v1/serverless.js | 2 ++ .../src/service-naming/schemas/v1/storage.js | 2 ++ .../src/service-naming/schemas/v1/web.js | 2 ++ packages/dd-trace/src/span_stats.js | 2 ++ packages/dd-trace/src/telemetry/send-data.js | 2 ++ packages/dd-trace/test/appsec/graphql.spec.js | 2 ++ .../analyzers/resources/random-functions.js | 2 ++ .../analyzers/weak-randomness-analyzer.spec.js | 2 +- .../dd-trace/test/appsec/iast/index.spec.js | 2 ++ .../dd-trace/test/appsec/iast/path-line.spec.js | 2 ++ .../resources/propagationFunctions.js | 2 ++ .../appsec/iast/vulnerability-reporter.spec.js | 2 ++ .../test/appsec/next/pages-dir/server.js | 2 -- packages/dd-trace/test/custom-metrics-app.js | 1 + .../data_streams_checkpointer.spec.js | 2 ++ .../snapshot/target-code/max-collection-size.js | 2 +- .../target-code/max-field-count-scopes.js | 2 +- .../snapshot/target-code/max-field-count.js | 2 +- packages/dd-trace/test/git_properties.spec.js | 2 ++ .../dd-trace/test/lambda/fixtures/handler.js | 1 + .../dd-trace/test/payload-tagging/index.spec.js | 2 ++ packages/dd-trace/test/payload_tagging.spec.js | 2 ++ packages/dd-trace/test/ritm-tests/module-b.js | 2 ++ .../dd-trace/test/service-naming/schema.spec.js | 2 ++ .../dd-trace/test/setup/services/oracledb.js | 2 ++ register.js | 2 ++ scripts/check-proposal-labels.js | 2 ++ scripts/flakiness.mjs | 2 -- scripts/get-chrome-driver-download-url.js | 2 ++ scripts/verify-ci-config.js | 1 + 386 files changed, 735 insertions(+), 47 deletions(-) diff --git a/benchmark/sirun/appsec-iast/insecure-bank.js b/benchmark/sirun/appsec-iast/insecure-bank.js index d07ab7c762f..0bfef252aa7 100644 --- a/benchmark/sirun/appsec-iast/insecure-bank.js +++ b/benchmark/sirun/appsec-iast/insecure-bank.js @@ -1,3 +1,5 @@ +'use strict' + const http = require('http') const app = require('/opt/insecure-bank-js/app') // eslint-disable-line import/no-absolute-path diff --git a/benchmark/sirun/appsec/insecure-bank.js b/benchmark/sirun/appsec/insecure-bank.js index d07ab7c762f..0bfef252aa7 100644 --- a/benchmark/sirun/appsec/insecure-bank.js +++ b/benchmark/sirun/appsec/insecure-bank.js @@ -1,3 +1,5 @@ +'use strict' + const http = require('http') const app = require('/opt/insecure-bank-js/app') // eslint-disable-line import/no-absolute-path diff --git a/benchmark/sirun/encoding/index.js b/benchmark/sirun/encoding/index.js index fcbc8d470f8..9c8056e9723 100644 --- a/benchmark/sirun/encoding/index.js +++ b/benchmark/sirun/encoding/index.js @@ -1,3 +1,5 @@ +'use strict' + const { ENCODER_VERSION } = process.env diff --git a/benchmark/sirun/gc.js b/benchmark/sirun/gc.js index 0386c5e1b74..b3e74e81c6e 100644 --- a/benchmark/sirun/gc.js +++ b/benchmark/sirun/gc.js @@ -1,3 +1,5 @@ +'use strict' + const { createHistogram, PerformanceObserver } = require('perf_hooks') if (createHistogram) { const StatsD = require('./statsd') diff --git a/benchmark/sirun/means.js b/benchmark/sirun/means.js index e7233965b28..6ccdf9d49dc 100644 --- a/benchmark/sirun/means.js +++ b/benchmark/sirun/means.js @@ -1,3 +1,5 @@ +'use strict' + /* eslint-disable no-console */ const chunks = [] diff --git a/benchmark/sirun/scope/index.js b/benchmark/sirun/scope/index.js index 20c2f6da2f6..026f2a6ae76 100644 --- a/benchmark/sirun/scope/index.js +++ b/benchmark/sirun/scope/index.js @@ -1,3 +1,5 @@ +'use strict' + const { DD_TRACE_SCOPE, COUNT diff --git 
a/benchmark/sirun/shimmer-runtime/index.js b/benchmark/sirun/shimmer-runtime/index.js index aff01fc2e17..3edb0dac16e 100644 --- a/benchmark/sirun/shimmer-runtime/index.js +++ b/benchmark/sirun/shimmer-runtime/index.js @@ -1,3 +1,5 @@ +'use strict' + /* eslint-disable require-await */ const shimmer = require('../../../packages/datadog-shimmer') diff --git a/benchmark/sirun/shimmer-startup/index.js b/benchmark/sirun/shimmer-startup/index.js index d57168af36a..b33c49420ca 100644 --- a/benchmark/sirun/shimmer-startup/index.js +++ b/benchmark/sirun/shimmer-startup/index.js @@ -1,3 +1,5 @@ +'use strict' + /* eslint-disable require-await */ const shimmer = require('../../../packages/datadog-shimmer') diff --git a/benchmark/sirun/spans/spans.js b/benchmark/sirun/spans/spans.js index e12f187bd1d..e931339012e 100644 --- a/benchmark/sirun/spans/spans.js +++ b/benchmark/sirun/spans/spans.js @@ -1,3 +1,5 @@ +'use strict' + const tracer = require('../../..').init() tracer._tracer._processor.process = function process (span) { diff --git a/benchmark/sirun/strip-unwanted-results.js b/benchmark/sirun/strip-unwanted-results.js index fe22d2d2628..9f26a05c8c6 100755 --- a/benchmark/sirun/strip-unwanted-results.js +++ b/benchmark/sirun/strip-unwanted-results.js @@ -1,4 +1,5 @@ #!/usr/bin/env node +'use strict' const fs = require('fs') const path = require('path') diff --git a/ci/cypress/after-run.js b/ci/cypress/after-run.js index 8fec98e3d1f..7607066d253 100644 --- a/ci/cypress/after-run.js +++ b/ci/cypress/after-run.js @@ -1 +1,3 @@ +'use strict' + module.exports = require('../../packages/datadog-plugin-cypress/src/after-run') diff --git a/ci/cypress/after-spec.js b/ci/cypress/after-spec.js index 9c3ae9da74d..c7dbe88df4d 100644 --- a/ci/cypress/after-spec.js +++ b/ci/cypress/after-spec.js @@ -1 +1,3 @@ +'use strict' + module.exports = require('../../packages/datadog-plugin-cypress/src/after-spec') diff --git a/ci/cypress/plugin.js b/ci/cypress/plugin.js index a53439910ab..4b1da7d266f 100644 --- a/ci/cypress/plugin.js +++ b/ci/cypress/plugin.js @@ -1,3 +1,5 @@ +'use strict' + const { NODE_MAJOR } = require('../../version') // These polyfills are here because cypress@6.7.0, which we still support for v5, runs its plugin code diff --git a/ci/cypress/polyfills.js b/ci/cypress/polyfills.js index a22f6a52ad5..1c3d99e9253 100644 --- a/ci/cypress/polyfills.js +++ b/ci/cypress/polyfills.js @@ -1,3 +1,5 @@ +'use strict' + if (!Object.hasOwn) { Object.defineProperty(Object, 'hasOwn', { // eslint-disable-next-line prefer-object-has-own diff --git a/ci/cypress/support.js b/ci/cypress/support.js index fb0b2f711bb..822476ae3a1 100644 --- a/ci/cypress/support.js +++ b/ci/cypress/support.js @@ -1 +1,3 @@ +'use strict' + require('../../packages/datadog-plugin-cypress/src/support') diff --git a/ci/init.js b/ci/init.js index 5928c826ae0..54257cb8922 100644 --- a/ci/init.js +++ b/ci/init.js @@ -1,3 +1,5 @@ +'use strict' + /* eslint-disable no-console */ const tracer = require('../packages/dd-trace') const { isTrue, isFalse } = require('../packages/dd-trace/src/util') diff --git a/eslint.config.mjs b/eslint.config.mjs index 622745fdf11..9c90e9a1275 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -356,7 +356,8 @@ export default [ 'no-console': 'error', 'no-prototype-builtins': 'off', // Override (turned on by @eslint/js/recommended) 'no-var': 'error', - 'require-await': 'error' + 'require-await': 'error', + strict: 'error' } }, { diff --git a/integration-tests/appsec/esm-app/custom-noop-hooks.mjs 
b/integration-tests/appsec/esm-app/custom-noop-hooks.mjs index 9641f541057..0f367ba4c01 100644 --- a/integration-tests/appsec/esm-app/custom-noop-hooks.mjs +++ b/integration-tests/appsec/esm-app/custom-noop-hooks.mjs @@ -1,5 +1,3 @@ -'use strict' - function dummyOperation (a) { return a + 'should have ' + 'dummy operation to be rewritten' + ' without crashing' } diff --git a/integration-tests/appsec/esm-app/index.mjs b/integration-tests/appsec/esm-app/index.mjs index 4fa4e23570e..504e71d1c20 100644 --- a/integration-tests/appsec/esm-app/index.mjs +++ b/integration-tests/appsec/esm-app/index.mjs @@ -1,4 +1,3 @@ -'use strict' /* eslint n/no-unsupported-features/node-builtins: ['error', { ignores: ['module.register'] }] */ import childProcess from 'node:child_process' diff --git a/integration-tests/appsec/esm-app/worker-dep.mjs b/integration-tests/appsec/esm-app/worker-dep.mjs index 5b967fff099..eed83bdfc5a 100644 --- a/integration-tests/appsec/esm-app/worker-dep.mjs +++ b/integration-tests/appsec/esm-app/worker-dep.mjs @@ -1,5 +1,3 @@ -'use strict' - function dummyOperation (a) { return a + 'dummy operation with concat in worker-dep' } diff --git a/integration-tests/appsec/esm-security-controls/index.mjs b/integration-tests/appsec/esm-security-controls/index.mjs index 382ea10985b..ce944602823 100644 --- a/integration-tests/appsec/esm-security-controls/index.mjs +++ b/integration-tests/appsec/esm-security-controls/index.mjs @@ -1,5 +1,3 @@ -'use strict' - import childProcess from 'node:child_process' import express from 'express' import { sanitize } from './sanitizer.mjs' diff --git a/integration-tests/appsec/esm-security-controls/sanitizer-default.mjs b/integration-tests/appsec/esm-security-controls/sanitizer-default.mjs index 6e580f450c5..eac6c9d94af 100644 --- a/integration-tests/appsec/esm-security-controls/sanitizer-default.mjs +++ b/integration-tests/appsec/esm-security-controls/sanitizer-default.mjs @@ -1,5 +1,3 @@ -'use strict' - function sanitizeDefault (input) { return input } diff --git a/integration-tests/appsec/esm-security-controls/sanitizer.mjs b/integration-tests/appsec/esm-security-controls/sanitizer.mjs index 4529126061d..6cc1047555a 100644 --- a/integration-tests/appsec/esm-security-controls/sanitizer.mjs +++ b/integration-tests/appsec/esm-security-controls/sanitizer.mjs @@ -1,5 +1,3 @@ -'use strict' - export function sanitize (input) { return input } diff --git a/integration-tests/appsec/esm-security-controls/validator.mjs b/integration-tests/appsec/esm-security-controls/validator.mjs index 3542aa8d17c..214e5c2c24c 100644 --- a/integration-tests/appsec/esm-security-controls/validator.mjs +++ b/integration-tests/appsec/esm-security-controls/validator.mjs @@ -1,5 +1,3 @@ -'use strict' - export function validate (input) { return true } diff --git a/integration-tests/ci-visibility-intake.js b/integration-tests/ci-visibility-intake.js index a5ab4e5c394..0f84d33181e 100644 --- a/integration-tests/ci-visibility-intake.js +++ b/integration-tests/ci-visibility-intake.js @@ -1,3 +1,5 @@ +'use strict' + const express = require('express') const bodyParser = require('body-parser') const msgpack = require('@msgpack/msgpack') diff --git a/integration-tests/ci-visibility/automatic-log-submission-cucumber/support/logger.js b/integration-tests/ci-visibility/automatic-log-submission-cucumber/support/logger.js index 5480f1ee574..2490746e554 100644 --- a/integration-tests/ci-visibility/automatic-log-submission-cucumber/support/logger.js +++ 
b/integration-tests/ci-visibility/automatic-log-submission-cucumber/support/logger.js @@ -1,3 +1,5 @@ +'use strict' + const { createLogger, format, transports } = require('winston') module.exports = createLogger({ diff --git a/integration-tests/ci-visibility/automatic-log-submission-cucumber/support/steps.js b/integration-tests/ci-visibility/automatic-log-submission-cucumber/support/steps.js index 2d1bdb4e906..90f3d25aa6e 100644 --- a/integration-tests/ci-visibility/automatic-log-submission-cucumber/support/steps.js +++ b/integration-tests/ci-visibility/automatic-log-submission-cucumber/support/steps.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') const { When, Then } = require('@cucumber/cucumber') diff --git a/integration-tests/ci-visibility/automatic-log-submission-cucumber/support/sum.js b/integration-tests/ci-visibility/automatic-log-submission-cucumber/support/sum.js index cce61142972..f9836281a39 100644 --- a/integration-tests/ci-visibility/automatic-log-submission-cucumber/support/sum.js +++ b/integration-tests/ci-visibility/automatic-log-submission-cucumber/support/sum.js @@ -1,3 +1,5 @@ +'use strict' + const logger = require('./logger') module.exports = function (a, b) { diff --git a/integration-tests/ci-visibility/automatic-log-submission-playwright/automatic-log-submission-test.js b/integration-tests/ci-visibility/automatic-log-submission-playwright/automatic-log-submission-test.js index 9152122c531..3f9cdcf15ab 100644 --- a/integration-tests/ci-visibility/automatic-log-submission-playwright/automatic-log-submission-test.js +++ b/integration-tests/ci-visibility/automatic-log-submission-playwright/automatic-log-submission-test.js @@ -1,3 +1,5 @@ +'use strict' + const { test, expect } = require('@playwright/test') const logger = require('./logger') const sum = require('./sum') diff --git a/integration-tests/ci-visibility/automatic-log-submission-playwright/logger.js b/integration-tests/ci-visibility/automatic-log-submission-playwright/logger.js index 5480f1ee574..2490746e554 100644 --- a/integration-tests/ci-visibility/automatic-log-submission-playwright/logger.js +++ b/integration-tests/ci-visibility/automatic-log-submission-playwright/logger.js @@ -1,3 +1,5 @@ +'use strict' + const { createLogger, format, transports } = require('winston') module.exports = createLogger({ diff --git a/integration-tests/ci-visibility/automatic-log-submission-playwright/sum.js b/integration-tests/ci-visibility/automatic-log-submission-playwright/sum.js index cce61142972..f9836281a39 100644 --- a/integration-tests/ci-visibility/automatic-log-submission-playwright/sum.js +++ b/integration-tests/ci-visibility/automatic-log-submission-playwright/sum.js @@ -1,3 +1,5 @@ +'use strict' + const logger = require('./logger') module.exports = function (a, b) { diff --git a/integration-tests/ci-visibility/automatic-log-submission/automatic-log-submission-test.js b/integration-tests/ci-visibility/automatic-log-submission/automatic-log-submission-test.js index cfc60b8d3b0..6a557c46119 100644 --- a/integration-tests/ci-visibility/automatic-log-submission/automatic-log-submission-test.js +++ b/integration-tests/ci-visibility/automatic-log-submission/automatic-log-submission-test.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') const logger = require('./logger') diff --git a/integration-tests/ci-visibility/automatic-log-submission/config-jest.js b/integration-tests/ci-visibility/automatic-log-submission/config-jest.js index 56afa0d36db..453408298ea 100644 --- 
a/integration-tests/ci-visibility/automatic-log-submission/config-jest.js +++ b/integration-tests/ci-visibility/automatic-log-submission/config-jest.js @@ -1,3 +1,5 @@ +'use strict' + module.exports = { projects: [], testPathIgnorePatterns: ['/node_modules/'], diff --git a/integration-tests/ci-visibility/automatic-log-submission/logger.js b/integration-tests/ci-visibility/automatic-log-submission/logger.js index 5480f1ee574..2490746e554 100644 --- a/integration-tests/ci-visibility/automatic-log-submission/logger.js +++ b/integration-tests/ci-visibility/automatic-log-submission/logger.js @@ -1,3 +1,5 @@ +'use strict' + const { createLogger, format, transports } = require('winston') module.exports = createLogger({ diff --git a/integration-tests/ci-visibility/automatic-log-submission/sum.js b/integration-tests/ci-visibility/automatic-log-submission/sum.js index cce61142972..f9836281a39 100644 --- a/integration-tests/ci-visibility/automatic-log-submission/sum.js +++ b/integration-tests/ci-visibility/automatic-log-submission/sum.js @@ -1,3 +1,5 @@ +'use strict' + const logger = require('./logger') module.exports = function (a, b) { diff --git a/integration-tests/ci-visibility/dynamic-instrumentation/dependency.js b/integration-tests/ci-visibility/dynamic-instrumentation/dependency.js index b53ebf22f97..5a564174560 100644 --- a/integration-tests/ci-visibility/dynamic-instrumentation/dependency.js +++ b/integration-tests/ci-visibility/dynamic-instrumentation/dependency.js @@ -1,3 +1,5 @@ +'use strict' + module.exports = function (a, b) { const localVariable = 2 if (a > 10) { diff --git a/integration-tests/ci-visibility/dynamic-instrumentation/test-hit-breakpoint.js b/integration-tests/ci-visibility/dynamic-instrumentation/test-hit-breakpoint.js index 7b317b7f249..ec6a043a1b9 100644 --- a/integration-tests/ci-visibility/dynamic-instrumentation/test-hit-breakpoint.js +++ b/integration-tests/ci-visibility/dynamic-instrumentation/test-hit-breakpoint.js @@ -1,3 +1,5 @@ +'use strict' + const sum = require('./dependency') const { expect } = require('chai') diff --git a/integration-tests/ci-visibility/dynamic-instrumentation/test-not-hit-breakpoint.js b/integration-tests/ci-visibility/dynamic-instrumentation/test-not-hit-breakpoint.js index ff652d88673..04591f0050b 100644 --- a/integration-tests/ci-visibility/dynamic-instrumentation/test-not-hit-breakpoint.js +++ b/integration-tests/ci-visibility/dynamic-instrumentation/test-not-hit-breakpoint.js @@ -1,3 +1,5 @@ +'use strict' + const sum = require('./dependency') const { expect } = require('chai') diff --git a/integration-tests/ci-visibility/features-di/support/steps.js b/integration-tests/ci-visibility/features-di/support/steps.js index 00880f83467..a488786c0b4 100644 --- a/integration-tests/ci-visibility/features-di/support/steps.js +++ b/integration-tests/ci-visibility/features-di/support/steps.js @@ -1,3 +1,5 @@ +'use strict' + const assert = require('assert') const { When, Then } = require('@cucumber/cucumber') const sum = require('./sum') diff --git a/integration-tests/ci-visibility/features-di/support/sum.js b/integration-tests/ci-visibility/features-di/support/sum.js index cb1d7adb951..9c6536232cb 100644 --- a/integration-tests/ci-visibility/features-di/support/sum.js +++ b/integration-tests/ci-visibility/features-di/support/sum.js @@ -1,3 +1,5 @@ +'use strict' + function funSum (a, b) { const localVariable = 2 if (a > 10) { diff --git a/integration-tests/ci-visibility/features-flaky/support/steps.js 
b/integration-tests/ci-visibility/features-flaky/support/steps.js index 2e4a335cfb7..0f93352c1e6 100644 --- a/integration-tests/ci-visibility/features-flaky/support/steps.js +++ b/integration-tests/ci-visibility/features-flaky/support/steps.js @@ -1,3 +1,5 @@ +'use strict' + const assert = require('assert') const { When, Then } = require('@cucumber/cucumber') diff --git a/integration-tests/ci-visibility/features-impacted-test/support/steps.js b/integration-tests/ci-visibility/features-impacted-test/support/steps.js index 182284616c6..1c5acdda063 100644 --- a/integration-tests/ci-visibility/features-impacted-test/support/steps.js +++ b/integration-tests/ci-visibility/features-impacted-test/support/steps.js @@ -1,3 +1,5 @@ +'use strict' + const assert = require('assert') const { When, Then } = require('@cucumber/cucumber') diff --git a/integration-tests/ci-visibility/features-retry/support/steps.js b/integration-tests/ci-visibility/features-retry/support/steps.js index 50da213fb75..04331c1acdd 100644 --- a/integration-tests/ci-visibility/features-retry/support/steps.js +++ b/integration-tests/ci-visibility/features-retry/support/steps.js @@ -1,3 +1,5 @@ +'use strict' + const assert = require('assert') const { When, Then } = require('@cucumber/cucumber') diff --git a/integration-tests/ci-visibility/features-selenium/support/steps.js b/integration-tests/ci-visibility/features-selenium/support/steps.js index 307c947189f..2e2052cdc72 100644 --- a/integration-tests/ci-visibility/features-selenium/support/steps.js +++ b/integration-tests/ci-visibility/features-selenium/support/steps.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') const { By, Builder } = require('selenium-webdriver') const chrome = require('selenium-webdriver/chrome') diff --git a/integration-tests/ci-visibility/features-test-management/support/steps.js b/integration-tests/ci-visibility/features-test-management/support/steps.js index 67a2ed51361..0b80cddcf8a 100644 --- a/integration-tests/ci-visibility/features-test-management/support/steps.js +++ b/integration-tests/ci-visibility/features-test-management/support/steps.js @@ -1,3 +1,5 @@ +'use strict' + const assert = require('assert') const { When, Then } = require('@cucumber/cucumber') diff --git a/integration-tests/ci-visibility/features/support/steps.js b/integration-tests/ci-visibility/features/support/steps.js index 23320b6ed46..889b88632b5 100644 --- a/integration-tests/ci-visibility/features/support/steps.js +++ b/integration-tests/ci-visibility/features/support/steps.js @@ -1,3 +1,5 @@ +'use strict' + const assert = require('assert') const { When, Then, Before, After } = require('@cucumber/cucumber') const tracer = require('dd-trace') diff --git a/integration-tests/ci-visibility/jest-flaky/flaky-fails.js b/integration-tests/ci-visibility/jest-flaky/flaky-fails.js index 2717720f364..63bc4640188 100644 --- a/integration-tests/ci-visibility/jest-flaky/flaky-fails.js +++ b/integration-tests/ci-visibility/jest-flaky/flaky-fails.js @@ -1,3 +1,5 @@ +'use strict' + describe('test-flaky-test-retries', () => { it('can retry failed tests', () => { expect(1).toEqual(2) diff --git a/integration-tests/ci-visibility/jest-flaky/flaky-passes.js b/integration-tests/ci-visibility/jest-flaky/flaky-passes.js index 31e43b9a78f..e00c0c61762 100644 --- a/integration-tests/ci-visibility/jest-flaky/flaky-passes.js +++ b/integration-tests/ci-visibility/jest-flaky/flaky-passes.js @@ -1,3 +1,5 @@ +'use strict' + let counter = 0 describe('test-flaky-test-retries', () => { diff 
--git a/integration-tests/ci-visibility/jest/failing-test.js b/integration-tests/ci-visibility/jest/failing-test.js index 9acd66538de..9a9201fe1f7 100644 --- a/integration-tests/ci-visibility/jest/failing-test.js +++ b/integration-tests/ci-visibility/jest/failing-test.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('failing', () => { diff --git a/integration-tests/ci-visibility/jest/mocked-test.js b/integration-tests/ci-visibility/jest/mocked-test.js index f320779e33a..95af183a1b4 100644 --- a/integration-tests/ci-visibility/jest/mocked-test.js +++ b/integration-tests/ci-visibility/jest/mocked-test.js @@ -1,3 +1,5 @@ +'use strict' + // eslint-disable-next-line no-undef jest.mock('../test/sum.js') diff --git a/integration-tests/ci-visibility/jestEnvironmentBadInit.js b/integration-tests/ci-visibility/jestEnvironmentBadInit.js index 9915e4b7785..b442a905852 100644 --- a/integration-tests/ci-visibility/jestEnvironmentBadInit.js +++ b/integration-tests/ci-visibility/jestEnvironmentBadInit.js @@ -1,3 +1,5 @@ +'use strict' + require('dd-trace').init({ service: 'dd-trace-bad-init' }) diff --git a/integration-tests/ci-visibility/office-addin-mock/dependency.js b/integration-tests/ci-visibility/office-addin-mock/dependency.js index 363131a422a..227a43ededd 100644 --- a/integration-tests/ci-visibility/office-addin-mock/dependency.js +++ b/integration-tests/ci-visibility/office-addin-mock/dependency.js @@ -1,3 +1,5 @@ +'use strict' + require('office-addin-mock') function sum (a, b) { diff --git a/integration-tests/ci-visibility/office-addin-mock/test.js b/integration-tests/ci-visibility/office-addin-mock/test.js index 50a3b6c2e28..5d40e2fad6f 100644 --- a/integration-tests/ci-visibility/office-addin-mock/test.js +++ b/integration-tests/ci-visibility/office-addin-mock/test.js @@ -1,3 +1,5 @@ +'use strict' + const sum = require('./dependency') const { expect } = require('chai') diff --git a/integration-tests/ci-visibility/playwright-tests-active-test-span/active-test-span-custom-span-test.js b/integration-tests/ci-visibility/playwright-tests-active-test-span/active-test-span-custom-span-test.js index b2e667c0bf4..9b81c8a8a08 100644 --- a/integration-tests/ci-visibility/playwright-tests-active-test-span/active-test-span-custom-span-test.js +++ b/integration-tests/ci-visibility/playwright-tests-active-test-span/active-test-span-custom-span-test.js @@ -1,3 +1,5 @@ +'use strict' + const { test, expect } = require('@playwright/test') const tracer = require('dd-trace') diff --git a/integration-tests/ci-visibility/playwright-tests-active-test-span/active-test-span-tags-test.js b/integration-tests/ci-visibility/playwright-tests-active-test-span/active-test-span-tags-test.js index 0a8d0e57469..e7424277e39 100644 --- a/integration-tests/ci-visibility/playwright-tests-active-test-span/active-test-span-tags-test.js +++ b/integration-tests/ci-visibility/playwright-tests-active-test-span/active-test-span-tags-test.js @@ -1,3 +1,5 @@ +'use strict' + const { test, expect } = require('@playwright/test') const tracer = require('dd-trace') diff --git a/integration-tests/ci-visibility/playwright-tests-automatic-retry/automatic-retry-test.js b/integration-tests/ci-visibility/playwright-tests-automatic-retry/automatic-retry-test.js index ac0cc8e33c1..70e7a48255b 100644 --- a/integration-tests/ci-visibility/playwright-tests-automatic-retry/automatic-retry-test.js +++ b/integration-tests/ci-visibility/playwright-tests-automatic-retry/automatic-retry-test.js @@ -1,3 +1,5 @@ +'use strict' + const 
{ test, expect } = require('@playwright/test') test.beforeEach(async ({ page }) => { diff --git a/integration-tests/ci-visibility/playwright-tests-error/before-all-timeout-test.js b/integration-tests/ci-visibility/playwright-tests-error/before-all-timeout-test.js index 9736f2b801d..6c3265ca896 100644 --- a/integration-tests/ci-visibility/playwright-tests-error/before-all-timeout-test.js +++ b/integration-tests/ci-visibility/playwright-tests-error/before-all-timeout-test.js @@ -1,3 +1,5 @@ +'use strict' + const { test, expect } = require('@playwright/test') test.beforeEach(async ({ page }) => { diff --git a/integration-tests/ci-visibility/playwright-tests-exit-code/exit-code-test.js b/integration-tests/ci-visibility/playwright-tests-exit-code/exit-code-test.js index a6a4f191af4..6888fce622e 100644 --- a/integration-tests/ci-visibility/playwright-tests-exit-code/exit-code-test.js +++ b/integration-tests/ci-visibility/playwright-tests-exit-code/exit-code-test.js @@ -1,3 +1,5 @@ +'use strict' + const { test, expect } = require('@playwright/test') // eslint-disable-next-line no-unused-vars const dummy = require('dummy') // This should not exist, so should throw an error diff --git a/integration-tests/ci-visibility/playwright-tests-impacted-tests/impacted-test.js b/integration-tests/ci-visibility/playwright-tests-impacted-tests/impacted-test.js index bb8710de53c..c80a5ba643c 100644 --- a/integration-tests/ci-visibility/playwright-tests-impacted-tests/impacted-test.js +++ b/integration-tests/ci-visibility/playwright-tests-impacted-tests/impacted-test.js @@ -1,3 +1,5 @@ +'use strict' + const { test, expect } = require('@playwright/test') test.beforeEach(async ({ page }) => { diff --git a/integration-tests/ci-visibility/playwright-tests-max-failures/failing-test-and-another-test.js b/integration-tests/ci-visibility/playwright-tests-max-failures/failing-test-and-another-test.js index 317b97f4175..fdff3ed9bbc 100644 --- a/integration-tests/ci-visibility/playwright-tests-max-failures/failing-test-and-another-test.js +++ b/integration-tests/ci-visibility/playwright-tests-max-failures/failing-test-and-another-test.js @@ -1,3 +1,5 @@ +'use strict' + const { test, expect } = require('@playwright/test') test.beforeEach(async ({ page }) => { diff --git a/integration-tests/ci-visibility/playwright-tests-rum/active-test-span-rum-test.js b/integration-tests/ci-visibility/playwright-tests-rum/active-test-span-rum-test.js index 67d7250eb83..85bb5d11767 100644 --- a/integration-tests/ci-visibility/playwright-tests-rum/active-test-span-rum-test.js +++ b/integration-tests/ci-visibility/playwright-tests-rum/active-test-span-rum-test.js @@ -1,3 +1,5 @@ +'use strict' + const { test, expect } = require('@playwright/test') test.beforeEach(async ({ page }) => { diff --git a/integration-tests/ci-visibility/playwright-tests-test-capabilities/passing-test.js b/integration-tests/ci-visibility/playwright-tests-test-capabilities/passing-test.js index 736db3aeb1c..d9e88e19ecf 100644 --- a/integration-tests/ci-visibility/playwright-tests-test-capabilities/passing-test.js +++ b/integration-tests/ci-visibility/playwright-tests-test-capabilities/passing-test.js @@ -1,3 +1,5 @@ +'use strict' + const { test, expect } = require('@playwright/test') test.beforeEach(async ({ page }) => { diff --git a/integration-tests/ci-visibility/playwright-tests-test-management/attempt-to-fix-test.js b/integration-tests/ci-visibility/playwright-tests-test-management/attempt-to-fix-test.js index f235d10f549..a5b1148573b 100644 --- 
a/integration-tests/ci-visibility/playwright-tests-test-management/attempt-to-fix-test.js +++ b/integration-tests/ci-visibility/playwright-tests-test-management/attempt-to-fix-test.js @@ -1,3 +1,5 @@ +'use strict' + const { test, expect } = require('@playwright/test') test.beforeEach(async ({ page }) => { diff --git a/integration-tests/ci-visibility/playwright-tests-test-management/disabled-test.js b/integration-tests/ci-visibility/playwright-tests-test-management/disabled-test.js index f4dde8335a2..46409b33b7c 100644 --- a/integration-tests/ci-visibility/playwright-tests-test-management/disabled-test.js +++ b/integration-tests/ci-visibility/playwright-tests-test-management/disabled-test.js @@ -1,3 +1,5 @@ +'use strict' + const { test, expect } = require('@playwright/test') test.beforeEach(async ({ page }) => { diff --git a/integration-tests/ci-visibility/playwright-tests-test-management/quarantine-test.js b/integration-tests/ci-visibility/playwright-tests-test-management/quarantine-test.js index 69287e98ecb..4a4da88e205 100644 --- a/integration-tests/ci-visibility/playwright-tests-test-management/quarantine-test.js +++ b/integration-tests/ci-visibility/playwright-tests-test-management/quarantine-test.js @@ -1,3 +1,5 @@ +'use strict' + const { test, expect } = require('@playwright/test') test.beforeEach(async ({ page }) => { diff --git a/integration-tests/ci-visibility/playwright-tests/landing-page-test.js b/integration-tests/ci-visibility/playwright-tests/landing-page-test.js index 5115ee7939f..b4b342513ce 100644 --- a/integration-tests/ci-visibility/playwright-tests/landing-page-test.js +++ b/integration-tests/ci-visibility/playwright-tests/landing-page-test.js @@ -1,3 +1,5 @@ +'use strict' + const { test, expect } = require('@playwright/test') const tracer = require('dd-trace') diff --git a/integration-tests/ci-visibility/playwright-tests/skipped-suite-test.js b/integration-tests/ci-visibility/playwright-tests/skipped-suite-test.js index 6865cb4cd39..d15d798f60c 100644 --- a/integration-tests/ci-visibility/playwright-tests/skipped-suite-test.js +++ b/integration-tests/ci-visibility/playwright-tests/skipped-suite-test.js @@ -1,3 +1,5 @@ +'use strict' + const { test, expect } = require('@playwright/test') test.beforeEach(async ({ page }) => { diff --git a/integration-tests/ci-visibility/playwright-tests/todo-list-page-test.js b/integration-tests/ci-visibility/playwright-tests/todo-list-page-test.js index 6a002cab86c..e53fadb416e 100644 --- a/integration-tests/ci-visibility/playwright-tests/todo-list-page-test.js +++ b/integration-tests/ci-visibility/playwright-tests/todo-list-page-test.js @@ -1,3 +1,5 @@ +'use strict' + const { test, expect } = require('@playwright/test') test.beforeEach(async ({ page }) => { diff --git a/integration-tests/ci-visibility/run-jest-bad-init.js b/integration-tests/ci-visibility/run-jest-bad-init.js index 8917ef833bd..f9251aca38e 100644 --- a/integration-tests/ci-visibility/run-jest-bad-init.js +++ b/integration-tests/ci-visibility/run-jest-bad-init.js @@ -1,3 +1,5 @@ +'use strict' + const jest = require('jest') const options = { diff --git a/integration-tests/ci-visibility/run-jest.js b/integration-tests/ci-visibility/run-jest.js index 9e644bc374a..a7f8054b720 100644 --- a/integration-tests/ci-visibility/run-jest.js +++ b/integration-tests/ci-visibility/run-jest.js @@ -1,3 +1,5 @@ +'use strict' + const jest = require('jest') const options = { diff --git a/integration-tests/ci-visibility/run-mocha.js b/integration-tests/ci-visibility/run-mocha.js index 
19d009ca9a2..37111d3c3f4 100644 --- a/integration-tests/ci-visibility/run-mocha.js +++ b/integration-tests/ci-visibility/run-mocha.js @@ -1,3 +1,5 @@ +'use strict' + const Mocha = require('mocha') const mocha = new Mocha({ diff --git a/integration-tests/ci-visibility/run-workerpool.js b/integration-tests/ci-visibility/run-workerpool.js index 4ab60a1fc0c..0c00fb66050 100644 --- a/integration-tests/ci-visibility/run-workerpool.js +++ b/integration-tests/ci-visibility/run-workerpool.js @@ -1,3 +1,5 @@ +'use strict' + const workerpool = require('workerpool') const pool = workerpool.pool({ workerType: 'process' }) diff --git a/integration-tests/ci-visibility/sharding-test/sharding-test-1.js b/integration-tests/ci-visibility/sharding-test/sharding-test-1.js index a336f30caca..0a26a920193 100644 --- a/integration-tests/ci-visibility/sharding-test/sharding-test-1.js +++ b/integration-tests/ci-visibility/sharding-test/sharding-test-1.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('sharding test 1', () => { diff --git a/integration-tests/ci-visibility/sharding-test/sharding-test-2.js b/integration-tests/ci-visibility/sharding-test/sharding-test-2.js index 670ad7dc6ba..2381beb5939 100644 --- a/integration-tests/ci-visibility/sharding-test/sharding-test-2.js +++ b/integration-tests/ci-visibility/sharding-test/sharding-test-2.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('sharding test 2', () => { diff --git a/integration-tests/ci-visibility/sharding-test/sharding-test-3.js b/integration-tests/ci-visibility/sharding-test/sharding-test-3.js index ee67ff25c8c..60757fa3df9 100644 --- a/integration-tests/ci-visibility/sharding-test/sharding-test-3.js +++ b/integration-tests/ci-visibility/sharding-test/sharding-test-3.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('sharding test 3', () => { diff --git a/integration-tests/ci-visibility/sharding-test/sharding-test-4.js b/integration-tests/ci-visibility/sharding-test/sharding-test-4.js index 4fd0add71d4..90330b62a74 100644 --- a/integration-tests/ci-visibility/sharding-test/sharding-test-4.js +++ b/integration-tests/ci-visibility/sharding-test/sharding-test-4.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('sharding test 4', () => { diff --git a/integration-tests/ci-visibility/sharding-test/sharding-test-5.js b/integration-tests/ci-visibility/sharding-test/sharding-test-5.js index a13a2354eae..4b56d06cd79 100644 --- a/integration-tests/ci-visibility/sharding-test/sharding-test-5.js +++ b/integration-tests/ci-visibility/sharding-test/sharding-test-5.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('sharding test 5', () => { diff --git a/integration-tests/ci-visibility/subproject/cypress.config.js b/integration-tests/ci-visibility/subproject/cypress.config.js index 9a786e4ef75..7b9383ea40d 100644 --- a/integration-tests/ci-visibility/subproject/cypress.config.js +++ b/integration-tests/ci-visibility/subproject/cypress.config.js @@ -1,3 +1,5 @@ +'use strict' + module.exports = { defaultCommandTimeout: 100, e2e: { diff --git a/integration-tests/ci-visibility/subproject/cypress/plugins-old/index.js b/integration-tests/ci-visibility/subproject/cypress/plugins-old/index.js index f80695694a9..2f6ed52ee52 100644 --- a/integration-tests/ci-visibility/subproject/cypress/plugins-old/index.js +++ b/integration-tests/ci-visibility/subproject/cypress/plugins-old/index.js @@ -1 +1,3 @@ +'use strict' + module.exports = 
require('dd-trace/ci/cypress/plugin') diff --git a/integration-tests/ci-visibility/subproject/cypress/support/e2e.js b/integration-tests/ci-visibility/subproject/cypress/support/e2e.js index 26fdad7588a..78bc2b9dc57 100644 --- a/integration-tests/ci-visibility/subproject/cypress/support/e2e.js +++ b/integration-tests/ci-visibility/subproject/cypress/support/e2e.js @@ -1 +1,3 @@ +'use strict' + require('dd-trace/ci/cypress/support') diff --git a/integration-tests/ci-visibility/subproject/dependency.js b/integration-tests/ci-visibility/subproject/dependency.js index 2012896b44c..86d4a1c94ef 100644 --- a/integration-tests/ci-visibility/subproject/dependency.js +++ b/integration-tests/ci-visibility/subproject/dependency.js @@ -1,3 +1,5 @@ +'use strict' + module.exports = function (a, b) { return a + b } diff --git a/integration-tests/ci-visibility/subproject/features/support/steps.js b/integration-tests/ci-visibility/subproject/features/support/steps.js index 6a946067ca9..bff74e09dbd 100644 --- a/integration-tests/ci-visibility/subproject/features/support/steps.js +++ b/integration-tests/ci-visibility/subproject/features/support/steps.js @@ -1,3 +1,5 @@ +'use strict' + const assert = require('assert') const { When, Then } = require('@cucumber/cucumber') class Greeter { diff --git a/integration-tests/ci-visibility/subproject/playwright-tests/landing-page-test.js b/integration-tests/ci-visibility/subproject/playwright-tests/landing-page-test.js index 34e6eb2c3aa..a6728facade 100644 --- a/integration-tests/ci-visibility/subproject/playwright-tests/landing-page-test.js +++ b/integration-tests/ci-visibility/subproject/playwright-tests/landing-page-test.js @@ -1,3 +1,5 @@ +'use strict' + const { test, expect } = require('@playwright/test') test.beforeEach(async ({ page }) => { diff --git a/integration-tests/ci-visibility/subproject/playwright.config.js b/integration-tests/ci-visibility/subproject/playwright.config.js index 3be77049e3b..c5bf661511a 100644 --- a/integration-tests/ci-visibility/subproject/playwright.config.js +++ b/integration-tests/ci-visibility/subproject/playwright.config.js @@ -1,3 +1,5 @@ +'use strict' + // Playwright config file for integration tests const { devices } = require('@playwright/test') diff --git a/integration-tests/ci-visibility/subproject/subproject-test.js b/integration-tests/ci-visibility/subproject/subproject-test.js index 64cdd384939..021028a4dbb 100644 --- a/integration-tests/ci-visibility/subproject/subproject-test.js +++ b/integration-tests/ci-visibility/subproject/subproject-test.js @@ -1,3 +1,5 @@ +'use strict' + // TODO: It shouldn't be necessary to disable n/no-extraneous-require - Research // eslint-disable-next-line n/no-extraneous-require const { expect } = require('chai') diff --git a/integration-tests/ci-visibility/test-custom-tags/custom-tags.js b/integration-tests/ci-visibility/test-custom-tags/custom-tags.js index dbc4ae5d5b4..c6a7d1a5bc7 100644 --- a/integration-tests/ci-visibility/test-custom-tags/custom-tags.js +++ b/integration-tests/ci-visibility/test-custom-tags/custom-tags.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') const sum = require('../test/sum') const tracer = require('dd-trace') diff --git a/integration-tests/ci-visibility/test-early-flake-detection/jest-snapshot.js b/integration-tests/ci-visibility/test-early-flake-detection/jest-snapshot.js index 15fadb1601e..b0e3078ba6f 100644 --- a/integration-tests/ci-visibility/test-early-flake-detection/jest-snapshot.js +++ 
b/integration-tests/ci-visibility/test-early-flake-detection/jest-snapshot.js @@ -1,3 +1,5 @@ +'use strict' + describe('test', () => { it('can do snapshot', () => { expect(1 + 2).toMatchSnapshot() diff --git a/integration-tests/ci-visibility/test-early-flake-detection/mocha-parameterized.js b/integration-tests/ci-visibility/test-early-flake-detection/mocha-parameterized.js index b286dfeb359..ed9f2c34a38 100644 --- a/integration-tests/ci-visibility/test-early-flake-detection/mocha-parameterized.js +++ b/integration-tests/ci-visibility/test-early-flake-detection/mocha-parameterized.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') const forEach = require('mocha-each') diff --git a/integration-tests/ci-visibility/test-early-flake-detection/occasionally-failing-test.js b/integration-tests/ci-visibility/test-early-flake-detection/occasionally-failing-test.js index 22b6d91935b..e444b98034a 100644 --- a/integration-tests/ci-visibility/test-early-flake-detection/occasionally-failing-test.js +++ b/integration-tests/ci-visibility/test-early-flake-detection/occasionally-failing-test.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') let globalCounter = 0 diff --git a/integration-tests/ci-visibility/test-early-flake-detection/skipped-and-todo-test.js b/integration-tests/ci-visibility/test-early-flake-detection/skipped-and-todo-test.js index b778a31711e..de339a65ae3 100644 --- a/integration-tests/ci-visibility/test-early-flake-detection/skipped-and-todo-test.js +++ b/integration-tests/ci-visibility/test-early-flake-detection/skipped-and-todo-test.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('ci visibility', () => { diff --git a/integration-tests/ci-visibility/test-early-flake-detection/test-parameterized.js b/integration-tests/ci-visibility/test-early-flake-detection/test-parameterized.js index 8ff884c6c28..0c95a1a57b3 100644 --- a/integration-tests/ci-visibility/test-early-flake-detection/test-parameterized.js +++ b/integration-tests/ci-visibility/test-early-flake-detection/test-parameterized.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('parameterized', () => { diff --git a/integration-tests/ci-visibility/test-early-flake-detection/test.js b/integration-tests/ci-visibility/test-early-flake-detection/test.js index e3306f69374..3a708c06b8f 100644 --- a/integration-tests/ci-visibility/test-early-flake-detection/test.js +++ b/integration-tests/ci-visibility/test-early-flake-detection/test.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('ci visibility', () => { diff --git a/integration-tests/ci-visibility/test-early-flake-detection/weird-test-names.js b/integration-tests/ci-visibility/test-early-flake-detection/weird-test-names.js index 60b30a65fb0..755dff5c50d 100644 --- a/integration-tests/ci-visibility/test-early-flake-detection/weird-test-names.js +++ b/integration-tests/ci-visibility/test-early-flake-detection/weird-test-names.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') it('no describe can do stuff', () => { diff --git a/integration-tests/ci-visibility/test-flaky-test-retries/eventually-passing-test.js b/integration-tests/ci-visibility/test-flaky-test-retries/eventually-passing-test.js index de08821128d..e4c3b90f2e1 100644 --- a/integration-tests/ci-visibility/test-flaky-test-retries/eventually-passing-test.js +++ b/integration-tests/ci-visibility/test-flaky-test-retries/eventually-passing-test.js @@ -1,3 +1,5 @@ +'use strict' + const { 
expect } = require('chai') let counter = 0 diff --git a/integration-tests/ci-visibility/test-impacted-test/test-impacted-1.js b/integration-tests/ci-visibility/test-impacted-test/test-impacted-1.js index 086759f6178..5a696c4f0cb 100644 --- a/integration-tests/ci-visibility/test-impacted-test/test-impacted-1.js +++ b/integration-tests/ci-visibility/test-impacted-test/test-impacted-1.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('impacted tests', () => { diff --git a/integration-tests/ci-visibility/test-impacted-test/test-impacted-2.js b/integration-tests/ci-visibility/test-impacted-test/test-impacted-2.js index 6a226f1131e..aadc8ae4272 100644 --- a/integration-tests/ci-visibility/test-impacted-test/test-impacted-2.js +++ b/integration-tests/ci-visibility/test-impacted-test/test-impacted-2.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('impacted tests 2', () => { diff --git a/integration-tests/ci-visibility/test-management/test-attempt-to-fix-1.js b/integration-tests/ci-visibility/test-management/test-attempt-to-fix-1.js index be05f47fd50..f6f1d0585da 100644 --- a/integration-tests/ci-visibility/test-management/test-attempt-to-fix-1.js +++ b/integration-tests/ci-visibility/test-management/test-attempt-to-fix-1.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') let numAttempts = 0 diff --git a/integration-tests/ci-visibility/test-management/test-attempt-to-fix-2.js b/integration-tests/ci-visibility/test-management/test-attempt-to-fix-2.js index 053d1d62eb0..cb3a60f8ade 100644 --- a/integration-tests/ci-visibility/test-management/test-attempt-to-fix-2.js +++ b/integration-tests/ci-visibility/test-management/test-attempt-to-fix-2.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('attempt to fix tests 2', () => { diff --git a/integration-tests/ci-visibility/test-management/test-disabled-1.js b/integration-tests/ci-visibility/test-management/test-disabled-1.js index c3483031ec8..6a0dc33232f 100644 --- a/integration-tests/ci-visibility/test-management/test-disabled-1.js +++ b/integration-tests/ci-visibility/test-management/test-disabled-1.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('disable tests', () => { diff --git a/integration-tests/ci-visibility/test-management/test-disabled-2.js b/integration-tests/ci-visibility/test-management/test-disabled-2.js index d47525c555b..7435a1d2b10 100644 --- a/integration-tests/ci-visibility/test-management/test-disabled-2.js +++ b/integration-tests/ci-visibility/test-management/test-disabled-2.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('disable tests 2', () => { diff --git a/integration-tests/ci-visibility/test-management/test-quarantine-1.js b/integration-tests/ci-visibility/test-management/test-quarantine-1.js index 71733e2af1c..2ad8a7963e7 100644 --- a/integration-tests/ci-visibility/test-management/test-quarantine-1.js +++ b/integration-tests/ci-visibility/test-management/test-quarantine-1.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('quarantine tests', () => { diff --git a/integration-tests/ci-visibility/test-management/test-quarantine-2.js b/integration-tests/ci-visibility/test-management/test-quarantine-2.js index f94386f1b87..8d3816454d0 100644 --- a/integration-tests/ci-visibility/test-management/test-quarantine-2.js +++ b/integration-tests/ci-visibility/test-management/test-quarantine-2.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } 
= require('chai') describe('quarantine tests 2', () => { diff --git a/integration-tests/ci-visibility/test-nested-hooks/test-nested-hooks.js b/integration-tests/ci-visibility/test-nested-hooks/test-nested-hooks.js index 3e270592b73..89d1b969e31 100644 --- a/integration-tests/ci-visibility/test-nested-hooks/test-nested-hooks.js +++ b/integration-tests/ci-visibility/test-nested-hooks/test-nested-hooks.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') let globalAttempts = 0 diff --git a/integration-tests/ci-visibility/test-parsing-error/parsing-error-2.js b/integration-tests/ci-visibility/test-parsing-error/parsing-error-2.js index 81286c0ee5d..bb43f645de4 100644 --- a/integration-tests/ci-visibility/test-parsing-error/parsing-error-2.js +++ b/integration-tests/ci-visibility/test-parsing-error/parsing-error-2.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chao') describe('test-parsing-error-2', () => { diff --git a/integration-tests/ci-visibility/test-parsing-error/parsing-error.js b/integration-tests/ci-visibility/test-parsing-error/parsing-error.js index e6d9108a9ea..9ede9a25a37 100644 --- a/integration-tests/ci-visibility/test-parsing-error/parsing-error.js +++ b/integration-tests/ci-visibility/test-parsing-error/parsing-error.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chao') describe('test-parsing-error', () => { diff --git a/integration-tests/ci-visibility/test-total-code-coverage/test-run.js b/integration-tests/ci-visibility/test-total-code-coverage/test-run.js index 2256f75f069..6b6d2b5a4eb 100644 --- a/integration-tests/ci-visibility/test-total-code-coverage/test-run.js +++ b/integration-tests/ci-visibility/test-total-code-coverage/test-run.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') const sum = require('./used-dependency') diff --git a/integration-tests/ci-visibility/test-total-code-coverage/test-skipped.js b/integration-tests/ci-visibility/test-total-code-coverage/test-skipped.js index 1410740bfa3..3367eab54e0 100644 --- a/integration-tests/ci-visibility/test-total-code-coverage/test-skipped.js +++ b/integration-tests/ci-visibility/test-total-code-coverage/test-skipped.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') const sum = require('./unused-dependency') diff --git a/integration-tests/ci-visibility/test-total-code-coverage/unused-dependency.js b/integration-tests/ci-visibility/test-total-code-coverage/unused-dependency.js index 2012896b44c..86d4a1c94ef 100644 --- a/integration-tests/ci-visibility/test-total-code-coverage/unused-dependency.js +++ b/integration-tests/ci-visibility/test-total-code-coverage/unused-dependency.js @@ -1,3 +1,5 @@ +'use strict' + module.exports = function (a, b) { return a + b } diff --git a/integration-tests/ci-visibility/test-total-code-coverage/used-dependency.js b/integration-tests/ci-visibility/test-total-code-coverage/used-dependency.js index 2012896b44c..86d4a1c94ef 100644 --- a/integration-tests/ci-visibility/test-total-code-coverage/used-dependency.js +++ b/integration-tests/ci-visibility/test-total-code-coverage/used-dependency.js @@ -1,3 +1,5 @@ +'use strict' + module.exports = function (a, b) { return a + b } diff --git a/integration-tests/ci-visibility/test/ci-visibility-test-2.js b/integration-tests/ci-visibility/test/ci-visibility-test-2.js index 58316835b52..f6edf100993 100644 --- a/integration-tests/ci-visibility/test/ci-visibility-test-2.js +++ b/integration-tests/ci-visibility/test/ci-visibility-test-2.js @@ -1,3 +1,5 @@ +'use 
strict' + const { expect } = require('chai') const sum = require('./sum') diff --git a/integration-tests/ci-visibility/test/ci-visibility-test.js b/integration-tests/ci-visibility/test/ci-visibility-test.js index 11713ffaf86..9ce350498f9 100644 --- a/integration-tests/ci-visibility/test/ci-visibility-test.js +++ b/integration-tests/ci-visibility/test/ci-visibility-test.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') const sum = require('./sum') diff --git a/integration-tests/ci-visibility/test/fail-test.js b/integration-tests/ci-visibility/test/fail-test.js index efca6e21432..c6be99aa6f3 100644 --- a/integration-tests/ci-visibility/test/fail-test.js +++ b/integration-tests/ci-visibility/test/fail-test.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('fail', () => { diff --git a/integration-tests/ci-visibility/test/selenium-no-framework.js b/integration-tests/ci-visibility/test/selenium-no-framework.js index cca24586bfd..a99e081fec4 100644 --- a/integration-tests/ci-visibility/test/selenium-no-framework.js +++ b/integration-tests/ci-visibility/test/selenium-no-framework.js @@ -1,3 +1,5 @@ +'use strict' + const { By, Builder } = require('selenium-webdriver') const chrome = require('selenium-webdriver/chrome') diff --git a/integration-tests/ci-visibility/test/selenium-test.js b/integration-tests/ci-visibility/test/selenium-test.js index 71260f8da95..45a2bf00432 100644 --- a/integration-tests/ci-visibility/test/selenium-test.js +++ b/integration-tests/ci-visibility/test/selenium-test.js @@ -1,3 +1,5 @@ +'use strict' + const { By, Builder } = require('selenium-webdriver') const chrome = require('selenium-webdriver/chrome') const { expect } = require('chai') diff --git a/integration-tests/ci-visibility/test/sum.js b/integration-tests/ci-visibility/test/sum.js index 2012896b44c..86d4a1c94ef 100644 --- a/integration-tests/ci-visibility/test/sum.js +++ b/integration-tests/ci-visibility/test/sum.js @@ -1,3 +1,5 @@ +'use strict' + module.exports = function (a, b) { return a + b } diff --git a/integration-tests/ci-visibility/unskippable-test/test-to-run.js b/integration-tests/ci-visibility/unskippable-test/test-to-run.js index f093d1e39ed..40326d2276a 100644 --- a/integration-tests/ci-visibility/unskippable-test/test-to-run.js +++ b/integration-tests/ci-visibility/unskippable-test/test-to-run.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('test-to-run', () => { diff --git a/integration-tests/ci-visibility/unskippable-test/test-to-skip.js b/integration-tests/ci-visibility/unskippable-test/test-to-skip.js index 74655c102ae..54223246967 100644 --- a/integration-tests/ci-visibility/unskippable-test/test-to-skip.js +++ b/integration-tests/ci-visibility/unskippable-test/test-to-skip.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('test-to-skip', () => { diff --git a/integration-tests/ci-visibility/unskippable-test/test-unskippable.js b/integration-tests/ci-visibility/unskippable-test/test-unskippable.js index 6530097c7d4..a0b0d303340 100644 --- a/integration-tests/ci-visibility/unskippable-test/test-unskippable.js +++ b/integration-tests/ci-visibility/unskippable-test/test-unskippable.js @@ -1,6 +1,7 @@ /** * @datadog {"unskippable": true} */ +'use strict' const { expect } = require('chai') diff --git a/integration-tests/ci-visibility/web-app-server-with-redirect.js b/integration-tests/ci-visibility/web-app-server-with-redirect.js index 388aa1b4016..1860f62b6f5 100644 --- 
a/integration-tests/ci-visibility/web-app-server-with-redirect.js +++ b/integration-tests/ci-visibility/web-app-server-with-redirect.js @@ -1,3 +1,5 @@ +'use strict' + // File to spin an HTTP server that returns an HTML for playwright to visit const http = require('http') diff --git a/integration-tests/ci-visibility/web-app-server.js b/integration-tests/ci-visibility/web-app-server.js index 1cd5f81ef07..5b32e07121a 100644 --- a/integration-tests/ci-visibility/web-app-server.js +++ b/integration-tests/ci-visibility/web-app-server.js @@ -1,3 +1,5 @@ +'use strict' + // File to spin an HTTP server that returns an HTML for playwright to visit const http = require('http') const coverage = require('../ci-visibility/fixtures/coverage.json') diff --git a/integration-tests/config-jest-multiproject.js b/integration-tests/config-jest-multiproject.js index e06aec35930..90190430044 100644 --- a/integration-tests/config-jest-multiproject.js +++ b/integration-tests/config-jest-multiproject.js @@ -1,3 +1,5 @@ +'use strict' + module.exports = { projects: [ { diff --git a/integration-tests/config-jest.js b/integration-tests/config-jest.js index f30aec0ad35..8d85aa43a8f 100644 --- a/integration-tests/config-jest.js +++ b/integration-tests/config-jest.js @@ -1,3 +1,5 @@ +'use strict' + module.exports = { projects: process.env.PROJECTS ? JSON.parse(process.env.PROJECTS) : [__dirname], testPathIgnorePatterns: ['/node_modules/'], diff --git a/integration-tests/cucumber/cucumber.spec.js b/integration-tests/cucumber/cucumber.spec.js index 4b48e978ab7..ea1f9668afb 100644 --- a/integration-tests/cucumber/cucumber.spec.js +++ b/integration-tests/cucumber/cucumber.spec.js @@ -1743,7 +1743,7 @@ versions.forEach(version => { retriedTest.meta[`${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_FILE_SUFFIX}`] .endsWith('ci-visibility/features-di/support/sum.js') ) - assert.equal(retriedTest.metrics[`${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_LINE_SUFFIX}`], 4) + assert.equal(retriedTest.metrics[`${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_LINE_SUFFIX}`], 6) const snapshotIdKey = `${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_SNAPSHOT_ID_SUFFIX}` assert.exists(retriedTest.meta[snapshotIdKey]) @@ -1761,7 +1761,7 @@ versions.forEach(version => { level: 'error' }) assert.equal(diLog.debugger.snapshot.language, 'javascript') - assert.deepInclude(diLog.debugger.snapshot.captures.lines['4'].locals, { + assert.deepInclude(diLog.debugger.snapshot.captures.lines['6'].locals, { a: { type: 'number', value: '11' diff --git a/integration-tests/cypress.config.js b/integration-tests/cypress.config.js index 529980e298c..44437e49c0f 100644 --- a/integration-tests/cypress.config.js +++ b/integration-tests/cypress.config.js @@ -1,3 +1,5 @@ +'use strict' + const ddAfterRun = require('dd-trace/ci/cypress/after-run') const ddAfterSpec = require('dd-trace/ci/cypress/after-spec') const cypressFailFast = require('cypress-fail-fast/plugin') diff --git a/integration-tests/cypress/plugins-old/index.js b/integration-tests/cypress/plugins-old/index.js index 66de6be80fe..d4ff411dbc0 100644 --- a/integration-tests/cypress/plugins-old/index.js +++ b/integration-tests/cypress/plugins-old/index.js @@ -1,3 +1,5 @@ +'use strict' + const ddAfterRun = require('dd-trace/ci/cypress/after-run') const ddAfterSpec = require('dd-trace/ci/cypress/after-spec') diff --git a/integration-tests/debugger/source-map-support.spec.js b/integration-tests/debugger/source-map-support.spec.js index 4c6f2f0efe5..51ef099b19a 100644 --- a/integration-tests/debugger/source-map-support.spec.js 
+++ b/integration-tests/debugger/source-map-support.spec.js @@ -38,7 +38,7 @@ describe('Dynamic Instrumentation', function () { t.agent.on('debugger-input', ({ payload: [{ debugger: { snapshot: { probe: { location } } } }] }) => { assert.deepEqual(location, { file: 'target-app/source-map-support/minify.js', - lines: ['6'] + lines: ['8'] }) done() }) diff --git a/integration-tests/debugger/target-app/source-map-support/minify.js b/integration-tests/debugger/target-app/source-map-support/minify.js index a6ac62607f7..d9cca1ce2b6 100644 --- a/integration-tests/debugger/target-app/source-map-support/minify.js +++ b/integration-tests/debugger/target-app/source-map-support/minify.js @@ -1,3 +1,5 @@ +'use strict' + require('dd-trace/init') const { createServer } = require('node:http') diff --git a/integration-tests/debugger/target-app/source-map-support/minify.min.js b/integration-tests/debugger/target-app/source-map-support/minify.min.js index d50f262152e..7bb28d474b7 100644 --- a/integration-tests/debugger/target-app/source-map-support/minify.min.js +++ b/integration-tests/debugger/target-app/source-map-support/minify.min.js @@ -1,2 +1,2 @@ -require("dd-trace/init");const{createServer}=require("node:http");const server=createServer((req,res)=>{res.end("hello world")});server.listen(process.env.APP_PORT||0,()=>{process.send?.({port:server.address().port})}); +"use strict";require("dd-trace/init");const{createServer}=require("node:http");const server=createServer((req,res)=>{res.end("hello world")});server.listen(process.env.APP_PORT||0,()=>{process.send?.({port:server.address().port})}); //# sourceMappingURL=minify.min.js.map \ No newline at end of file diff --git a/integration-tests/debugger/target-app/source-map-support/minify.min.js.map b/integration-tests/debugger/target-app/source-map-support/minify.min.js.map index 00c4797001e..0c367242ae0 100644 --- a/integration-tests/debugger/target-app/source-map-support/minify.min.js.map +++ b/integration-tests/debugger/target-app/source-map-support/minify.min.js.map @@ -1 +1 @@ -{"version":3,"sources":["integration-tests/debugger/target-app/source-map-support/minify.js"],"names":["require","createServer","server","req","res","end","listen","process","env","APP_PORT","send","port","address"],"mappings":"AAAAA,QAAQ,eAAe,EAEvB,KAAM,CAAEC,YAAa,EAAID,QAAQ,WAAW,EAE5C,MAAME,OAASD,aAAa,CAACE,IAAKC,OAChCA,IAAIC,IAAI,aAAa,CACvB,CAAC,EAEDH,OAAOI,OAAOC,QAAQC,IAAIC,UAAY,EAAG,KACvCF,QAAQG,OAAO,CAAEC,KAAMT,OAAOU,QAAQ,EAAED,IAAK,CAAC,CAChD,CAAC"} \ No newline at end of file +{"version":3,"sources":["integration-tests/debugger/target-app/source-map-support/minify.js"],"names":["require","createServer","server","req","res","end","listen","process","env","APP_PORT","send","port","address"],"mappings":"AAAA,aAEAA,QAAQ,eAAe,EAEvB,KAAM,CAAEC,YAAa,EAAID,QAAQ,WAAW,EAE5C,MAAME,OAASD,aAAa,CAACE,IAAKC,OAChCA,IAAIC,IAAI,aAAa,CACvB,CAAC,EAEDH,OAAOI,OAAOC,QAAQC,IAAIC,UAAY,EAAG,KACvCF,QAAQG,OAAO,CAAEC,KAAMT,OAAOU,QAAQ,EAAED,IAAK,CAAC,CAChD,CAAC"} \ No newline at end of file diff --git a/integration-tests/esbuild/aws-sdk.js b/integration-tests/esbuild/aws-sdk.js index c89f570689b..0f0f1383117 100644 --- a/integration-tests/esbuild/aws-sdk.js +++ b/integration-tests/esbuild/aws-sdk.js @@ -1,3 +1,5 @@ +'use strict' + require('../../').init() // dd-trace const aws = require('aws-sdk') diff --git a/integration-tests/esbuild/basic-test.js b/integration-tests/esbuild/basic-test.js index 5e95234eddf..4d8b34de672 100755 --- a/integration-tests/esbuild/basic-test.js +++ 
b/integration-tests/esbuild/basic-test.js @@ -1,4 +1,5 @@ #!/usr/bin/env node +'use strict' const tracer = require('../../').init() // dd-trace diff --git a/integration-tests/esbuild/build-and-test-aws-sdk.js b/integration-tests/esbuild/build-and-test-aws-sdk.js index 3324c6b60fc..925459169f9 100755 --- a/integration-tests/esbuild/build-and-test-aws-sdk.js +++ b/integration-tests/esbuild/build-and-test-aws-sdk.js @@ -1,4 +1,6 @@ #!/usr/bin/env node +'use strict' + /* eslint-disable no-console */ const fs = require('fs') const { spawnSync } = require('child_process') diff --git a/integration-tests/esbuild/build-and-test-openai.js b/integration-tests/esbuild/build-and-test-openai.js index c0889b22e41..77a2fd3ca47 100644 --- a/integration-tests/esbuild/build-and-test-openai.js +++ b/integration-tests/esbuild/build-and-test-openai.js @@ -1,4 +1,6 @@ #!/usr/bin/env node +'use strict' + /* eslint-disable no-console */ const fs = require('fs') const { spawnSync } = require('child_process') diff --git a/integration-tests/esbuild/build-and-test-skip-external.js b/integration-tests/esbuild/build-and-test-skip-external.js index 659e8a0c6eb..0960e96d3f7 100755 --- a/integration-tests/esbuild/build-and-test-skip-external.js +++ b/integration-tests/esbuild/build-and-test-skip-external.js @@ -1,4 +1,6 @@ #!/usr/bin/env node +'use strict' + const fs = require('fs') const assert = require('assert') diff --git a/integration-tests/esbuild/build.esm.common-config.js b/integration-tests/esbuild/build.esm.common-config.js index 2b921f05380..6642c9eed93 100644 --- a/integration-tests/esbuild/build.esm.common-config.js +++ b/integration-tests/esbuild/build.esm.common-config.js @@ -1,4 +1,7 @@ +'use strict' + const ddPlugin = require('../../esbuild') + module.exports = { format: 'esm', entryPoints: ['basic-test.js'], diff --git a/integration-tests/esbuild/build.js b/integration-tests/esbuild/build.js index 60ba653548f..418d794f85f 100755 --- a/integration-tests/esbuild/build.js +++ b/integration-tests/esbuild/build.js @@ -1,4 +1,5 @@ #!/usr/bin/env node +'use strict' const ddPlugin = require('../../esbuild') // dd-trace/esbuild const esbuild = require('esbuild') diff --git a/integration-tests/esbuild/complex-app.js b/integration-tests/esbuild/complex-app.js index 8f402cd4271..27592dd2f33 100755 --- a/integration-tests/esbuild/complex-app.js +++ b/integration-tests/esbuild/complex-app.js @@ -1,4 +1,5 @@ #!/usr/bin/env node +'use strict' require('../../').init() // dd-trace const assert = require('assert') diff --git a/integration-tests/esbuild/openai.js b/integration-tests/esbuild/openai.js index d663cd3cd55..678d1963d35 100644 --- a/integration-tests/esbuild/openai.js +++ b/integration-tests/esbuild/openai.js @@ -1,2 +1,4 @@ +'use strict' + require('../../').init() require('openai') diff --git a/integration-tests/esbuild/skip-external.js b/integration-tests/esbuild/skip-external.js index 1381bd5beea..ece6d197bca 100644 --- a/integration-tests/esbuild/skip-external.js +++ b/integration-tests/esbuild/skip-external.js @@ -1,3 +1,5 @@ +'use strict' + require('../../').init() // dd-trace // this should be bundled diff --git a/integration-tests/graphql/index.js b/integration-tests/graphql/index.js index e1f16c470a7..4774473d20a 100644 --- a/integration-tests/graphql/index.js +++ b/integration-tests/graphql/index.js @@ -1,3 +1,5 @@ +'use strict' + const tracer = require('dd-trace') const path = require('path') diff --git a/integration-tests/init.spec.js b/integration-tests/init.spec.js index 43dd718fada..6435b96b3e4 
100644 --- a/integration-tests/init.spec.js +++ b/integration-tests/init.spec.js @@ -1,3 +1,5 @@ +'use strict' + const semver = require('semver') const { runAndCheckWithTelemetry: testFile, diff --git a/integration-tests/init/instrument.js b/integration-tests/init/instrument.js index b1114e6237b..3e5ea87d56c 100644 --- a/integration-tests/init/instrument.js +++ b/integration-tests/init/instrument.js @@ -1,3 +1,5 @@ +'use strict' + const http = require('http') const dc = require('dc-polyfill') diff --git a/integration-tests/init/trace.js b/integration-tests/init/trace.js index a05665b5ea9..c4c1d0b78e8 100644 --- a/integration-tests/init/trace.js +++ b/integration-tests/init/trace.js @@ -1,3 +1,5 @@ +'use strict' + // eslint-disable-next-line no-console console.log(!!global._ddtrace) // eslint-disable-next-line no-console diff --git a/integration-tests/jest/jest.spec.js b/integration-tests/jest/jest.spec.js index 58afad50266..35f910a47fd 100644 --- a/integration-tests/jest/jest.spec.js +++ b/integration-tests/jest/jest.spec.js @@ -547,7 +547,7 @@ describe('jest CommonJS', () => { retriedTest.meta[`${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_FILE_SUFFIX}`] .endsWith('ci-visibility/dynamic-instrumentation/dependency.js') ) - assert.equal(retriedTest.metrics[`${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_LINE_SUFFIX}`], 4) + assert.equal(retriedTest.metrics[`${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_LINE_SUFFIX}`], 6) const snapshotIdKey = `${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_SNAPSHOT_ID_SUFFIX}` assert.exists(retriedTest.meta[snapshotIdKey]) @@ -2716,7 +2716,7 @@ describe('jest CommonJS', () => { retriedTest.meta[`${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_FILE_SUFFIX}`] .endsWith('ci-visibility/dynamic-instrumentation/dependency.js') ) - assert.equal(retriedTest.metrics[`${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_LINE_SUFFIX}`], 4) + assert.equal(retriedTest.metrics[`${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_LINE_SUFFIX}`], 6) const snapshotIdKey = `${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_SNAPSHOT_ID_SUFFIX}` assert.exists(retriedTest.meta[snapshotIdKey]) @@ -2738,7 +2738,7 @@ describe('jest CommonJS', () => { level: 'error' }) assert.equal(diLog.debugger.snapshot.language, 'javascript') - assert.deepInclude(diLog.debugger.snapshot.captures.lines['4'].locals, { + assert.deepInclude(diLog.debugger.snapshot.captures.lines['6'].locals, { a: { type: 'number', value: '11' diff --git a/integration-tests/memory-leak/index.js b/integration-tests/memory-leak/index.js index 01d4c2c439e..8d87a5ab99e 100644 --- a/integration-tests/memory-leak/index.js +++ b/integration-tests/memory-leak/index.js @@ -1,3 +1,5 @@ +'use strict' + const tracer = require('../../') tracer.init() diff --git a/integration-tests/mocha/mocha.spec.js b/integration-tests/mocha/mocha.spec.js index bbe3f784dcf..3e9ff44bb21 100644 --- a/integration-tests/mocha/mocha.spec.js +++ b/integration-tests/mocha/mocha.spec.js @@ -2401,7 +2401,7 @@ describe('mocha CommonJS', function () { retriedTest.meta[`${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_FILE_SUFFIX}`] .endsWith('ci-visibility/dynamic-instrumentation/dependency.js') ) - assert.equal(retriedTest.metrics[`${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_LINE_SUFFIX}`], 4) + assert.equal(retriedTest.metrics[`${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_LINE_SUFFIX}`], 6) const snapshotIdKey = `${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_SNAPSHOT_ID_SUFFIX}` @@ -2424,7 +2424,7 @@ describe('mocha CommonJS', function () { level: 'error' }) 
assert.equal(diLog.debugger.snapshot.language, 'javascript') - assert.deepInclude(diLog.debugger.snapshot.captures.lines['4'].locals, { + assert.deepInclude(diLog.debugger.snapshot.captures.lines['6'].locals, { a: { type: 'number', value: '11' diff --git a/integration-tests/my-nyc.config.js b/integration-tests/my-nyc.config.js index b0d1235ecd2..88a8b9aac3e 100644 --- a/integration-tests/my-nyc.config.js +++ b/integration-tests/my-nyc.config.js @@ -1,3 +1,5 @@ +'use strict' + // non default name so that it only gets picked up intentionally module.exports = { exclude: ['node_modules/**'], diff --git a/integration-tests/opentelemetry/auto-instrumentation.js b/integration-tests/opentelemetry/auto-instrumentation.js index 8a1ba5c2c77..8628fc43be7 100644 --- a/integration-tests/opentelemetry/auto-instrumentation.js +++ b/integration-tests/opentelemetry/auto-instrumentation.js @@ -1,3 +1,5 @@ +'use strict' + const tracer = require('dd-trace').init() const { TracerProvider } = tracer const provider = new TracerProvider() diff --git a/integration-tests/package-guardrails.spec.js b/integration-tests/package-guardrails.spec.js index 8a9a2ce57c1..f257d742a63 100644 --- a/integration-tests/package-guardrails.spec.js +++ b/integration-tests/package-guardrails.spec.js @@ -1,3 +1,5 @@ +'use strict' + const { runAndCheckWithTelemetry: testFile, useEnv, diff --git a/integration-tests/playwright.config.js b/integration-tests/playwright.config.js index 34b0a69a859..b00c9ddb1ca 100644 --- a/integration-tests/playwright.config.js +++ b/integration-tests/playwright.config.js @@ -1,3 +1,5 @@ +'use strict' + // Playwright config file for integration tests const { devices } = require('@playwright/test') diff --git a/integration-tests/profiler/dnstest.js b/integration-tests/profiler/dnstest.js index 36398cb2a05..71d6d51aaea 100644 --- a/integration-tests/profiler/dnstest.js +++ b/integration-tests/profiler/dnstest.js @@ -1,3 +1,5 @@ +'use strict' + const dns = require('node:dns') require('dd-trace').init().profilerStarted().then(() => { diff --git a/integration-tests/profiler/fstest.js b/integration-tests/profiler/fstest.js index c65887c102e..ef49f715bdb 100644 --- a/integration-tests/profiler/fstest.js +++ b/integration-tests/profiler/fstest.js @@ -1,3 +1,5 @@ +'use strict' + const fs = require('fs') const os = require('os') const path = require('path') diff --git a/integration-tests/profiler/nettest.js b/integration-tests/profiler/nettest.js index e9f3002d6b0..ed50e2a81f9 100644 --- a/integration-tests/profiler/nettest.js +++ b/integration-tests/profiler/nettest.js @@ -1,3 +1,5 @@ +'use strict' + const net = require('net') async function streamToString (stream) { diff --git a/integration-tests/selenium/selenium.spec.js b/integration-tests/selenium/selenium.spec.js index 74738967c9a..491522e40ee 100644 --- a/integration-tests/selenium/selenium.spec.js +++ b/integration-tests/selenium/selenium.spec.js @@ -1,3 +1,5 @@ +'use strict' + const { exec } = require('child_process') const { assert } = require('chai') diff --git a/packages/datadog-instrumentations/src/apollo.js b/packages/datadog-instrumentations/src/apollo.js index a26bfb86eca..0e5719ea574 100644 --- a/packages/datadog-instrumentations/src/apollo.js +++ b/packages/datadog-instrumentations/src/apollo.js @@ -1,3 +1,5 @@ +'use strict' + const { addHook, channel @@ -16,20 +18,15 @@ const CHANNELS = { const generalErrorCh = channel('apm:apollo:gateway:general:error') -function wrapExecutor (executor) { - return function (...args) { - const channel = 
CHANNELS['gateway.request'] - const ctx = { requestContext: args[0], gateway: this } - - return channel.tracePromise(executor, ctx, this, ...args) - } -} - function wrapApolloGateway (ApolloGateway) { class ApolloGatewayWrapper extends ApolloGateway { constructor (...args) { super(...args) - shimmer.wrap(this, 'executor', wrapExecutor) + shimmer.wrap(this, 'executor', (originalExecutor) => (...args) => { + const channel = CHANNELS['gateway.request'] + const ctx = { requestContext: args[0], gateway: this } + return channel.tracePromise(originalExecutor, ctx, this, ...args) + }) } } return ApolloGatewayWrapper diff --git a/packages/datadog-instrumentations/src/avsc.js b/packages/datadog-instrumentations/src/avsc.js index 6d71b1744bf..c37d8a4cb67 100644 --- a/packages/datadog-instrumentations/src/avsc.js +++ b/packages/datadog-instrumentations/src/avsc.js @@ -1,3 +1,5 @@ +'use strict' + const shimmer = require('../../datadog-shimmer') const { addHook } = require('./helpers/instrument') diff --git a/packages/datadog-instrumentations/src/cypress.js b/packages/datadog-instrumentations/src/cypress.js index c116f67ea98..57604d7356b 100644 --- a/packages/datadog-instrumentations/src/cypress.js +++ b/packages/datadog-instrumentations/src/cypress.js @@ -1,3 +1,5 @@ +'use strict' + const { addHook } = require('./helpers/instrument') const { DD_MAJOR } = require('../../../version') diff --git a/packages/datadog-instrumentations/src/fetch.js b/packages/datadog-instrumentations/src/fetch.js index 9a3fe148f23..731a420a31d 100644 --- a/packages/datadog-instrumentations/src/fetch.js +++ b/packages/datadog-instrumentations/src/fetch.js @@ -1,4 +1,5 @@ 'use strict' + /* eslint n/no-unsupported-features/node-builtins: ['error', { ignores: ['fetch', 'Request'] }] */ const { isInServerlessEnvironment } = require('../../dd-trace/src/serverless') diff --git a/packages/datadog-instrumentations/src/mocha.js b/packages/datadog-instrumentations/src/mocha.js index 5449d769b03..a476f027b41 100644 --- a/packages/datadog-instrumentations/src/mocha.js +++ b/packages/datadog-instrumentations/src/mocha.js @@ -1,3 +1,5 @@ +'use strict' + const { getEnvironmentVariable } = require('../../dd-trace/src/config-helper') if (getEnvironmentVariable('MOCHA_WORKER_ID')) { diff --git a/packages/datadog-instrumentations/src/mocha/common.js b/packages/datadog-instrumentations/src/mocha/common.js index c25ab2fdb21..71962f05f25 100644 --- a/packages/datadog-instrumentations/src/mocha/common.js +++ b/packages/datadog-instrumentations/src/mocha/common.js @@ -1,3 +1,5 @@ +'use strict' + const { addHook, channel } = require('../helpers/instrument') const shimmer = require('../../../datadog-shimmer') const { getCallSites } = require('../../../dd-trace/src/plugins/util/stacktrace') diff --git a/packages/datadog-instrumentations/src/nyc.js b/packages/datadog-instrumentations/src/nyc.js index 93aa3ae1ad8..4459c6e36a7 100644 --- a/packages/datadog-instrumentations/src/nyc.js +++ b/packages/datadog-instrumentations/src/nyc.js @@ -1,3 +1,5 @@ +'use strict' + const { addHook, channel } = require('./helpers/instrument') const shimmer = require('../../datadog-shimmer') const { getEnvironmentVariable } = require('../../dd-trace/src/config-helper') diff --git a/packages/datadog-instrumentations/src/orchestrion-config/index.js b/packages/datadog-instrumentations/src/orchestrion-config/index.js index 15830d1bc34..9849f5b36fe 100644 --- a/packages/datadog-instrumentations/src/orchestrion-config/index.js +++ 
b/packages/datadog-instrumentations/src/orchestrion-config/index.js @@ -1,3 +1,5 @@ +'use strict' + module.exports = ` version: 1 dc_module: dc-polyfill diff --git a/packages/datadog-instrumentations/src/playwright.js b/packages/datadog-instrumentations/src/playwright.js index 1e46bcf81ab..ffdb236dedb 100644 --- a/packages/datadog-instrumentations/src/playwright.js +++ b/packages/datadog-instrumentations/src/playwright.js @@ -1,3 +1,5 @@ +'use strict' + const satisfies = require('semifies') const { addHook, channel } = require('./helpers/instrument') diff --git a/packages/datadog-instrumentations/src/protobufjs.js b/packages/datadog-instrumentations/src/protobufjs.js index 44ff70dba77..c11706582d6 100644 --- a/packages/datadog-instrumentations/src/protobufjs.js +++ b/packages/datadog-instrumentations/src/protobufjs.js @@ -1,3 +1,5 @@ +'use strict' + const shimmer = require('../../datadog-shimmer') const { addHook } = require('./helpers/instrument') diff --git a/packages/datadog-instrumentations/src/selenium.js b/packages/datadog-instrumentations/src/selenium.js index 141aa967e40..a99a9042443 100644 --- a/packages/datadog-instrumentations/src/selenium.js +++ b/packages/datadog-instrumentations/src/selenium.js @@ -1,3 +1,5 @@ +'use strict' + const { addHook, channel } = require('./helpers/instrument') const shimmer = require('../../datadog-shimmer') diff --git a/packages/datadog-instrumentations/src/vitest.js b/packages/datadog-instrumentations/src/vitest.js index cbae57ea093..081a2256f4e 100644 --- a/packages/datadog-instrumentations/src/vitest.js +++ b/packages/datadog-instrumentations/src/vitest.js @@ -1,3 +1,5 @@ +'use strict' + const { addHook, channel } = require('./helpers/instrument') const shimmer = require('../../datadog-shimmer') const log = require('../../dd-trace/src/log') diff --git a/packages/datadog-instrumentations/test/helpers/check-require-cache/bad-order.js b/packages/datadog-instrumentations/test/helpers/check-require-cache/bad-order.js index e549c600516..697ffcb45bc 100755 --- a/packages/datadog-instrumentations/test/helpers/check-require-cache/bad-order.js +++ b/packages/datadog-instrumentations/test/helpers/check-require-cache/bad-order.js @@ -1,4 +1,5 @@ #!/usr/bin/env node +'use strict' require('express') // package required before tracer const tracer = require('../../../../../') diff --git a/packages/datadog-instrumentations/test/helpers/check-require-cache/good-order.js b/packages/datadog-instrumentations/test/helpers/check-require-cache/good-order.js index f90e9c6fbbb..a7845938758 100755 --- a/packages/datadog-instrumentations/test/helpers/check-require-cache/good-order.js +++ b/packages/datadog-instrumentations/test/helpers/check-require-cache/good-order.js @@ -1,4 +1,5 @@ #!/usr/bin/env node +'use strict' const tracer = require('../../../../../') require('express') // package required after tracer diff --git a/packages/datadog-plugin-aerospike/test/naming.js b/packages/datadog-plugin-aerospike/test/naming.js index bed64a71625..ef8c48ecd08 100644 --- a/packages/datadog-plugin-aerospike/test/naming.js +++ b/packages/datadog-plugin-aerospike/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-amqp10/test/naming.js b/packages/datadog-plugin-amqp10/test/naming.js index e8bc0e42c3e..d367eea4ac5 100644 --- a/packages/datadog-plugin-amqp10/test/naming.js +++ b/packages/datadog-plugin-amqp10/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + 
const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-amqplib/test/naming.js b/packages/datadog-plugin-amqplib/test/naming.js index fdb90a66d8c..15981d83e04 100644 --- a/packages/datadog-plugin-amqplib/test/naming.js +++ b/packages/datadog-plugin-amqplib/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-apollo/test/fixtures.js b/packages/datadog-plugin-apollo/test/fixtures.js index 6e0a992f5ca..fbf59b92fa1 100644 --- a/packages/datadog-plugin-apollo/test/fixtures.js +++ b/packages/datadog-plugin-apollo/test/fixtures.js @@ -1,3 +1,5 @@ +'use strict' + const typeDefs = ` type Query { hello(name: String, title: String): String diff --git a/packages/datadog-plugin-apollo/test/naming.js b/packages/datadog-plugin-apollo/test/naming.js index bc8e2247b82..c9f0c823898 100644 --- a/packages/datadog-plugin-apollo/test/naming.js +++ b/packages/datadog-plugin-apollo/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-avsc/src/index.js b/packages/datadog-plugin-avsc/src/index.js index be0ef970e50..853c4c1df3c 100644 --- a/packages/datadog-plugin-avsc/src/index.js +++ b/packages/datadog-plugin-avsc/src/index.js @@ -1,3 +1,5 @@ +'use strict' + const SchemaPlugin = require('../../dd-trace/src/plugins/schema') const SchemaExtractor = require('./schema_iterator') diff --git a/packages/datadog-plugin-avsc/src/schema_iterator.js b/packages/datadog-plugin-avsc/src/schema_iterator.js index c35ebf9604a..efee2e0425d 100644 --- a/packages/datadog-plugin-avsc/src/schema_iterator.js +++ b/packages/datadog-plugin-avsc/src/schema_iterator.js @@ -1,3 +1,5 @@ +'use strict' + const AVRO = 'avro' const { SCHEMA_DEFINITION, diff --git a/packages/datadog-plugin-avsc/test/helpers.js b/packages/datadog-plugin-avsc/test/helpers.js index 8e5be7ac433..7e941b5e184 100644 --- a/packages/datadog-plugin-avsc/test/helpers.js +++ b/packages/datadog-plugin-avsc/test/helpers.js @@ -1,3 +1,5 @@ +'use strict' + const fs = require('fs') async function loadMessage (avro, messageTypeName) { diff --git a/packages/datadog-plugin-aws-sdk/src/services/bedrockruntime/index.js b/packages/datadog-plugin-aws-sdk/src/services/bedrockruntime/index.js index c123c02fa65..e4706be8818 100644 --- a/packages/datadog-plugin-aws-sdk/src/services/bedrockruntime/index.js +++ b/packages/datadog-plugin-aws-sdk/src/services/bedrockruntime/index.js @@ -1,3 +1,5 @@ +'use strict' + const CompositePlugin = require('../../../../dd-trace/src/plugins/composite') const BedrockRuntimeTracing = require('./tracing') const BedrockRuntimeLLMObsPlugin = require('../../../../dd-trace/src/llmobs/plugins/bedrockruntime') diff --git a/packages/datadog-plugin-aws-sdk/test/kinesis-naming.js b/packages/datadog-plugin-aws-sdk/test/kinesis-naming.js index d6013a2cd88..c87a7197b7c 100644 --- a/packages/datadog-plugin-aws-sdk/test/kinesis-naming.js +++ b/packages/datadog-plugin-aws-sdk/test/kinesis-naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-aws-sdk/test/lambda-naming.js b/packages/datadog-plugin-aws-sdk/test/lambda-naming.js index 19c3ea4e8db..22a83302d52 100644 --- 
a/packages/datadog-plugin-aws-sdk/test/lambda-naming.js +++ b/packages/datadog-plugin-aws-sdk/test/lambda-naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-aws-sdk/test/s3-naming.js b/packages/datadog-plugin-aws-sdk/test/s3-naming.js index 137ab586dfd..ca4ec3167e5 100644 --- a/packages/datadog-plugin-aws-sdk/test/s3-naming.js +++ b/packages/datadog-plugin-aws-sdk/test/s3-naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-aws-sdk/test/sns-naming.js b/packages/datadog-plugin-aws-sdk/test/sns-naming.js index 10a01b898da..96646fb48b2 100644 --- a/packages/datadog-plugin-aws-sdk/test/sns-naming.js +++ b/packages/datadog-plugin-aws-sdk/test/sns-naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-aws-sdk/test/sqs-naming.js b/packages/datadog-plugin-aws-sdk/test/sqs-naming.js index 2f677e4b69a..2df142a9463 100644 --- a/packages/datadog-plugin-aws-sdk/test/sqs-naming.js +++ b/packages/datadog-plugin-aws-sdk/test/sqs-naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-aws-sdk/test/util.spec.js b/packages/datadog-plugin-aws-sdk/test/util.spec.js index 4a4ad0a8b12..ede5ba21724 100644 --- a/packages/datadog-plugin-aws-sdk/test/util.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/util.spec.js @@ -1,3 +1,5 @@ +'use strict' + const { generatePointerHash, encodeValue, extractPrimaryKeys } = require('../src/util') describe('generatePointerHash', () => { diff --git a/packages/datadog-plugin-cassandra-driver/test/naming.js b/packages/datadog-plugin-cassandra-driver/test/naming.js index 10d93008b8a..e2aa5dfb16b 100644 --- a/packages/datadog-plugin-cassandra-driver/test/naming.js +++ b/packages/datadog-plugin-cassandra-driver/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-confluentinc-kafka-javascript/test/naming.js b/packages/datadog-plugin-confluentinc-kafka-javascript/test/naming.js index 78b1f5f3a1c..2adceec4ba9 100644 --- a/packages/datadog-plugin-confluentinc-kafka-javascript/test/naming.js +++ b/packages/datadog-plugin-confluentinc-kafka-javascript/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-couchbase/test/naming.js b/packages/datadog-plugin-couchbase/test/naming.js index aa3debd7fb7..aaa6376edd6 100644 --- a/packages/datadog-plugin-couchbase/test/naming.js +++ b/packages/datadog-plugin-couchbase/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const serviceName = { diff --git a/packages/datadog-plugin-couchbase/test/suite.js b/packages/datadog-plugin-couchbase/test/suite.js index b4d03b35e11..b44243e32d8 100644 --- a/packages/datadog-plugin-couchbase/test/suite.js +++ b/packages/datadog-plugin-couchbase/test/suite.js @@ -1,4 +1,5 @@ 'use strict' + // const suiteTest = require('../../dd-trace/test/plugins/suite') // suiteTest('couchbase', 
'couchbase/couchnode', 'v3.1.3') diff --git a/packages/datadog-plugin-cucumber/test/features/simple.js b/packages/datadog-plugin-cucumber/test/features/simple.js index 48af91cbc92..5d618d089b8 100644 --- a/packages/datadog-plugin-cucumber/test/features/simple.js +++ b/packages/datadog-plugin-cucumber/test/features/simple.js @@ -1,3 +1,5 @@ +'use strict' + const { Before, Given, When, Then, setWorldConstructor } = require('@cucumber/cucumber') const { expect } = require('chai') diff --git a/packages/datadog-plugin-cypress/src/after-run.js b/packages/datadog-plugin-cypress/src/after-run.js index 288218850d8..a15549a45fa 100644 --- a/packages/datadog-plugin-cypress/src/after-run.js +++ b/packages/datadog-plugin-cypress/src/after-run.js @@ -1,3 +1,5 @@ +'use strict' + const cypressPlugin = require('./cypress-plugin') module.exports = cypressPlugin.afterRun.bind(cypressPlugin) diff --git a/packages/datadog-plugin-cypress/src/after-spec.js b/packages/datadog-plugin-cypress/src/after-spec.js index 4fdf98ad582..a5c167a80c0 100644 --- a/packages/datadog-plugin-cypress/src/after-spec.js +++ b/packages/datadog-plugin-cypress/src/after-spec.js @@ -1,3 +1,5 @@ +'use strict' + const cypressPlugin = require('./cypress-plugin') module.exports = cypressPlugin.afterSpec.bind(cypressPlugin) diff --git a/packages/datadog-plugin-cypress/src/cypress-plugin.js b/packages/datadog-plugin-cypress/src/cypress-plugin.js index b930dfbb5ad..91e8e8c551b 100644 --- a/packages/datadog-plugin-cypress/src/cypress-plugin.js +++ b/packages/datadog-plugin-cypress/src/cypress-plugin.js @@ -1,3 +1,5 @@ +'use strict' + const { TEST_STATUS, TEST_IS_RUM_ACTIVE, diff --git a/packages/datadog-plugin-cypress/src/index.js b/packages/datadog-plugin-cypress/src/index.js index 932e462a243..0995a8f033f 100644 --- a/packages/datadog-plugin-cypress/src/index.js +++ b/packages/datadog-plugin-cypress/src/index.js @@ -1,3 +1,5 @@ +'use strict' + const Plugin = require('../../dd-trace/src/plugins/plugin') // Cypress plugin does not patch any library. 
This is just a placeholder to diff --git a/packages/datadog-plugin-cypress/src/plugin.js b/packages/datadog-plugin-cypress/src/plugin.js index 86d32839ac6..7226d0ad25b 100644 --- a/packages/datadog-plugin-cypress/src/plugin.js +++ b/packages/datadog-plugin-cypress/src/plugin.js @@ -1,3 +1,5 @@ +'use strict' + const NoopTracer = require('../../dd-trace/src/noop/tracer') const cypressPlugin = require('./cypress-plugin') const satisfies = require('semifies') diff --git a/packages/datadog-plugin-cypress/src/support.js b/packages/datadog-plugin-cypress/src/support.js index 38021aa6b68..86679d9de79 100644 --- a/packages/datadog-plugin-cypress/src/support.js +++ b/packages/datadog-plugin-cypress/src/support.js @@ -1,3 +1,5 @@ +'use strict' + let isEarlyFlakeDetectionEnabled = false let isKnownTestsEnabled = false let knownTestsForSuite = [] diff --git a/packages/datadog-plugin-cypress/test/app-10/app-server.js b/packages/datadog-plugin-cypress/test/app-10/app-server.js index 36512930879..37edf68e055 100644 --- a/packages/datadog-plugin-cypress/test/app-10/app-server.js +++ b/packages/datadog-plugin-cypress/test/app-10/app-server.js @@ -1,3 +1,5 @@ +'use strict' + // File to spin an HTTP server that returns an HTML for cypress to visit const http = require('http') diff --git a/packages/datadog-plugin-cypress/test/app-10/cypress.config.js b/packages/datadog-plugin-cypress/test/app-10/cypress.config.js index 6bb3b079f5d..fdd1c0b5a49 100644 --- a/packages/datadog-plugin-cypress/test/app-10/cypress.config.js +++ b/packages/datadog-plugin-cypress/test/app-10/cypress.config.js @@ -1,3 +1,5 @@ +'use strict' + const setupNodeEvents = require('./cypress/plugins/index.js') module.exports = { diff --git a/packages/datadog-plugin-cypress/test/app-10/cypress/plugins/index.js b/packages/datadog-plugin-cypress/test/app-10/cypress/plugins/index.js index 07594d7af6b..209aa20cb8b 100644 --- a/packages/datadog-plugin-cypress/test/app-10/cypress/plugins/index.js +++ b/packages/datadog-plugin-cypress/test/app-10/cypress/plugins/index.js @@ -1,3 +1,5 @@ +'use strict' + module.exports = (on, config) => { // We can't use the tracer available in the testing process, because this code is // run in a different process. 
We need to init a different tracer reporting to the diff --git a/packages/datadog-plugin-cypress/test/app-10/cypress/support/index.js b/packages/datadog-plugin-cypress/test/app-10/cypress/support/index.js index 929c713d2eb..3371ce88469 100644 --- a/packages/datadog-plugin-cypress/test/app-10/cypress/support/index.js +++ b/packages/datadog-plugin-cypress/test/app-10/cypress/support/index.js @@ -1 +1,3 @@ +'use strict' + require('../../../../src/support') diff --git a/packages/datadog-plugin-cypress/test/app/app-server.js b/packages/datadog-plugin-cypress/test/app/app-server.js index 36512930879..37edf68e055 100644 --- a/packages/datadog-plugin-cypress/test/app/app-server.js +++ b/packages/datadog-plugin-cypress/test/app/app-server.js @@ -1,3 +1,5 @@ +'use strict' + // File to spin an HTTP server that returns an HTML for cypress to visit const http = require('http') diff --git a/packages/datadog-plugin-cypress/test/app/cypress/plugins/index.js b/packages/datadog-plugin-cypress/test/app/cypress/plugins/index.js index 51c263d8ce6..ddeb86b685d 100644 --- a/packages/datadog-plugin-cypress/test/app/cypress/plugins/index.js +++ b/packages/datadog-plugin-cypress/test/app/cypress/plugins/index.js @@ -1 +1,3 @@ +'use strict' + module.exports = require('../../../../../../ci/cypress/plugin') diff --git a/packages/datadog-plugin-cypress/test/app/cypress/support/index.js b/packages/datadog-plugin-cypress/test/app/cypress/support/index.js index 929c713d2eb..3371ce88469 100644 --- a/packages/datadog-plugin-cypress/test/app/cypress/support/index.js +++ b/packages/datadog-plugin-cypress/test/app/cypress/support/index.js @@ -1 +1,3 @@ +'use strict' + require('../../../../src/support') diff --git a/packages/datadog-plugin-elasticsearch/test/naming.js b/packages/datadog-plugin-elasticsearch/test/naming.js index 737c724d81b..3d2c2af4d9e 100644 --- a/packages/datadog-plugin-elasticsearch/test/naming.js +++ b/packages/datadog-plugin-elasticsearch/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-fetch/test/naming.js b/packages/datadog-plugin-fetch/test/naming.js index e46f0960947..563b98f5821 100644 --- a/packages/datadog-plugin-fetch/test/naming.js +++ b/packages/datadog-plugin-fetch/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-google-cloud-pubsub/test/naming.js b/packages/datadog-plugin-google-cloud-pubsub/test/naming.js index da2ffd55247..b03e300f346 100644 --- a/packages/datadog-plugin-google-cloud-pubsub/test/naming.js +++ b/packages/datadog-plugin-google-cloud-pubsub/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-google-cloud-vertexai/src/utils.js b/packages/datadog-plugin-google-cloud-vertexai/src/utils.js index 81e6c7398f1..9b359361ccc 100644 --- a/packages/datadog-plugin-google-cloud-vertexai/src/utils.js +++ b/packages/datadog-plugin-google-cloud-vertexai/src/utils.js @@ -1,3 +1,5 @@ +'use strict' + function extractModel (instance) { const model = instance.model || instance.resourcePath || instance.publisherModelEndpoint return model?.split('/').pop() diff --git a/packages/datadog-plugin-graphql/src/utils.js b/packages/datadog-plugin-graphql/src/utils.js index dabeaac00fa..382a98b5f64 100644 --- 
a/packages/datadog-plugin-graphql/src/utils.js +++ b/packages/datadog-plugin-graphql/src/utils.js @@ -1,3 +1,5 @@ +'use strict' + function extractErrorIntoSpanEvent (config, span, exc) { const attributes = {} diff --git a/packages/datadog-plugin-graphql/test/naming.js b/packages/datadog-plugin-graphql/test/naming.js index ac873109936..7cf138a4697 100644 --- a/packages/datadog-plugin-graphql/test/naming.js +++ b/packages/datadog-plugin-graphql/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-grpc/test/naming.js b/packages/datadog-plugin-grpc/test/naming.js index 3328170b208..f9dd00b4ae6 100644 --- a/packages/datadog-plugin-grpc/test/naming.js +++ b/packages/datadog-plugin-grpc/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-http/test/naming.js b/packages/datadog-plugin-http/test/naming.js index 46635ddc678..eff07e598e8 100644 --- a/packages/datadog-plugin-http/test/naming.js +++ b/packages/datadog-plugin-http/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-http2/test/naming.js b/packages/datadog-plugin-http2/test/naming.js index 46635ddc678..eff07e598e8 100644 --- a/packages/datadog-plugin-http2/test/naming.js +++ b/packages/datadog-plugin-http2/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-ioredis/test/naming.js b/packages/datadog-plugin-ioredis/test/naming.js index 1ed3f17e428..6d116c36968 100644 --- a/packages/datadog-plugin-ioredis/test/naming.js +++ b/packages/datadog-plugin-ioredis/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-iovalkey/test/naming.js b/packages/datadog-plugin-iovalkey/test/naming.js index 57d556782e4..332d65ed39a 100644 --- a/packages/datadog-plugin-iovalkey/test/naming.js +++ b/packages/datadog-plugin-iovalkey/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-jest/src/index.js b/packages/datadog-plugin-jest/src/index.js index 39f88a70d74..6b1c3564928 100644 --- a/packages/datadog-plugin-jest/src/index.js +++ b/packages/datadog-plugin-jest/src/index.js @@ -1,3 +1,5 @@ +'use strict' + const CiPlugin = require('../../dd-trace/src/plugins/ci_plugin') const { storage } = require('../../datadog-core') const { getEnvironmentVariable } = require('../../dd-trace/src/config-helper') diff --git a/packages/datadog-plugin-jest/src/util.js b/packages/datadog-plugin-jest/src/util.js index 58bda8ef74b..335d910aa9d 100644 --- a/packages/datadog-plugin-jest/src/util.js +++ b/packages/datadog-plugin-jest/src/util.js @@ -1,3 +1,5 @@ +'use strict' + const { readFileSync } = require('fs') const { parse, extract } = require('jest-docblock') diff --git a/packages/datadog-plugin-jest/test/env.js b/packages/datadog-plugin-jest/test/env.js index fd843d8d0ca..17bac3a2f91 100644 --- a/packages/datadog-plugin-jest/test/env.js +++ b/packages/datadog-plugin-jest/test/env.js @@ -1,3 +1,5 @@ 
+'use strict' + const env = require(`../../../versions/${global.__libraryName__}@${global.__libraryVersion__}`).get() module.exports = env.default ? env.default : env diff --git a/packages/datadog-plugin-jest/test/fixtures/test-to-run.js b/packages/datadog-plugin-jest/test/fixtures/test-to-run.js index f093d1e39ed..40326d2276a 100644 --- a/packages/datadog-plugin-jest/test/fixtures/test-to-run.js +++ b/packages/datadog-plugin-jest/test/fixtures/test-to-run.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('test-to-run', () => { diff --git a/packages/datadog-plugin-jest/test/fixtures/test-to-skip.js b/packages/datadog-plugin-jest/test/fixtures/test-to-skip.js index 74655c102ae..54223246967 100644 --- a/packages/datadog-plugin-jest/test/fixtures/test-to-skip.js +++ b/packages/datadog-plugin-jest/test/fixtures/test-to-skip.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('test-to-skip', () => { diff --git a/packages/datadog-plugin-jest/test/fixtures/test-unskippable.js b/packages/datadog-plugin-jest/test/fixtures/test-unskippable.js index 6530097c7d4..a0b0d303340 100644 --- a/packages/datadog-plugin-jest/test/fixtures/test-unskippable.js +++ b/packages/datadog-plugin-jest/test/fixtures/test-unskippable.js @@ -1,6 +1,7 @@ /** * @datadog {"unskippable": true} */ +'use strict' const { expect } = require('chai') diff --git a/packages/datadog-plugin-jest/test/jest-focus.js b/packages/datadog-plugin-jest/test/jest-focus.js index 7c7eaddba6d..b741b42b96a 100644 --- a/packages/datadog-plugin-jest/test/jest-focus.js +++ b/packages/datadog-plugin-jest/test/jest-focus.js @@ -1,3 +1,5 @@ +'use strict' + describe('jest-test-focused', () => { it('will be skipped', () => { expect(true).toEqual(true) diff --git a/packages/datadog-plugin-jest/test/jest-hook-failure.js b/packages/datadog-plugin-jest/test/jest-hook-failure.js index d17df4a983f..c3c0aad4aeb 100644 --- a/packages/datadog-plugin-jest/test/jest-hook-failure.js +++ b/packages/datadog-plugin-jest/test/jest-hook-failure.js @@ -1,3 +1,5 @@ +'use strict' + describe('jest-hook-failure', () => { beforeEach(() => { throw new Error('hey, hook error before') diff --git a/packages/datadog-plugin-jest/test/jest-inject-globals.js b/packages/datadog-plugin-jest/test/jest-inject-globals.js index f18333867d0..1057eec03a2 100644 --- a/packages/datadog-plugin-jest/test/jest-inject-globals.js +++ b/packages/datadog-plugin-jest/test/jest-inject-globals.js @@ -1,3 +1,5 @@ +'use strict' + const { describe, it, expect } = require('../../../versions/@jest/globals').get() describe('jest-inject-globals', () => { diff --git a/packages/datadog-plugin-jest/test/jest-test-suite.js b/packages/datadog-plugin-jest/test/jest-test-suite.js index 4d96db61a75..518339bef4b 100644 --- a/packages/datadog-plugin-jest/test/jest-test-suite.js +++ b/packages/datadog-plugin-jest/test/jest-test-suite.js @@ -1,3 +1,5 @@ +'use strict' + describe('jest-test-suite-visibility', () => { it('works', () => { expect(true).toEqual(true) diff --git a/packages/datadog-plugin-jest/test/jest-test.js b/packages/datadog-plugin-jest/test/jest-test.js index d8155bcdc9a..51ddfa6169e 100644 --- a/packages/datadog-plugin-jest/test/jest-test.js +++ b/packages/datadog-plugin-jest/test/jest-test.js @@ -1,3 +1,5 @@ +'use strict' + const http = require('http') const tracer = require('dd-trace') diff --git a/packages/datadog-plugin-jest/test/util.spec.js b/packages/datadog-plugin-jest/test/util.spec.js index 297f1f74161..af28ed6d9ca 100644 --- 
a/packages/datadog-plugin-jest/test/util.spec.js +++ b/packages/datadog-plugin-jest/test/util.spec.js @@ -1,3 +1,5 @@ +'use strict' + const path = require('path') const { getFormattedJestTestParameters, getJestSuitesToRun } = require('../src/util') diff --git a/packages/datadog-plugin-kafkajs/src/batch-consumer.js b/packages/datadog-plugin-kafkajs/src/batch-consumer.js index 74c3caa1451..0fb16252116 100644 --- a/packages/datadog-plugin-kafkajs/src/batch-consumer.js +++ b/packages/datadog-plugin-kafkajs/src/batch-consumer.js @@ -1,3 +1,5 @@ +'use strict' + const ConsumerPlugin = require('../../dd-trace/src/plugins/consumer') const { getMessageSize } = require('../../dd-trace/src/datastreams') const { convertToTextMap } = require('./utils') diff --git a/packages/datadog-plugin-kafkajs/test/naming.js b/packages/datadog-plugin-kafkajs/test/naming.js index 78b1f5f3a1c..2adceec4ba9 100644 --- a/packages/datadog-plugin-kafkajs/test/naming.js +++ b/packages/datadog-plugin-kafkajs/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-mariadb/test/naming.js b/packages/datadog-plugin-mariadb/test/naming.js index 81bc7d60f0d..dfc38455cdf 100644 --- a/packages/datadog-plugin-mariadb/test/naming.js +++ b/packages/datadog-plugin-mariadb/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-memcached/test/naming.js b/packages/datadog-plugin-memcached/test/naming.js index 80a164f42d4..df6b8837c6b 100644 --- a/packages/datadog-plugin-memcached/test/naming.js +++ b/packages/datadog-plugin-memcached/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-microgateway-core/test/proxy.js b/packages/datadog-plugin-microgateway-core/test/proxy.js index dd9f0fdad4f..5c276ba16b2 100644 --- a/packages/datadog-plugin-microgateway-core/test/proxy.js +++ b/packages/datadog-plugin-microgateway-core/test/proxy.js @@ -1,3 +1,5 @@ +'use strict' + const http = require('http') const net = require('net') const tls = require('tls') diff --git a/packages/datadog-plugin-mocha/test/mocha-active-span-in-hooks.js b/packages/datadog-plugin-mocha/test/mocha-active-span-in-hooks.js index e5382e3de43..dccc56e256c 100644 --- a/packages/datadog-plugin-mocha/test/mocha-active-span-in-hooks.js +++ b/packages/datadog-plugin-mocha/test/mocha-active-span-in-hooks.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') let currentTestTraceId diff --git a/packages/datadog-plugin-mocha/test/mocha-fail-hook-async.js b/packages/datadog-plugin-mocha/test/mocha-fail-hook-async.js index 9f2e4aa7bba..2017fd3911b 100644 --- a/packages/datadog-plugin-mocha/test/mocha-fail-hook-async.js +++ b/packages/datadog-plugin-mocha/test/mocha-fail-hook-async.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('mocha-fail-before-all', function () { diff --git a/packages/datadog-plugin-mocha/test/mocha-fail-hook-sync.js b/packages/datadog-plugin-mocha/test/mocha-fail-hook-sync.js index 18f618b3578..928dac52896 100644 --- a/packages/datadog-plugin-mocha/test/mocha-fail-hook-sync.js +++ b/packages/datadog-plugin-mocha/test/mocha-fail-hook-sync.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('mocha-fail-hook-sync', 
() => { diff --git a/packages/datadog-plugin-mocha/test/mocha-test-async-fail.js b/packages/datadog-plugin-mocha/test/mocha-test-async-fail.js index d0bf922a319..88698671cef 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-async-fail.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-async-fail.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('mocha-test-async-fail', () => { diff --git a/packages/datadog-plugin-mocha/test/mocha-test-async-pass.js b/packages/datadog-plugin-mocha/test/mocha-test-async-pass.js index ae83996a137..8660312ac98 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-async-pass.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-async-pass.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('mocha-test-async-pass', () => { diff --git a/packages/datadog-plugin-mocha/test/mocha-test-code-coverage.js b/packages/datadog-plugin-mocha/test/mocha-test-code-coverage.js index 5daf79e48f6..fd318ddde23 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-code-coverage.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-code-coverage.js @@ -1,3 +1,5 @@ +'use strict' + describe('mocha-coverage', () => { it('can sum', () => { expect(1 + 2).to.equal(3) diff --git a/packages/datadog-plugin-mocha/test/mocha-test-done-fail-badly.js b/packages/datadog-plugin-mocha/test/mocha-test-done-fail-badly.js index 6745f4e51d7..ae21c2b1358 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-done-fail-badly.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-done-fail-badly.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('mocha-test-done-fail', () => { diff --git a/packages/datadog-plugin-mocha/test/mocha-test-done-fail.js b/packages/datadog-plugin-mocha/test/mocha-test-done-fail.js index f229e45509f..485875cfd6f 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-done-fail.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-done-fail.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('mocha-test-done-fail', () => { diff --git a/packages/datadog-plugin-mocha/test/mocha-test-done-pass.js b/packages/datadog-plugin-mocha/test/mocha-test-done-pass.js index 9c6ecd8fec3..6d3969526a0 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-done-pass.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-done-pass.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('mocha-test-done-pass', () => { diff --git a/packages/datadog-plugin-mocha/test/mocha-test-fail.js b/packages/datadog-plugin-mocha/test/mocha-test-fail.js index f8b7da0b6d6..44247c88089 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-fail.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-fail.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('mocha-test-fail', () => { diff --git a/packages/datadog-plugin-mocha/test/mocha-test-integration.js b/packages/datadog-plugin-mocha/test/mocha-test-integration.js index 3130b5b4960..ac6579fe426 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-integration.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-integration.js @@ -1,3 +1,5 @@ +'use strict' + const http = require('http') describe('mocha-test-integration-http', () => { diff --git a/packages/datadog-plugin-mocha/test/mocha-test-itr-1.js b/packages/datadog-plugin-mocha/test/mocha-test-itr-1.js index 355fe6eafad..605b3a1eaf3 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-itr-1.js +++ 
b/packages/datadog-plugin-mocha/test/mocha-test-itr-1.js @@ -1,3 +1,5 @@ +'use strict' + describe('mocha-itr-1', () => { it('can sum', () => { expect(1 + 2).to.equal(3) diff --git a/packages/datadog-plugin-mocha/test/mocha-test-itr-2.js b/packages/datadog-plugin-mocha/test/mocha-test-itr-2.js index 048fef1441d..79879da1c57 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-itr-2.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-itr-2.js @@ -1,3 +1,5 @@ +'use strict' + describe('mocha-itr-2', () => { it('can sum', () => { expect(1 + 2).to.equal(3) diff --git a/packages/datadog-plugin-mocha/test/mocha-test-parameterized.js b/packages/datadog-plugin-mocha/test/mocha-test-parameterized.js index 0d5378099cc..8c0da9581d7 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-parameterized.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-parameterized.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') const forEach = require('../../../versions/mocha-each').get() diff --git a/packages/datadog-plugin-mocha/test/mocha-test-pass.js b/packages/datadog-plugin-mocha/test/mocha-test-pass.js index 6d40be6f5a6..cd72b5e9244 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-pass.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-pass.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('mocha-test-pass', () => { diff --git a/packages/datadog-plugin-mocha/test/mocha-test-promise-fail.js b/packages/datadog-plugin-mocha/test/mocha-test-promise-fail.js index 874eb8ca8d9..3da477c204d 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-promise-fail.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-promise-fail.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('mocha-test-promise-fail', () => { diff --git a/packages/datadog-plugin-mocha/test/mocha-test-promise-pass.js b/packages/datadog-plugin-mocha/test/mocha-test-promise-pass.js index ab872e2eccb..4101e84a26b 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-promise-pass.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-promise-pass.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('mocha-test-promise-pass', () => { diff --git a/packages/datadog-plugin-mocha/test/mocha-test-retries.js b/packages/datadog-plugin-mocha/test/mocha-test-retries.js index 6417f21a4f0..13b8863acf8 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-retries.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-retries.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') let attempt = 0 diff --git a/packages/datadog-plugin-mocha/test/mocha-test-skip-describe.js b/packages/datadog-plugin-mocha/test/mocha-test-skip-describe.js index 1e0f4228337..5513c9c8087 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-skip-describe.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-skip-describe.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('mocha-test-skip-describe', () => { diff --git a/packages/datadog-plugin-mocha/test/mocha-test-skip.js b/packages/datadog-plugin-mocha/test/mocha-test-skip.js index 9c9ad98ec2b..78300ef7056 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-skip.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-skip.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('mocha-test-skip', () => { diff --git a/packages/datadog-plugin-mocha/test/mocha-test-suite-level-fail-after-each.js 
b/packages/datadog-plugin-mocha/test/mocha-test-suite-level-fail-after-each.js index 5e948dde33e..5f8b4c11193 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-suite-level-fail-after-each.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-suite-level-fail-after-each.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('mocha-test-suite-level-pass', function () { diff --git a/packages/datadog-plugin-mocha/test/mocha-test-suite-level-fail-skip-describe.js b/packages/datadog-plugin-mocha/test/mocha-test-suite-level-fail-skip-describe.js index ccd960b50a1..2f79fd8ed21 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-suite-level-fail-skip-describe.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-suite-level-fail-skip-describe.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('mocha-test-suite-level-fail', function () { diff --git a/packages/datadog-plugin-mocha/test/mocha-test-suite-level-fail-test.js b/packages/datadog-plugin-mocha/test/mocha-test-suite-level-fail-test.js index 50ca4d85f12..3263d26a8a6 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-suite-level-fail-test.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-suite-level-fail-test.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('mocha-test-suite-level-fail', function () { diff --git a/packages/datadog-plugin-mocha/test/mocha-test-suite-level-pass.js b/packages/datadog-plugin-mocha/test/mocha-test-suite-level-pass.js index 3e2f4b23c13..855e4d86a63 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-suite-level-pass.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-suite-level-pass.js @@ -1,3 +1,5 @@ +'use strict' + const { expect } = require('chai') describe('mocha-test-suite-level-fail', function () { diff --git a/packages/datadog-plugin-mocha/test/mocha-test-timeout-fail.js b/packages/datadog-plugin-mocha/test/mocha-test-timeout-fail.js index d86d68f2e17..eac8964efb8 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-timeout-fail.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-timeout-fail.js @@ -1,3 +1,5 @@ +'use strict' + describe('mocha-test-timeout-fail', () => { it('times out', function (done) { this.timeout(100) diff --git a/packages/datadog-plugin-mocha/test/mocha-test-timeout-pass.js b/packages/datadog-plugin-mocha/test/mocha-test-timeout-pass.js index 3d10c03c42c..75868497e69 100644 --- a/packages/datadog-plugin-mocha/test/mocha-test-timeout-pass.js +++ b/packages/datadog-plugin-mocha/test/mocha-test-timeout-pass.js @@ -1,3 +1,5 @@ +'use strict' + describe('mocha-test-timeout-pass', () => { it('does not timeout', function (done) { this.timeout(300) diff --git a/packages/datadog-plugin-moleculer/test/naming.js b/packages/datadog-plugin-moleculer/test/naming.js index ee339847550..c46d358d11f 100644 --- a/packages/datadog-plugin-moleculer/test/naming.js +++ b/packages/datadog-plugin-moleculer/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-mongodb-core/test/naming.js b/packages/datadog-plugin-mongodb-core/test/naming.js index c5113333a29..0ff68b6c167 100644 --- a/packages/datadog-plugin-mongodb-core/test/naming.js +++ b/packages/datadog-plugin-mongodb-core/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git 
a/packages/datadog-plugin-mysql/test/naming.js b/packages/datadog-plugin-mysql/test/naming.js index d9f2342d5d1..8c1ae665808 100644 --- a/packages/datadog-plugin-mysql/test/naming.js +++ b/packages/datadog-plugin-mysql/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-mysql2/test/naming.js b/packages/datadog-plugin-mysql2/test/naming.js index d9f2342d5d1..8c1ae665808 100644 --- a/packages/datadog-plugin-mysql2/test/naming.js +++ b/packages/datadog-plugin-mysql2/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-next/test/datadog.js b/packages/datadog-plugin-next/test/datadog.js index 565fe15db99..ba88ff20ea9 100644 --- a/packages/datadog-plugin-next/test/datadog.js +++ b/packages/datadog-plugin-next/test/datadog.js @@ -1,3 +1,5 @@ +'use strict' + const config = { validateStatus: code => false, hooks: { diff --git a/packages/datadog-plugin-next/test/naming.js b/packages/datadog-plugin-next/test/naming.js index 8b2f1740410..ee5bc55e43d 100644 --- a/packages/datadog-plugin-next/test/naming.js +++ b/packages/datadog-plugin-next/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-next/test/next.config.js b/packages/datadog-plugin-next/test/next.config.js index 7bdbd848df9..98a25b8f285 100644 --- a/packages/datadog-plugin-next/test/next.config.js +++ b/packages/datadog-plugin-next/test/next.config.js @@ -1,3 +1,5 @@ +'use strict' + // Build config dynamically for ease in testing and modification const { satisfies } = require('semver') diff --git a/packages/datadog-plugin-nyc/src/index.js b/packages/datadog-plugin-nyc/src/index.js index c407b55221c..3252e9be24b 100644 --- a/packages/datadog-plugin-nyc/src/index.js +++ b/packages/datadog-plugin-nyc/src/index.js @@ -1,3 +1,5 @@ +'use strict' + const CiPlugin = require('../../dd-trace/src/plugins/ci_plugin') class NycPlugin extends CiPlugin { diff --git a/packages/datadog-plugin-openai/test/no-init.js b/packages/datadog-plugin-openai/test/no-init.js index 002e07cb03d..d34bae7a9ad 100755 --- a/packages/datadog-plugin-openai/test/no-init.js +++ b/packages/datadog-plugin-openai/test/no-init.js @@ -1,4 +1,5 @@ #!/usr/bin/env node +'use strict' /** * Due to the complexity of the service initialization required by openai diff --git a/packages/datadog-plugin-opensearch/test/naming.js b/packages/datadog-plugin-opensearch/test/naming.js index 72b158196a0..fedf4903d1b 100644 --- a/packages/datadog-plugin-opensearch/test/naming.js +++ b/packages/datadog-plugin-opensearch/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-oracledb/src/connection-parser.js b/packages/datadog-plugin-oracledb/src/connection-parser.js index 8db94c4e9c2..89fada7b75a 100644 --- a/packages/datadog-plugin-oracledb/src/connection-parser.js +++ b/packages/datadog-plugin-oracledb/src/connection-parser.js @@ -1,3 +1,5 @@ +'use strict' + const { URL } = require('url') const log = require('../../dd-trace/src/log') diff --git a/packages/datadog-plugin-oracledb/test/naming.js b/packages/datadog-plugin-oracledb/test/naming.js index eaed0fa7516..6a7d84806a8 100644 --- 
a/packages/datadog-plugin-oracledb/test/naming.js +++ b/packages/datadog-plugin-oracledb/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-pg/test/naming.js b/packages/datadog-plugin-pg/test/naming.js index a961906a417..e24c5c21005 100644 --- a/packages/datadog-plugin-pg/test/naming.js +++ b/packages/datadog-plugin-pg/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-prisma/test/naming.js b/packages/datadog-plugin-prisma/test/naming.js index 1dfe11ca8df..74a05b60fc5 100644 --- a/packages/datadog-plugin-prisma/test/naming.js +++ b/packages/datadog-plugin-prisma/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-protobufjs/src/index.js b/packages/datadog-plugin-protobufjs/src/index.js index 800c3d9e3cb..bf2f2992ec6 100644 --- a/packages/datadog-plugin-protobufjs/src/index.js +++ b/packages/datadog-plugin-protobufjs/src/index.js @@ -1,3 +1,5 @@ +'use strict' + const SchemaPlugin = require('../../dd-trace/src/plugins/schema') const SchemaExtractor = require('./schema_iterator') diff --git a/packages/datadog-plugin-protobufjs/src/schema_iterator.js b/packages/datadog-plugin-protobufjs/src/schema_iterator.js index a1f8cb3bc5b..f7dba6439d5 100644 --- a/packages/datadog-plugin-protobufjs/src/schema_iterator.js +++ b/packages/datadog-plugin-protobufjs/src/schema_iterator.js @@ -1,3 +1,5 @@ +'use strict' + const PROTOBUF = 'protobuf' const { SCHEMA_DEFINITION, diff --git a/packages/datadog-plugin-protobufjs/test/helpers.js b/packages/datadog-plugin-protobufjs/test/helpers.js index d91be2e496b..8fdd54294df 100644 --- a/packages/datadog-plugin-protobufjs/test/helpers.js +++ b/packages/datadog-plugin-protobufjs/test/helpers.js @@ -1,3 +1,5 @@ +'use strict' + async function loadMessage (protobuf, messageTypeName) { if (messageTypeName === 'OtherMessage') { const root = await protobuf.load('packages/datadog-plugin-protobufjs/test/schemas/other_message.proto') diff --git a/packages/datadog-plugin-redis/test/naming.js b/packages/datadog-plugin-redis/test/naming.js index 1ed3f17e428..6d116c36968 100644 --- a/packages/datadog-plugin-redis/test/naming.js +++ b/packages/datadog-plugin-redis/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-rhea/test/naming.js b/packages/datadog-plugin-rhea/test/naming.js index f4e4508e8a7..f174f794963 100644 --- a/packages/datadog-plugin-rhea/test/naming.js +++ b/packages/datadog-plugin-rhea/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-selenium/src/index.js b/packages/datadog-plugin-selenium/src/index.js index fd1a8f1e651..05266d45bab 100644 --- a/packages/datadog-plugin-selenium/src/index.js +++ b/packages/datadog-plugin-selenium/src/index.js @@ -1,3 +1,5 @@ +'use strict' + const CiPlugin = require('../../dd-trace/src/plugins/ci_plugin') const { storage } = require('../../datadog-core') diff --git a/packages/datadog-plugin-tedious/test/naming.js b/packages/datadog-plugin-tedious/test/naming.js index 
da01aad5a8f..b1e0d53be0c 100644 --- a/packages/datadog-plugin-tedious/test/naming.js +++ b/packages/datadog-plugin-tedious/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-undici/test/naming.js b/packages/datadog-plugin-undici/test/naming.js index 5bf2be387c3..8ca1b94fea4 100644 --- a/packages/datadog-plugin-undici/test/naming.js +++ b/packages/datadog-plugin-undici/test/naming.js @@ -1,3 +1,5 @@ +'use strict' + const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') const rawExpectedSchema = { diff --git a/packages/datadog-plugin-vitest/src/index.js b/packages/datadog-plugin-vitest/src/index.js index 281c542acca..0d6e60a4437 100644 --- a/packages/datadog-plugin-vitest/src/index.js +++ b/packages/datadog-plugin-vitest/src/index.js @@ -1,3 +1,5 @@ +'use strict' + const CiPlugin = require('../../dd-trace/src/plugins/ci_plugin') const { storage } = require('../../datadog-core') const { getEnvironmentVariable } = require('../../dd-trace/src/config-helper') diff --git a/packages/dd-trace/src/appsec/iast/iast-context.js b/packages/dd-trace/src/appsec/iast/iast-context.js index 6daff4aa348..d123d4316f8 100644 --- a/packages/dd-trace/src/appsec/iast/iast-context.js +++ b/packages/dd-trace/src/appsec/iast/iast-context.js @@ -1,3 +1,5 @@ +'use strict' + const IAST_CONTEXT_KEY = Symbol('_dd.iast.context') const IAST_TRANSACTION_ID = Symbol('_dd.iast.transactionId') @@ -52,7 +54,9 @@ function cleanIastContext (store, context, iastContext) { context[IAST_CONTEXT_KEY] = null } if (iastContext) { - Object.keys(iastContext).forEach(key => delete iastContext[key]) + if (typeof iastContext === 'object') { // eslint-disable-line eslint-rules/eslint-safe-typeof-object + Object.keys(iastContext).forEach(key => delete iastContext[key]) + } return true } return false diff --git a/packages/dd-trace/src/appsec/iast/index.js b/packages/dd-trace/src/appsec/iast/index.js index 8601dc3c8ca..1f16df27eea 100644 --- a/packages/dd-trace/src/appsec/iast/index.js +++ b/packages/dd-trace/src/appsec/iast/index.js @@ -1,3 +1,5 @@ +'use strict' + const vulnerabilityReporter = require('./vulnerability-reporter') const { enableAllAnalyzers, disableAllAnalyzers } = require('./analyzers') const web = require('../../plugins/util/web') diff --git a/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter-esm.mjs b/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter-esm.mjs index 2bd186619fd..a5075668ea4 100644 --- a/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter-esm.mjs +++ b/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter-esm.mjs @@ -1,5 +1,3 @@ -'use strict' - import path from 'path' import { URL } from 'url' import { getName } from '../telemetry/verbosity.js' diff --git a/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js b/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js index 3415e13bc22..6c7a61a9f95 100644 --- a/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js +++ b/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js @@ -1,4 +1,5 @@ 'use strict' + /* eslint n/no-unsupported-features/node-builtins: ['error', { ignores: ['module.register'] }] */ const Module = require('module') diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-regex.js b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-regex.js index 
c8abc9e3a76..2adcb5cc302 100644 --- a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-regex.js +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-regex.js @@ -1,3 +1,5 @@ +'use strict' + // eslint-disable-next-line @stylistic/max-len const DEFAULT_IAST_REDACTION_NAME_PATTERN = '(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?|(?:sur|last)name|user(?:name)?|address|e?mail)' // eslint-disable-next-line @stylistic/max-len diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities.js b/packages/dd-trace/src/appsec/iast/vulnerabilities.js index 639ee43df8b..7a38f3741c3 100644 --- a/packages/dd-trace/src/appsec/iast/vulnerabilities.js +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities.js @@ -1,3 +1,5 @@ +'use strict' + module.exports = { COMMAND_INJECTION: 'COMMAND_INJECTION', CODE_INJECTION: 'CODE_INJECTION', diff --git a/packages/dd-trace/src/appsec/telemetry/common.js b/packages/dd-trace/src/appsec/telemetry/common.js index a63e4b133ff..06ce35554ff 100644 --- a/packages/dd-trace/src/appsec/telemetry/common.js +++ b/packages/dd-trace/src/appsec/telemetry/common.js @@ -1,4 +1,4 @@ -'use strinct' +'use strict' const DD_TELEMETRY_REQUEST_METRICS = Symbol('_dd.appsec.telemetry.request.metrics') diff --git a/packages/dd-trace/src/ci-visibility/early-flake-detection/get-known-tests.js b/packages/dd-trace/src/ci-visibility/early-flake-detection/get-known-tests.js index d6f1c5a4d22..6f10be0ba9d 100644 --- a/packages/dd-trace/src/ci-visibility/early-flake-detection/get-known-tests.js +++ b/packages/dd-trace/src/ci-visibility/early-flake-detection/get-known-tests.js @@ -1,3 +1,5 @@ +'use strict' + const request = require('../../exporters/common/request') const id = require('../../id') const log = require('../../log') diff --git a/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js b/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js index c58c472e1e7..b74a148202f 100644 --- a/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js +++ b/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js @@ -1,3 +1,5 @@ +'use strict' + const fs = require('fs') const path = require('path') diff --git a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js index 9f545d06348..ac7e0e1ee8b 100644 --- a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js +++ b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js @@ -1,3 +1,5 @@ +'use strict' + const request = require('../../exporters/common/request') const log = require('../../log') const { getEnvironmentVariable } = require('../../config-helper') diff --git a/packages/dd-trace/src/ci-visibility/log-submission/log-submission-plugin.js b/packages/dd-trace/src/ci-visibility/log-submission/log-submission-plugin.js index b97ee9731af..811edd0712a 100644 --- a/packages/dd-trace/src/ci-visibility/log-submission/log-submission-plugin.js +++ b/packages/dd-trace/src/ci-visibility/log-submission/log-submission-plugin.js @@ -1,3 +1,5 @@ +'use strict' + const Plugin = require('../../plugins/plugin') const log = require('../../log') const { getEnvironmentVariable } = require('../../config-helper') diff --git 
a/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js b/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js index 1d4febd98e4..e16b62dcdfa 100644 --- a/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js +++ b/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js @@ -1,3 +1,5 @@ +'use strict' + const request = require('../../exporters/common/request') const id = require('../../id') const log = require('../../log') diff --git a/packages/dd-trace/src/ci-visibility/telemetry.js b/packages/dd-trace/src/ci-visibility/telemetry.js index 7b23d38db08..b93c23b0eaf 100644 --- a/packages/dd-trace/src/ci-visibility/telemetry.js +++ b/packages/dd-trace/src/ci-visibility/telemetry.js @@ -1,3 +1,5 @@ +'use strict' + const telemetryMetrics = require('../telemetry/metrics') const ciVisibilityMetrics = telemetryMetrics.manager.namespace('civisibility') diff --git a/packages/dd-trace/src/ci-visibility/test-api-manual/test-api-manual-plugin.js b/packages/dd-trace/src/ci-visibility/test-api-manual/test-api-manual-plugin.js index 9cd37098143..fa6730288d2 100644 --- a/packages/dd-trace/src/ci-visibility/test-api-manual/test-api-manual-plugin.js +++ b/packages/dd-trace/src/ci-visibility/test-api-manual/test-api-manual-plugin.js @@ -1,3 +1,5 @@ +'use strict' + const CiPlugin = require('../../plugins/ci_plugin') const { TEST_STATUS, diff --git a/packages/dd-trace/src/ci-visibility/test-management/get-test-management-tests.js b/packages/dd-trace/src/ci-visibility/test-management/get-test-management-tests.js index 2e1897e7cb7..f31f855668c 100644 --- a/packages/dd-trace/src/ci-visibility/test-management/get-test-management-tests.js +++ b/packages/dd-trace/src/ci-visibility/test-management/get-test-management-tests.js @@ -1,3 +1,5 @@ +'use strict' + const request = require('../../exporters/common/request') const id = require('../../id') const { getEnvironmentVariable } = require('../../config-helper') diff --git a/packages/dd-trace/src/config_stable.js b/packages/dd-trace/src/config_stable.js index fa7855ca3e1..bbd459772b3 100644 --- a/packages/dd-trace/src/config_stable.js +++ b/packages/dd-trace/src/config_stable.js @@ -1,3 +1,5 @@ +'use strict' + const os = require('os') const fs = require('fs') const { getEnvironmentVariable } = require('../../dd-trace/src/config-helper') diff --git a/packages/dd-trace/src/datastreams/checkpointer.js b/packages/dd-trace/src/datastreams/checkpointer.js index 1b6e2a28c0f..ae36db43fdc 100644 --- a/packages/dd-trace/src/datastreams/checkpointer.js +++ b/packages/dd-trace/src/datastreams/checkpointer.js @@ -1,3 +1,5 @@ +'use strict' + const DataStreamsContext = require('./context') class DataStreamsCheckpointer { diff --git a/packages/dd-trace/src/datastreams/context.js b/packages/dd-trace/src/datastreams/context.js index 263840fe1c8..dccb204bee2 100644 --- a/packages/dd-trace/src/datastreams/context.js +++ b/packages/dd-trace/src/datastreams/context.js @@ -1,3 +1,5 @@ +'use strict' + const { storage } = require('../../../datadog-core') const log = require('../log') diff --git a/packages/dd-trace/src/datastreams/encoding.js b/packages/dd-trace/src/datastreams/encoding.js index e6e75548533..66f9998c73d 100644 --- a/packages/dd-trace/src/datastreams/encoding.js +++ b/packages/dd-trace/src/datastreams/encoding.js @@ -1,3 +1,5 @@ +'use strict' + // encodes positive and negative numbers, using zig zag encoding to reduce the size of the variable length encoding. 
// uses high and low part to ensure those parts are under the limit for byte operations in javascript (32 bits) // maximum number possible to encode is MAX_SAFE_INTEGER/2 (using zig zag shifts the bits by 1 to the left) diff --git a/packages/dd-trace/src/datastreams/fnv.js b/packages/dd-trace/src/datastreams/fnv.js index 4d04fa0c102..c4deb32d9d0 100644 --- a/packages/dd-trace/src/datastreams/fnv.js +++ b/packages/dd-trace/src/datastreams/fnv.js @@ -1,3 +1,5 @@ +'use strict' + const FNV_64_PRIME = BigInt('0x100000001B3') const FNV1_64_INIT = BigInt('0xCBF29CE484222325') diff --git a/packages/dd-trace/src/datastreams/pathway.js b/packages/dd-trace/src/datastreams/pathway.js index 27ea3dac2a1..fe235148ae6 100644 --- a/packages/dd-trace/src/datastreams/pathway.js +++ b/packages/dd-trace/src/datastreams/pathway.js @@ -1,3 +1,5 @@ +'use strict' + // encoding used here is sha256 // other languages use FNV1 // this inconsistency is ok because hashes do not need to be consistent across services diff --git a/packages/dd-trace/src/datastreams/processor.js b/packages/dd-trace/src/datastreams/processor.js index c0e3ff2a18c..5bc503d9f5f 100644 --- a/packages/dd-trace/src/datastreams/processor.js +++ b/packages/dd-trace/src/datastreams/processor.js @@ -1,3 +1,5 @@ +'use strict' + const os = require('os') const pkg = require('../../../../package.json') diff --git a/packages/dd-trace/src/datastreams/schemas/schema.js b/packages/dd-trace/src/datastreams/schemas/schema.js index 4378e37d080..a20da49149c 100644 --- a/packages/dd-trace/src/datastreams/schemas/schema.js +++ b/packages/dd-trace/src/datastreams/schemas/schema.js @@ -1,3 +1,5 @@ +'use strict' + class Schema { constructor (definition, id) { this.definition = definition diff --git a/packages/dd-trace/src/datastreams/schemas/schema_builder.js b/packages/dd-trace/src/datastreams/schemas/schema_builder.js index 6db3660d23e..6bcf58becd9 100644 --- a/packages/dd-trace/src/datastreams/schemas/schema_builder.js +++ b/packages/dd-trace/src/datastreams/schemas/schema_builder.js @@ -1,3 +1,5 @@ +'use strict' + const { LRUCache } = require('lru-cache') const { fnv64 } = require('../fnv') const { Schema } = require('./schema') diff --git a/packages/dd-trace/src/datastreams/schemas/schema_sampler.js b/packages/dd-trace/src/datastreams/schemas/schema_sampler.js index 01e8502e90f..f1ec1c4ce04 100644 --- a/packages/dd-trace/src/datastreams/schemas/schema_sampler.js +++ b/packages/dd-trace/src/datastreams/schemas/schema_sampler.js @@ -1,3 +1,5 @@ +'use strict' + const SAMPLE_INTERVAL_MILLIS = 30 * 1000 class SchemaSampler { diff --git a/packages/dd-trace/src/datastreams/writer.js b/packages/dd-trace/src/datastreams/writer.js index f810c81d9c8..f7fdfd59c22 100644 --- a/packages/dd-trace/src/datastreams/writer.js +++ b/packages/dd-trace/src/datastreams/writer.js @@ -1,3 +1,5 @@ +'use strict' + const pkg = require('../../../../package.json') const log = require('../log') const request = require('../exporters/common/request') diff --git a/packages/dd-trace/src/debugger/devtools_client/inspector_promises_polyfill.js b/packages/dd-trace/src/debugger/devtools_client/inspector_promises_polyfill.js index a940065a62d..26f6f7e3b7f 100644 --- a/packages/dd-trace/src/debugger/devtools_client/inspector_promises_polyfill.js +++ b/packages/dd-trace/src/debugger/devtools_client/inspector_promises_polyfill.js @@ -1,4 +1,5 @@ 'use strict' + /* eslint n/no-unsupported-features/node-builtins: ['error', { ignores: ['inspector/promises'] }] */ const { builtinModules } = 
require('node:module') diff --git a/packages/dd-trace/src/debugger/devtools_client/snapshot/symbols.js b/packages/dd-trace/src/debugger/devtools_client/snapshot/symbols.js index 66a82d0a160..5b250610437 100644 --- a/packages/dd-trace/src/debugger/devtools_client/snapshot/symbols.js +++ b/packages/dd-trace/src/debugger/devtools_client/snapshot/symbols.js @@ -1,4 +1,4 @@ -'use stict' +'use strict' module.exports = { collectionSizeSym: Symbol('datadog.collectionSize'), diff --git a/packages/dd-trace/src/encode/tags-processors.js b/packages/dd-trace/src/encode/tags-processors.js index efa0a78bb97..a832b5b0565 100644 --- a/packages/dd-trace/src/encode/tags-processors.js +++ b/packages/dd-trace/src/encode/tags-processors.js @@ -1,3 +1,5 @@ +'use strict' + // From agent truncators: https://github.com/DataDog/datadog-agent/blob/main/pkg/trace/agent/truncator.go // Values from: https://github.com/DataDog/datadog-agent/blob/main/pkg/trace/traceutil/truncate.go#L22-L27 diff --git a/packages/dd-trace/src/exporters/common/agent-info-exporter.js b/packages/dd-trace/src/exporters/common/agent-info-exporter.js index 3a77eab1c57..84026e1e30c 100644 --- a/packages/dd-trace/src/exporters/common/agent-info-exporter.js +++ b/packages/dd-trace/src/exporters/common/agent-info-exporter.js @@ -1,3 +1,5 @@ +'use strict' + const { URL, format } = require('url') const request = require('./request') diff --git a/packages/dd-trace/src/exporters/common/util.js b/packages/dd-trace/src/exporters/common/util.js index cc1c50c0965..5dd66c43726 100644 --- a/packages/dd-trace/src/exporters/common/util.js +++ b/packages/dd-trace/src/exporters/common/util.js @@ -1,3 +1,5 @@ +'use strict' + const { getEnvironmentVariable } = require('../../config-helper') function safeJSONStringify (value) { diff --git a/packages/dd-trace/src/exporters/span-stats/index.js b/packages/dd-trace/src/exporters/span-stats/index.js index 7feec80f578..e7a36fe271a 100644 --- a/packages/dd-trace/src/exporters/span-stats/index.js +++ b/packages/dd-trace/src/exporters/span-stats/index.js @@ -1,3 +1,5 @@ +'use strict' + const { URL, format } = require('url') const { Writer } = require('./writer') diff --git a/packages/dd-trace/src/exporters/span-stats/writer.js b/packages/dd-trace/src/exporters/span-stats/writer.js index 64258a99dcb..692dde47ca4 100644 --- a/packages/dd-trace/src/exporters/span-stats/writer.js +++ b/packages/dd-trace/src/exporters/span-stats/writer.js @@ -1,3 +1,5 @@ +'use strict' + const { SpanStatsEncoder } = require('../../encode/span-stats') const pkg = require('../../../../../package.json') diff --git a/packages/dd-trace/src/external-logger/src/index.js b/packages/dd-trace/src/external-logger/src/index.js index 1a2198f3bde..77f5523648a 100644 --- a/packages/dd-trace/src/external-logger/src/index.js +++ b/packages/dd-trace/src/external-logger/src/index.js @@ -1,3 +1,5 @@ +'use strict' + const tracerLogger = require('../../log')// path to require tracer logger const https = require('https') diff --git a/packages/dd-trace/src/git_metadata_tagger.js b/packages/dd-trace/src/git_metadata_tagger.js index dd3db945940..f33e18bc408 100644 --- a/packages/dd-trace/src/git_metadata_tagger.js +++ b/packages/dd-trace/src/git_metadata_tagger.js @@ -1,3 +1,5 @@ +'use strict' + const { SCI_COMMIT_SHA, SCI_REPOSITORY_URL } = require('./constants') class GitMetadataTagger { diff --git a/packages/dd-trace/src/git_properties.js b/packages/dd-trace/src/git_properties.js index d6429232226..43eaa119dcf 100644 --- a/packages/dd-trace/src/git_properties.js +++ 
b/packages/dd-trace/src/git_properties.js @@ -1,3 +1,5 @@ +'use strict' + const commitSHARegex = /git\.commit\.sha=([a-f\d]{40})/ const repositoryUrlRegex = /git\.repository_url=([\w\d:@/.-]+)/ diff --git a/packages/dd-trace/src/llmobs/plugins/bedrockruntime.js b/packages/dd-trace/src/llmobs/plugins/bedrockruntime.js index 7378949e3ac..62754072fb6 100644 --- a/packages/dd-trace/src/llmobs/plugins/bedrockruntime.js +++ b/packages/dd-trace/src/llmobs/plugins/bedrockruntime.js @@ -1,3 +1,5 @@ +'use strict' + const BaseLLMObsPlugin = require('./base') const { storage } = require('../../../../datadog-core') const llmobsStore = storage('llmobs') diff --git a/packages/dd-trace/src/noop/dogstatsd.js b/packages/dd-trace/src/noop/dogstatsd.js index 5ecb5d45840..686508badaa 100644 --- a/packages/dd-trace/src/noop/dogstatsd.js +++ b/packages/dd-trace/src/noop/dogstatsd.js @@ -1,3 +1,5 @@ +'use strict' + /** * @import { DogStatsD } from "../../../../index.d.ts" * @implements {DogStatsD} diff --git a/packages/dd-trace/src/opentracing/propagation/text_map_dsm.js b/packages/dd-trace/src/opentracing/propagation/text_map_dsm.js index 109746a620a..c9739e48255 100644 --- a/packages/dd-trace/src/opentracing/propagation/text_map_dsm.js +++ b/packages/dd-trace/src/opentracing/propagation/text_map_dsm.js @@ -1,3 +1,5 @@ +'use strict' + const pick = require('../../../../datadog-core/src/utils/src/pick') const log = require('../../log') diff --git a/packages/dd-trace/src/payload-tagging/config/index.js b/packages/dd-trace/src/payload-tagging/config/index.js index a5c8dc6cf1f..318e6b47247 100644 --- a/packages/dd-trace/src/payload-tagging/config/index.js +++ b/packages/dd-trace/src/payload-tagging/config/index.js @@ -1,3 +1,5 @@ +'use strict' + const aws = require('./aws.json') const sdks = { aws } diff --git a/packages/dd-trace/src/payload-tagging/index.js b/packages/dd-trace/src/payload-tagging/index.js index 342dcd75158..b80ce08d29a 100644 --- a/packages/dd-trace/src/payload-tagging/index.js +++ b/packages/dd-trace/src/payload-tagging/index.js @@ -1,3 +1,5 @@ +'use strict' + const rfdc = require('rfdc')({ proto: false, circles: false }) const { diff --git a/packages/dd-trace/src/payload-tagging/tagging.js b/packages/dd-trace/src/payload-tagging/tagging.js index 206e60b4a61..8e31ae40ee3 100644 --- a/packages/dd-trace/src/payload-tagging/tagging.js +++ b/packages/dd-trace/src/payload-tagging/tagging.js @@ -1,3 +1,5 @@ +'use strict' + const { PAYLOAD_TAGGING_MAX_TAGS } = require('../constants') const redactedKeys = new Set([ diff --git a/packages/dd-trace/src/plugins/apollo.js b/packages/dd-trace/src/plugins/apollo.js index 1c0d6aa98fd..b9d00d8ffe4 100644 --- a/packages/dd-trace/src/plugins/apollo.js +++ b/packages/dd-trace/src/plugins/apollo.js @@ -1,3 +1,5 @@ +'use strict' + const TracingPlugin = require('./tracing') const { storage } = require('../../../datadog-core') diff --git a/packages/dd-trace/src/plugins/ci_plugin.js b/packages/dd-trace/src/plugins/ci_plugin.js index f20b70c25b6..f57cb685b7e 100644 --- a/packages/dd-trace/src/plugins/ci_plugin.js +++ b/packages/dd-trace/src/plugins/ci_plugin.js @@ -1,3 +1,5 @@ +'use strict' + const { storage } = require('../../../datadog-core') const { getTestEnvironmentMetadata, diff --git a/packages/dd-trace/src/plugins/util/ci.js b/packages/dd-trace/src/plugins/util/ci.js index fcc4cee811e..e855b5460e9 100644 --- a/packages/dd-trace/src/plugins/util/ci.js +++ b/packages/dd-trace/src/plugins/util/ci.js @@ -1,3 +1,5 @@ +'use strict' + const { readFileSync } = require('fs') 
const { GIT_BRANCH, diff --git a/packages/dd-trace/src/plugins/util/env.js b/packages/dd-trace/src/plugins/util/env.js index c1721c4bb11..0b11b4abb78 100644 --- a/packages/dd-trace/src/plugins/util/env.js +++ b/packages/dd-trace/src/plugins/util/env.js @@ -1,3 +1,5 @@ +'use strict' + const os = require('os') const OS_PLATFORM = 'os.platform' diff --git a/packages/dd-trace/src/plugins/util/git.js b/packages/dd-trace/src/plugins/util/git.js index 45bb4e2af84..a94c1afcf5e 100644 --- a/packages/dd-trace/src/plugins/util/git.js +++ b/packages/dd-trace/src/plugins/util/git.js @@ -1,3 +1,5 @@ +'use strict' + const cp = require('child_process') const os = require('os') const path = require('path') diff --git a/packages/dd-trace/src/plugins/util/inferred_proxy.js b/packages/dd-trace/src/plugins/util/inferred_proxy.js index 965b62ae154..76f8696276c 100644 --- a/packages/dd-trace/src/plugins/util/inferred_proxy.js +++ b/packages/dd-trace/src/plugins/util/inferred_proxy.js @@ -1,3 +1,5 @@ +'use strict' + const log = require('../../log') const tags = require('../../../../../ext/tags') diff --git a/packages/dd-trace/src/plugins/util/llm.js b/packages/dd-trace/src/plugins/util/llm.js index 22a7ad211dc..92aedc7e5de 100644 --- a/packages/dd-trace/src/plugins/util/llm.js +++ b/packages/dd-trace/src/plugins/util/llm.js @@ -1,3 +1,5 @@ +'use strict' + const Sampler = require('../../sampler') const RE_NEWLINE = /\n/g diff --git a/packages/dd-trace/src/plugins/util/serverless.js b/packages/dd-trace/src/plugins/util/serverless.js index 3e969ffdfad..43958314705 100644 --- a/packages/dd-trace/src/plugins/util/serverless.js +++ b/packages/dd-trace/src/plugins/util/serverless.js @@ -1,3 +1,5 @@ +'use strict' + const types = require('../../../../../ext/types') const web = require('./web') diff --git a/packages/dd-trace/src/plugins/util/tags.js b/packages/dd-trace/src/plugins/util/tags.js index 9c3dc911e06..ec3a818396a 100644 --- a/packages/dd-trace/src/plugins/util/tags.js +++ b/packages/dd-trace/src/plugins/util/tags.js @@ -1,3 +1,5 @@ +'use strict' + const GIT_COMMIT_SHA = 'git.commit.sha' const GIT_BRANCH = 'git.branch' const GIT_REPOSITORY_URL = 'git.repository_url' diff --git a/packages/dd-trace/src/plugins/util/test.js b/packages/dd-trace/src/plugins/util/test.js index 341173ae383..3c3924495ac 100644 --- a/packages/dd-trace/src/plugins/util/test.js +++ b/packages/dd-trace/src/plugins/util/test.js @@ -1,3 +1,5 @@ +'use strict' + const path = require('path') const fs = require('fs') const { URL } = require('url') diff --git a/packages/dd-trace/src/plugins/util/url.js b/packages/dd-trace/src/plugins/util/url.js index 1d3cef5d41e..de95dee7b10 100644 --- a/packages/dd-trace/src/plugins/util/url.js +++ b/packages/dd-trace/src/plugins/util/url.js @@ -1,3 +1,5 @@ +'use strict' + const { URL } = require('url') function filterSensitiveInfoFromRepository (repositoryUrl) { diff --git a/packages/dd-trace/src/plugins/util/user-provided-git.js b/packages/dd-trace/src/plugins/util/user-provided-git.js index c2195cc92e4..e35a03b0108 100644 --- a/packages/dd-trace/src/plugins/util/user-provided-git.js +++ b/packages/dd-trace/src/plugins/util/user-provided-git.js @@ -1,3 +1,5 @@ +'use strict' + const { GIT_COMMIT_SHA, GIT_BRANCH, diff --git a/packages/dd-trace/src/profiling/exporters/event_serializer.js b/packages/dd-trace/src/profiling/exporters/event_serializer.js index 199482b6661..58f028e7983 100644 --- a/packages/dd-trace/src/profiling/exporters/event_serializer.js +++ 
b/packages/dd-trace/src/profiling/exporters/event_serializer.js @@ -1,4 +1,5 @@ 'use strict' + /* eslint n/no-unsupported-features/node-builtins: ['error', { ignores: ['os.availableParallelism'] }] */ const os = require('os') diff --git a/packages/dd-trace/src/profiling/profilers/event_plugins/dns.js b/packages/dd-trace/src/profiling/profilers/event_plugins/dns.js index 29b1e62775f..04df7e40db8 100644 --- a/packages/dd-trace/src/profiling/profilers/event_plugins/dns.js +++ b/packages/dd-trace/src/profiling/profilers/event_plugins/dns.js @@ -1,3 +1,5 @@ +'use strict' + const EventPlugin = require('./event') class DNSPlugin extends EventPlugin { diff --git a/packages/dd-trace/src/profiling/profilers/event_plugins/dns_lookup.js b/packages/dd-trace/src/profiling/profilers/event_plugins/dns_lookup.js index 13f7ff3bd77..6597326b970 100644 --- a/packages/dd-trace/src/profiling/profilers/event_plugins/dns_lookup.js +++ b/packages/dd-trace/src/profiling/profilers/event_plugins/dns_lookup.js @@ -1,3 +1,5 @@ +'use strict' + const DNSPlugin = require('./dns') class DNSLookupPlugin extends DNSPlugin { diff --git a/packages/dd-trace/src/profiling/profilers/event_plugins/dns_lookupservice.js b/packages/dd-trace/src/profiling/profilers/event_plugins/dns_lookupservice.js index c40eb37518d..f270fd7c81c 100644 --- a/packages/dd-trace/src/profiling/profilers/event_plugins/dns_lookupservice.js +++ b/packages/dd-trace/src/profiling/profilers/event_plugins/dns_lookupservice.js @@ -1,3 +1,5 @@ +'use strict' + const DNSPlugin = require('./dns') class DNSLookupServicePlugin extends DNSPlugin { diff --git a/packages/dd-trace/src/profiling/profilers/event_plugins/dns_resolve.js b/packages/dd-trace/src/profiling/profilers/event_plugins/dns_resolve.js index d649d8a65ef..8c65ba71d41 100644 --- a/packages/dd-trace/src/profiling/profilers/event_plugins/dns_resolve.js +++ b/packages/dd-trace/src/profiling/profilers/event_plugins/dns_resolve.js @@ -1,3 +1,5 @@ +'use strict' + const DNSPlugin = require('./dns') const queryNames = new Map() diff --git a/packages/dd-trace/src/profiling/profilers/event_plugins/dns_reverse.js b/packages/dd-trace/src/profiling/profilers/event_plugins/dns_reverse.js index ad01ff13ea4..2e67ef8dd40 100644 --- a/packages/dd-trace/src/profiling/profilers/event_plugins/dns_reverse.js +++ b/packages/dd-trace/src/profiling/profilers/event_plugins/dns_reverse.js @@ -1,3 +1,5 @@ +'use strict' + const DNSPlugin = require('./dns') class DNSReversePlugin extends DNSPlugin { diff --git a/packages/dd-trace/src/profiling/profilers/event_plugins/event.js b/packages/dd-trace/src/profiling/profilers/event_plugins/event.js index 35d6728b9b0..4812990bc88 100644 --- a/packages/dd-trace/src/profiling/profilers/event_plugins/event.js +++ b/packages/dd-trace/src/profiling/profilers/event_plugins/event.js @@ -1,3 +1,5 @@ +'use strict' + const TracingPlugin = require('../../../plugins/tracing') const { performance } = require('perf_hooks') diff --git a/packages/dd-trace/src/profiling/profilers/event_plugins/fs.js b/packages/dd-trace/src/profiling/profilers/event_plugins/fs.js index 34eb7b52353..263ba9b7db0 100644 --- a/packages/dd-trace/src/profiling/profilers/event_plugins/fs.js +++ b/packages/dd-trace/src/profiling/profilers/event_plugins/fs.js @@ -1,3 +1,5 @@ +'use strict' + const EventPlugin = require('./event') // Values taken from parameter names in datadog-instrumentations/src/fs.js. 
diff --git a/packages/dd-trace/src/profiling/profilers/event_plugins/net.js b/packages/dd-trace/src/profiling/profilers/event_plugins/net.js index ffd99bbda70..e85a124cd98 100644 --- a/packages/dd-trace/src/profiling/profilers/event_plugins/net.js +++ b/packages/dd-trace/src/profiling/profilers/event_plugins/net.js @@ -1,3 +1,5 @@ +'use strict' + const EventPlugin = require('./event') class NetPlugin extends EventPlugin { diff --git a/packages/dd-trace/src/profiling/profilers/events.js b/packages/dd-trace/src/profiling/profilers/events.js index 62a75bbd1a3..440e12d493e 100644 --- a/packages/dd-trace/src/profiling/profilers/events.js +++ b/packages/dd-trace/src/profiling/profilers/events.js @@ -1,3 +1,5 @@ +'use strict' + const { performance, constants, PerformanceObserver } = require('perf_hooks') const { END_TIMESTAMP_LABEL, SPAN_ID_LABEL, LOCAL_ROOT_SPAN_ID_LABEL, encodeProfileAsync } = require('./shared') const { Function, Label, Line, Location, Profile, Sample, StringTable, ValueType } = require('pprof-format') diff --git a/packages/dd-trace/src/profiling/webspan-utils.js b/packages/dd-trace/src/profiling/webspan-utils.js index a2541ca4216..c5436e79641 100644 --- a/packages/dd-trace/src/profiling/webspan-utils.js +++ b/packages/dd-trace/src/profiling/webspan-utils.js @@ -1,3 +1,5 @@ +'use strict' + const { HTTP_METHOD, HTTP_ROUTE, RESOURCE_NAME, SPAN_TYPE } = require('../../../../ext/tags') const { WEB } = require('../../../../ext/types') diff --git a/packages/dd-trace/src/runtime_metrics/runtime_metrics.js b/packages/dd-trace/src/runtime_metrics/runtime_metrics.js index 1417851427b..8550c449d84 100644 --- a/packages/dd-trace/src/runtime_metrics/runtime_metrics.js +++ b/packages/dd-trace/src/runtime_metrics/runtime_metrics.js @@ -1,4 +1,5 @@ 'use strict' + /* eslint n/no-unsupported-features/node-builtins: ['error', { ignores: ['v8.GCProfiler'] }] */ // TODO: capture every second and flush every 10 seconds diff --git a/packages/dd-trace/src/service-naming/index.js b/packages/dd-trace/src/service-naming/index.js index 470a9daa82d..385d6945d3f 100644 --- a/packages/dd-trace/src/service-naming/index.js +++ b/packages/dd-trace/src/service-naming/index.js @@ -1,3 +1,5 @@ +'use strict' + class SchemaManager { constructor () { this.schemas = {} diff --git a/packages/dd-trace/src/service-naming/schemas/definition.js b/packages/dd-trace/src/service-naming/schemas/definition.js index 44149ac48d9..d4f64602ee5 100644 --- a/packages/dd-trace/src/service-naming/schemas/definition.js +++ b/packages/dd-trace/src/service-naming/schemas/definition.js @@ -1,3 +1,5 @@ +'use strict' + class SchemaDefinition { constructor (schema) { this.schema = schema diff --git a/packages/dd-trace/src/service-naming/schemas/util.js b/packages/dd-trace/src/service-naming/schemas/util.js index 179ea7a04fe..97f5d80b677 100644 --- a/packages/dd-trace/src/service-naming/schemas/util.js +++ b/packages/dd-trace/src/service-naming/schemas/util.js @@ -1,3 +1,5 @@ +'use strict' + function identityService ({ tracerService }) { return tracerService } diff --git a/packages/dd-trace/src/service-naming/schemas/v0/graphql.js b/packages/dd-trace/src/service-naming/schemas/v0/graphql.js index db0c63778f4..5b5d04ded8d 100644 --- a/packages/dd-trace/src/service-naming/schemas/v0/graphql.js +++ b/packages/dd-trace/src/service-naming/schemas/v0/graphql.js @@ -1,3 +1,5 @@ +'use strict' + const { identityService } = require('../util') const graphql = { diff --git a/packages/dd-trace/src/service-naming/schemas/v0/index.js 
b/packages/dd-trace/src/service-naming/schemas/v0/index.js index 1b0b746035d..3ee4c59de11 100644 --- a/packages/dd-trace/src/service-naming/schemas/v0/index.js +++ b/packages/dd-trace/src/service-naming/schemas/v0/index.js @@ -1,3 +1,5 @@ +'use strict' + const SchemaDefinition = require('../definition') const messaging = require('./messaging') const storage = require('./storage') diff --git a/packages/dd-trace/src/service-naming/schemas/v0/messaging.js b/packages/dd-trace/src/service-naming/schemas/v0/messaging.js index 8b8742661f2..fa9dc21a066 100644 --- a/packages/dd-trace/src/service-naming/schemas/v0/messaging.js +++ b/packages/dd-trace/src/service-naming/schemas/v0/messaging.js @@ -1,3 +1,5 @@ +'use strict' + const { identityService, awsServiceV0 } = require('../util') function amqpServiceName ({ tracerService }) { diff --git a/packages/dd-trace/src/service-naming/schemas/v0/serverless.js b/packages/dd-trace/src/service-naming/schemas/v0/serverless.js index 64202b11873..2e32ac25950 100644 --- a/packages/dd-trace/src/service-naming/schemas/v0/serverless.js +++ b/packages/dd-trace/src/service-naming/schemas/v0/serverless.js @@ -1,3 +1,5 @@ +'use strict' + const { identityService } = require('../util') const serverless = { diff --git a/packages/dd-trace/src/service-naming/schemas/v0/storage.js b/packages/dd-trace/src/service-naming/schemas/v0/storage.js index d5ed0713fa9..c252ac798e8 100644 --- a/packages/dd-trace/src/service-naming/schemas/v0/storage.js +++ b/packages/dd-trace/src/service-naming/schemas/v0/storage.js @@ -1,3 +1,5 @@ +'use strict' + function getRedisService (pluginConfig, connectionName) { if (pluginConfig.splitByInstance && connectionName) { return pluginConfig.service diff --git a/packages/dd-trace/src/service-naming/schemas/v0/web.js b/packages/dd-trace/src/service-naming/schemas/v0/web.js index 777c5db1b9c..23046f8ce8d 100644 --- a/packages/dd-trace/src/service-naming/schemas/v0/web.js +++ b/packages/dd-trace/src/service-naming/schemas/v0/web.js @@ -1,3 +1,5 @@ +'use strict' + const { identityService, httpPluginClientService, awsServiceV0 } = require('../util') const web = { diff --git a/packages/dd-trace/src/service-naming/schemas/v1/graphql.js b/packages/dd-trace/src/service-naming/schemas/v1/graphql.js index 1a207d807c2..9cfcf599ce1 100644 --- a/packages/dd-trace/src/service-naming/schemas/v1/graphql.js +++ b/packages/dd-trace/src/service-naming/schemas/v1/graphql.js @@ -1,3 +1,5 @@ +'use strict' + const { identityService } = require('../util') const graphql = { diff --git a/packages/dd-trace/src/service-naming/schemas/v1/index.js b/packages/dd-trace/src/service-naming/schemas/v1/index.js index 1b0b746035d..3ee4c59de11 100644 --- a/packages/dd-trace/src/service-naming/schemas/v1/index.js +++ b/packages/dd-trace/src/service-naming/schemas/v1/index.js @@ -1,3 +1,5 @@ +'use strict' + const SchemaDefinition = require('../definition') const messaging = require('./messaging') const storage = require('./storage') diff --git a/packages/dd-trace/src/service-naming/schemas/v1/messaging.js b/packages/dd-trace/src/service-naming/schemas/v1/messaging.js index b7e3da0663a..931b90d6cab 100644 --- a/packages/dd-trace/src/service-naming/schemas/v1/messaging.js +++ b/packages/dd-trace/src/service-naming/schemas/v1/messaging.js @@ -1,3 +1,5 @@ +'use strict' + const { identityService } = require('../util') const amqpInbound = { diff --git a/packages/dd-trace/src/service-naming/schemas/v1/serverless.js b/packages/dd-trace/src/service-naming/schemas/v1/serverless.js index 
64202b11873..2e32ac25950 100644 --- a/packages/dd-trace/src/service-naming/schemas/v1/serverless.js +++ b/packages/dd-trace/src/service-naming/schemas/v1/serverless.js @@ -1,3 +1,5 @@ +'use strict' + const { identityService } = require('../util') const serverless = { diff --git a/packages/dd-trace/src/service-naming/schemas/v1/storage.js b/packages/dd-trace/src/service-naming/schemas/v1/storage.js index 2836d6b2bcd..04ed9943819 100644 --- a/packages/dd-trace/src/service-naming/schemas/v1/storage.js +++ b/packages/dd-trace/src/service-naming/schemas/v1/storage.js @@ -1,3 +1,5 @@ +'use strict' + function configWithFallback ({ tracerService, pluginConfig }) { return pluginConfig.service || tracerService } diff --git a/packages/dd-trace/src/service-naming/schemas/v1/web.js b/packages/dd-trace/src/service-naming/schemas/v1/web.js index 333ccae51c3..66b1afee22f 100644 --- a/packages/dd-trace/src/service-naming/schemas/v1/web.js +++ b/packages/dd-trace/src/service-naming/schemas/v1/web.js @@ -1,3 +1,5 @@ +'use strict' + const { identityService, httpPluginClientService } = require('../util') const web = { diff --git a/packages/dd-trace/src/span_stats.js b/packages/dd-trace/src/span_stats.js index 4e7c3377a5b..1bf0d378a6b 100644 --- a/packages/dd-trace/src/span_stats.js +++ b/packages/dd-trace/src/span_stats.js @@ -1,3 +1,5 @@ +'use strict' + const os = require('os') const { version } = require('./pkg') const pkg = require('../../../package.json') diff --git a/packages/dd-trace/src/telemetry/send-data.js b/packages/dd-trace/src/telemetry/send-data.js index 9a1d9e828f9..43851f63583 100644 --- a/packages/dd-trace/src/telemetry/send-data.js +++ b/packages/dd-trace/src/telemetry/send-data.js @@ -1,3 +1,5 @@ +'use strict' + const request = require('../exporters/common/request') const log = require('../log') const { isTrue } = require('../util') diff --git a/packages/dd-trace/test/appsec/graphql.spec.js b/packages/dd-trace/test/appsec/graphql.spec.js index f624be32284..8ead52c42b8 100644 --- a/packages/dd-trace/test/appsec/graphql.spec.js +++ b/packages/dd-trace/test/appsec/graphql.spec.js @@ -1,3 +1,5 @@ +'use strict' + const proxyquire = require('proxyquire') const waf = require('../../src/appsec/waf') const web = require('../../src/plugins/util/web') diff --git a/packages/dd-trace/test/appsec/iast/analyzers/resources/random-functions.js b/packages/dd-trace/test/appsec/iast/analyzers/resources/random-functions.js index f608645242d..a533953b5e6 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/resources/random-functions.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/resources/random-functions.js @@ -1,3 +1,5 @@ +'use strict' + function weakRandom () { return Math.random() } diff --git a/packages/dd-trace/test/appsec/iast/analyzers/weak-randomness-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/weak-randomness-analyzer.spec.js index a80c257760a..de46384f8c3 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/weak-randomness-analyzer.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/weak-randomness-analyzer.spec.js @@ -94,7 +94,7 @@ describe('weak-randomness-analyzer', () => { occurrences: 1, location: { path: randomFunctionsPath, - line: 2 + line: 4 } }) }) diff --git a/packages/dd-trace/test/appsec/iast/index.spec.js b/packages/dd-trace/test/appsec/iast/index.spec.js index 5fdb00bd260..e0351d5201d 100644 --- a/packages/dd-trace/test/appsec/iast/index.spec.js +++ b/packages/dd-trace/test/appsec/iast/index.spec.js @@ -1,3 +1,5 @@ +'use strict' + const 
proxyquire = require('proxyquire') const Config = require('../../../src/config') const agent = require('../../plugins/agent') diff --git a/packages/dd-trace/test/appsec/iast/path-line.spec.js b/packages/dd-trace/test/appsec/iast/path-line.spec.js index ae669b37b24..e69782fcab8 100644 --- a/packages/dd-trace/test/appsec/iast/path-line.spec.js +++ b/packages/dd-trace/test/appsec/iast/path-line.spec.js @@ -1,3 +1,5 @@ +'use strict' + const proxyquire = require('proxyquire') const path = require('path') const os = require('os') diff --git a/packages/dd-trace/test/appsec/iast/taint-tracking/resources/propagationFunctions.js b/packages/dd-trace/test/appsec/iast/taint-tracking/resources/propagationFunctions.js index de37c351789..f1988e78a61 100644 --- a/packages/dd-trace/test/appsec/iast/taint-tracking/resources/propagationFunctions.js +++ b/packages/dd-trace/test/appsec/iast/taint-tracking/resources/propagationFunctions.js @@ -1,3 +1,5 @@ +'use strict' + function concatSuffix (str) { return str + '_suffix' } diff --git a/packages/dd-trace/test/appsec/iast/vulnerability-reporter.spec.js b/packages/dd-trace/test/appsec/iast/vulnerability-reporter.spec.js index 996d87d2019..717cf3080f5 100644 --- a/packages/dd-trace/test/appsec/iast/vulnerability-reporter.spec.js +++ b/packages/dd-trace/test/appsec/iast/vulnerability-reporter.spec.js @@ -1,3 +1,5 @@ +'use strict' + const { addVulnerability, sendVulnerabilities, clearCache, start, stop } = require('../../../src/appsec/iast/vulnerability-reporter') const VulnerabilityAnalyzer = require('../../../../dd-trace/src/appsec/iast/analyzers/vulnerability-analyzer') diff --git a/packages/dd-trace/test/appsec/next/pages-dir/server.js b/packages/dd-trace/test/appsec/next/pages-dir/server.js index 6bfac617836..d0827f71565 100644 --- a/packages/dd-trace/test/appsec/next/pages-dir/server.js +++ b/packages/dd-trace/test/appsec/next/pages-dir/server.js @@ -1,5 +1,3 @@ -'use strict' - const { PORT, HOSTNAME } = process.env const { createServer } = require('http') diff --git a/packages/dd-trace/test/custom-metrics-app.js b/packages/dd-trace/test/custom-metrics-app.js index c46f41f18b4..06b870a8a0f 100644 --- a/packages/dd-trace/test/custom-metrics-app.js +++ b/packages/dd-trace/test/custom-metrics-app.js @@ -1,4 +1,5 @@ #!/usr/bin/env node +'use strict' /* eslint-disable no-console */ diff --git a/packages/dd-trace/test/datastreams/data_streams_checkpointer.spec.js b/packages/dd-trace/test/datastreams/data_streams_checkpointer.spec.js index db29f96b575..0851facbea6 100644 --- a/packages/dd-trace/test/datastreams/data_streams_checkpointer.spec.js +++ b/packages/dd-trace/test/datastreams/data_streams_checkpointer.spec.js @@ -1,3 +1,5 @@ +'use strict' + require('../setup/tap') const agent = require('../plugins/agent') diff --git a/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/max-collection-size.js b/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/max-collection-size.js index 09c8ca81100..69f0af85afe 100644 --- a/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/max-collection-size.js +++ b/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/max-collection-size.js @@ -1,4 +1,4 @@ -'use stict' +'use strict' function run () { const arr = [] diff --git a/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/max-field-count-scopes.js b/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/max-field-count-scopes.js index 90b317b8104..993a682ff6c 100644 --- 
a/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/max-field-count-scopes.js +++ b/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/max-field-count-scopes.js @@ -1,4 +1,4 @@ -'use stict' +'use strict' function run () { // local scope diff --git a/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/max-field-count.js b/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/max-field-count.js index ea8eb955079..2a1f6d2fdac 100644 --- a/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/max-field-count.js +++ b/packages/dd-trace/test/debugger/devtools_client/snapshot/target-code/max-field-count.js @@ -1,4 +1,4 @@ -'use stict' +'use strict' function run () { const obj = {} diff --git a/packages/dd-trace/test/git_properties.spec.js b/packages/dd-trace/test/git_properties.spec.js index 1ba42840fc4..efa628c80a5 100644 --- a/packages/dd-trace/test/git_properties.spec.js +++ b/packages/dd-trace/test/git_properties.spec.js @@ -1,3 +1,5 @@ +'use strict' + require('./setup/tap') const { getGitMetadataFromGitProperties } = require('../src/git_properties') diff --git a/packages/dd-trace/test/lambda/fixtures/handler.js b/packages/dd-trace/test/lambda/fixtures/handler.js index 12cf0e8ad08..7eb425872ca 100644 --- a/packages/dd-trace/test/lambda/fixtures/handler.js +++ b/packages/dd-trace/test/lambda/fixtures/handler.js @@ -1,4 +1,5 @@ 'use strict' + const _tracer = require('../../../../dd-trace') const sleep = ms => new Promise(resolve => setTimeout(resolve, ms)) diff --git a/packages/dd-trace/test/payload-tagging/index.spec.js b/packages/dd-trace/test/payload-tagging/index.spec.js index a4f4da8108e..7ee6e580c59 100644 --- a/packages/dd-trace/test/payload-tagging/index.spec.js +++ b/packages/dd-trace/test/payload-tagging/index.spec.js @@ -1,3 +1,5 @@ +'use strict' + const { PAYLOAD_TAG_REQUEST_PREFIX, PAYLOAD_TAG_RESPONSE_PREFIX diff --git a/packages/dd-trace/test/payload_tagging.spec.js b/packages/dd-trace/test/payload_tagging.spec.js index 630c773d567..6d7b48999f8 100644 --- a/packages/dd-trace/test/payload_tagging.spec.js +++ b/packages/dd-trace/test/payload_tagging.spec.js @@ -1,3 +1,5 @@ +'use strict' + require('./setup/tap') const { diff --git a/packages/dd-trace/test/ritm-tests/module-b.js b/packages/dd-trace/test/ritm-tests/module-b.js index 265fa450071..d01c29043f3 100644 --- a/packages/dd-trace/test/ritm-tests/module-b.js +++ b/packages/dd-trace/test/ritm-tests/module-b.js @@ -1,3 +1,5 @@ +'use strict' + const { a } = require('./module-a') module.exports.b = () => { return a() } diff --git a/packages/dd-trace/test/service-naming/schema.spec.js b/packages/dd-trace/test/service-naming/schema.spec.js index 2cb994f4ff6..38487c6cdb5 100644 --- a/packages/dd-trace/test/service-naming/schema.spec.js +++ b/packages/dd-trace/test/service-naming/schema.spec.js @@ -1,3 +1,5 @@ +'use strict' + require('../setup/tap') const { expect } = require('chai') diff --git a/packages/dd-trace/test/setup/services/oracledb.js b/packages/dd-trace/test/setup/services/oracledb.js index b39a4d1f390..669cb93f17b 100644 --- a/packages/dd-trace/test/setup/services/oracledb.js +++ b/packages/dd-trace/test/setup/services/oracledb.js @@ -1,3 +1,5 @@ +'use strict' + const RetryOperation = require('../operation') const oracledb = require('../../../../../versions/oracledb').get() diff --git a/register.js b/register.js index 5189a0ffede..752069bda95 100644 --- a/register.js +++ b/register.js @@ -1,3 +1,5 @@ +'use strict' + /* eslint 
n/no-unsupported-features/node-builtins: ['error', { version: '>=20.6.0', allowExperimental: true }] */ const { register } = require('node:module') diff --git a/scripts/check-proposal-labels.js b/scripts/check-proposal-labels.js index 87d89cce777..2d182cf641e 100644 --- a/scripts/check-proposal-labels.js +++ b/scripts/check-proposal-labels.js @@ -1,3 +1,5 @@ +'use strict' + /* eslint-disable no-console */ const childProcess = require('child_process') diff --git a/scripts/flakiness.mjs b/scripts/flakiness.mjs index 0503e6abc69..b09be3e22fc 100644 --- a/scripts/flakiness.mjs +++ b/scripts/flakiness.mjs @@ -1,5 +1,3 @@ -'use strict' - /* eslint-disable no-console */ import { Octokit } from 'octokit' diff --git a/scripts/get-chrome-driver-download-url.js b/scripts/get-chrome-driver-download-url.js index 99f98a9079b..67fd4b916c4 100644 --- a/scripts/get-chrome-driver-download-url.js +++ b/scripts/get-chrome-driver-download-url.js @@ -1,3 +1,5 @@ +'use strict' + const URL = 'https://googlechromelabs.github.io/chrome-for-testing/known-good-versions-with-downloads.json' // Get chrome driver download URL from a given chrome version, provided via CHROME_VERSION env var diff --git a/scripts/verify-ci-config.js b/scripts/verify-ci-config.js index 5f83f4cae88..7ea874e60c7 100644 --- a/scripts/verify-ci-config.js +++ b/scripts/verify-ci-config.js @@ -1,4 +1,5 @@ 'use strict' + /* eslint-disable no-console */ /* eslint n/no-unsupported-features/node-builtins: ['error', { version: '>=22.0.0' }] */ From bc9cf2ec203cbf9ff5f30c183a3229b634dc7849 Mon Sep 17 00:00:00 2001 From: Roch Devost Date: Tue, 8 Jul 2025 13:13:41 -0400 Subject: [PATCH 05/53] test: remove get-port usage from inferred proxy test (#6054) * move test cleanup to hook --- .../test/plugins/util/inferred_proxy.spec.js | 25 +++++++++++-------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/packages/dd-trace/test/plugins/util/inferred_proxy.spec.js b/packages/dd-trace/test/plugins/util/inferred_proxy.spec.js index a5fd2805875..b5e493353cc 100644 --- a/packages/dd-trace/test/plugins/util/inferred_proxy.spec.js +++ b/packages/dd-trace/test/plugins/util/inferred_proxy.spec.js @@ -3,7 +3,6 @@ require('../../setup/tap') const agent = require('../agent') -const getPort = require('get-port') const { expect } = require('chai') const axios = require('axios') @@ -19,7 +18,6 @@ describe('Inferred Proxy Spans', function () { process.env.DD_SERVICE = 'aws-server' process.env.DD_TRACE_INFERRED_PROXY_SERVICES_ENABLED = 'true' - port = await getPort() require('../../../../dd-trace') await agent.load(['http'], null, options) @@ -37,14 +35,19 @@ describe('Inferred Proxy Spans', function () { } }) - appListener = server.listen(port, '127.0.0.1') + return new Promise((resolve, reject) => { + appListener = server.listen(0, '127.0.0.1', () => { + port = server.address().port + resolve() + }) + }) } // test cleanup function - const cleanupTest = function () { + const cleanupTest = async function () { appListener && appListener.close() try { - agent.close({ ritmReset: false }) + await agent.close({ ritmReset: false }) } catch { // pass } @@ -59,6 +62,8 @@ describe('Inferred Proxy Spans', function () { 'x-dd-proxy-stage': 'dev' } + afterEach(cleanupTest) + describe('without configuration', () => { it('should create a parent span and a child span for a 200', async () => { await loadTest({}) @@ -103,7 +108,7 @@ describe('Inferred Proxy Spans', function () { continue } } - }).then(cleanupTest).catch(cleanupTest) + }) }) it('should create a parent span and a 
child span for an error', async () => { @@ -150,7 +155,7 @@ describe('Inferred Proxy Spans', function () { continue } } - }).then(cleanupTest).catch(cleanupTest) + }) }) it('should not create an API Gateway span if all necessary headers are missing', async () => { @@ -182,7 +187,7 @@ describe('Inferred Proxy Spans', function () { continue } } - }).then(cleanupTest).catch(cleanupTest) + }) }) it('should not create an API Gateway span if missing the proxy system header', async () => { @@ -217,7 +222,7 @@ describe('Inferred Proxy Spans', function () { continue } } - }).then(cleanupTest).catch(cleanupTest) + }) }) }) @@ -251,7 +256,7 @@ describe('Inferred Proxy Spans', function () { continue } } - }).then(cleanupTest).catch(cleanupTest) + }) }) }) }) From 9cf30a21bc1f9f2458806069a129048e2e797c99 Mon Sep 17 00:00:00 2001 From: Roch Devost Date: Wed, 9 Jul 2025 03:29:36 -0400 Subject: [PATCH 06/53] test: remove get-port usage in core tests and appsec (#6060) * remove get-port usage in core tests and a few more * remove get-port usage from appsec plugin tests * remove explicit hostname from appsec express test * remove explicit hostname from appsec request helper test * make port explicit and add comment to explain why an arbitrary port is fine --- .../test/integration-test/client.spec.js | 2 +- .../test/integration-test/server.mjs | 6 +- .../test/integration-test/client.spec.js | 2 +- .../test/integration-test/server.mjs | 5 +- ...cker-fingerprinting.fastify.plugin.spec.js | 12 ++-- .../test/appsec/index.express.plugin.spec.js | 28 ++++---- .../test/appsec/index.fastify.plugin.spec.js | 69 +++++++++---------- .../test/exporters/common/request.spec.js | 69 +++++++++---------- packages/dd-trace/test/flare.spec.js | 16 ++--- .../test/profiling/exporters/agent.spec.js | 21 +++--- packages/dd-trace/test/setup/core.js | 27 -------- 11 files changed, 103 insertions(+), 154 deletions(-) diff --git a/packages/datadog-plugin-fetch/test/integration-test/client.spec.js b/packages/datadog-plugin-fetch/test/integration-test/client.spec.js index 922c889134d..ff6dbb2141e 100644 --- a/packages/datadog-plugin-fetch/test/integration-test/client.spec.js +++ b/packages/datadog-plugin-fetch/test/integration-test/client.spec.js @@ -16,7 +16,7 @@ describe('esm', () => { before(async function () { this.timeout(50000) - sandbox = await createSandbox(['get-port'], false, [ + sandbox = await createSandbox([], false, [ './packages/datadog-plugin-fetch/test/integration-test/*']) }) diff --git a/packages/datadog-plugin-fetch/test/integration-test/server.mjs b/packages/datadog-plugin-fetch/test/integration-test/server.mjs index 578d66a12ca..c15de21a39f 100644 --- a/packages/datadog-plugin-fetch/test/integration-test/server.mjs +++ b/packages/datadog-plugin-fetch/test/integration-test/server.mjs @@ -1,9 +1,7 @@ import 'dd-trace/init.js' -import getPort from 'get-port' -const port = await getPort() - -global.fetch(`http://localhost:${port}/foo`) +// An arbitrary port is used here as we just need a request even if it fails. 
+global.fetch('http://localhost:55555/foo') .then((response) => {}) .then((data) => {}) .catch((err) => {}) diff --git a/packages/datadog-plugin-net/test/integration-test/client.spec.js b/packages/datadog-plugin-net/test/integration-test/client.spec.js index 47576d33a1a..53e5c988a73 100644 --- a/packages/datadog-plugin-net/test/integration-test/client.spec.js +++ b/packages/datadog-plugin-net/test/integration-test/client.spec.js @@ -15,7 +15,7 @@ describe('esm', () => { before(async function () { this.timeout(20000) - sandbox = await createSandbox(['net', 'get-port'], false, [ + sandbox = await createSandbox(['net'], false, [ './packages/datadog-plugin-net/test/integration-test/*']) }) diff --git a/packages/datadog-plugin-net/test/integration-test/server.mjs b/packages/datadog-plugin-net/test/integration-test/server.mjs index fc7ec19a696..a0c9bc76137 100644 --- a/packages/datadog-plugin-net/test/integration-test/server.mjs +++ b/packages/datadog-plugin-net/test/integration-test/server.mjs @@ -1,10 +1,7 @@ import 'dd-trace/init.js' import net from 'net' -import getPort from 'get-port' -const port = await getPort() - -const client = net.createConnection(port, () => {}) +const client = net.createConnection(0, () => {}) client.on('data', (data) => {}) diff --git a/packages/dd-trace/test/appsec/attacker-fingerprinting.fastify.plugin.spec.js b/packages/dd-trace/test/appsec/attacker-fingerprinting.fastify.plugin.spec.js index 53a48173895..504d8bff0c4 100644 --- a/packages/dd-trace/test/appsec/attacker-fingerprinting.fastify.plugin.spec.js +++ b/packages/dd-trace/test/appsec/attacker-fingerprinting.fastify.plugin.spec.js @@ -2,7 +2,6 @@ const Axios = require('axios') const { assert } = require('chai') -const getPort = require('get-port') const path = require('path') const agent = require('../plugins/agent') @@ -26,13 +25,12 @@ withVersions('fastify', 'fastify', fastifyVersion => { reply.send('DONE') }) - getPort().then((port) => { - app.listen({ port }, () => { - axios = Axios.create({ baseURL: `http://localhost:${port}` }) - done() - }) - server = app.server + app.listen({ port: 0 }, () => { + const port = server.address().port + axios = Axios.create({ baseURL: `http://localhost:${port}` }) + done() }) + server = app.server }) after(() => { diff --git a/packages/dd-trace/test/appsec/index.express.plugin.spec.js b/packages/dd-trace/test/appsec/index.express.plugin.spec.js index 3e6770d252d..03fbae31af4 100644 --- a/packages/dd-trace/test/appsec/index.express.plugin.spec.js +++ b/packages/dd-trace/test/appsec/index.express.plugin.spec.js @@ -2,7 +2,6 @@ const Axios = require('axios') const { assert } = require('chai') -const getPort = require('get-port') const path = require('path') const agent = require('../plugins/agent') const appsec = require('../../src/appsec') @@ -46,11 +45,10 @@ withVersions('express', 'express', version => { app.param('callbackedParameter', paramCallbackSpy) - getPort().then((port) => { - server = app.listen(port, () => { - axios = Axios.create({ baseURL: `http://localhost:${port}` }) - done() - }) + server = app.listen(0, () => { + const port = server.address().port + axios = Axios.create({ baseURL: `http://localhost:${port}` }) + done() }) }) @@ -196,11 +194,10 @@ withVersions('express', 'express', version => { res.end('DONE') }) - getPort().then((port) => { - server = app.listen(port, () => { - axios = Axios.create({ baseURL: `http://localhost:${port}` }) - done() - }) + server = app.listen(0, () => { + const port = server.address().port + axios = Axios.create({ baseURL: 
`http://localhost:${port}` }) + done() }) }) @@ -274,11 +271,10 @@ withVersions('express', 'express', version => { res.json({ jsonResKey: 'jsonResValue' }) }) - getPort().then((port) => { - server = app.listen(port, () => { - axios = Axios.create({ baseURL: `http://localhost:${port}` }) - done() - }) + server = app.listen(0, () => { + const port = server.address().port + axios = Axios.create({ baseURL: `http://localhost:${port}` }) + done() }) }) diff --git a/packages/dd-trace/test/appsec/index.fastify.plugin.spec.js b/packages/dd-trace/test/appsec/index.fastify.plugin.spec.js index a9d5804c9cc..ae28854dfa9 100644 --- a/packages/dd-trace/test/appsec/index.fastify.plugin.spec.js +++ b/packages/dd-trace/test/appsec/index.fastify.plugin.spec.js @@ -2,7 +2,6 @@ const Axios = require('axios') const { assert } = require('chai') -const getPort = require('get-port') const path = require('path') const zlib = require('zlib') const fs = require('node:fs') @@ -29,13 +28,12 @@ withVersions('fastify', 'fastify', '>=2', version => { reply.send('DONE') }) - getPort().then((port) => { - app.listen({ port }, () => { - axios = Axios.create({ baseURL: `http://localhost:${port}` }) - done() - }) - server = app.server + app.listen({ port: 0 }, () => { + const port = server.address().port + axios = Axios.create({ baseURL: `http://localhost:${port}` }) + done() }) + server = app.server }) after(() => { @@ -95,13 +93,12 @@ withVersions('fastify', 'fastify', '>=2', version => { reply.send('DONE') }) - getPort().then((port) => { - app.listen({ port }, () => { - axios = Axios.create({ baseURL: `http://localhost:${port}` }) - done() - }) - server = app.server + app.listen({ port: 0 }, () => { + const port = server.address().port + axios = Axios.create({ baseURL: `http://localhost:${port}` }) + done() }) + server = app.server }) after(() => { @@ -205,13 +202,12 @@ withVersions('fastify', 'fastify', '>=2', version => { reply.send('DONE') }) - getPort().then((port) => { - app.listen({ port }, () => { - axios = Axios.create({ baseURL: `http://localhost:${port}` }) - done() - }) - server = app.server + app.listen({ port: 0 }, () => { + const port = server.address().port + axios = Axios.create({ baseURL: `http://localhost:${port}` }) + done() }) + server = app.server }) after(() => { @@ -295,13 +291,12 @@ withVersions('fastify', 'fastify', '>=2', version => { reply.send('DONE') }) - getPort().then((port) => { - app.listen({ port }, () => { - axios = Axios.create({ baseURL: `http://localhost:${port}` }) - done() - }) - server = app.server + app.listen({ port: 0 }, () => { + const port = server.address().port + axios = Axios.create({ baseURL: `http://localhost:${port}` }) + done() }) + server = app.server }) after(() => { @@ -478,13 +473,12 @@ withVersions('fastify', 'fastify', '>=2', version => { reply.send('DONE') }) - getPort().then((port) => { - app.listen({ port }, () => { - axios = Axios.create({ baseURL: `http://localhost:${port}` }) - done() - }) - server = app.server - }).catch(done) + app.listen({ port: 0 }, () => { + const port = server.address().port + axios = Axios.create({ baseURL: `http://localhost:${port}` }) + done() + }) + server = app.server }) beforeEach(async () => { @@ -577,13 +571,12 @@ describe('Api Security - Fastify', () => { reply.send(new Uint16Array(10)) }) - getPort().then((port) => { - app.listen({ port }, () => { - axios = Axios.create({ baseURL: `http://localhost:${port}` }) - done() - }) - server = app.server + app.listen({ port: 0 }, () => { + const port = server.address().port + axios = 
Axios.create({ baseURL: `http://localhost:${port}` }) + done() }) + server = app.server }) after(() => { diff --git a/packages/dd-trace/test/exporters/common/request.spec.js b/packages/dd-trace/test/exporters/common/request.spec.js index cd0aacec2f3..95c8df8facb 100644 --- a/packages/dd-trace/test/exporters/common/request.spec.js +++ b/packages/dd-trace/test/exporters/common/request.spec.js @@ -3,13 +3,12 @@ require('../../setup/tap') const nock = require('nock') -const getPort = require('get-port') const http = require('http') const zlib = require('zlib') const FormData = require('../../../src/exporters/common/form-data') -const initHTTPServer = (port) => { +const initHTTPServer = () => { return new Promise(resolve => { const sockets = [] const requestListener = function (req, res) { @@ -23,11 +22,13 @@ const initHTTPServer = (port) => { server.on('connection', socket => sockets.push(socket)) - server.listen(port, () => { - resolve(() => { + server.listen(0, () => { + const shutdown = () => { sockets.forEach(socket => socket.end()) server.close() - }) + } + shutdown.port = server.address().port + resolve(shutdown) }) }) } @@ -223,37 +224,33 @@ describe('request', function () { }) it('should be able to send concurrent requests to different hosts', function (done) { - // TODO: try to simplify the setup here. I haven't been able to reproduce the - // concurrent socket issue using nock - Promise.all([getPort(), getPort()]).then(([port1, port2]) => { - Promise.all([initHTTPServer(port1), initHTTPServer(port2)]).then(([shutdownFirst, shutdownSecond]) => { - // this interval is blocking a socket for the other request - const intervalId = setInterval(() => { - request(Buffer.from(''), { - path: '/', - method: 'POST', - hostname: 'localhost', - protocol: 'http:', - port: port1 - }, () => {}) - }, 1000) - - setTimeout(() => { - request(Buffer.from(''), { - path: '/', - method: 'POST', - hostname: 'localhost', - protocol: 'http:', - port: port2 - }, (err, res) => { - expect(res).to.equal('OK') - shutdownFirst() - shutdownSecond() - clearInterval(intervalId) - done() - }) - }, 2000) - }) + Promise.all([initHTTPServer(), initHTTPServer()]).then(([shutdownFirst, shutdownSecond]) => { + // this interval is blocking a socket for the other request + const intervalId = setInterval(() => { + request(Buffer.from(''), { + path: '/', + method: 'POST', + hostname: 'localhost', + protocol: 'http:', + port: shutdownFirst.port + }, () => {}) + }, 1000) + + setTimeout(() => { + request(Buffer.from(''), { + path: '/', + method: 'POST', + hostname: 'localhost', + protocol: 'http:', + port: shutdownSecond.port + }, (err, res) => { + expect(res).to.equal('OK') + shutdownFirst() + shutdownSecond() + clearInterval(intervalId) + done() + }) + }, 2000) }) }) diff --git a/packages/dd-trace/test/flare.spec.js b/packages/dd-trace/test/flare.spec.js index 1ece6432e2d..a72963ccfd6 100644 --- a/packages/dd-trace/test/flare.spec.js +++ b/packages/dd-trace/test/flare.spec.js @@ -3,7 +3,6 @@ const Config = require('../src/config') const { channel } = require('dc-polyfill') const express = require('express') -const getPort = require('get-port') const http = require('http') const upload = require('multer')() const proxyquire = require('proxyquire').noCallThru() @@ -23,7 +22,7 @@ describe('Flare', () => { let socket let handler - const createServer = () => { + const createServer = (done) => { const app = express() app.post('/tracer_flare/v1', upload.any(), (req, res) => { @@ -36,7 +35,10 @@ describe('Flare', () => { socket = socket_ 
}) - listener = server.listen(port) + listener = server.listen(0, '127.0.0.1', () => { + port = server.address().port + done() + }) } beforeEach(() => { @@ -49,12 +51,10 @@ describe('Flare', () => { flare = proxyquire('../src/flare', { '../startup-log': startupLog }) - - return getPort().then(port_ => { - port = port_ - }) }) + beforeEach(createServer) + beforeEach(() => { tracerConfig = new Config({ url: `http://127.0.0.1:${port}` @@ -65,8 +65,6 @@ describe('Flare', () => { hostname: 'myhostname', user_handle: 'user.name@datadoghq.com' } - - createServer() }) afterEach(done => { diff --git a/packages/dd-trace/test/profiling/exporters/agent.spec.js b/packages/dd-trace/test/profiling/exporters/agent.spec.js index 8bcc45818fd..faa1df5a072 100644 --- a/packages/dd-trace/test/profiling/exporters/agent.spec.js +++ b/packages/dd-trace/test/profiling/exporters/agent.spec.js @@ -10,7 +10,6 @@ const upload = require('multer')() const os = require('os') const path = require('path') const { request } = require('http') -const getPort = require('get-port') const proxyquire = require('proxyquire') const WallProfiler = require('../../../src/profiling/profilers/wall') const SpaceProfiler = require('../../../src/profiling/profilers/space') @@ -171,13 +170,13 @@ describe('exporters/agent', function () { describe('using HTTP', () => { beforeEach(done => { - getPort().then(port => { + listener = app.listen(0, '127.0.0.1', () => { + const port = listener.address().port url = new URL(`http://127.0.0.1:${port}`) - - listener = app.listen(port, '127.0.0.1', done) - listener.on('connection', socket => sockets.push(socket)) - startSpan = sinon.spy(tracer._tracer, 'startSpan') + done() }) + listener.on('connection', socket => sockets.push(socket)) + startSpan = sinon.spy(tracer._tracer, 'startSpan') }) afterEach(done => { @@ -391,13 +390,13 @@ describe('exporters/agent', function () { describe('using ipv6', () => { beforeEach(done => { - getPort().then(port => { + listener = app.listen(0, '0:0:0:0:0:0:0:1', () => { + const port = listener.address().port url = new URL(`http://[0:0:0:0:0:0:0:1]:${port}`) - - listener = app.listen(port, '0:0:0:0:0:0:0:1', done) - listener.on('connection', socket => sockets.push(socket)) - startSpan = sinon.spy(tracer._tracer, 'startSpan') + done() }) + listener.on('connection', socket => sockets.push(socket)) + startSpan = sinon.spy(tracer._tracer, 'startSpan') }) afterEach(done => { diff --git a/packages/dd-trace/test/setup/core.js b/packages/dd-trace/test/setup/core.js index ed090d5afd7..6dc88017d8d 100644 --- a/packages/dd-trace/test/setup/core.js +++ b/packages/dd-trace/test/setup/core.js @@ -3,36 +3,9 @@ const sinon = require('sinon') const chai = require('chai') const sinonChai = require('sinon-chai') -const { setTimeout } = require('timers/promises') const proxyquire = require('../proxyquire') const { NODE_MAJOR } = require('../../../../version') -{ - // get-port can often return a port that is already in use, thanks to a race - // condition. This patch adds a retry for 10 iterations, which should be - // enough to avoid flaky tests. The patch is added here in the require cache - // because it's used in all sorts of places. 
- const getPort = require('get-port') - require.cache[require.resolve('get-port')].exports = async function (...args) { - let tries = 10 - let err = null - while (tries-- > 0) { - try { - return await getPort(...args) - } catch (e) { - if (e.code !== 'EADDRINUSE') { - throw e - } - if (tries) { - await setTimeout(5) - } - err = e - } - } - throw err - } -} - chai.use(sinonChai) chai.use(require('../asserts/profile')) From a8769c3fcc4342cc5271e86e8a05ef3931f79e36 Mon Sep 17 00:00:00 2001 From: Ruben Bridgewater Date: Wed, 9 Jul 2025 11:15:07 +0200 Subject: [PATCH 07/53] datastreams: safer and faster schema serialization (#6042) The original implementation contained unused code and used replace calls after serializing an object to JSON. The strings could have contained parts that the replace call would have matched (while unlikely due to the content). This now directly serializes to JSON and does not have that issue anymore. That should also be faster overall. --- .../src/datastreams/schemas/schema_builder.js | 65 ++++++++++--------- 1 file changed, 33 insertions(+), 32 deletions(-) diff --git a/packages/dd-trace/src/datastreams/schemas/schema_builder.js b/packages/dd-trace/src/datastreams/schemas/schema_builder.js index 6bcf58becd9..aa66414faa0 100644 --- a/packages/dd-trace/src/datastreams/schemas/schema_builder.js +++ b/packages/dd-trace/src/datastreams/schemas/schema_builder.js @@ -15,13 +15,13 @@ class SchemaBuilder { this.properties = 0 } + // TODO: This is only used in tests. Let's refactor the code and stop exposing the cache. static getCache () { return CACHE } static getSchemaDefinition (schema) { - const noNones = convertToJsonCompatible(schema) - const definition = jsonStringify(noNones) + const definition = toJSON(schema) const id = fnv64(Buffer.from(definition, 'utf8')).toString() return new Schema(definition, id) } @@ -96,42 +96,43 @@ class OpenApiComponents { } } -function convertToJsonCompatible (obj) { - if (Array.isArray(obj)) { - return obj.filter(item => item !== null).map(item => convertToJsonCompatible(item)) - } else if (obj && typeof obj === 'object') { - const jsonObj = {} - for (const [key, value] of Object.entries(obj)) { - if (value !== null) { - jsonObj[key] = convertToJsonCompatible(value) +// This adds a single whitespace between entries without adding newlines. +// This differs from JSON.stringify and is used to align with the output +// in other platforms. +function toJSON (value) { + // eslint-disable-next-line eslint-rules/eslint-safe-typeof-object + if (typeof value === 'object') { + if (value === null) { + return 'null' + } + if (Array.isArray(value)) { + let result = '[' + for (let i = 0; i < value.length; i++) { + if (i > 0) { + result += ', ' + } + result += value[i] == null ? 
'null' : toJSON(value[i]) } + return `${result}]` } - return jsonObj - } - return obj -} - -function convertKey (key) { - if (key === 'enumValues') { - return 'enum' + let result = '{' + for (const [key, objectValue] of Object.entries(value)) { + if (objectValue != null && typeof key === 'string') { + const converted = toJSON(objectValue) + if (converted !== undefined) { + if (result !== '{') { + result += ', ' + } + result += `"${key}": ${converted}` + } + } + } + return `${result}}` } - return key -} - -function jsonStringify (obj, indent = 2) { - // made to stringify json exactly similar to python / java in order for hashing to be the same - const jsonString = JSON.stringify(obj, (_, value) => value, indent) - return jsonString.replaceAll(/^ +/gm, ' ') // Replace leading spaces with single space - .replaceAll('\n', '') // Remove newlines - .replaceAll('{ ', '{') // Remove space after '{' - .replaceAll(' }', '}') // Remove space before '}' - .replaceAll('[ ', '[') // Remove space after '[' - .replaceAll(' ]', ']') // Remove space before ']' + return JSON.stringify(value) } module.exports = { SchemaBuilder, OpenApiSchema, - convertToJsonCompatible, - convertKey } From cced560b02fe2467d88ad58978311f444e0058bf Mon Sep 17 00:00:00 2001 From: Thomas Watson Date: Wed, 9 Jul 2025 11:17:54 +0200 Subject: [PATCH 08/53] ESLint: Scope certain files to other Node.js versions than v18.0.0 (#6030) Not all of our files have Node.js 18.0.0 as the target version. --- eslint.config.mjs | 91 ++++++++++++++++--- init.js | 2 - .../datadog-plugin-cypress/src/support.js | 4 +- packages/dd-trace/src/guardrails/index.js | 7 +- packages/dd-trace/src/guardrails/log.js | 4 +- packages/dd-trace/src/guardrails/telemetry.js | 13 +-- packages/dd-trace/src/guardrails/util.js | 2 - version.js | 3 - 8 files changed, 90 insertions(+), 36 deletions(-) diff --git a/eslint.config.mjs b/eslint.config.mjs index 9c90e9a1275..29d13ed5273 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -51,8 +51,7 @@ export default [ 'integration-tests/esbuild/aws-sdk-out.js', // Generated 'packages/datadog-plugin-graphql/src/tools/index.js', // Inlined from apollo-graphql 'packages/datadog-plugin-graphql/src/tools/signature.js', // Inlined from apollo-graphql - 'packages/datadog-plugin-graphql/src/tools/transforms.js', // Inlined from apollo-graphql - 'packages/dd-trace/src/guardrails/**/*' // Guardrails contain very old JS + 'packages/datadog-plugin-graphql/src/tools/transforms.js' // Inlined from apollo-graphql ] }, { name: '@eslint/js/recommended', ...eslintPluginJs.configs.recommended }, @@ -301,21 +300,17 @@ export default [ }, { name: 'dd-trace/defaults', - plugins: { '@stylistic': eslintPluginStylistic, import: eslintPluginImport, n: eslintPluginN }, - languageOptions: { globals: { ...globals.node }, - ecmaVersion: 2022 }, - settings: { node: { // Used by `eslint-plugin-n` to determine the minimum version of Node.js to support. 
@@ -325,7 +320,6 @@ export default [ version: '>=18.0.0' } }, - rules: { '@stylistic/max-len': ['error', { code: 120, tabWidth: 2, ignoreUrls: true, ignoreRegExpLiterals: true }], '@stylistic/object-curly-newline': ['error', { multiline: true, consistent: true }], @@ -436,6 +430,81 @@ export default [ 'unicorn/switch-case-braces': 'off', // Questionable benefit } }, + { + name: 'dd-trace/defaults/v0.8-oldest', + plugins: { + n: eslintPluginN + }, + files: [ + 'init.js', + 'packages/dd-trace/src/guardrails/**/*', + 'version.js' + ], + settings: { + node: { + version: '>=0.8.0' + } + }, + rules: { + 'eslint-rules/eslint-process-env': 'off', // Would require us to load a module outside the guardrails directory + 'n/no-unsupported-features/es-builtins': ['error', { + // The following are false positives that are supported in Node.js 0.8.0 + ignores: [ + 'JSON', + 'JSON.stringify', + 'parseInt', + 'String' + ] + }], + 'n/no-unsupported-features/es-syntax': ['error', { + // The following are false positives that are supported in Node.js 0.8.0 + ignores: [ + 'array-prototype-indexof', + 'json' + ] + }], + 'no-var': 'off', // Only supported in Node.js 6+ + 'object-shorthand': 'off', // Only supported in Node.js 4+ + 'unicorn/prefer-includes': 'off', // Only supported in Node.js 6+ + 'unicorn/prefer-number-properties': 'off', // Only supported in Node.js 0.12+ + 'unicorn/prefer-optional-catch-binding': 'off', // Only supported in Node.js 10+ + 'unicorn/prefer-set-has': 'off', // Only supported in Node.js 0.12+ + 'unicorn/prefer-string-replace-all': 'off' // Only supported in Node.js 15+ + } + }, + { + name: 'dd-trace/defaults/v16-oldest', + plugins: { + n: eslintPluginN + }, + files: [ + 'packages/datadog-plugin-cypress/src/support.js' + ], + settings: { + node: { + version: '>=16.0.0' + } + } + }, + { + name: 'dd-trace/defaults/v18-latest', + plugins: { + n: eslintPluginN + }, + files: [ + 'benchmark/**/*', + 'scripts/**/*', + ...TEST_FILES + ], + settings: { + node: { + version: '>=18' // These files don't have to support the oldest v18 release + } + }, + rules: { + 'n/no-unsupported-features/node-builtins': ['error', { allowExperimental: true }] + } + }, { ...eslintPluginCypress.configs.recommended, files: [ @@ -462,9 +531,7 @@ export default [ 'scripts/**/*' ], rules: { - 'n/no-unsupported-features/node-builtins': ['error', { - allowExperimental: true - }] + 'n/no-unsupported-features/node-builtins': ['error', { allowExperimental: true }] } }, { @@ -494,9 +561,7 @@ export default [ 'mocha/no-top-level-hooks': 'off', 'n/handle-callback-err': 'off', 'n/no-missing-require': 'off', - 'n/no-unsupported-features/node-builtins': ['error', { - allowExperimental: true - }], + 'n/no-unsupported-features/node-builtins': ['error', { allowExperimental: true }], 'require-await': 'off' } }, diff --git a/init.js b/init.js index 63fa9ba96be..62c6bfe8549 100644 --- a/init.js +++ b/init.js @@ -1,7 +1,5 @@ 'use strict' -/* eslint-disable no-var */ - // TODO: It shouldn't be necessary to disable n/no-unpublished-require - Research // eslint-disable-next-line n/no-unpublished-require var guard = require('./packages/dd-trace/src/guardrails') diff --git a/packages/datadog-plugin-cypress/src/support.js b/packages/datadog-plugin-cypress/src/support.js index 86679d9de79..2c46271af02 100644 --- a/packages/datadog-plugin-cypress/src/support.js +++ b/packages/datadog-plugin-cypress/src/support.js @@ -30,8 +30,8 @@ function isNewTest (test) { } function getTestProperties (testName) { - // We neeed to do it in this way 
because of compatibility with older versions as '?' is not supported in older - // versions of Cypress + // TODO: Use optional chaining when we drop support for older Cypress versions, which will happen when dd-trace@5 is + // EoL. Until then, this files needs to support Node.js 16. const properties = testManagementTests[testName] && testManagementTests[testName].properties || {} const { attempt_to_fix: isAttemptToFix, disabled: isDisabled, quarantined: isQuarantined } = properties diff --git a/packages/dd-trace/src/guardrails/index.js b/packages/dd-trace/src/guardrails/index.js index 25daea00527..ab60c2d550f 100644 --- a/packages/dd-trace/src/guardrails/index.js +++ b/packages/dd-trace/src/guardrails/index.js @@ -1,7 +1,5 @@ 'use strict' -/* eslint-disable no-var */ - var path = require('path') var Module = require('module') var isTrue = require('./util').isTrue @@ -26,9 +24,11 @@ function guard (fn) { var resolvedInApp var entrypoint = process.argv[1] try { + // eslint-disable-next-line n/no-unsupported-features/node-builtins resolvedInApp = Module.createRequire(entrypoint).resolve('dd-trace') } catch (e) { // Ignore. If we can't resolve the module, we assume it's not in the app. + // TODO: There's also the possibility that this version of Node.js doesn't have Module.createRequire (pre v12.2.0) } if (resolvedInApp) { var ourselves = path.normalize(path.join(__dirname, '..', '..', '..', '..', 'index.js')) @@ -54,8 +54,7 @@ function guard (fn) { } if (!clobberBailout && (!initBailout || forced)) { - // Ensure the instrumentation source is set for the current process and potential - // child processes. + // Ensure the instrumentation source is set for the current process and potential child processes. var result = fn() telemetry('complete', ['injection_forced:' + (forced && initBailout ? 'true' : 'false')]) log.info('Application instrumentation bootstrapping complete') diff --git a/packages/dd-trace/src/guardrails/log.js b/packages/dd-trace/src/guardrails/log.js index dd74e5bdbf0..79a5ed019fc 100644 --- a/packages/dd-trace/src/guardrails/log.js +++ b/packages/dd-trace/src/guardrails/log.js @@ -1,6 +1,5 @@ 'use strict' -/* eslint-disable no-var */ /* eslint-disable no-console */ var isTrue = require('./util').isTrue @@ -23,7 +22,8 @@ var logLevel = isTrue(DD_TRACE_DEBUG) : logLevels.off var log = { - debug: logLevel <= 20 ? console.debug.bind(console) : function () {}, + /* eslint n/no-unsupported-features/node-builtins: ['error', { ignores: ['console.debug'] }] */ + debug: logLevel <= 20 ? (console.debug || console.log).bind(console) : function () {}, info: logLevel <= 30 ? console.info.bind(console) : function () {}, warn: logLevel <= 40 ? console.warn.bind(console) : function () {}, error: logLevel <= 50 ? 
console.error.bind(console) : function () {} diff --git a/packages/dd-trace/src/guardrails/telemetry.js b/packages/dd-trace/src/guardrails/telemetry.js index 49a8e5fbd5c..f27850cc02d 100644 --- a/packages/dd-trace/src/guardrails/telemetry.js +++ b/packages/dd-trace/src/guardrails/telemetry.js @@ -1,8 +1,5 @@ 'use strict' -/* eslint-disable no-var */ -/* eslint-disable object-shorthand */ - var fs = require('fs') var spawn = require('child_process').spawn var tracerVersion = require('../../../../package.json').version @@ -11,12 +8,12 @@ var log = require('./log') module.exports = sendTelemetry if (!process.env.DD_INJECTION_ENABLED) { - module.exports = function () {} + module.exports = function noop () {} } var telemetryForwarderPath = process.env.DD_TELEMETRY_FORWARDER_PATH if (typeof telemetryForwarderPath !== 'string' || !fs.existsSync(telemetryForwarderPath)) { - module.exports = function () {} + module.exports = function noop () {} } var metadata = { @@ -32,12 +29,12 @@ var seen = [] function hasSeen (point) { if (point.name === 'abort') { // This one can only be sent once, regardless of tags - return seen.includes('abort') + return seen.indexOf('abort') !== -1 } if (point.name === 'abort.integration') { // For now, this is the only other one we want to dedupe var compiledPoint = point.name + point.tags.join('') - return seen.includes(compiledPoint) + return seen.indexOf(compiledPoint) !== -1 } return false } @@ -48,7 +45,7 @@ function sendTelemetry (name, tags) { points = [{ name: name, tags: tags || [] }] } if (['1', 'true', 'True'].indexOf(process.env.DD_INJECT_FORCE) !== -1) { - points = points.filter(function (p) { return ['error', 'complete'].includes(p.name) }) + points = points.filter(function (p) { return ['error', 'complete'].indexOf(p.name) !== -1 }) } points = points.filter(function (p) { return !hasSeen(p) }) for (var i = 0; i < points.length; i++) { diff --git a/packages/dd-trace/src/guardrails/util.js b/packages/dd-trace/src/guardrails/util.js index 9aa60713573..5cae1c87936 100644 --- a/packages/dd-trace/src/guardrails/util.js +++ b/packages/dd-trace/src/guardrails/util.js @@ -1,7 +1,5 @@ 'use strict' -/* eslint-disable object-shorthand */ - function isTrue (str) { str = String(str).toLowerCase() return str === 'true' || str === '1' diff --git a/version.js b/version.js index 26ff826e8e1..abd20badf48 100644 --- a/version.js +++ b/version.js @@ -1,8 +1,5 @@ 'use strict' -/* eslint-disable no-var */ -/* eslint-disable unicorn/prefer-number-properties */ - var version = require('./package.json').version var ddMatches = version.match(/^(\d+)\.(\d+)\.(\d+)/) var nodeMatches = process.versions.node.match(/^(\d+)\.(\d+)\.(\d+)/) From 27157189f134aa0962fc1e34905f4dcd6d300cdb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mario=20Vidal=20Dom=C3=ADnguez?= <60353145+Mariovido@users.noreply.github.com> Date: Wed, 9 Jul 2025 11:18:51 +0200 Subject: [PATCH 09/53] [test-optimization] Add logs for Playwright test (#6055) --- integration-tests/automatic-log-submission.spec.js | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/integration-tests/automatic-log-submission.spec.js b/integration-tests/automatic-log-submission.spec.js index 3b010706985..ae13f94c3ef 100644 --- a/integration-tests/automatic-log-submission.spec.js +++ b/integration-tests/automatic-log-submission.spec.js @@ -69,7 +69,8 @@ describe('test visibility automatic log submission', () => { command: './node_modules/.bin/playwright test -c playwright.config.js', getExtraEnvVars: () => ({ PW_BASE_URL: 
`http://localhost:${webAppPort}`, - TEST_DIR: 'ci-visibility/automatic-log-submission-playwright' + TEST_DIR: 'ci-visibility/automatic-log-submission-playwright', + DD_TRACE_DEBUG: 1 }) } ] @@ -135,6 +136,10 @@ describe('test visibility automatic log submission', () => { childProcess.on('exit', () => { Promise.all([logsPromise, eventsPromise]).then(() => { + if (name === 'playwright') { + // eslint-disable-next-line no-console + console.log(testOutput) + } const { logSpanId, logTraceId } = logIds const { testSpanId, testTraceId } = testIds assert.include(testOutput, 'Hello simple log!') From d2c9789a41de7e810de691e7e0d5e970dc398c41 Mon Sep 17 00:00:00 2001 From: Roch Devost Date: Wed, 9 Jul 2025 05:29:14 -0400 Subject: [PATCH 10/53] test: use dynamic queue name for aws sqs tests (#6050) * should call promise for sending sqs messages with await syntax --------- Co-authored-by: William Conti --- .../datadog-plugin-aws-sdk/test/sqs.spec.js | 137 ++++++++++-------- 1 file changed, 80 insertions(+), 57 deletions(-) diff --git a/packages/datadog-plugin-aws-sdk/test/sqs.spec.js b/packages/datadog-plugin-aws-sdk/test/sqs.spec.js index ac2ad39fe81..5c00003a66d 100644 --- a/packages/datadog-plugin-aws-sdk/test/sqs.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/sqs.spec.js @@ -1,15 +1,14 @@ 'use strict' +const { randomUUID } = require('crypto') const sinon = require('sinon') const { withNamingSchema, withPeerService, withVersions } = require('../../dd-trace/test/setup/mocha') const agent = require('../../dd-trace/test/plugins/agent') const { setup } = require('./spec_helpers') const semver = require('semver') const { rawExpectedSchema } = require('./sqs-naming') - -const queueName = 'SQS_QUEUE_NAME' -const queueNameDSM = 'SQS_QUEUE_NAME_DSM' -const queueNameDSMConsumerOnly = 'SQS_QUEUE_NAME_DSM_CONSUMER_ONLY' +const { computePathwayHash } = require('../../dd-trace/src/datastreams/pathway') +const { ENTRY_PARENT_HASH } = require('../../dd-trace/src/datastreams/processor') const getQueueParams = (queueName) => { return { @@ -20,10 +19,6 @@ const getQueueParams = (queueName) => { } } -const queueOptions = getQueueParams(queueName) -const queueOptionsDsm = getQueueParams(queueNameDSM) -const queueOptionsDsmConsumerOnly = getQueueParams(queueNameDSMConsumerOnly) - describe('Plugin', () => { describe('aws-sdk (sqs)', function () { setup() @@ -31,13 +26,35 @@ describe('Plugin', () => { withVersions('aws-sdk', ['aws-sdk', '@aws-sdk/smithy-client'], (version, moduleName) => { let AWS let sqs - const QueueUrl = 'http://127.0.0.1:4566/00000000000000000000/SQS_QUEUE_NAME' - const QueueUrlDsm = 'http://127.0.0.1:4566/00000000000000000000/SQS_QUEUE_NAME_DSM' - const QueueUrlDsmConsumerOnly = 'http://127.0.0.1:4566/00000000000000000000/SQS_QUEUE_NAME_DSM_CONSUMER_ONLY' + let queueName + let queueNameDSM + let queueNameDSMConsumerOnly + let queueOptions + let queueOptionsDsm + let queueOptionsDsmConsumerOnly + let QueueUrl + let QueueUrlDsm + let QueueUrlDsmConsumerOnly let tracer const sqsClientName = moduleName === '@aws-sdk/smithy-client' ? 
'@aws-sdk/client-sqs' : 'aws-sdk' + beforeEach(() => { + const id = randomUUID() + + queueName = `SQS_QUEUE_NAME-${id}` + queueNameDSM = `SQS_QUEUE_NAME_DSM-${id}` + queueNameDSMConsumerOnly = `SQS_QUEUE_NAME_DSM_CONSUMER_ONLY-${id}` + + queueOptions = getQueueParams(queueName) + queueOptionsDsm = getQueueParams(queueNameDSM) + queueOptionsDsmConsumerOnly = getQueueParams(queueNameDSMConsumerOnly) + + QueueUrl = `http://127.0.0.1:4566/00000000000000000000/SQS_QUEUE_NAME-${id}` + QueueUrlDsm = `http://127.0.0.1:4566/00000000000000000000/SQS_QUEUE_NAME_DSM-${id}` + QueueUrlDsmConsumerOnly = `http://127.0.0.1:4566/00000000000000000000/SQS_QUEUE_NAME_DSM_CONSUMER_ONLY-${id}` + }) + describe('without configuration', () => { before(() => { process.env.DD_DATA_STREAMS_ENABLED = 'true' @@ -49,10 +66,12 @@ describe('Plugin', () => { ) }) - before(done => { + before(() => { AWS = require(`../../../versions/${sqsClientName}@${version}`).get() - sqs = new AWS.SQS({ endpoint: 'http://127.0.0.1:4566', region: 'us-east-1' }) + }) + + beforeEach(done => { sqs.createQueue(queueOptions, (err, res) => { if (err) return done(err) @@ -60,7 +79,7 @@ describe('Plugin', () => { }) }) - after(done => { + afterEach(done => { sqs.deleteQueue({ QueueUrl }, done) }) @@ -75,7 +94,7 @@ describe('Plugin', () => { MessageBody: 'test body', QueueUrl }, done), - 'SQS_QUEUE_NAME', + () => queueName, 'queuename' ) @@ -125,7 +144,7 @@ describe('Plugin', () => { expect(span.resource.startsWith('sendMessage')).to.equal(true) expect(span.meta).to.include({ - queuename: 'SQS_QUEUE_NAME' + queuename: queueName }) parentId = span.span_id.toString() @@ -164,7 +183,7 @@ describe('Plugin', () => { expect(span.resource.startsWith('sendMessageBatch')).to.equal(true) expect(span.meta).to.include({ - queuename: 'SQS_QUEUE_NAME' + queuename: queueName }) parentId = span.span_id.toString() @@ -314,10 +333,12 @@ describe('Plugin', () => { ) }) - before(done => { + before(() => { AWS = require(`../../../versions/${sqsClientName}@${version}`).get() - sqs = new AWS.SQS({ endpoint: 'http://127.0.0.1:4566', region: 'us-east-1' }) + }) + + beforeEach(done => { sqs.createQueue(queueOptions, (err, res) => { if (err) return done(err) @@ -325,7 +346,7 @@ describe('Plugin', () => { }) }) - after(done => { + afterEach(done => { sqs.deleteQueue({ QueueUrl }, done) }) @@ -345,7 +366,7 @@ describe('Plugin', () => { }) expect(span.meta).to.include({ - queuename: 'SQS_QUEUE_NAME', + queuename: queueName, aws_service: 'SQS', region: 'us-east-1' }) @@ -385,8 +406,8 @@ describe('Plugin', () => { }) describe('data stream monitoring', () => { - const expectedProducerHash = '4673734031235697865' - const expectedConsumerHash = '9749472979704578383' + let expectedProducerHash + let expectedConsumerHash let nowStub before(() => { @@ -398,40 +419,47 @@ describe('Plugin', () => { before(async () => { return agent.load('aws-sdk', { sqs: { - consumer: false, dsmEnabled: true } }, { dsmEnabled: true }) }) - before(done => { + before(() => { AWS = require(`../../../versions/${sqsClientName}@${version}`).get() - sqs = new AWS.SQS({ endpoint: 'http://127.0.0.1:4566', region: 'us-east-1' }) - sqs.createQueue(queueOptionsDsm, (err, res) => { - if (err) return done(err) - - done() - }) }) - before(done => { - AWS = require(`../../../versions/${sqsClientName}@${version}`).get() + beforeEach(() => { + const producerHash = computePathwayHash( + 'test', + 'tester', + ['direction:out', 'topic:' + queueNameDSM, 'type:sqs'], + ENTRY_PARENT_HASH + ) - sqs = new AWS.SQS({ endpoint: 
'http://127.0.0.1:4566', region: 'us-east-1' }) - sqs.createQueue(queueOptionsDsmConsumerOnly, (err, res) => { - if (err) return done(err) + expectedProducerHash = producerHash.readBigUInt64BE(0).toString() + expectedConsumerHash = computePathwayHash( + 'test', + 'tester', + ['direction:in', 'topic:' + queueNameDSM, 'type:sqs'], + producerHash + ).readBigUInt64BE(0).toString() + }) - done() - }) + beforeEach(done => { + sqs.createQueue(queueOptionsDsm, (err, res) => err ? done(err) : done()) + }) + + beforeEach(done => { + sqs.createQueue(queueOptionsDsmConsumerOnly, (err, res) => err ? done(err) : done()) }) - after(done => { + afterEach(done => { sqs.deleteQueue({ QueueUrl: QueueUrlDsm }, done) }) - after(done => { + afterEach(done => { sqs.deleteQueue({ QueueUrl: QueueUrlDsmConsumerOnly }, done) }) @@ -502,28 +530,23 @@ describe('Plugin', () => { if (sqsClientName === 'aws-sdk' && semver.intersects(version, '>=2.3')) { it('Should set pathway hash tag on a span when consuming and promise() was used over a callback', async () => { - await sqs.sendMessage({ MessageBody: 'test DSM', QueueUrl: QueueUrlDsm }) - await sqs.receiveMessage({ QueueUrl: QueueUrlDsm }).promise() - let consumeSpanMeta = {} - return new Promise((resolve, reject) => { - agent.assertSomeTraces(traces => { - const span = traces[0][0] + const tracePromise = agent.assertSomeTraces(traces => { + const span = traces[0][0] - if (span.name === 'aws.request' && span.meta['aws.operation'] === 'receiveMessage') { - consumeSpanMeta = span.meta - } + if (span.name === 'aws.request' && span.meta['aws.operation'] === 'receiveMessage') { + consumeSpanMeta = span.meta + } - try { - expect(consumeSpanMeta).to.include({ - 'pathway.hash': expectedConsumerHash - }) - resolve() - } catch (error) { - reject(error) - } + expect(consumeSpanMeta).to.include({ + 'pathway.hash': expectedConsumerHash }) }) + + await sqs.sendMessage({ MessageBody: 'test DSM', QueueUrl: QueueUrlDsm }).promise() + await sqs.receiveMessage({ QueueUrl: QueueUrlDsm }).promise() + + return tracePromise }) } From 04a1b55bfa4e6030ef64fcb834c93b5812c9b055 Mon Sep 17 00:00:00 2001 From: Thomas Watson Date: Wed, 9 Jul 2025 12:27:49 +0200 Subject: [PATCH 11/53] Use files property in package.json instead of .npmignore (#5320) --- .npmignore | 22 ---------------------- esbuild.js | 2 -- index.js | 2 -- init.js | 2 -- package.json | 25 +++++++++++++++++++++++++ 5 files changed, 25 insertions(+), 28 deletions(-) delete mode 100644 .npmignore diff --git a/.npmignore b/.npmignore deleted file mode 100644 index 1cd65a06c4a..00000000000 --- a/.npmignore +++ /dev/null @@ -1,22 +0,0 @@ -* -!ext/**/* -!packages/*/lib/**/* -!packages/*/src/**/* -!packages/*/index.js -!packages/datadog-instrumentations/orchestrion.yml -!scripts/preinstall.js -!vendor/**/* -!LICENSE -!LICENSE-3rdparty.csv -!README.md -!index.d.ts -!index.js -!esbuild.js -!init.js -!initialize.mjs -!loader-hook.mjs -!register.js -!package.json -!cypress/**/* -!ci/**/* -!version.js diff --git a/esbuild.js b/esbuild.js index 5c80493515a..424ba5cb908 100644 --- a/esbuild.js +++ b/esbuild.js @@ -1,5 +1,3 @@ 'use strict' -// TODO: It shouldn't be necessary to disable n/no-unpublished-require - Research -// eslint-disable-next-line n/no-unpublished-require module.exports = require('./packages/datadog-esbuild/index.js') diff --git a/index.js b/index.js index ed48ff2fea5..a8c61274ad8 100644 --- a/index.js +++ b/index.js @@ -1,5 +1,3 @@ 'use strict' -// TODO: It shouldn't be necessary to disable n/no-unpublished-require - Research 
-// eslint-disable-next-line n/no-unpublished-require module.exports = require('./packages/dd-trace') diff --git a/init.js b/init.js index 62c6bfe8549..0ab55eb1140 100644 --- a/init.js +++ b/init.js @@ -1,7 +1,5 @@ 'use strict' -// TODO: It shouldn't be necessary to disable n/no-unpublished-require - Research -// eslint-disable-next-line n/no-unpublished-require var guard = require('./packages/dd-trace/src/guardrails') module.exports = guard(function () { diff --git a/package.json b/package.json index 10066b36f83..20bd8c4597a 100644 --- a/package.json +++ b/package.json @@ -84,6 +84,31 @@ "engines": { "node": ">=18" }, + "files": [ + "ci/**/*", + "cypress/**/*", + "esbuild.js", + "ext/**/*", + "index.d.ts", + "index.js", + "init.js", + "initialize.mjs", + "LICENSE-3rdparty.csv", + "LICENSE", + "LICENSE.Apache", + "LICENSE.BSD3", + "loader-hook.mjs", + "package.json", + "packages/*/index.js", + "packages/*/lib/**/*", + "packages/*/src/**/*", + "packages/datadog-instrumentations/orchestrion.yml", + "README.md", + "register.js", + "scripts/preinstall.js", + "vendor/**/*", + "version.js" + ], "dependencies": { "@datadog/libdatadog": "0.7.0", "@datadog/native-appsec": "9.0.0", From 30e621109494e0654539314e7f71dac0b0326b4c Mon Sep 17 00:00:00 2001 From: Ilyas Shabi Date: Wed, 9 Jul 2025 15:19:56 +0200 Subject: [PATCH 12/53] support blocking on fastify multipart (#5980) --- .../datadog-instrumentations/src/fastify.js | 20 +++- .../test/appsec/index.fastify.plugin.spec.js | 113 ++++++++++++++++-- packages/dd-trace/test/plugins/externals.json | 4 + 3 files changed, 125 insertions(+), 12 deletions(-) diff --git a/packages/datadog-instrumentations/src/fastify.js b/packages/datadog-instrumentations/src/fastify.js index 1f355a49a02..8fb772d00a6 100644 --- a/packages/datadog-instrumentations/src/fastify.js +++ b/packages/datadog-instrumentations/src/fastify.js @@ -14,6 +14,7 @@ const pathParamsReadCh = channel('datadog:fastify:path-params:finish') const parsingResources = new WeakMap() const cookiesPublished = new WeakSet() +const bodyPublished = new WeakSet() function wrapFastify (fastify, hasParsingEvents) { if (typeof fastify !== 'function') return fastify @@ -124,6 +125,20 @@ function preHandler (request, reply, done) { if (!reply || typeof reply.send !== 'function') return done() const req = getReq(request) + const res = getRes(reply) + + const hasBody = request.body && Object.keys(request.body).length > 0 + + // For multipart/form-data, the body is not available until after preValidation hook + if (bodyParserReadCh.hasSubscribers && hasBody && !bodyPublished.has(req)) { + const abortController = new AbortController() + + bodyParserReadCh.publish({ req, res, body: request.body, abortController }) + + bodyPublished.add(req) + + if (abortController.signal.aborted) return + } reply.send = wrapSend(reply.send, req) @@ -151,11 +166,14 @@ function preValidation (request, reply, done) { if (abortController.signal.aborted) return } - if (bodyParserReadCh.hasSubscribers && request.body) { + // Analyze body before schema validation + if (bodyParserReadCh.hasSubscribers && request.body && !bodyPublished.has(req)) { abortController ??= new AbortController() bodyParserReadCh.publish({ req, res, body: request.body, abortController }) + bodyPublished.add(req) + if (abortController.signal.aborted) return } diff --git a/packages/dd-trace/test/appsec/index.fastify.plugin.spec.js b/packages/dd-trace/test/appsec/index.fastify.plugin.spec.js index ae28854dfa9..d169634ed06 100644 --- 
a/packages/dd-trace/test/appsec/index.fastify.plugin.spec.js +++ b/packages/dd-trace/test/appsec/index.fastify.plugin.spec.js @@ -5,12 +5,13 @@ const { assert } = require('chai') const path = require('path') const zlib = require('zlib') const fs = require('node:fs') +const semver = require('semver') const agent = require('../plugins/agent') const appsec = require('../../src/appsec') const Config = require('../../src/config') const { json } = require('../../src/appsec/blocked_templates') -withVersions('fastify', 'fastify', '>=2', version => { +withVersions('fastify', 'fastify', '>=2', (fastifyVersion, _, fastifyLoadedVersion) => { describe('Suspicious request blocking - query', () => { let server, requestBody, axios @@ -19,7 +20,7 @@ withVersions('fastify', 'fastify', '>=2', version => { }) before((done) => { - const fastify = require(`../../../../versions/fastify@${version}`).get() + const fastify = require(`../../../../versions/fastify@${fastifyVersion}`).get() const app = fastify() @@ -84,7 +85,7 @@ withVersions('fastify', 'fastify', '>=2', version => { }) before((done) => { - const fastify = require(`../../../../versions/fastify@${version}`).get() + const fastify = require(`../../../../versions/fastify@${fastifyVersion}`).get() const app = fastify() @@ -184,7 +185,7 @@ withVersions('fastify', 'fastify', '>=2', version => { }) before((done) => { - const fastify = require(`../../../../versions/fastify@${version}`).get() + const fastify = require(`../../../../versions/fastify@${fastifyVersion}`).get() const app = fastify() @@ -259,7 +260,7 @@ withVersions('fastify', 'fastify', '>=2', version => { }) before((done) => { - const fastify = require(`../../../../versions/fastify@${version}`).get() + const fastify = require(`../../../../versions/fastify@${fastifyVersion}`).get() const app = fastify() app.get('/multiple-path-params/:parameter1/:parameter2', (request, reply) => { @@ -441,13 +442,13 @@ withVersions('fastify', 'fastify', '>=2', version => { let server, requestCookie, axios before(function () { - if (version === '3.9.2') { + if (semver.intersects(fastifyLoadedVersion, '3.9.2')) { // Fastify 3.9.2 is incompatible with @fastify/cookie >=6 this.skip() } // Skip preParsing hook for Fastify 2.x - has compatibility issues - if (hook === 'preParsing' && version.startsWith('2')) { + if (hook === 'preParsing' && semver.intersects(fastifyLoadedVersion, '2')) { this.skip() } @@ -455,7 +456,7 @@ withVersions('fastify', 'fastify', '>=2', version => { }) before((done) => { - const fastify = require(`../../../../versions/fastify@${version}`).get() + const fastify = require(`../../../../versions/fastify@${fastifyVersion}`).get() const fastifyCookie = require(`../../../../versions/@fastify/cookie@${cookieVersion}`).get() const app = fastify() @@ -498,9 +499,7 @@ withVersions('fastify', 'fastify', '>=2', version => { }) after(() => { - if (server) { - server.close() - } + server?.close() return agent.close({ ritmReset: false }) }) @@ -530,6 +529,98 @@ withVersions('fastify', 'fastify', '>=2', version => { }) }) }) + + describe('Suspicious request blocking - multipart', () => { + withVersions('fastify', '@fastify/multipart', (multipartVersion, _, multipartLoadedVersion) => { + let server, uploadSpy, axios + + // The skips in this section are complex because of the incompatibilities between Fastify and @fastify/multipart + // We are not testing every major version of those libraries because of the complexity of the tests + before(function () { + // @fastify/multipart is not compatible with Fastify 2.x + 
if (semver.intersects(fastifyLoadedVersion, '2')) { + this.skip() + } + + // This Fastify version only works with @fastify/multipart 6 + if (semver.intersects(fastifyLoadedVersion, '3.9.2') && semver.intersects(multipartLoadedVersion, '>=7')) { + this.skip() + } + + // Fastify 5 dropped support for multipart <7 + if (semver.intersects(fastifyLoadedVersion, '>=5') && semver.intersects(multipartLoadedVersion, '<7.0.0')) { + this.skip() + } + + return agent.load(['fastify', '@fastify/multipart', 'http'], { client: false }) + }) + + before((done) => { + const fastify = require(`../../../../versions/fastify@${fastifyVersion}`).get() + const fastifyMultipart = require(`../../../../versions/@fastify/multipart@${multipartVersion}`).get() + + const app = fastify() + + app.register(fastifyMultipart, { attachFieldsToBody: true }) + + app.post('/', (request, reply) => { + uploadSpy() + reply.send('DONE') + }) + + app.listen({ port: 0 }, () => { + const port = server.address().port + axios = Axios.create({ baseURL: `http://localhost:${port}` }) + done() + }) + server = app.server + }) + + beforeEach(() => { + uploadSpy = sinon.stub() + appsec.enable(new Config({ + appsec: { + enabled: true, + rules: path.join(__dirname, 'body-parser-rules.json') + } + })) + }) + + afterEach(() => { + appsec.disable() + }) + + after(() => { + server?.close() + return agent.close({ ritmReset: false }) + }) + + it('should not block the request without an attack', async () => { + const form = new FormData() + form.append('key', 'value') + + const res = await axios.post('/', form) + + assert.strictEqual(res.status, 200) + sinon.assert.calledOnce(uploadSpy) + assert.strictEqual(res.data, 'DONE') + }) + + it('should block the request when attack is detected', async () => { + try { + const form = new FormData() + form.append('key', 'testattack') + + await axios.post('/', form) + + return Promise.reject(new Error('Request should not return 200')) + } catch (e) { + assert.strictEqual(e.response.status, 403) + sinon.assert.notCalled(uploadSpy) + } + }) + }) + }) }) describe('Api Security - Fastify', () => { diff --git a/packages/dd-trace/test/plugins/externals.json b/packages/dd-trace/test/plugins/externals.json index 7ab594047db..183bf1b73ab 100644 --- a/packages/dd-trace/test/plugins/externals.json +++ b/packages/dd-trace/test/plugins/externals.json @@ -161,6 +161,10 @@ { "name": "@fastify/cookie", "versions": [">=6"] + }, + { + "name": "@fastify/multipart", + "versions": [">=6"] } ], "generic-pool": [ From babfb59b60d853ce1cd4f868ea2dd53c4c7f8cb5 Mon Sep 17 00:00:00 2001 From: Ruben Bridgewater Date: Wed, 9 Jul 2025 16:21:25 +0200 Subject: [PATCH 13/53] instrumentation: add hono support (#6061) The instrumentation does not yet handle middlewares, but the basic functionality should work just fine.
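For reference, a minimal Hono app served through `@hono/node-server`, mirroring the integration-test server added in this patch (the port and the route are illustrative):

    import 'dd-trace/init.js'
    import { Hono } from 'hono'
    import { serve } from '@hono/node-server'

    const app = new Hono()

    // Routes are reported as e.g. resource `GET /user/:id` on the `hono.request` span.
    app.get('/user/:id', (c) => c.json({ id: c.req.param('id') }))

    serve({ fetch: app.fetch, port: 3000 })

Middleware spans are not created yet; requests are still traced end to end through the wrapped `fetch` and `compose` functions.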
--------- Co-authored-by: Luke Dunscombe --- .github/workflows/apm-integrations.yml | 8 + docs/test.ts | 2 + index.d.ts | 7 + .../src/helpers/hooks.js | 1 + .../src/helpers/register.js | 2 +- packages/datadog-instrumentations/src/hono.js | 102 ++++++++ packages/datadog-plugin-hono/src/index.js | 28 +++ .../datadog-plugin-hono/test/index.spec.js | 229 ++++++++++++++++++ .../test/integration-test/client.spec.js | 49 ++++ .../test/integration-test/server.mjs | 16 ++ packages/dd-trace/src/plugins/index.js | 1 + .../src/supported-configurations.json | 1 + packages/dd-trace/test/plugins/externals.json | 6 + 13 files changed, 451 insertions(+), 1 deletion(-) create mode 100644 packages/datadog-instrumentations/src/hono.js create mode 100644 packages/datadog-plugin-hono/src/index.js create mode 100644 packages/datadog-plugin-hono/test/index.spec.js create mode 100644 packages/datadog-plugin-hono/test/integration-test/client.spec.js create mode 100644 packages/datadog-plugin-hono/test/integration-test/server.mjs diff --git a/.github/workflows/apm-integrations.yml b/.github/workflows/apm-integrations.yml index cb2b19cd23d..6b8e6e4549f 100644 --- a/.github/workflows/apm-integrations.yml +++ b/.github/workflows/apm-integrations.yml @@ -438,6 +438,14 @@ jobs: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - uses: ./.github/actions/plugins/test + hono: + runs-on: ubuntu-latest + env: + PLUGINS: hono + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: ./.github/actions/plugins/test + http: strategy: matrix: diff --git a/docs/test.ts b/docs/test.ts index 879a7e4da18..fd8e3570479 100644 --- a/docs/test.ts +++ b/docs/test.ts @@ -339,6 +339,8 @@ tracer.use('grpc', { client: { metadata: [] } }); tracer.use('grpc', { server: { metadata: [] } }); tracer.use('hapi'); tracer.use('hapi', httpServerOptions); +tracer.use('hono'); +tracer.use('hono', httpServerOptions); tracer.use('http'); tracer.use('http', { server: httpServerOptions diff --git a/index.d.ts b/index.d.ts index 8a304e017be..52ab38f3ee8 100644 --- a/index.d.ts +++ b/index.d.ts @@ -186,6 +186,7 @@ interface Plugins { "graphql": tracer.plugins.graphql; "grpc": tracer.plugins.grpc; "hapi": tracer.plugins.hapi; + "hono": tracer.plugins.hono; "http": tracer.plugins.http; "http2": tracer.plugins.http2; "ioredis": tracer.plugins.ioredis; @@ -1603,6 +1604,12 @@ declare namespace tracer { */ interface hapi extends HttpServer {} + /** + * This plugin automatically instruments the + * [hono](https://hono.dev/) module. + */ + interface hono extends HttpServer {} + /** * This plugin automatically instruments the * [http](https://nodejs.org/api/http.html) module. 
diff --git a/packages/datadog-instrumentations/src/helpers/hooks.js b/packages/datadog-instrumentations/src/helpers/hooks.js index 97f80158652..18d83e22160 100644 --- a/packages/datadog-instrumentations/src/helpers/hooks.js +++ b/packages/datadog-instrumentations/src/helpers/hooks.js @@ -58,6 +58,7 @@ module.exports = { grpc: () => require('../grpc'), handlebars: () => require('../handlebars'), hapi: () => require('../hapi'), + hono: { esmFirst: true, fn: () => require('../hono') }, http: () => require('../http'), http2: () => require('../http2'), https: () => require('../http'), diff --git a/packages/datadog-instrumentations/src/helpers/register.js b/packages/datadog-instrumentations/src/helpers/register.js index faac734203e..23fa29cfaa3 100644 --- a/packages/datadog-instrumentations/src/helpers/register.js +++ b/packages/datadog-instrumentations/src/helpers/register.js @@ -207,7 +207,7 @@ function filename (name, file) { // This function captures the instrumentation file name for a given package by parsing the hook require // function given the module name. It is used to ensure that instrumentations such as redis // that have several different modules being hooked, ie: 'redis' main package, and @redis/client submodule -// return a consistent instrumentation name. This is used later to ensure that atleast some portion of +// return a consistent instrumentation name. This is used later to ensure that at least some portion of // the integration was successfully instrumented. Prevents incorrect `Found incompatible integration version: ` messages // Example: // redis -> "() => require('../redis')" -> redis diff --git a/packages/datadog-instrumentations/src/hono.js b/packages/datadog-instrumentations/src/hono.js new file mode 100644 index 00000000000..894851a060f --- /dev/null +++ b/packages/datadog-instrumentations/src/hono.js @@ -0,0 +1,102 @@ +'use strict' + +const shimmer = require('../../datadog-shimmer') +const { + addHook, + channel +} = require('./helpers/instrument') + +const routeChannel = channel('apm:hono:request:route') +const handleChannel = channel('apm:hono:request:handle') +const errorChannel = channel('apm:hono:request:error') + +function wrapFetch (fetch) { + return function (request, env, executionCtx) { + handleChannel.publish({ req: env.incoming }) + return fetch.apply(this, arguments) + } +} + +function wrapCompose (compose) { + return function (middleware, onError, onNotFound) { + const instrumentedOnError = (...args) => { + const [error, context] = args + const req = context.env.incoming + errorChannel.publish({ req, error }) + return onError(...args) + } + + const instrumentedMiddlewares = middleware.map(h => { + const [[fn, meta], params] = h + + // TODO: handle middleware instrumentation + const instrumentedFn = (...args) => { + const context = args[0] + const req = context.env.incoming + const route = meta.path + routeChannel.publish({ + req, + route + }) + return fn(...args) + } + return [[instrumentedFn, meta], params] + }) + return compose.apply(this, [instrumentedMiddlewares, instrumentedOnError, onNotFound]) + } +} + +addHook({ + name: 'hono', + versions: ['>=4'], + file: 'dist/hono.js' +}, hono => { + class Hono extends hono.Hono { + constructor (...args) { + super(...args) + shimmer.wrap(this, 'fetch', wrapFetch) + } + } + + hono.Hono = Hono + + return hono +}) + +addHook({ + name: 'hono', + versions: ['>=4'], + file: 'dist/cjs/hono.js' +}, hono => { + class Hono extends hono.Hono { + constructor (...args) { + super(...args) + shimmer.wrap(this, 'fetch', 
wrapFetch) + } + } + + return Object.create(hono, { + Hono: { + get () { + return Hono + }, + enumerable: true, + } + }) +}) + +addHook({ + name: 'hono', + versions: ['>=4'], + file: 'dist/cjs/compose.js' +}, Compose => { + return shimmer.wrap(Compose, 'compose', wrapCompose, { replaceGetter: true }) +}) + +addHook({ + name: 'hono', + versions: ['>=4'], + file: 'dist/compose.js' +}, Compose => { + return shimmer.wrap(Compose, 'compose', wrapCompose) +}) diff --git a/packages/datadog-plugin-hono/src/index.js b/packages/datadog-plugin-hono/src/index.js new file mode 100644 index 00000000000..c6959f85c53 --- /dev/null +++ b/packages/datadog-plugin-hono/src/index.js @@ -0,0 +1,28 @@ +'use strict' + +const RouterPlugin = require('../../datadog-plugin-router/src') +const web = require('../../dd-trace/src/plugins/util/web') + +class HonoPlugin extends RouterPlugin { + static get id () { + return 'hono' + } + + constructor (...args) { + super(...args) + + this.addSub('apm:hono:request:handle', ({ req }) => { + this.setFramework(req, 'hono', this.config) + }) + + this.addSub('apm:hono:request:route', ({ req, route }) => { + web.setRoute(req, route) + }) + + this.addSub('apm:hono:request:error', ({ req, error }) => { + web.addError(req, error) + }) + } +} + +module.exports = HonoPlugin diff --git a/packages/datadog-plugin-hono/test/index.spec.js b/packages/datadog-plugin-hono/test/index.spec.js new file mode 100644 index 00000000000..d0f171df1f9 --- /dev/null +++ b/packages/datadog-plugin-hono/test/index.spec.js @@ -0,0 +1,229 @@ +'use strict' + +const assert = require('node:assert') +const axios = require('axios') +const agent = require('../../dd-trace/test/plugins/agent') +const { + ERROR_TYPE, + ERROR_MESSAGE, + ERROR_STACK +} = require('../../dd-trace/src/constants') + +describe('Plugin', () => { + let tracer + let server + let app + let serve + let hono + + describe('hono', () => { + withVersions('hono', 'hono', version => { + before(async () => { + await agent.load(['hono', 'http'], [{}, { client: false }]) + hono = require(`../../../versions/hono@${version}`).get() + }) + + after(() => { + return agent.close({ ritmReset: false }) + }) + + beforeEach(() => { + tracer = require('../../dd-trace') + serve = require('../../../versions/@hono/node-server@1.15.0').get().serve + + app = new hono.Hono() + + app.use((c, next) => { + c.set('middleware', 'test') + return next() + }) + + app.get('/user/:id', (c) => { + return c.json({ + id: c.req.param('id'), + middleware: c.get('middleware') + }) + }) + }) + + afterEach(() => { + server?.close() + server = null + }) + + it('should do automatic instrumentation on routes', async function () { + let resolver + const promise = new Promise((resolve) => { + resolver = resolve + }) + + server = serve({ + fetch: app.fetch, + port: 0 + }, ({ port }) => resolver(port)) + + const port = await promise + + const { data } = await axios.get(`http://localhost:${port}/user/123`) + + assert.deepStrictEqual(data, { + id: '123', + middleware: 'test' + }) + + await agent.assertFirstTraceSpan({ + name: 'hono.request', + service: 'test', + type: 'web', + resource: 'GET /user/:id', + meta: { + 'span.kind': 'server', + 'http.url': `http://localhost:${port}/user/123`, + 'http.method': 'GET', + 'http.status_code': '200', + component: 'hono', + } + }) + }) + + it('should do automatic instrumentation on nested routes', async function () { + let resolver + const promise = new Promise((resolve) => { + resolver = resolve + }) + + const books = new hono.Hono() + + books.get('/:id', (c) 
=> c.json({ + id: c.req.param('id'), + name: 'test' + })) + + app.route('/books', books) + + server = serve({ + fetch: app.fetch, + port: 0 + }, ({ port }) => resolver(port)) + + const port = await promise + + const { data } = await axios.get(`http://localhost:${port}/books/12345`) + + assert.deepStrictEqual(data, { + id: '12345', + name: 'test' + }) + + await agent.assertFirstTraceSpan({ + name: 'hono.request', + service: 'test', + type: 'web', + resource: 'GET /books/:id', + meta: { + 'span.kind': 'server', + 'http.url': `http://localhost:${port}/books/12345`, + 'http.method': 'GET', + 'http.status_code': '200', + component: 'hono', + } + }) + }) + + it('should handle errors', async function () { + let resolver + const promise = new Promise((resolve) => { + resolver = resolve + }) + + const error = new Error('message') + + app.get('/error', () => { + throw error + }) + + server = serve({ + fetch: app.fetch, + port: 0 + }, ({ port }) => resolver(port)) + + const port = await promise + + await assert.rejects( + axios.get(`http://localhost:${port}/error`), + { + message: 'Request failed with status code 500', + name: 'AxiosError' + } + ) + + await agent.assertFirstTraceSpan({ + error: 1, + resource: 'GET /error', + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + 'http.status_code': '500', + component: 'hono', + } + }) + }) + + it('should have active scope within request', async () => { + let resolver + const promise = new Promise((resolve) => { + resolver = resolve + }) + + app.get('/request', (c) => { + assert(tracer.scope().active()) + return c.text('test') + }) + + server = serve({ + fetch: app.fetch, + port: 0 + }, ({ port }) => resolver(port)) + + const port = await promise + + const { data } = await axios.get(`http://localhost:${port}/request`) + + assert.deepStrictEqual(data, 'test') + }) + + it('should extract its parent span from the headers', async () => { + let resolver + const promise = new Promise((resolve) => { + resolver = resolve + }) + + app.get('/request', (c) => { + assert(tracer.scope().active()) + return c.text('test') + }) + + server = serve({ + fetch: app.fetch, + port: 0 + }, ({ port }) => resolver(port)) + + const port = await promise + + await axios.get(`http://localhost:${port}/user/123`, { + headers: { + 'x-datadog-trace-id': '1234', + 'x-datadog-parent-id': '5678', + 'ot-baggage-foo': 'bar' + } + }) + + await agent.assertFirstTraceSpan({ + trace_id: 1234n, + parent_id: 5678n, + }) + }) + }) + }) +}) diff --git a/packages/datadog-plugin-hono/test/integration-test/client.spec.js b/packages/datadog-plugin-hono/test/integration-test/client.spec.js new file mode 100644 index 00000000000..e1b5456d430 --- /dev/null +++ b/packages/datadog-plugin-hono/test/integration-test/client.spec.js @@ -0,0 +1,49 @@ +'use strict' + +const { + FakeAgent, + createSandbox, + curlAndAssertMessage, + spawnPluginIntegrationTestProc, + assertObjectContains, +} = require('../../../../integration-tests/helpers') + +describe('esm', () => { + let agent + let proc + let sandbox + + withVersions('hono', 'hono', version => { + before(async function () { + this.timeout(50000) + sandbox = await createSandbox([`'hono@${version}'`, '@hono/node-server@1.15.0'], false, + ['./packages/datadog-plugin-hono/test/integration-test/*']) + }) + + after(async function () { + this.timeout(50000) + await sandbox.remove() + }) + + beforeEach(async () => { + agent = await new FakeAgent().start() + }) + + afterEach(async () => { + proc?.kill() + await 
agent.stop() + }) + + it('is instrumented', async () => { + proc = await spawnPluginIntegrationTestProc(sandbox.folder, 'server.mjs', agent.port) + proc.url += '/hello' + + return curlAndAssertMessage(agent, proc, ({ headers, payload }) => { + assertObjectContains(headers, { host: `127.0.0.1:${agent.port}` }) + // TODO: Fix the resource! It should be 'GET /hello' + // This seems to be a generic ESM issue, also e.g., on express. + assertObjectContains(payload, [[{ name: 'hono.request', resource: 'GET' }]]) + }) + }).timeout(50000) + }) +}) diff --git a/packages/datadog-plugin-hono/test/integration-test/server.mjs b/packages/datadog-plugin-hono/test/integration-test/server.mjs new file mode 100644 index 00000000000..8630dd2ac87 --- /dev/null +++ b/packages/datadog-plugin-hono/test/integration-test/server.mjs @@ -0,0 +1,16 @@ +import 'dd-trace/init.js' +import { Hono } from 'hono' +import { serve } from '@hono/node-server' + +const app = new Hono() + +app.get('/hello', (c) => { + return c.text('green energy\n') +}) + +serve({ + fetch: app.fetch, +}, (i) => { + const port = i.port + process.send({ port }) +}) diff --git a/packages/dd-trace/src/plugins/index.js b/packages/dd-trace/src/plugins/index.js index dcf82b5239a..34350f40c72 100644 --- a/packages/dd-trace/src/plugins/index.js +++ b/packages/dd-trace/src/plugins/index.js @@ -45,6 +45,7 @@ module.exports = { get graphql () { return require('../../../datadog-plugin-graphql/src') }, get grpc () { return require('../../../datadog-plugin-grpc/src') }, get hapi () { return require('../../../datadog-plugin-hapi/src') }, + get hono () { return require('../../../datadog-plugin-hono/src') }, get http () { return require('../../../datadog-plugin-http/src') }, get http2 () { return require('../../../datadog-plugin-http2/src') }, get https () { return require('../../../datadog-plugin-http/src') }, diff --git a/packages/dd-trace/src/supported-configurations.json b/packages/dd-trace/src/supported-configurations.json index 28cf84a1111..8d07a437be9 100644 --- a/packages/dd-trace/src/supported-configurations.json +++ b/packages/dd-trace/src/supported-configurations.json @@ -277,6 +277,7 @@ "DD_TRACE_HAPI_ENABLED": ["A"], "DD_TRACE_HAPI_HAPI_ENABLED": ["A"], "DD_TRACE_HEADER_TAGS": ["A"], + "DD_TRACE_HONO_ENABLED": ["A"], "DD_TRACE_HTTP_ENABLED": ["A"], "DD_TRACE_HTTP2_ENABLED": ["A"], "DD_TRACE_HTTPS_ENABLED": ["A"], diff --git a/packages/dd-trace/test/plugins/externals.json b/packages/dd-trace/test/plugins/externals.json index 183bf1b73ab..ac98325f767 100644 --- a/packages/dd-trace/test/plugins/externals.json +++ b/packages/dd-trace/test/plugins/externals.json @@ -261,6 +261,12 @@ "versions": ["9.1.4"] } ], + "hono": [ + { + "name": "@hono/node-server", + "versions": ["1.15.0"] + } + ], "jest": [ { "name": "jest", From dbb698e701c022f5ac0a86b2e91b32dcfa02ed3c Mon Sep 17 00:00:00 2001 From: Carles Capell <107924659+CarlesDD@users.noreply.github.com> Date: Thu, 10 Jul 2025 07:18:57 +0200 Subject: [PATCH 14/53] Bump native-appsec package + ASM multi DD config capability (#6049) --- integration-tests/appsec/graphql.spec.js | 10 +++--- package.json | 2 +- packages/dd-trace/src/appsec/reporter.js | 14 ++++---- .../src/appsec/waf/waf_context_wrapper.js | 4 +-- .../src/remote_config/capabilities.js | 3 +- packages/dd-trace/src/remote_config/index.js | 2 ++ .../dd-trace/test/appsec/reporter.spec.js | 10 +++--- .../dd-trace/test/appsec/waf/index.spec.js | 34 +++++++++---------- .../appsec/waf/waf_context_wrapper.spec.js | 8 ++--- 
.../dd-trace/test/remote_config/index.spec.js | 10 ++++++ yarn.lock | 8 ++--- 11 files changed, 59 insertions(+), 46 deletions(-) diff --git a/integration-tests/appsec/graphql.spec.js b/integration-tests/appsec/graphql.spec.js index 9ce71b82221..a70d378c941 100644 --- a/integration-tests/appsec/graphql.spec.js +++ b/integration-tests/appsec/graphql.spec.js @@ -72,15 +72,15 @@ describe('graphql', () => { const result = { triggers: [ { - rule: - { + rule: { id: 'test-rule-id-1', name: 'test-rule-name-1', tags: { - category: 'attack_attempt', - type: 'security_scanner' - } + type: 'security_scanner', + category: 'attack_attempt' + }, + on_match: [] }, rule_matches: [ { diff --git a/package.json b/package.json index 20bd8c4597a..9abf47321e2 100644 --- a/package.json +++ b/package.json @@ -111,7 +111,7 @@ ], "dependencies": { "@datadog/libdatadog": "0.7.0", - "@datadog/native-appsec": "9.0.0", + "@datadog/native-appsec": "10.0.0", "@datadog/native-iast-taint-tracking": "4.0.0", "@datadog/native-metrics": "3.1.1", "@datadog/pprof": "5.9.0", diff --git a/packages/dd-trace/src/appsec/reporter.js b/packages/dd-trace/src/appsec/reporter.js index 2e9c8ac6927..d4e820592a8 100644 --- a/packages/dd-trace/src/appsec/reporter.js +++ b/packages/dd-trace/src/appsec/reporter.js @@ -430,12 +430,12 @@ function isRaspAttack (events) { return events.some(e => e.rule?.tags?.module === 'rasp') } -function isFingerprintDerivative (derivative) { - return derivative.startsWith('_dd.appsec.fp') +function isFingerprintAttribute (attribute) { + return attribute.startsWith('_dd.appsec.fp') } -function reportDerivatives (derivatives) { - if (!derivatives) return +function reportAttributes (attributes) { + if (!attributes) return const req = storage('legacy').getStore()?.req const rootSpan = web.root(req) @@ -443,8 +443,8 @@ function reportDerivatives (derivatives) { if (!rootSpan) return const tags = {} - for (let [tag, value] of Object.entries(derivatives)) { - if (!isFingerprintDerivative(tag)) { + for (let [tag, value] of Object.entries(attributes)) { + if (!isFingerprintAttribute(tag)) { const gzippedValue = zlib.gzipSync(JSON.stringify(value)) value = gzippedValue.toString('base64') } @@ -543,7 +543,7 @@ module.exports = { reportAttack, reportWafUpdate: incrementWafUpdatesMetric, reportRaspRuleSkipped: updateRaspRuleSkippedMetricTags, - reportDerivatives, + reportAttributes, finishRequest, mapHeaderAndTags, truncateRequestBody diff --git a/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js b/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js index 348f2abd6eb..d9d8856982f 100644 --- a/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js +++ b/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js @@ -135,7 +135,7 @@ class WAFContextWrapper { this.setUserIdCache(userId, result) } - metrics.duration = result.totalRuntime / 1e3 + metrics.duration = result.duration / 1e3 metrics.blockTriggered = blockTriggered metrics.ruleTriggered = ruleTriggered metrics.wafTimeout = result.timeout @@ -144,7 +144,7 @@ class WAFContextWrapper { Reporter.reportAttack(result.events) } - Reporter.reportDerivatives(result.derivatives) + Reporter.reportAttributes(result.attributes) return result } catch (err) { diff --git a/packages/dd-trace/src/remote_config/capabilities.js b/packages/dd-trace/src/remote_config/capabilities.js index 9b56344c4e2..61a6e6ae09d 100644 --- a/packages/dd-trace/src/remote_config/capabilities.js +++ b/packages/dd-trace/src/remote_config/capabilities.js @@ -27,5 +27,6 @@ module.exports = { 
ASM_SESSION_FINGERPRINT: 1n << 33n, ASM_NETWORK_FINGERPRINT: 1n << 34n, ASM_HEADER_FINGERPRINT: 1n << 35n, - ASM_RASP_CMDI: 1n << 37n + ASM_RASP_CMDI: 1n << 37n, + ASM_DD_MULTICONFIG: 1n << 42n } diff --git a/packages/dd-trace/src/remote_config/index.js b/packages/dd-trace/src/remote_config/index.js index ab149bdf000..255803f73ea 100644 --- a/packages/dd-trace/src/remote_config/index.js +++ b/packages/dd-trace/src/remote_config/index.js @@ -93,6 +93,7 @@ function enableWafUpdate (appsecConfig) { rc.updateCapabilities(RemoteConfigCapabilities.ASM_SESSION_FINGERPRINT, true) rc.updateCapabilities(RemoteConfigCapabilities.ASM_NETWORK_FINGERPRINT, true) rc.updateCapabilities(RemoteConfigCapabilities.ASM_HEADER_FINGERPRINT, true) + rc.updateCapabilities(RemoteConfigCapabilities.ASM_DD_MULTICONFIG, true) if (appsecConfig.rasp?.enabled) { rc.updateCapabilities(RemoteConfigCapabilities.ASM_RASP_SQLI, true) @@ -128,6 +129,7 @@ function disableWafUpdate () { rc.updateCapabilities(RemoteConfigCapabilities.ASM_SESSION_FINGERPRINT, false) rc.updateCapabilities(RemoteConfigCapabilities.ASM_NETWORK_FINGERPRINT, false) rc.updateCapabilities(RemoteConfigCapabilities.ASM_HEADER_FINGERPRINT, false) + rc.updateCapabilities(RemoteConfigCapabilities.ASM_DD_MULTICONFIG, false) rc.updateCapabilities(RemoteConfigCapabilities.ASM_RASP_SQLI, false) rc.updateCapabilities(RemoteConfigCapabilities.ASM_RASP_SSRF, false) diff --git a/packages/dd-trace/test/appsec/reporter.spec.js b/packages/dd-trace/test/appsec/reporter.spec.js index 79b1050e50d..a9bb98b62e9 100644 --- a/packages/dd-trace/test/appsec/reporter.spec.js +++ b/packages/dd-trace/test/appsec/reporter.spec.js @@ -696,20 +696,20 @@ describe('reporter', () => { }) }) - describe('reportDerivatives', () => { + describe('reportAttributes', () => { it('should not call addTags if parameter is undefined', () => { - Reporter.reportDerivatives(undefined) + Reporter.reportAttributes(undefined) expect(span.addTags).not.to.be.called }) it('should call addTags with an empty array', () => { - Reporter.reportDerivatives([]) + Reporter.reportAttributes([]) expect(span.addTags).to.be.calledOnceWithExactly({}) }) it('should call addTags', () => { const schemaValue = [{ key: [8] }] - const derivatives = { + const attributes = { '_dd.appsec.fp.http.endpoint': 'endpoint_fingerprint', '_dd.appsec.fp.http.header': 'header_fingerprint', '_dd.appsec.fp.http.network': 'network_fingerprint', @@ -722,7 +722,7 @@ describe('reporter', () => { 'custom.processor.output': schemaValue } - Reporter.reportDerivatives(derivatives) + Reporter.reportAttributes(attributes) const schemaEncoded = zlib.gzipSync(JSON.stringify(schemaValue)).toString('base64') expect(span.addTags).to.be.calledOnceWithExactly({ diff --git a/packages/dd-trace/test/appsec/waf/index.spec.js b/packages/dd-trace/test/appsec/waf/index.spec.js index 8b643d273a3..6754e253dc3 100644 --- a/packages/dd-trace/test/appsec/waf/index.spec.js +++ b/packages/dd-trace/test/appsec/waf/index.spec.js @@ -49,7 +49,7 @@ describe('WAF Manager', () => { sinon.stub(Reporter.metricsQueue, 'set') sinon.stub(Reporter, 'reportMetrics') sinon.stub(Reporter, 'reportAttack') - sinon.stub(Reporter, 'reportDerivatives') + sinon.stub(Reporter, 'reportAttributes') sinon.spy(Reporter, 'reportWafInit') sinon.spy(Reporter, 'reportWafConfigUpdate') @@ -333,7 +333,7 @@ describe('WAF Manager', () => { }) it('should call ddwafContext.run with params', () => { - ddwafContext.run.returns({ totalRuntime: 1, durationExt: 1 }) + ddwafContext.run.returns({ duration: 1, 
durationExt: 1 }) wafContextWrapper.run({ persistent: { @@ -354,7 +354,7 @@ describe('WAF Manager', () => { it('should report attack when ddwafContext returns events', () => { const result = { - totalRuntime: 1, + duration: 1, durationExt: 1, events: ['ATTACK DATA'] } @@ -373,7 +373,7 @@ describe('WAF Manager', () => { it('should report if rule is triggered', () => { const result = { - totalRuntime: 1, + duration: 1, durationExt: 1, events: ['ruleTriggered'] } @@ -395,7 +395,7 @@ describe('WAF Manager', () => { it('should report raspRuleType', () => { const result = { - totalRuntime: 1, + duration: 1, durationExt: 1 } @@ -414,7 +414,7 @@ describe('WAF Manager', () => { it('should not report raspRuleType when it is not provided', () => { const result = { - totalRuntime: 1, + duration: 1, durationExt: 1 } @@ -432,7 +432,7 @@ describe('WAF Manager', () => { }) it('should not report attack when ddwafContext does not return events', () => { - ddwafContext.run.returns({ totalRuntime: 1, durationExt: 1 }) + ddwafContext.run.returns({ duration: 1, durationExt: 1 }) const params = { persistent: { 'server.request.headers.no_cookies': { header: 'value' } @@ -445,7 +445,7 @@ describe('WAF Manager', () => { }) it('should not report attack when ddwafContext returns empty data', () => { - ddwafContext.run.returns({ totalRuntime: 1, durationExt: 1, events: [] }) + ddwafContext.run.returns({ duration: 1, durationExt: 1, events: [] }) const params = { persistent: { 'server.request.headers.no_cookies': { header: 'value' } @@ -459,7 +459,7 @@ describe('WAF Manager', () => { it('should return waf result', () => { const result = { - totalRuntime: 1, durationExt: 1, events: [], actions: ['block'] + duration: 1, durationExt: 1, events: [], actions: ['block'] } ddwafContext.run.returns(result) @@ -474,11 +474,11 @@ describe('WAF Manager', () => { expect(wafResult).to.be.equals(result) }) - it('should report schemas when ddwafContext returns schemas in the derivatives', () => { + it('should report schemas when ddwafContext returns schemas in the attributes', () => { const result = { - totalRuntime: 1, + duration: 1, durationExt: 1, - derivatives: [{ '_dd.appsec.s.req.body': [8] }] + attributes: [{ '_dd.appsec.s.req.body': [8] }] } const params = { persistent: { @@ -492,14 +492,14 @@ describe('WAF Manager', () => { ddwafContext.run.returns(result) wafContextWrapper.run(params) - expect(Reporter.reportDerivatives).to.be.calledOnceWithExactly(result.derivatives) + expect(Reporter.reportAttributes).to.be.calledOnceWithExactly(result.attributes) }) - it('should report fingerprints when ddwafContext returns fingerprints in results derivatives', () => { + it('should report fingerprints when ddwafContext returns fingerprints in results attributes', () => { const result = { - totalRuntime: 1, + duration: 1, durationExt: 1, - derivatives: { + attributes: { '_dd.appsec.s.req.body': [8], '_dd.appsec.fp.http.endpoint': 'http-post-abcdefgh-12345678-abcdefgh', '_dd.appsec.fp.http.network': 'net-1-0100000000', @@ -514,7 +514,7 @@ describe('WAF Manager', () => { 'server.request.body': 'foo' } }) - sinon.assert.calledOnceWithExactly(Reporter.reportDerivatives, result.derivatives) + sinon.assert.calledOnceWithExactly(Reporter.reportAttributes, result.attributes) }) }) }) diff --git a/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js b/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js index 41fe88206e4..74ed7c51033 100644 --- a/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js +++ 
b/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js @@ -25,7 +25,7 @@ describe('WAFContextWrapper', () => { const ddwafContext = { run: sinon.stub().returns({ events: {}, - derivatives: {} + attributes: {} }) } const wafContextWrapper = new WAFContextWrapper(ddwafContext, 1000, '1.14.0', '1.8.0', knownAddresses) @@ -72,7 +72,7 @@ describe('WAFContextWrapper', () => { const ddwafContext = { run: sinon.stub().returns({ events: {}, - derivatives: {} + attributes: {} }) } const wafContextWrapper = new WAFContextWrapper(ddwafContext, 1000, '1.14.0', '1.8.0', knownAddresses) @@ -178,13 +178,13 @@ describe('WAFContextWrapper', () => { const ddwafContext = { run: sinon.stub().returns({ events: [{ rule_matches: [] }], - derivatives: [], + attributes: [], actions: { redirect_request: { status_code: 301 } }, - totalRuntime: 123456, + duration: 123456, timeout: false, metrics: { maxTruncatedString: 5000, diff --git a/packages/dd-trace/test/remote_config/index.spec.js b/packages/dd-trace/test/remote_config/index.spec.js index 33e8a423efd..f20f06db81a 100644 --- a/packages/dd-trace/test/remote_config/index.spec.js +++ b/packages/dd-trace/test/remote_config/index.spec.js @@ -248,6 +248,8 @@ describe('Remote Config index', () => { .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SHI, true) expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_CMDI, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_DD_MULTICONFIG, true) expect(rc.setProductHandler).to.have.been.calledWith('ASM_DATA') expect(rc.setProductHandler).to.have.been.calledWith('ASM_DD') @@ -296,6 +298,8 @@ describe('Remote Config index', () => { .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SHI, true) expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_CMDI, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_DD_MULTICONFIG, true) expect(rc.setProductHandler).to.have.been.calledWith('ASM_DATA') expect(rc.setProductHandler).to.have.been.calledWith('ASM_DD') @@ -346,6 +350,8 @@ describe('Remote Config index', () => { .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SHI, true) expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_CMDI, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_DD_MULTICONFIG, true) }) it('should not activate rasp capabilities if rasp is disabled', () => { @@ -391,6 +397,8 @@ describe('Remote Config index', () => { .to.not.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SHI) expect(rc.updateCapabilities) .to.not.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_CMDI) + expect(rc.updateCapabilities) + .to.not.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_DD_MULTICONFIG) }) }) @@ -436,6 +444,8 @@ describe('Remote Config index', () => { .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SHI, false) expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_CMDI, false) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_DD_MULTICONFIG, false) expect(rc.removeProductHandler).to.have.been.calledWith('ASM_DATA') expect(rc.removeProductHandler).to.have.been.calledWith('ASM_DD') diff --git a/yarn.lock b/yarn.lock index 8edfc60c376..b583b268e15 100644 --- a/yarn.lock 
+++ b/yarn.lock @@ -211,10 +211,10 @@ resolved "https://registry.yarnpkg.com/@datadog/libdatadog/-/libdatadog-0.7.0.tgz#81e07d3040c628892db697ccd01ae3c4d2a76315" integrity sha512-VVZLspzQcfEU47gmGCVoRkngn7RgFRR4CHjw4YaX8eWT+xz4Q4l6PvA45b7CMk9nlt3MNN5MtGdYttYMIpo6Sg== -"@datadog/native-appsec@9.0.0": - version "9.0.0" - resolved "https://registry.yarnpkg.com/@datadog/native-appsec/-/native-appsec-9.0.0.tgz#3ac854d8597ca75af0aa534aae28fa7f13057d2a" - integrity sha512-C7v16pP4p4Y+Cx0jcxTYmhZTptfVs8TYbn6LH/aQgTkwx2tWsWN5lss7fjBYjWyZoPMwVh2UX/yDm4ES25hJnQ== +"@datadog/native-appsec@10.0.0": + version "10.0.0" + resolved "https://registry.yarnpkg.com/@datadog/native-appsec/-/native-appsec-10.0.0.tgz#366d292445c6d4b02782f3c28a2a4f26acd498f3" + integrity sha512-1veYkp5DRy3RnqFRQGGIfX6ON8UibIq6YOXa+oJ0n5TRQFd1v6qWPzfonO31W4rDJ5qjxNRV6lX33m8JfaYnag== dependencies: node-gyp-build "^3.9.0" From 72ac79f22f15e5b8778ceda998b00164bbd8bf7d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mario=20Vidal=20Dom=C3=ADnguez?= <60353145+Mariovido@users.noreply.github.com> Date: Thu, 10 Jul 2025 09:58:52 +0200 Subject: [PATCH 15/53] [test-optimization] [SDTEST-2267] Remove `getPort` from Test Optimization tests (#6064) --- .../automatic-log-submission.spec.js | 9 ++--- integration-tests/cucumber/cucumber.spec.js | 4 +- integration-tests/cypress/cypress.spec.js | 38 +++++++++---------- .../playwright/playwright.spec.js | 14 +++---- integration-tests/selenium/selenium.spec.js | 9 ++--- integration-tests/test-api-manual.spec.js | 4 +- .../test-optimization-startup.spec.js | 4 +- 7 files changed, 37 insertions(+), 45 deletions(-) diff --git a/integration-tests/automatic-log-submission.spec.js b/integration-tests/automatic-log-submission.spec.js index ae13f94c3ef..6bc950e9d13 100644 --- a/integration-tests/automatic-log-submission.spec.js +++ b/integration-tests/automatic-log-submission.spec.js @@ -3,7 +3,6 @@ const { exec, execSync } = require('child_process') const { assert } = require('chai') -const getPort = require('get-port') const { createSandbox, @@ -31,8 +30,9 @@ describe('test visibility automatic log submission', () => { // Install chromium (configured in integration-tests/playwright.config.js) // *Be advised*: this means that we'll only be using chromium for this test suite execSync('npx playwright install chromium', { cwd, env: restOfEnv, stdio: 'inherit' }) - webAppPort = await getPort() - webAppServer.listen(webAppPort) + webAppServer.listen(0, () => { + webAppPort = webAppServer.address().port + }) }) after(async () => { @@ -41,8 +41,7 @@ describe('test visibility automatic log submission', () => { }) beforeEach(async function () { - const port = await getPort() - receiver = await new FakeCiVisIntake(port).start() + receiver = await new FakeCiVisIntake().start() }) afterEach(async () => { diff --git a/integration-tests/cucumber/cucumber.spec.js b/integration-tests/cucumber/cucumber.spec.js index ea1f9668afb..c7920b72aad 100644 --- a/integration-tests/cucumber/cucumber.spec.js +++ b/integration-tests/cucumber/cucumber.spec.js @@ -2,7 +2,6 @@ const { exec, execSync } = require('child_process') -const getPort = require('get-port') const { assert } = require('chai') const fs = require('fs') const path = require('path') @@ -95,8 +94,7 @@ versions.forEach(version => { }) beforeEach(async function () { - const port = await getPort() - receiver = await new FakeCiVisIntake(port).start() + receiver = await new FakeCiVisIntake().start() }) afterEach(async () => { diff --git a/integration-tests/cypress/cypress.spec.js 
b/integration-tests/cypress/cypress.spec.js index 62398825541..fde07315ebe 100644 --- a/integration-tests/cypress/cypress.spec.js +++ b/integration-tests/cypress/cypress.spec.js @@ -5,7 +5,6 @@ const { exec, execSync } = require('child_process') const path = require('path') const fs = require('fs') -const getPort = require('get-port') const { assert } = require('chai') const { @@ -114,7 +113,7 @@ moduleTypes.forEach(({ this.retries(2) this.timeout(60000) - let sandbox, cwd, receiver, childProcess, webAppPort, secondWebAppServer + let sandbox, cwd, receiver, childProcess, webAppPort, secondWebAppServer, secondWebAppPort if (type === 'commonJS') { testCommand = testCommand(version) @@ -124,8 +123,24 @@ moduleTypes.forEach(({ // cypress-fail-fast is required as an incompatible plugin sandbox = await createSandbox([`cypress@${version}`, 'cypress-fail-fast@7.1.0'], true) cwd = sandbox.folder - webAppPort = await getPort() - webAppServer.listen(webAppPort) + webAppServer.listen(0, 'localhost', () => { + webAppPort = webAppServer.address().port + }) + if (version === 'latest') { + secondWebAppServer = http.createServer((req, res) => { + res.setHeader('Content-Type', 'text/html') + res.writeHead(200) + res.end(` + + +
Hella World
+ + `) + }) + secondWebAppServer.listen(0, 'localhost', () => { + secondWebAppPort = secondWebAppServer.address().port + }) + } }) after(async () => { @@ -1732,21 +1747,6 @@ moduleTypes.forEach(({ ...restEnvVars } = getCiVisEvpProxyConfig(receiver.port) - const secondWebAppPort = await getPort() - - secondWebAppServer = http.createServer((req, res) => { - res.setHeader('Content-Type', 'text/html') - res.writeHead(200) - res.end(` - - -
Hella World
- - `) - }) - - secondWebAppServer.listen(secondWebAppPort) - const specToRun = 'cypress/e2e/multi-origin.js' childProcess = exec( diff --git a/integration-tests/playwright/playwright.spec.js b/integration-tests/playwright/playwright.spec.js index 7ba2d460aa9..199ef81749f 100644 --- a/integration-tests/playwright/playwright.spec.js +++ b/integration-tests/playwright/playwright.spec.js @@ -5,7 +5,6 @@ const satisfies = require('semifies') const path = require('path') const fs = require('fs') -const getPort = require('get-port') const { assert } = require('chai') const { @@ -86,10 +85,12 @@ versions.forEach((version) => { // Install chromium (configured in integration-tests/playwright.config.js) // *Be advised*: this means that we'll only be using chromium for this test suite execSync('npx playwright install chromium', { cwd, env: restOfEnv, stdio: 'inherit' }) - webAppPort = await getPort() - webAppServer.listen(webAppPort) - webPortWithRedirect = await getPort() - webAppServerWithRedirect.listen(webPortWithRedirect) + webAppServer.listen(0, () => { + webAppPort = webAppServer.address().port + }) + webAppServerWithRedirect.listen(0, () => { + webPortWithRedirect = webAppServerWithRedirect.address().port + }) }) after(async () => { @@ -99,8 +100,7 @@ versions.forEach((version) => { }) beforeEach(async function () { - const port = await getPort() - receiver = await new FakeCiVisIntake(port).start() + receiver = await new FakeCiVisIntake().start() }) afterEach(async () => { diff --git a/integration-tests/selenium/selenium.spec.js b/integration-tests/selenium/selenium.spec.js index 491522e40ee..46721689c8b 100644 --- a/integration-tests/selenium/selenium.spec.js +++ b/integration-tests/selenium/selenium.spec.js @@ -3,7 +3,6 @@ const { exec } = require('child_process') const { assert } = require('chai') -const getPort = require('get-port') const { createSandbox, @@ -41,8 +40,9 @@ versionRange.forEach(version => { ]) cwd = sandbox.folder - webAppPort = await getPort() - webAppServer.listen(webAppPort) + webAppServer.listen(0, () => { + webAppPort = webAppServer.address().port + }) }) after(async function () { @@ -51,8 +51,7 @@ versionRange.forEach(version => { }) beforeEach(async function () { - const port = await getPort() - receiver = await new FakeCiVisIntake(port).start() + receiver = await new FakeCiVisIntake().start() }) afterEach(async () => { diff --git a/integration-tests/test-api-manual.spec.js b/integration-tests/test-api-manual.spec.js index c403168206a..0c2b9918830 100644 --- a/integration-tests/test-api-manual.spec.js +++ b/integration-tests/test-api-manual.spec.js @@ -2,7 +2,6 @@ const { exec } = require('child_process') -const getPort = require('get-port') const { assert } = require('chai') const { @@ -27,8 +26,7 @@ describe('test-api-manual', () => { }) beforeEach(async function () { - const port = await getPort() - receiver = await new FakeCiVisIntake(port).start() + receiver = await new FakeCiVisIntake().start() }) afterEach(async () => { diff --git a/integration-tests/test-optimization-startup.spec.js b/integration-tests/test-optimization-startup.spec.js index a15d49cf8ef..e44f241cd0f 100644 --- a/integration-tests/test-optimization-startup.spec.js +++ b/integration-tests/test-optimization-startup.spec.js @@ -2,7 +2,6 @@ const { exec } = require('child_process') -const getPort = require('get-port') const { assert } = require('chai') const { createSandbox } = require('./helpers') @@ -24,8 +23,7 @@ describe('test optimization startup', () => { beforeEach(async function () { 
processOutput = '' - const port = await getPort() - receiver = await new FakeCiVisIntake(port).start() + receiver = await new FakeCiVisIntake().start() }) afterEach(async () => { From dd9aad3a6f29ff332d797f80100e535953d257de Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mario=20Vidal=20Dom=C3=ADnguez?= <60353145+Mariovido@users.noreply.github.com> Date: Thu, 10 Jul 2025 14:26:50 +0200 Subject: [PATCH 16/53] [test-optimization] [SDTEST-2272] Fix `attempt_to_fix` tagging in Playwright (#6071) --- .../retried-test.js | 15 ++++++ .../playwright/playwright.spec.js | 47 +++++++++++++++++++ .../src/playwright.js | 4 +- 3 files changed, 65 insertions(+), 1 deletion(-) create mode 100644 integration-tests/ci-visibility/playwright-tests-retries-tagging/retried-test.js diff --git a/integration-tests/ci-visibility/playwright-tests-retries-tagging/retried-test.js b/integration-tests/ci-visibility/playwright-tests-retries-tagging/retried-test.js new file mode 100644 index 00000000000..5994693b392 --- /dev/null +++ b/integration-tests/ci-visibility/playwright-tests-retries-tagging/retried-test.js @@ -0,0 +1,15 @@ +'use strict' + +const { test, expect } = require('@playwright/test') + +test.beforeEach(async ({ page }) => { + await page.goto(process.env.PW_BASE_URL) +}) + +test.describe('retried test', () => { + test('should be retried', async ({ page }) => { + await expect(page.locator('.hello-world')).toHaveText([ + 'Hello World' + ]) + }) +}) diff --git a/integration-tests/playwright/playwright.spec.js b/integration-tests/playwright/playwright.spec.js index 199ef81749f..ddd97484eb4 100644 --- a/integration-tests/playwright/playwright.spec.js +++ b/integration-tests/playwright/playwright.spec.js @@ -1727,5 +1727,52 @@ versions.forEach((version) => { }) }) }) + + contextNewVersions('check retries tagging', () => { + it('does not send attempt to fix tags if test is retried and not attempt to fix', (done) => { + const receiverPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url === '/api/v2/citestcycle', (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const tests = events.filter(event => event.type === 'test').map(event => event.content) + + assert.equal(tests.length, NUM_RETRIES_EFD + 1) + for (const test of tests) { + assert.notProperty(test.meta, TEST_MANAGEMENT_ATTEMPT_TO_FIX_PASSED) + assert.notProperty(test.meta, TEST_HAS_FAILED_ALL_RETRIES) + } + }) + + receiver.setSettings({ + impacted_tests_enabled: true, + early_flake_detection: { + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD + } + }, + known_tests_enabled: true, + test_management: { + attempt_to_fix_retries: NUM_RETRIES_EFD + } + }) + + childProcess = exec( + './node_modules/.bin/playwright test -c playwright.config.js retried-test.js', + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + PW_BASE_URL: `http://localhost:${webAppPort}`, + TEST_DIR: './ci-visibility/playwright-tests-retries-tagging', + }, + stdio: 'pipe' + } + ) + + childProcess.on('exit', () => { + receiverPromise.then(done).catch(done) + }) + }) + }) }) }) diff --git a/packages/datadog-instrumentations/src/playwright.js b/packages/datadog-instrumentations/src/playwright.js index ffdb236dedb..a54a007d0cb 100644 --- a/packages/datadog-instrumentations/src/playwright.js +++ b/packages/datadog-instrumentations/src/playwright.js @@ -337,7 +337,9 @@ function testEndHandler (test, annotations, testStatus, error, isTimeout, isMain testStatuses.push(testStatus) } - if (testStatuses.length === 
testManagementAttemptToFixRetries + 1) { + const testProperties = getTestProperties(test) + + if (testStatuses.length === testManagementAttemptToFixRetries + 1 && testProperties.attemptToFix) { if (testStatuses.includes('fail')) { test._ddHasFailedAttemptToFixRetries = true } From 88acf660b4a469dd4f23bc8497ba1ddaef161131 Mon Sep 17 00:00:00 2001 From: Thomas Watson Date: Thu, 10 Jul 2025 15:46:11 +0200 Subject: [PATCH 17/53] ESLint: Clean up Node.js built-in ignores (#6059) --- eslint.config.mjs | 18 +++++++----------- integration-tests/appsec/esm-app/index.mjs | 2 -- packages/datadog-instrumentations/src/fetch.js | 2 -- .../src/runtime_metrics/runtime_metrics.js | 2 -- 4 files changed, 7 insertions(+), 17 deletions(-) diff --git a/eslint.config.mjs b/eslint.config.mjs index 29d13ed5273..a08ee47b670 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -339,6 +339,7 @@ export default [ 'n/no-process-exit': 'off', // TODO: Enable this rule once we have a plan to address it 'n/no-unsupported-features/node-builtins': ['error', { ignores: [ + 'Request', 'Response', 'async_hooks.createHook', 'async_hooks.executionAsyncId', @@ -502,7 +503,12 @@ export default [ } }, rules: { - 'n/no-unsupported-features/node-builtins': ['error', { allowExperimental: true }] + 'n/no-unsupported-features/node-builtins': ['error', { + allowExperimental: true, + ignores: [ + 'module.register' + ] + }] } }, { @@ -525,15 +531,6 @@ export default [ 'n/no-missing-require': 'off' } }, - { - name: 'dd-trace/scripts', - files: [ - 'scripts/**/*' - ], - rules: { - 'n/no-unsupported-features/node-builtins': ['error', { allowExperimental: true }] - } - }, { name: 'dd-trace/tests/all', files: TEST_FILES, @@ -561,7 +558,6 @@ export default [ 'mocha/no-top-level-hooks': 'off', 'n/handle-callback-err': 'off', 'n/no-missing-require': 'off', - 'n/no-unsupported-features/node-builtins': ['error', { allowExperimental: true }], 'require-await': 'off' } }, diff --git a/integration-tests/appsec/esm-app/index.mjs b/integration-tests/appsec/esm-app/index.mjs index 504e71d1c20..7c44644e990 100644 --- a/integration-tests/appsec/esm-app/index.mjs +++ b/integration-tests/appsec/esm-app/index.mjs @@ -1,5 +1,3 @@ -/* eslint n/no-unsupported-features/node-builtins: ['error', { ignores: ['module.register'] }] */ - import childProcess from 'node:child_process' import express from 'express' import Module from 'node:module' diff --git a/packages/datadog-instrumentations/src/fetch.js b/packages/datadog-instrumentations/src/fetch.js index 731a420a31d..8a1c855790e 100644 --- a/packages/datadog-instrumentations/src/fetch.js +++ b/packages/datadog-instrumentations/src/fetch.js @@ -1,7 +1,5 @@ 'use strict' -/* eslint n/no-unsupported-features/node-builtins: ['error', { ignores: ['fetch', 'Request'] }] */ - const { isInServerlessEnvironment } = require('../../dd-trace/src/serverless') if (globalThis.fetch) { diff --git a/packages/dd-trace/src/runtime_metrics/runtime_metrics.js b/packages/dd-trace/src/runtime_metrics/runtime_metrics.js index 8550c449d84..ab7865093a9 100644 --- a/packages/dd-trace/src/runtime_metrics/runtime_metrics.js +++ b/packages/dd-trace/src/runtime_metrics/runtime_metrics.js @@ -1,7 +1,5 @@ 'use strict' -/* eslint n/no-unsupported-features/node-builtins: ['error', { ignores: ['v8.GCProfiler'] }] */ - // TODO: capture every second and flush every 10 seconds const v8 = require('v8') From 5769703dbc94034b7e32ccea333bf59204906606 Mon Sep 17 00:00:00 2001 From: Roch Devost Date: Thu, 10 Jul 2025 14:43:52 -0400 Subject: [PATCH 18/53] 
move test optimization integration tests to dedicated folder (#6047) --- .github/workflows/test-optimization.yml | 13 +++++++++++++ .../automatic-log-submission.spec.js | 6 +++--- .../{ => ci-visibility}/test-api-manual.spec.js | 6 +++--- .../test-optimization-startup.spec.js | 4 ++-- package.json | 1 + 5 files changed, 22 insertions(+), 8 deletions(-) rename integration-tests/{ => ci-visibility}/automatic-log-submission.spec.js (98%) rename integration-tests/{ => ci-visibility}/test-api-manual.spec.js (95%) rename integration-tests/{ => ci-visibility}/test-optimization-startup.spec.js (94%) diff --git a/.github/workflows/test-optimization.yml b/.github/workflows/test-optimization.yml index 4efb4607ae4..b516300e6c7 100644 --- a/.github/workflows/test-optimization.yml +++ b/.github/workflows/test-optimization.yml @@ -34,6 +34,19 @@ jobs: env: GITHUB_TOKEN: ${{ steps.app-token.outputs.token }} + integration: + strategy: + matrix: + version: [oldest, maintenance, active, latest] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: ./.github/actions/node + with: + version: ${{ matrix.version }} + - uses: ./.github/actions/install + - run: yarn test:integration:testopt + integration-playwright: strategy: matrix: diff --git a/integration-tests/automatic-log-submission.spec.js b/integration-tests/ci-visibility/automatic-log-submission.spec.js similarity index 98% rename from integration-tests/automatic-log-submission.spec.js rename to integration-tests/ci-visibility/automatic-log-submission.spec.js index 6bc950e9d13..5e25ab89ffc 100644 --- a/integration-tests/automatic-log-submission.spec.js +++ b/integration-tests/ci-visibility/automatic-log-submission.spec.js @@ -8,9 +8,9 @@ const { createSandbox, getCiVisAgentlessConfig, getCiVisEvpProxyConfig -} = require('./helpers') -const { FakeCiVisIntake } = require('./ci-visibility-intake') -const webAppServer = require('./ci-visibility/web-app-server') +} = require('../helpers') +const { FakeCiVisIntake } = require('../ci-visibility-intake') +const webAppServer = require('./web-app-server') describe('test visibility automatic log submission', () => { let sandbox, cwd, receiver, childProcess, webAppPort diff --git a/integration-tests/test-api-manual.spec.js b/integration-tests/ci-visibility/test-api-manual.spec.js similarity index 95% rename from integration-tests/test-api-manual.spec.js rename to integration-tests/ci-visibility/test-api-manual.spec.js index 0c2b9918830..09ab26e9764 100644 --- a/integration-tests/test-api-manual.spec.js +++ b/integration-tests/ci-visibility/test-api-manual.spec.js @@ -7,11 +7,11 @@ const { assert } = require('chai') const { createSandbox, getCiVisAgentlessConfig -} = require('./helpers') -const { FakeCiVisIntake } = require('./ci-visibility-intake') +} = require('../helpers') +const { FakeCiVisIntake } = require('../ci-visibility-intake') const { TEST_STATUS -} = require('../packages/dd-trace/src/plugins/util/test') +} = require('../../packages/dd-trace/src/plugins/util/test') describe('test-api-manual', () => { let sandbox, cwd, receiver, childProcess diff --git a/integration-tests/test-optimization-startup.spec.js b/integration-tests/ci-visibility/test-optimization-startup.spec.js similarity index 94% rename from integration-tests/test-optimization-startup.spec.js rename to integration-tests/ci-visibility/test-optimization-startup.spec.js index e44f241cd0f..1e532f256f8 100644 --- a/integration-tests/test-optimization-startup.spec.js +++ 
b/integration-tests/ci-visibility/test-optimization-startup.spec.js @@ -4,8 +4,8 @@ const { exec } = require('child_process') const { assert } = require('chai') -const { createSandbox } = require('./helpers') -const { FakeCiVisIntake } = require('./ci-visibility-intake') +const { createSandbox } = require('../helpers') +const { FakeCiVisIntake } = require('../ci-visibility-intake') const packageManagers = ['yarn', 'npm', 'pnpm'] diff --git a/package.json b/package.json index 9abf47321e2..8ef0d384d08 100644 --- a/package.json +++ b/package.json @@ -55,6 +55,7 @@ "test:integration:playwright": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/playwright/*.spec.js\"", "test:integration:selenium": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/selenium/*.spec.js\"", "test:integration:vitest": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/vitest/*.spec.js\"", + "test:integration:testopt": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/ci-visibility/*.spec.js\"", "test:integration:profiler": "mocha --timeout 180000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/profiler/*.spec.js\"", "test:integration:plugins": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/datadog-plugin-@($(echo $PLUGINS))/test/integration-test/**/*.spec.js\"", "test:unit:plugins": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/datadog-instrumentations/test/@($(echo $PLUGINS)).spec.js\" \"packages/datadog-plugin-@($(echo $PLUGINS))/test/**/*.spec.js\" --exclude \"packages/datadog-plugin-@($(echo $PLUGINS))/test/integration-test/**/*.spec.js\"", From f237bf594c64aa83ccd25ca6aca557094f65bbda Mon Sep 17 00:00:00 2001 From: Roch Devost Date: Thu, 10 Jul 2025 14:44:44 -0400 Subject: [PATCH 19/53] fix sqs test race condition when deleting the queue (#6068) --- .../datadog-plugin-aws-sdk/test/sqs.spec.js | 16 +++++---- .../test/index.spec.js | 28 +++++++++------ .../test/integration-test/server.mjs | 2 +- .../datadog-plugin-mysql2/test/index.spec.js | 22 +++++++----- .../test/integration-test/server.mjs | 2 +- .../test/index.spec.js | 16 +++++---- .../test/integration-test/server.mjs | 2 +- packages/dd-trace/test/setup/mocha.js | 36 ++++++++++--------- .../test/setup/services/elasticsearch.js | 2 +- .../dd-trace/test/setup/services/mysql.js | 2 +- 10 files changed, 75 insertions(+), 53 deletions(-) diff --git a/packages/datadog-plugin-aws-sdk/test/sqs.spec.js b/packages/datadog-plugin-aws-sdk/test/sqs.spec.js index 5c00003a66d..913df5bab62 100644 --- a/packages/datadog-plugin-aws-sdk/test/sqs.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/sqs.spec.js @@ -99,10 +99,10 @@ describe('Plugin', () => { ) withNamingSchema( - (done) => sqs.sendMessage({ + () => new Promise((resolve, reject) => sqs.sendMessage({ MessageBody: 'test body', QueueUrl - }, (err) => err && done(err)), + }, (err) => err ? reject(err) : resolve())), rawExpectedSchema.producer, { desc: 'producer' @@ -110,17 +110,17 @@ describe('Plugin', () => { ) withNamingSchema( - (done) => sqs.sendMessage({ + () => new Promise((resolve, reject) => sqs.sendMessage({ MessageBody: 'test body', QueueUrl }, (err) => { - if (err) return done(err) + if (err) return reject(err) sqs.receiveMessage({ QueueUrl, MessageAttributeNames: ['.*'] - }, (err) => err && done(err)) - }), + }, (err) => err ? 
reject(err) : resolve()) + })), rawExpectedSchema.consumer, { desc: 'consumer' @@ -128,7 +128,9 @@ describe('Plugin', () => { ) withNamingSchema( - (done) => sqs.listQueues({}, (err) => err && done(err)), + () => new Promise((resolve, reject) => { + sqs.listQueues({}, (err) => err ? reject(err) : resolve()) + }), rawExpectedSchema.client, { desc: 'client' diff --git a/packages/datadog-plugin-elasticsearch/test/index.spec.js b/packages/datadog-plugin-elasticsearch/test/index.spec.js index cb8107518df..635e824ca37 100644 --- a/packages/datadog-plugin-elasticsearch/test/index.spec.js +++ b/packages/datadog-plugin-elasticsearch/test/index.spec.js @@ -304,13 +304,17 @@ describe('Plugin', () => { client.ping().catch(done) }) - withNamingSchema( - () => client.search( - { index: 'logstash-2000.01.01', body: {} }, - hasCallbackSupport ? () => {} : undefined - ), - rawExpectedSchema.outbound - ) + describe('test', () => { + withNamingSchema( + () => { + client.search( + { index: 'logstash-2000.01.01', body: {} }, + hasCallbackSupport ? () => {} : undefined + ) + }, + rawExpectedSchema.outbound + ) + }) }) }) @@ -370,10 +374,12 @@ describe('Plugin', () => { }) withNamingSchema( - () => client.search( - { index: 'logstash-2000.01.01', body: {} }, - hasCallbackSupport ? () => {} : undefined - ), + () => { + client.search( + { index: 'logstash-2000.01.01', body: {} }, + hasCallbackSupport ? () => {} : undefined + ) + }, { v0: { opName: 'elasticsearch.query', diff --git a/packages/datadog-plugin-elasticsearch/test/integration-test/server.mjs b/packages/datadog-plugin-elasticsearch/test/integration-test/server.mjs index f3f2cc1d9a7..ff47d43b773 100644 --- a/packages/datadog-plugin-elasticsearch/test/integration-test/server.mjs +++ b/packages/datadog-plugin-elasticsearch/test/integration-test/server.mjs @@ -1,6 +1,6 @@ import 'dd-trace/init.js' import { Client } from '@elastic/elasticsearch' -const client = new Client({ node: 'http://localhost:9200' }) +const client = new Client({ node: 'http://127.0.0.1:9200' }) await client.ping() diff --git a/packages/datadog-plugin-mysql2/test/index.spec.js b/packages/datadog-plugin-mysql2/test/index.spec.js index 1c6576a0209..3296e71473b 100644 --- a/packages/datadog-plugin-mysql2/test/index.spec.js +++ b/packages/datadog-plugin-mysql2/test/index.spec.js @@ -34,7 +34,7 @@ describe('Plugin', () => { mysql2 = proxyquire(`../../../versions/mysql2@${version}`, {}).get() connection = mysql2.createConnection({ - host: 'localhost', + host: '127.0.0.1', user: 'root', database: 'db' }) @@ -51,7 +51,9 @@ describe('Plugin', () => { ) withNamingSchema( - () => connection.query('SELECT 1', (_) => {}), + () => new Promise((resolve) => { + connection.query('SELECT 1', (_) => resolve()) + }), rawExpectedSchema.outbound ) @@ -213,7 +215,7 @@ describe('Plugin', () => { mysql2 = proxyquire(`../../../versions/mysql2@${version}`, {}).get() connection = mysql2.createConnection({ - host: 'localhost', + host: '127.0.0.1', user: 'root', database: 'db' }) @@ -222,7 +224,9 @@ describe('Plugin', () => { }) withNamingSchema( - () => connection.query('SELECT 1', (_) => {}), + () => new Promise((resolve) => { + connection.query('SELECT 1', (_) => resolve()) + }), { v0: { opName: 'mysql.query', @@ -262,7 +266,7 @@ describe('Plugin', () => { mysql2 = proxyquire(`../../../versions/mysql2@${version}`, {}).get() connection = mysql2.createConnection({ - host: 'localhost', + host: '127.0.0.1', user: 'root', database: 'db' }) @@ -271,7 +275,9 @@ describe('Plugin', () => { }) withNamingSchema( - () => 
connection.query('SELECT 1', (_) => {}), + () => new Promise((resolve) => { + connection.query('SELECT 1', (_) => resolve()) + }), { v0: { opName: 'mysql.query', @@ -288,7 +294,7 @@ describe('Plugin', () => { agent.assertSomeTraces(traces => { expect(traces[0][0]).to.have.property('service', 'custom') sinon.assert.calledWith(serviceSpy, sinon.match({ - host: 'localhost', + host: '127.0.0.1', user: 'root', database: 'db' })) @@ -314,7 +320,7 @@ describe('Plugin', () => { pool = mysql2.createPool({ connectionLimit: 1, - host: 'localhost', + host: '127.0.0.1', user: 'root' }) }) diff --git a/packages/datadog-plugin-mysql2/test/integration-test/server.mjs b/packages/datadog-plugin-mysql2/test/integration-test/server.mjs index 0b5c52ffd10..733248f50be 100644 --- a/packages/datadog-plugin-mysql2/test/integration-test/server.mjs +++ b/packages/datadog-plugin-mysql2/test/integration-test/server.mjs @@ -2,7 +2,7 @@ import 'dd-trace/init.js' import mysql from 'mysql2' const conn = { - host: 'localhost', + host: '127.0.0.1', user: 'root', database: 'db', port: 3306 diff --git a/packages/datadog-plugin-opensearch/test/index.spec.js b/packages/datadog-plugin-opensearch/test/index.spec.js index d8ccc7f9e2c..2c1c8326706 100644 --- a/packages/datadog-plugin-opensearch/test/index.spec.js +++ b/packages/datadog-plugin-opensearch/test/index.spec.js @@ -33,7 +33,7 @@ describe('Plugin', () => { opensearch = metaModule.get() client = new opensearch.Client({ - node: 'http://localhost:9201' + node: 'http://127.0.0.1:9201' }) }) @@ -67,7 +67,7 @@ describe('Plugin', () => { 'opensearch.url': '/docs/_search', 'opensearch.body': '{"query":{"match_all":{}}}', component: 'opensearch', - 'out.host': 'localhost' + 'out.host': '127.0.0.1' } }) .then(done) @@ -213,7 +213,9 @@ describe('Plugin', () => { }) withNamingSchema( - () => client.search({ index: 'logstash-2000.01.01', body: {} }), + () => { + client.search({ index: 'logstash-2000.01.01', body: {} }) + }, rawExpectedSchema.outbound ) }) @@ -239,7 +241,7 @@ describe('Plugin', () => { beforeEach(() => { opensearch = require(`../../../versions/${moduleName}@${version}`).get() client = new opensearch.Client({ - node: 'http://localhost:9201' + node: 'http://127.0.0.1:9201' }) }) @@ -258,7 +260,7 @@ describe('Plugin', () => { }).catch(() => { // Ignore index_not_found_exception for peer service assertion }), - 'localhost', + '127.0.0.1', 'out.host' ) @@ -291,7 +293,9 @@ describe('Plugin', () => { }) withNamingSchema( - () => client.search({ index: 'logstash-2000.01.01', body: {} }), + () => { + client.search({ index: 'logstash-2000.01.01', body: {} }) + }, { v0: { opName: 'opensearch.query', diff --git a/packages/datadog-plugin-opensearch/test/integration-test/server.mjs b/packages/datadog-plugin-opensearch/test/integration-test/server.mjs index 21be1cead43..23e36041d32 100644 --- a/packages/datadog-plugin-opensearch/test/integration-test/server.mjs +++ b/packages/datadog-plugin-opensearch/test/integration-test/server.mjs @@ -1,5 +1,5 @@ import 'dd-trace/init.js' import opensearch from '@opensearch-project/opensearch' -const client = new opensearch.Client({ node: 'http://localhost:9201' }) +const client = new opensearch.Client({ node: 'http://127.0.0.1:9201' }) await client.ping() diff --git a/packages/dd-trace/test/setup/mocha.js b/packages/dd-trace/test/setup/mocha.js index 72bc28a6a83..ef684349b92 100644 --- a/packages/dd-trace/test/setup/mocha.js +++ b/packages/dd-trace/test/setup/mocha.js @@ -62,8 +62,9 @@ function withNamingSchema ( it('should conform to the naming 
schema', function () { this.timeout(10000) + return new Promise((resolve, reject) => { - agent + const agentPromise = agent .assertSomeTraces(traces => { const span = selectSpan(traces) const expectedOpName = typeof opName === 'function' @@ -76,9 +77,10 @@ function withNamingSchema ( expect(span).to.have.property('name', expectedOpName) expect(span).to.have.property('service', expectedServiceName) }) - .then(resolve) - .catch(reject) - spanProducerFn(reject) + + const testPromise = spanProducerFn(reject) + + Promise.all([testPromise, agentPromise]).then(resolve, reject) }) }) }) @@ -102,19 +104,21 @@ function withNamingSchema ( const { serviceName } = expected.v1 - it('should pass service name through', done => { - agent - .assertSomeTraces(traces => { - const span = traces[0][0] - const expectedServiceName = typeof serviceName === 'function' - ? serviceName() - : serviceName - expect(span).to.have.property('service', expectedServiceName) - }) - .then(done) - .catch(done) + it('should pass service name through', () => { + return new Promise((resolve, reject) => { + const agentPromise = agent + .assertSomeTraces(traces => { + const span = traces[0][0] + const expectedServiceName = typeof serviceName === 'function' + ? serviceName() + : serviceName + expect(span).to.have.property('service', expectedServiceName) + }) + + const testPromise = spanProducerFn(reject) - spanProducerFn(done) + Promise.all([testPromise, agentPromise]).then(resolve, reject) + }) }) }) }) diff --git a/packages/dd-trace/test/setup/services/elasticsearch.js b/packages/dd-trace/test/setup/services/elasticsearch.js index 8df222d39a6..c6dcf2698ac 100644 --- a/packages/dd-trace/test/setup/services/elasticsearch.js +++ b/packages/dd-trace/test/setup/services/elasticsearch.js @@ -9,7 +9,7 @@ function waitForElasticsearch () { operation.attempt(currentAttempt => { // Not using ES client because it's buggy for initial connection. 
- axios.get('http://localhost:9200/_cluster/health?wait_for_status=green&local=true&timeout=100ms') + axios.get('http://127.0.0.1:9200/_cluster/health?wait_for_status=green&local=true&timeout=100ms') .then(() => resolve()) .catch(err => { if (operation.retry(err)) return diff --git a/packages/dd-trace/test/setup/services/mysql.js b/packages/dd-trace/test/setup/services/mysql.js index 59ebce5f8e5..ffe33c8a35b 100644 --- a/packages/dd-trace/test/setup/services/mysql.js +++ b/packages/dd-trace/test/setup/services/mysql.js @@ -9,7 +9,7 @@ function waitForMysql () { operation.attempt(currentAttempt => { const connection = mysql.createConnection({ - host: 'localhost', + host: '127.0.0.1', user: 'root', database: 'db' }) From af62fda46b51cba8016a3b956b77f1f1070f194d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mario=20Vidal=20Dom=C3=ADnguez?= <60353145+Mariovido@users.noreply.github.com> Date: Fri, 11 Jul 2025 10:30:17 +0200 Subject: [PATCH 20/53] [test-optimization] [SDTEST-2264] Upgrade attempt_to_fix to `v5` (#6074) --- integration-tests/cucumber/cucumber.spec.js | 2 +- integration-tests/cypress/cypress.spec.js | 2 +- integration-tests/jest/jest.spec.js | 2 +- integration-tests/mocha/mocha.spec.js | 2 +- .../playwright/playwright.spec.js | 2 +- integration-tests/vitest/vitest.spec.js | 2 +- .../src/cypress-plugin.js | 9 ++- .../exporters/git/git_metadata.js | 2 +- .../get-test-management-tests.js | 8 ++- packages/dd-trace/src/plugins/ci_plugin.js | 9 ++- packages/dd-trace/src/plugins/util/git.js | 43 +++++++++++-- packages/dd-trace/src/plugins/util/tags.js | 16 ++++- packages/dd-trace/src/plugins/util/test.js | 11 ++-- .../test/plugins/util/ci-env/appveyor.json | 4 +- .../plugins/util/ci-env/azurepipelines.json | 4 +- .../test/plugins/util/ci-env/bitbucket.json | 4 +- .../test/plugins/util/ci-env/bitrise.json | 4 +- .../test/plugins/util/ci-env/buddy.json | 4 +- .../test/plugins/util/ci-env/buildkite.json | 4 +- .../test/plugins/util/ci-env/circleci.json | 4 +- .../test/plugins/util/ci-env/gitlab.json | 4 +- .../test/plugins/util/ci-env/jenkins.json | 4 +- .../test/plugins/util/ci-env/travisci.json | 2 +- .../plugins/util/ci-env/usersupplied.json | 4 +- .../dd-trace/test/plugins/util/git.spec.js | 64 +++++++++++++++++-- 25 files changed, 162 insertions(+), 54 deletions(-) diff --git a/integration-tests/cucumber/cucumber.spec.js b/integration-tests/cucumber/cucumber.spec.js index c7920b72aad..8112cf662db 100644 --- a/integration-tests/cucumber/cucumber.spec.js +++ b/integration-tests/cucumber/cucumber.spec.js @@ -2509,7 +2509,7 @@ versions.forEach(version => { assert.equal(metadata.test[DD_CAPABILITIES_IMPACTED_TESTS], '1') assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_QUARANTINE], '1') assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_DISABLE], '1') - assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_ATTEMPT_TO_FIX], '4') + assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_ATTEMPT_TO_FIX], '5') assert.equal(metadata.test[DD_CAPABILITIES_FAILED_TEST_REPLAY], '1') // capabilities logic does not overwrite test session name assert.equal(metadata.test[TEST_SESSION_NAME], 'my-test-session-name') diff --git a/integration-tests/cypress/cypress.spec.js b/integration-tests/cypress/cypress.spec.js index fde07315ebe..1f2bd27995c 100644 --- a/integration-tests/cypress/cypress.spec.js +++ b/integration-tests/cypress/cypress.spec.js @@ -2264,7 +2264,7 @@ moduleTypes.forEach(({ assert.equal(metadata.test[DD_CAPABILITIES_IMPACTED_TESTS], '1') 
assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_QUARANTINE], '1') assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_DISABLE], '1') - assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_ATTEMPT_TO_FIX], '4') + assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_ATTEMPT_TO_FIX], '5') assert.equal(metadata.test[DD_CAPABILITIES_FAILED_TEST_REPLAY], '1') // capabilities logic does not overwrite test session name assert.equal(metadata.test[TEST_SESSION_NAME], 'my-test-session-name') diff --git a/integration-tests/jest/jest.spec.js b/integration-tests/jest/jest.spec.js index 35f910a47fd..854c3e2d54b 100644 --- a/integration-tests/jest/jest.spec.js +++ b/integration-tests/jest/jest.spec.js @@ -3535,7 +3535,7 @@ describe('jest CommonJS', () => { assert.equal(metadata.test[DD_CAPABILITIES_IMPACTED_TESTS], '1') assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_QUARANTINE], '1') assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_DISABLE], '1') - assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_ATTEMPT_TO_FIX], '4') + assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_ATTEMPT_TO_FIX], '5') assert.equal(metadata.test[DD_CAPABILITIES_FAILED_TEST_REPLAY], '1') // capabilities logic does not overwrite test session name assert.equal(metadata.test[TEST_SESSION_NAME], 'my-test-session-name') diff --git a/integration-tests/mocha/mocha.spec.js b/integration-tests/mocha/mocha.spec.js index 3e9ff44bb21..ff6b29257ad 100644 --- a/integration-tests/mocha/mocha.spec.js +++ b/integration-tests/mocha/mocha.spec.js @@ -3107,7 +3107,7 @@ describe('mocha CommonJS', function () { assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_ATTEMPT_TO_FIX], undefined) } else { assert.equal(metadata.test[DD_CAPABILITIES_TEST_IMPACT_ANALYSIS], '1') - assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_ATTEMPT_TO_FIX], '4') + assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_ATTEMPT_TO_FIX], '5') } assert.equal(metadata.test[DD_CAPABILITIES_EARLY_FLAKE_DETECTION], '1') assert.equal(metadata.test[DD_CAPABILITIES_AUTO_TEST_RETRIES], '1') diff --git a/integration-tests/playwright/playwright.spec.js b/integration-tests/playwright/playwright.spec.js index ddd97484eb4..eccc4e908b1 100644 --- a/integration-tests/playwright/playwright.spec.js +++ b/integration-tests/playwright/playwright.spec.js @@ -1370,7 +1370,7 @@ versions.forEach((version) => { assert.equal(metadata.test[DD_CAPABILITIES_IMPACTED_TESTS], '1') assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_QUARANTINE], '1') assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_DISABLE], '1') - assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_ATTEMPT_TO_FIX], '4') + assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_ATTEMPT_TO_FIX], '5') assert.equal(metadata.test[DD_CAPABILITIES_FAILED_TEST_REPLAY], '1') } else { assert.equal(metadata.test[DD_CAPABILITIES_EARLY_FLAKE_DETECTION], undefined) diff --git a/integration-tests/vitest/vitest.spec.js b/integration-tests/vitest/vitest.spec.js index 88bc2302277..520f0907fbe 100644 --- a/integration-tests/vitest/vitest.spec.js +++ b/integration-tests/vitest/vitest.spec.js @@ -1867,7 +1867,7 @@ versions.forEach((version) => { assert.equal(metadata.test[DD_CAPABILITIES_IMPACTED_TESTS], '1') assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_QUARANTINE], '1') assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_DISABLE], '1') - assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_ATTEMPT_TO_FIX], '4') + 
assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_ATTEMPT_TO_FIX], '5') assert.equal(metadata.test[DD_CAPABILITIES_FAILED_TEST_REPLAY], '1') // capabilities logic does not overwrite test session name assert.equal(metadata.test[TEST_SESSION_NAME], 'my-test-session-name') diff --git a/packages/datadog-plugin-cypress/src/cypress-plugin.js b/packages/datadog-plugin-cypress/src/cypress-plugin.js index 91e8e8c551b..5d4084659c4 100644 --- a/packages/datadog-plugin-cypress/src/cypress-plugin.js +++ b/packages/datadog-plugin-cypress/src/cypress-plugin.js @@ -78,7 +78,8 @@ const { GIT_TAG, GIT_PULL_REQUEST_BASE_BRANCH_SHA, GIT_COMMIT_HEAD_SHA, - GIT_PULL_REQUEST_BASE_BRANCH + GIT_PULL_REQUEST_BASE_BRANCH, + GIT_COMMIT_HEAD_MESSAGE } = require('../../dd-trace/src/plugins/util/tags') const { OS_VERSION, @@ -240,7 +241,8 @@ class CypressPlugin { [GIT_COMMIT_MESSAGE]: commitMessage, [GIT_TAG]: tag, [GIT_PULL_REQUEST_BASE_BRANCH_SHA]: pullRequestBaseSha, - [GIT_COMMIT_HEAD_SHA]: commitHeadSha + [GIT_COMMIT_HEAD_SHA]: commitHeadSha, + [GIT_COMMIT_HEAD_MESSAGE]: commitHeadMessage } = this.testEnvironmentMetadata this.repositoryRoot = repositoryRoot || process.cwd() @@ -260,7 +262,8 @@ class CypressPlugin { commitMessage, tag, pullRequestBaseSha, - commitHeadSha + commitHeadSha, + commitHeadMessage } this.finishedTestsByFile = {} this.testStatuses = {} diff --git a/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js b/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js index b74a148202f..72bf23987f1 100644 --- a/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js +++ b/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js @@ -289,7 +289,7 @@ function sendGitMetadata (url, { isEvpProxy, evpProxyPrefix }, configRepositoryU // Otherwise we unshallow and get commits to upload again log.debug('It is shallow clone, unshallowing...') if (!isFalse(getEnvironmentVariable('DD_CIVISIBILITY_GIT_UNSHALLOW_ENABLED'))) { - unshallowRepository() + unshallowRepository(false) } // The latest commits change after unshallowing diff --git a/packages/dd-trace/src/ci-visibility/test-management/get-test-management-tests.js b/packages/dd-trace/src/ci-visibility/test-management/get-test-management-tests.js index f31f855668c..b2b783c2e80 100644 --- a/packages/dd-trace/src/ci-visibility/test-management/get-test-management-tests.js +++ b/packages/dd-trace/src/ci-visibility/test-management/get-test-management-tests.js @@ -11,7 +11,9 @@ function getTestManagementTests ({ isGzipCompatible, repositoryUrl, commitMessage, - sha + sha, + commitHeadSha, + commitHeadMessage }, done) { const options = { path: '/api/v2/test/libraries/test-management/tests', @@ -45,8 +47,8 @@ function getTestManagementTests ({ type: 'ci_app_libraries_tests_request', attributes: { repository_url: repositoryUrl, - commit_message: commitMessage, - sha + commit_message: commitHeadMessage || commitMessage, + sha: commitHeadSha || sha } } }) diff --git a/packages/dd-trace/src/plugins/ci_plugin.js b/packages/dd-trace/src/plugins/ci_plugin.js index f57cb685b7e..76596b97b47 100644 --- a/packages/dd-trace/src/plugins/ci_plugin.js +++ b/packages/dd-trace/src/plugins/ci_plugin.js @@ -55,7 +55,8 @@ const { GIT_TAG, GIT_PULL_REQUEST_BASE_BRANCH_SHA, GIT_COMMIT_HEAD_SHA, - GIT_PULL_REQUEST_BASE_BRANCH + GIT_PULL_REQUEST_BASE_BRANCH, + GIT_COMMIT_HEAD_MESSAGE } = require('./util/tags') const { OS_VERSION, OS_PLATFORM, OS_ARCHITECTURE, RUNTIME_NAME, RUNTIME_VERSION } = require('./util/env') const getDiClient = 
require('../ci-visibility/dynamic-instrumentation') @@ -313,7 +314,8 @@ module.exports = class CiPlugin extends Plugin { [GIT_COMMIT_MESSAGE]: commitMessage, [GIT_TAG]: tag, [GIT_PULL_REQUEST_BASE_BRANCH_SHA]: pullRequestBaseSha, - [GIT_COMMIT_HEAD_SHA]: commitHeadSha + [GIT_COMMIT_HEAD_SHA]: commitHeadSha, + [GIT_COMMIT_HEAD_MESSAGE]: commitHeadMessage } = this.testEnvironmentMetadata this.repositoryRoot = repositoryRoot || process.cwd() @@ -335,7 +337,8 @@ module.exports = class CiPlugin extends Plugin { commitMessage, tag, pullRequestBaseSha, - commitHeadSha + commitHeadSha, + commitHeadMessage } } diff --git a/packages/dd-trace/src/plugins/util/git.js b/packages/dd-trace/src/plugins/util/git.js index a94c1afcf5e..22225085f97 100644 --- a/packages/dd-trace/src/plugins/util/git.js +++ b/packages/dd-trace/src/plugins/util/git.js @@ -18,7 +18,14 @@ const { GIT_COMMIT_AUTHOR_DATE, GIT_COMMIT_AUTHOR_EMAIL, GIT_COMMIT_AUTHOR_NAME, - CI_WORKSPACE_PATH + CI_WORKSPACE_PATH, + GIT_COMMIT_HEAD_AUTHOR_DATE, + GIT_COMMIT_HEAD_AUTHOR_EMAIL, + GIT_COMMIT_HEAD_AUTHOR_NAME, + GIT_COMMIT_HEAD_COMMITER_DATE, + GIT_COMMIT_HEAD_COMMITER_EMAIL, + GIT_COMMIT_HEAD_COMMITER_NAME, + GIT_COMMIT_HEAD_MESSAGE } = require('./tags') const { incrementCountMetric, @@ -119,7 +126,7 @@ function getGitVersion () { } } -function unshallowRepository () { +function unshallowRepository (parentOnly = false) { const gitVersion = getGitVersion() if (!gitVersion) { log.warn('Git version could not be extracted, so git unshallow will not proceed') @@ -134,7 +141,7 @@ function unshallowRepository () { const baseGitOptions = [ 'fetch', - '--shallow-since="1 month ago"', + parentOnly ? '--deepen=1' : '--shallow-since="1 month ago"', '--update-shallow', '--filter=blob:none', '--recurse-submodules=no', @@ -453,7 +460,8 @@ function getGitMetadata (ciMetadata) { commitMessage, authorName: ciAuthorName, authorEmail: ciAuthorEmail, - ciWorkspacePath + ciWorkspacePath, + headCommitSha } = ciMetadata // With stdio: 'pipe', errors in this command will not be output to the parent process, @@ -472,7 +480,32 @@ function getGitMetadata (ciMetadata) { commitMessage || sanitizedExec('git', ['show', '-s', '--format=%B'], null, null, null, false), [GIT_BRANCH]: branch || sanitizedExec('git', ['rev-parse', '--abbrev-ref', 'HEAD']), [GIT_COMMIT_SHA]: commitSHA || sanitizedExec('git', ['rev-parse', 'HEAD']), - [CI_WORKSPACE_PATH]: ciWorkspacePath || sanitizedExec('git', ['rev-parse', '--show-toplevel']) + [CI_WORKSPACE_PATH]: ciWorkspacePath || sanitizedExec('git', ['rev-parse', '--show-toplevel']), + } + + if (headCommitSha) { + if (isShallowRepository()) { + unshallowRepository(true) + } + + tags[GIT_COMMIT_HEAD_MESSAGE] = + sanitizedExec('git', ['show', '-s', '--format=%B', headCommitSha], null, null, null, false) + + const [ + headAuthorName, + headAuthorEmail, + headAuthorDate, + headCommitterName, + headCommitterEmail, + headCommitterDate + ] = sanitizedExec('git', ['show', '-s', '--format=%an,%ae,%aI,%cn,%ce,%cI', headCommitSha]).split(',') + + tags[GIT_COMMIT_HEAD_AUTHOR_DATE] = headAuthorDate + tags[GIT_COMMIT_HEAD_AUTHOR_EMAIL] = headAuthorEmail + tags[GIT_COMMIT_HEAD_AUTHOR_NAME] = headAuthorName + tags[GIT_COMMIT_HEAD_COMMITER_DATE] = headCommitterDate + tags[GIT_COMMIT_HEAD_COMMITER_EMAIL] = headCommitterEmail + tags[GIT_COMMIT_HEAD_COMMITER_NAME] = headCommitterName } const entries = [ diff --git a/packages/dd-trace/src/plugins/util/tags.js b/packages/dd-trace/src/plugins/util/tags.js index ec3a818396a..8acd7e44154 100644 --- 
a/packages/dd-trace/src/plugins/util/tags.js +++ b/packages/dd-trace/src/plugins/util/tags.js @@ -11,7 +11,14 @@ const GIT_COMMIT_COMMITTER_NAME = 'git.commit.committer.name' const GIT_COMMIT_AUTHOR_DATE = 'git.commit.author.date' const GIT_COMMIT_AUTHOR_EMAIL = 'git.commit.author.email' const GIT_COMMIT_AUTHOR_NAME = 'git.commit.author.name' -const GIT_COMMIT_HEAD_SHA = 'git.commit.head_sha' +const GIT_COMMIT_HEAD_SHA = 'git.commit.head.sha' +const GIT_COMMIT_HEAD_MESSAGE = 'git.commit.head.message' +const GIT_COMMIT_HEAD_AUTHOR_DATE = 'git.commit.head.author.date' +const GIT_COMMIT_HEAD_AUTHOR_EMAIL = 'git.commit.head.author.email' +const GIT_COMMIT_HEAD_AUTHOR_NAME = 'git.commit.head.author.name' +const GIT_COMMIT_HEAD_COMMITER_DATE = 'git.commit.head.commiter.date' +const GIT_COMMIT_HEAD_COMMITER_EMAIL = 'git.commit.head.commiter.email' +const GIT_COMMIT_HEAD_COMMITER_NAME = 'git.commit.head.commiter.name' const GIT_PULL_REQUEST_BASE_BRANCH_SHA = 'git.pull_request.base_branch_sha' const GIT_PULL_REQUEST_BASE_BRANCH = 'git.pull_request.base_branch' @@ -45,6 +52,13 @@ module.exports = { GIT_COMMIT_AUTHOR_EMAIL, GIT_COMMIT_AUTHOR_NAME, GIT_COMMIT_HEAD_SHA, + GIT_COMMIT_HEAD_MESSAGE, + GIT_COMMIT_HEAD_AUTHOR_DATE, + GIT_COMMIT_HEAD_AUTHOR_EMAIL, + GIT_COMMIT_HEAD_AUTHOR_NAME, + GIT_COMMIT_HEAD_COMMITER_DATE, + GIT_COMMIT_HEAD_COMMITER_EMAIL, + GIT_COMMIT_HEAD_COMMITER_NAME, GIT_PULL_REQUEST_BASE_BRANCH_SHA, GIT_PULL_REQUEST_BASE_BRANCH, CI_PIPELINE_ID, diff --git a/packages/dd-trace/src/plugins/util/test.js b/packages/dd-trace/src/plugins/util/test.js index 3c3924495ac..63abcd10c97 100644 --- a/packages/dd-trace/src/plugins/util/test.js +++ b/packages/dd-trace/src/plugins/util/test.js @@ -34,7 +34,8 @@ const { GIT_COMMIT_MESSAGE, CI_WORKSPACE_PATH, CI_PIPELINE_URL, - CI_JOB_NAME + CI_JOB_NAME, + GIT_COMMIT_HEAD_SHA } = require('./tags') const id = require('../../id') const { @@ -448,7 +449,8 @@ function getTestEnvironmentMetadata (testFramework, config) { [GIT_COMMIT_AUTHOR_NAME]: authorName, [GIT_COMMIT_AUTHOR_EMAIL]: authorEmail, [GIT_COMMIT_MESSAGE]: commitMessage, - [CI_WORKSPACE_PATH]: ciWorkspacePath + [CI_WORKSPACE_PATH]: ciWorkspacePath, + [GIT_COMMIT_HEAD_SHA]: headCommitSha } = ciMetadata const gitMetadata = getGitMetadata({ @@ -459,7 +461,8 @@ function getTestEnvironmentMetadata (testFramework, config) { authorName, authorEmail, commitMessage, - ciWorkspacePath + ciWorkspacePath, + headCommitSha }) const userProvidedGitMetadata = getUserProviderGitMetadata() @@ -977,7 +980,7 @@ function getLibraryCapabilitiesTags (testFramework, isParallel, frameworkVersion : undefined, [DD_CAPABILITIES_TEST_MANAGEMENT_ATTEMPT_TO_FIX]: isAttemptToFixSupported(testFramework, isParallel, frameworkVersion) - ? '4' + ? '5' : undefined, [DD_CAPABILITIES_FAILED_TEST_REPLAY]: isFailedTestReplaySupported(testFramework, frameworkVersion) ? 
'1' diff --git a/packages/dd-trace/test/plugins/util/ci-env/appveyor.json b/packages/dd-trace/test/plugins/util/ci-env/appveyor.json index 53f770f6def..6fdf89a6a35 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/appveyor.json +++ b/packages/dd-trace/test/plugins/util/ci-env/appveyor.json @@ -369,7 +369,7 @@ "git.branch": "pr", "git.commit.author.email": "appveyor-commit-author-email@datadoghq.com", "git.commit.author.name": "appveyor-commit-author-name", - "git.commit.head_sha": "724faca55efebf66fc15bfccc34577c64c5480bd", + "git.commit.head.sha": "724faca55efebf66fc15bfccc34577c64c5480bd", "git.commit.message": "appveyor-commit-message\nappveyor-commit-message-extended", "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.pull_request.base_branch": "master", @@ -405,7 +405,7 @@ "git.branch": "pr", "git.commit.author.email": "appveyor-commit-author-email@datadoghq.com", "git.commit.author.name": "appveyor-commit-author-name", - "git.commit.head_sha": "724faca55efebf66fc15bfccc34577c64c5480bd", + "git.commit.head.sha": "724faca55efebf66fc15bfccc34577c64c5480bd", "git.commit.message": "appveyor-commit-message\nappveyor-commit-message-extended", "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.pull_request.base_branch": "master", diff --git a/packages/dd-trace/test/plugins/util/ci-env/azurepipelines.json b/packages/dd-trace/test/plugins/util/ci-env/azurepipelines.json index 2d9c577cc49..59aab7588d7 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/azurepipelines.json +++ b/packages/dd-trace/test/plugins/util/ci-env/azurepipelines.json @@ -658,7 +658,7 @@ "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", - "TESTING_TEST_OPTIMIZATION_TEST_CASE_NAME": "http-repository-url-no-git-suffix", + "DD_TEST_CASE_NAME": "http-repository-url-no-git-suffix", "SYSTEM_JOBID": "azure-pipelines-job-id", "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", @@ -687,7 +687,7 @@ "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", - "TESTING_TEST_OPTIMIZATION_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix", + "DD_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix", "SYSTEM_JOBID": "azure-pipelines-job-id", "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", diff --git a/packages/dd-trace/test/plugins/util/ci-env/bitbucket.json b/packages/dd-trace/test/plugins/util/ci-env/bitbucket.json index abb86f4fd4f..98ecc422f03 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/bitbucket.json +++ b/packages/dd-trace/test/plugins/util/ci-env/bitbucket.json @@ -407,7 +407,7 @@ "BITBUCKET_GIT_HTTP_ORIGIN": "https://bitbucket.org/DataDog/dogweb", "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo", - "TESTING_TEST_OPTIMIZATION_TEST_CASE_NAME": "http-repository-url-no-git-suffix" + "DD_TEST_CASE_NAME": "http-repository-url-no-git-suffix" }, { "ci.job.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", @@ -427,7 +427,7 @@ "BITBUCKET_GIT_HTTP_ORIGIN": "ssh://host.xz:54321/path/to/repo/", "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", 
"BITBUCKET_REPO_FULL_NAME": "bitbucket-repo", - "TESTING_TEST_OPTIMIZATION_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix" + "DD_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix" }, { "ci.job.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", diff --git a/packages/dd-trace/test/plugins/util/ci-env/bitrise.json b/packages/dd-trace/test/plugins/util/ci-env/bitrise.json index 6b36fe78fc9..b909235ca8c 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/bitrise.json +++ b/packages/dd-trace/test/plugins/util/ci-env/bitrise.json @@ -486,7 +486,7 @@ "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", - "TESTING_TEST_OPTIMIZATION_TEST_CASE_NAME": "http-repository-url-no-git-suffix", + "DD_TEST_CASE_NAME": "http-repository-url-no-git-suffix", "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GIT_REPOSITORY_URL": "https://github.com/DataDog/dogweb" }, @@ -508,7 +508,7 @@ "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", - "TESTING_TEST_OPTIMIZATION_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix", + "DD_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix", "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GIT_REPOSITORY_URL": "ssh://host.xz:54321/path/to/repo/" }, diff --git a/packages/dd-trace/test/plugins/util/ci-env/buddy.json b/packages/dd-trace/test/plugins/util/ci-env/buddy.json index d3d881e0b20..06f426e9607 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/buddy.json +++ b/packages/dd-trace/test/plugins/util/ci-env/buddy.json @@ -192,7 +192,7 @@ "BUDDY_PIPELINE_ID": "456", "BUDDY_PIPELINE_NAME": "Deploy to Production", "BUDDY_SCM_URL": "https://github.com/buddyworks/my-project", - "TESTING_TEST_OPTIMIZATION_TEST_CASE_NAME": "http-repository-url-no-git-suffix" + "DD_TEST_CASE_NAME": "http-repository-url-no-git-suffix" }, { "ci.pipeline.id": "456/buddy-execution-id", @@ -223,7 +223,7 @@ "BUDDY_PIPELINE_ID": "456", "BUDDY_PIPELINE_NAME": "Deploy to Production", "BUDDY_SCM_URL": "ssh://host.xz:54321/path/to/repo/", - "TESTING_TEST_OPTIMIZATION_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix" + "DD_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix" }, { "ci.pipeline.id": "456/buddy-execution-id", diff --git a/packages/dd-trace/test/plugins/util/ci-env/buildkite.json b/packages/dd-trace/test/plugins/util/ci-env/buildkite.json index 191508f6dcd..bd17e857ccc 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/buildkite.json +++ b/packages/dd-trace/test/plugins/util/ci-env/buildkite.json @@ -728,7 +728,7 @@ "BUILDKITE_PULL_REQUEST_BASE_BRANCH": "", "BUILDKITE_REPO": "https://github.com/DataDog/dogweb", "BUILDKITE_TAG": "", - "TESTING_TEST_OPTIMIZATION_TEST_CASE_NAME": "http-repository-url-no-git-suffix" + "DD_TEST_CASE_NAME": "http-repository-url-no-git-suffix" }, { "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", @@ -762,7 +762,7 @@ "BUILDKITE_PULL_REQUEST_BASE_BRANCH": "", "BUILDKITE_REPO": "ssh://host.xz:54321/path/to/repo/", "BUILDKITE_TAG": "", - "TESTING_TEST_OPTIMIZATION_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix" + "DD_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix" }, { "_dd.ci.env_vars": 
"{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", diff --git a/packages/dd-trace/test/plugins/util/ci-env/circleci.json b/packages/dd-trace/test/plugins/util/ci-env/circleci.json index 6404b3b6d9b..e577bf03a0a 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/circleci.json +++ b/packages/dd-trace/test/plugins/util/ci-env/circleci.json @@ -529,7 +529,7 @@ "CIRCLE_REPOSITORY_URL": "https://github.com/DataDog/dogweb", "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", - "TESTING_TEST_OPTIMIZATION_TEST_CASE_NAME": "http-repository-url-no-git-suffix" + "DD_TEST_CASE_NAME": "http-repository-url-no-git-suffix" }, { "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", @@ -553,7 +553,7 @@ "CIRCLE_REPOSITORY_URL": "ssh://host.xz:54321/path/to/repo/", "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", - "TESTING_TEST_OPTIMIZATION_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix" + "DD_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix" }, { "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", diff --git a/packages/dd-trace/test/plugins/util/ci-env/gitlab.json b/packages/dd-trace/test/plugins/util/ci-env/gitlab.json index 3a232575e1f..54d2866092c 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/gitlab.json +++ b/packages/dd-trace/test/plugins/util/ci-env/gitlab.json @@ -469,7 +469,7 @@ "CI_PROJECT_PATH": "gitlab-pipeline-name", "CI_PROJECT_URL": "https://gitlab.com/repo", "CI_REPOSITORY_URL": "http://hostname.com/repo", - "TESTING_TEST_OPTIMIZATION_TEST_CASE_NAME": "http-repository-url-no-git-suffix", + "DD_TEST_CASE_NAME": "http-repository-url-no-git-suffix", "GITLAB_CI": "gitlab" }, { @@ -510,7 +510,7 @@ "CI_PROJECT_PATH": "gitlab-pipeline-name", "CI_PROJECT_URL": "https://gitlab.com/repo", "CI_REPOSITORY_URL": "ssh://host.xz:54321/path/to/repo/", - "TESTING_TEST_OPTIMIZATION_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix", + "DD_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix", "GITLAB_CI": "gitlab" }, { diff --git a/packages/dd-trace/test/plugins/util/ci-env/jenkins.json b/packages/dd-trace/test/plugins/util/ci-env/jenkins.json index 57edb23bbdf..4410fc44236 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/jenkins.json +++ b/packages/dd-trace/test/plugins/util/ci-env/jenkins.json @@ -672,7 +672,7 @@ "BUILD_TAG": "jenkins-pipeline-id", "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", - "TESTING_TEST_OPTIMIZATION_TEST_CASE_NAME": "http-repository-url-no-git-suffix", + "DD_TEST_CASE_NAME": "http-repository-url-no-git-suffix", "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GIT_URL_1": "https://github.com/DataDog/dogweb", "JENKINS_URL": "jenkins", @@ -694,7 +694,7 @@ "BUILD_TAG": "jenkins-pipeline-id", "BUILD_URL": "https://jenkins.com/pipeline", "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", - "TESTING_TEST_OPTIMIZATION_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix", + "DD_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix", "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "GIT_URL_1": "ssh://host.xz:54321/path/to/repo/", "JENKINS_URL": "jenkins", diff --git a/packages/dd-trace/test/plugins/util/ci-env/travisci.json b/packages/dd-trace/test/plugins/util/ci-env/travisci.json index 010e10f729c..81219bc6597 100644 --- 
a/packages/dd-trace/test/plugins/util/ci-env/travisci.json +++ b/packages/dd-trace/test/plugins/util/ci-env/travisci.json @@ -564,7 +564,7 @@ "ci.pipeline.url": "https://travisci.com/pipeline", "ci.provider.name": "travisci", "git.branch": "pr", - "git.commit.head_sha": "724faca55efebf66fc15bfccc34577c64c5480bd", + "git.commit.head.sha": "724faca55efebf66fc15bfccc34577c64c5480bd", "git.commit.message": "travis-commit-message", "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.pull_request.base_branch": "master", diff --git a/packages/dd-trace/test/plugins/util/ci-env/usersupplied.json b/packages/dd-trace/test/plugins/util/ci-env/usersupplied.json index ba660f76a0b..add7045275c 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/usersupplied.json +++ b/packages/dd-trace/test/plugins/util/ci-env/usersupplied.json @@ -166,7 +166,7 @@ "DD_GIT_COMMIT_MESSAGE": "usersupplied-message", "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "DD_GIT_REPOSITORY_URL": "https://github.com/DataDog/dogweb", - "TESTING_TEST_OPTIMIZATION_TEST_CASE_NAME": "http-repository-url-no-git-suffix" + "DD_TEST_CASE_NAME": "http-repository-url-no-git-suffix" }, { "git.commit.author.date": "usersupplied-authordate", @@ -191,7 +191,7 @@ "DD_GIT_COMMIT_MESSAGE": "usersupplied-message", "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "DD_GIT_REPOSITORY_URL": "ssh://host.xz:54321/path/to/repo/", - "TESTING_TEST_OPTIMIZATION_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix" + "DD_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix" }, { "git.commit.author.date": "usersupplied-authordate", diff --git a/packages/dd-trace/test/plugins/util/git.spec.js b/packages/dd-trace/test/plugins/util/git.spec.js index a9130ed619a..4a3d02e2646 100644 --- a/packages/dd-trace/test/plugins/util/git.spec.js +++ b/packages/dd-trace/test/plugins/util/git.spec.js @@ -23,7 +23,14 @@ const { GIT_COMMIT_AUTHOR_DATE, GIT_COMMIT_AUTHOR_EMAIL, GIT_COMMIT_AUTHOR_NAME, - CI_WORKSPACE_PATH + CI_WORKSPACE_PATH, + GIT_COMMIT_HEAD_MESSAGE, + GIT_COMMIT_HEAD_AUTHOR_DATE, + GIT_COMMIT_HEAD_AUTHOR_EMAIL, + GIT_COMMIT_HEAD_AUTHOR_NAME, + GIT_COMMIT_HEAD_COMMITER_DATE, + GIT_COMMIT_HEAD_COMMITER_EMAIL, + GIT_COMMIT_HEAD_COMMITER_NAME } = require('../../../src/plugins/util/tags') const { getGitMetadata, unshallowRepository, getGitDiff } = proxyquire('../../../src/plugins/util/git', @@ -63,7 +70,8 @@ describe('git', () => { branch: 'myBranch', commitMessage: 'myCommitMessage', authorName: 'ciAuthorName', - ciWorkspacePath: 'ciWorkspacePath' + ciWorkspacePath: 'ciWorkspacePath', + headCommitSha: 'headCommitSha' } const metadata = getGitMetadata(ciMetadata) @@ -83,6 +91,11 @@ describe('git', () => { expect(execFileSyncStub).not.to.have.been.calledWith('git', ['rev-parse', 'HEAD']) expect(execFileSyncStub).not.to.have.been.calledWith('git', ['rev-parse', '--abbrev-ref', 'HEAD']) expect(execFileSyncStub).not.to.have.been.calledWith('git', ['rev-parse', '--show-toplevel']) + expect(execFileSyncStub).to.have.been.calledWith('git', ['show', '-s', '--format=%B', ciMetadata.headCommitSha]) + expect(execFileSyncStub).to.have.been.calledWith( + 'git', + ['show', '-s', '--format=%an,%ae,%aI,%cn,%ce,%cI', ciMetadata.headCommitSha] + ) }) it('does not crash if git is not available', () => { @@ -102,6 +115,8 @@ describe('git', () => { it('returns all git metadata is git is available', () => { const commitMessage = `multi line commit message` + const headCommitMessage = `multi line + head commit message` execFileSyncStub 
.onCall(0).returns( @@ -112,9 +127,15 @@ describe('git', () => { .onCall(2).returns('gitBranch') .onCall(3).returns('gitCommitSHA') .onCall(4).returns('ciWorkspacePath') - .onCall(5).returns('https://github.com/datadog/safe-repository.git') + .onCall(5).returns(false) + .onCall(6).returns(headCommitMessage) + .onCall(7).returns( + 'git head author,git.head.author@email.com,2022-02-14T16:22:03-05:00,' + + 'git head committer,git.head.committer@email.com,2022-02-14T16:23:03-05:00' + ) + .onCall(8).returns('https://github.com/datadog/safe-repository.git') - const metadata = getGitMetadata({ tag: 'ciTag' }) + const metadata = getGitMetadata({ tag: 'ciTag', headCommitSha: 'headCommitSha' }) expect(metadata).to.eql({ [GIT_BRANCH]: 'gitBranch', @@ -128,6 +149,13 @@ describe('git', () => { [GIT_COMMIT_COMMITTER_EMAIL]: 'git.committer@email.com', [GIT_COMMIT_COMMITTER_DATE]: '2022-02-14T16:23:03-05:00', [GIT_COMMIT_COMMITTER_NAME]: 'git committer', + [GIT_COMMIT_HEAD_MESSAGE]: headCommitMessage, + [GIT_COMMIT_HEAD_AUTHOR_DATE]: '2022-02-14T16:22:03-05:00', + [GIT_COMMIT_HEAD_AUTHOR_EMAIL]: 'git.head.author@email.com', + [GIT_COMMIT_HEAD_AUTHOR_NAME]: 'git head author', + [GIT_COMMIT_HEAD_COMMITER_DATE]: '2022-02-14T16:23:03-05:00', + [GIT_COMMIT_HEAD_COMMITER_EMAIL]: 'git.head.committer@email.com', + [GIT_COMMIT_HEAD_COMMITER_NAME]: 'git head committer', [CI_WORKSPACE_PATH]: 'ciWorkspacePath' }) @@ -292,7 +320,29 @@ describe('unshallowRepository', () => { 'daede5785233abb1a3cb76b9453d4eb5b98290b3' ] - unshallowRepository() + unshallowRepository(false) + expect(execFileSyncStub).to.have.been.calledWith('git', options) + }) + + it('works for the usual case with parentOnly', () => { + execFileSyncStub + .onCall(0).returns( + 'git version 2.39.0' + ) + .onCall(1).returns('origin') + .onCall(2).returns('daede5785233abb1a3cb76b9453d4eb5b98290b3') + + const options = [ + 'fetch', + '--deepen=1', + '--update-shallow', + '--filter=blob:none', + '--recurse-submodules=no', + 'origin', + 'daede5785233abb1a3cb76b9453d4eb5b98290b3' + ] + + unshallowRepository(true) expect(execFileSyncStub).to.have.been.calledWith('git', options) }) @@ -316,7 +366,7 @@ describe('unshallowRepository', () => { 'origin/master' ] - unshallowRepository() + unshallowRepository(false) expect(execFileSyncStub).to.have.been.calledWith('git', options) }) @@ -340,7 +390,7 @@ describe('unshallowRepository', () => { 'origin' ] - unshallowRepository() + unshallowRepository(false) expect(execFileSyncStub).to.have.been.calledWith('git', options) }) }) From 330c6cb05990afee8f979f230ba2d29ca1827d39 Mon Sep 17 00:00:00 2001 From: Carles Capell <107924659+CarlesDD@users.noreply.github.com> Date: Fri, 11 Jul 2025 14:19:31 +0200 Subject: [PATCH 21/53] Update WAF rules to v1.15.0 (#6082) --- packages/dd-trace/src/appsec/recommended.json | 90 ++++++++++++++++++- 1 file changed, 88 insertions(+), 2 deletions(-) diff --git a/packages/dd-trace/src/appsec/recommended.json b/packages/dd-trace/src/appsec/recommended.json index 734144cb5de..4dc987ddfab 100644 --- a/packages/dd-trace/src/appsec/recommended.json +++ b/packages/dd-trace/src/appsec/recommended.json @@ -1,7 +1,7 @@ { "version": "2.2", "metadata": { - "rules_version": "1.14.2" + "rules_version": "1.15.0" }, "rules": [ { @@ -2985,7 +2985,7 @@ "address": "graphql.server.resolver" } ], - "regex": 
"\\b(?:(?:l(?:(?:utimes|chmod)(?:Sync)?|(?:stat|ink)Sync)|w(?:rite(?:(?:File|v)(?:Sync)?|Sync)|atchFile)|u(?:n(?:watchFile|linkSync)|times(?:Sync)?)|s(?:(?:ymlink|tat)Sync|pawn(?:File|Sync))|ex(?:ec(?:File(?:Sync)?|Sync)|istsSync)|a(?:ppendFile|ccess)(?:Sync)?|(?:Caveat|Inode)s|open(?:dir)?Sync|new\\s+Function|Availability|\\beval)\\s*\\(|m(?:ain(?:Module\\s*(?:\\W*\\s*(?:constructor|require)|\\[)|\\s*(?:\\W*\\s*(?:constructor|require)|\\[))|kd(?:temp(?:Sync)?|irSync)\\s*\\(|odule\\.exports\\s*=)|c(?:(?:(?:h(?:mod|own)|lose)Sync|reate(?:Write|Read)Stream|p(?:Sync)?)\\s*\\(|o(?:nstructor\\s*(?:\\W*\\s*_load|\\[)|pyFile(?:Sync)?\\s*\\())|f(?:(?:(?:s(?:(?:yncS)?|tatS)|datas(?:yncS)?)ync|ch(?:mod|own)(?:Sync)?)\\s*\\(|u(?:nction\\s*\\(\\s*\\)\\s*{|times(?:Sync)?\\s*\\())|r(?:e(?:(?:ad(?:(?:File|link|dir)?Sync|v(?:Sync)?)|nameSync)\\s*\\(|quire\\s*(?:\\W*\\s*main|\\[))|m(?:Sync)?\\s*\\()|process\\s*(?:\\W*\\s*(?:mainModule|binding)|\\[)|t(?:his\\.constructor|runcateSync\\s*\\()|_(?:\\$\\$ND_FUNC\\$\\$_|_js_function)|global\\s*(?:\\W*\\s*process|\\[)|String\\s*\\.\\s*fromCharCode|binding\\s*\\[)", + "regex": "\\b(?:(?:l(?:(?:utimes|chmod)(?:Sync)?|(?:stat|ink)Sync)|w(?:rite(?:(?:File|v)(?:Sync)?|Sync)|atchFile)|u(?:n(?:watchFile|linkSync)|times(?:Sync)?)|s(?:(?:ymlink|tat)Sync|pawn(?:File|Sync))|ex(?:ec(?:File(?:Sync)?|Sync)|istsSync)|a(?:ppendFile|ccess)(?:Sync)?|(?:Caveat|Inode)s|open(?:dir)?Sync|new\\s+Function|Availability|\\beval)\\s*\\(|m(?:ain(?:Module\\s*(?:\\W*\\s*(?:constructor|require)|\\[)|\\s*(?:\\W*\\s*(?:constructor|require)|\\[))|kd(?:temp(?:Sync)?|irSync)\\s*\\(|odule\\.exports\\s*=)|c(?:(?:(?:h(?:mod|own)|lose)Sync|reate(?:Write|Read)Stream|p(?:Sync)?)\\s*\\(|o(?:nstructor\\s*(?:\\W*\\s*_load|\\[)|pyFile(?:Sync)?\\s*\\())|f(?:(?:(?:s(?:(?:yncS)?|tatS)|datas(?:yncS)?)ync|ch(?:mod|own)(?:Sync)?)\\s*\\(|u(?:nction\\s*\\(\\s*\\)\\s*{|times(?:Sync)?\\s*\\())|r(?:e(?:(?:ad(?:(?:File|link|dir)?Sync|v(?:Sync)?)|nameSync)\\s*\\(|quire\\s*(?:\\W*\\s*main\\b|\\[))|m(?:Sync)?\\s*\\()|process\\s*(?:\\W*\\s*(?:mainModule|binding)|\\[)|t(?:his\\.constructor|runcateSync\\s*\\()|_(?:\\$\\$ND_FUNC\\$\\$_|_js_function)|global\\s*(?:\\W*\\s*process|\\[)|String\\s*\\.\\s*fromCharCode|binding\\s*\\[)", "options": { "case_sensitive": true, "min_length": 3 @@ -5656,6 +5656,52 @@ ], "transformers": [] }, + { + "id": "dog-932-110", + "name": "Python: Subprocess-based command injection", + "tags": { + "type": "command_injection", + "category": "attack_attempt", + "confidence": "0", + "module": "waf" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?s)\\bsubprocess\\b.*\\b(?:check_output|run|Popen|call|check_call)\\b", + "options": { + "case_sensitive": true, + "min_length": 14 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, { "id": "dog-934-001", "name": "XXE - XML file loads external entity", @@ -9074,6 +9120,28 @@ "evaluate": true, "output": true }, + { + "id": "decode-auth-jwt", + "generator": "jwt_decode", + "min_version": "1.25.0", + "parameters": { + "mappings": [ + { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "authorization" + ] + } + ], + "output": 
"server.request.jwt" + } + ] + }, + "evaluate": true, + "output": false + }, { "id": "http-network-fingerprint", "generator": "http_network_fingerprint", @@ -9918,6 +9986,24 @@ "category": "payment" } }, + { + "id": "c542c147-3883-43d6-a067-178e4a7bd65d", + "name": "Password", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\bpass(?:[_-]?word|wd)?\\b|\\bpwd\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "tags": { + "type": "password", + "category": "credentials" + } + }, { "id": "18b608bd7a764bff5b2344c0", "name": "Phone number", From 5faf91f4af415940791c7724115129ad6f17ccaa Mon Sep 17 00:00:00 2001 From: Roch Devost Date: Fri, 11 Jul 2025 13:31:32 -0400 Subject: [PATCH 22/53] fix indeterministic request id failing di snapshot test (#6084) --- integration-tests/debugger/snapshot.spec.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/integration-tests/debugger/snapshot.spec.js b/integration-tests/debugger/snapshot.spec.js index 57b101df686..09f9223b669 100644 --- a/integration-tests/debugger/snapshot.spec.js +++ b/integration-tests/debugger/snapshot.spec.js @@ -87,7 +87,8 @@ describe('Dynamic Instrumentation', function () { // There's no reason to test the `request` object 100%, instead just check its fingerprint assert.deepEqual(Object.keys(request), ['type', 'fields']) assert.equal(request.type, 'Request') - assert.deepEqual(request.fields.id, { type: 'string', value: 'req-1' }) + assert.equal(request.fields.id.type, 'string') + assert.match(request.fields.id.value, /^req-\d+$/) assert.deepEqual(request.fields.params, { type: 'NullObject', fields: { name: { type: 'string', value: 'foo' } } }) From cefdf1d33cb706bad40d6be910fd3907d3ff9992 Mon Sep 17 00:00:00 2001 From: Thomas Watson Date: Mon, 14 Jul 2025 06:32:43 +0200 Subject: [PATCH 23/53] [Code Origin] Take source maps into account in stack traces (#6070) Ensure that the stack traces reported via Code Origin for Spans are resolved using source maps, if Node.js is running with the `--enable-source-maps` flag. --- eslint.config.mjs | 1 + integration-tests/code-origin.spec.js | 55 ++ .../code-origin/scripts/build-typescript.sh | 3 + integration-tests/code-origin/typescript.js | 58 +++ .../code-origin/typescript.js.map | 1 + integration-tests/code-origin/typescript.ts | 17 + packages/datadog-code-origin/index.js | 23 +- .../dd-trace/src/plugins/util/stacktrace.js | 228 ++++++-- .../test/plugins/util/stacktrace.spec.js | 488 ++++++++++++++++-- 9 files changed, 775 insertions(+), 99 deletions(-) create mode 100644 integration-tests/code-origin.spec.js create mode 100755 integration-tests/code-origin/scripts/build-typescript.sh create mode 100644 integration-tests/code-origin/typescript.js create mode 100644 integration-tests/code-origin/typescript.js.map create mode 100644 integration-tests/code-origin/typescript.ts diff --git a/eslint.config.mjs b/eslint.config.mjs index a08ee47b670..9128fb4e680 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -43,6 +43,7 @@ export default [ '**/versions', // This is effectively a node_modules tree. '**/acmeair-nodejs', // We don't own this. '**/vendor', // Generally, we didn't author this code. 
+ 'integration-tests/code-origin/typescript.js', // Generated 'integration-tests/debugger/target-app/source-map-support/bundle.js', // Generated 'integration-tests/debugger/target-app/source-map-support/hello/world.js', // Generated 'integration-tests/debugger/target-app/source-map-support/minify.min.js', // Generated diff --git a/integration-tests/code-origin.spec.js b/integration-tests/code-origin.spec.js new file mode 100644 index 00000000000..4853b0f9fb8 --- /dev/null +++ b/integration-tests/code-origin.spec.js @@ -0,0 +1,55 @@ +'use strict' + +const assert = require('node:assert') +const path = require('node:path') +const Axios = require('axios') +const { FakeAgent, spawnProc, createSandbox } = require('./helpers') + +describe('Code Origin for Spans', function () { + let sandbox, cwd, appFile, agent, proc, axios + + before(async () => { + sandbox = await createSandbox(['fastify']) + cwd = sandbox.folder + appFile = path.join(cwd, 'code-origin', 'typescript.js') + }) + + after(async () => { + await sandbox?.remove() + }) + + beforeEach(async () => { + agent = await new FakeAgent().start() + proc = await spawnProc(appFile, { + cwd, + env: { + NODE_OPTIONS: '--enable-source-maps', + DD_TRACE_AGENT_URL: `http://localhost:${agent.port}` + }, + stdio: 'pipe', + }) + axios = Axios.create({ baseURL: proc.url }) + }) + + afterEach(async () => { + proc?.kill() + await agent?.stop() + }) + + describe('source map support', function () { + it('should support source maps', async () => { + await Promise.all([ + agent.assertMessageReceived(({ payload }) => { + const [span] = payload.flatMap(p => p.filter(span => span.name === 'fastify.request')) + assert.strictEqual(span.meta['_dd.code_origin.type'], 'entry') + assert.ok(span.meta['_dd.code_origin.frames.0.file'].endsWith(`${cwd}/code-origin/typescript.ts`)) + assert.strictEqual(span.meta['_dd.code_origin.frames.0.line'], '10') + assert.strictEqual(span.meta['_dd.code_origin.frames.0.column'], '5') + assert.strictEqual(span.meta['_dd.code_origin.frames.0.method'], '') + assert.strictEqual(span.meta['_dd.code_origin.frames.0.type'], 'Object') + }, 2_500), + await axios.get('/') + ]) + }) + }) +}) diff --git a/integration-tests/code-origin/scripts/build-typescript.sh b/integration-tests/code-origin/scripts/build-typescript.sh new file mode 100755 index 00000000000..d40beb84ac6 --- /dev/null +++ b/integration-tests/code-origin/scripts/build-typescript.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env sh + +npx --package=typescript -- tsc --sourceMap integration-tests/code-origin/typescript.ts \ No newline at end of file diff --git a/integration-tests/code-origin/typescript.js b/integration-tests/code-origin/typescript.js new file mode 100644 index 00000000000..f2d12f3b64c --- /dev/null +++ b/integration-tests/code-origin/typescript.js @@ -0,0 +1,58 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); + return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +require('dd-trace/init'); +// @ts-ignore - fastify will be available at runtime +var fastify_1 = require("fastify"); +var app = (0, fastify_1.default)({ + logger: true +}); +app.get('/', function handler() { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + return [2 /*return*/, { hello: 'world' }]; + }); + }); +}); +app.listen({ port: process.env.APP_PORT || 0 }, function (err) { + var _a; + if (err) + throw err; + (_a = process.send) === null || _a === void 0 ? 
void 0 : _a.call(process, { port: app.server.address().port }); +}); +//# sourceMappingURL=typescript.js.map \ No newline at end of file diff --git a/integration-tests/code-origin/typescript.js.map b/integration-tests/code-origin/typescript.js.map new file mode 100644 index 00000000000..9c1f0c75b62 --- /dev/null +++ b/integration-tests/code-origin/typescript.js.map @@ -0,0 +1 @@ +{"version":3,"file":"typescript.js","sourceRoot":"","sources":["typescript.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,OAAO,CAAC,eAAe,CAAC,CAAA;AAExB,oDAAoD;AACpD,mCAA6B;AAE7B,IAAM,GAAG,GAAG,IAAA,iBAAO,EAAC;IAClB,MAAM,EAAE,IAAI;CACb,CAAC,CAAA;AAEF,GAAG,CAAC,GAAG,CAAC,GAAG,EAAE,SAAe,OAAO;;;YACjC,sBAAO,EAAE,KAAK,EAAE,OAAO,EAAE,EAAA;;;CAC1B,CAAC,CAAA;AAEF,GAAG,CAAC,MAAM,CAAC,EAAE,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC,QAAQ,IAAI,CAAC,EAAE,EAAE,UAAC,GAAG;;IAClD,IAAI,GAAG;QAAE,MAAM,GAAG,CAAA;IAClB,MAAA,OAAO,CAAC,IAAI,wDAAG,EAAE,IAAI,EAAE,GAAG,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,IAAI,EAAE,CAAC,CAAA;AACrD,CAAC,CAAC,CAAA"} \ No newline at end of file diff --git a/integration-tests/code-origin/typescript.ts b/integration-tests/code-origin/typescript.ts new file mode 100644 index 00000000000..bed63e6874e --- /dev/null +++ b/integration-tests/code-origin/typescript.ts @@ -0,0 +1,17 @@ +require('dd-trace/init') + +// @ts-ignore - fastify will be available at runtime +import Fastify from 'fastify' + +const app = Fastify({ + logger: true +}) + +app.get('/', async function handler () { + return { hello: 'world' } +}) + +app.listen({ port: process.env.APP_PORT || 0 }, (err) => { + if (err) throw err + process.send?.({ port: app.server.address().port }) +}) diff --git a/packages/datadog-code-origin/index.js b/packages/datadog-code-origin/index.js index d87bb2cd4a9..7a972497471 100644 --- a/packages/datadog-code-origin/index.js +++ b/packages/datadog-code-origin/index.js @@ -1,6 +1,6 @@ 'use strict' -const { getUserLandFrames } = require('../dd-trace/src/plugins/util/stacktrace') +const { parseUserLandFrames } = require('../dd-trace/src/plugins/util/stacktrace') const ENTRY_SPAN_STACK_FRAMES_LIMIT = 1 const EXIT_SPAN_STACK_FRAMES_LIMIT = Number(process.env._DD_CODE_ORIGIN_FOR_SPANS_EXIT_SPAN_MAX_USER_FRAMES) || 8 @@ -36,20 +36,25 @@ function exitTags (topOfStackFunc) { * @returns {Record} */ function tag (type, topOfStackFunc, limit) { - const frames = getUserLandFrames(topOfStackFunc, limit) + // The `Error.prepareStackTrace` API doesn't support resolving source maps. + // Fall back to manually parsing the stack trace. 
+ const dummy = {} + Error.captureStackTrace(dummy, topOfStackFunc) + const frames = parseUserLandFrames(dummy.stack, limit) + const tags = { '_dd.code_origin.type': type } for (let i = 0; i < frames.length; i++) { const frame = frames[i] - tags[`_dd.code_origin.frames.${i}.file`] = frame.file - tags[`_dd.code_origin.frames.${i}.line`] = String(frame.line) - tags[`_dd.code_origin.frames.${i}.column`] = String(frame.column) - if (frame.method) { - tags[`_dd.code_origin.frames.${i}.method`] = frame.method + tags[`_dd.code_origin.frames.${i}.file`] = frame.fileName + tags[`_dd.code_origin.frames.${i}.line`] = frame.lineNumber + tags[`_dd.code_origin.frames.${i}.column`] = frame.columnNumber + if (frame.methodName || frame.functionName) { + tags[`_dd.code_origin.frames.${i}.method`] = frame.methodName || frame.functionName } - if (frame.type) { - tags[`_dd.code_origin.frames.${i}.type`] = frame.type + if (frame.typeName) { + tags[`_dd.code_origin.frames.${i}.type`] = frame.typeName } } return tags diff --git a/packages/dd-trace/src/plugins/util/stacktrace.js b/packages/dd-trace/src/plugins/util/stacktrace.js index 04cf78cab5f..aba54c2b431 100644 --- a/packages/dd-trace/src/plugins/util/stacktrace.js +++ b/packages/dd-trace/src/plugins/util/stacktrace.js @@ -1,12 +1,15 @@ 'use strict' -const { relative, sep, isAbsolute } = require('path') +const { relative, sep } = require('path') const cwd = process.cwd() +const NODE_MODULES_PATTERN_MIDDLE = `${sep}node_modules${sep}` +const NODE_MODULES_PATTERN_START = `node_modules${sep}` + module.exports = { getCallSites, - getUserLandFrames + parseUserLandFrames } // From https://github.com/felixge/node-stack-trace/blob/ba06dcdb50d465cd440d84a563836e293b360427/index.js#L1 @@ -32,70 +35,195 @@ function getCallSites (constructorOpt) { /** * Get stack trace of user-land frames. * - * @param {Function} constructorOpt - Function to pass along to Error.captureStackTrace + * @param {string} stack - The stack trace to parse * @param {number} [limit=Infinity] - The maximum number of frames to return * @returns {StackFrame[]} - A list of stack frames from user-land code + */ +function parseUserLandFrames (stack, limit = Infinity) { + let index = stack.indexOf('\n at ') + const frames = [] + + while (index !== -1 && frames.length !== limit) { + const nextIndex = stack.indexOf('\n', index + 1) + const frame = parseLine(stack, index, nextIndex === -1 ? stack.length : nextIndex) + if (frame !== undefined) frames.push(frame) + index = nextIndex + } + + return frames +} + +/** + * Parses a line of the stack trace and returns the parsed frame if it is a user-land frame. + * Returns `undefined` otherwise. + * + * @param {string} stack - The stack trace in which the line is located. + * @param {number} start - The start index of the line to parse within the stack trace. + * @param {number} end - The end index of the line to parse within the stack trace. + * @returns {StackFrame|undefined} The parsed frame if it is a user frame, `undefined` otherwise. * * @typedef {Object} StackFrame - * @property {string} file - The file path of the frame - * @property {number} line - The line number in the file - * @property {number} column - The column number in the file - * @property {string} [method] - The function name, if available - * @property {string} [type] - The type name, if available + * @property {string} fileName - The file name of the frame. + * @property {string} lineNumber - The line number of the frame. + * @property {string} columnNumber - The column number of the frame. 
+ * @property {string} [functionName] - The function name of the frame. + * @property {string} [methodName] - The method name of the frame. + * @property {string} [typeName] - The type name of the frame. */ -function getUserLandFrames (constructorOpt, limit = Infinity) { - const callsites = getCallSites(constructorOpt) - const frames = [] +function parseLine (stack, start, end) { + let index + if (stack[end - 1] === ')') { + index = end - 2 // skip the last closing parenthesis + const code = stack.charCodeAt(index) + if (code < 0x30 || code > 0x39) return // not a digit + } else { + index = end - 1 + } - for (const callsite of callsites) { - if (callsite.isNative()) { - continue + start += 8 // skip the `\n at ` prefix + if (stack.startsWith('new ', start)) start += 4 // skip `new ` + else if (stack.startsWith('async ', start)) start += 6 // skip `async ` + + let fileName, lineNumber, columnNumber + const result = parseLocation(stack, start, index) + if (result === undefined) return + [fileName, lineNumber, columnNumber, index] = result + + if (isNodeModulesFrame(fileName)) return + + // parse method name + let methodName, functionName + if (stack[index] === ']') { + methodName = '' + index-- // skip the closing square bracket + for (; index >= start; index--) { + const char = stack[index] + if (char === ' ' && stack.slice(index - 4, index) === ' [as') { + // The space after `[as` in `[as Foo]` + index -= 4 // skip ` [as` + break + } else if (char === '[') { + // This isn't a method name after all, but probably a symbol + functionName = `${stack.slice(start, index)}[${methodName}]` + methodName = undefined + break + } + methodName = char + methodName } + index-- // skip the opening square bracket + } - const filename = callsite.getFileName() - - // If the callsite is native, there will be no associated filename. However, there might be other instances where - // this can happen, so to be sure, we add this additional check - if (filename === null) { - continue + // parse function and type name + functionName ??= start <= index ? stack.slice(start, index + 1) : undefined + let typeName + if (functionName !== undefined && functionName[0] !== '[') { + const periodIndex = functionName.indexOf('.') + if (periodIndex !== -1) { + typeName = functionName.slice(0, periodIndex) + functionName = functionName.slice(periodIndex + 1) } + } + + return { + lineNumber, + columnNumber, + fileName, + methodName, + functionName, + typeName + } +} + +// TODO: Technically, the algorithm below could be simplified to not use the relative path, but be simply: +// +// return filename.includes(NODE_MODULES_PATTERN_MIDDLE)) +// +// However, if the user happens to be running this within a directory where `node_modules` is one of the parent +// directories, it will be flagged as a false positive. +function isNodeModulesFrame (fileName) { + // Quick check first - if it doesn't contain node_modules, it's not a node_modules frame + if (!fileName.includes(NODE_MODULES_PATTERN_MIDDLE)) { + return false + } + + // More expensive relative path calculation only when necessary + const actualPath = fileName.startsWith('file:') ? fileName.slice(7) : fileName + const relativePath = relative(cwd, actualPath) - // ESM module paths start with the "file://" protocol (because ESM supports https imports) - // TODO: Node.js also supports `data:` and `node:` imports, should we do something specific for `data:`? 
- const containsFileProtocol = filename.startsWith('file:') + return relativePath.startsWith(NODE_MODULES_PATTERN_START) || relativePath.includes(NODE_MODULES_PATTERN_MIDDLE) +} - // TODO: I'm not sure how stable this check is. Alternatively, we could consider reversing it if we can get - // a comprehensive list of all non-file-based values, eg: - // - // filename === '' || filename.startsWith('node:') - if (containsFileProtocol === false && isAbsolute(filename) === false) { +/** + * A stack trace location can be in one of the following formats: + * + * 1. `myscript.js:10:3` + * 2. `(myscript.js:10:3` + * 3. `(eval at Foo.a (myscript.js:10:3)` + * 4. `(eval at Foo.a (myscript.js:10:3), :1:1` + * 5. `(eval at Foo.a (eval at Bar.z (myscript.js:10:3)` + * 6. `(eval at Foo.a (eval at Bar.z (myscript.js:10:3), :1:1` + * + * Notice how the optional closing parenthesis is not included in the location string at this point. It has been + * skipped to save time. + * + * This function extracts the `myscript.js:10:3` part, passes it, returns the file name, line number, and column + * number and sets the `index` to the start of the whole location string. + * + * @returns {[string, string, string, number]|undefined} + */ +function parseLocation (stack, start, index) { + // parse column number + let columnNumber = '' + for (; index >= start; index--) { + const code = stack.charCodeAt(index) + if (code === 0x29) { // closing parenthesis + // e.g. `eval at Foo.a (eval at Bar.z (myscript.js:10:3))` continue } + if (code < 0x30 || code > 0x39) break // not a digit + columnNumber = stack[index] + columnNumber + } - // TODO: Technically, the algorithm below could be simplified to not use the relative path, but be simply: - // - // if (filename.includes(sep + 'node_modules' + sep)) continue - // - // However, the tests in `packages/dd-trace/test/plugins/util/stacktrace.spec.js` will fail on my machine - // because I have the source code in a parent folder called `node_modules`. So the code below thinks that - // it's not in user-land - const relativePath = relative(cwd, containsFileProtocol ? filename.slice(7) : filename) - if (relativePath.startsWith('node_modules' + sep) || relativePath.includes(sep + 'node_modules' + sep)) { - continue + index-- // skip colon + + // parse line number + let lineNumber = '' + for (; index >= start; index--) { + const code = stack.charCodeAt(index) + if (code < 0x30 || code > 0x39) break // not a digit + lineNumber = stack[index] + lineNumber + } + + index-- // skip colon + + // parse file name + let nestedParenthesis = 1 // 1 instead of 0 because the trailing parenthesis wasn't seen by this function + let fileName = '' + for (; index >= start; index--) { + const char = stack[index] + if (char === ')') { + nestedParenthesis++ + } else if (char === '(' && --nestedParenthesis === 0) { + index -= 2 // skip the opening parenthesis and the whitespace before it + break + } else if (nestedParenthesis === 1 && char === ':' && stack.slice(index - 4, index) === 'node') { + return // e.g. `node:vm:137:12` is not considered a user frame } + fileName = char + fileName + } - const method = callsite.getFunctionName() - const type = callsite.getTypeName() - frames.push({ - file: filename, - line: callsite.getLineNumber(), - column: callsite.getColumnNumber(), - method: method ?? undefined, // force to undefined if null so JSON.stringify will omit it - type: type ?? 
undefined // force to undefined if null so JSON.stringify will omit it - }) - - if (frames.length === limit) break + if (fileName.startsWith('eval at ')) { + // The location we parsed was not the actual location, but the location inside the eval. Let's parse the nested + // location, which will be the location of the eval. + const result = parseLocation(fileName, 0, fileName.lastIndexOf(',') - 2) + if (result === undefined) return + [fileName, lineNumber, columnNumber] = result // ignore returned index, as we need to retain the original one } - return frames + return [ + fileName, + lineNumber, + columnNumber, + index // return the index, so the caller knows how far we got + ] } diff --git a/packages/dd-trace/test/plugins/util/stacktrace.spec.js b/packages/dd-trace/test/plugins/util/stacktrace.spec.js index a96ed87f965..58ac52ec765 100644 --- a/packages/dd-trace/test/plugins/util/stacktrace.spec.js +++ b/packages/dd-trace/test/plugins/util/stacktrace.spec.js @@ -1,65 +1,473 @@ 'use strict' -const { isAbsolute } = require('path') -const { getNextLineNumber } = require('../helpers') +const assert = require('node:assert') +const { join } = require('node:path') require('../../setup/tap') const { getCallSites, - getUserLandFrames + parseUserLandFrames } = require('../../../src/plugins/util/stacktrace') describe('stacktrace utils', () => { it('should get callsites array from getCallsites', () => { const callsites = getCallSites() - expect(callsites).to.be.an('array') - expect(callsites.length).to.be.gt(0) + assert.strictEqual(Array.isArray(callsites), true) + assert.strictEqual(callsites.length > 0, true) callsites.forEach((callsite) => { - expect(callsite).to.be.an.instanceof(Object) - expect(callsite.constructor.name).to.equal('CallSite') - expect(callsite.getFileName).to.be.an.instanceof(Function) + assert.strictEqual(callsite instanceof Object, true) + assert.strictEqual(callsite.constructor.name, 'CallSite') + assert.strictEqual(callsite.getFileName instanceof Function, true) }) }) - describe('getUserLandFrames', () => { - it('should return array of frame objects', function helloWorld () { - function someFunction () { - const frames = getUserLandFrames(someFunction) - - expect(frames).to.be.an('array') - expect(frames.length).to.be.gt(1) - frames.forEach((frame) => { - expect(frame).to.be.an.instanceof(Object) - expect(frame).to.have.all.keys('file', 'line', 'column', 'method', 'type') - expect(frame.file).to.be.a('string') - expect(frame.line).to.be.gt(0) - expect(frame.column).to.be.gt(0) - expect(typeof frame.method).to.be.oneOf(['string', 'undefined']) - expect(typeof frame.type).to.be.oneOf(['string', 'undefined']) - expect(isAbsolute(frame.file)).to.be.true + describe('parse', () => { + const nonUserLandFrame = ` at foo (${join(__dirname, 'node_modules', 'bar.js')}:123:456)` + + it('should bail on invalid stack', () => { + assert.deepStrictEqual(parseUserLandFrames(genStackTrace('foo')), []) + }) + + describe('non-hardcoded stack traces', () => { + it('should parse a frame', function outerFunction () { + const lineNumber = getNextLineNumber() + const { stack } = new Error('foo') + const [frame] = parseUserLandFrames(stack) + assert.deepStrictEqual(frame, { + typeName: 'Test', + functionName: 'outerFunction', + methodName: undefined, + fileName: __filename, + lineNumber, + columnNumber: '27' + }) + }) + + it('should parse frame with eval', () => { + const lineNumber = getNextLineNumber() + const { stack } = eval('new Error("foo")') // eslint-disable-line no-eval + const [frame] = 
parseUserLandFrames(stack) + assert.deepStrictEqual(frame, { + typeName: undefined, + functionName: 'eval', + methodName: undefined, + fileName: __filename, + lineNumber, + columnNumber: '27' }) + }) - const frame = frames[0] - expect(frame.file).to.equal(__filename) - expect(frame.line).to.equal(lineNumber) - expect(frame.method).to.equal('helloWorld') - expect(frame.type).to.equal('Test') + function getNextLineNumber () { + const stack = new Error('foo').stack.split('\n') + return String(Number(stack[2].split(':').at(-2)) + 1) } + }) + + describe('should parse frame with location not wrapped in parentheses', () => { + it('normal case', () => { + assertStackTraceWithFrame(' at foo/bar/baz.js:123:456', { + typeName: undefined, + functionName: undefined, + methodName: undefined, + fileName: 'foo/bar/baz.js', + lineNumber: '123', + columnNumber: '456' + }) + }) + + it('with weird characters', () => { + assertStackTraceWithFrame(' at f[i](l:.js:1:2) :2:1', { + typeName: undefined, + functionName: undefined, + methodName: undefined, + fileName: ' f[i](l:.js:1:2) ', + lineNumber: '2', + columnNumber: '1' + }) + }) + + it('evalmachine.', () => { + assertStackTraceWithFrame(' at evalmachine.:1:17', { + typeName: undefined, + functionName: undefined, + methodName: undefined, + fileName: 'evalmachine.', + lineNumber: '1', + columnNumber: '17' + }) + }) + }) + + it('should parse frame with a function name and a normal location', () => { + assertStackTraceWithFrame(' at foo (/foo/bar/baz.js:123:456)', { + typeName: undefined, + functionName: 'foo', + methodName: undefined, + fileName: '/foo/bar/baz.js', + lineNumber: '123', + columnNumber: '456' + }) + }) + + it('should parse frame with a function name and a weird filename', () => { + assertStackTraceWithFrame(' at foo (foo [bar] (baz).js:123:456)', { + typeName: undefined, + functionName: 'foo', + methodName: undefined, + fileName: 'foo [bar] (baz).js', + lineNumber: '123', + columnNumber: '456' + }) + }) + + it('should parse frame with a function name and a weird filename 2', () => { + assertStackTraceWithFrame(' at x ( f[i](l:.js:1:2) :1:33)', { + typeName: undefined, + functionName: 'x', + methodName: undefined, + fileName: ' f[i](l:.js:1:2) ', + lineNumber: '1', + columnNumber: '33' + }) + }) + + it('should be able to parse file: paths', () => { + assertStackTraceWithFrame(' at foo (file:///foo/bar/baz.js:123:456)', { + typeName: undefined, + functionName: 'foo', + methodName: undefined, + fileName: 'file:///foo/bar/baz.js', + lineNumber: '123', + columnNumber: '456' + }) + }) + + it('should be able to parse Windows paths', () => { + assertStackTraceWithFrame(' at foo (D:\\foo\\bar\\baz.js:123:456)', { + typeName: undefined, + functionName: 'foo', + methodName: undefined, + fileName: 'D:\\foo\\bar\\baz.js', + lineNumber: '123', + columnNumber: '456' + }) + }) + + it('should parse frame with method name', () => { + assertStackTraceWithFrame(' at foo [as bar] (/foo/bar/baz.js:3:8)', { + typeName: undefined, + functionName: 'foo', + methodName: 'bar', + fileName: '/foo/bar/baz.js', + lineNumber: '3', + columnNumber: '8' + }) + }) + + it('should parse frame with something that looks like a method name, but is not', () => { + assertStackTraceWithFrame(' at foo [bar baz] (/foo/bar/baz.js:3:8)', { + typeName: undefined, + functionName: 'foo [bar baz]', + methodName: undefined, + fileName: '/foo/bar/baz.js', + lineNumber: '3', + columnNumber: '8' + }) + }) + + it('should parse frame with a class name', () => { + assertStackTraceWithFrame(' at Foo.bar 
(/foo/bar/baz.js:3:8)', { + functionName: 'bar', + methodName: undefined, + typeName: 'Foo', + fileName: '/foo/bar/baz.js', + lineNumber: '3', + columnNumber: '8' + }) + }) + + it('should parse frame with a class name and a method name', () => { + assertStackTraceWithFrame(' at Foo.bar [as baz] (/foo/bar/baz.js:3:8)', { + functionName: 'bar', + methodName: 'baz', + typeName: 'Foo', + fileName: '/foo/bar/baz.js', + lineNumber: '3', + columnNumber: '8' + }) + }) + + it('should parse frame with whitespace in the function name', () => { + assertStackTraceWithFrame(' at foo bar (/foo/bar/baz.js:3:8)', { + typeName: undefined, + functionName: 'foo bar', + methodName: undefined, + fileName: '/foo/bar/baz.js', + lineNumber: '3', + columnNumber: '8' + }) + }) + + it('should parse frame with weird unicode characters', () => { + const frame = ' at Object.asdf ][)( \u0000\u0001\u0002\u0003\u001b[44;37m foo (/foo/bar/baz.js:3:8)' + assertStackTraceWithFrame(frame, { + typeName: 'Object', + functionName: 'asdf ][)( \u0000\u0001\u0002\u0003\u001b[44;37m foo', + methodName: undefined, + fileName: '/foo/bar/baz.js', + lineNumber: '3', + columnNumber: '8' + }) + }) - const lineNumber = getNextLineNumber() - someFunction() + it('should parse frame where the function name contains what looks like a location', () => { + assertStackTraceWithFrame(' at Object.asdf (a/b.js:1:2) (c/d/e.js:3:4)', { + typeName: 'Object', + functionName: 'asdf (a/b.js:1:2)', + methodName: undefined, + lineNumber: '3', + columnNumber: '4', + fileName: 'c/d/e.js' + }) }) - it('should respect limit', function helloWorld () { - (function someFunction () { - const frames = getUserLandFrames(someFunction, 1) - expect(frames.length).to.equal(1) - const frame = frames[0] - expect(frame.file).to.equal(__filename) - expect(frame.method).to.equal('helloWorld') - expect(frame.type).to.equal('Test') - })() + it('should parse frame a class name and whitespace in the function name', () => { + // { "foo bar" () { throw new Error() } } + assertStackTraceWithFrame(' at Object.foo bar (/foo/bar/baz.js:3:8)', { + typeName: 'Object', + functionName: 'foo bar', + methodName: undefined, + fileName: '/foo/bar/baz.js', + lineNumber: '3', + columnNumber: '8' + }) }) + + it('should parse frame with a symbol as the function name', () => { + assertStackTraceWithFrame(' at [Symbol.iterator] (/foo/bar/baz.js:3:8)', { + typeName: undefined, + functionName: '[Symbol.iterator]', + methodName: undefined, + fileName: '/foo/bar/baz.js', + lineNumber: '3', + columnNumber: '8' + }) + }) + + it('should parse frame with a class name and a symbol as the function name', () => { + // Array.from({ *[Symbol.iterator] () { throw new Error() } }) + assertStackTraceWithFrame(' at Object.[Symbol.iterator] (/foo/bar/baz.js:3:8)', { + typeName: 'Object', + functionName: '[Symbol.iterator]', + methodName: undefined, + fileName: '/foo/bar/baz.js', + lineNumber: '3', + columnNumber: '8' + }) + }) + + it('should parse frame with a class name and function name with weird characters', () => { + assertStackTraceWithFrame(' at Object.foo [a (b) []] (/foo/bar/baz.js:3:8)', { + typeName: 'Object', + functionName: 'foo [a (b) []]', + methodName: undefined, + fileName: '/foo/bar/baz.js', + lineNumber: '3', + columnNumber: '8' + }) + }) + + it('should parse frame with an anonymous function', () => { + assertStackTraceWithFrame(' at (/foo/bar/baz.js:3:8)', { + typeName: undefined, + functionName: '', + methodName: undefined, + fileName: '/foo/bar/baz.js', + lineNumber: '3', + columnNumber: '8' + }) 
+ }) + + it('should parse frame with a class name and an anonymous function', () => { + assertStackTraceWithFrame(' at Object. (/foo/bar/baz.js:3:8)', { + typeName: 'Object', + functionName: '', + methodName: undefined, + fileName: '/foo/bar/baz.js', + lineNumber: '3', + columnNumber: '8' + }) + }) + + it('should parse frame with a class name and a period in the function name', () => { + assertStackTraceWithFrame(' at Object.foo.bar (/foo/bar/baz.js:3:8)', { + typeName: 'Object', + functionName: 'foo.bar', + methodName: undefined, + fileName: '/foo/bar/baz.js', + lineNumber: '3', + columnNumber: '8' + }) + }) + + it('should parse frame that calls a constructor', () => { + assertStackTraceWithFrame(' at new Foo (/foo/bar/baz.js:3:8)', { + typeName: undefined, + functionName: 'Foo', + methodName: undefined, + fileName: '/foo/bar/baz.js', + lineNumber: '3', + columnNumber: '8' + }) + }) + + it('should parse frame that is async', () => { + assertStackTraceWithFrame(' at async foo (/foo/bar/baz.js:3:8)', { + typeName: undefined, + functionName: 'foo', + methodName: undefined, + fileName: '/foo/bar/baz.js', + lineNumber: '3', + columnNumber: '8' + }) + }) + + describe('eval', () => { + it('should parse frame with eval (normal case - anonymous)', () => { + assertStackTraceWithFrame(' at eval (eval at (/foo/bar/baz.js:1:2), :3:4)', { + lineNumber: '1', + columnNumber: '2', + fileName: '/foo/bar/baz.js', + functionName: 'eval', + methodName: undefined, + typeName: undefined + }) + }) + + it('should parse frame with eval (normal case - not anonymous)', () => { + assertStackTraceWithFrame(' at eval (eval at foo (/foo/bar/baz.js:1:2), :3:4)', { + lineNumber: '1', + columnNumber: '2', + fileName: '/foo/bar/baz.js', + functionName: 'eval', + methodName: undefined, + typeName: undefined + }) + }) + + it('should parse frame with eval (weird filename)', () => { + assertStackTraceWithFrame(' at eval (eval at fooeval (a file with eval .js:1:2), :3:4)', { + lineNumber: '1', + columnNumber: '2', + fileName: 'a file with eval .js', + functionName: 'eval', + methodName: undefined, + typeName: undefined + }) + }) + + it('should parse frame with eval (normal case - nested eval)', () => { + const frame = ' at eval (eval at (eval at D (/foo/bar/baz.js:1:2)), :3:4)' + assertStackTraceWithFrame(frame, { + lineNumber: '1', + columnNumber: '2', + fileName: '/foo/bar/baz.js', + functionName: 'eval', + methodName: undefined, + typeName: undefined + }) + }) + }) + + it('should parse frame from native code', () => { + assert.deepStrictEqual(parseUserLandFrames(genStackTrace(' at foo (native)')), []) + }) + + it('should parse frame from an unknown location', () => { + assert.deepStrictEqual(parseUserLandFrames(genStackTrace(' at foo (unknown location)')), []) + }) + + it('should parse frame with an anonymous location', () => { + assert.deepStrictEqual(parseUserLandFrames(genStackTrace(' at foo ()')), []) + }) + + it('should parse frame from an Node.js core', () => { + assert.deepStrictEqual(parseUserLandFrames(genStackTrace(' at foo (node:vm:137:12)')), []) + }) + + it('should parse frame where filename that contains whitespace and parentheses', () => { + assertStackTraceWithFrame(' at X. 
(/USER/Db (Person)/x/y.js:14:11)', { + typeName: 'X', + functionName: '', + methodName: undefined, + fileName: '/USER/Db (Person)/x/y.js', + lineNumber: '14', + columnNumber: '11' + }) + }) + + describe('user-land frame', () => { + it('should should only return user-land frames', () => { + const stack = genStackTraceWithManyNonUserLandFramesAnd( + ` at foo (${join(__dirname, 'bar.js')}:123:456)`, + ` at foo (${join(__dirname, 'baz.js')}:1:2)` + ) + assert.deepStrictEqual(parseUserLandFrames(stack, Infinity), [{ + columnNumber: '456', + fileName: join(__dirname, 'bar.js'), + functionName: 'foo', + lineNumber: '123', + methodName: undefined, + typeName: undefined + }, { + columnNumber: '2', + fileName: join(__dirname, 'baz.js'), + functionName: 'foo', + lineNumber: '1', + methodName: undefined, + typeName: undefined + }]) + }) + + it('should return an emtpy array if there are no user-land frames', () => { + const stack = genStackTraceWithManyNonUserLandFramesAnd( + ` at foo (${join(__dirname, 'node_modules', 'bar.js')}:123:456)` + ) + assert.deepStrictEqual(parseUserLandFrames(stack, Infinity), []) + }) + }) + + describe('limit', () => { + it('should return the correct number of frames', () => { + const stack = genStackTraceWithManyNonUserLandFramesAnd( + ` at foo (${join(__dirname, 'bar.js')}:123:456)`, + ` at foo (${join(__dirname, 'baz.js')}:1:2)` + ) + assert.strictEqual(parseUserLandFrames(stack, 1).length, 1) + assert.strictEqual(parseUserLandFrames(stack, 2).length, 2) + assert.strictEqual(parseUserLandFrames(stack, 3).length, 2) + assert.strictEqual(parseUserLandFrames(stack, 4).length, 2) + assert.strictEqual(parseUserLandFrames(stack, 5).length, 2) + }) + }) + + function genStackTraceWithManyNonUserLandFramesAnd (...frames) { + return `Error: multi\nline\n${nonUserLandFrame}\n${frames.join('\n')}\n${nonUserLandFrame}` + } + + function genStackTrace (frameStr) { + return `Error: multi\nline\n${frameStr}\n${frameStr}` + } + + function assertStackTraceWithFrame (frame, expected) { + assertStackTrace(parseUserLandFrames(genStackTrace(frame)), expected) + } + + function assertStackTrace (frames, expected) { + assert.strictEqual(frames.length, 2, 'Expected two stack frames, got ' + frames.length) + assert.deepStrictEqual(frames[0], frames[1], 'Expected the two stack frames to be identical') + assert.deepStrictEqual(frames[0], expected) + } }) }) From 1c6b5ccf88f94183484f275f07e2e42ff96519c1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mario=20Vidal=20Dom=C3=ADnguez?= <60353145+Mariovido@users.noreply.github.com> Date: Mon, 14 Jul 2025 10:00:04 +0200 Subject: [PATCH 24/53] [test-optimization] [SDTEST-2290] Remove Cucumber and Selenium tests (#6086) --- .github/workflows/test-optimization.yml | 18 ++++++++++-------- .../automatic-log-submission.spec.js | 9 +++++---- scripts/verify-ci-config.js | 3 ++- 3 files changed, 17 insertions(+), 13 deletions(-) diff --git a/.github/workflows/test-optimization.yml b/.github/workflows/test-optimization.yml index b516300e6c7..617a2cffb83 100644 --- a/.github/workflows/test-optimization.yml +++ b/.github/workflows/test-optimization.yml @@ -75,8 +75,9 @@ jobs: integration-ci: strategy: matrix: + # TODO: Add cucumber and selenium once cucumber+12 is fixed version: [oldest, latest] - framework: [cucumber, selenium, jest, mocha] + framework: [jest, mocha] runs-on: ubuntu-latest env: DD_SERVICE: dd-trace-js-integration-tests @@ -160,13 +161,14 @@ jobs: env: NODE_OPTIONS: '-r ./ci/init' - plugin-cucumber: - runs-on: ubuntu-latest - env: - PLUGINS: cucumber - 
steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: ./.github/actions/plugins/test + # TODO: Remove comment once cucumber+12 is fixed + # plugin-cucumber: + # runs-on: ubuntu-latest + # env: + # PLUGINS: cucumber + # steps: + # - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + # - uses: ./.github/actions/plugins/test # TODO: fix performance issues and test more Node versions plugin-cypress: diff --git a/integration-tests/ci-visibility/automatic-log-submission.spec.js b/integration-tests/ci-visibility/automatic-log-submission.spec.js index 5e25ab89ffc..9ed91cd2946 100644 --- a/integration-tests/ci-visibility/automatic-log-submission.spec.js +++ b/integration-tests/ci-visibility/automatic-log-submission.spec.js @@ -59,10 +59,11 @@ describe('test visibility automatic log submission', () => { name: 'jest', command: 'node ./node_modules/jest/bin/jest --config ./ci-visibility/automatic-log-submission/config-jest.js' }, - { - name: 'cucumber', - command: './node_modules/.bin/cucumber-js ci-visibility/automatic-log-submission-cucumber/*.feature' - }, + // TODO: Uncomment once cucumber+12 is fixed + // { + // name: 'cucumber', + // command: './node_modules/.bin/cucumber-js ci-visibility/automatic-log-submission-cucumber/*.feature' + // }, { name: 'playwright', command: './node_modules/.bin/playwright test -c playwright.config.js', diff --git a/scripts/verify-ci-config.js b/scripts/verify-ci-config.js index 7ea874e60c7..2432c6dbcc7 100644 --- a/scripts/verify-ci-config.js +++ b/scripts/verify-ci-config.js @@ -150,7 +150,8 @@ checkPlugins(path.join(__dirname, '..', '.github', 'workflows', 'test-optimizati .filter(file => fs.existsSync(path.join(__dirname, '..', 'packages', file, 'test'))) .map(file => file.replace('datadog-plugin-', '')) for (const plugin of allPlugins) { - if (!allTestedPlugins.has(plugin)) { + // TODO: Remove check of cucumber once cucumber+12 is fixed + if (!allTestedPlugins.has(plugin) && plugin !== 'cucumber') { pluginErrorMsg(plugin, 'ERROR', 'Plugin is tested but not in at least one GitHub workflow') } } From 2406c6dc3ee6f23ba0e44dadabad564659347cd5 Mon Sep 17 00:00:00 2001 From: Sam Brenner <106700075+sabrenner@users.noreply.github.com> Date: Mon, 14 Jul 2025 09:52:54 -0400 Subject: [PATCH 25/53] chore(llmobs): add internal ability to tag tool id on tool messages (#6022) * add ability to tag tool id on tool messages * add type doc * fix test * remove documenting --- packages/dd-trace/src/llmobs/tagger.js | 11 ++++- packages/dd-trace/test/llmobs/tagger.spec.js | 49 ++++++++++++++++++++ 2 files changed, 59 insertions(+), 1 deletion(-) diff --git a/packages/dd-trace/src/llmobs/tagger.js b/packages/dd-trace/src/llmobs/tagger.js index 8d1288c6a0f..7746dfaa7d1 100644 --- a/packages/dd-trace/src/llmobs/tagger.js +++ b/packages/dd-trace/src/llmobs/tagger.js @@ -281,6 +281,7 @@ class LLMObsTagger { const { content = '', role } = message const toolCalls = message.toolCalls + const toolId = message.toolId const messageObj = { content } const valid = typeof content === 'string' @@ -288,7 +289,7 @@ class LLMObsTagger { this.#handleFailure('Message content must be a string.', 'invalid_io_messages') } - const condition = this.#tagConditionalString(role, 'Message role', messageObj, 'role') + let condition = this.#tagConditionalString(role, 'Message role', messageObj, 'role') if (toolCalls) { const filteredToolCalls = this.#filterToolCalls(toolCalls) @@ -298,6 +299,14 @@ class LLMObsTagger { } } + if (toolId) { + if (role 
=== 'tool') { + condition = this.#tagConditionalString(toolId, 'Tool ID', messageObj, 'tool_id') + } else { + log.warn(`Tool ID for tool message not associated with a "tool" role, instead got "${role}"`) + } + } + if (valid && condition) { messages.push(messageObj) } diff --git a/packages/dd-trace/test/llmobs/tagger.spec.js b/packages/dd-trace/test/llmobs/tagger.spec.js index 69f558fbcab..b1da1fdf2ab 100644 --- a/packages/dd-trace/test/llmobs/tagger.spec.js +++ b/packages/dd-trace/test/llmobs/tagger.spec.js @@ -391,6 +391,44 @@ describe('tagger', () => { expect(() => tagger.tagLLMIO(span, messages, undefined)).to.throw() }) }) + + describe('tool message tagging', () => { + it('tags a span with a tool message', () => { + const messages = [ + { role: 'tool', content: 'The weather in San Francisco is sunny', toolId: '123' } + ] + + tagger._register(span) + tagger.tagLLMIO(span, messages, undefined) + expect(Tagger.tagMap.get(span)).to.deep.equal({ + '_ml_obs.meta.input.messages': [ + { role: 'tool', content: 'The weather in San Francisco is sunny', tool_id: '123' } + ] + }) + }) + + it('throws if the tool id is not a string', () => { + const messages = [ + { role: 'tool', content: 'The weather in San Francisco is sunny', toolId: 123 } + ] + + expect(() => tagger.tagLLMIO(span, messages, undefined)).to.throw() + }) + + it('logs a warning if the tool id is not associated with a tool role', () => { + const messages = [ + { role: 'user', content: 'The weather in San Francisco is sunny', toolId: '123' } + ] + + tagger._register(span) + tagger.tagLLMIO(span, messages, undefined) + + const messageTags = Tagger.tagMap.get(span)['_ml_obs.meta.input.messages'] + expect(messageTags[0]).to.not.have.property('tool_id') + + expect(logger.warn).to.have.been.calledOnce + }) + }) }) describe('tagEmbeddingIO', () => { @@ -640,6 +678,17 @@ describe('tagger', () => { expect(logger.warn.callCount).to.equal(5) // 4 for tool call + 1 for role }) }) + + it('logs a warning if the tool id is not a string', () => { + const messages = [ + { role: 'tool', content: 'The weather in San Francisco is sunny', toolId: 123 } + ] + + tagger._register(span) + tagger.tagLLMIO(span, messages, undefined) + expect(Tagger.tagMap.get(span)).to.not.have.property('_ml_obs.meta.input.messages') + expect(logger.warn).to.have.been.calledOnce + }) }) }) }) From d364bcb68d0624c7e11030480de34281e5db5aad Mon Sep 17 00:00:00 2001 From: Thomas Watson Date: Mon, 14 Jul 2025 16:06:16 +0200 Subject: [PATCH 26/53] ci: exercise more of the guardrails code during testing (#6034) --- .github/workflows/platform.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/platform.yml b/.github/workflows/platform.yml index 37e7675732a..eb2ad7965de 100644 --- a/.github/workflows/platform.yml +++ b/.github/workflows/platform.yml @@ -304,6 +304,8 @@ jobs: matrix: version: ['0.8', '0.10', '0.12', '4', '6', '8', '10', '12'] runs-on: ubuntu-latest + env: + DD_TRACE_DEBUG: 'true' # This exercises more of the guardrails code steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - uses: ./.github/actions/node From afc0d949174ef3b6b5a3bf18915cac067603b0b0 Mon Sep 17 00:00:00 2001 From: Thomas Watson Date: Mon, 14 Jul 2025 16:06:37 +0200 Subject: [PATCH 27/53] [DI] Add support for loading probes from JSON file (#5941) Add support for loading Dynamic Instrumentation / Live Debugger probes via a JSON file instead of Remote Configuration (RC). 
To use this feature, specify a path to the JSON file using either the environment variable `DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE` or the programmatic configuration `dynamicInstrumentation.probeFile`. This can be used in combination with RC. The JSON should be an array of probe objects in the same format as received via the RC `config` object, for example: [{ id: '100c9a5c-45ad-49dc-818b-c570d31e11d1', version: 0, type: 'LOG_PROBE', where: { sourceFile: 'index.js', lines: ['25'] }, template: 'Hello World', segments: [{ str: 'Hello World' }], captureSnapshot: true, capture: { maxReferenceDepth: 3 }, sampling: { snapshotsPerSecond: 100 } }] --- integration-tests/debugger/basic.spec.js | 54 +++++++++++-------- .../dynamic-instrumentation/index.js | 6 +-- packages/dd-trace/src/config.js | 4 ++ .../debugger/devtools_client/remote_config.js | 13 +++-- packages/dd-trace/src/debugger/index.js | 45 ++++++++++++---- .../src/supported-configurations.json | 1 + packages/dd-trace/test/config.spec.js | 11 ++++ 7 files changed, 96 insertions(+), 38 deletions(-) diff --git a/integration-tests/debugger/basic.spec.js b/integration-tests/debugger/basic.spec.js index 7fbdce6e236..3bd0905ad9e 100644 --- a/integration-tests/debugger/basic.spec.js +++ b/integration-tests/debugger/basic.spec.js @@ -1,6 +1,8 @@ 'use strict' +const { writeFileSync } = require('fs') const os = require('os') +const { join } = require('path') const { assert } = require('chai') const { pollInterval, setup } = require('./utils') @@ -188,7 +190,7 @@ describe('Dynamic Instrumentation', function () { it( 'should send expected error diagnostics messages if probe doesn\'t conform to expected schema', - unsupporedOrInvalidProbesTest('bad config!!!', { status: 'ERROR' }) + unsupporedOrInvalidProbesTest({ invalid: 'config' }, { status: 'ERROR' }) ) it( @@ -493,10 +495,7 @@ describe('Dynamic Instrumentation', function () { }) describe('input messages', function () { - it( - 'should capture and send expected payload when a log line probe is triggered', - testBasicInputWithDD.bind(null, t) - ) + it('should capture and send expected payload when a log line probe is triggered', testBasicInput.bind(null, t)) it('should respond with updated message if probe message is updated', function (done) { const expectedMessages = ['Hello World!', 'Hello Updated World!'] @@ -743,6 +742,18 @@ describe('Dynamic Instrumentation', function () { }) }) + describe('probe file', function () { + const probeFile = join(os.tmpdir(), 'probes.json') + const t = setup({ + env: { DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE: probeFile }, + dependencies: ['fastify'] + }) + const probe = t.generateProbeConfig() + writeFileSync(probeFile, JSON.stringify([probe])) + + it('should install probes from a probe file', testBasicInputWithoutRC.bind(null, t, probe)) + }) + describe('DD_TRACING_ENABLED=true, DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED=true', function () { const t = setup({ env: { DD_TRACING_ENABLED: true, DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED: true }, @@ -750,10 +761,7 @@ describe('Dynamic Instrumentation', function () { }) describe('input messages', function () { - it( - 'should capture and send expected payload when a log line probe is triggered', - testBasicInputWithDD.bind(null, t) - ) + it('should capture and send expected payload when a log line probe is triggered', testBasicInput.bind(null, t)) }) }) @@ -764,10 +772,7 @@ describe('Dynamic Instrumentation', function () { }) describe('input messages', function () { - it( - 'should capture and send expected payload when 
a log line probe is triggered', - testBasicInputWithDD.bind(null, t) - ) + it('should capture and send expected payload when a log line probe is triggered', testBasicInput.bind(null, t)) }) }) @@ -786,10 +791,19 @@ describe('Dynamic Instrumentation', function () { }) }) -function testBasicInputWithDD (t, done) { - let traceId, spanId, dd - +function testBasicInput (t, done) { t.triggerBreakpoint() + setupAssertionListeners(t, done) + t.agent.addRemoteConfig(t.rcConfig) +} + +function testBasicInputWithoutRC (t, probe, done) { + t.triggerBreakpoint(false) + setupAssertionListeners(t, done, probe) +} + +function setupAssertionListeners (t, done, probe) { + let traceId, spanId, dd t.agent.on('message', ({ payload }) => { const span = payload.find((arr) => arr[0].name === 'fastify.request')?.[0] @@ -802,7 +816,7 @@ function testBasicInputWithDD (t, done) { }) t.agent.on('debugger-input', ({ payload }) => { - assertBasicInputPayload(t, payload) + assertBasicInputPayload(t, payload, probe) payload = payload[0] assert.isObject(payload.dd) @@ -816,8 +830,6 @@ function testBasicInputWithDD (t, done) { assertDD() }) - t.agent.addRemoteConfig(t.rcConfig) - function assertDD () { if (!traceId || !spanId || !dd) return assert.strictEqual(dd.trace_id, traceId) @@ -838,7 +850,7 @@ function testBasicInputWithoutDD (t, done) { t.agent.addRemoteConfig(t.rcConfig) } -function assertBasicInputPayload (t, payload) { +function assertBasicInputPayload (t, payload, probe = t.rcConfig.config) { assert.isArray(payload) assert.lengthOf(payload, 1) payload = payload[0] @@ -857,7 +869,7 @@ function assertBasicInputPayload (t, payload) { debugger: { snapshot: { probe: { - id: t.rcConfig.config.id, + id: probe.id, version: 0, location: { file: t.breakpoint.deployedFile, lines: [String(t.breakpoint.line)] } }, diff --git a/packages/dd-trace/src/ci-visibility/dynamic-instrumentation/index.js b/packages/dd-trace/src/ci-visibility/dynamic-instrumentation/index.js index aae03250bb6..f687e194770 100644 --- a/packages/dd-trace/src/ci-visibility/dynamic-instrumentation/index.js +++ b/packages/dd-trace/src/ci-visibility/dynamic-instrumentation/index.js @@ -62,7 +62,7 @@ class TestVisDynamicInstrumentation { log.debug('Starting Test Visibility - Dynamic Instrumentation client...') - const rcChannel = new MessageChannel() // mock channel + const probeChannel = new MessageChannel() // mock channel const configChannel = new MessageChannel() // mock channel this.worker = new Worker( @@ -84,14 +84,14 @@ class TestVisDynamicInstrumentation { workerData: { config: this._config.serialize(), parentThreadId, - rcPort: rcChannel.port1, + probePort: probeChannel.port1, configPort: configChannel.port1, breakpointSetChannel: this.breakpointSetChannel.port1, breakpointHitChannel: this.breakpointHitChannel.port1, breakpointRemoveChannel: this.breakpointRemoveChannel.port1 }, transferList: [ - rcChannel.port1, + probeChannel.port1, configChannel.port1, this.breakpointSetChannel.port1, this.breakpointHitChannel.port1, diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index 79a10352cd3..bd33f3cedfc 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -516,6 +516,7 @@ class Config { defaults['dogstatsd.port'] = '8125' defaults.dsmEnabled = false defaults['dynamicInstrumentation.enabled'] = false + defaults['dynamicInstrumentation.probeFile'] = undefined defaults['dynamicInstrumentation.redactedIdentifiers'] = [] defaults['dynamicInstrumentation.redactionExcludedIdentifiers'] = [] 
defaults['dynamicInstrumentation.uploadIntervalSeconds'] = 1 @@ -707,6 +708,7 @@ class Config { DD_DOGSTATSD_HOST, DD_DOGSTATSD_PORT, DD_DYNAMIC_INSTRUMENTATION_ENABLED, + DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE, DD_DYNAMIC_INSTRUMENTATION_REDACTED_IDENTIFIERS, DD_DYNAMIC_INSTRUMENTATION_REDACTION_EXCLUDED_IDENTIFIERS, DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS, @@ -883,6 +885,7 @@ class Config { this._setString(env, 'dogstatsd.port', DD_DOGSTATSD_PORT) this._setBoolean(env, 'dsmEnabled', DD_DATA_STREAMS_ENABLED) this._setBoolean(env, 'dynamicInstrumentation.enabled', DD_DYNAMIC_INSTRUMENTATION_ENABLED) + this._setString(env, 'dynamicInstrumentation.probeFile', DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE) this._setArray(env, 'dynamicInstrumentation.redactedIdentifiers', DD_DYNAMIC_INSTRUMENTATION_REDACTED_IDENTIFIERS) this._setArray( env, @@ -1108,6 +1111,7 @@ class Config { } this._setBoolean(opts, 'dsmEnabled', options.dsmEnabled) this._setBoolean(opts, 'dynamicInstrumentation.enabled', options.dynamicInstrumentation?.enabled) + this._setString(opts, 'dynamicInstrumentation.probeFile', options.dynamicInstrumentation?.probeFile) this._setArray( opts, 'dynamicInstrumentation.redactedIdentifiers', diff --git a/packages/dd-trace/src/debugger/devtools_client/remote_config.js b/packages/dd-trace/src/debugger/devtools_client/remote_config.js index db2957640f4..1254e4b1d19 100644 --- a/packages/dd-trace/src/debugger/devtools_client/remote_config.js +++ b/packages/dd-trace/src/debugger/devtools_client/remote_config.js @@ -1,6 +1,6 @@ 'use strict' -const { workerData: { rcPort } } = require('node:worker_threads') +const { workerData: { probePort } } = require('node:worker_threads') const { addBreakpoint, removeBreakpoint, modifyBreakpoint } = require('./breakpoints') const { ackReceived, ackInstalled, ackError } = require('./status') const log = require('../../log') @@ -32,16 +32,19 @@ const log = require('../../log') // sampling: { snapshotsPerSecond: 5000 }, // evaluateAt: 'EXIT' // only used for method probes // } -rcPort.on('message', async ({ action, conf: probe, ackId }) => { +probePort.on('message', async ({ action, probe, ackId }) => { try { await processMsg(action, probe) - rcPort.postMessage({ ackId }) + probePort.postMessage({ ackId }) } catch (err) { - rcPort.postMessage({ ackId, error: err }) + probePort.postMessage({ ackId, error: err }) ackError(err, probe) } }) -rcPort.on('messageerror', (err) => log.error('[debugger:devtools_client] received "messageerror" on RC port', err)) +probePort.on( + 'messageerror', + (err) => log.error('[debugger:devtools_client] received "messageerror" on probe port', err) +) async function processMsg (action, probe) { log.debug( diff --git a/packages/dd-trace/src/debugger/index.js b/packages/dd-trace/src/debugger/index.js index 45d10e1e7c7..a31d80fcacb 100644 --- a/packages/dd-trace/src/debugger/index.js +++ b/packages/dd-trace/src/debugger/index.js @@ -1,5 +1,6 @@ 'use strict' +const { readFile } = require('fs') const { types } = require('util') const { join } = require('path') const { Worker, MessageChannel, threadId: parentThreadId } = require('worker_threads') @@ -23,17 +24,24 @@ function start (config, rc) { log.debug('[debugger] Starting Dynamic Instrumentation client...') const rcAckCallbacks = new Map() - const rcChannel = new MessageChannel() + const probeChannel = new MessageChannel() configChannel = new MessageChannel() process[Symbol.for('datadog:node:util:types')] = types - rc.setProductHandler('LIVE_DEBUGGING', (action, conf, id, ack) => { 
+ readProbeFile(config.dynamicInstrumentation.probeFile, (probes) => { + const action = 'apply' + for (const probe of probes) { + probeChannel.port2.postMessage({ action, probe }) + } + }) + + rc.setProductHandler('LIVE_DEBUGGING', (action, probe, id, ack) => { rcAckCallbacks.set(++ackId, ack) - rcChannel.port2.postMessage({ action, conf, ackId }) + probeChannel.port2.postMessage({ action, probe, ackId }) }) - rcChannel.port2.on('message', ({ ackId, error }) => { + probeChannel.port2.on('message', ({ ackId, error }) => { const ack = rcAckCallbacks.get(ackId) if (ack === undefined) { // This should never happen, but just in case something changes in the future, we should guard against it @@ -44,7 +52,7 @@ function start (config, rc) { ack(error) rcAckCallbacks.delete(ackId) }) - rcChannel.port2.on('messageerror', (err) => log.error('[debugger] received "messageerror" on RC port', err)) + probeChannel.port2.on('messageerror', (err) => log.error('[debugger] received "messageerror" on probe port', err)) worker = new Worker( join(__dirname, 'devtools_client', 'index.js'), @@ -54,10 +62,10 @@ function start (config, rc) { workerData: { config: config.serialize(), parentThreadId, - rcPort: rcChannel.port1, + probePort: probeChannel.port1, configPort: configChannel.port1 }, - transferList: [rcChannel.port1, configChannel.port1] + transferList: [probeChannel.port1, configChannel.port1] } ) @@ -84,8 +92,8 @@ function start (config, rc) { }) worker.unref() - rcChannel.port1.unref() - rcChannel.port2.unref() + probeChannel.port1.unref() + probeChannel.port2.unref() configChannel.port1.unref() configChannel.port2.unref() } @@ -94,3 +102,22 @@ function configure (config) { if (configChannel === null) return configChannel.port2.postMessage(config.serialize()) } + +function readProbeFile (path, cb) { + if (!path) return + + log.debug('[debugger] Reading probe file: %s', path) + readFile(path, 'utf8', (err, data) => { + if (err) { + log.error('[debugger] Failed to read probe file: %s', path, err) + return + } + try { + const parsedData = JSON.parse(data) + log.debug('[debugger] Successfully parsed probe file: %s', path) + cb(parsedData) + } catch (err) { + log.error('[debugger] Probe file (%s) is not valid JSON', path, err) + } + }) +} diff --git a/packages/dd-trace/src/supported-configurations.json b/packages/dd-trace/src/supported-configurations.json index 8d07a437be9..c5733da3c94 100644 --- a/packages/dd-trace/src/supported-configurations.json +++ b/packages/dd-trace/src/supported-configurations.json @@ -56,6 +56,7 @@ "DD_DOGSTATSD_HOST": ["A"], "DD_DOGSTATSD_PORT": ["A"], "DD_DYNAMIC_INSTRUMENTATION_ENABLED": ["A"], + "DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE": ["A"], "DD_DYNAMIC_INSTRUMENTATION_REDACTED_IDENTIFIERS": ["A"], "DD_DYNAMIC_INSTRUMENTATION_REDACTION_EXCLUDED_IDENTIFIERS": ["A"], "DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS": ["A"], diff --git a/packages/dd-trace/test/config.spec.js b/packages/dd-trace/test/config.spec.js index 715c00df7b6..e16b90e7bea 100644 --- a/packages/dd-trace/test/config.spec.js +++ b/packages/dd-trace/test/config.spec.js @@ -293,6 +293,7 @@ describe('Config', () => { expect(config).to.have.nested.property('dogstatsd.hostname', '127.0.0.1') expect(config).to.have.nested.property('dogstatsd.port', '8125') expect(config).to.have.nested.property('dynamicInstrumentation.enabled', false) + expect(config).to.have.nested.property('dynamicInstrumentation.probeFile', undefined) expect(config).to.have.nested.deep.property('dynamicInstrumentation.redactedIdentifiers', []) 
expect(config).to.have.nested.deep.property('dynamicInstrumentation.redactionExcludedIdentifiers', []) expect(config).to.have.nested.property('dynamicInstrumentation.uploadIntervalSeconds', 1) @@ -393,6 +394,7 @@ describe('Config', () => { { name: 'dogstatsd.port', value: '8125', origin: 'default' }, { name: 'dsmEnabled', value: false, origin: 'default' }, { name: 'dynamicInstrumentation.enabled', value: false, origin: 'default' }, + { name: 'dynamicInstrumentation.probeFile', value: undefined, origin: 'default' }, { name: 'dynamicInstrumentation.redactedIdentifiers', value: [], origin: 'default' }, { name: 'dynamicInstrumentation.redactionExcludedIdentifiers', value: [], origin: 'default' }, { name: 'dynamicInstrumentation.uploadIntervalSeconds', value: 1, origin: 'default' }, @@ -554,6 +556,7 @@ describe('Config', () => { process.env.DD_DOGSTATSD_HOSTNAME = 'dsd-agent' process.env.DD_DOGSTATSD_PORT = '5218' process.env.DD_DYNAMIC_INSTRUMENTATION_ENABLED = 'true' + process.env.DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE = 'probes.json' process.env.DD_DYNAMIC_INSTRUMENTATION_REDACTED_IDENTIFIERS = 'foo,bar' process.env.DD_DYNAMIC_INSTRUMENTATION_REDACTION_EXCLUDED_IDENTIFIERS = 'a,b,c' process.env.DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS = '0.1' @@ -672,6 +675,7 @@ describe('Config', () => { expect(config).to.have.nested.property('dogstatsd.hostname', 'dsd-agent') expect(config).to.have.nested.property('dogstatsd.port', '5218') expect(config).to.have.nested.property('dynamicInstrumentation.enabled', true) + expect(config).to.have.nested.property('dynamicInstrumentation.probeFile', 'probes.json') expect(config).to.have.nested.deep.property('dynamicInstrumentation.redactedIdentifiers', ['foo', 'bar']) expect(config).to.have.nested.deep.property('dynamicInstrumentation.redactionExcludedIdentifiers', ['a', 'b', 'c']) expect(config).to.have.nested.property('dynamicInstrumentation.uploadIntervalSeconds', 0.1) @@ -776,6 +780,7 @@ describe('Config', () => { { name: 'dogstatsd.hostname', value: 'dsd-agent', origin: 'env_var' }, { name: 'dogstatsd.port', value: '5218', origin: 'env_var' }, { name: 'dynamicInstrumentation.enabled', value: true, origin: 'env_var' }, + { name: 'dynamicInstrumentation.probeFile', value: 'probes.json', origin: 'env_var' }, { name: 'dynamicInstrumentation.redactedIdentifiers', value: ['foo', 'bar'], origin: 'env_var' }, { name: 'dynamicInstrumentation.redactionExcludedIdentifiers', value: ['a', 'b', 'c'], origin: 'env_var' }, { name: 'dynamicInstrumentation.uploadIntervalSeconds', value: 0.1, origin: 'env_var' }, @@ -991,6 +996,7 @@ describe('Config', () => { }, dynamicInstrumentation: { enabled: true, + probeFile: 'probes.json', redactedIdentifiers: ['foo', 'bar'], redactionExcludedIdentifiers: ['a', 'b', 'c'], uploadIntervalSeconds: 0.1 @@ -1082,6 +1088,7 @@ describe('Config', () => { expect(config).to.have.nested.property('dogstatsd.hostname', 'agent-dsd') expect(config).to.have.nested.property('dogstatsd.port', '5218') expect(config).to.have.nested.property('dynamicInstrumentation.enabled', true) + expect(config).to.have.nested.property('dynamicInstrumentation.probeFile', 'probes.json') expect(config).to.have.nested.deep.property('dynamicInstrumentation.redactedIdentifiers', ['foo', 'bar']) expect(config).to.have.nested.deep.property('dynamicInstrumentation.redactionExcludedIdentifiers', ['a', 'b', 'c']) expect(config).to.have.nested.property('dynamicInstrumentation.uploadIntervalSeconds', 0.1) @@ -1165,6 +1172,7 @@ describe('Config', () => { { name: 
'dogstatsd.hostname', value: 'agent-dsd', origin: 'code' }, { name: 'dogstatsd.port', value: '5218', origin: 'code' }, { name: 'dynamicInstrumentation.enabled', value: true, origin: 'code' }, + { name: 'dynamicInstrumentation.probeFile', value: 'probes.json', origin: 'code' }, { name: 'dynamicInstrumentation.redactedIdentifiers', value: ['foo', 'bar'], origin: 'code' }, { name: 'dynamicInstrumentation.redactionExcludedIdentifiers', value: ['a', 'b', 'c'], origin: 'code' }, { name: 'dynamicInstrumentation.uploadIntervalSeconds', value: 0.1, origin: 'code' }, @@ -1386,6 +1394,7 @@ describe('Config', () => { process.env.DD_CODE_ORIGIN_FOR_SPANS_EXPERIMENTAL_EXIT_SPANS_ENABLED = 'true' process.env.DD_DOGSTATSD_PORT = '5218' process.env.DD_DYNAMIC_INSTRUMENTATION_ENABLED = 'true' + process.env.DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE = 'probes.json' process.env.DD_DYNAMIC_INSTRUMENTATION_REDACTED_IDENTIFIERS = 'foo,bar' process.env.DD_DYNAMIC_INSTRUMENTATION_REDACTION_EXCLUDED_IDENTIFIERS = 'a,b,c' process.env.DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS = '0.1' @@ -1476,6 +1485,7 @@ describe('Config', () => { }, dynamicInstrumentation: { enabled: false, + probeFile: 'probes2.json', redactedIdentifiers: ['foo2', 'bar2'], redactionExcludedIdentifiers: ['a2', 'b2'], uploadIntervalSeconds: 0.2 @@ -1563,6 +1573,7 @@ describe('Config', () => { expect(config).to.have.nested.property('dogstatsd.hostname', 'server') expect(config).to.have.nested.property('dogstatsd.port', '8888') expect(config).to.have.nested.property('dynamicInstrumentation.enabled', false) + expect(config).to.have.nested.property('dynamicInstrumentation.probeFile', 'probes2.json') expect(config).to.have.nested.deep.property('dynamicInstrumentation.redactedIdentifiers', ['foo2', 'bar2']) expect(config).to.have.nested.deep.property('dynamicInstrumentation.redactionExcludedIdentifiers', ['a2', 'b2']) expect(config).to.have.nested.property('dynamicInstrumentation.uploadIntervalSeconds', 0.2) From 65d6a3308e095aca636dcd73ebb57db2eaeecc1f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mario=20Vidal=20Dom=C3=ADnguez?= <60353145+Mariovido@users.noreply.github.com> Date: Mon, 14 Jul 2025 16:24:15 +0200 Subject: [PATCH 28/53] [test-optimization] [SDTEST-2290] Fix support for Cucumber 12+ (#6087) --- .github/workflows/test-optimization.yml | 18 ++++++++---------- .../automatic-log-submission.spec.js | 12 +++++++----- integration-tests/cucumber/cucumber.spec.js | 3 +++ integration-tests/selenium/selenium.spec.js | 2 ++ .../datadog-instrumentations/src/cucumber.js | 18 ++++++++++-------- scripts/verify-ci-config.js | 3 +-- 6 files changed, 31 insertions(+), 25 deletions(-) diff --git a/.github/workflows/test-optimization.yml b/.github/workflows/test-optimization.yml index 617a2cffb83..b516300e6c7 100644 --- a/.github/workflows/test-optimization.yml +++ b/.github/workflows/test-optimization.yml @@ -75,9 +75,8 @@ jobs: integration-ci: strategy: matrix: - # TODO: Add cucumber and selenium once cucumber+12 is fixed version: [oldest, latest] - framework: [jest, mocha] + framework: [cucumber, selenium, jest, mocha] runs-on: ubuntu-latest env: DD_SERVICE: dd-trace-js-integration-tests @@ -161,14 +160,13 @@ jobs: env: NODE_OPTIONS: '-r ./ci/init' - # TODO: Remove comment once cucumber+12 is fixed - # plugin-cucumber: - # runs-on: ubuntu-latest - # env: - # PLUGINS: cucumber - # steps: - # - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - # - uses: ./.github/actions/plugins/test + plugin-cucumber: + runs-on: ubuntu-latest + env: + 
PLUGINS: cucumber + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: ./.github/actions/plugins/test # TODO: fix performance issues and test more Node versions plugin-cypress: diff --git a/integration-tests/ci-visibility/automatic-log-submission.spec.js b/integration-tests/ci-visibility/automatic-log-submission.spec.js index 9ed91cd2946..02a9596513b 100644 --- a/integration-tests/ci-visibility/automatic-log-submission.spec.js +++ b/integration-tests/ci-visibility/automatic-log-submission.spec.js @@ -11,6 +11,7 @@ const { } = require('../helpers') const { FakeCiVisIntake } = require('../ci-visibility-intake') const webAppServer = require('./web-app-server') +const { NODE_MAJOR } = require('../../version') describe('test visibility automatic log submission', () => { let sandbox, cwd, receiver, childProcess, webAppPort @@ -59,11 +60,10 @@ describe('test visibility automatic log submission', () => { name: 'jest', command: 'node ./node_modules/jest/bin/jest --config ./ci-visibility/automatic-log-submission/config-jest.js' }, - // TODO: Uncomment once cucumber+12 is fixed - // { - // name: 'cucumber', - // command: './node_modules/.bin/cucumber-js ci-visibility/automatic-log-submission-cucumber/*.feature' - // }, + { + name: 'cucumber', + command: './node_modules/.bin/cucumber-js ci-visibility/automatic-log-submission-cucumber/*.feature' + }, { name: 'playwright', command: './node_modules/.bin/playwright test -c playwright.config.js', @@ -76,6 +76,8 @@ describe('test visibility automatic log submission', () => { ] testFrameworks.forEach(({ name, command, getExtraEnvVars = () => ({}) }) => { + if ((NODE_MAJOR === 18 || NODE_MAJOR === 23) && name === 'cucumber') return + context(`with ${name}`, () => { it('can automatically submit logs', (done) => { let logIds, testIds diff --git a/integration-tests/cucumber/cucumber.spec.js b/integration-tests/cucumber/cucumber.spec.js index 8112cf662db..49c5827c5cc 100644 --- a/integration-tests/cucumber/cucumber.spec.js +++ b/integration-tests/cucumber/cucumber.spec.js @@ -63,6 +63,7 @@ const { DD_CAPABILITIES_IMPACTED_TESTS } = require('../../packages/dd-trace/src/plugins/util/test') const { DD_HOST_CPU_COUNT } = require('../../packages/dd-trace/src/plugins/util/env') +const { NODE_MAJOR } = require('../../version') const versions = ['7.0.0', 'latest'] @@ -74,6 +75,8 @@ const featuresPath = 'ci-visibility/features/' const fileExtension = 'js' versions.forEach(version => { + if ((NODE_MAJOR === 18 || NODE_MAJOR === 23) && version === 'latest') return + // TODO: add esm tests describe(`cucumber@${version} commonJS`, () => { let sandbox, cwd, receiver, childProcess, testOutput diff --git a/integration-tests/selenium/selenium.spec.js b/integration-tests/selenium/selenium.spec.js index 46721689c8b..e6e64c3b6ab 100644 --- a/integration-tests/selenium/selenium.spec.js +++ b/integration-tests/selenium/selenium.spec.js @@ -17,6 +17,7 @@ const { TEST_IS_RUM_ACTIVE, TEST_TYPE } = require('../../packages/dd-trace/src/plugins/util/test') +const { NODE_MAJOR } = require('../../version') const webAppServer = require('../ci-visibility/web-app-server') @@ -74,6 +75,7 @@ versionRange.forEach(version => { } ] testFrameworks.forEach(({ name, command }) => { + if ((NODE_MAJOR === 18 || NODE_MAJOR === 23) && name === 'cucumber') return context(`with ${name}`, () => { it('identifies tests using selenium as browser tests', (done) => { const assertionPromise = receiver diff --git a/packages/datadog-instrumentations/src/cucumber.js 
b/packages/datadog-instrumentations/src/cucumber.js index 729d59d899a..56ea882d3fe 100644 --- a/packages/datadog-instrumentations/src/cucumber.js +++ b/packages/datadog-instrumentations/src/cucumber.js @@ -43,6 +43,7 @@ const { CUCUMBER_WORKER_TRACE_PAYLOAD_CODE, getIsFaultyEarlyFlakeDetection } = require('../../dd-trace/src/plugins/util/test') +const satisfies = require('semifies') const isMarkedAsUnskippable = (pickle) => { return pickle.tags.some(tag => tag.name === '@datadog:unskippable') @@ -224,7 +225,7 @@ function getPickleByFile (runtimeOrCoodinator) { }, {}) } -function wrapRun (pl, isLatestVersion) { +function wrapRun (pl, isLatestVersion, version) { if (patched.has(pl)) return patched.add(pl) @@ -398,9 +399,10 @@ function wrapRun (pl, isLatestVersion) { const promise = runStep.apply(this, arguments) promise.then((result) => { - const { status, skipReason, errorMessage } = isLatestVersion - ? getStatusFromResultLatest(result) - : getStatusFromResult(result) + const finalResult = satisfies(version, '>=12.0.0') ? result.result : result + const getStatus = satisfies(version, '>=7.3.0') ? getStatusFromResultLatest : getStatusFromResult + + const { status, skipReason, errorMessage } = getStatus(finalResult) testFinishCh.publish({ isStep: true, status, skipReason, errorMessage, ...ctx.currentStore }) }) @@ -415,18 +417,18 @@ function wrapRun (pl, isLatestVersion) { }) } -function pickleHook (PickleRunner) { +function pickleHook (PickleRunner, version) { const pl = PickleRunner.default - wrapRun(pl, false) + wrapRun(pl, false, version) return PickleRunner } -function testCaseHook (TestCaseRunner) { +function testCaseHook (TestCaseRunner, version) { const pl = TestCaseRunner.default - wrapRun(pl, true) + wrapRun(pl, true, version) return TestCaseRunner } diff --git a/scripts/verify-ci-config.js b/scripts/verify-ci-config.js index 2432c6dbcc7..7ea874e60c7 100644 --- a/scripts/verify-ci-config.js +++ b/scripts/verify-ci-config.js @@ -150,8 +150,7 @@ checkPlugins(path.join(__dirname, '..', '.github', 'workflows', 'test-optimizati .filter(file => fs.existsSync(path.join(__dirname, '..', 'packages', file, 'test'))) .map(file => file.replace('datadog-plugin-', '')) for (const plugin of allPlugins) { - // TODO: Remove check of cucumber once cucumber+12 is fixed - if (!allTestedPlugins.has(plugin) && plugin !== 'cucumber') { + if (!allTestedPlugins.has(plugin)) { pluginErrorMsg(plugin, 'ERROR', 'Plugin is tested but not in at least one GitHub workflow') } } From 225d9417b5f2d28b816d0734742ba0f5a5a4ef64 Mon Sep 17 00:00:00 2001 From: Thomas Watson Date: Mon, 14 Jul 2025 17:19:17 +0200 Subject: [PATCH 29/53] fix: some guardrail telemetry should only be sent once (#6029) Fixes bug in the guardrail telemetry `hasSeen` function which meant that all telemetry was always sent even though `abort` and `abort.integration` should only be sent once. 
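
For reference, a minimal sketch of the intended dedupe rule (illustrative only,
assuming an in-process `seen` map; the actual change lives in
packages/dd-trace/src/guardrails/telemetry.js):

    // Sketch: 'abort' may only ever be sent once, and 'abort.integration'
    // only once per unique name+tags combination; everything else always sends.
    var seen = {}
    function shouldSend (point) {
      if (point.name === 'abort') {
        if (seen.abort) return false
        seen.abort = true
      } else if (point.name === 'abort.integration') {
        var key = point.name + point.tags.join('')
        if (seen[key]) return false
        seen[key] = true
      }
      return true
    }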
--- .github/workflows/platform.yml | 9 +++ integration-tests/helpers/index.js | 43 +++++++------ package.json | 2 + packages/dd-trace/src/guardrails/telemetry.js | 21 +++--- .../test/guardrails/telemetry.spec.js | 64 +++++++++++++++++++ 5 files changed, 111 insertions(+), 28 deletions(-) create mode 100644 packages/dd-trace/test/guardrails/telemetry.spec.js diff --git a/.github/workflows/platform.yml b/.github/workflows/platform.yml index eb2ad7965de..dd4278200f3 100644 --- a/.github/workflows/platform.yml +++ b/.github/workflows/platform.yml @@ -283,6 +283,15 @@ jobs: - run: yarn test:integration - run: yarn test:integration:esbuild + unit-guardrails: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: ./.github/actions/node/active-lts + - uses: ./.github/actions/install + - run: yarn test:trace:guardrails:ci + - uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3 + # We'll run these separately for earlier (i.e. unsupported) versions integration-guardrails: strategy: diff --git a/integration-tests/helpers/index.js b/integration-tests/helpers/index.js index 86217fee227..86aab896bfc 100644 --- a/integration-tests/helpers/index.js +++ b/integration-tests/helpers/index.js @@ -58,34 +58,35 @@ let sandbox // This _must_ be used with the useSandbox function async function runAndCheckWithTelemetry (filename, expectedOut, expectedTelemetryPoints, expectedSource) { const cwd = sandbox.folder - const cleanup = telemetryForwarder(expectedTelemetryPoints) + const cleanup = telemetryForwarder(expectedTelemetryPoints.length > 0) const pid = await runAndCheckOutput(filename, cwd, expectedOut, expectedSource) const msgs = await cleanup() if (expectedTelemetryPoints.length === 0) { // assert no telemetry sent - try { - assert.deepStrictEqual(msgs.length, 0) - } catch (e) { - // This console.log is useful for debugging telemetry. Plz don't remove. - // eslint-disable-next-line no-console - console.error('Expected no telemetry, but got:\n', msgs.map(msg => JSON.stringify(msg[1].points)).join('\n')) - throw e - } - return + assert.strictEqual(msgs.length, 0, `Expected no telemetry, but got:\n${ + msgs.map(msg => JSON.stringify(msg[1].points)).join('\n') + }`) + } else { + assertTelemetryPoints(pid, msgs, expectedTelemetryPoints) } +} + +function assertTelemetryPoints (pid, msgs, expectedTelemetryPoints) { let points = [] for (const [telemetryType, data] of msgs) { assert.strictEqual(telemetryType, 'library_entrypoint') assert.deepStrictEqual(data.metadata, meta(pid)) points = points.concat(data.points) } - let expectedPoints = getPoints(...expectedTelemetryPoints) - // We now have to sort both the expected and actual telemetry points. - // This is because data can come in in any order. - // We'll just contatenate all the data together for each point and sort them. - points = points.map(p => p.name + '\t' + p.tags.join(',')).sort().join('\n') - expectedPoints = expectedPoints.map(p => p.name + '\t' + p.tags.join(',')).sort().join('\n') - assert.strictEqual(points, expectedPoints) + const expectedPoints = getPoints(...expectedTelemetryPoints) + // Sort since data can come in in any order. + assert.deepStrictEqual(points.sort(pointsSorter), expectedPoints.sort(pointsSorter)) + + function pointsSorter (a, b) { + a = a.name + '\t' + a.tags.join(',') + b = b.name + '\t' + b.tags.join(',') + return a === b ? 0 : a < b ? 
-1 : 1 + } function getPoints (...args) { const expectedPoints = [] @@ -94,7 +95,7 @@ async function runAndCheckWithTelemetry (filename, expectedOut, expectedTelemetr if (!currentPoint.name) { currentPoint.name = 'library_entrypoint.' + arg } else { - currentPoint.tags = arg.split(',') + currentPoint.tags = arg.split(',').filter(Boolean) expectedPoints.push(currentPoint) currentPoint = {} } @@ -239,7 +240,7 @@ async function createSandbox (dependencies = [], isGitRepo = false, } } -function telemetryForwarder (expectedTelemetryPoints) { +function telemetryForwarder (shouldExpectTelemetryPoints = true) { process.env.DD_TELEMETRY_FORWARDER_PATH = path.join(__dirname, '..', 'telemetry-forwarder.sh') process.env.FORWARDER_OUT = path.join(__dirname, `forwarder-${Date.now()}.out`) @@ -257,7 +258,7 @@ function telemetryForwarder (expectedTelemetryPoints) { try { msgs = fs.readFileSync(process.env.FORWARDER_OUT, 'utf8').trim().split('\n') } catch (e) { - if (expectedTelemetryPoints.length && e.code === 'ENOENT' && retries < 10) { + if (shouldExpectTelemetryPoints && e.code === 'ENOENT' && retries < 10) { return tryAgain() } return [] @@ -438,6 +439,8 @@ module.exports = { assertObjectContains, assertUUID, spawnProc, + telemetryForwarder, + assertTelemetryPoints, runAndCheckWithTelemetry, createSandbox, curl, diff --git a/package.json b/package.json index 8ef0d384d08..3bbb745498e 100644 --- a/package.json +++ b/package.json @@ -27,6 +27,8 @@ "test:eslint-rules": "node eslint-rules/*.test.mjs", "test:trace:core": "tap packages/dd-trace/test/*.spec.js \"packages/dd-trace/test/{ci-visibility,datastreams,encode,exporters,opentelemetry,opentracing,plugins,service-naming,standalone,telemetry}/**/*.spec.js\"", "test:trace:core:ci": "npm run test:trace:core -- --coverage --nyc-arg=--include=\"packages/dd-trace/src/**/*.js\"", + "test:trace:guardrails": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/dd-trace/test/guardrails/**/*.spec.js\"", + "test:trace:guardrails:ci": "nyc --no-clean --include \"packages/dd-trace/src/guardrails/**/*.js\" -- npm run test:trace:guardrails", "test:instrumentations": "mocha -r 'packages/dd-trace/test/setup/mocha.js' \"packages/datadog-instrumentations/test/@($(echo $PLUGINS)).spec.js\"", "test:instrumentations:ci": "yarn services && nyc --no-clean --include \"packages/datadog-instrumentations/src/@($(echo $PLUGINS)).js\" --include \"packages/datadog-instrumentations/src/@($(echo $PLUGINS))/**/*.js\" -- npm run test:instrumentations", "test:instrumentations:misc": "mocha -r 'packages/dd-trace/test/setup/mocha.js' 'packages/datadog-instrumentations/test/*/**/*.spec.js'", diff --git a/packages/dd-trace/src/guardrails/telemetry.js b/packages/dd-trace/src/guardrails/telemetry.js index f27850cc02d..3930c9c15a9 100644 --- a/packages/dd-trace/src/guardrails/telemetry.js +++ b/packages/dd-trace/src/guardrails/telemetry.js @@ -25,18 +25,23 @@ var metadata = { pid: process.pid } -var seen = [] -function hasSeen (point) { +var seen = {} +function shouldSend (point) { if (point.name === 'abort') { // This one can only be sent once, regardless of tags - return seen.indexOf('abort') !== -1 - } - if (point.name === 'abort.integration') { + if (seen.abort) { + return false + } + seen.abort = true + } else if (point.name === 'abort.integration') { // For now, this is the only other one we want to dedupe var compiledPoint = point.name + point.tags.join('') - return seen.indexOf(compiledPoint) !== -1 + if (seen[compiledPoint]) { + return false + } + seen[compiledPoint] = true } - 
return false + return true } function sendTelemetry (name, tags) { @@ -47,7 +52,7 @@ function sendTelemetry (name, tags) { if (['1', 'true', 'True'].indexOf(process.env.DD_INJECT_FORCE) !== -1) { points = points.filter(function (p) { return ['error', 'complete'].indexOf(p.name) !== -1 }) } - points = points.filter(function (p) { return !hasSeen(p) }) + points = points.filter(function (p) { return shouldSend(p) }) for (var i = 0; i < points.length; i++) { points[i].name = 'library_entrypoint.' + points[i].name } diff --git a/packages/dd-trace/test/guardrails/telemetry.spec.js b/packages/dd-trace/test/guardrails/telemetry.spec.js new file mode 100644 index 00000000000..29d8cb99b35 --- /dev/null +++ b/packages/dd-trace/test/guardrails/telemetry.spec.js @@ -0,0 +1,64 @@ +'use strict' + +process.env.DD_INJECTION_ENABLED = 'true' + +const { telemetryForwarder, assertTelemetryPoints } = require('../../../../integration-tests/helpers') + +describe('sendTelemetry', () => { + let cleanup, sendTelemetry + + beforeEach(() => { + cleanup = telemetryForwarder() + sendTelemetry = proxyquire('../src/guardrails/telemetry', {}) + }) + + it('should send telemetry', async () => { + sendTelemetry([ + { name: 'abort', tags: ['1'] }, + { name: 'abort.integration', tags: ['2'] }, + { name: 'abort.integration', tags: ['3'] }, + { name: 'foo', tags: ['4'] } + ]) + const msgs = await cleanup() + assertTelemetryPoints(process.pid, msgs, [ + 'abort', '1', + 'abort.integration', '2', + 'abort.integration', '3', + 'foo', '4' + ]) + }) + + describe('no duplicates', () => { + it('should not send `abort` more than once in the same call', async () => { + sendTelemetry([ + { name: 'abort', tags: ['1'] }, + { name: 'abort', tags: ['2'] } + ]) + const msgs = await cleanup() + assertTelemetryPoints(process.pid, msgs, ['abort', '1']) + }) + + it('should not send `abort` more than once in a different call', async () => { + sendTelemetry('abort', ['1']) + sendTelemetry('abort', ['2']) + const msgs = await cleanup() + assertTelemetryPoints(process.pid, msgs, ['abort', '1']) + }) + + it('should not send `abort.integration` more than once if tags are the same in the same call', async () => { + sendTelemetry([ + { name: 'abort.integration', tags: ['1'] }, + { name: 'abort.integration', tags: ['1'] } + ]) + const msgs = await cleanup() + assertTelemetryPoints(process.pid, msgs, ['abort.integration', '1']) + }) + + it('should not send `abort.integration` more than once if tags are the same in a different call', async () => { + sendTelemetry('abort.integration', ['1']) + sendTelemetry('abort.integration', ['1']) + const msgs = await cleanup() + assertTelemetryPoints(process.pid, msgs, ['abort.integration', '1']) + }) + }) +}) From d6d13703e8303d6c45c558aa305a2dd131d2e4a0 Mon Sep 17 00:00:00 2001 From: William Conti <58711692+wconti27@users.noreply.github.com> Date: Mon, 14 Jul 2025 13:50:07 -0400 Subject: [PATCH 30/53] remove async storage from child_process instrumentation (#5960) --- .../src/child_process.js | 63 ++++------ .../test/child_process.spec.js | 118 ++++++++++++------ .../datadog-plugin-child_process/src/index.js | 40 ++++-- 3 files changed, 131 insertions(+), 90 deletions(-) diff --git a/packages/datadog-instrumentations/src/child_process.js b/packages/datadog-instrumentations/src/child_process.js index b1fff08c729..71c199b7991 100644 --- a/packages/datadog-instrumentations/src/child_process.js +++ b/packages/datadog-instrumentations/src/child_process.js @@ -4,8 +4,7 @@ const { errorMonitor } = require('events') const util 
= require('util') const { - addHook, - AsyncResource + addHook } = require('./helpers/instrument') const shimmer = require('../../datadog-shimmer') const dc = require('dc-polyfill') @@ -80,7 +79,8 @@ function createContextFromChildProcessInfo (childProcessInfo) { const context = { command: childProcessInfo.command, file: childProcessInfo.file, - shell: childProcessInfo.shell + shell: childProcessInfo.shell, + abortController: new AbortController() } if (childProcessInfo.fileArgs) { @@ -98,17 +98,12 @@ function wrapChildProcessSyncMethod (returnError, shell = false) { } const childProcessInfo = normalizeArgs(arguments, shell) + const context = createContextFromChildProcessInfo(childProcessInfo) - const innerResource = new AsyncResource('bound-anonymous-fn') - return innerResource.runInAsyncScope(() => { - const context = createContextFromChildProcessInfo(childProcessInfo) - const abortController = new AbortController() - - childProcessChannel.start.publish({ ...context, abortController }) - + return childProcessChannel.start.runStores(context, () => { try { - if (abortController.signal.aborted) { - const error = abortController.signal.reason || new Error('Aborted') + if (context.abortController.signal.aborted) { + const error = context.abortController.signal.reason || new Error('Aborted') // expected behaviors on error are different return returnError(error, context) } @@ -141,21 +136,17 @@ function wrapChildProcessCustomPromisifyMethod (customPromisifyMethod, shell) { const context = createContextFromChildProcessInfo(childProcessInfo) const { start, end, asyncStart, asyncEnd, error } = childProcessChannel - const abortController = new AbortController() - - start.publish({ - ...context, - abortController - }) + start.publish(context) let result - if (abortController.signal.aborted) { - result = Promise.reject(abortController.signal.reason || new Error('Aborted')) + if (context.abortController.signal.aborted) { + result = Promise.reject(context.abortController.signal.reason || new Error('Aborted')) } else { try { result = customPromisifyMethod.apply(this, arguments) } catch (error) { - error.publish({ ...context, error }) + context.error = error + error.publish(context) throw error } finally { end.publish(context) @@ -192,26 +183,15 @@ function wrapChildProcessAsyncMethod (ChildProcess, shell = false) { const childProcessInfo = normalizeArgs(arguments, shell) - const cb = arguments[arguments.length - 1] - if (typeof cb === 'function') { - const callbackResource = new AsyncResource('bound-anonymous-fn') - arguments[arguments.length - 1] = callbackResource.bind(cb) - } - - const innerResource = new AsyncResource('bound-anonymous-fn') - return innerResource.runInAsyncScope(() => { - const context = createContextFromChildProcessInfo(childProcessInfo) - const abortController = new AbortController() - - childProcessChannel.start.publish({ ...context, abortController }) - + const context = createContextFromChildProcessInfo(childProcessInfo) + return childProcessChannel.start.runStores(context, () => { let childProcess - if (abortController.signal.aborted) { + if (context.abortController.signal.aborted) { childProcess = new ChildProcess() childProcess.on('error', () => {}) // Original method does not crash when non subscribers process.nextTick(() => { - const error = abortController.signal.reason || new Error('Aborted') + const error = context.abortController.signal.reason || new Error('Aborted') childProcess.emit('error', error) const cb = arguments[arguments.length - 1] @@ -230,17 +210,16 @@ 
function wrapChildProcessAsyncMethod (ChildProcess, shell = false) { childProcess.on(errorMonitor, (e) => { errorExecuted = true - childProcessChannel.error.publish(e) + context.error = e + childProcessChannel.error.publish(context) }) childProcess.on('close', (code = 0) => { if (!errorExecuted && code !== 0) { - childProcessChannel.error.publish() + childProcessChannel.error.publish(context) } - childProcessChannel.asyncEnd.publish({ - ...context, - result: code - }) + context.result = code + childProcessChannel.asyncEnd.publish(context) }) } diff --git a/packages/datadog-instrumentations/test/child_process.spec.js b/packages/datadog-instrumentations/test/child_process.spec.js index 39525b165f4..e0d813536f4 100644 --- a/packages/datadog-instrumentations/test/child_process.spec.js +++ b/packages/datadog-instrumentations/test/child_process.spec.js @@ -58,13 +58,15 @@ describe('child process', () => { const childEmitter = childProcess[methodName]('ls') childEmitter.once('close', () => { - expect(start).to.have.been.calledOnceWith({ + expect(start).to.have.been.calledOnce + expect(start).to.have.been.calledWithMatch({ command: 'ls', file: 'ls', shell: false, abortController: sinon.match.instanceOf(AbortController) }) - expect(asyncFinish).to.have.been.calledOnceWith({ + expect(asyncFinish).to.have.been.calledOnce + expect(asyncFinish).to.have.been.calledWithMatch({ command: 'ls', file: 'ls', shell: false, @@ -79,14 +81,16 @@ describe('child process', () => { const childEmitter = childProcess[methodName]('ls', ['-la']) childEmitter.once('close', () => { - expect(start).to.have.been.calledOnceWith({ + expect(start).to.have.been.calledOnce + expect(start).to.have.been.calledWithMatch({ command: 'ls -la', file: 'ls', fileArgs: ['-la'], shell: false, abortController: sinon.match.instanceOf(AbortController) }) - expect(asyncFinish).to.have.been.calledOnceWith({ + expect(asyncFinish).to.have.been.calledOnce + expect(asyncFinish).to.have.been.calledWithMatch({ command: 'ls -la', file: 'ls', shell: false, @@ -106,13 +110,15 @@ describe('child process', () => { childEmitter.once('error', () => {}) childEmitter.once('close', () => { - expect(start).to.have.been.calledOnceWith({ + expect(start).to.have.been.calledOnce + expect(start).to.have.been.calledWithMatch({ command: 'invalid_command_test', file: 'invalid_command_test', shell: false, abortController: sinon.match.instanceOf(AbortController) }) - expect(asyncFinish).to.have.been.calledOnceWith({ + expect(asyncFinish).to.have.been.calledOnce + expect(asyncFinish).to.have.been.calledWithMatch({ command: 'invalid_command_test', file: 'invalid_command_test', shell: false, @@ -127,13 +133,15 @@ describe('child process', () => { const childEmitter = childProcess[methodName]('node -e "process.exit(1)"', { shell: true }) childEmitter.once('close', () => { - expect(start).to.have.been.calledOnceWith({ + expect(start).to.have.been.calledOnce + expect(start).to.have.been.calledWithMatch({ command: 'node -e "process.exit(1)"', file: 'node -e "process.exit(1)"', abortController: sinon.match.instanceOf(AbortController), shell: true }) - expect(asyncFinish).to.have.been.calledOnceWith({ + expect(asyncFinish).to.have.been.calledOnce + expect(asyncFinish).to.have.been.calledWithMatch({ command: 'node -e "process.exit(1)"', file: 'node -e "process.exit(1)"', shell: true, @@ -156,7 +164,8 @@ describe('child process', () => { file: 'echo', shell: false }) - expect(asyncFinish).to.have.been.calledOnceWith({ + expect(asyncFinish).to.have.been.calledOnce + 
expect(asyncFinish).to.have.been.calledWithMatch({ command: 'echo', file: 'echo', shell: false, @@ -228,13 +237,20 @@ describe('child process', () => { const res = childProcess[methodName]('ls') res.once('close', () => { - expect(start).to.have.been.calledOnceWith({ + expect(start).to.have.been.calledOnce + expect(start).to.have.been.calledWithMatch({ command: 'ls', file: 'ls', shell: true, abortController: sinon.match.instanceOf(AbortController) }) - expect(asyncFinish).to.have.been.calledOnceWith({ command: 'ls', file: 'ls', shell: true, result: 0 }) + expect(asyncFinish).to.have.been.calledOnce + expect(asyncFinish).to.have.been.calledWithMatch({ + command: 'ls', + file: 'ls', + shell: true, + result: 0 + }) expect(error).not.to.have.been.called done() }) @@ -244,13 +260,15 @@ describe('child process', () => { const res = childProcess[methodName]('node -e "process.exit(1)"') res.once('close', () => { - expect(start).to.have.been.calledOnceWith({ + expect(start).to.have.been.calledOnce + expect(start).to.have.been.calledWithMatch({ command: 'node -e "process.exit(1)"', file: 'node -e "process.exit(1)"', abortController: sinon.match.instanceOf(AbortController), shell: true }) - expect(asyncFinish).to.have.been.calledOnceWith({ + expect(asyncFinish).to.have.been.calledOnce + expect(asyncFinish).to.have.been.calledWithMatch({ command: 'node -e "process.exit(1)"', file: 'node -e "process.exit(1)"', shell: true, @@ -265,14 +283,16 @@ describe('child process', () => { const res = childProcess[methodName]('invalid_command_test') res.once('close', () => { - expect(start).to.have.been.calledOnceWith({ + expect(start).to.have.been.calledOnce + expect(start).to.have.been.calledWithMatch({ command: 'invalid_command_test', file: 'invalid_command_test', abortController: sinon.match.instanceOf(AbortController), shell: true }) expect(error).to.have.been.calledOnce - expect(asyncFinish).to.have.been.calledOnceWith({ + expect(asyncFinish).to.have.been.calledOnce + expect(asyncFinish).to.have.been.calledWithMatch({ command: 'invalid_command_test', file: 'invalid_command_test', shell: true, @@ -286,13 +306,15 @@ describe('child process', () => { describe(`method ${methodName} with promisify`, () => { it('should execute success callbacks', async () => { await promisify(childProcess[methodName])('echo') - expect(start).to.have.been.calledOnceWith({ + expect(start).to.have.been.calledOnce + expect(start).to.have.been.calledWithMatch({ command: 'echo', file: 'echo', abortController: sinon.match.instanceOf(AbortController), shell: true }) - expect(asyncFinish).to.have.been.calledOnceWith({ + expect(start).to.have.been.calledOnce + expect(asyncFinish).to.have.been.calledWithMatch({ command: 'echo', file: 'echo', shell: true, @@ -306,7 +328,8 @@ describe('child process', () => { await promisify(childProcess[methodName])('invalid_command_test') return Promise.reject(new Error('Command expected to fail')) } catch (e) { - expect(start).to.have.been.calledOnceWith({ + expect(start).to.have.been.calledOnce + expect(start).to.have.been.calledWithMatch({ command: 'invalid_command_test', file: 'invalid_command_test', abortController: sinon.match.instanceOf(AbortController), @@ -322,13 +345,15 @@ describe('child process', () => { await promisify(childProcess[methodName])('node -e "process.exit(1)"') return Promise.reject(new Error('Command expected to fail')) } catch (e) { - expect(start).to.have.been.calledOnceWith({ + expect(start).to.have.been.calledOnce + expect(start).to.have.been.calledWithMatch({ command: 'node -e 
"process.exit(1)"', file: 'node -e "process.exit(1)"', abortController: sinon.match.instanceOf(AbortController), shell: true }) - expect(asyncFinish).to.have.been.calledOnceWith({ + expect(asyncFinish).to.have.been.calledOnce + expect(asyncFinish).to.have.been.calledWithMatch({ command: 'node -e "process.exit(1)"', file: 'node -e "process.exit(1)"', shell: true, @@ -401,7 +426,8 @@ describe('child process', () => { it('should execute success callbacks', () => { const result = childProcess[methodName]('ls') - expect(start).to.have.been.calledOnceWith({ + expect(start).to.have.been.calledOnce + expect(start).to.have.been.calledWithMatch({ command: 'ls', file: 'ls', shell: false, @@ -409,7 +435,8 @@ describe('child process', () => { }, 'tracing:datadog:child_process:execution:start') - expect(finish).to.have.been.calledOnceWith({ + expect(finish).to.have.been.calledOnce + expect(finish).to.have.been.calledWithMatch({ command: 'ls', file: 'ls', shell: false, @@ -423,14 +450,16 @@ describe('child process', () => { it('should publish arguments', () => { const result = childProcess[methodName]('ls', ['-la']) - expect(start).to.have.been.calledOnceWith({ + expect(start).to.have.been.calledOnce + expect(start).to.have.been.calledWithMatch({ command: 'ls -la', file: 'ls', shell: false, fileArgs: ['-la'], abortController: sinon.match.instanceOf(AbortController) }) - expect(finish).to.have.been.calledOnceWith({ + expect(finish).to.have.been.calledOnce + expect(finish).to.have.been.calledWithMatch({ command: 'ls -la', file: 'ls', shell: false, @@ -455,15 +484,17 @@ describe('child process', () => { file: 'invalid_command_test', shell: false } - expect(start).to.have.been.calledOnceWith({ + expect(start).to.have.been.calledOnce + expect(start).to.have.been.calledWithMatch({ ...expectedContext, abortController: sinon.match.instanceOf(AbortController) }) - expect(finish).to.have.been.calledOnceWith({ + expect(finish).to.have.been.calledOnce + expect(finish).to.have.been.calledWithMatch({ ...expectedContext, error: childError }) - expect(error).to.have.been.calledOnceWith({ + expect(error).to.have.been.calledWithMatch({ ...expectedContext, error: childError }) @@ -482,11 +513,13 @@ describe('child process', () => { file: 'node -e "process.exit(1)"', shell: false } - expect(start).to.have.been.calledOnceWith({ + expect(start).to.have.been.calledOnce + expect(start).to.have.been.calledWithMatch({ ...expectedContext, abortController: sinon.match.instanceOf(AbortController) }) - expect(finish).to.have.been.calledOnceWith({ + expect(finish).to.have.been.calledOnce + expect(finish).to.have.been.calledWithMatch({ ...expectedContext, error: childError }) @@ -505,11 +538,13 @@ describe('child process', () => { file: 'node -e "process.exit(1)"', shell: true } - expect(start).to.have.been.calledOnceWith({ + expect(start).to.have.been.calledOnce + expect(start).to.have.been.calledWithMatch({ ...expectedContext, abortController: sinon.match.instanceOf(AbortController) }) - expect(finish).to.have.been.calledOnceWith({ + expect(finish).to.have.been.calledOnce + expect(finish).to.have.been.calledWithMatch({ ...expectedContext, error: childError }) @@ -531,11 +566,13 @@ describe('child process', () => { file: 'ls', shell: true } - expect(start).to.have.been.calledOnceWith({ + expect(start).to.have.been.calledOnce + expect(start).to.have.been.calledWithMatch({ ...expectedContext, abortController: sinon.match.instanceOf(AbortController) }) - expect(finish).to.have.been.calledOnceWith({ + 
expect(finish).to.have.been.calledOnce + expect(finish).to.have.been.calledWithMatch({ ...expectedContext, result }) @@ -554,15 +591,18 @@ describe('child process', () => { file: 'invalid_command_test', shell: true } - expect(start).to.have.been.calledOnceWith({ + expect(start).to.have.been.calledOnce + expect(start).to.have.been.calledWithMatch({ ...expectedContext, abortController: sinon.match.instanceOf(AbortController) }) - expect(finish).to.have.been.calledOnceWith({ + expect(finish).to.have.been.calledOnce + expect(finish).to.have.been.calledWithMatch({ ...expectedContext, error: childError }) - expect(error).to.have.been.calledOnceWith({ + expect(error).to.have.been.calledOnce + expect(error).to.have.been.calledWithMatch({ ...expectedContext, error: childError }) @@ -581,11 +621,13 @@ describe('child process', () => { file: 'node -e "process.exit(1)"', shell: true } - expect(start).to.have.been.calledOnceWith({ + expect(start).to.have.been.calledOnce + expect(start).to.have.been.calledWithMatch({ ...expectedContext, abortController: sinon.match.instanceOf(AbortController) }) - expect(finish).to.have.been.calledOnceWith({ + expect(finish).to.have.been.calledOnce + expect(finish).to.have.been.calledWithMatch({ ...expectedContext, error: childError }) diff --git a/packages/datadog-plugin-child_process/src/index.js b/packages/datadog-plugin-child_process/src/index.js index 95b7e4d80ab..c145859fb28 100644 --- a/packages/datadog-plugin-child_process/src/index.js +++ b/packages/datadog-plugin-child_process/src/index.js @@ -35,7 +35,9 @@ class ChildProcessPlugin extends TracingPlugin { return this._tracer } - start ({ command, shell }) { + start (ctx) { + const { command, shell } = ctx + if (typeof command !== 'string') { return } @@ -58,10 +60,13 @@ class ChildProcessPlugin extends TracingPlugin { resource: (shell === true) ? 'sh' : cmdFields[0], type: 'system', meta - }) + }, ctx) + + return ctx.currentStore } - end ({ result, error }) { + end (ctx) { + const { result, error } = ctx let exitCode if (result !== undefined) { @@ -74,17 +79,32 @@ class ChildProcessPlugin extends TracingPlugin { exitCode = error?.status || error?.code || 0 } - this.activeSpan?.setTag('cmd.exit_code', `${exitCode}`) - this.activeSpan?.finish() + const span = ctx.currentStore?.span || this.activeSpan + + span?.setTag('cmd.exit_code', `${exitCode}`) + span?.finish() + + return ctx.parentStore } - error (error) { - this.addError(error) + error (ctx) { + const { error } = ctx + + const span = ctx.currentStore?.span || this.activeSpan + this.addError(error, span) + + return ctx.parentStore } - asyncEnd ({ result }) { - this.activeSpan?.setTag('cmd.exit_code', `${result}`) - this.activeSpan?.finish() + asyncEnd (ctx) { + const { result } = ctx + + const span = ctx.currentStore?.span || this.activeSpan + + span?.setTag('cmd.exit_code', `${result}`) + span?.finish() + + return ctx.parentStore } } From edbded50be58725afe414476c708929334963c11 Mon Sep 17 00:00:00 2001 From: Ruben Bridgewater Date: Mon, 14 Jul 2025 20:12:27 +0200 Subject: [PATCH 31/53] datastreams: skip null array entries (#6090) This aligns with the former implementation. 
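
For illustration, a standalone sketch of the rule (hypothetical helper, not the
actual schema builder code):

    // Sketch: null entries are skipped entirely, while undefined entries are
    // still serialized as 'null'; entries are joined with ', ' (no newlines).
    function serializeArray (arr, serialize = JSON.stringify) {
      let out = '['
      for (let i = 0; i < arr.length; i++) {
        if (arr[i] === null) continue // skip null entries
        if (i !== 0) out += ', '
        out += arr[i] === undefined ? 'null' : serialize(arr[i])
      }
      return out + ']'
    }

    // serializeArray([1, null, 2])      -> '[1, 2]'
    // serializeArray([1, undefined, 2]) -> '[1, null, 2]'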
--- .../dd-trace/src/datastreams/schemas/schema_builder.js | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/packages/dd-trace/src/datastreams/schemas/schema_builder.js b/packages/dd-trace/src/datastreams/schemas/schema_builder.js index aa66414faa0..63dcdcc2225 100644 --- a/packages/dd-trace/src/datastreams/schemas/schema_builder.js +++ b/packages/dd-trace/src/datastreams/schemas/schema_builder.js @@ -99,6 +99,8 @@ class OpenApiComponents { // This adds a single whitespace between entries without adding newlines. // This differs from JSON.stringify and is used to align with the output // in other platforms. +// TODO: Add tests to verify this behavior. A couple of cases are not +// covered by the existing tests. function toJSON (value) { // eslint-disable-next-line eslint-rules/eslint-safe-typeof-object if (typeof value === 'object') { @@ -108,10 +110,12 @@ function toJSON (value) { if (Array.isArray(value)) { let result = '[' for (let i = 0; i < value.length; i++) { - if (i > 0) { - result += ', ' + if (value[i] !== null) { + if (i !== 0) { + result += ', ' + } + result += value[i] === undefined ? 'null' : toJSON(value[i]) } - result += value[i] == null ? 'null' : toJSON(value[i]) } return `${result}]` } From a3047f6e9c915276f166a05e3719162ea8ff47ac Mon Sep 17 00:00:00 2001 From: Ruben Bridgewater Date: Tue, 15 Jul 2025 00:30:46 +0200 Subject: [PATCH 32/53] [test-optimization] downgrade limiter to v1.1.5 for old cypress (#6091) The old cypress implementation fails due to the global not being present in older Node.js versions. v2.1.0 fails in our tests. v3.0.0 passes, it just requires Node.js 16, which we do not support for the old cypress support range. --- .github/dependabot.yml | 5 +++++ package.json | 2 +- packages/dd-trace/src/rate_limiter.js | 2 +- yarn.lock | 8 ++++---- 4 files changed, 11 insertions(+), 6 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 891a2bba0e7..b113545e10b 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -37,6 +37,11 @@ updates: - dependency-name: "lru-cache" # 11.0.0 onwards only supports Node.js 20 and above update-types: ["version-update:semver-major"] + - dependency-name: "limiter" + # 2.0.0 onwards breaks our tests. 3.0.0 works but it requires Node.js v16. + # That breaks cypress in our v5 release line. Update when v5 is EOL. + # Reverting this commit should suffice. 
+ update-types: ["version-update:semver-major"] groups: dev-minor-and-patch-dependencies: dependency-type: "development" diff --git a/package.json b/package.json index 3bbb745498e..bff34c5000e 100644 --- a/package.json +++ b/package.json @@ -131,7 +131,7 @@ "jest-docblock": "^29.7.0", "jsonpath-plus": "^10.3.0", "koalas": "^1.0.2", - "limiter": "^3.0.0", + "limiter": "^1.1.5", "lodash.sortby": "^4.7.0", "lru-cache": "^10.4.3", "module-details-from-path": "^1.0.4", diff --git a/packages/dd-trace/src/rate_limiter.js b/packages/dd-trace/src/rate_limiter.js index a584216335f..3789ffaeb72 100644 --- a/packages/dd-trace/src/rate_limiter.js +++ b/packages/dd-trace/src/rate_limiter.js @@ -5,7 +5,7 @@ const limiter = require('limiter') class RateLimiter { constructor (rateLimit, interval = 'second') { this._rateLimit = Number.parseInt(rateLimit) - this._limiter = new limiter.RateLimiter({ tokensPerInterval: this._rateLimit, interval }) + this._limiter = new limiter.RateLimiter(this._rateLimit, interval) this._tokensRequested = 0 this._prevIntervalTokens = 0 this._prevTokensRequested = 0 diff --git a/yarn.lock b/yarn.lock index b583b268e15..4a79a95bf5e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3147,10 +3147,10 @@ lie@~3.3.0: dependencies: immediate "~3.0.5" -limiter@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/limiter/-/limiter-3.0.0.tgz#03556b76d1a81f547caeecc6b83ecc6f24495715" - integrity sha512-hev7DuXojsTFl2YwyzUJMDnZ/qBDd3yZQLSH3aD4tdL1cqfc3TMnoecEJtWFaQFdErZsKoFMBTxF/FBSkgDbEg== +limiter@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/limiter/-/limiter-1.1.5.tgz#8f92a25b3b16c6131293a0cc834b4a838a2aa7c2" + integrity sha512-FWWMIEOxz3GwUI4Ts/IvgVy6LPvoMPgjMdQ185nN6psJyBJ4yOpzqm695/h5umdLJg2vW3GR5iG11MAkR2AzJA== locate-path@^5.0.0: version "5.0.0" From 08b048c24897e66ba4c961f5e3c02dd604290c8f Mon Sep 17 00:00:00 2001 From: Roch Devost Date: Mon, 14 Jul 2025 18:41:23 -0400 Subject: [PATCH 33/53] ci: add stats and branch/date options to flakiness report (#6076) --- scripts/flakiness.mjs | 46 ++++++++++++++++++++++++++++++++----------- 1 file changed, 35 insertions(+), 11 deletions(-) diff --git a/scripts/flakiness.mjs b/scripts/flakiness.mjs index b09be3e22fc..911d5a7c9a7 100644 --- a/scripts/flakiness.mjs +++ b/scripts/flakiness.mjs @@ -2,7 +2,12 @@ import { Octokit } from 'octokit' -const { DAYS = '1', OCCURRENCES = '1' } = process.env +const { + BRANCH, + DAYS = '1', + OCCURRENCES = '1', + UNTIL +} = process.env const ONE_DAY = 24 * 60 * 60 * 1000 @@ -23,6 +28,12 @@ const workflows = [ const flaky = {} const reported = new Set() +const untilMatch = UNTIL?.match(/^\d{4}-\d{2}-\d{2}$/)?.[0] +const endDate = untilMatch ?? new Date().toISOString().slice(0, 10) +const startDate = new Date(new Date(endDate).getTime() - (DAYS - 1) * ONE_DAY).toISOString().slice(0, 10) + +let totalCount = 0 +let flakeCount = 0 async function checkWorkflowRuns (id, page = 1) { const response = await octokit.rest.actions.listWorkflowRuns({ @@ -31,6 +42,8 @@ async function checkWorkflowRuns (id, page = 1) { page, per_page: 100, // max is 100 status: 'success', + created: `${startDate}..${endDate}`, + branch: BRANCH, workflow_id: id }) @@ -43,13 +56,14 @@ async function checkWorkflowRuns (id, page = 1) { const promises = [] for (const run of runs) { + totalCount++ + // Filter out first attempts to get only reruns. The idea is that if a rerun // is successful it means any failed jobs in the previous run were flaky // since a rerun without any change made them pass. 
if (run.run_attempt === 1) continue - if (Date.parse(run.created_at) < Date.now() - DAYS * ONE_DAY) { - return Promise.all(promises) - } + + flakeCount++ promises.push(checkWorkflowJobs(run.id)) } @@ -95,14 +109,18 @@ async function checkWorkflowJobs (id, page = 1) { await Promise.all(workflows.map(w => checkWorkflowRuns(w))) // TODO: Report this somewhere useful instead. + +const dateRange = startDate === endDate ? `on ${endDate}` : `from ${startDate} to ${endDate}` +const logString = `jobs with at least ${OCCURRENCES} occurrences seen ${dateRange} (UTC)*` + if (Object.keys(flaky).length === 0) { - console.log( - `*No flaky jobs with at least ${OCCURRENCES} occurrences seen in the last ${DAYS > 1 ? `${DAYS} days` : 'day'}*` - ) + console.log(`*No flaky ${logString}`) } else { - console.log( - `*Flaky jobs with at least ${OCCURRENCES} occurrences seen in the last ${DAYS > 1 ? `${DAYS} days` : 'day'}*` - ) + const workflowSuccessRate = +((1 - flakeCount / totalCount) * 100).toFixed(1) + const pipelineSuccessRate = +((workflowSuccessRate / 100) ** workflows.length * 100).toFixed(1) + const pipelineBadge = pipelineSuccessRate >= 80 ? '🟢' : pipelineSuccessRate >= 70 ? '🟡' : '🔴' + + console.log(`*Flaky ${logString}`) for (const [workflow, jobs] of Object.entries(flaky).sort()) { if (!reported.has(workflow)) continue console.log(`* ${workflow}`) @@ -110,7 +128,13 @@ if (Object.keys(flaky).length === 0) { if (urls.length < OCCURRENCES) continue // Padding is needed because Slack doesn't show single digits as links. const links = urls.map((url, idx) => `[${String(idx + 1).padStart(2, '0')}](${url})`) - console.log(` * ${job} (${links.join(', ')})`) + const runsBadge = urls.length >= 3 ? ' 🔴' : urls.length === 2 ? ' 🟡' : '' + console.log(` * ${job} (${links.join(', ')})${runsBadge}`) } } + console.log('*Flakiness stats*') + console.log(`* Total runs: ${totalCount}`) + console.log(`* Flaky runs: ${flakeCount}`) + console.log(`* Workflow success rate: ${workflowSuccessRate}%`) + console.log(`* Pipeline success rate (approx): ${pipelineSuccessRate}% ${pipelineBadge}`) } From 6a2ca7879a6ad650e84e973bdb6fde6975d7d9f8 Mon Sep 17 00:00:00 2001 From: Roch Devost Date: Mon, 14 Jul 2025 18:43:27 -0400 Subject: [PATCH 34/53] ci: add paths config to workflows for github & pull_request trigger (#5891) --- .github/workflows/apm-capabilities.yml | 15 ++++++++ .github/workflows/apm-integrations.yml | 15 ++++++++ .github/workflows/appsec.yml | 15 ++++++++ .github/workflows/debugger.yml | 15 ++++++++ .github/workflows/lambda.yml | 15 ++++++++ .github/workflows/llmobs.yml | 15 ++++++++ .github/workflows/platform.yml | 15 ++++++++ .github/workflows/profiling.yml | 15 ++++++++ .github/workflows/project.yml | 15 ++++++++ .github/workflows/system-tests.yml | 15 ++++++++ .github/workflows/test-optimization.yml | 15 ++++++++ .gitlab-ci.yml | 48 +++++++++++++++++++++++++ scripts/verify-ci-config.js | 8 ++--- 13 files changed, 217 insertions(+), 4 deletions(-) diff --git a/.github/workflows/apm-capabilities.yml b/.github/workflows/apm-capabilities.yml index 0d101dd9c69..185e82d971d 100644 --- a/.github/workflows/apm-capabilities.yml +++ b/.github/workflows/apm-capabilities.yml @@ -2,6 +2,21 @@ name: APM Capabilities on: pull_request: + paths: + # Paths for whole project (should be same in all workflows) + - '**' + - '!.github/workflows/**' + - '!.gitlab/**' + - '!.vscode/**' + - '!devdocs/**' + - '!examples/**' + - '!*.md' + - '!.gitlab-ci.yml' + - '!LICENSE*' + - 'LICENSE-3rdparty.csv' + - '!NOTICE' + # Paths for this 
workflow only + - '.github/workflows/apm-capabilities.yml' push: branches: [master] schedule: diff --git a/.github/workflows/apm-integrations.yml b/.github/workflows/apm-integrations.yml index 6b8e6e4549f..7b7944953c6 100644 --- a/.github/workflows/apm-integrations.yml +++ b/.github/workflows/apm-integrations.yml @@ -2,6 +2,21 @@ name: APM Integrations on: pull_request: + paths: + # Paths for whole project (should be same in all workflows) + - '**' + - '!.github/workflows/**' + - '!.gitlab/**' + - '!.vscode/**' + - '!devdocs/**' + - '!examples/**' + - '!*.md' + - '!.gitlab-ci.yml' + - '!LICENSE*' + - 'LICENSE-3rdparty.csv' + - '!NOTICE' + # Paths for this workflow only + - '.github/workflows/apm-integrations.yml' push: branches: [master] schedule: diff --git a/.github/workflows/appsec.yml b/.github/workflows/appsec.yml index ec65546f08c..92a7558db32 100644 --- a/.github/workflows/appsec.yml +++ b/.github/workflows/appsec.yml @@ -2,6 +2,21 @@ name: AppSec on: pull_request: + paths: + # Paths for whole project (should be same in all workflows) + - '**' + - '!.github/workflows/**' + - '!.gitlab/**' + - '!.vscode/**' + - '!devdocs/**' + - '!examples/**' + - '!*.md' + - '!.gitlab-ci.yml' + - '!LICENSE*' + - 'LICENSE-3rdparty.csv' + - '!NOTICE' + # Paths for this workflow only + - '.github/workflows/appsec.yml' push: branches: [master] schedule: diff --git a/.github/workflows/debugger.yml b/.github/workflows/debugger.yml index 88b288a7970..3905b3eb3c9 100644 --- a/.github/workflows/debugger.yml +++ b/.github/workflows/debugger.yml @@ -2,6 +2,21 @@ name: Debugger on: pull_request: + paths: + # Paths for whole project (should be same in all workflows) + - '**' + - '!.github/workflows/**' + - '!.gitlab/**' + - '!.vscode/**' + - '!devdocs/**' + - '!examples/**' + - '!*.md' + - '!.gitlab-ci.yml' + - '!LICENSE*' + - 'LICENSE-3rdparty.csv' + - '!NOTICE' + # Paths for this workflow only + - '.github/workflows/debugger.yml' push: branches: [master] schedule: diff --git a/.github/workflows/lambda.yml b/.github/workflows/lambda.yml index 54e6fb40c44..16ed6c37f1f 100644 --- a/.github/workflows/lambda.yml +++ b/.github/workflows/lambda.yml @@ -2,6 +2,21 @@ name: Lambda on: pull_request: + paths: + # Paths for whole project (should be same in all workflows) + - '**' + - '!.github/workflows/**' + - '!.gitlab/**' + - '!.vscode/**' + - '!devdocs/**' + - '!examples/**' + - '!*.md' + - '!.gitlab-ci.yml' + - '!LICENSE*' + - 'LICENSE-3rdparty.csv' + - '!NOTICE' + # Paths for this workflow only + - '.github/workflows/lambda.yml' push: branches: [master] schedule: diff --git a/.github/workflows/llmobs.yml b/.github/workflows/llmobs.yml index 073d80045f9..61ebc4dd966 100644 --- a/.github/workflows/llmobs.yml +++ b/.github/workflows/llmobs.yml @@ -2,6 +2,21 @@ name: LLMObs on: pull_request: + paths: + # Paths for whole project (should be same in all workflows) + - '**' + - '!.github/workflows/**' + - '!.gitlab/**' + - '!.vscode/**' + - '!devdocs/**' + - '!examples/**' + - '!*.md' + - '!.gitlab-ci.yml' + - '!LICENSE*' + - 'LICENSE-3rdparty.csv' + - '!NOTICE' + # Paths for this workflow only + - '.github/workflows/llmobs.yml' push: branches: [master] schedule: diff --git a/.github/workflows/platform.yml b/.github/workflows/platform.yml index dd4278200f3..2931ad86871 100644 --- a/.github/workflows/platform.yml +++ b/.github/workflows/platform.yml @@ -2,6 +2,21 @@ name: Platform on: pull_request: + paths: + # Paths for whole project (should be same in all workflows) + - '**' + - '!.github/workflows/**' + - '!.gitlab/**' + - 
'!.vscode/**' + - '!devdocs/**' + - '!examples/**' + - '!*.md' + - '!.gitlab-ci.yml' + - '!LICENSE*' + - 'LICENSE-3rdparty.csv' + - '!NOTICE' + # Paths for this workflow only + - '.github/workflows/platform.yml' push: branches: [master] schedule: diff --git a/.github/workflows/profiling.yml b/.github/workflows/profiling.yml index 60175d72b4b..cbc500a8513 100644 --- a/.github/workflows/profiling.yml +++ b/.github/workflows/profiling.yml @@ -2,6 +2,21 @@ name: Profiling on: pull_request: + paths: + # Paths for whole project (should be same in all workflows) + - '**' + - '!.github/workflows/**' + - '!.gitlab/**' + - '!.vscode/**' + - '!devdocs/**' + - '!examples/**' + - '!*.md' + - '!.gitlab-ci.yml' + - '!LICENSE*' + - 'LICENSE-3rdparty.csv' + - '!NOTICE' + # Paths for this workflow only + - '.github/workflows/profiling.yml' push: branches: [master] schedule: diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml index 02493c4a395..76e6816733c 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -2,6 +2,21 @@ name: Project on: pull_request: + paths: + # Paths for whole project (should be same in all workflows) + - '**' + - '!.github/workflows/**' + - '!.gitlab/**' + - '!.vscode/**' + - '!devdocs/**' + - '!examples/**' + - '!*.md' + - '!.gitlab-ci.yml' + - '!LICENSE*' + - 'LICENSE-3rdparty.csv' + - '!NOTICE' + # Paths for this workflow only + - '.github/workflows/**' push: branches: [master] diff --git a/.github/workflows/system-tests.yml b/.github/workflows/system-tests.yml index 1d500c28379..ad5de4f3e89 100644 --- a/.github/workflows/system-tests.yml +++ b/.github/workflows/system-tests.yml @@ -2,6 +2,21 @@ name: System Tests on: pull_request: + paths: + # Paths for whole project (should be same in all workflows) + - '**' + - '!.github/workflows/**' + - '!.gitlab/**' + - '!.vscode/**' + - '!devdocs/**' + - '!examples/**' + - '!*.md' + - '!.gitlab-ci.yml' + - '!LICENSE*' + - 'LICENSE-3rdparty.csv' + - '!NOTICE' + # Paths for this workflow only + - '.github/workflows/system-tests.yml' push: branches: [master] workflow_dispatch: diff --git a/.github/workflows/test-optimization.yml b/.github/workflows/test-optimization.yml index b516300e6c7..95ba51dffe1 100644 --- a/.github/workflows/test-optimization.yml +++ b/.github/workflows/test-optimization.yml @@ -2,6 +2,21 @@ name: Test Optimization on: pull_request: + paths: + # Paths for whole project (should be same in all workflows) + - '**' + - '!.github/workflows/**' + - '!.gitlab/**' + - '!.vscode/**' + - '!devdocs/**' + - '!examples/**' + - '!*.md' + - '!.gitlab-ci.yml' + - '!LICENSE*' + - 'LICENSE-3rdparty.csv' + - '!NOTICE' + # Paths for this workflow only + - '.github/workflows/test-optimization.yml' push: branches: - master diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 7582ecf2f6e..4bcb9c39129 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -16,8 +16,56 @@ stages: include: - local: ".gitlab/one-pipeline.locked.yml" + rules: + - changes: + paths: + # Paths for whole project (should be same in all workflows) + - '**' + - '!.github/workflows/**' + - '!.gitlab/**' + - '!.vscode/**' + - '!devdocs/**' + - '!examples/**' + - '!*.md' + - '!LICENSE*' + - 'LICENSE-3rdparty.csv' + - '!NOTICE' + # Paths for this workflow only + - '.gitlab/one-pipeline.locked.yml' - local: ".gitlab/benchmarks.yml" + rules: + - changes: + paths: + # Paths for whole project (should be same in all workflows) + - '**' + - '!.github/workflows/**' + - '!.gitlab/**' + - '!.vscode/**' + - '!devdocs/**' + - '!examples/**' + 
- '!*.md' + - '!LICENSE*' + - 'LICENSE-3rdparty.csv' + - '!NOTICE' + # Paths for this workflow only + - '.gitlab/benchmarks.yml' - local: ".gitlab/macrobenchmarks.yml" + rules: + - changes: + paths: + # Paths for whole project (should be same in all workflows) + - '**' + - '!.github/workflows/**' + - '!.gitlab/**' + - '!.vscode/**' + - '!devdocs/**' + - '!examples/**' + - '!*.md' + - '!LICENSE*' + - 'LICENSE-3rdparty.csv' + - '!NOTICE' + # Paths for this workflow only + - '.gitlab/macrobenchmarks.yml' workflow: rules: diff --git a/scripts/verify-ci-config.js b/scripts/verify-ci-config.js index 7ea874e60c7..337f66d1698 100644 --- a/scripts/verify-ci-config.js +++ b/scripts/verify-ci-config.js @@ -174,7 +174,6 @@ const IGNORED_WORKFLOWS = { 'retry.yml' ], trigger_pull_request: [ - 'audit.yml', 'eslint-rules.yml', 'stale.yml' ], @@ -201,9 +200,10 @@ for (const workflow of workflows) { const yamlPath = path.join(__dirname, '..', '.github', 'workflows', workflow) const yamlContent = yaml.parse(fs.readFileSync(yamlPath, 'utf8')) const triggers = yamlContent.on - if (!IGNORED_WORKFLOWS.trigger_pull_request.includes(workflow) && - triggers?.pull_request !== null) { - triggersError(workflow, 'The `pull_request` trigger should be blank') + if (!IGNORED_WORKFLOWS.trigger_pull_request.includes(workflow)) { + if (triggers?.pull_request !== null && Object.keys(triggers.pull_request).toString() !== 'paths') { + triggersError(workflow, 'The `pull_request` trigger should be blank or only include paths') + } } if (!IGNORED_WORKFLOWS.trigger_push.includes(workflow) && triggers?.push?.branches?.[0] !== 'master') { From 05f1c9337b172bb0e02dbb2944d55d3ae52f4d1a Mon Sep 17 00:00:00 2001 From: Roch Devost Date: Mon, 14 Jul 2025 18:57:52 -0400 Subject: [PATCH 35/53] test: update confluent kafka tests to use dynamic topic (#6079) --- .../test/index.spec.js | 61 ++++++++++++------- .../dd-trace/test/setup/services/kafka.js | 2 +- 2 files changed, 39 insertions(+), 24 deletions(-) diff --git a/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js b/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js index 64eca6972a9..f7bdd604ef9 100644 --- a/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js +++ b/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js @@ -1,5 +1,6 @@ 'use strict' +const { randomUUID } = require('crypto') const { expect } = require('chai') const agent = require('../../dd-trace/test/plugins/agent') const { expectSomeSpan, withDefaults } = require('../../dd-trace/test/plugins/helpers') @@ -10,14 +11,12 @@ const DataStreamsContext = require('../../dd-trace/src/datastreams/context') const { computePathwayHash } = require('../../dd-trace/src/datastreams/pathway') const { ENTRY_PARENT_HASH, DataStreamsProcessor } = require('../../dd-trace/src/datastreams/processor') -const testTopic = 'test-topic' - -const getDsmPathwayHash = (isProducer, parentHash) => { +const getDsmPathwayHash = (testTopic, isProducer, parentHash) => { let edgeTags if (isProducer) { edgeTags = ['direction:out', 'topic:' + testTopic, 'type:kafka'] } else { - edgeTags = ['direction:in', 'group:test-group', 'topic:' + testTopic, 'type:kafka'] + edgeTags = ['direction:in', 'group:test-group-confluent', 'topic:' + testTopic, 'type:kafka'] } edgeTags.sort() @@ -26,6 +25,7 @@ const getDsmPathwayHash = (isProducer, parentHash) => { describe('Plugin', () => { const module = '@confluentinc/kafka-javascript' + const groupId = 'test-group-confluent' 
describe('confluentinc-kafka-javascript', function () { this.timeout(30000) @@ -36,11 +36,13 @@ describe('Plugin', () => { withVersions('confluentinc-kafka-javascript', module, (version) => { let kafka + let admin let tracer let Kafka let ConfluentKafka let messages let nativeApi + let testTopic describe('without configuration', () => { beforeEach(async () => { @@ -60,11 +62,24 @@ describe('Plugin', () => { kafka = new Kafka({ kafkaJS: { clientId: `kafkajs-test-${version}`, - brokers: ['127.0.0.1:9092'] + brokers: ['127.0.0.1:9092'], + logLevel: ConfluentKafka.logLevel.WARN } }) + testTopic = `test-topic-${randomUUID()}` + admin = kafka.admin() + await admin.connect() + await admin.createTopics({ + topics: [{ + topic: testTopic, + numPartitions: 1, + replicationFactor: 1 + }] + }) }) + afterEach(() => admin.disconnect()) + describe('kafkaJS api', () => { describe('producer', () => { it('should be instrumented', async () => { @@ -74,7 +89,7 @@ describe('Plugin', () => { meta: { 'span.kind': 'producer', component: 'confluentinc-kafka-javascript', - 'messaging.destination.name': 'test-topic', + 'messaging.destination.name': testTopic, 'messaging.kafka.bootstrap.servers': '127.0.0.1:9092' }, metrics: { @@ -125,7 +140,7 @@ describe('Plugin', () => { beforeEach(async () => { messages = [{ key: 'key1', value: 'test2' }] consumer = kafka.consumer({ - kafkaJS: { groupId: 'test-group' } + kafkaJS: { groupId, fromBeginning: true, autoCommit: false } }) await consumer.connect() await consumer.subscribe({ topic: testTopic }) @@ -142,7 +157,7 @@ describe('Plugin', () => { meta: { 'span.kind': 'consumer', component: 'confluentinc-kafka-javascript', - 'messaging.destination.name': 'test-topic' + 'messaging.destination.name': testTopic }, resource: testTopic, error: 0, @@ -151,7 +166,7 @@ describe('Plugin', () => { const consumerReceiveMessagePromise = new Promise(resolve => { consumer.run({ - eachMessage: async () => { + eachMessage: () => { resolve() } }) @@ -221,7 +236,7 @@ describe('Plugin', () => { [ERROR_STACK]: fakeError.stack, 'span.kind': 'consumer', component: 'confluentinc-kafka-javascript', - 'messaging.destination.name': 'test-topic' + 'messaging.destination.name': testTopic }, resource: testTopic, error: 1, @@ -344,7 +359,10 @@ describe('Plugin', () => { beforeEach(async () => { nativeConsumer = new Consumer({ 'bootstrap.servers': '127.0.0.1:9092', - 'group.id': 'test-group' + 'group.id': groupId, + 'enable.auto.commit': false, + }, { + 'auto.offset.reset': 'earliest' }) await new Promise((resolve, reject) => { @@ -491,15 +509,15 @@ describe('Plugin', () => { tracer.use('confluentinc-kafka-javascript', { dsmEnabled: true }) messages = [{ key: 'key1', value: 'test2' }] consumer = kafka.consumer({ - kafkaJS: { groupId: 'test-group', fromBeginning: false } + kafkaJS: { groupId, fromBeginning: true } }) await consumer.connect() await consumer.subscribe({ topic: testTopic }) }) - before(() => { - expectedProducerHash = getDsmPathwayHash(true, ENTRY_PARENT_HASH) - expectedConsumerHash = getDsmPathwayHash(false, expectedProducerHash) + beforeEach(() => { + expectedProducerHash = getDsmPathwayHash(testTopic, true, ENTRY_PARENT_HASH) + expectedConsumerHash = getDsmPathwayHash(testTopic, false, expectedProducerHash) }) afterEach(async () => { @@ -617,24 +635,22 @@ describe('Plugin', () => { partition, offset: Number(message.offset) } - // Signal that we've processed a message messageProcessedResolve() } }) - consumerRunPromise.catch(() => {}) + await consumerRunPromise // wait for the message to be 
processed before continuing - await sendMessages(kafka, testTopic, messages).then( - async () => await messageProcessedPromise - ) + await sendMessages(kafka, testTopic, messages) + await messageProcessedPromise for (const call of setOffsetSpy.getCalls()) { expect(call.args[0]).to.not.have.property('type', 'kafka_commit') } const newConsumer = kafka.consumer({ - kafkaJS: { groupId: 'test-group', autoCommit: false } + kafkaJS: { groupId, fromBeginning: true, autoCommit: false } }) await newConsumer.connect() await sendMessages(kafka, testTopic, [{ key: 'key1', value: 'test2' }]) @@ -648,12 +664,11 @@ describe('Plugin', () => { // Check our work const runArg = setOffsetSpy.lastCall.args[0] - expect(setOffsetSpy).to.be.calledOnce expect(runArg).to.have.property('offset', commitMeta.offset) expect(runArg).to.have.property('partition', commitMeta.partition) expect(runArg).to.have.property('topic', commitMeta.topic) expect(runArg).to.have.property('type', 'kafka_commit') - expect(runArg).to.have.property('consumer_group', 'test-group') + expect(runArg).to.have.property('consumer_group', groupId) }) it('Should add backlog on producer response', async () => { diff --git a/packages/dd-trace/test/setup/services/kafka.js b/packages/dd-trace/test/setup/services/kafka.js index d114682ba7d..7e07979b48d 100644 --- a/packages/dd-trace/test/setup/services/kafka.js +++ b/packages/dd-trace/test/setup/services/kafka.js @@ -9,7 +9,7 @@ const kafka = new Kafka({ }) const admin = kafka.admin() const producer = kafka.producer() -const consumer = kafka.consumer({ groupId: 'test-group' }) +const consumer = kafka.consumer({ groupId: 'setup-group' }) const topic = 'test-topic' const messages = [{ key: 'setup', value: 'test' }] From 4fa522af2e84196e4d2e9fa41b219a4cbf0db202 Mon Sep 17 00:00:00 2001 From: Roch Devost Date: Mon, 14 Jul 2025 19:04:11 -0400 Subject: [PATCH 36/53] test: rewrite aws sqs batch test to be deterministic (#6083) --- .../datadog-plugin-aws-sdk/test/sqs.spec.js | 82 +++++++++---------- 1 file changed, 41 insertions(+), 41 deletions(-) diff --git a/packages/datadog-plugin-aws-sdk/test/sqs.spec.js b/packages/datadog-plugin-aws-sdk/test/sqs.spec.js index 913df5bab62..a81e3d1fddf 100644 --- a/packages/datadog-plugin-aws-sdk/test/sqs.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/sqs.spec.js @@ -176,11 +176,22 @@ describe('Plugin', () => { }) }) - it('should propagate the tracing context from the producer to the consumer in batch operations', (done) => { + it('should propagate the tracing context from the producer to the consumer in batch operations', async () => { let parentId let traceId - agent.assertSomeTraces(traces => { + const sendPromise = new Promise((resolve, reject) => { + sqs.sendMessageBatch({ + Entries: [ + { Id: '1', MessageBody: 'test batch propagation 1' }, + { Id: '2', MessageBody: 'test batch propagation 2' }, + { Id: '3', MessageBody: 'test batch propagation 3' } + ], + QueueUrl + }, (err) => err ? 
reject(err) : resolve()) + }) + + const parentPromise = agent.assertSomeTraces(traces => { const span = traces[0][0] expect(span.resource.startsWith('sendMessageBatch')).to.equal(true) @@ -192,56 +203,45 @@ describe('Plugin', () => { traceId = span.trace_id.toString() }) - let batchChildSpans = 0 - agent.assertSomeTraces(traces => { - const span = traces[0][0] + await Promise.all([sendPromise, parentPromise]) - expect(parentId).to.be.a('string') - expect(span.parent_id.toString()).to.equal(parentId) - expect(span.trace_id.toString()).to.equal(traceId) - batchChildSpans += 1 - expect(batchChildSpans).to.equal(3) - }, { timeoutMs: 2000 }).then(done, done) + async function receiveAndAssertMessage () { + const childPromise = agent.assertSomeTraces(traces => { + const span = traces[0][0] - sqs.sendMessageBatch( - { - Entries: [ - { - Id: '1', - MessageBody: 'test batch propagation 1' - }, - { - Id: '2', - MessageBody: 'test batch propagation 2' - }, - { - Id: '3', - MessageBody: 'test batch propagation 3' - } - ], - QueueUrl - }, (err) => { - if (err) return done(err) + expect(parentId).to.be.a('string') + expect(span.parent_id.toString()).to.equal(parentId) + expect(span.trace_id.toString()).to.equal(traceId) + }) - function receiveMessage () { - sqs.receiveMessage({ - QueueUrl, - MaxNumberOfMessages: 1 - }, (err, data) => { - if (err) return done(err) + const receiveMessage = new Promise((resolve, reject) => { + sqs.receiveMessage({ + QueueUrl, + MaxNumberOfMessages: 1 + }, (err, data) => { + if (err) return reject(err) + try { for (const message in data.Messages) { const recordData = data.Messages[message].MessageAttributes expect(recordData).to.have.property('_datadog') const traceContext = JSON.parse(recordData._datadog.StringValue) expect(traceContext).to.have.property('x-datadog-trace-id') } - }) - } - receiveMessage() - receiveMessage() - receiveMessage() + + resolve() + } catch (e) { + reject(e) + } + }) }) + + await Promise.all([childPromise, receiveMessage]) + } + + await receiveAndAssertMessage() + await receiveAndAssertMessage() + await receiveAndAssertMessage() }) it('should run the consumer in the context of its span', (done) => { From c959cc9718f5bd16f4bd8eda59e14deb246053f7 Mon Sep 17 00:00:00 2001 From: Thomas Watson Date: Tue, 15 Jul 2025 09:25:32 +0200 Subject: [PATCH 37/53] Revert "ci: add paths config to workflows for github & pull_request trigger (#5891)" (#6093) --- .github/workflows/apm-capabilities.yml | 15 -------- .github/workflows/apm-integrations.yml | 15 -------- .github/workflows/appsec.yml | 15 -------- .github/workflows/debugger.yml | 15 -------- .github/workflows/lambda.yml | 15 -------- .github/workflows/llmobs.yml | 15 -------- .github/workflows/platform.yml | 15 -------- .github/workflows/profiling.yml | 15 -------- .github/workflows/project.yml | 15 -------- .github/workflows/system-tests.yml | 15 -------- .github/workflows/test-optimization.yml | 15 -------- .gitlab-ci.yml | 48 ------------------------- scripts/verify-ci-config.js | 8 ++--- 13 files changed, 4 insertions(+), 217 deletions(-) diff --git a/.github/workflows/apm-capabilities.yml b/.github/workflows/apm-capabilities.yml index 185e82d971d..0d101dd9c69 100644 --- a/.github/workflows/apm-capabilities.yml +++ b/.github/workflows/apm-capabilities.yml @@ -2,21 +2,6 @@ name: APM Capabilities on: pull_request: - paths: - # Paths for whole project (should be same in all workflows) - - '**' - - '!.github/workflows/**' - - '!.gitlab/**' - - '!.vscode/**' - - '!devdocs/**' - - '!examples/**' - - '!*.md' 
- - '!.gitlab-ci.yml' - - '!LICENSE*' - - 'LICENSE-3rdparty.csv' - - '!NOTICE' - # Paths for this workflow only - - '.github/workflows/apm-capabilities.yml' push: branches: [master] schedule: diff --git a/.github/workflows/apm-integrations.yml b/.github/workflows/apm-integrations.yml index 7b7944953c6..6b8e6e4549f 100644 --- a/.github/workflows/apm-integrations.yml +++ b/.github/workflows/apm-integrations.yml @@ -2,21 +2,6 @@ name: APM Integrations on: pull_request: - paths: - # Paths for whole project (should be same in all workflows) - - '**' - - '!.github/workflows/**' - - '!.gitlab/**' - - '!.vscode/**' - - '!devdocs/**' - - '!examples/**' - - '!*.md' - - '!.gitlab-ci.yml' - - '!LICENSE*' - - 'LICENSE-3rdparty.csv' - - '!NOTICE' - # Paths for this workflow only - - '.github/workflows/apm-integrations.yml' push: branches: [master] schedule: diff --git a/.github/workflows/appsec.yml b/.github/workflows/appsec.yml index 92a7558db32..ec65546f08c 100644 --- a/.github/workflows/appsec.yml +++ b/.github/workflows/appsec.yml @@ -2,21 +2,6 @@ name: AppSec on: pull_request: - paths: - # Paths for whole project (should be same in all workflows) - - '**' - - '!.github/workflows/**' - - '!.gitlab/**' - - '!.vscode/**' - - '!devdocs/**' - - '!examples/**' - - '!*.md' - - '!.gitlab-ci.yml' - - '!LICENSE*' - - 'LICENSE-3rdparty.csv' - - '!NOTICE' - # Paths for this workflow only - - '.github/workflows/appsec.yml' push: branches: [master] schedule: diff --git a/.github/workflows/debugger.yml b/.github/workflows/debugger.yml index 3905b3eb3c9..88b288a7970 100644 --- a/.github/workflows/debugger.yml +++ b/.github/workflows/debugger.yml @@ -2,21 +2,6 @@ name: Debugger on: pull_request: - paths: - # Paths for whole project (should be same in all workflows) - - '**' - - '!.github/workflows/**' - - '!.gitlab/**' - - '!.vscode/**' - - '!devdocs/**' - - '!examples/**' - - '!*.md' - - '!.gitlab-ci.yml' - - '!LICENSE*' - - 'LICENSE-3rdparty.csv' - - '!NOTICE' - # Paths for this workflow only - - '.github/workflows/debugger.yml' push: branches: [master] schedule: diff --git a/.github/workflows/lambda.yml b/.github/workflows/lambda.yml index 16ed6c37f1f..54e6fb40c44 100644 --- a/.github/workflows/lambda.yml +++ b/.github/workflows/lambda.yml @@ -2,21 +2,6 @@ name: Lambda on: pull_request: - paths: - # Paths for whole project (should be same in all workflows) - - '**' - - '!.github/workflows/**' - - '!.gitlab/**' - - '!.vscode/**' - - '!devdocs/**' - - '!examples/**' - - '!*.md' - - '!.gitlab-ci.yml' - - '!LICENSE*' - - 'LICENSE-3rdparty.csv' - - '!NOTICE' - # Paths for this workflow only - - '.github/workflows/lambda.yml' push: branches: [master] schedule: diff --git a/.github/workflows/llmobs.yml b/.github/workflows/llmobs.yml index 61ebc4dd966..073d80045f9 100644 --- a/.github/workflows/llmobs.yml +++ b/.github/workflows/llmobs.yml @@ -2,21 +2,6 @@ name: LLMObs on: pull_request: - paths: - # Paths for whole project (should be same in all workflows) - - '**' - - '!.github/workflows/**' - - '!.gitlab/**' - - '!.vscode/**' - - '!devdocs/**' - - '!examples/**' - - '!*.md' - - '!.gitlab-ci.yml' - - '!LICENSE*' - - 'LICENSE-3rdparty.csv' - - '!NOTICE' - # Paths for this workflow only - - '.github/workflows/llmobs.yml' push: branches: [master] schedule: diff --git a/.github/workflows/platform.yml b/.github/workflows/platform.yml index 2931ad86871..dd4278200f3 100644 --- a/.github/workflows/platform.yml +++ b/.github/workflows/platform.yml @@ -2,21 +2,6 @@ name: Platform on: pull_request: - paths: - # Paths for whole 
project (should be same in all workflows) - - '**' - - '!.github/workflows/**' - - '!.gitlab/**' - - '!.vscode/**' - - '!devdocs/**' - - '!examples/**' - - '!*.md' - - '!.gitlab-ci.yml' - - '!LICENSE*' - - 'LICENSE-3rdparty.csv' - - '!NOTICE' - # Paths for this workflow only - - '.github/workflows/platform.yml' push: branches: [master] schedule: diff --git a/.github/workflows/profiling.yml b/.github/workflows/profiling.yml index cbc500a8513..60175d72b4b 100644 --- a/.github/workflows/profiling.yml +++ b/.github/workflows/profiling.yml @@ -2,21 +2,6 @@ name: Profiling on: pull_request: - paths: - # Paths for whole project (should be same in all workflows) - - '**' - - '!.github/workflows/**' - - '!.gitlab/**' - - '!.vscode/**' - - '!devdocs/**' - - '!examples/**' - - '!*.md' - - '!.gitlab-ci.yml' - - '!LICENSE*' - - 'LICENSE-3rdparty.csv' - - '!NOTICE' - # Paths for this workflow only - - '.github/workflows/profiling.yml' push: branches: [master] schedule: diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml index 76e6816733c..02493c4a395 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -2,21 +2,6 @@ name: Project on: pull_request: - paths: - # Paths for whole project (should be same in all workflows) - - '**' - - '!.github/workflows/**' - - '!.gitlab/**' - - '!.vscode/**' - - '!devdocs/**' - - '!examples/**' - - '!*.md' - - '!.gitlab-ci.yml' - - '!LICENSE*' - - 'LICENSE-3rdparty.csv' - - '!NOTICE' - # Paths for this workflow only - - '.github/workflows/**' push: branches: [master] diff --git a/.github/workflows/system-tests.yml b/.github/workflows/system-tests.yml index ad5de4f3e89..1d500c28379 100644 --- a/.github/workflows/system-tests.yml +++ b/.github/workflows/system-tests.yml @@ -2,21 +2,6 @@ name: System Tests on: pull_request: - paths: - # Paths for whole project (should be same in all workflows) - - '**' - - '!.github/workflows/**' - - '!.gitlab/**' - - '!.vscode/**' - - '!devdocs/**' - - '!examples/**' - - '!*.md' - - '!.gitlab-ci.yml' - - '!LICENSE*' - - 'LICENSE-3rdparty.csv' - - '!NOTICE' - # Paths for this workflow only - - '.github/workflows/system-tests.yml' push: branches: [master] workflow_dispatch: diff --git a/.github/workflows/test-optimization.yml b/.github/workflows/test-optimization.yml index 95ba51dffe1..b516300e6c7 100644 --- a/.github/workflows/test-optimization.yml +++ b/.github/workflows/test-optimization.yml @@ -2,21 +2,6 @@ name: Test Optimization on: pull_request: - paths: - # Paths for whole project (should be same in all workflows) - - '**' - - '!.github/workflows/**' - - '!.gitlab/**' - - '!.vscode/**' - - '!devdocs/**' - - '!examples/**' - - '!*.md' - - '!.gitlab-ci.yml' - - '!LICENSE*' - - 'LICENSE-3rdparty.csv' - - '!NOTICE' - # Paths for this workflow only - - '.github/workflows/test-optimization.yml' push: branches: - master diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 4bcb9c39129..7582ecf2f6e 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -16,56 +16,8 @@ stages: include: - local: ".gitlab/one-pipeline.locked.yml" - rules: - - changes: - paths: - # Paths for whole project (should be same in all workflows) - - '**' - - '!.github/workflows/**' - - '!.gitlab/**' - - '!.vscode/**' - - '!devdocs/**' - - '!examples/**' - - '!*.md' - - '!LICENSE*' - - 'LICENSE-3rdparty.csv' - - '!NOTICE' - # Paths for this workflow only - - '.gitlab/one-pipeline.locked.yml' - local: ".gitlab/benchmarks.yml" - rules: - - changes: - paths: - # Paths for whole project (should be same in all workflows) - - '**' - 
- '!.github/workflows/**' - - '!.gitlab/**' - - '!.vscode/**' - - '!devdocs/**' - - '!examples/**' - - '!*.md' - - '!LICENSE*' - - 'LICENSE-3rdparty.csv' - - '!NOTICE' - # Paths for this workflow only - - '.gitlab/benchmarks.yml' - local: ".gitlab/macrobenchmarks.yml" - rules: - - changes: - paths: - # Paths for whole project (should be same in all workflows) - - '**' - - '!.github/workflows/**' - - '!.gitlab/**' - - '!.vscode/**' - - '!devdocs/**' - - '!examples/**' - - '!*.md' - - '!LICENSE*' - - 'LICENSE-3rdparty.csv' - - '!NOTICE' - # Paths for this workflow only - - '.gitlab/macrobenchmarks.yml' workflow: rules: diff --git a/scripts/verify-ci-config.js b/scripts/verify-ci-config.js index 337f66d1698..7ea874e60c7 100644 --- a/scripts/verify-ci-config.js +++ b/scripts/verify-ci-config.js @@ -174,6 +174,7 @@ const IGNORED_WORKFLOWS = { 'retry.yml' ], trigger_pull_request: [ + 'audit.yml', 'eslint-rules.yml', 'stale.yml' ], @@ -200,10 +201,9 @@ for (const workflow of workflows) { const yamlPath = path.join(__dirname, '..', '.github', 'workflows', workflow) const yamlContent = yaml.parse(fs.readFileSync(yamlPath, 'utf8')) const triggers = yamlContent.on - if (!IGNORED_WORKFLOWS.trigger_pull_request.includes(workflow)) { - if (triggers?.pull_request !== null && Object.keys(triggers.pull_request).toString() !== 'paths') { - triggersError(workflow, 'The `pull_request` trigger should be blank or only include paths') - } + if (!IGNORED_WORKFLOWS.trigger_pull_request.includes(workflow) && + triggers?.pull_request !== null) { + triggersError(workflow, 'The `pull_request` trigger should be blank') } if (!IGNORED_WORKFLOWS.trigger_push.includes(workflow) && triggers?.push?.branches?.[0] !== 'master') { From 6c03eae4e4d1f2b16de153ea0be3ef428388787c Mon Sep 17 00:00:00 2001 From: Thomas Watson Date: Tue, 15 Jul 2025 09:47:04 +0200 Subject: [PATCH 38/53] ci: cap supported next version to be <15.4.1 (#6094) The latest version of next, which was just released, breaks our CI. 
Until a proper fix can be implemented, cap the supported range to <15.4.1 --- .github/workflows/apm-integrations.yml | 4 ++-- .github/workflows/appsec.yml | 4 ++-- packages/datadog-instrumentations/src/next.js | 10 +++++----- packages/datadog-plugin-next/test/index.spec.js | 2 +- .../test/integration-test/client.spec.js | 2 +- .../dd-trace/test/appsec/index.next.plugin.spec.js | 2 +- 6 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/apm-integrations.yml b/.github/workflows/apm-integrations.yml index 6b8e6e4549f..0df8fba45cf 100644 --- a/.github/workflows/apm-integrations.yml +++ b/.github/workflows/apm-integrations.yml @@ -710,7 +710,7 @@ jobs: version: - 18 - latest - range: ['>=10.2.0 <11', '>=11.0.0 <13', '11.1.4', '>=13.0.0 <14', '13.2.0', '>=14.0.0 <=14.2.6', '>=14.2.7 <15', '>=15.0.0'] + range: ['>=10.2.0 <11', '>=11.0.0 <13', '11.1.4', '>=13.0.0 <14', '13.2.0', '>=14.0.0 <=14.2.6', '>=14.2.7 <15', '>=15.0.0 <15.4.1'] include: - range: '>=10.2.0 <11' range_clean: gte.10.2.0.and.lt.11 @@ -726,7 +726,7 @@ jobs: range_clean: gte.14.0.0.and.lte.14.2.6 - range: '>=14.2.7 <15' range_clean: gte.14.2.7.and.lt.15 - - range: '>=15.0.0' + - range: '>=15.0.0 <15.4.1' range_clean: gte.15.0.0 runs-on: ubuntu-latest env: diff --git a/.github/workflows/appsec.yml b/.github/workflows/appsec.yml index ec65546f08c..4c40ebff52b 100644 --- a/.github/workflows/appsec.yml +++ b/.github/workflows/appsec.yml @@ -216,7 +216,7 @@ jobs: version: - oldest - latest - range: ['>=10.2.0 <11', '>=11.0.0 <13', '11.1.4', '>=13.0.0 <14', '13.2.0', '>=14.0.0 <=14.2.6', '>=14.2.7 <15', '>=15.0.0'] + range: ['>=10.2.0 <11', '>=11.0.0 <13', '11.1.4', '>=13.0.0 <14', '13.2.0', '>=14.0.0 <=14.2.6', '>=14.2.7 <15', '>=15.0.0 <15.4.1'] include: - range: '>=10.2.0 <11' range_clean: gte.10.2.0.and.lt.11 @@ -232,7 +232,7 @@ jobs: range_clean: gte.14.0.0.and.lte.14.2.6 - range: '>=14.2.7 <15' range_clean: gte.14.2.7.and.lt.15 - - range: '>=15.0.0' + - range: '>=15.0.0 <15.4.1' range_clean: gte.15.0.0 runs-on: ubuntu-latest env: diff --git a/packages/datadog-instrumentations/src/next.js b/packages/datadog-instrumentations/src/next.js index 335304f4012..375e82d6524 100644 --- a/packages/datadog-instrumentations/src/next.js +++ b/packages/datadog-instrumentations/src/next.js @@ -197,7 +197,7 @@ function finish (ctx, result, err) { // however, it is not provided as a class function or exported property addHook({ name: 'next', - versions: ['>=13.3.0'], + versions: ['>=13.3.0 <15.4.1'], file: 'dist/server/web/spec-extension/adapters/next-request.js' }, NextRequestAdapter => { shimmer.wrap(NextRequestAdapter.NextRequestAdapter, 'fromNodeNextRequest', fromNodeNextRequest => { @@ -212,7 +212,7 @@ addHook({ addHook({ name: 'next', - versions: ['>=11.1'], + versions: ['>=11.1 <15.4.1'], file: 'dist/server/serve-static.js' }, serveStatic => shimmer.wrap(serveStatic, 'serveStatic', wrapServeStatic, { replaceGetter: true })) @@ -222,7 +222,7 @@ addHook({ file: 'dist/next-server/server/serve-static.js' }, serveStatic => shimmer.wrap(serveStatic, 'serveStatic', wrapServeStatic, { replaceGetter: true })) -addHook({ name: 'next', versions: ['>=11.1'], file: 'dist/server/next-server.js' }, nextServer => { +addHook({ name: 'next', versions: ['>=11.1 <15.4.1'], file: 'dist/server/next-server.js' }, nextServer => { const Server = nextServer.default shimmer.wrap(Server.prototype, 'handleRequest', wrapHandleRequest) @@ -239,7 +239,7 @@ addHook({ name: 'next', versions: ['>=11.1'], file: 'dist/server/next-server.js' }) // 
`handleApiRequest` changes parameters/implementation at 13.2.0 -addHook({ name: 'next', versions: ['>=13.2'], file: 'dist/server/next-server.js' }, nextServer => { +addHook({ name: 'next', versions: ['>=13.2 <15.4.1'], file: 'dist/server/next-server.js' }, nextServer => { const Server = nextServer.default shimmer.wrap(Server.prototype, 'handleApiRequest', wrapHandleApiRequestWithMatch) return nextServer @@ -277,7 +277,7 @@ addHook({ addHook({ name: 'next', - versions: ['>=13'], + versions: ['>=13 <15.4.1'], file: 'dist/server/web/spec-extension/request.js' }, request => { shimmer.wrap(request.NextRequest.prototype, 'nextUrl', function (originalGet) { diff --git a/packages/datadog-plugin-next/test/index.spec.js b/packages/datadog-plugin-next/test/index.spec.js index 627cbc6e2e0..32f2895f603 100644 --- a/packages/datadog-plugin-next/test/index.spec.js +++ b/packages/datadog-plugin-next/test/index.spec.js @@ -19,7 +19,7 @@ describe('Plugin', function () { const satisfiesStandalone = version => satisfies(version, '>=12.0.0') // TODO: Figure out why 10.x tests are failing. - withVersions('next', 'next', '>=11.1', version => { + withVersions('next', 'next', '>=11.1 <15.4.1', version => { const pkg = require(`../../../versions/next@${version}/node_modules/next/package.json`) const startServer = ({ withConfig, standalone }, schemaVersion = 'v0', defaultToGlobalService = false) => { diff --git a/packages/datadog-plugin-next/test/integration-test/client.spec.js b/packages/datadog-plugin-next/test/integration-test/client.spec.js index 76b114bc76c..1265e91b1f5 100644 --- a/packages/datadog-plugin-next/test/integration-test/client.spec.js +++ b/packages/datadog-plugin-next/test/integration-test/client.spec.js @@ -16,7 +16,7 @@ describe('esm', () => { let proc let sandbox // match versions tested with unit tests - withVersions('next', 'next', '>=11.1', version => { + withVersions('next', 'next', '>=11.1 <15.4.1', version => { before(async function () { // next builds slower in the CI, match timeout with unit tests this.timeout(300 * 1000) diff --git a/packages/dd-trace/test/appsec/index.next.plugin.spec.js b/packages/dd-trace/test/appsec/index.next.plugin.spec.js index 8f7fbf2a9d8..4aaa0445336 100644 --- a/packages/dd-trace/test/appsec/index.next.plugin.spec.js +++ b/packages/dd-trace/test/appsec/index.next.plugin.spec.js @@ -16,7 +16,7 @@ describe('test suite', () => { const satisfiesStandalone = version => satisfies(version, '>=12.0.0') - withVersions('next', 'next', '>=11.1', version => { + withVersions('next', 'next', '>=11.1 <15.4.1', version => { if (version === '>=11.0.0 <13' && NODE_MAJOR === 24 && NODE_MINOR === 0 && NODE_PATCH === 0) { return // node 24.0.0 fails, but 24.0.1 works From 70cdb458968119c92568a58f5bf14476b7d0a021 Mon Sep 17 00:00:00 2001 From: simon-id Date: Tue, 15 Jul 2025 11:53:10 +0200 Subject: [PATCH 39/53] replace get-port with OS default port in ip_extractor.spec.js (#6095) --- .../dd-trace/test/plugins/util/ip_extractor.spec.js | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/packages/dd-trace/test/plugins/util/ip_extractor.spec.js b/packages/dd-trace/test/plugins/util/ip_extractor.spec.js index 2902c558f61..8138a01b353 100644 --- a/packages/dd-trace/test/plugins/util/ip_extractor.spec.js +++ b/packages/dd-trace/test/plugins/util/ip_extractor.spec.js @@ -2,7 +2,6 @@ require('../../setup/tap') -const getPort = require('get-port') const { extractIp } = require('../../../src/plugins/util/ip_extractor') const http = require('http') const axios = 
require('axios') @@ -10,12 +9,6 @@ const axios = require('axios') describe('ip extractor', () => { let port, appListener, controller - before(() => { - return getPort().then(newPort => { - port = newPort - }) - }) - before(done => { const server = new http.Server(async (req, res) => { controller && await controller(req, res) @@ -23,7 +16,10 @@ describe('ip extractor', () => { res.end(JSON.stringify({ message: 'OK' })) }) appListener = server - .listen(port, 'localhost', () => done()) + .listen(0, 'localhost', () => { + port = server.address().port + done() + }) }) after(() => { From df209f2c2f096a4a704d14285dd6380db3bda442 Mon Sep 17 00:00:00 2001 From: Dmytro Yurchenko <88330911+ddyurchenko@users.noreply.github.com> Date: Tue, 15 Jul 2025 13:59:55 +0200 Subject: [PATCH 40/53] ci: disable legacy Gitlab exec in order to fix jobs failing with green status (#6097) --- .gitlab/benchmarks.yml | 10 ---------- .gitlab/macrobenchmarks.yml | 2 -- 2 files changed, 12 deletions(-) diff --git a/.gitlab/benchmarks.yml b/.gitlab/benchmarks.yml index 75a278e4150..6da4bf7dfb2 100644 --- a/.gitlab/benchmarks.yml +++ b/.gitlab/benchmarks.yml @@ -23,10 +23,6 @@ variables: paths: - platform/artifacts/ expire_in: 3 months - variables: - # Gitlab and BP specific env vars. Do not modify. - KUBERNETES_SERVICE_ACCOUNT_OVERWRITE: dd-trace-js - FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY: "true" benchmarks-pr-comment: stage: benchmarks-pr-comment @@ -38,9 +34,6 @@ benchmarks-pr-comment: - cd platform && (git init && git remote add origin https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.ddbuild.io/DataDog/benchmarking-platform && git pull origin dd-trace-js) - bp-runner bp-runner.pr-comment.yml --debug allow_failure: true - variables: - # Gitlab and BP specific env vars. Do not modify. - KUBERNETES_SERVICE_ACCOUNT_OVERWRITE: dd-trace-js check-big-regressions: stage: benchmarks-pr-comment @@ -51,9 +44,6 @@ check-big-regressions: script: - cd platform && (git init && git remote add origin https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.ddbuild.io/DataDog/benchmarking-platform && git pull origin dd-trace-js) - bp-runner bp-runner.fail-on-regression.yml --debug - variables: - # Gitlab and BP specific env vars. Do not modify. 
- KUBERNETES_SERVICE_ACCOUNT_OVERWRITE: dd-trace-js benchmark: extends: .benchmarks diff --git a/.gitlab/macrobenchmarks.yml b/.gitlab/macrobenchmarks.yml index 656a20be067..489b033f838 100644 --- a/.gitlab/macrobenchmarks.yml +++ b/.gitlab/macrobenchmarks.yml @@ -19,8 +19,6 @@ - platform/artifacts/ expire_in: 3 months variables: - FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY: "true" - K6_OPTIONS_WARMUP_RATE: 500 K6_OPTIONS_WARMUP_DURATION: 1m K6_OPTIONS_WARMUP_GRACEFUL_STOP: 10s From 511cfa6ff58b5c2cdd74728b305ac4769d693ac0 Mon Sep 17 00:00:00 2001 From: Sam Brenner <106700075+sabrenner@users.noreply.github.com> Date: Tue, 15 Jul 2025 09:11:01 -0400 Subject: [PATCH 41/53] feat(langchain, llmobs): expand langchain support for tools and vectorstores (#5760) * minimal apm spans * add llmobs plugins * tool io refactor * add tool tests * apm vectorstore tests * fix peer dependencies issue * switch to agent.assertSomeTraces * llmobs vectorstores tests * remove .only * remove promise.all * lint * do not need to format i/o for tools --------- Co-authored-by: Thomas Hunter II --- .../datadog-instrumentations/src/langchain.js | 21 ++ .../src/orchestrion-config/index.js | 30 +++ .../datadog-plugin-langchain/src/tracing.js | 40 +++- .../test/index.spec.js | 167 +++++++++++++- .../plugins/langchain/handlers/index.js | 5 + .../llmobs/plugins/langchain/handlers/tool.js | 15 ++ .../plugins/langchain/handlers/vectorstore.js | 36 +++ .../src/llmobs/plugins/langchain/index.js | 51 ++++- .../llmobs/plugins/langchain/index.spec.js | 214 +++++++++++++++++- packages/dd-trace/test/plugins/externals.json | 4 +- 10 files changed, 566 insertions(+), 17 deletions(-) create mode 100644 packages/dd-trace/src/llmobs/plugins/langchain/handlers/tool.js create mode 100644 packages/dd-trace/src/llmobs/plugins/langchain/handlers/vectorstore.js diff --git a/packages/datadog-instrumentations/src/langchain.js b/packages/datadog-instrumentations/src/langchain.js index fb43f65a932..3e3f6d50cdc 100644 --- a/packages/datadog-instrumentations/src/langchain.js +++ b/packages/datadog-instrumentations/src/langchain.js @@ -53,6 +53,27 @@ for (const extension of extensions) { return exports }) + addHook({ name: '@langchain/core', file: `dist/tools/index.${extension}`, versions: ['>=0.1'] }, exports => { + if (extension === 'cjs') { + wrap(exports.StructuredTool.prototype, 'invoke', 'orchestrion:@langchain/core:Tool_invoke') + } + return exports + }) + + addHook({ name: '@langchain/core', file: `dist/vectorstores.${extension}`, versions: ['>=0.1'] }, exports => { + if (extension === 'cjs') { + wrap( + exports.VectorStore.prototype, 'similaritySearch', 'orchestrion:@langchain/core:VectorStore_similaritySearch' + ) + wrap( + exports.VectorStore.prototype, 'similaritySearchWithScore', + 'orchestrion:@langchain/core:VectorStore_similaritySearchWithScore' + ) + } + + return exports + }) + addHook({ name: '@langchain/core', file: `dist/embeddings.${extension}`, versions: ['>=0.1'] }, exports => { if (extension === 'cjs') { shimmer.wrap(exports, 'Embeddings', Embeddings => { diff --git a/packages/datadog-instrumentations/src/orchestrion-config/index.js b/packages/datadog-instrumentations/src/orchestrion-config/index.js index 9849f5b36fe..4692714a0ee 100644 --- a/packages/datadog-instrumentations/src/orchestrion-config/index.js +++ b/packages/datadog-instrumentations/src/orchestrion-config/index.js @@ -53,4 +53,34 @@ instrumentations: class: Embeddings operator: traceSync channel_name: "Embeddings_constructor" + - module_name: "@langchain/core" + 
version_range: ">=0.1.0" + file_path: dist/tools/index.js + function_query: + name: invoke + type: method + kind: async + class: StructuredTool + operator: tracePromise + channel_name: "Tool_invoke" + - module_name: "@langchain/core" + version_range: ">=0.1.0" + file_path: dist/vectorstores.js + function_query: + name: similaritySearch + type: method + kind: async + class: VectorStore + operator: tracePromise + channel_name: "VectorStore_similaritySearch" + - module_name: "@langchain/core" + version_range: ">=0.1.0" + file_path: dist/vectorstores.js + function_query: + name: similaritySearchWithScore + type: method + kind: async + class: VectorStore + operator: tracePromise + channel_name: "VectorStore_similaritySearchWithScore" ` diff --git a/packages/datadog-plugin-langchain/src/tracing.js b/packages/datadog-plugin-langchain/src/tracing.js index 62e3841d5a1..fcb0fc0185e 100644 --- a/packages/datadog-plugin-langchain/src/tracing.js +++ b/packages/datadog-plugin-langchain/src/tracing.js @@ -40,9 +40,14 @@ class BaseLangChainTracingPlugin extends TracingPlugin { // Runnable interfaces have an `lc_namespace` property const ns = ctx.self.lc_namespace || ctx.namespace - const resource = ctx.resource = [...ns, ctx.self.constructor.name].join('.') - const handler = this.handlers[type] + const resourceParts = [...ns, ctx.self.constructor.name] + if (type === 'tool') { + resourceParts.push(ctx.instance.name) + } + const resource = ctx.resource = resourceParts.join('.') + + const handler = this.handlers[type] || this.handlers.default const instance = ctx.instance const apiKey = handler.extractApiKey(instance) @@ -78,7 +83,7 @@ class BaseLangChainTracingPlugin extends TracingPlugin { const { type } = ctx - const handler = this.handlers[type] + const handler = this.handlers[type] || this.handlers.default const tags = handler.getSpanEndTags(ctx, span) || {} span.addTags(tags) @@ -139,11 +144,38 @@ class EmbeddingsEmbedDocumentsPlugin extends BaseLangChainTracingPlugin { } } +class ToolInvokePlugin extends BaseLangChainTracingPlugin { + static get id () { return 'langchain_tool_invoke' } + static get lcType () { return 'tool' } + static get prefix () { + return 'tracing:orchestrion:@langchain/core:Tool_invoke' + } +} + +class VectorStoreSimilaritySearchPlugin extends BaseLangChainTracingPlugin { + static get id () { return 'langchain_vectorstore_similarity_search' } + static get lcType () { return 'similarity_search' } + static get prefix () { + return 'tracing:orchestrion:@langchain/core:VectorStore_similaritySearch' + } +} + +class VectorStoreSimilaritySearchWithScorePlugin extends BaseLangChainTracingPlugin { + static get id () { return 'langchain_vectorstore_similarity_search_with_score' } + static get lcType () { return 'similarity_search' } + static get prefix () { + return 'tracing:orchestrion:@langchain/core:VectorStore_similaritySearchWithScore' + } +} + module.exports = [ RunnableSequenceInvokePlugin, RunnableSequenceBatchPlugin, BaseChatModelGeneratePlugin, BaseLLMGeneratePlugin, EmbeddingsEmbedQueryPlugin, - EmbeddingsEmbedDocumentsPlugin + EmbeddingsEmbedDocumentsPlugin, + ToolInvokePlugin, + VectorStoreSimilaritySearchPlugin, + VectorStoreSimilaritySearchWithScorePlugin ] diff --git a/packages/datadog-plugin-langchain/test/index.spec.js b/packages/datadog-plugin-langchain/test/index.spec.js index 4d667e8fdcd..42a9ed7eb6c 100644 --- a/packages/datadog-plugin-langchain/test/index.spec.js +++ b/packages/datadog-plugin-langchain/test/index.spec.js @@ -19,6 +19,27 @@ const 
openAiBaseEmbeddingInfo = { base: 'https://api.openai.com', path: '/v1/emb const isDdTrace = iastFilter.isDdTrace +function stubSingleEmbedding (langchainOpenaiOpenAiVersion) { + if (semver.satisfies(langchainOpenaiOpenAiVersion, '>=4.91.0')) { + stubCall({ + ...openAiBaseEmbeddingInfo, + response: require('./fixtures/single-embedding.json') + }) + } else { + stubCall({ + ...openAiBaseEmbeddingInfo, + response: { + object: 'list', + data: [{ + object: 'embedding', + index: 0, + embedding: Array(1536).fill(0) + }] + } + }) + } +} + describe('Plugin', () => { let langchainOpenai let langchainAnthropic @@ -28,7 +49,8 @@ describe('Plugin', () => { let langchainOutputParsers let langchainPrompts let langchainRunnables - + let langchainTools + let MemoryVectorStore /** * In OpenAI 4.91.0, the default response format for embeddings was changed from `float` to `base64`. * We do not have control in @langchain/openai embeddings to change this for an individual call, @@ -65,7 +87,8 @@ describe('Plugin', () => { }) beforeEach(() => { - langchainOpenai = require(`../../../versions/@langchain/openai@${version}`).get() + langchainOpenai = require(`../../../versions/langchain@${version}`) + .get('@langchain/openai') langchainAnthropic = require(`../../../versions/@langchain/anthropic@${version}`).get() if (version !== '0.1.0') { // version mismatching otherwise @@ -80,10 +103,17 @@ describe('Plugin', () => { langchainPrompts = require(`../../../versions/@langchain/core@${version}`).get('@langchain/core/prompts') langchainRunnables = require(`../../../versions/@langchain/core@${version}`).get('@langchain/core/runnables') + langchainTools = require(`../../../versions/@langchain/core@${version}`) + .get('@langchain/core/tools') + + MemoryVectorStore = require(`../../../versions/@langchain/core@${version}`) + .get('langchain/vectorstores/memory') + .MemoryVectorStore + langchainOpenaiOpenAiVersion = - require(`../../../versions/@langchain/openai@${version}`) - .get('openai/version') - .VERSION + require(`../../../versions/langchain@${version}`) + .get('openai/version') + .VERSION }) afterEach(() => { @@ -1013,6 +1043,133 @@ describe('Plugin', () => { }) }) }) + + describe('tools', () => { + it('traces a tool call', async function () { + if (!langchainTools?.tool) this.skip() + + const myTool = langchainTools.tool( + () => 'Hello, world!', + { + name: 'myTool', + description: 'A tool that returns a greeting' + } + ) + + const checkTraces = agent.assertSomeTraces(traces => { + const span = traces[0][0] + + expect(span).to.have.property('name', 'langchain.request') + expect(span.resource).to.match(/^langchain\.tools\.[^.]+\.myTool$/) + }) + const result = await myTool.invoke() + expect(result).to.equal('Hello, world!') + + await checkTraces + }) + + it('traces a tool call with an error', async function () { + if (!langchainTools?.tool) this.skip() + + const myTool = langchainTools.tool( + () => { throw new Error('This is a test error') }, + { + name: 'myTool', + description: 'A tool that throws an error' + } + ) + + const checkTraces = agent.assertSomeTraces(traces => { + const span = traces[0][0] + + expect(span).to.have.property('name', 'langchain.request') + expect(span.resource).to.match(/^langchain\.tools\.[^.]+\.myTool$/) + + expect(span.meta).to.have.property('error.message') + expect(span.meta).to.have.property('error.type') + expect(span.meta).to.have.property('error.stack') + }) + + try { + await myTool.invoke() + expect.fail('Expected an error to be thrown') + } catch {} + + await checkTraces + }) + 
}) + + describe('vectorstores', () => { + let vectorstore + + beforeEach(async () => { + // need to mock out adding a document to the vectorstore + stubSingleEmbedding(langchainOpenaiOpenAiVersion) + + const embeddings = new langchainOpenai.OpenAIEmbeddings() + vectorstore = new MemoryVectorStore(embeddings) + + const document = { + pageContent: 'The powerhouse of the cell is the mitochondria', + metadata: { source: 'https://example.com' }, + id: '1' + } + + return vectorstore.addDocuments([document]) + }) + + it('traces a vectorstore similaritySearch call', async () => { + stubSingleEmbedding(langchainOpenaiOpenAiVersion) + + const checkTraces = agent.assertSomeTraces(traces => { + const spans = traces[0] + + expect(spans).to.have.length(2) + + const vectorstoreSpan = spans[0] + const embeddingSpan = spans[1] + + expect(vectorstoreSpan).to.have.property('name', 'langchain.request') + expect(vectorstoreSpan).to.have.property('resource', 'langchain.vectorstores.memory.MemoryVectorStore') + + expect(embeddingSpan).to.have.property('name', 'langchain.request') + expect(embeddingSpan).to.have.property('resource', 'langchain.embeddings.openai.OpenAIEmbeddings') + }, { spanResourceMatch: /langchain\.vectorstores\.memory\.MemoryVectorStore/ }) + // we need the spanResourceMatch, otherwise we'll match from the beforeEach + + const result = await vectorstore.similaritySearch('The powerhouse of the cell is the mitochondria', 2) + expect(result).to.exist + + await checkTraces + }) + + it('traces a vectorstore similaritySearchWithScore call', async () => { + stubSingleEmbedding(langchainOpenaiOpenAiVersion) + + const checkTraces = agent.assertSomeTraces(traces => { + const spans = traces[0] + + expect(spans).to.have.length(2) + + const vectorstoreSpan = spans[0] + const embeddingSpan = spans[1] + + expect(vectorstoreSpan).to.have.property('name', 'langchain.request') + expect(vectorstoreSpan).to.have.property('resource', 'langchain.vectorstores.memory.MemoryVectorStore') + + expect(embeddingSpan).to.have.property('name', 'langchain.request') + expect(embeddingSpan).to.have.property('resource', 'langchain.embeddings.openai.OpenAIEmbeddings') + }, { spanResourceMatch: /langchain\.vectorstores\.memory\.MemoryVectorStore/ }) + // we need the spanResourceMatch, otherwise we'll match from the beforeEach + + const result = await vectorstore.similaritySearchWithScore( + 'The powerhouse of the cell is the mitochondria', 2 + ) + expect(result).to.exist + + await checkTraces + }) + }) }) }) }) diff --git a/packages/dd-trace/src/llmobs/plugins/langchain/handlers/index.js b/packages/dd-trace/src/llmobs/plugins/langchain/handlers/index.js index c786ebb8829..a355bf96296 100644 --- a/packages/dd-trace/src/llmobs/plugins/langchain/handlers/index.js +++ b/packages/dd-trace/src/llmobs/plugins/langchain/handlers/index.js @@ -8,9 +8,14 @@ const ROLE_MAPPINGS = { class LangChainLLMObsHandler { constructor (tagger) { + /** @type {import('../../../tagger')} */ this._tagger = tagger } + getName ({ span }) { + return span?.context()._tags?.['resource.name'] + } + setMetaTags () {} formatIO (messages) { diff --git a/packages/dd-trace/src/llmobs/plugins/langchain/handlers/tool.js b/packages/dd-trace/src/llmobs/plugins/langchain/handlers/tool.js new file mode 100644 index 00000000000..4f05870e690 --- /dev/null +++ b/packages/dd-trace/src/llmobs/plugins/langchain/handlers/tool.js @@ -0,0 +1,15 @@ +'use strict' + +const LangChainLLMObsHandler = require('.') + +class LangChainLLMObsToolHandler extends LangChainLLMObsHandler { + 
getName ({ instance }) { + return instance.name + } + + setMetaTags ({ span, inputs, results }) { + this._tagger.tagTextIO(span, inputs, results) + } +} + +module.exports = LangChainLLMObsToolHandler diff --git a/packages/dd-trace/src/llmobs/plugins/langchain/handlers/vectorstore.js b/packages/dd-trace/src/llmobs/plugins/langchain/handlers/vectorstore.js new file mode 100644 index 00000000000..02500a724da --- /dev/null +++ b/packages/dd-trace/src/llmobs/plugins/langchain/handlers/vectorstore.js @@ -0,0 +1,36 @@ +'use strict' + +const LangChainLLMObsHandler = require('.') +const { spanHasError } = require('../../../util') + +class LangChainLLMObsVectorStoreHandler extends LangChainLLMObsHandler { + setMetaTags ({ span, inputs, results }) { + const input = this.formatIO(inputs) + if (spanHasError(span)) { + this._tagger.tagRetrievalIO(span, input) + return + } + + const documents = [] + for (const documentResult of results) { + let document, score + if (Array.isArray(documentResult)) { + document = documentResult[0] + score = documentResult[1] + } else { + document = documentResult + } + + documents.push({ + text: document.pageContent, + id: document.id, + name: document.metadata?.source, + score + }) + } + + this._tagger.tagRetrievalIO(span, input, documents) + } +} + +module.exports = LangChainLLMObsVectorStoreHandler diff --git a/packages/dd-trace/src/llmobs/plugins/langchain/index.js b/packages/dd-trace/src/llmobs/plugins/langchain/index.js index 74c5cdb2fac..3e6613f7a9c 100644 --- a/packages/dd-trace/src/llmobs/plugins/langchain/index.js +++ b/packages/dd-trace/src/llmobs/plugins/langchain/index.js @@ -14,11 +14,15 @@ const LLM_SPAN_TYPES = new Set(['llm', 'chat_model', 'embedding']) const LLM = 'llm' const WORKFLOW = 'workflow' const EMBEDDING = 'embedding' +const TOOL = 'tool' +const RETRIEVAL = 'retrieval' const ChainHandler = require('./handlers/chain') const ChatModelHandler = require('./handlers/chat_model') const LlmHandler = require('./handlers/llm') const EmbeddingHandler = require('./handlers/embedding') +const ToolHandler = require('./handlers/tool') +const VectorStoreHandler = require('./handlers/vectorstore') class BaseLangChainLLMObsPlugin extends LLMObsPlugin { static get integration () { return 'langchain' } @@ -34,7 +38,9 @@ class BaseLangChainLLMObsPlugin extends LLMObsPlugin { chain: new ChainHandler(this._tagger), chat_model: new ChatModelHandler(this._tagger), llm: new LlmHandler(this._tagger), - embedding: new EmbeddingHandler(this._tagger) + embedding: new EmbeddingHandler(this._tagger), + tool: new ToolHandler(this._tagger), + similarity_search: new VectorStoreHandler(this._tagger) } } @@ -45,7 +51,10 @@ class BaseLangChainLLMObsPlugin extends LLMObsPlugin { const modelProvider = tags['langchain.request.provider'] // could be undefined const modelName = tags['langchain.request.model'] // could be undefined const kind = this.getKind(ctx.type, modelProvider) - const name = tags['resource.name'] + + const instance = ctx.instance || ctx.self + const handler = this._handlers[ctx.type] + const name = handler?.getName({ span, instance }) return { modelProvider, @@ -113,7 +122,14 @@ class BaseLangChainLLMObsPlugin extends LLMObsPlugin { } } - return WORKFLOW + switch (type) { + case 'tool': + return TOOL + case 'similarity_search': + return RETRIEVAL + default: + return WORKFLOW + } } getIntegrationName (type, provider = 'custom') { @@ -181,11 +197,38 @@ class EmbeddingsEmbedDocumentsPlugin extends BaseLangChainLLMObsPlugin { } } +class ToolInvokePlugin extends 
BaseLangChainLLMObsPlugin { + static get id () { return 'llmobs_langchain_tool_invoke' } + static get lcType () { return 'tool' } + static get prefix () { + return 'tracing:orchestrion:@langchain/core:Tool_invoke' + } +} + +class VectorStoreSimilaritySearchPlugin extends BaseLangChainLLMObsPlugin { + static get id () { return 'llmobs_langchain_vectorstore_similarity_search' } + static get lcType () { return 'similarity_search' } + static get prefix () { + return 'tracing:orchestrion:@langchain/core:VectorStore_similaritySearch' + } +} + +class VectorStoreSimilaritySearchWithScorePlugin extends BaseLangChainLLMObsPlugin { + static get id () { return 'llmobs_langchain_vectorstore_similarity_search_with_score' } + static get lcType () { return 'similarity_search' } + static get prefix () { + return 'tracing:orchestrion:@langchain/core:VectorStore_similaritySearchWithScore' + } +} + module.exports = [ RunnableSequenceInvokePlugin, RunnableSequenceBatchPlugin, BaseChatModelGeneratePlugin, BaseLLMGeneratePlugin, EmbeddingsEmbedQueryPlugin, - EmbeddingsEmbedDocumentsPlugin + EmbeddingsEmbedDocumentsPlugin, + ToolInvokePlugin, + VectorStoreSimilaritySearchPlugin, + VectorStoreSimilaritySearchWithScorePlugin ] diff --git a/packages/dd-trace/test/llmobs/plugins/langchain/index.spec.js b/packages/dd-trace/test/llmobs/plugins/langchain/index.spec.js index b12934b8cfc..a647f39e7a8 100644 --- a/packages/dd-trace/test/llmobs/plugins/langchain/index.spec.js +++ b/packages/dd-trace/test/llmobs/plugins/langchain/index.spec.js @@ -33,6 +33,27 @@ const openAiBaseEmbeddingInfo = { base: 'https://api.openai.com', path: '/v1/emb const isDdTrace = iastFilter.isDdTrace +function stubSingleEmbedding (langchainOpenaiOpenAiVersion) { + if (semver.satisfies(langchainOpenaiOpenAiVersion, '>=4.91.0')) { + stubCall({ + ...openAiBaseEmbeddingInfo, + response: require('../../../../../datadog-plugin-langchain/test/fixtures/single-embedding.json') + }) + } else { + stubCall({ + ...openAiBaseEmbeddingInfo, + response: { + object: 'list', + data: [{ + object: 'embedding', + index: 0, + embedding: Array(1536).fill(0) + }] + } + }) + } +} + describe('integrations', () => { let langchainOpenai let langchainAnthropic @@ -42,6 +63,8 @@ describe('integrations', () => { let langchainOutputParsers let langchainPrompts let langchainRunnables + let tool + let MemoryVectorStore /** * In OpenAI 4.91.0, the default response format for embeddings was changed from `float` to `base64`. 
@@ -103,7 +126,8 @@ describe('integrations', () => { withVersions('langchain', ['@langchain/core'], '<0.3.60', version => { describe('langchain', () => { beforeEach(() => { - langchainOpenai = require(`../../../../../../versions/@langchain/openai@${version}`).get() + langchainOpenai = require(`../../../../../../versions/langchain@${version}`) + .get('@langchain/openai') langchainAnthropic = require(`../../../../../../versions/@langchain/anthropic@${version}`).get() langchainCohere = require(`../../../../../../versions/@langchain/cohere@${version}`).get() @@ -117,8 +141,16 @@ describe('integrations', () => { langchainRunnables = require(`../../../../../../versions/@langchain/core@${version}`) .get('@langchain/core/runnables') + tool = require(`../../../../../../versions/@langchain/core@${version}`) + .get('@langchain/core/tools') + .tool + + MemoryVectorStore = require(`../../../../../../versions/@langchain/core@${version}`) + .get('langchain/vectorstores/memory') + .MemoryVectorStore + langchainOpenaiOpenAiVersion = - require(`../../../../../../versions/@langchain/openai@${version}`) + require(`../../../../../../versions/langchain@${version}`) .get('openai/version') .VERSION }) @@ -1146,6 +1178,184 @@ describe('integrations', () => { await checkTraces }) }) + + describe('tools', () => { + it('submits a tool call span', async function () { + if (!tool) this.skip() + + const add = tool( + ({ a, b }) => a + b, + { + name: 'add', + description: 'A tool that adds two numbers', + schema: { + a: { type: 'number' }, + b: { type: 'number' } + } + } + ) + + const checkTraces = agent.assertSomeTraces(traces => { + const toolSpan = traces[0][0] + + const toolSpanEvent = LLMObsSpanWriter.prototype.append.getCall(0).args[0] + + const expectedTool = expectedLLMObsNonLLMSpanEvent({ + span: toolSpan, + spanKind: 'tool', + name: 'add', + inputValue: JSON.stringify({ a: 1, b: 2 }), + outputValue: JSON.stringify(3), + tags: { ml_app: 'test', language: 'javascript', integration: 'langchain' } + }) + + expect(toolSpanEvent).to.deepEqualWithMockValues(expectedTool) + }) + + const result = await add.invoke({ a: 1, b: 2 }) + expect(result).to.equal(3) + + await checkTraces + }) + + it('submits a tool call with an error', async function () { + if (!tool) this.skip() + + const add = tool( + ({ a, b }) => { + throw new Error('This is a test error') + }, + { + name: 'add', + description: 'A tool that adds two numbers', + schema: { + a: { type: 'number' }, + b: { type: 'number' } + } + } + ) + + const checkTraces = agent.assertSomeTraces(traces => { + const toolSpan = traces[0][0] + + const toolSpanEvent = LLMObsSpanWriter.prototype.append.getCall(0).args[0] + + const expectedTool = expectedLLMObsNonLLMSpanEvent({ + span: toolSpan, + spanKind: 'tool', + name: 'add', + inputValue: JSON.stringify({ a: 1, b: 2 }), + tags: { ml_app: 'test', language: 'javascript', integration: 'langchain' }, + error: 1, + errorType: 'Error', + errorMessage: 'This is a test error', + errorStack: MOCK_ANY + }) + + expect(toolSpanEvent).to.deepEqualWithMockValues(expectedTool) + }) + + try { + await add.invoke({ a: 1, b: 2 }) + expect.fail('Expected an error to be thrown') + } catch {} + + await checkTraces + }) + }) + + describe('vectorstores', () => { + let vectorstore + + beforeEach(() => { + stubSingleEmbedding(langchainOpenaiOpenAiVersion) + + const embeddings = new langchainOpenai.OpenAIEmbeddings() + vectorstore = new MemoryVectorStore(embeddings) + + const document = { + pageContent: 'The powerhouse of the cell is the mitochondria', + 
metadata: { source: 'https://example.com' } + } + + return vectorstore.addDocuments([document]) + }) + + it('submits a retrieval span with a child embedding span for similaritySearch', async () => { + stubSingleEmbedding(langchainOpenaiOpenAiVersion) + + const checkTraces = agent.assertSomeTraces(traces => { + const spans = traces[0] // first trace is the embedding call from the beforeEach + + expect(spans).to.have.length(2) + + const vectorstoreSpan = spans[0] + + // first call was for the embedding span in the beforeEach + const retrievalSpanEvent = LLMObsSpanWriter.prototype.append.getCall(1).args[0] + const embeddingSpanEvent = LLMObsSpanWriter.prototype.append.getCall(2).args[0] + + expect(embeddingSpanEvent.meta).to.have.property('span.kind', 'embedding') + expect(embeddingSpanEvent).to.have.property('parent_id', retrievalSpanEvent.span_id) + + const expectedRetrievalEvent = expectedLLMObsNonLLMSpanEvent({ + span: vectorstoreSpan, + spanKind: 'retrieval', + name: 'langchain.vectorstores.memory.MemoryVectorStore', + inputValue: 'Biology', + outputDocuments: [{ + text: 'The powerhouse of the cell is the mitochondria', + name: 'https://example.com' + }], + tags: { ml_app: 'test', language: 'javascript', integration: 'langchain' } + }) + + expect(retrievalSpanEvent).to.deepEqualWithMockValues(expectedRetrievalEvent) + }, { spanResourceMatch: /langchain\.vectorstores\.memory\.MemoryVectorStore/ }) + + await vectorstore.similaritySearch('Biology') + + await checkTraces + }) + + it('submits a retrieval span with a child embedding span for similaritySearchWithScore', async () => { + stubSingleEmbedding(langchainOpenaiOpenAiVersion) + + const checkTraces = agent.assertSomeTraces(traces => { + const spans = traces[0] // first trace is the embedding call from the beforeEach + + expect(spans).to.have.length(2) + + const vectorstoreSpan = spans[0] + + // first call was for the embedding span in the beforeEach + const retrievalSpanEvent = LLMObsSpanWriter.prototype.append.getCall(1).args[0] + const embeddingSpanEvent = LLMObsSpanWriter.prototype.append.getCall(2).args[0] + + expect(embeddingSpanEvent.meta).to.have.property('span.kind', 'embedding') + expect(embeddingSpanEvent).to.have.property('parent_id', retrievalSpanEvent.span_id) + + const expectedRetrievalEvent = expectedLLMObsNonLLMSpanEvent({ + span: vectorstoreSpan, + spanKind: 'retrieval', + name: 'langchain.vectorstores.memory.MemoryVectorStore', + inputValue: 'Biology', + outputDocuments: [{ + text: 'The powerhouse of the cell is the mitochondria', + name: 'https://example.com', + score: 1 + }], + tags: { ml_app: 'test', language: 'javascript', integration: 'langchain' } + }) + + expect(retrievalSpanEvent).to.deepEqualWithMockValues(expectedRetrievalEvent) + }, { spanResourceMatch: /langchain\.vectorstores\.memory\.MemoryVectorStore/ }) + + await vectorstore.similaritySearchWithScore('Biology') + + await checkTraces + }) + }) }) }) }) diff --git a/packages/dd-trace/test/plugins/externals.json b/packages/dd-trace/test/plugins/externals.json index ac98325f767..157fc0fd292 100644 --- a/packages/dd-trace/test/plugins/externals.json +++ b/packages/dd-trace/test/plugins/externals.json @@ -319,11 +319,11 @@ "versions": [">=0.1"] }, { - "name": "@langchain/openai", + "name": "@langchain/cohere", "versions": [">=0.1"] }, { - "name": "@langchain/cohere", + "name": "langchain", "versions": [">=0.1"] } ], From 2d9d5829e9055fe6468949eb3f18fa672a5ed37e Mon Sep 17 00:00:00 2001 From: Roch Devost Date: Tue, 15 Jul 2025 10:26:37 -0400 Subject: [PATCH 
42/53] revert next uncapping in source code (#6098) --- packages/datadog-instrumentations/src/next.js | 10 +++++----- scripts/verify-ci-config.js | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/datadog-instrumentations/src/next.js b/packages/datadog-instrumentations/src/next.js index 375e82d6524..335304f4012 100644 --- a/packages/datadog-instrumentations/src/next.js +++ b/packages/datadog-instrumentations/src/next.js @@ -197,7 +197,7 @@ function finish (ctx, result, err) { // however, it is not provided as a class function or exported property addHook({ name: 'next', - versions: ['>=13.3.0 <15.4.1'], + versions: ['>=13.3.0'], file: 'dist/server/web/spec-extension/adapters/next-request.js' }, NextRequestAdapter => { shimmer.wrap(NextRequestAdapter.NextRequestAdapter, 'fromNodeNextRequest', fromNodeNextRequest => { @@ -212,7 +212,7 @@ addHook({ addHook({ name: 'next', - versions: ['>=11.1 <15.4.1'], + versions: ['>=11.1'], file: 'dist/server/serve-static.js' }, serveStatic => shimmer.wrap(serveStatic, 'serveStatic', wrapServeStatic, { replaceGetter: true })) @@ -222,7 +222,7 @@ addHook({ file: 'dist/next-server/server/serve-static.js' }, serveStatic => shimmer.wrap(serveStatic, 'serveStatic', wrapServeStatic, { replaceGetter: true })) -addHook({ name: 'next', versions: ['>=11.1 <15.4.1'], file: 'dist/server/next-server.js' }, nextServer => { +addHook({ name: 'next', versions: ['>=11.1'], file: 'dist/server/next-server.js' }, nextServer => { const Server = nextServer.default shimmer.wrap(Server.prototype, 'handleRequest', wrapHandleRequest) @@ -239,7 +239,7 @@ addHook({ name: 'next', versions: ['>=11.1 <15.4.1'], file: 'dist/server/next-se }) // `handleApiRequest` changes parameters/implementation at 13.2.0 -addHook({ name: 'next', versions: ['>=13.2 <15.4.1'], file: 'dist/server/next-server.js' }, nextServer => { +addHook({ name: 'next', versions: ['>=13.2'], file: 'dist/server/next-server.js' }, nextServer => { const Server = nextServer.default shimmer.wrap(Server.prototype, 'handleApiRequest', wrapHandleApiRequestWithMatch) return nextServer @@ -277,7 +277,7 @@ addHook({ addHook({ name: 'next', - versions: ['>=13 <15.4.1'], + versions: ['>=13'], file: 'dist/server/web/spec-extension/request.js' }, request => { shimmer.wrap(request.NextRequest.prototype, 'nextUrl', function (originalGet) { diff --git a/scripts/verify-ci-config.js b/scripts/verify-ci-config.js index 7ea874e60c7..06b11c93e70 100644 --- a/scripts/verify-ci-config.js +++ b/scripts/verify-ci-config.js @@ -68,7 +68,7 @@ function checkPlugins (yamlPath) { const instRanges = Array.from(rangesPerPluginFromInst[pluginName]) const yamlVersions = getMatchingVersions(pluginName, yamlRanges) const instVersions = getMatchingVersions(pluginName, instRanges) - if (!util.isDeepStrictEqual(yamlVersions, instVersions)) { + if (pluginName !== 'next' && !util.isDeepStrictEqual(yamlVersions, instVersions)) { const opts = { colors: true } const colors = x => util.inspect(x, opts) pluginErrorMsg(pluginName, 'Mismatch', ` From ac9aad3759c9fc4fb60898946d28f8491ee59e90 Mon Sep 17 00:00:00 2001 From: Roch Devost Date: Tue, 15 Jul 2025 10:29:26 -0400 Subject: [PATCH 43/53] ci: add job count for parametric system tests (#6099) --- .github/workflows/system-tests.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/system-tests.yml b/.github/workflows/system-tests.yml index 1d500c28379..92ba0e32c9a 100644 --- a/.github/workflows/system-tests.yml +++ b/.github/workflows/system-tests.yml @@ -42,4 +42,5 @@ 
jobs: desired_execution_time: 300 # 5 minutes scenarios_groups: tracer-release excluded_scenarios: APM_TRACING_E2E_OTEL,APM_TRACING_E2E_SINGLE_SPAN # require AWS and datadog credentials + parametric_job_count: 8 skip_empty_scenarios: true From 44ee7e45c7752e7a2ffcb9a3fb8e2c614739034b Mon Sep 17 00:00:00 2001 From: Roch Devost Date: Tue, 15 Jul 2025 10:30:58 -0400 Subject: [PATCH 44/53] ci: add explicit dependabot label for use in automation (#6100) --- .github/dependabot.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index b113545e10b..c10d787578c 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -16,11 +16,21 @@ updates: gh-actions-packages: patterns: - "*" + labels: + - dependabot + - dependencies + - github_actions + - semver-patch - package-ecosystem: "npm" directories: - "/" schedule: interval: "weekly" + labels: + - dependabot + - dependencies + - javascript + - semver-patch ignore: - dependency-name: "@types/node" # Update the types manually with new Node.js version support From 86458859ee989d2d258082d5dedefc4c4099d1b5 Mon Sep 17 00:00:00 2001 From: Carles Capell <107924659+CarlesDD@users.noreply.github.com> Date: Wed, 16 Jul 2025 06:45:08 +0200 Subject: [PATCH 45/53] Add ASM_EXCLUSION_DATA capability + RC management + test and refactor (#6101) --- .../src/remote_config/capabilities.js | 1 + packages/dd-trace/src/remote_config/index.js | 2 + .../dd-trace/test/remote_config/index.spec.js | 240 ++++-------------- 3 files changed, 53 insertions(+), 190 deletions(-) diff --git a/packages/dd-trace/src/remote_config/capabilities.js b/packages/dd-trace/src/remote_config/capabilities.js index 61a6e6ae09d..5dfc12f52be 100644 --- a/packages/dd-trace/src/remote_config/capabilities.js +++ b/packages/dd-trace/src/remote_config/capabilities.js @@ -16,6 +16,7 @@ module.exports = { APM_TRACING_LOGS_INJECTION: 1n << 13n, APM_TRACING_HTTP_HEADER_TAGS: 1n << 14n, APM_TRACING_CUSTOM_TAGS: 1n << 15n, + ASM_EXCLUSION_DATA: 1n << 18n, APM_TRACING_ENABLED: 1n << 19n, ASM_RASP_SQLI: 1n << 21n, ASM_RASP_LFI: 1n << 22n, diff --git a/packages/dd-trace/src/remote_config/index.js b/packages/dd-trace/src/remote_config/index.js index 255803f73ea..829e6912812 100644 --- a/packages/dd-trace/src/remote_config/index.js +++ b/packages/dd-trace/src/remote_config/index.js @@ -89,6 +89,7 @@ function enableWafUpdate (appsecConfig) { rc.updateCapabilities(RemoteConfigCapabilities.ASM_CUSTOM_RULES, true) rc.updateCapabilities(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, true) rc.updateCapabilities(RemoteConfigCapabilities.ASM_TRUSTED_IPS, true) + rc.updateCapabilities(RemoteConfigCapabilities.ASM_EXCLUSION_DATA, true) rc.updateCapabilities(RemoteConfigCapabilities.ASM_ENDPOINT_FINGERPRINT, true) rc.updateCapabilities(RemoteConfigCapabilities.ASM_SESSION_FINGERPRINT, true) rc.updateCapabilities(RemoteConfigCapabilities.ASM_NETWORK_FINGERPRINT, true) @@ -125,6 +126,7 @@ function disableWafUpdate () { rc.updateCapabilities(RemoteConfigCapabilities.ASM_CUSTOM_RULES, false) rc.updateCapabilities(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, false) rc.updateCapabilities(RemoteConfigCapabilities.ASM_TRUSTED_IPS, false) + rc.updateCapabilities(RemoteConfigCapabilities.ASM_EXCLUSION_DATA, false) rc.updateCapabilities(RemoteConfigCapabilities.ASM_ENDPOINT_FINGERPRINT, false) rc.updateCapabilities(RemoteConfigCapabilities.ASM_SESSION_FINGERPRINT, false) rc.updateCapabilities(RemoteConfigCapabilities.ASM_NETWORK_FINGERPRINT, false) 
diff --git a/packages/dd-trace/test/remote_config/index.spec.js b/packages/dd-trace/test/remote_config/index.spec.js index f20f06db81a..0dd14f0a845 100644 --- a/packages/dd-trace/test/remote_config/index.spec.js +++ b/packages/dd-trace/test/remote_config/index.spec.js @@ -189,6 +189,48 @@ describe('Remote Config index', () => { }) describe('enableWafUpdate', () => { + const expectCapabilitiesCalledWith = (capabilityList, expectedValue) => { + capabilityList.forEach(capability => { + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(capability, expectedValue) + }) + } + + const expectCapabilitiesNotCalled = (capabilityList) => { + capabilityList.forEach(capability => { + expect(rc.updateCapabilities) + .to.not.have.been.calledWith(capability) + }) + } + + const CORE_ASM_CAPABILITIES = [ + RemoteConfigCapabilities.ASM_IP_BLOCKING, + RemoteConfigCapabilities.ASM_USER_BLOCKING, + RemoteConfigCapabilities.ASM_DD_RULES, + RemoteConfigCapabilities.ASM_EXCLUSIONS, + RemoteConfigCapabilities.ASM_REQUEST_BLOCKING, + RemoteConfigCapabilities.ASM_RESPONSE_BLOCKING, + RemoteConfigCapabilities.ASM_CUSTOM_RULES, + RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, + RemoteConfigCapabilities.ASM_TRUSTED_IPS, + RemoteConfigCapabilities.ASM_EXCLUSION_DATA, + RemoteConfigCapabilities.ASM_ENDPOINT_FINGERPRINT, + RemoteConfigCapabilities.ASM_SESSION_FINGERPRINT, + RemoteConfigCapabilities.ASM_NETWORK_FINGERPRINT, + RemoteConfigCapabilities.ASM_HEADER_FINGERPRINT, + RemoteConfigCapabilities.ASM_DD_MULTICONFIG + ] + + const RASP_CAPABILITIES = [ + RemoteConfigCapabilities.ASM_RASP_SSRF, + RemoteConfigCapabilities.ASM_RASP_SQLI, + RemoteConfigCapabilities.ASM_RASP_LFI, + RemoteConfigCapabilities.ASM_RASP_SHI, + RemoteConfigCapabilities.ASM_RASP_CMDI + ] + + const ALL_ASM_CAPABILITIES = [...CORE_ASM_CAPABILITIES, ...RASP_CAPABILITIES] + describe('enable', () => { it('should not fail if remote config is not enabled before', () => { config.appsec = {} @@ -212,44 +254,7 @@ describe('Remote Config index', () => { remoteConfig.enable(config) remoteConfig.enableWafUpdate(config.appsec) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_IP_BLOCKING, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_USER_BLOCKING, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_DD_RULES, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_EXCLUSIONS, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_REQUEST_BLOCKING, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RESPONSE_BLOCKING, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_RULES, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_TRUSTED_IPS, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_ENDPOINT_FINGERPRINT, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_SESSION_FINGERPRINT, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_NETWORK_FINGERPRINT, true) - 
expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_HEADER_FINGERPRINT, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SSRF, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SQLI, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_LFI, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SHI, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_CMDI, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_DD_MULTICONFIG, true) + expectCapabilitiesCalledWith(ALL_ASM_CAPABILITIES, true) expect(rc.setProductHandler).to.have.been.calledWith('ASM_DATA') expect(rc.setProductHandler).to.have.been.calledWith('ASM_DD') @@ -262,44 +267,7 @@ describe('Remote Config index', () => { remoteConfig.enable(config) remoteConfig.enableWafUpdate(config.appsec) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_IP_BLOCKING, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_USER_BLOCKING, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_DD_RULES, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_EXCLUSIONS, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_REQUEST_BLOCKING, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RESPONSE_BLOCKING, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_RULES, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_TRUSTED_IPS, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_ENDPOINT_FINGERPRINT, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_SESSION_FINGERPRINT, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_NETWORK_FINGERPRINT, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_HEADER_FINGERPRINT, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SSRF, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SQLI, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_LFI, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SHI, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_CMDI, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_DD_MULTICONFIG, true) + expectCapabilitiesCalledWith(ALL_ASM_CAPABILITIES, true) expect(rc.setProductHandler).to.have.been.calledWith('ASM_DATA') expect(rc.setProductHandler).to.have.been.calledWith('ASM_DD') @@ -314,44 +282,8 @@ describe('Remote 
Config index', () => { expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_ACTIVATION, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_IP_BLOCKING, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_USER_BLOCKING, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_DD_RULES, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_EXCLUSIONS, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_REQUEST_BLOCKING, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RESPONSE_BLOCKING, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_RULES, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_TRUSTED_IPS, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_ENDPOINT_FINGERPRINT, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_SESSION_FINGERPRINT, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_NETWORK_FINGERPRINT, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_HEADER_FINGERPRINT, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SSRF, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SQLI, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_LFI, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SHI, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_CMDI, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_DD_MULTICONFIG, true) + + expectCapabilitiesCalledWith(ALL_ASM_CAPABILITIES, true) }) it('should not activate rasp capabilities if rasp is disabled', () => { @@ -361,44 +293,9 @@ describe('Remote Config index', () => { expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_ACTIVATION, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_IP_BLOCKING, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_USER_BLOCKING, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_DD_RULES, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_EXCLUSIONS, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_REQUEST_BLOCKING, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RESPONSE_BLOCKING, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_RULES, true) - expect(rc.updateCapabilities) - 
.to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_TRUSTED_IPS, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_ENDPOINT_FINGERPRINT, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_SESSION_FINGERPRINT, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_NETWORK_FINGERPRINT, true) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_HEADER_FINGERPRINT, true) - expect(rc.updateCapabilities) - .to.not.have.been.calledWith(RemoteConfigCapabilities.ASM_RASP_SSRF) - expect(rc.updateCapabilities) - .to.not.have.been.calledWith(RemoteConfigCapabilities.ASM_RASP_SQLI) - expect(rc.updateCapabilities) - .to.not.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_LFI) - expect(rc.updateCapabilities) - .to.not.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SHI) - expect(rc.updateCapabilities) - .to.not.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_CMDI) - expect(rc.updateCapabilities) - .to.not.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_DD_MULTICONFIG) + + expectCapabilitiesCalledWith(CORE_ASM_CAPABILITIES, true) + expectCapabilitiesNotCalled(RASP_CAPABILITIES) }) }) @@ -408,44 +305,7 @@ describe('Remote Config index', () => { rc.updateCapabilities.resetHistory() remoteConfig.disableWafUpdate() - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_IP_BLOCKING, false) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_USER_BLOCKING, false) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_DD_RULES, false) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_EXCLUSIONS, false) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_REQUEST_BLOCKING, false) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RESPONSE_BLOCKING, false) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_RULES, false) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, false) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_TRUSTED_IPS, false) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_ENDPOINT_FINGERPRINT, false) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_SESSION_FINGERPRINT, false) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_NETWORK_FINGERPRINT, false) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_HEADER_FINGERPRINT, false) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SSRF, false) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SQLI, false) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_LFI, false) - expect(rc.updateCapabilities) - 
.to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SHI, false) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_CMDI, false) - expect(rc.updateCapabilities) - .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_DD_MULTICONFIG, false) + expectCapabilitiesCalledWith(ALL_ASM_CAPABILITIES, false) expect(rc.removeProductHandler).to.have.been.calledWith('ASM_DATA') expect(rc.removeProductHandler).to.have.been.calledWith('ASM_DD') From 0ef07909e7d140df24503ac562fb2cfa477e7484 Mon Sep 17 00:00:00 2001 From: Roch Devost Date: Wed, 16 Jul 2025 04:33:13 -0400 Subject: [PATCH 46/53] bump flakiness report green requirement (#6105) --- scripts/flakiness.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/flakiness.mjs b/scripts/flakiness.mjs index 911d5a7c9a7..873340b202d 100644 --- a/scripts/flakiness.mjs +++ b/scripts/flakiness.mjs @@ -118,7 +118,7 @@ if (Object.keys(flaky).length === 0) { } else { const workflowSuccessRate = +((1 - flakeCount / totalCount) * 100).toFixed(1) const pipelineSuccessRate = +((workflowSuccessRate / 100) ** workflows.length * 100).toFixed(1) - const pipelineBadge = pipelineSuccessRate >= 80 ? '🟢' : pipelineSuccessRate >= 70 ? '🟡' : '🔴' + const pipelineBadge = pipelineSuccessRate >= 85 ? '🟢' : pipelineSuccessRate >= 75 ? '🟡' : '🔴' console.log(`*Flaky ${logString}`) for (const [workflow, jobs] of Object.entries(flaky).sort()) { From 748dc592281fc165c72c1bb0c4cc409ccab0f1b6 Mon Sep 17 00:00:00 2001 From: Carles Capell <107924659+CarlesDD@users.noreply.github.com> Date: Wed, 16 Jul 2025 11:28:08 +0200 Subject: [PATCH 47/53] test: fix remote_config tests (#6102) * Include RC test in test:trace:core * Rename RC test folder + fix tests * Switch to tap * Remove timeout from RC client Id test * Move integration test to correct folder --- .../remote_config.spec.js | 6 +++--- .../remote_config}/index.js | 0 package.json | 2 +- packages/dd-trace/test/remote_config/index.spec.js | 14 +++++--------- .../dd-trace/test/remote_config/manager.spec.js | 10 ++++++---- .../dd-trace/test/remote_config/scheduler.spec.js | 2 ++ 6 files changed, 17 insertions(+), 17 deletions(-) rename packages/dd-trace/test/remote_config/rc-client_id.spec.js => integration-tests/remote_config.spec.js (90%) rename {packages/dd-trace/test/remote_config/resources => integration-tests/remote_config}/index.js (100%) diff --git a/packages/dd-trace/test/remote_config/rc-client_id.spec.js b/integration-tests/remote_config.spec.js similarity index 90% rename from packages/dd-trace/test/remote_config/rc-client_id.spec.js rename to integration-tests/remote_config.spec.js index aef7c1dc1fa..5d1a0e3f6e7 100644 --- a/packages/dd-trace/test/remote_config/rc-client_id.spec.js +++ b/integration-tests/remote_config.spec.js @@ -1,6 +1,6 @@ 'use strict' -const { createSandbox, FakeAgent, spawnProc } = require('../../../../integration-tests/helpers') +const { createSandbox, FakeAgent, spawnProc } = require('./helpers') const path = require('path') const Axios = require('axios') const { assert } = require('chai') @@ -14,11 +14,11 @@ describe('Remote config client id', () => { sandbox = await createSandbox( ['express'], false, - [path.join(__dirname, 'resources')] + [path.join(__dirname, 'remote_config')] ) cwd = sandbox.folder - appFile = path.join(cwd, 'resources', 'index.js') + appFile = path.join(cwd, 'remote_config', 'index.js') }) after(async function () { diff --git a/packages/dd-trace/test/remote_config/resources/index.js 
b/integration-tests/remote_config/index.js similarity index 100% rename from packages/dd-trace/test/remote_config/resources/index.js rename to integration-tests/remote_config/index.js diff --git a/package.json b/package.json index bff34c5000e..8b4a59025ed 100644 --- a/package.json +++ b/package.json @@ -25,7 +25,7 @@ "test:debugger": "mocha -r 'packages/dd-trace/test/setup/mocha.js' 'packages/dd-trace/test/debugger/**/*.spec.js'", "test:debugger:ci": "nyc --no-clean --include 'packages/dd-trace/src/debugger/**/*.js' -- npm run test:debugger", "test:eslint-rules": "node eslint-rules/*.test.mjs", - "test:trace:core": "tap packages/dd-trace/test/*.spec.js \"packages/dd-trace/test/{ci-visibility,datastreams,encode,exporters,opentelemetry,opentracing,plugins,service-naming,standalone,telemetry}/**/*.spec.js\"", + "test:trace:core": "tap packages/dd-trace/test/*.spec.js \"packages/dd-trace/test/{ci-visibility,datastreams,encode,exporters,opentelemetry,opentracing,plugins,remote_config,service-naming,standalone,telemetry}/**/*.spec.js\"", "test:trace:core:ci": "npm run test:trace:core -- --coverage --nyc-arg=--include=\"packages/dd-trace/src/**/*.js\"", "test:trace:guardrails": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/dd-trace/test/guardrails/**/*.spec.js\"", "test:trace:guardrails:ci": "nyc --no-clean --include \"packages/dd-trace/src/guardrails/**/*.js\" -- npm run test:trace:guardrails", diff --git a/packages/dd-trace/test/remote_config/index.spec.js b/packages/dd-trace/test/remote_config/index.spec.js index 0dd14f0a845..bd96904de37 100644 --- a/packages/dd-trace/test/remote_config/index.spec.js +++ b/packages/dd-trace/test/remote_config/index.spec.js @@ -1,12 +1,14 @@ 'use strict' +require('../setup/tap') + +const RuleManager = require('../../src/appsec/rule_manager') const RemoteConfigCapabilities = require('../../src/remote_config/capabilities') const { kPreUpdate } = require('../../src/remote_config/manager') let config let rc let RemoteConfigManager -let RuleManager let UserTracking let log let appsec @@ -33,10 +35,6 @@ describe('Remote Config index', () => { RemoteConfigManager = sinon.stub().returns(rc) - RuleManager = { - updateWafFromRC: sinon.stub() - } - UserTracking = { setCollectionMode: sinon.stub() } @@ -52,10 +50,8 @@ describe('Remote Config index', () => { remoteConfig = proxyquire('../src/remote_config', { './manager': RemoteConfigManager, - '../rule_manager': RuleManager, - '../user_tracking': UserTracking, - '../../log': log, - '..': appsec + '../appsec/user_tracking': UserTracking, + '../log': log, }) }) diff --git a/packages/dd-trace/test/remote_config/manager.spec.js b/packages/dd-trace/test/remote_config/manager.spec.js index 8b85f6a7ce6..6454020b22e 100644 --- a/packages/dd-trace/test/remote_config/manager.spec.js +++ b/packages/dd-trace/test/remote_config/manager.spec.js @@ -1,5 +1,7 @@ 'use strict' +require('../setup/tap') + const Capabilities = require('../../src/remote_config/capabilities') const { UNACKNOWLEDGED, ACKNOWLEDGED, ERROR } = require('../../src/remote_config/apply_states') @@ -42,11 +44,11 @@ describe('RemoteConfigManager', () => { RemoteConfigManager = proxyquire('../src/remote_config/manager', { 'crypto-randomuuid': uuid, './scheduler': Scheduler, - '../../../../../package.json': { version: '3.0.0' }, - '../../exporters/common/request': request, - '../../log': log, + '../../../../package.json': { version: '3.0.0' }, + '../exporters/common/request': request, + '../log': log, '../tagger': tagger, - 
'../../service-naming/extra-services': { + '../service-naming/extra-services': { getExtraServices: () => extraServices } }) diff --git a/packages/dd-trace/test/remote_config/scheduler.spec.js b/packages/dd-trace/test/remote_config/scheduler.spec.js index db3b4db3fe6..e116dd591d4 100644 --- a/packages/dd-trace/test/remote_config/scheduler.spec.js +++ b/packages/dd-trace/test/remote_config/scheduler.spec.js @@ -1,5 +1,7 @@ 'use strict' +require('../setup/tap') + const Scheduler = require('../../src/remote_config/scheduler') const INTERVAL = 5e3 From f69bc0484ef6d8fbd3b1d31f23ee066d1d28f285 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Wed, 16 Jul 2025 15:56:54 +0200 Subject: [PATCH 48/53] Tag profiles with a sequence number (#6106) --- integration-tests/profiler/profiler.spec.js | 13 ++++++++----- packages/dd-trace/src/profiling/profiler.js | 6 +++++- 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/integration-tests/profiler/profiler.spec.js b/integration-tests/profiler/profiler.spec.js index c7a52eecdfd..b413eac5bc7 100644 --- a/integration-tests/profiler/profiler.spec.js +++ b/integration-tests/profiler/profiler.spec.js @@ -22,16 +22,16 @@ if (process.platform !== 'win32') { const TIMEOUT = 30000 function checkProfiles (agent, proc, timeout, - expectedProfileTypes = DEFAULT_PROFILE_TYPES, expectBadExit = false + expectedProfileTypes = DEFAULT_PROFILE_TYPES, expectBadExit = false, expectSeq = true ) { return Promise.all([ processExitPromise(proc, timeout, expectBadExit), - expectProfileMessagePromise(agent, timeout, expectedProfileTypes) + expectProfileMessagePromise(agent, timeout, expectedProfileTypes, expectSeq) ]) } function expectProfileMessagePromise (agent, timeout, - expectedProfileTypes = DEFAULT_PROFILE_TYPES + expectedProfileTypes = DEFAULT_PROFILE_TYPES, expectSeq = true ) { const fileNames = expectedProfileTypes.map(type => `${type}.pprof`) return agent.assertMessageReceived(({ headers, _, files }) => { @@ -50,6 +50,9 @@ function expectProfileMessagePromise (agent, timeout, for (const [index, fileName] of attachments.entries()) { assert.propertyVal(files[index + 1], 'originalname', fileName) } + if (expectSeq) { + assert(event.tags_profiler.indexOf(',profile_seq:') !== -1) + } } catch (e) { e.message += ` ${JSON.stringify({ headers, files, event })}` throw e @@ -560,7 +563,7 @@ describe('profiler', () => { execArgv: oomExecArgv, env: oomEnv }) - return checkProfiles(agent, proc, timeout, ['space'], true) + return checkProfiles(agent, proc, timeout, ['space'], true, false) }) it('sends a heap profile on OOM in worker thread and exits successfully', () => { @@ -584,7 +587,7 @@ describe('profiler', () => { DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT: 3 } }) - return checkProfiles(agent, proc, timeout, ['space'], false) + return checkProfiles(agent, proc, timeout, ['space'], false, false) }).retries(3) it('sends a heap profile on OOM with async callback', () => { diff --git a/packages/dd-trace/src/profiling/profiler.js b/packages/dd-trace/src/profiling/profiler.js index e9f1ced1313..0df71283e05 100644 --- a/packages/dd-trace/src/profiling/profiler.js +++ b/packages/dd-trace/src/profiling/profiler.js @@ -42,6 +42,8 @@ function findWebSpan (startedSpans, spanId) { } class Profiler extends EventEmitter { + #profileSeq = 0 + constructor () { super() this._enabled = false @@ -123,11 +125,12 @@ class Profiler extends EventEmitter { try { const start = new Date() + const nearOOMCallback = this._nearOOMExport.bind(this) for (const profiler of 
config.profilers) { // TODO: move this out of Profiler when restoring sourcemap support profiler.start({ mapper, - nearOOMCallback: this._nearOOMExport.bind(this) + nearOOMCallback }) this._logger.debug(`Started ${profiler.type} profiler in ${threadNamePrefix} thread`) } @@ -292,6 +295,7 @@ class Profiler extends EventEmitter { this.endpointCounts.clear() tags.snapshot = snapshotKind + tags.profile_seq = this.#profileSeq++ const exportSpec = { profiles, start, end, tags, endpointCounts } const tasks = this._config.exporters.map(exporter => exporter.export(exportSpec).catch(err => { From 17c4e9abc9d2a0a769b05795b05ad8066e4a2d15 Mon Sep 17 00:00:00 2001 From: Ugaitz Urien Date: Wed, 16 Jul 2025 18:12:44 +0200 Subject: [PATCH 49/53] Fix fs and rasp/iast problems (#6088) --- .../dd-trace/src/appsec/rasp/fs-plugin.js | 29 +- .../analyzers/path-traversal-analyzer.spec.js | 20 + .../resources/fs-async-way-method.js | 13 +- .../test/appsec/rasp/fs-plugin.spec.js | 42 +- .../appsec/rasp/lfi.express.plugin.spec.js | 568 ++++++++++-------- .../test/appsec/rasp/resources/template.ejs | 5 + packages/dd-trace/test/plugins/externals.json | 4 + 7 files changed, 388 insertions(+), 293 deletions(-) create mode 100644 packages/dd-trace/test/appsec/rasp/resources/template.ejs diff --git a/packages/dd-trace/src/appsec/rasp/fs-plugin.js b/packages/dd-trace/src/appsec/rasp/fs-plugin.js index dbd267b95e2..a632bb2d1e5 100644 --- a/packages/dd-trace/src/appsec/rasp/fs-plugin.js +++ b/packages/dd-trace/src/appsec/rasp/fs-plugin.js @@ -14,25 +14,31 @@ const enabledFor = { let fsPlugin -function enterWith (fsProps, store = storage('legacy').getStore()) { +function getStoreToStart (fsProps, store = storage('legacy').getStore()) { if (store && !store.fs?.opExcluded) { - storage('legacy').enterWith({ + return { ...store, fs: { ...store.fs, ...fsProps, parentStore: store } - }) + } } + + return store } class AppsecFsPlugin extends Plugin { enable () { - this.addSub('apm:fs:operation:start', this._onFsOperationStart) - this.addSub('apm:fs:operation:finish', this._onFsOperationFinishOrRenderEnd) - this.addSub('tracing:datadog:express:response:render:start', this._onResponseRenderStart) - this.addSub('tracing:datadog:express:response:render:end', this._onFsOperationFinishOrRenderEnd) + this.addBind('apm:fs:operation:start', this._onFsOperationStart) + this.addBind('apm:fs:operation:finish', this._onFsOperationFinishOrRenderEnd) + this.addBind('tracing:datadog:express:response:render:start', this._onResponseRenderStart) + this.addBind('tracing:datadog:express:response:render:end', this._onFsOperationFinishOrRenderEnd) + // TODO Remove this when dc-polyfill is fixed&updated + // hack to node 18 and early 20.x + // with dc-polyfill addBind is not enough to force a channel.hasSubscribers === true + this.addSub('tracing:datadog:express:response:render:start', () => {}) super.configure(true) } @@ -44,19 +50,20 @@ class AppsecFsPlugin extends Plugin { _onFsOperationStart () { const store = storage('legacy').getStore() if (store) { - enterWith({ root: store.fs?.root === undefined }, store) + return getStoreToStart({ root: store.fs?.root === undefined }, store) } } _onResponseRenderStart () { - enterWith({ opExcluded: true }) + return getStoreToStart({ opExcluded: true }) } _onFsOperationFinishOrRenderEnd () { const store = storage('legacy').getStore() - if (store?.fs?.parentStore) { - storage('legacy').enterWith(store.fs.parentStore) + if (store?.fs) { + return store.fs.parentStore } + return store } } diff --git 
a/packages/dd-trace/test/appsec/iast/analyzers/path-traversal-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/path-traversal-analyzer.spec.js index f914006b5e0..8e9c7abae4d 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/path-traversal-analyzer.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/path-traversal-analyzer.spec.js @@ -477,6 +477,26 @@ prepareTestServerForIast('integration test', (testThatRequestHasVulnerability, t describe('test stat', () => { runFsMethodTestThreeWay('stat', 0, null, __filename) + + describe('with two calls to async method without waiting to the callback', () => { + const fsAsyncWayMethodPath = path.join(os.tmpdir(), 'fs-async-way-method.js') + + before(() => { + fs.copyFileSync(path.join(__dirname, 'resources', 'fs-async-way-method.js'), fsAsyncWayMethodPath) + }) + + after(() => { + fs.unlinkSync(fsAsyncWayMethodPath) + }) + + testThatRequestHasVulnerability(function () { + const store = storage('legacy').getStore() + const iastCtx = iastContextFunctions.getIastContext(store) + const callArgs = [fsAsyncWayMethodPath] + callArgs[0] = newTaintedString(iastCtx, callArgs[0], 'param', 'Request') + return require(fsAsyncWayMethodPath).doubleCallIgnoringCb('stat', callArgs) + }, 'PATH_TRAVERSAL', { occurrences: 2 }) + }) }) describe('test symlink', () => { diff --git a/packages/dd-trace/test/appsec/iast/analyzers/resources/fs-async-way-method.js b/packages/dd-trace/test/appsec/iast/analyzers/resources/fs-async-way-method.js index 16a599b295c..a574eaf3c75 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/resources/fs-async-way-method.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/resources/fs-async-way-method.js @@ -2,10 +2,21 @@ const fs = require('fs') -module.exports = function (methodName, args, cb) { +function main (methodName, args, cb) { return new Promise((resolve, reject) => { fs[methodName](...args, (err, res) => { resolve(cb(res)) }) }) } + +main.doubleCallIgnoringCb = function (methodName, args) { + return new Promise((resolve) => { + fs[methodName](...args, () => {}) + fs[methodName](...args, () => { + resolve() + }) + }) +} + +module.exports = main diff --git a/packages/dd-trace/test/appsec/rasp/fs-plugin.spec.js b/packages/dd-trace/test/appsec/rasp/fs-plugin.spec.js index b87c88c20de..b79d05bfabc 100644 --- a/packages/dd-trace/test/appsec/rasp/fs-plugin.spec.js +++ b/packages/dd-trace/test/appsec/rasp/fs-plugin.spec.js @@ -27,6 +27,7 @@ describe('AppsecFsPlugin', () => { beforeEach(() => { configure = sinon.stub() class PluginClass { + addBind (channelName, handler) {} addSub (channelName, handler) {} configure (config) { @@ -93,20 +94,18 @@ describe('AppsecFsPlugin', () => { }) describe('_onFsOperationStart', () => { - it('should mark fs root', () => { + it('should return fs root', () => { const origStore = {} storage('legacy').enterWith(origStore) - appsecFsPlugin._onFsOperationStart() + let store = appsecFsPlugin._onFsOperationStart() - let store = storage('legacy').getStore() assert.property(store, 'fs') assert.propertyVal(store.fs, 'parentStore', origStore) assert.propertyVal(store.fs, 'root', true) - appsecFsPlugin._onFsOperationFinishOrRenderEnd() + store = appsecFsPlugin._onFsOperationFinishOrRenderEnd() - store = storage('legacy').getStore() assert.equal(store, origStore) assert.notProperty(store, 'fs') }) @@ -115,28 +114,30 @@ describe('AppsecFsPlugin', () => { const origStore = { orig: true } storage('legacy').enterWith(origStore) - appsecFsPlugin._onFsOperationStart() + const rootStore = 
appsecFsPlugin._onFsOperationStart() - const rootStore = storage('legacy').getStore() assert.property(rootStore, 'fs') assert.propertyVal(rootStore.fs, 'parentStore', origStore) assert.propertyVal(rootStore.fs, 'root', true) - appsecFsPlugin._onFsOperationStart() + storage('legacy').enterWith(rootStore) + + let store = appsecFsPlugin._onFsOperationStart() - let store = storage('legacy').getStore() assert.property(store, 'fs') assert.propertyVal(store.fs, 'parentStore', rootStore) assert.propertyVal(store.fs, 'root', false) assert.propertyVal(store, 'orig', true) - appsecFsPlugin._onFsOperationFinishOrRenderEnd() + storage('legacy').enterWith(store) + + store = appsecFsPlugin._onFsOperationFinishOrRenderEnd() - store = storage('legacy').getStore() assert.equal(store, rootStore) - appsecFsPlugin._onFsOperationFinishOrRenderEnd() - store = storage('legacy').getStore() + storage('legacy').enterWith(store) + + store = appsecFsPlugin._onFsOperationFinishOrRenderEnd() assert.equal(store, origStore) }) }) @@ -148,16 +149,16 @@ describe('AppsecFsPlugin', () => { const origStore = {} storage('legacy').enterWith(origStore) - appsecFsPlugin._onResponseRenderStart() + let store = appsecFsPlugin._onResponseRenderStart() - let store = storage('legacy').getStore() assert.property(store, 'fs') assert.propertyVal(store.fs, 'parentStore', origStore) assert.propertyVal(store.fs, 'opExcluded', true) - appsecFsPlugin._onFsOperationFinishOrRenderEnd() + storage('legacy').enterWith(store) + + store = appsecFsPlugin._onFsOperationFinishOrRenderEnd() - store = storage('legacy').getStore() assert.equal(store, origStore) assert.notProperty(store, 'fs') }) @@ -225,6 +226,12 @@ describe('AppsecFsPlugin', () => { it('should clean up store when finishing op', () => { let count = 4 + // TODO Remove this when node 18 is unsupported or dc-polyfill is fixed&updated + // hack to node 18 and early 20.x + // with dc-polyfill addBind is not enough to force a channel.hasSubscribers === true + const onStart = () => {} + opStartCh.subscribe(onStart) + const onFinish = () => { const store = storage('legacy').getStore() count-- @@ -244,6 +251,7 @@ describe('AppsecFsPlugin', () => { assert.strictEqual(count, 0) } finally { opFinishCh.unsubscribe(onFinish) + opStartCh.unsubscribe(onStart) } }) }) diff --git a/packages/dd-trace/test/appsec/rasp/lfi.express.plugin.spec.js b/packages/dd-trace/test/appsec/rasp/lfi.express.plugin.spec.js index 210c3849ece..d0a5ad8d3dd 100644 --- a/packages/dd-trace/test/appsec/rasp/lfi.express.plugin.spec.js +++ b/packages/dd-trace/test/appsec/rasp/lfi.express.plugin.spec.js @@ -30,376 +30,416 @@ describe('RASP - lfi', () => { } withVersions('express', 'express', expressVersion => { - let app, server + withVersions('express', 'ejs', ejsVersion => { + let app, server - before(() => { - return agent.load(['http', 'express'], { client: false }) - }) + before(() => { + return agent.load(['http', 'express'], { client: false }) + }) - before((done) => { - const express = require(`../../../../../versions/express@${expressVersion}`).get() - const expressApp = express() + before((done) => { + const express = require(`../../../../../versions/express@${expressVersion}`).get() + // const ejs = require(`../../../../../versions/ejs@${ejsVersion}`).get() + const expressApp = express() - expressApp.get('/', (req, res) => { - app(req, res) - }) + expressApp.set('view engine', 'ejs') + expressApp.set('views', path.join(__dirname, 'resources')) - appsec.enable(new Config({ - appsec: { - enabled: true, - rules: 
path.join(__dirname, 'resources', 'lfi_rasp_rules.json'), - rasp: { enabled: true } - } - })) + expressApp.get('/', (req, res) => { + app(req, res) + }) - server = expressApp.listen(0, () => { - const port = server.address().port - axios = Axios.create({ - baseURL: `http://localhost:${port}` + appsec.enable(new Config({ + appsec: { + enabled: true, + rules: path.join(__dirname, 'resources', 'lfi_rasp_rules.json'), + rasp: { enabled: true } + } + })) + + server = expressApp.listen(0, () => { + const port = server.address().port + axios = Axios.create({ + baseURL: `http://localhost:${port}` + }) + done() }) - done() }) - }) - after(() => { - appsec.disable() - server.close() - return agent.close({ ritmReset: false }) - }) + after(() => { + appsec.disable() + server.close() + return agent.close({ ritmReset: false }) + }) - describe('lfi', () => { - function getApp (fn, args, options) { - return async (req, res) => { - try { - const result = await fn(args) - options.onfinish?.(result) - } catch (e) { - if (e.message === 'DatadogRaspAbortError') { - res.writeHead(418) + describe('lfi', () => { + function getApp (fn, args, options) { + return async (req, res) => { + try { + const result = await fn(args) + options.onfinish?.(result) + } catch (e) { + if (e.message === 'DatadogRaspAbortError') { + res.writeHead(418) + } } + res.end('end') } - res.end('end') } - } - function getAppSync (fn, args, options) { - return (req, res) => { - try { - const result = fn(args) - options.onfinish?.(result) - } catch (e) { - if (e.message === 'DatadogRaspAbortError') { - res.writeHead(418) + function getAppSync (fn, args, options) { + return (req, res) => { + try { + const result = fn(args) + options.onfinish?.(result) + } catch (e) { + if (e.message === 'DatadogRaspAbortError') { + res.writeHead(418) + } } + res.end('end') } - res.end('end') } - } - function runFsMethodTest (description, options, fn, ...args) { - const { vulnerableIndex = 0, ruleEvalCount } = options + function runFsMethodTest (description, options, fn, ...args) { + const { vulnerableIndex = 0, ruleEvalCount } = options - describe(description, () => { - const getAppFn = options.getAppFn ?? getApp + describe(description, () => { + const getAppFn = options.getAppFn ?? getApp - it('should block param from the request', () => { - app = getAppFn(fn, args, options) + it('should block param from the request', () => { + app = getAppFn(fn, args, options) - const file = args[vulnerableIndex] - return testBlockingRequest(`/?file=${file}`, undefined, ruleEvalCount) - .then(span => { - assert(span.meta['_dd.appsec.json'].includes(file)) - }) - }) + const file = args[vulnerableIndex] + return testBlockingRequest(`/?file=${file}`, undefined, ruleEvalCount) + .then(span => { + assert(span.meta['_dd.appsec.json'].includes(file)) + }) + }) - it('should not block if param not found in the request', async () => { - app = getAppFn(fn, args, options) + it('should not block if param not found in the request', async () => { + app = getAppFn(fn, args, options) - await axios.get('/?file=/test.file') + await axios.get('/?file=/test.file') - return checkRaspExecutedAndNotThreat(agent, false) + return checkRaspExecutedAndNotThreat(agent, false) + }) }) - }) - } - - function runFsMethodTestThreeWay (methodName, options = {}, ...args) { - let desc = `test ${methodName} ${options.desc ?? 
''}` - const { vulnerableIndex = 0 } = options - if (vulnerableIndex !== 0) { - desc += ` with vulnerable index ${vulnerableIndex}` } - describe(desc, () => { - runFsMethodTest(`test fs.${methodName}Sync method`, { ...options, getAppFn: getAppSync }, (args) => { - return require('fs')[`${methodName}Sync`](...args) - }, ...args) - runFsMethodTest(`test fs.${methodName} method`, options, (args) => { - return new Promise((resolve, reject) => { - require('fs')[methodName](...args, (err, res) => { - if (err) reject(err) - else resolve(res) + function runFsMethodTestThreeWay (methodName, options = {}, ...args) { + let desc = `test ${methodName} ${options.desc ?? ''}` + const { vulnerableIndex = 0 } = options + if (vulnerableIndex !== 0) { + desc += ` with vulnerable index ${vulnerableIndex}` + } + describe(desc, () => { + runFsMethodTest(`test fs.${methodName}Sync method`, { ...options, getAppFn: getAppSync }, (args) => { + return require('fs')[`${methodName}Sync`](...args) + }, ...args) + + runFsMethodTest(`test fs.${methodName} method`, options, (args) => { + return new Promise((resolve, reject) => { + require('fs')[methodName](...args, (err, res) => { + if (err) reject(err) + else resolve(res) + }) }) - }) - }, ...args) - - runFsMethodTest(`test fs.promises.${methodName} method`, options, async (args) => { - return require('fs').promises[methodName](...args) - }, ...args) - }) - } - - function unlink (...args) { - args.forEach(arg => { - try { - fs.unlinkSync(arg) - } catch (e) { + }, ...args) - } - }) - } + runFsMethodTest(`test fs.promises.${methodName} method`, options, async (args) => { + return require('fs').promises[methodName](...args) + }, ...args) + }) + } - describe('test access', () => { - runFsMethodTestThreeWay('access', undefined, __filename) - runFsMethodTestThreeWay('access', { desc: 'Buffer' }, Buffer.from(__filename)) + function unlink (...args) { + args.forEach(arg => { + try { + fs.unlinkSync(arg) + } catch (e) { - // not supported by waf yet - // runFsMethodTestThreeWay('access', { desc: 'URL' }, new URL(`file://${__filename}`)) - }) + } + }) + } - describe('test appendFile', () => { - const filename = path.join(os.tmpdir(), 'test-appendfile') + describe('test access', () => { + runFsMethodTestThreeWay('access', undefined, __filename) + runFsMethodTestThreeWay('access', { desc: 'Buffer' }, Buffer.from(__filename)) - beforeEach(() => { - fs.writeFileSync(filename, '') + // not supported by waf yet + // runFsMethodTestThreeWay('access', { desc: 'URL' }, new URL(`file://${__filename}`)) }) - afterEach(() => { - fs.unlinkSync(filename) - }) + describe('test appendFile', () => { + const filename = path.join(os.tmpdir(), 'test-appendfile') - runFsMethodTestThreeWay('appendFile', undefined, filename, 'test-content') - }) + beforeEach(() => { + fs.writeFileSync(filename, '') + }) - describe('test chmod', () => { - const filename = path.join(os.tmpdir(), 'test-chmod') + afterEach(() => { + fs.unlinkSync(filename) + }) - beforeEach(() => { - fs.writeFileSync(filename, '') + runFsMethodTestThreeWay('appendFile', undefined, filename, 'test-content') }) - afterEach(() => { - fs.unlinkSync(filename) - }) - runFsMethodTestThreeWay('chmod', undefined, filename, '666') - }) + describe('test chmod', () => { + const filename = path.join(os.tmpdir(), 'test-chmod') - describe('test copyFile', () => { - const src = path.join(os.tmpdir(), 'test-copyFile-src') - const dest = path.join(os.tmpdir(), 'test-copyFile-dst') + beforeEach(() => { + fs.writeFileSync(filename, '') + }) - beforeEach(() => 
{ - fs.writeFileSync(src, '') + afterEach(() => { + fs.unlinkSync(filename) + }) + runFsMethodTestThreeWay('chmod', undefined, filename, '666') }) - afterEach(() => unlink(src, dest)) + describe('test copyFile', () => { + const src = path.join(os.tmpdir(), 'test-copyFile-src') + const dest = path.join(os.tmpdir(), 'test-copyFile-dst') - runFsMethodTestThreeWay('copyFile', { vulnerableIndex: 0, ruleEvalCount: 2 }, src, dest) - runFsMethodTestThreeWay('copyFile', { vulnerableIndex: 1, ruleEvalCount: 2 }, src, dest) - }) + beforeEach(() => { + fs.writeFileSync(src, '') + }) - describe('test link', () => { - const src = path.join(os.tmpdir(), 'test-link-src') - const dest = path.join(os.tmpdir(), 'test-link-dst') + afterEach(() => unlink(src, dest)) - beforeEach(() => { - fs.writeFileSync(src, '') + runFsMethodTestThreeWay('copyFile', { vulnerableIndex: 0, ruleEvalCount: 2 }, src, dest) + runFsMethodTestThreeWay('copyFile', { vulnerableIndex: 1, ruleEvalCount: 2 }, src, dest) }) - afterEach(() => unlink(src, dest)) + describe('test link', () => { + const src = path.join(os.tmpdir(), 'test-link-src') + const dest = path.join(os.tmpdir(), 'test-link-dst') - runFsMethodTestThreeWay('copyFile', { vulnerableIndex: 0, ruleEvalCount: 2 }, src, dest) - runFsMethodTestThreeWay('copyFile', { vulnerableIndex: 1, ruleEvalCount: 2 }, src, dest) - }) + beforeEach(() => { + fs.writeFileSync(src, '') + }) - describe('test lstat', () => { - runFsMethodTestThreeWay('lstat', undefined, __filename) - }) + afterEach(() => unlink(src, dest)) - describe('test mkdir', () => { - const dirname = path.join(os.tmpdir(), 'test-mkdir') + runFsMethodTestThreeWay('copyFile', { vulnerableIndex: 0, ruleEvalCount: 2 }, src, dest) + runFsMethodTestThreeWay('copyFile', { vulnerableIndex: 1, ruleEvalCount: 2 }, src, dest) + }) - afterEach(() => { - try { - fs.rmdirSync(dirname) - } catch (e) { - // some ops are blocked - } + describe('test lstat', () => { + runFsMethodTestThreeWay('lstat', undefined, __filename) }) - runFsMethodTestThreeWay('mkdir', undefined, dirname) - }) - describe('test mkdtemp', () => { - const dirname = path.join(os.tmpdir(), 'test-mkdtemp') + describe('test mkdir', () => { + const dirname = path.join(os.tmpdir(), 'test-mkdir') - runFsMethodTestThreeWay('mkdtemp', { - onfinish: (todelete) => { + afterEach(() => { try { - fs.rmdirSync(todelete) + fs.rmdirSync(dirname) } catch (e) { // some ops are blocked } - } - }, dirname) - }) + }) + runFsMethodTestThreeWay('mkdir', undefined, dirname) + }) - describe('test open', () => { - runFsMethodTestThreeWay('open', { - onfinish: (fd) => { - if (fd && fd.close) { - fd.close() - } else { - fs.close(fd, () => {}) - } - } - }, __filename, 'r') - }) + describe('test mkdtemp', () => { + const dirname = path.join(os.tmpdir(), 'test-mkdtemp') - describe('test opendir', () => { - const dirname = path.join(os.tmpdir(), 'test-opendir') + runFsMethodTestThreeWay('mkdtemp', { + onfinish: (todelete) => { + try { + fs.rmdirSync(todelete) + } catch (e) { + // some ops are blocked + } + } + }, dirname) + }) - beforeEach(() => { - fs.mkdirSync(dirname) + describe('test open', () => { + runFsMethodTestThreeWay('open', { + onfinish: (fd) => { + if (fd && fd.close) { + fd.close() + } else { + fs.close(fd, () => { + }) + } + } + }, __filename, 'r') }) - afterEach(() => { - fs.rmdirSync(dirname) + describe('test opendir', () => { + const dirname = path.join(os.tmpdir(), 'test-opendir') + + beforeEach(() => { + fs.mkdirSync(dirname) + }) + + afterEach(() => { + fs.rmdirSync(dirname) + }) + 
runFsMethodTestThreeWay('opendir', { + onfinish: (dir) => { + dir.close() + } + }, dirname) }) - runFsMethodTestThreeWay('opendir', { - onfinish: (dir) => { - dir.close() - } - }, dirname) - }) - describe('test readdir', () => { - const dirname = path.join(os.tmpdir(), 'test-opendir') + describe('test readdir', () => { + const dirname = path.join(os.tmpdir(), 'test-opendir') + + beforeEach(() => { + fs.mkdirSync(dirname) + }) - beforeEach(() => { - fs.mkdirSync(dirname) + afterEach(() => { + fs.rmdirSync(dirname) + }) + runFsMethodTestThreeWay('readdir', undefined, dirname) }) - afterEach(() => { - fs.rmdirSync(dirname) + describe('test readFile', () => { + runFsMethodTestThreeWay('readFile', undefined, __filename) + + runFsMethodTest('an async operation without callback is executed before', + { getAppFn: getAppSync, ruleEvalCount: 2 }, (args) => { + const fs = require('fs') + fs.readFile(path.join(__dirname, 'utils.js'), () => { + }) // safe and ignored operation + return fs.readFileSync(...args) + }, __filename) }) - runFsMethodTestThreeWay('readdir', undefined, dirname) - }) - describe('test readFile', () => { - runFsMethodTestThreeWay('readFile', undefined, __filename) - }) + describe('test readlink', () => { + const src = path.join(os.tmpdir(), 'test-readlink-src') + const dest = path.join(os.tmpdir(), 'test-readlink-dst') - describe('test readlink', () => { - const src = path.join(os.tmpdir(), 'test-readlink-src') - const dest = path.join(os.tmpdir(), 'test-readlink-dst') + beforeEach(() => { + fs.writeFileSync(src, '') + fs.linkSync(src, dest) + }) - beforeEach(() => { - fs.writeFileSync(src, '') - fs.linkSync(src, dest) + afterEach(() => unlink(src, dest)) + + runFsMethodTestThreeWay('readlink', undefined, dest) }) - afterEach(() => unlink(src, dest)) + describe('test realpath', () => { + runFsMethodTestThreeWay('realpath', undefined, __filename) - runFsMethodTestThreeWay('readlink', undefined, dest) - }) + runFsMethodTest('test fs.realpath.native method', {}, (args) => { + return new Promise((resolve, reject) => { + require('fs').realpath.native(...args, (err, result) => { + if (err) reject(err) + else resolve(result) + }) + }) + }, __filename) + }) - describe('test realpath', () => { - runFsMethodTestThreeWay('realpath', undefined, __filename) + describe('test rename', () => { + const src = path.join(os.tmpdir(), 'test-rename-src') + const dest = path.join(os.tmpdir(), 'test-rename-dst') - runFsMethodTest('test fs.realpath.native method', {}, (args) => { - return new Promise((resolve, reject) => { - require('fs').realpath.native(...args, (err, result) => { - if (err) reject(err) - else resolve(result) - }) + beforeEach(() => { + fs.writeFileSync(src, '') }) - }, __filename) - }) - describe('test rename', () => { - const src = path.join(os.tmpdir(), 'test-rename-src') - const dest = path.join(os.tmpdir(), 'test-rename-dst') + afterEach(() => unlink(dest)) - beforeEach(() => { - fs.writeFileSync(src, '') + runFsMethodTestThreeWay('rename', { vulnerableIndex: 0, ruleEvalCount: 2 }, src, dest) + runFsMethodTestThreeWay('rename', { vulnerableIndex: 1, ruleEvalCount: 2 }, src, dest) }) - afterEach(() => unlink(dest)) + describe('test rmdir', () => { + const dirname = path.join(os.tmpdir(), 'test-rmdir') - runFsMethodTestThreeWay('rename', { vulnerableIndex: 0, ruleEvalCount: 2 }, src, dest) - runFsMethodTestThreeWay('rename', { vulnerableIndex: 1, ruleEvalCount: 2 }, src, dest) - }) + beforeEach(() => { + fs.mkdirSync(dirname) + }) - describe('test rmdir', () => { - const dirname = 
path.join(os.tmpdir(), 'test-rmdir') + afterEach(() => { + try { + fs.rmdirSync(dirname) + } catch (e) { + } + }) - beforeEach(() => { - fs.mkdirSync(dirname) + runFsMethodTestThreeWay('rmdir', undefined, dirname) }) - afterEach(() => { - try { fs.rmdirSync(dirname) } catch (e) {} + describe('test stat', () => { + runFsMethodTestThreeWay('stat', undefined, __filename) }) - runFsMethodTestThreeWay('rmdir', undefined, dirname) - }) + describe('test symlink', () => { + const src = path.join(os.tmpdir(), 'test-symlink-src') + const dest = path.join(os.tmpdir(), 'test-symlink-dst') - describe('test stat', () => { - runFsMethodTestThreeWay('stat', undefined, __filename) - }) + beforeEach(() => { + fs.writeFileSync(src, '') + }) - describe('test symlink', () => { - const src = path.join(os.tmpdir(), 'test-symlink-src') - const dest = path.join(os.tmpdir(), 'test-symlink-dst') + afterEach(() => { + unlink(src, dest) + }) - beforeEach(() => { - fs.writeFileSync(src, '') + runFsMethodTestThreeWay('symlink', { vulnerableIndex: 0, ruleEvalCount: 2 }, src, dest) + runFsMethodTestThreeWay('symlink', { vulnerableIndex: 1, ruleEvalCount: 2 }, src, dest) }) - afterEach(() => { - unlink(src, dest) - }) + describe('test truncate', () => { + const src = path.join(os.tmpdir(), 'test-truncate-src') - runFsMethodTestThreeWay('symlink', { vulnerableIndex: 0, ruleEvalCount: 2 }, src, dest) - runFsMethodTestThreeWay('symlink', { vulnerableIndex: 1, ruleEvalCount: 2 }, src, dest) - }) + beforeEach(() => { + fs.writeFileSync(src, 'aaaaaa') + }) - describe('test truncate', () => { - const src = path.join(os.tmpdir(), 'test-truncate-src') + afterEach(() => unlink(src)) - beforeEach(() => { - fs.writeFileSync(src, 'aaaaaa') + runFsMethodTestThreeWay('truncate', undefined, src) }) - afterEach(() => unlink(src)) + describe('test unlink', () => { + const src = path.join(os.tmpdir(), 'test-unlink-src') - runFsMethodTestThreeWay('truncate', undefined, src) - }) + beforeEach(() => { + fs.writeFileSync(src, '') + }) + runFsMethodTestThreeWay('unlink', undefined, src) + }) - describe('test unlink', () => { - const src = path.join(os.tmpdir(), 'test-unlink-src') + describe('test writeFile', () => { + const src = path.join(os.tmpdir(), 'test-writeFile-src') - beforeEach(() => { - fs.writeFileSync(src, '') - }) - runFsMethodTestThreeWay('unlink', undefined, src) - }) + afterEach(() => unlink(src)) - describe('test writeFile', () => { - const src = path.join(os.tmpdir(), 'test-writeFile-src') + runFsMethodTestThreeWay('writeFile', undefined, src, 'content') + }) - afterEach(() => unlink(src)) + describe('test with express render', () => { + function getAppFn (fn, args, options) { + return (req, res) => { + try { + const result = fn(args) + options.onfinish?.(result) + } catch (e) { + if (e.message === 'DatadogRaspAbortError') { + res.status(418) + } + } + res.render('template') + } + } - runFsMethodTestThreeWay('writeFile', undefined, src, 'content') + runFsMethodTest('rule is eval only once and rendering file accesses are ignored', + { getAppFn, ruleEvalCount: 1 }, (args) => { + const fs = require('fs') + return fs.readFileSync(...args) + }, __filename) + }) }) }) }) diff --git a/packages/dd-trace/test/appsec/rasp/resources/template.ejs b/packages/dd-trace/test/appsec/rasp/resources/template.ejs new file mode 100644 index 00000000000..a806d0e8cd7 --- /dev/null +++ b/packages/dd-trace/test/appsec/rasp/resources/template.ejs @@ -0,0 +1,5 @@ + + +

+Kaixo!
+ + diff --git a/packages/dd-trace/test/plugins/externals.json b/packages/dd-trace/test/plugins/externals.json index 157fc0fd292..554a5065709 100644 --- a/packages/dd-trace/test/plugins/externals.json +++ b/packages/dd-trace/test/plugins/externals.json @@ -99,6 +99,10 @@ { "name": "multer", "versions": ["^1.4.4-lts.1"] + }, + { + "name": "ejs", + "versions": ["3.1.10"] } ], "express-mongo-sanitize": [ From 03f579ed28d6f056d6508f9e615ecd4f97921ecc Mon Sep 17 00:00:00 2001 From: Roberto Montero <108007532+robertomonteromiguel@users.noreply.github.com> Date: Thu, 17 Jul 2025 11:30:34 +0200 Subject: [PATCH 50/53] ci: update one pipeline reference (#6112) --- .gitlab/one-pipeline.locked.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab/one-pipeline.locked.yml b/.gitlab/one-pipeline.locked.yml index a2192e2aea6..514056de50a 100644 --- a/.gitlab/one-pipeline.locked.yml +++ b/.gitlab/one-pipeline.locked.yml @@ -1,4 +1,4 @@ # DO NOT EDIT THIS FILE MANUALLY # This file is auto-generated by automation. include: - - remote: https://gitlab-templates.ddbuild.io/libdatadog/one-pipeline/ca/d44e89797a5a47c43cf712193abefe2178a004176606f7e01b77d1ec49a3ef5e/one-pipeline.yml + - remote: https://gitlab-templates.ddbuild.io/libdatadog/one-pipeline/ca/a0486057161f85a77e39ad2aa60ac66bb52414696d9b3dd87177df1057b11295/one-pipeline.yml From d3b0fc5497c34aa6f098dbb73f2b5ed22144731a Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Thu, 17 Jul 2025 11:35:10 +0200 Subject: [PATCH 51/53] chore: make as much of profiler internals private as possible (#6108) --- packages/dd-trace/src/profiling/profiler.js | 155 ++++++++++-------- .../dd-trace/test/profiling/profiler.spec.js | 6 +- 2 files changed, 88 insertions(+), 73 deletions(-) diff --git a/packages/dd-trace/src/profiling/profiler.js b/packages/dd-trace/src/profiling/profiler.js index 0df71283e05..38b720e2990 100644 --- a/packages/dd-trace/src/profiling/profiler.js +++ b/packages/dd-trace/src/profiling/profiler.js @@ -42,17 +42,20 @@ function findWebSpan (startedSpans, spanId) { } class Profiler extends EventEmitter { + #compressionFn + #compressionOptions + #enabled = false + #endpointCounts = new Map() + #lastStart + #logger #profileSeq = 0 + #spanFinishListener + #timer constructor () { super() - this._enabled = false - this._logger = undefined this._config = undefined - this._timer = undefined - this._lastStart = undefined this._timeoutInterval = undefined - this.endpointCounts = new Map() } start (options) { @@ -63,17 +66,21 @@ class Profiler extends EventEmitter { }) } - _logError (err) { - logError(this._logger, err) + get enabled () { + return this.#enabled + } + + #logError (err) { + logError(this.#logger, err) } async _start (options) { - if (this._enabled) return true + if (this.enabled) return true const config = this._config = new Config(options) - this._logger = config.logger - this._enabled = true + this.#logger = config.logger + this.#enabled = true this._setInterval() // Log errors if the source map finder fails, but don't prevent the rest @@ -85,7 +92,7 @@ class Profiler extends EventEmitter { mapper = await maybeSourceMap(config.sourceMap, SourceMapper, config.debugSourceMaps) if (config.sourceMap && config.debugSourceMaps) { - this._logger.debug(() => { + this.#logger.debug(() => { return mapper.infoMap.size === 0 ? 
'Found no source maps' : `Found source maps for following files: [${[...mapper.infoMap.keys()].join(', ')}]`
@@ -95,18 +102,18 @@ class Profiler extends EventEmitter {
       const clevel = config.uploadCompression.level
       switch (config.uploadCompression.method) {
         case 'gzip':
-          this._compressionFn = promisify(zlib.gzip)
+          this.#compressionFn = promisify(zlib.gzip)
           if (clevel !== undefined) {
-            this._compressionOptions = {
+            this.#compressionOptions = {
               level: clevel
             }
           }
           break
         case 'zstd':
           if (typeof zlib.zstdCompress === 'function') {
-            this._compressionFn = promisify(zlib.zstdCompress)
+            this.#compressionFn = promisify(zlib.zstdCompress)
             if (clevel !== undefined) {
-              this._compressionOptions = {
+              this.#compressionOptions = {
                 params: {
                   [zlib.constants.ZSTD_c_compressionLevel]: clevel
                 }
@@ -115,44 +122,44 @@ class Profiler extends EventEmitter {
           } else {
             const zstdCompress = require('@datadog/libdatadog').load('datadog-js-zstd').zstd_compress
             const level = clevel ?? 0 // 0 is zstd default compression level
-            this._compressionFn = (buffer) => Promise.resolve(Buffer.from(zstdCompress(buffer, level)))
+            this.#compressionFn = (buffer) => Promise.resolve(Buffer.from(zstdCompress(buffer, level)))
           }
           break
       }
     } catch (err) {
-      this._logError(err)
+      this.#logError(err)
     }

     try {
       const start = new Date()
-      const nearOOMCallback = this._nearOOMExport.bind(this)
+      const nearOOMCallback = this.#nearOOMExport.bind(this)
       for (const profiler of config.profilers) {
         // TODO: move this out of Profiler when restoring sourcemap support
         profiler.start({ mapper, nearOOMCallback })
-        this._logger.debug(`Started ${profiler.type} profiler in ${threadNamePrefix} thread`)
+        this.#logger.debug(`Started ${profiler.type} profiler in ${threadNamePrefix} thread`)
       }

       if (config.endpointCollectionEnabled) {
-        this._spanFinishListener = this._onSpanFinish.bind(this)
-        spanFinishedChannel.subscribe(this._spanFinishListener)
+        this.#spanFinishListener = this.#onSpanFinish.bind(this)
+        spanFinishedChannel.subscribe(this.#spanFinishListener)
       }

       this._capture(this._timeoutInterval, start)
       return true
     } catch (e) {
-      this._logError(e)
-      this._stop()
+      this.#logError(e)
+      this.#stop()
       return false
     }
   }

-  _nearOOMExport (profileType, encodedProfile) {
-    const start = this._lastStart
+  #nearOOMExport (profileType, encodedProfile) {
+    const start = this.#lastStart
     const end = new Date()
-    this._submit({
+    this.#submit({
       [profileType]: encodedProfile
     }, start, end, snapshotKinds.ON_OUT_OF_MEMORY)
   }
@@ -162,45 +169,45 @@ class Profiler extends EventEmitter {
   }

   stop () {
-    if (!this._enabled) return
+    if (!this.enabled) return

     // collect and export current profiles
     // once collect returns, profilers can be safely stopped
     this._collect(snapshotKinds.ON_SHUTDOWN, false)
-    this._stop()
+    this.#stop()
   }

-  _stop () {
-    if (!this._enabled) return
+  #stop () {
+    if (!this.enabled) return

-    this._enabled = false
+    this.#enabled = false

-    if (this._spanFinishListener !== undefined) {
-      spanFinishedChannel.unsubscribe(this._spanFinishListener)
-      this._spanFinishListener = undefined
+    if (this.#spanFinishListener !== undefined) {
+      spanFinishedChannel.unsubscribe(this.#spanFinishListener)
+      this.#spanFinishListener = undefined
     }

     for (const profiler of this._config.profilers) {
       profiler.stop()
-      this._logger.debug(`Stopped ${profiler.type} profiler in ${threadNamePrefix} thread`)
+      this.#logger.debug(`Stopped ${profiler.type} profiler in ${threadNamePrefix} thread`)
     }

-    clearTimeout(this._timer)
-    this._timer = undefined
+    clearTimeout(this.#timer)
+    this.#timer = undefined
   }

   _capture (timeout, start) {
-    if (!this._enabled) return
-    this._lastStart = start
-    if (!this._timer || timeout !== this._timeoutInterval) {
-      this._timer = setTimeout(() => this._collect(snapshotKinds.PERIODIC), timeout)
-      this._timer.unref()
+    if (!this.enabled) return
+    this.#lastStart = start
+    if (!this.#timer || timeout !== this._timeoutInterval) {
+      this.#timer = setTimeout(() => this._collect(snapshotKinds.PERIODIC), timeout)
+      this.#timer.unref()
     } else {
-      this._timer.refresh()
+      this.#timer.refresh()
     }
   }

-  _onSpanFinish (span) {
+  #onSpanFinish (span) {
     const context = span.context()
     const tags = context._tags
     if (!isWebServerSpan(tags)) return
@@ -211,19 +218,19 @@ class Profiler extends EventEmitter {
     // Make sure this is the outermost web span, just in case so we don't overcount
     if (findWebSpan(getStartedSpans(context), context._parentId)) return

-    let counter = this.endpointCounts.get(endpointName)
+    let counter = this.#endpointCounts.get(endpointName)
     if (counter === undefined) {
       counter = { count: 1 }
-      this.endpointCounts.set(endpointName, counter)
+      this.#endpointCounts.set(endpointName, counter)
     } else {
       counter.count++
     }
   }

   async _collect (snapshotKind, restart = true) {
-    if (!this._enabled) return
+    if (!this.enabled) return

-    const startDate = this._lastStart
+    const startDate = this.#lastStart
     const endDate = new Date()
     const profiles = []
     const encodedProfiles = {}
@@ -238,7 +245,7 @@ class Profiler extends EventEmitter {
     for (const profiler of this._config.profilers) {
       const profile = profiler.profile(restart, startDate, endDate)
       if (!restart) {
-        this._logger.debug(`Stopped ${profiler.type} profiler in ${threadNamePrefix} thread`)
+        this.#logger.debug(`Stopped ${profiler.type} profiler in ${threadNamePrefix} thread`)
       }
       if (!profile) continue
       profiles.push({ profiler, profile })
@@ -255,11 +262,11 @@ class Profiler extends EventEmitter {
       await Promise.all(profiles.map(async ({ profiler, profile }) => {
         try {
           const encoded = await profiler.encode(profile)
-          const compressed = encoded instanceof Buffer && this._compressionFn !== undefined
-            ? await this._compressionFn(encoded, this._compressionOptions)
+          const compressed = encoded instanceof Buffer && this.#compressionFn !== undefined
+            ? await this.#compressionFn(encoded, this.#compressionOptions)
             : encoded
           encodedProfiles[profiler.type] = compressed
-          this._logger.debug(() => {
+          this.#logger.debug(() => {
             const profileJson = JSON.stringify(profile, (key, value) => {
               return typeof value === 'bigint' ? value.toString() : value
             })
@@ -269,38 +276,38 @@ class Profiler extends EventEmitter {
         } catch (err) {
           // If encoding one of the profile types fails, we should still try to
           // encode and submit the other profile types.
-          this._logError(err)
+          this.#logError(err)
         }
       }))

       if (hasEncoded) {
-        await this._submit(encodedProfiles, startDate, endDate, snapshotKind)
+        await this.#submit(encodedProfiles, startDate, endDate, snapshotKind)
         profileSubmittedChannel.publish()
-        this._logger.debug('Submitted profiles')
+        this.#logger.debug('Submitted profiles')
       }
     } catch (err) {
-      this._logError(err)
-      this._stop()
+      this.#logError(err)
+      this.#stop()
     }
   }

-  _submit (profiles, start, end, snapshotKind) {
+  #submit (profiles, start, end, snapshotKind) {
     const { tags } = this._config

     // Flatten endpoint counts
     const endpointCounts = {}
-    for (const [endpoint, { count }] of this.endpointCounts) {
+    for (const [endpoint, { count }] of this.#endpointCounts) {
       endpointCounts[endpoint] = count
     }
-    this.endpointCounts.clear()
+    this.#endpointCounts.clear()

     tags.snapshot = snapshotKind
     tags.profile_seq = this.#profileSeq++

     const exportSpec = { profiles, start, end, tags, endpointCounts }

     const tasks = this._config.exporters.map(exporter =>
       exporter.export(exportSpec).catch(err => {
-        if (this._logger) {
-          this._logger.warn(err)
+        if (this.#logger) {
+          this.#logger.warn(err)
         }
       })
     )
@@ -310,24 +317,32 @@ class Profiler extends EventEmitter {
 }

 class ServerlessProfiler extends Profiler {
+  #profiledIntervals = 0
+  #interval = 1 // seconds
+  #flushAfterIntervals
+
   constructor () {
     super()
-    this._profiledIntervals = 0
-    this._interval = 1
-    this._flushAfterIntervals = undefined
+    this.#profiledIntervals = 0
+    this.#interval = 1
+    this.#flushAfterIntervals = undefined
+  }
+
+  get profiledIntervals () {
+    return this.#profiledIntervals
   }

   _setInterval () {
-    this._timeoutInterval = this._interval * 1000
-    this._flushAfterIntervals = this._config.flushInterval / 1000
+    this._timeoutInterval = this.#interval * 1000
+    this.#flushAfterIntervals = this._config.flushInterval / 1000
   }

   async _collect (snapshotKind, restart = true) {
-    if (this._profiledIntervals >= this._flushAfterIntervals || !restart) {
-      this._profiledIntervals = 0
+    if (this.#profiledIntervals >= this.#flushAfterIntervals || !restart) {
+      this.#profiledIntervals = 0
       await super._collect(snapshotKind, restart)
     } else {
-      this._profiledIntervals += 1
+      this.#profiledIntervals += 1
       this._capture(this._timeoutInterval, new Date())
       // Don't submit profile until 65 (flushAfterIntervals) intervals have elapsed
     }
diff --git a/packages/dd-trace/test/profiling/profiler.spec.js b/packages/dd-trace/test/profiling/profiler.spec.js
index afdfb86a85d..01e14fc4a50 100644
--- a/packages/dd-trace/test/profiling/profiler.spec.js
+++ b/packages/dd-trace/test/profiling/profiler.spec.js
@@ -395,7 +395,7 @@ describe('profiler', function () {
       sourceMapCreate.rejects(error)
       await profiler._start({ profilers, exporters, logger, sourceMap: true })
       expect(consoleLogger.error.args[0][0]).to.equal(error)
-      expect(profiler._enabled).to.equal(true)
+      expect(profiler.enabled).to.equal(true)
     })
   })

@@ -430,11 +430,11 @@ describe('profiler', function () {
     it('should increment profiled intervals after one interval elapses', async () => {
       await profiler._start({ profilers, exporters })

-      expect(profiler._profiledIntervals).to.equal(0)
+      expect(profiler.profiledIntervals).to.equal(0)

       clock.tick(interval)

-      expect(profiler._profiledIntervals).to.equal(1)
+      expect(profiler.profiledIntervals).to.equal(1)

       sinon.assert.notCalled(exporter.export)
     })

From 64f67b0fda0eabffcd10d927a81405ef4e536a49 Mon Sep 17 00:00:00 2001
From: Thomas Watson
Date: Thu, 17 Jul 2025 12:16:58 +0200
Subject: [PATCH 52/53] ci: skip guardrail telemetry unit tests if DD_INJECT_FORCE is used (#6113)

This is the case in CI for PRs targeting a release branch, as can be seen here:
https://github.com/DataDog/dd-trace-js/blob/2caa6151a133fb782a51fa3c2c96f1a8d88da2ac/packages/dd-trace/test/setup/core.js#L24-L28
---
 packages/dd-trace/test/guardrails/telemetry.spec.js | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/packages/dd-trace/test/guardrails/telemetry.spec.js b/packages/dd-trace/test/guardrails/telemetry.spec.js
index 29d8cb99b35..4ed8d5e3119 100644
--- a/packages/dd-trace/test/guardrails/telemetry.spec.js
+++ b/packages/dd-trace/test/guardrails/telemetry.spec.js
@@ -7,6 +7,13 @@ const { telemetryForwarder, assertTelemetryPoints } = require('../../../../integ
 describe('sendTelemetry', () => {
   let cleanup, sendTelemetry

+  before(function () {
+    if (['1', 'true', 'True'].includes(process.env.DD_INJECT_FORCE)) {
+      // When DD_INJECT_FORCE is set, only telemetry with the name `error` or `complete` is sent
+      this.skip()
+    }
+  })
+
   beforeEach(() => {
     cleanup = telemetryForwarder()
     sendTelemetry = proxyquire('../src/guardrails/telemetry', {})

From 2949b803e17707525c55fc51cebdef4e7997d2cb Mon Sep 17 00:00:00 2001
From: watson
Date: Thu, 17 Jul 2025 10:17:47 +0000
Subject: [PATCH 53/53] v5.59.0

---
 package.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/package.json b/package.json
index 8b4a59025ed..f39f76641a6 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "dd-trace",
-  "version": "5.58.0",
+  "version": "5.59.0",
   "description": "Datadog APM tracing client for JavaScript",
   "main": "index.js",
   "typings": "index.d.ts",
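For context, a minimal sketch of the pattern applied in the profiler refactor above: underscore-prefixed pseudo-private properties become hard-private `#` fields, and read-only getters (such as `enabled` and `profiledIntervals`) give tests a supported way to observe state. This is not code from the dd-trace codebase; the class and field names below are hypothetical.

'use strict'

class Worker {
  // Hard-private state: inaccessible from outside the class, unlike `_enabled`-style properties
  #enabled = false
  #intervals = 0

  start () {
    this.#enabled = true
  }

  tick () {
    if (!this.#enabled) return
    this.#intervals++
  }

  // Read-only accessors replace direct reads of the old underscore properties
  get enabled () {
    return this.#enabled
  }

  get intervals () {
    return this.#intervals
  }
}

// Example use, e.g. in a test:
// const w = new Worker()
// w.start(); w.tick()
// assert.strictEqual(w.enabled, true)   // public getter
// assert.strictEqual(w.intervals, 1)    // `w.#intervals` would be a syntax error here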