Skip to content

Commit 07785fd

Browse files
authored
Add disabled by default integration concept (#5609)
This allows instrumentations to be disabled easily, while explicitly setting an environment variable will still enable them.
1 parent 34b8c65 commit 07785fd

File tree

14 files changed

+304
-41
lines changed

14 files changed

+304
-41
lines changed

packages/datadog-instrumentations/src/confluentinc-kafka-javascript.js

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -10,20 +10,20 @@ const log = require('../../dd-trace/src/log')
1010

1111
// Create channels for Confluent Kafka JavaScript
1212
const channels = {
13-
producerStart: channel('apm:@confluentinc/kafka-javascript:produce:start'),
14-
producerFinish: channel('apm:@confluentinc/kafka-javascript:produce:finish'),
15-
producerError: channel('apm:@confluentinc/kafka-javascript:produce:error'),
16-
producerCommit: channel('apm:@confluentinc/kafka-javascript:produce:commit'),
17-
consumerStart: channel('apm:@confluentinc/kafka-javascript:consume:start'),
18-
consumerFinish: channel('apm:@confluentinc/kafka-javascript:consume:finish'),
19-
consumerError: channel('apm:@confluentinc/kafka-javascript:consume:error'),
20-
consumerCommit: channel('apm:@confluentinc/kafka-javascript:consume:commit'),
13+
producerStart: channel('apm:confluentinc-kafka-javascript:produce:start'),
14+
producerFinish: channel('apm:confluentinc-kafka-javascript:produce:finish'),
15+
producerError: channel('apm:confluentinc-kafka-javascript:produce:error'),
16+
producerCommit: channel('apm:confluentinc-kafka-javascript:produce:commit'),
17+
consumerStart: channel('apm:confluentinc-kafka-javascript:consume:start'),
18+
consumerFinish: channel('apm:confluentinc-kafka-javascript:consume:finish'),
19+
consumerError: channel('apm:confluentinc-kafka-javascript:consume:error'),
20+
consumerCommit: channel('apm:confluentinc-kafka-javascript:consume:commit'),
2121

2222
// batch operations
23-
batchConsumerStart: channel('apm:@confluentinc/kafka-javascript:consume-batch:start'),
24-
batchConsumerFinish: channel('apm:@confluentinc/kafka-javascript:consume-batch:finish'),
25-
batchConsumerError: channel('apm:@confluentinc/kafka-javascript:consume-batch:error'),
26-
batchConsumerCommit: channel('apm:@confluentinc/kafka-javascript:consume-batch:commit')
23+
batchConsumerStart: channel('apm:confluentinc-kafka-javascript:consume-batch:start'),
24+
batchConsumerFinish: channel('apm:confluentinc-kafka-javascript:consume-batch:finish'),
25+
batchConsumerError: channel('apm:confluentinc-kafka-javascript:consume-batch:error'),
26+
batchConsumerCommit: channel('apm:confluentinc-kafka-javascript:consume-batch:commit')
2727
}
2828

2929
const disabledHeaderWeakSet = new WeakSet()

packages/datadog-instrumentations/src/helpers/register.js

Lines changed: 17 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@ const log = require('../../../dd-trace/src/log')
99
const checkRequireCache = require('./check-require-cache')
1010
const telemetry = require('../../../dd-trace/src/guardrails/telemetry')
1111
const { isInServerlessEnvironment } = require('../../../dd-trace/src/serverless')
12+
const { isFalse, isTrue, normalizePluginEnvName } = require('../../../dd-trace/src/util')
1213
const { getEnvironmentVariables } = require('../../../dd-trace/src/config-helper')
1314

1415
const envs = getEnvironmentVariables()
@@ -22,16 +23,22 @@ const hooks = require('./hooks')
2223
const instrumentations = require('./instrumentations')
2324
const names = Object.keys(hooks)
2425
const pathSepExpr = new RegExp(`\\${path.sep}`, 'g')
26+
2527
const disabledInstrumentations = new Set(
26-
DD_TRACE_DISABLED_INSTRUMENTATIONS ? DD_TRACE_DISABLED_INSTRUMENTATIONS.split(',') : []
28+
DD_TRACE_DISABLED_INSTRUMENTATIONS?.split(',').map(name => normalizePluginEnvName(name, true)) ?? []
2729
)
30+
const reenabledInstrumentations = new Set()
2831

2932
// Check for DD_TRACE_<INTEGRATION>_ENABLED environment variables
3033
for (const [key, value] of Object.entries(envs)) {
3134
const match = key.match(/^DD_TRACE_(.+)_ENABLED$/)
32-
if (match && (value?.toLowerCase() === 'false' || value === '0')) {
33-
const integration = match[1].toLowerCase()
34-
disabledInstrumentations.add(integration)
35+
if (match && value) {
36+
const integration = normalizePluginEnvName(match[1], true)
37+
if (isFalse(value)) {
38+
disabledInstrumentations.add(integration)
39+
} else if (isTrue(value)) {
40+
reenabledInstrumentations.add(integration)
41+
}
3542
}
3643
}
3744

@@ -58,7 +65,8 @@ const allInstrumentations = {}
5865

5966
// TODO: make this more efficient
6067
for (const packageName of names) {
61-
if (disabledInstrumentations.has(packageName)) continue
68+
const normalizedPackageName = normalizePluginEnvName(packageName, true)
69+
if (disabledInstrumentations.has(normalizedPackageName)) continue
6270

6371
const hookOptions = {}
6472

@@ -67,6 +75,10 @@ for (const packageName of names) {
6775
if (typeof hook === 'object') {
6876
if (hook.serverless === false && isInServerlessEnvironment()) continue
6977

78+
// some integrations are disabled by default, but can be enabled by setting
79+
// the DD_TRACE_<INTEGRATION>_ENABLED environment variable to true
80+
if (hook.disabled && !reenabledInstrumentations.has(normalizedPackageName)) continue
81+
7082
hookOptions.internals = hook.esmFirst
7183
hook = hook.fn
7284
}
Lines changed: 133 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,133 @@
1+
'use strict'
2+
3+
const { expect } = require('chai')
4+
const sinon = require('sinon')
5+
const Module = require('module')
6+
7+
describe('register', () => {
8+
let hooksMock
9+
let HookMock
10+
let originalModuleProtoRequire
11+
12+
const clearRegisterCache = () => {
13+
const registerPath = require.resolve('../../src/helpers/register')
14+
delete require.cache[registerPath]
15+
}
16+
17+
beforeEach(() => {
18+
delete process.env.DD_TRACE_CONFLUENTINC_KAFKA_JAVASCRIPT_ENABLED
19+
delete process.env.DD_TRACE_DISABLED_INSTRUMENTATIONS
20+
21+
hooksMock = {
22+
'@confluentinc/kafka-javascript': {
23+
disabled: true,
24+
fn: sinon.stub().returns('hooked')
25+
},
26+
'mongodb-core': {
27+
fn: sinon.stub().returns('hooked')
28+
}
29+
}
30+
31+
HookMock = sinon.stub()
32+
33+
const registerPath = require.resolve('../../src/helpers/register')
34+
originalModuleProtoRequire = Module.prototype.require
35+
36+
Module.prototype.require = function (request) {
37+
if (this.filename === registerPath) {
38+
const stubs = {
39+
'./hooks': hooksMock,
40+
'./hook': HookMock
41+
}
42+
return stubs[request] || originalModuleProtoRequire.call(this, request)
43+
}
44+
return originalModuleProtoRequire.call(this, request)
45+
}
46+
47+
clearRegisterCache()
48+
})
49+
50+
afterEach(() => {
51+
sinon.restore()
52+
Module.prototype.require = originalModuleProtoRequire
53+
clearRegisterCache()
54+
})
55+
56+
const loadRegisterWithEnv = (env = undefined) => {
57+
env = env || {}
58+
clearRegisterCache()
59+
Object.entries(env).forEach(([key, value]) => {
60+
process.env[key] = value
61+
})
62+
require('../../src/helpers/register')
63+
}
64+
65+
const runHookCallbacks = (hookMock) => {
66+
for (let i = 0; i < hookMock.callCount; i++) {
67+
const callback = hookMock.args[i][2]
68+
const moduleName = hookMock.args[i][0][0]
69+
const moduleExports = 'original'
70+
const result = callback(moduleExports, moduleName, '/path/to/module', '1.0.0')
71+
expect(result).to.equal('original')
72+
}
73+
}
74+
75+
it('should skip hooks that are disabled by default and process enabled hooks', () => {
76+
loadRegisterWithEnv()
77+
78+
expect(HookMock.callCount).to.equal(1)
79+
expect(HookMock.args[0]).to.deep.include(['mongodb-core'], { internals: undefined })
80+
81+
runHookCallbacks(HookMock)
82+
83+
expect(hooksMock['@confluentinc/kafka-javascript'].fn).to.not.have.been.called
84+
expect(hooksMock['mongodb-core'].fn).to.have.been.called
85+
})
86+
87+
it('should enable disabled hooks when DD_TRACE_[pkg]_ENABLED is true', () => {
88+
loadRegisterWithEnv({ DD_TRACE_CONFLUENTINC_KAFKA_JAVASCRIPT_ENABLED: 'true' })
89+
90+
expect(HookMock.callCount).to.equal(2)
91+
expect(HookMock.args[0]).to.deep.include(['@confluentinc/kafka-javascript'], { internals: undefined })
92+
expect(HookMock.args[1]).to.deep.include(['mongodb-core'], { internals: undefined })
93+
94+
runHookCallbacks(HookMock)
95+
96+
expect(hooksMock['@confluentinc/kafka-javascript'].fn).to.have.been.called
97+
expect(hooksMock['mongodb-core'].fn).to.have.been.called
98+
})
99+
100+
it('should not enable disabled hooks when DD_TRACE_[pkg]_ENABLED is false', () => {
101+
loadRegisterWithEnv({ DD_TRACE_CONFLUENTINC_KAFKA_JAVASCRIPT_ENABLED: 'false' })
102+
103+
expect(HookMock.callCount).to.equal(1)
104+
expect(HookMock.args[0]).to.deep.include(['mongodb-core'], { internals: undefined })
105+
106+
runHookCallbacks(HookMock)
107+
108+
expect(hooksMock['@confluentinc/kafka-javascript'].fn).to.not.have.been.called
109+
expect(hooksMock['mongodb-core'].fn).to.have.been.called
110+
})
111+
112+
it('should disable hooks that are disabled by DD_TRACE_[pkg]_ENABLED=false', () => {
113+
loadRegisterWithEnv({ DD_TRACE_MONGODB_CORE_ENABLED: 'false' })
114+
115+
expect(HookMock.callCount).to.equal(0)
116+
117+
runHookCallbacks(HookMock)
118+
119+
expect(hooksMock['@confluentinc/kafka-javascript'].fn).to.not.have.been.called
120+
expect(hooksMock['mongodb-core'].fn).to.not.have.been.called
121+
})
122+
123+
it('should disable hooks that are disabled by DD_TRACE_DISABLED_INSTRUMENTATIONS', () => {
124+
loadRegisterWithEnv({ DD_TRACE_DISABLED_INSTRUMENTATIONS: 'mongodb-core,@confluentinc/kafka-javascript' })
125+
126+
expect(HookMock.callCount).to.equal(0)
127+
128+
runHookCallbacks(HookMock)
129+
130+
expect(hooksMock['@confluentinc/kafka-javascript'].fn).to.not.have.been.called
131+
expect(hooksMock['mongodb-core'].fn).to.not.have.been.called
132+
})
133+
})

packages/datadog-plugin-confluentinc-kafka-javascript/src/batch-consumer.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ const KafkajsBatchConsumerPlugin = require('../../datadog-plugin-kafkajs/src/bat
44

55
class ConfluentKafkaJsBatchConsumerPlugin extends KafkajsBatchConsumerPlugin {
66
static get id () {
7-
return '@confluentinc/kafka-javascript'
7+
return 'confluentinc-kafka-javascript'
88
}
99
}
1010

packages/datadog-plugin-confluentinc-kafka-javascript/src/consumer.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ const KafkajsConsumerPlugin = require('../../datadog-plugin-kafkajs/src/consumer
44

55
class ConfluentKafkaJsConsumerPlugin extends KafkajsConsumerPlugin {
66
static get id () {
7-
return '@confluentinc/kafka-javascript'
7+
return 'confluentinc-kafka-javascript'
88
}
99
}
1010

packages/datadog-plugin-confluentinc-kafka-javascript/src/index.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ const BatchConsumerPlugin = require('./batch-consumer')
66
const KafkajsPlugin = require('../../datadog-plugin-kafkajs/src/index')
77

88
class ConfluentKafkaJsPlugin extends KafkajsPlugin {
9-
static get id () { return '@confluentinc/kafka-javascript' }
9+
static get id () { return 'confluentinc-kafka-javascript' }
1010
static get plugins () {
1111
return {
1212
producer: ProducerPlugin,

packages/datadog-plugin-confluentinc-kafka-javascript/src/producer.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ const KafkajsProducerPlugin = require('../../datadog-plugin-kafkajs/src/producer
44

55
class ConfluentKafkaJsProducerPlugin extends KafkajsProducerPlugin {
66
static get id () {
7-
return '@confluentinc/kafka-javascript'
7+
return 'confluentinc-kafka-javascript'
88
}
99
}
1010

packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ const getDsmPathwayHash = (isProducer, parentHash) => {
2727
describe('Plugin', () => {
2828
const module = '@confluentinc/kafka-javascript'
2929

30-
describe('@confluentinc/kafka-javascript', function () {
30+
describe('confluentinc-kafka-javascript', function () {
3131
this.timeout(30000)
3232

3333
afterEach(() => {
@@ -48,7 +48,7 @@ describe('Plugin', () => {
4848

4949
process.env.DD_DATA_STREAMS_ENABLED = 'true'
5050
tracer = require('../../dd-trace')
51-
await agent.load('@confluentinc/kafka-javascript')
51+
await agent.load('confluentinc-kafka-javascript')
5252
const lib = require(`../../../versions/${module}@${version}`).get()
5353

5454
// Store the module for later use
@@ -73,7 +73,7 @@ describe('Plugin', () => {
7373
service: expectedSchema.send.serviceName,
7474
meta: {
7575
'span.kind': 'producer',
76-
component: '@confluentinc/kafka-javascript',
76+
component: 'confluentinc-kafka-javascript',
7777
'messaging.destination.name': 'test-topic',
7878
'messaging.kafka.bootstrap.servers': '127.0.0.1:9092'
7979
},
@@ -106,7 +106,7 @@ describe('Plugin', () => {
106106
[ERROR_TYPE]: error.name,
107107
[ERROR_MESSAGE]: error.message,
108108
[ERROR_STACK]: error.stack,
109-
component: '@confluentinc/kafka-javascript'
109+
component: 'confluentinc-kafka-javascript'
110110
})
111111
}, { timeoutMs: 10000 })
112112

@@ -141,7 +141,7 @@ describe('Plugin', () => {
141141
service: expectedSchema.receive.serviceName,
142142
meta: {
143143
'span.kind': 'consumer',
144-
component: '@confluentinc/kafka-javascript',
144+
component: 'confluentinc-kafka-javascript',
145145
'messaging.destination.name': 'test-topic'
146146
},
147147
resource: testTopic,
@@ -220,7 +220,7 @@ describe('Plugin', () => {
220220
[ERROR_MESSAGE]: fakeError.message,
221221
[ERROR_STACK]: fakeError.stack,
222222
'span.kind': 'consumer',
223-
component: '@confluentinc/kafka-javascript',
223+
component: 'confluentinc-kafka-javascript',
224224
'messaging.destination.name': 'test-topic'
225225
},
226226
resource: testTopic,
@@ -256,7 +256,7 @@ describe('Plugin', () => {
256256
const lib = require(`../../../versions/${module}@${version}`).get()
257257
nativeApi = lib
258258

259-
await agent.load('@confluentinc/kafka-javascript')
259+
await agent.load('confluentinc-kafka-javascript')
260260

261261
// Get the producer/consumer classes directly from the module
262262
Producer = nativeApi.Producer
@@ -295,7 +295,7 @@ describe('Plugin', () => {
295295
service: expectedSchema.send.serviceName,
296296
meta: {
297297
'span.kind': 'producer',
298-
component: '@confluentinc/kafka-javascript',
298+
component: 'confluentinc-kafka-javascript',
299299
'messaging.destination.name': testTopic,
300300
'messaging.kafka.bootstrap.servers': '127.0.0.1:9092'
301301
},
@@ -322,7 +322,7 @@ describe('Plugin', () => {
322322
})
323323

324324
expect(span.meta).to.include({
325-
component: '@confluentinc/kafka-javascript'
325+
component: 'confluentinc-kafka-javascript'
326326
})
327327

328328
expect(span.meta[ERROR_TYPE]).to.exist
@@ -410,7 +410,7 @@ describe('Plugin', () => {
410410
service: expectedSchema.receive.serviceName,
411411
meta: {
412412
'span.kind': 'consumer',
413-
component: '@confluentinc/kafka-javascript',
413+
component: 'confluentinc-kafka-javascript',
414414
'messaging.destination.name': testTopic
415415
},
416416
resource: testTopic,
@@ -464,7 +464,7 @@ describe('Plugin', () => {
464464
// expect(errorSpan).to.exist
465465
// expect(errorSpan.name).to.equal(expectedSchema.receive.opName)
466466
// expect(errorSpan.meta).to.include({
467-
// component: '@confluentinc/kafka-javascript'
467+
// component: 'confluentinc-kafka-javascript'
468468
// })
469469

470470
// expect(errorSpan.meta[ERROR_TYPE]).to.equal(fakeError.name)
@@ -488,7 +488,7 @@ describe('Plugin', () => {
488488

489489
beforeEach(async () => {
490490
tracer.init()
491-
tracer.use('@confluentinc/kafka-javascript', { dsmEnabled: true })
491+
tracer.use('confluentinc-kafka-javascript', { dsmEnabled: true })
492492
messages = [{ key: 'key1', value: 'test2' }]
493493
consumer = kafka.consumer({
494494
kafkaJS: { groupId: 'test-group', fromBeginning: false }

packages/dd-trace/src/plugin_manager.js

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
'use strict'
22

33
const { channel } = require('dc-polyfill')
4-
const { isFalse } = require('./util')
4+
const { isFalse, normalizePluginEnvName } = require('./util')
55
const plugins = require('./plugins')
66
const log = require('./log')
77
const { getEnvironmentVariable } = require('../../dd-trace/src/config-helper')
@@ -33,7 +33,7 @@ function maybeEnable (Plugin) {
3333
if (!Plugin || typeof Plugin !== 'function') return
3434
if (!pluginClasses[Plugin.id]) {
3535
const envName = `DD_TRACE_${Plugin.id.toUpperCase()}_ENABLED`
36-
const enabled = getEnvironmentVariable(envName.replaceAll(/[^a-z0-9_]/ig, '_'))
36+
const enabled = getEnvironmentVariable(normalizePluginEnvName(envName))
3737

3838
// TODO: remove the need to load the plugin class in order to disable the plugin
3939
if (isFalse(enabled) || disabledPlugins.has(Plugin.id)) {

0 commit comments

Comments
 (0)