Skip to content

Commit faee414

Browse files
authored
test: use dynamic test topic for kafkajs tests (#6053)
* use custom node action
* code cleanup
1 parent bf402c8 commit faee414

File tree

2 files changed

+42
-15
lines changed

2 files changed

+42
-15
lines changed

.github/workflows/apm-integrations.yml

Lines changed: 14 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -482,6 +482,9 @@ jobs:
482482
- uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
483483

484484
kafkajs:
485+
strategy:
486+
matrix:
487+
node-version: ['oldest', 'latest']
485488
runs-on: ubuntu-latest
486489
services:
487490
kafka:
@@ -506,7 +509,17 @@ jobs:
506509
SERVICES: kafka
507510
steps:
508511
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
509-
- uses: ./.github/actions/plugins/test
512+
- uses: ./.github/actions/testagent/start
513+
- uses: ./.github/actions/node
514+
with:
515+
version: ${{ matrix.node-version }}
516+
- uses: ./.github/actions/install
517+
- run: yarn test:plugins:ci
518+
- if: always()
519+
uses: ./.github/actions/testagent/logs
520+
with:
521+
suffix: plugins-${{ github.job }}-${{ matrix.node-version }}
522+
- uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
510523

511524
koa:
512525
runs-on: ubuntu-latest

packages/datadog-plugin-kafkajs/test/index.spec.js

Lines changed: 28 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
'use strict'
22

3+
const { randomUUID } = require('crypto')
34
const { expect } = require('chai')
45
const semver = require('semver')
56
const dc = require('dc-polyfill')
@@ -13,10 +14,9 @@ const DataStreamsContext = require('../../dd-trace/src/datastreams/context')
1314
const { computePathwayHash } = require('../../dd-trace/src/datastreams/pathway')
1415
const { ENTRY_PARENT_HASH, DataStreamsProcessor } = require('../../dd-trace/src/datastreams/processor')
1516

16-
const testTopic = 'test-topic'
1717
const testKafkaClusterId = '5L6g3nShT-eMCtK--X86sw'
1818

19-
const getDsmPathwayHash = (clusterIdAvailable, isProducer, parentHash) => {
19+
const getDsmPathwayHash = (testTopic, clusterIdAvailable, isProducer, parentHash) => {
2020
let edgeTags
2121
if (isProducer) {
2222
edgeTags = ['direction:out', 'topic:' + testTopic, 'type:kafka']
@@ -41,18 +41,14 @@ describe('Plugin', () => {
4141
})
4242
withVersions('kafkajs', 'kafkajs', (version) => {
4343
let kafka
44+
let admin
4445
let tracer
4546
let Kafka
4647
let Broker
4748
let clusterIdAvailable
4849
let expectedProducerHash
4950
let expectedConsumerHash
50-
51-
before(() => {
52-
clusterIdAvailable = semver.intersects(version, '>=1.13')
53-
expectedProducerHash = getDsmPathwayHash(clusterIdAvailable, true, ENTRY_PARENT_HASH)
54-
expectedConsumerHash = getDsmPathwayHash(clusterIdAvailable, false, expectedProducerHash)
55-
})
51+
let testTopic
5652

5753
describe('without configuration', () => {
5854
const messages = [{ key: 'key1', value: 'test2' }]
@@ -70,6 +66,18 @@ describe('Plugin', () => {
7066
brokers: ['127.0.0.1:9092'],
7167
logLevel: lib.logLevel.WARN
7268
})
69+
testTopic = `test-topic-${randomUUID()}`
70+
admin = kafka.admin()
71+
await admin.createTopics({
72+
topics: [{
73+
topic: testTopic,
74+
numPartitions: 1,
75+
replicationFactor: 1
76+
}]
77+
})
78+
clusterIdAvailable = semver.intersects(version, '>=1.13')
79+
expectedProducerHash = getDsmPathwayHash(testTopic, clusterIdAvailable, true, ENTRY_PARENT_HASH)
80+
expectedConsumerHash = getDsmPathwayHash(testTopic, clusterIdAvailable, false, expectedProducerHash)
7381
})
7482

7583
describe('producer', () => {
@@ -78,7 +86,7 @@ describe('Plugin', () => {
7886
'span.kind': 'producer',
7987
component: 'kafkajs',
8088
'pathway.hash': expectedProducerHash.readBigUInt64BE(0).toString(),
81-
'messaging.destination.name': 'test-topic',
89+
'messaging.destination.name': testTopic,
8290
'messaging.kafka.bootstrap.servers': '127.0.0.1:9092'
8391
}
8492
if (clusterIdAvailable) meta['kafka.cluster_id'] = testKafkaClusterId
@@ -246,7 +254,7 @@ describe('Plugin', () => {
246254
'span.kind': 'consumer',
247255
component: 'kafkajs',
248256
'pathway.hash': expectedConsumerHash.readBigUInt64BE(0).toString(),
249-
'messaging.destination.name': 'test-topic'
257+
'messaging.destination.name': testTopic
250258
},
251259
resource: testTopic,
252260
error: 0,
@@ -436,8 +444,8 @@ describe('Plugin', () => {
436444

437445
before(() => {
438446
clusterIdAvailable = semver.intersects(version, '>=1.13')
439-
expectedProducerHash = getDsmPathwayHash(clusterIdAvailable, true, ENTRY_PARENT_HASH)
440-
expectedConsumerHash = getDsmPathwayHash(clusterIdAvailable, false, expectedProducerHash)
447+
expectedProducerHash = getDsmPathwayHash(testTopic, clusterIdAvailable, true, ENTRY_PARENT_HASH)
448+
expectedConsumerHash = getDsmPathwayHash(testTopic, clusterIdAvailable, false, expectedProducerHash)
441449
})
442450

443451
afterEach(async () => {
@@ -531,7 +539,11 @@ describe('Plugin', () => {
531539
it('Should add backlog on consumer explicit commit', async () => {
532540
// Send a message, consume it, and record the last consumed offset
533541
let commitMeta
534-
await sendMessages(kafka, testTopic, messages)
542+
const deferred = {}
543+
deferred.promise = new Promise((resolve, reject) => {
544+
deferred.resolve = resolve
545+
deferred.reject = reject
546+
})
535547
await consumer.run({
536548
eachMessage: async payload => {
537549
const { topic, partition, message } = payload
@@ -540,10 +552,12 @@ describe('Plugin', () => {
540552
partition,
541553
offset: Number(message.offset)
542554
}
555+
deferred.resolve()
543556
},
544557
autoCommit: false
545558
})
546-
await new Promise(resolve => setTimeout(resolve, 50)) // Let eachMessage be called
559+
await sendMessages(kafka, testTopic, messages)
560+
await deferred.promise
547561
await consumer.disconnect() // Flush ongoing `eachMessage` calls
548562
for (const call of setOffsetSpy.getCalls()) {
549563
expect(call.args[0]).to.not.have.property('type', 'kafka_commit')

0 commit comments

Comments
 (0)