From 9a578e3e934458ea162e6c3d1717cb6ade33cc64 Mon Sep 17 00:00:00 2001 From: William Conti <58711692+wconti27@users.noreply.github.com> Date: Thu, 25 Jan 2024 14:15:49 -0500 Subject: [PATCH 01/44] feat: add datadog trace context extraction to aws kinesis (#3874) Adds Datadog Trace Context Extraction and DSM context extraction the AWS Kinesis `getRecords` requests. --- .../src/services/kinesis.js | 108 +++++++++++++++++- .../test/kinesis.spec.js | 52 ++++++++- packages/dd-trace/test/plugins/agent.js | 1 + 3 files changed, 159 insertions(+), 2 deletions(-) diff --git a/packages/datadog-plugin-aws-sdk/src/services/kinesis.js b/packages/datadog-plugin-aws-sdk/src/services/kinesis.js index e4ce71cc3e1..052e43f187b 100644 --- a/packages/datadog-plugin-aws-sdk/src/services/kinesis.js +++ b/packages/datadog-plugin-aws-sdk/src/services/kinesis.js @@ -1,15 +1,69 @@ 'use strict' const { - CONTEXT_PROPAGATION_KEY + CONTEXT_PROPAGATION_KEY, + getSizeOrZero } = require('../../../dd-trace/src/datastreams/processor') const { encodePathwayContext } = require('../../../dd-trace/src/datastreams/pathway') const log = require('../../../dd-trace/src/log') const BaseAwsSdkPlugin = require('../base') +const { storage } = require('../../../datadog-core') class Kinesis extends BaseAwsSdkPlugin { static get id () { return 'kinesis' } static get peerServicePrecursors () { return ['streamname'] } + constructor (...args) { + super(...args) + + // TODO(bengl) Find a way to create the response span tags without this WeakMap being populated + // in the base class + this.requestTags = new WeakMap() + + this.addSub('apm:aws:response:start:kinesis', obj => { + const { request, response } = obj + const store = storage.getStore() + const plugin = this + + // if we have either of these operations, we want to store the streamName param + // since it is not typically available during get/put records requests + if (request.operation === 'getShardIterator' || request.operation === 'listShards') { + this.storeStreamName(request.params, request.operation, store) + return + } + + if (request.operation === 'getRecords') { + let span + const responseExtraction = this.responseExtract(request.params, request.operation, response) + if (responseExtraction && responseExtraction.maybeChildOf) { + obj.needsFinish = true + const options = { + childOf: responseExtraction.maybeChildOf, + tags: Object.assign( + {}, + this.requestTags.get(request) || {}, + { 'span.kind': 'server' } + ) + } + span = plugin.tracer.startSpan('aws.response', options) + this.enter(span, store) + } + + // get the stream name that should have been stored previously + const { streamName } = storage.getStore() + + // extract DSM context after as we might not have a parent-child but may have a DSM context + this.responseExtractDSMContext( + request.operation, response, span ?? 
null, streamName + ) + } + }) + + this.addSub('apm:aws:response:finish:kinesis', err => { + const { span } = storage.getStore() + this.finish(span, null, err) + }) + } + generateTags (params, operation, response) { if (!params || !params.StreamName) return {} @@ -20,6 +74,58 @@ class Kinesis extends BaseAwsSdkPlugin { } } + storeStreamName (params, operation, store) { + if (!operation || (operation !== 'getShardIterator' && operation !== 'listShards')) return + if (!params || !params.StreamName) return + + const streamName = params.StreamName + storage.enterWith({ ...store, streamName }) + } + + responseExtract (params, operation, response) { + if (operation !== 'getRecords') return + if (params.Limit && params.Limit !== 1) return + if (!response || !response.Records || !response.Records[0]) return + + const record = response.Records[0] + + try { + const decodedData = JSON.parse(Buffer.from(record.Data).toString()) + + return { + maybeChildOf: this.tracer.extract('text_map', decodedData._datadog), + parsedAttributes: decodedData._datadog + } + } catch (e) { + log.error(e) + } + } + + responseExtractDSMContext (operation, response, span, streamName) { + if (!this.config.dsmEnabled) return + if (operation !== 'getRecords') return + if (!response || !response.Records || !response.Records[0]) return + + // we only want to set the payloadSize on the span if we have one message, not repeatedly + span = response.Records.length > 1 ? null : span + + response.Records.forEach(record => { + const parsedAttributes = JSON.parse(Buffer.from(record.Data).toString()) + + if ( + parsedAttributes && + parsedAttributes._datadog && + parsedAttributes._datadog[CONTEXT_PROPAGATION_KEY] && + streamName + ) { + const payloadSize = getSizeOrZero(record.Data) + this.tracer.decodeDataStreamsContext(Buffer.from(parsedAttributes._datadog[CONTEXT_PROPAGATION_KEY])) + this.tracer + .setCheckpoint(['direction:in', `topic:${streamName}`, 'type:kinesis'], span, payloadSize) + } + }) + } + // AWS-SDK will b64 kinesis payloads // or will accept an already b64 encoded payload // This method handles both diff --git a/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js b/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js index 8f322d69a17..62f9b3a2f2d 100644 --- a/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js @@ -165,6 +165,7 @@ describe('Kinesis', function () { describe('DSM Context Propagation', () => { const expectedProducerHash = '15481393933680799703' + const expectedConsumerHash = '10538746554122257118' before(() => { return agent.load('aws-sdk', { kinesis: { dsmEnabled: true } }, { dsmEnabled: true }) @@ -187,8 +188,33 @@ describe('Kinesis', function () { }) }) + afterEach(() => agent.reload('aws-sdk', { kinesis: { dsmEnabled: true } }, { dsmEnabled: true })) + + it('injects DSM pathway hash during Kinesis getRecord to the span', done => { + let getRecordSpanMeta = {} + agent.use(traces => { + const span = traces[0][0] + + if (span.name === 'aws.response') { + getRecordSpanMeta = span.meta + } + + expect(getRecordSpanMeta).to.include({ + 'pathway.hash': expectedConsumerHash + }) + }, { timeoutMs: 10000 }).then(done, done) + + helpers.putTestRecord(kinesis, streamNameDSM, helpers.dataBuffer, (err, data) => { + if (err) return done(err) + + helpers.getTestData(kinesis, streamNameDSM, data, (err) => { + if (err) return done(err) + }) + }) + }) + it('injects DSM pathway hash during Kinesis putRecord to the span', done => { - let putRecordSpanMeta + let 
putRecordSpanMeta = {} agent.use(traces => { const span = traces[0][0] @@ -225,6 +251,30 @@ describe('Kinesis', function () { if (err) return done(err) }) }) + + it('emits DSM stats to the agent during Kinesis getRecord', done => { + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = 0 + // we should have only have 1 stats point since we only had 1 put operation + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived += statsBuckets.Stats.length + }) + } + }, { timeoutMs: 10000 }) + expect(statsPointsReceived).to.be.at.least(2) + expect(dsmStatsExist(agent, expectedConsumerHash)).to.equal(true) + }, { timeoutMs: 10000 }).then(done, done) + + helpers.putTestRecord(kinesis, streamNameDSM, helpers.dataBuffer, (err, data) => { + if (err) return done(err) + + helpers.getTestData(kinesis, streamNameDSM, data, (err) => { + if (err) return done(err) + }) + }) + }) }) }) }) diff --git a/packages/dd-trace/test/plugins/agent.js b/packages/dd-trace/test/plugins/agent.js index 1f80d719439..2896c7f2886 100644 --- a/packages/dd-trace/test/plugins/agent.js +++ b/packages/dd-trace/test/plugins/agent.js @@ -298,6 +298,7 @@ module.exports = { pluginName = [].concat(pluginName) plugins = pluginName config = [].concat(config) + dsmStats = [] for (let i = 0, l = pluginName.length; i < l; i++) { tracer.use(pluginName[i], config[i]) From 3d328df6d09f8e836f398cc2c48b3ec4a56844d8 Mon Sep 17 00:00:00 2001 From: Julio Gonzalez <107922352+hoolioh@users.noreply.github.com> Date: Fri, 26 Jan 2024 17:27:03 +0100 Subject: [PATCH 02/44] Fix workflow for plugins (#4004) * Update runner in aerospike plugin. * Set up codecov version to 2. * Rollback to ubuntu 18.04 and set codecov version to 2. 
--- .github/workflows/plugins.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/plugins.yml b/.github/workflows/plugins.yml index 754df150e82..3b6f903e9f1 100644 --- a/.github/workflows/plugins.yml +++ b/.github/workflows/plugins.yml @@ -72,7 +72,7 @@ jobs: yarn install --ignore-engines yarn test:plugins:ci - if: always() - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v2 aerospike-node-16: runs-on: ubuntu-latest @@ -996,8 +996,9 @@ jobs: - uses: actions/checkout@v3 - uses: ./.github/actions/node/setup - run: yarn install --ignore-engines - - run: yarn services - - run: yarn test:plugins + - run: yarn config set ignore-engines true + - run: yarn services --ignore-engines + - run: yarn test:plugins --ignore-engines - uses: codecov/codecov-action@v3 paperplane: From c180742e5f33d74bf79cb0efe5ee4141c9865217 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Mon, 29 Jan 2024 10:29:16 +0100 Subject: [PATCH 03/44] [ci-visibility] Add support for `evp_proxy/v4` (gzip compatible) (#3998) --- .../exporters/agent-proxy/index.js | 31 ++++++++-- .../exporters/agentless/index.js | 2 + .../exporters/ci-visibility-exporter.js | 21 ++++--- .../exporters/git/git_metadata.js | 38 +++++++++--- .../get-itr-configuration.js | 3 +- .../get-skippable-suites.js | 3 +- .../exporters/agent-proxy/agent-proxy.spec.js | 35 +++++++++++ .../exporters/git/git_metadata.spec.js | 58 +++++++++++-------- 8 files changed, 143 insertions(+), 48 deletions(-) diff --git a/packages/dd-trace/src/ci-visibility/exporters/agent-proxy/index.js b/packages/dd-trace/src/ci-visibility/exporters/agent-proxy/index.js index a9e5a444674..2491f562d2e 100644 --- a/packages/dd-trace/src/ci-visibility/exporters/agent-proxy/index.js +++ b/packages/dd-trace/src/ci-visibility/exporters/agent-proxy/index.js @@ -5,10 +5,23 @@ const AgentlessWriter = require('../agentless/writer') const CoverageWriter = require('../agentless/coverage-writer') const CiVisibilityExporter = require('../ci-visibility-exporter') -const AGENT_EVP_PROXY_PATH = '/evp_proxy/v2' +const AGENT_EVP_PROXY_PATH_PREFIX = '/evp_proxy/v' +const AGENT_EVP_PROXY_PATH_REGEX = /\/evp_proxy\/v(\d+)\/?/ -function getIsEvpCompatible (err, agentInfo) { - return !err && agentInfo.endpoints.some(url => url.includes(AGENT_EVP_PROXY_PATH)) +function getLatestEvpProxyVersion (err, agentInfo) { + if (err) { + return 0 + } + return agentInfo.endpoints.reduce((acc, endpoint) => { + if (endpoint.includes(AGENT_EVP_PROXY_PATH_PREFIX)) { + const version = Number(endpoint.replace(AGENT_EVP_PROXY_PATH_REGEX, '$1')) + if (isNaN(version)) { + return acc + } + return version > acc ? 
version : acc + } + return acc + }, 0) } class AgentProxyCiVisibilityExporter extends CiVisibilityExporter { @@ -25,17 +38,22 @@ class AgentProxyCiVisibilityExporter extends CiVisibilityExporter { this.getAgentInfo((err, agentInfo) => { this._isInitialized = true - const isEvpCompatible = getIsEvpCompatible(err, agentInfo) + const latestEvpProxyVersion = getLatestEvpProxyVersion(err, agentInfo) + const isEvpCompatible = latestEvpProxyVersion >= 2 + const isGzipCompatible = latestEvpProxyVersion >= 4 + + const evpProxyPrefix = `${AGENT_EVP_PROXY_PATH_PREFIX}${latestEvpProxyVersion}` if (isEvpCompatible) { this._isUsingEvpProxy = true + this.evpProxyPrefix = evpProxyPrefix this._writer = new AgentlessWriter({ url: this._url, tags, - evpProxyPrefix: AGENT_EVP_PROXY_PATH + evpProxyPrefix }) this._coverageWriter = new CoverageWriter({ url: this._url, - evpProxyPrefix: AGENT_EVP_PROXY_PATH + evpProxyPrefix }) } else { this._writer = new AgentWriter({ @@ -51,6 +69,7 @@ class AgentProxyCiVisibilityExporter extends CiVisibilityExporter { this._resolveCanUseCiVisProtocol(isEvpCompatible) this.exportUncodedTraces() this.exportUncodedCoverages() + this._isGzipCompatible = isGzipCompatible }) } diff --git a/packages/dd-trace/src/ci-visibility/exporters/agentless/index.js b/packages/dd-trace/src/ci-visibility/exporters/agentless/index.js index 70276a3521c..dcbded6a54e 100644 --- a/packages/dd-trace/src/ci-visibility/exporters/agentless/index.js +++ b/packages/dd-trace/src/ci-visibility/exporters/agentless/index.js @@ -21,6 +21,8 @@ class AgentlessCiVisibilityExporter extends CiVisibilityExporter { this._coverageWriter = new CoverageWriter({ url: this._coverageUrl }) this._apiUrl = url || new URL(`https://api.${site}`) + // Agentless is always gzip compatible + this._isGzipCompatible = true } setUrl (url, coverageUrl = url, apiUrl = url) { diff --git a/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js b/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js index 446479e1af7..d4cb05cb4a0 100644 --- a/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js +++ b/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js @@ -108,6 +108,7 @@ class CiVisibilityExporter extends AgentInfoExporter { env: this._config.env, service: this._config.service, isEvpProxy: !!this._isUsingEvpProxy, + evpProxyPrefix: this.evpProxyPrefix, custom: getTestConfigurationTags(this._config.tags), ...testConfiguration } @@ -134,6 +135,7 @@ class CiVisibilityExporter extends AgentInfoExporter { env: this._config.env, service: this._config.service, isEvpProxy: !!this._isUsingEvpProxy, + evpProxyPrefix: this.evpProxyPrefix, custom: getTestConfigurationTags(this._config.tags), ...testConfiguration } @@ -172,14 +174,19 @@ class CiVisibilityExporter extends AgentInfoExporter { if (!canUseCiVisProtocol) { return } - sendGitMetadataRequest(this._getApiUrl(), !!this._isUsingEvpProxy, repositoryUrl, (err) => { - if (err) { - log.error(`Error uploading git metadata: ${err.message}`) - } else { - log.debug('Successfully uploaded git metadata') + sendGitMetadataRequest( + this._getApiUrl(), + { isEvpProxy: !!this._isUsingEvpProxy, evpProxyPrefix: this.evpProxyPrefix }, + repositoryUrl, + (err) => { + if (err) { + log.error(`Error uploading git metadata: ${err.message}`) + } else { + log.debug('Successfully uploaded git metadata') + } + this._resolveGit(err) } - this._resolveGit(err) - }) + ) }) } diff --git a/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js 
b/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js index fb0329ab637..a00fedf8dfa 100644 --- a/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js +++ b/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js @@ -60,7 +60,7 @@ function getCommonRequestOptions (url) { * The response are the commits for which the backend already has information * This response is used to know which commits can be ignored from there on */ -function getCommitsToUpload ({ url, repositoryUrl, latestCommits, isEvpProxy }, callback) { +function getCommitsToUpload ({ url, repositoryUrl, latestCommits, isEvpProxy, evpProxyPrefix }, callback) { const commonOptions = getCommonRequestOptions(url) const options = { @@ -73,7 +73,7 @@ function getCommitsToUpload ({ url, repositoryUrl, latestCommits, isEvpProxy }, } if (isEvpProxy) { - options.path = '/evp_proxy/v2/api/v2/git/repository/search_commits' + options.path = `${evpProxyPrefix}/api/v2/git/repository/search_commits` options.headers['X-Datadog-EVP-Subdomain'] = 'api' delete options.headers['dd-api-key'] } @@ -122,7 +122,7 @@ function getCommitsToUpload ({ url, repositoryUrl, latestCommits, isEvpProxy }, /** * This function uploads a git packfile */ -function uploadPackFile ({ url, isEvpProxy, packFileToUpload, repositoryUrl, headCommit }, callback) { +function uploadPackFile ({ url, isEvpProxy, evpProxyPrefix, packFileToUpload, repositoryUrl, headCommit }, callback) { const form = new FormData() const pushedSha = JSON.stringify({ @@ -162,7 +162,7 @@ function uploadPackFile ({ url, isEvpProxy, packFileToUpload, repositoryUrl, hea } if (isEvpProxy) { - options.path = '/evp_proxy/v2/api/v2/git/repository/packfile' + options.path = `${evpProxyPrefix}/api/v2/git/repository/packfile` options.headers['X-Datadog-EVP-Subdomain'] = 'api' delete options.headers['dd-api-key'] } @@ -187,6 +187,7 @@ function uploadPackFile ({ url, isEvpProxy, packFileToUpload, repositoryUrl, hea function generateAndUploadPackFiles ({ url, isEvpProxy, + evpProxyPrefix, commitsToUpload, repositoryUrl, headCommit @@ -216,6 +217,7 @@ function generateAndUploadPackFiles ({ packFileToUpload: packFilesToUpload[packFileIndex++], url, isEvpProxy, + evpProxyPrefix, repositoryUrl, headCommit }, @@ -228,6 +230,7 @@ function generateAndUploadPackFiles ({ packFileToUpload: packFilesToUpload[packFileIndex++], url, isEvpProxy, + evpProxyPrefix, repositoryUrl, headCommit }, @@ -238,7 +241,7 @@ function generateAndUploadPackFiles ({ /** * This function uploads git metadata to CI Visibility's backend. 
*/ -function sendGitMetadata (url, isEvpProxy, configRepositoryUrl, callback) { +function sendGitMetadata (url, { isEvpProxy, evpProxyPrefix }, configRepositoryUrl, callback) { let repositoryUrl = configRepositoryUrl if (!repositoryUrl) { repositoryUrl = getRepositoryUrl() @@ -266,15 +269,34 @@ function sendGitMetadata (url, isEvpProxy, configRepositoryUrl, callback) { // If it has already unshallowed or the clone is not shallow, we move on if (hasCheckedShallow || !isShallowRepository()) { - return generateAndUploadPackFiles({ url, isEvpProxy, commitsToUpload, repositoryUrl, headCommit }, callback) + return generateAndUploadPackFiles({ + url, + isEvpProxy, + evpProxyPrefix, + commitsToUpload, + repositoryUrl, + headCommit + }, callback) } // Otherwise we unshallow and get commits to upload again log.debug('It is shallow clone, unshallowing...') unshallowRepository() - getCommitsToUpload({ url, repositoryUrl, latestCommits, isEvpProxy }, getOnFinishGetCommitsToUpload(true)) + getCommitsToUpload({ + url, + repositoryUrl, + latestCommits, + isEvpProxy, + evpProxyPrefix + }, getOnFinishGetCommitsToUpload(true)) } - getCommitsToUpload({ url, repositoryUrl, latestCommits, isEvpProxy }, getOnFinishGetCommitsToUpload(false)) + getCommitsToUpload({ + url, + repositoryUrl, + latestCommits, + isEvpProxy, + evpProxyPrefix + }, getOnFinishGetCommitsToUpload(false)) } module.exports = { diff --git a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js index 6df4d99ea98..a6932438ecb 100644 --- a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js +++ b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js @@ -14,6 +14,7 @@ const { function getItrConfiguration ({ url, isEvpProxy, + evpProxyPrefix, env, service, repositoryUrl, @@ -38,7 +39,7 @@ function getItrConfiguration ({ } if (isEvpProxy) { - options.path = '/evp_proxy/v2/api/v2/libraries/tests/services/setting' + options.path = `${evpProxyPrefix}/api/v2/libraries/tests/services/setting` options.headers['X-Datadog-EVP-Subdomain'] = 'api' } else { const apiKey = process.env.DATADOG_API_KEY || process.env.DD_API_KEY diff --git a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js index 5e206473319..1926e18b925 100644 --- a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js +++ b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js @@ -15,6 +15,7 @@ const { function getSkippableSuites ({ url, isEvpProxy, + evpProxyPrefix, env, service, repositoryUrl, @@ -38,7 +39,7 @@ function getSkippableSuites ({ } if (isEvpProxy) { - options.path = '/evp_proxy/v2/api/v2/ci/tests/skippable' + options.path = `${evpProxyPrefix}/api/v2/ci/tests/skippable` options.headers['X-Datadog-EVP-Subdomain'] = 'api' } else { const apiKey = process.env.DATADOG_API_KEY || process.env.DD_API_KEY diff --git a/packages/dd-trace/test/ci-visibility/exporters/agent-proxy/agent-proxy.spec.js b/packages/dd-trace/test/ci-visibility/exporters/agent-proxy/agent-proxy.spec.js index 404852ec633..1dfa23dead5 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/agent-proxy/agent-proxy.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/agent-proxy/agent-proxy.spec.js @@ -250,4 +250,39 @@ 
describe('AgentProxyCiVisibilityExporter', () => { expect(mockCoverageWriter.setUrl).to.have.been.calledWith(coverageUrl) }) }) + + describe('_isGzipCompatible', () => { + it('should set _isGzipCompatible to true if the newest version is v4 or newer', async () => { + const scope = nock('http://localhost:8126') + .get('/info') + .reply(200, JSON.stringify({ + endpoints: ['/evp_proxy/v2', '/evp_proxy/v3', '/evp_proxy/v4/', '/evp_proxy/v5'] + })) + + const agentProxyCiVisibilityExporter = new AgentProxyCiVisibilityExporter({ port, tags }) + + expect(agentProxyCiVisibilityExporter).not.to.be.null + + await agentProxyCiVisibilityExporter._canUseCiVisProtocolPromise + + expect(agentProxyCiVisibilityExporter._isGzipCompatible).to.be.true + expect(scope.isDone()).to.be.true + }) + it('should set _isGzipCompatible to false if the newest version is v3 or older', async () => { + const scope = nock('http://localhost:8126') + .get('/info') + .reply(200, JSON.stringify({ + endpoints: ['/evp_proxy/v2', '/evp_proxy/v3'] + })) + + const agentProxyCiVisibilityExporter = new AgentProxyCiVisibilityExporter({ port, tags }) + + expect(agentProxyCiVisibilityExporter).not.to.be.null + + await agentProxyCiVisibilityExporter._canUseCiVisProtocolPromise + + expect(agentProxyCiVisibilityExporter._isGzipCompatible).to.be.false + expect(scope.isDone()).to.be.true + }) + }) }) diff --git a/packages/dd-trace/test/ci-visibility/exporters/git/git_metadata.spec.js b/packages/dd-trace/test/ci-visibility/exporters/git/git_metadata.spec.js index db5cb2a70de..1cc1ded5bdb 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/git/git_metadata.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/git/git_metadata.spec.js @@ -71,7 +71,7 @@ describe('git_metadata', () => { .reply(200, JSON.stringify({ data: latestCommits.map((sha) => ({ id: sha, type: 'commit' })) })) isShallowRepositoryStub.returns(true) - gitMetadata.sendGitMetadata(new URL('https://api.test.com'), false, '', (err) => { + gitMetadata.sendGitMetadata(new URL('https://api.test.com'), { isEvpProxy: false }, '', (err) => { expect(unshallowRepositoryStub).not.to.have.been.called expect(err).to.be.null expect(scope.isDone()).to.be.true @@ -89,7 +89,7 @@ describe('git_metadata', () => { .reply(204) isShallowRepositoryStub.returns(true) - gitMetadata.sendGitMetadata(new URL('https://api.test.com'), false, '', (err) => { + gitMetadata.sendGitMetadata(new URL('https://api.test.com'), { isEvpProxy: false }, '', (err) => { expect(unshallowRepositoryStub).to.have.been.called expect(err).to.be.null expect(scope.isDone()).to.be.true @@ -104,7 +104,7 @@ describe('git_metadata', () => { .post('/api/v2/git/repository/packfile') .reply(204) - gitMetadata.sendGitMetadata(new URL('https://api.test.com'), false, '', (err) => { + gitMetadata.sendGitMetadata(new URL('https://api.test.com'), { isEvpProxy: false }, '', (err) => { expect(err).to.be.null expect(scope.isDone()).to.be.true done() @@ -120,7 +120,7 @@ describe('git_metadata', () => { getCommitsRevListStub.returns([]) - gitMetadata.sendGitMetadata(new URL('https://api.test.com'), false, '', (err) => { + gitMetadata.sendGitMetadata(new URL('https://api.test.com'), { isEvpProxy: false }, '', (err) => { expect(err).to.be.null // to check that it is not called expect(scope.isDone()).to.be.false @@ -136,7 +136,7 @@ describe('git_metadata', () => { .post('/api/v2/git/repository/packfile') .reply(204) - gitMetadata.sendGitMetadata(new URL('https://api.test.com'), false, '', (err) => { + gitMetadata.sendGitMetadata(new 
URL('https://api.test.com'), { isEvpProxy: false }, '', (err) => { // eslint-disable-next-line expect(err.message).to.contain('Error fetching commits to exclude: Error from https://api.test.com/api/v2/git/repository/search_commits: 404 Not Found. Response from the endpoint: "Not found SHA"') // to check that it is not called @@ -153,7 +153,7 @@ describe('git_metadata', () => { .post('/api/v2/git/repository/packfile') .reply(204) - gitMetadata.sendGitMetadata(new URL('https://api.test.com'), false, '', (err) => { + gitMetadata.sendGitMetadata(new URL('https://api.test.com'), { isEvpProxy: false }, '', (err) => { expect(err.message).to.contain("Can't parse commits to exclude response: Invalid commit type response") // to check that it is not called expect(scope.isDone()).to.be.false @@ -169,7 +169,7 @@ describe('git_metadata', () => { .post('/api/v2/git/repository/packfile') .reply(204) - gitMetadata.sendGitMetadata(new URL('https://api.test.com'), false, '', (err) => { + gitMetadata.sendGitMetadata(new URL('https://api.test.com'), { isEvpProxy: false }, '', (err) => { expect(err.message).to.contain("Can't parse commits to exclude response: Invalid commit format") // to check that it is not called expect(scope.isDone()).to.be.false @@ -185,7 +185,7 @@ describe('git_metadata', () => { .post('/api/v2/git/repository/packfile') .reply(502) - gitMetadata.sendGitMetadata(new URL('https://api.test.com'), false, '', (err) => { + gitMetadata.sendGitMetadata(new URL('https://api.test.com'), { isEvpProxy: false }, '', (err) => { expect(err.message).to.contain('Could not upload packfiles: status code 502') expect(scope.isDone()).to.be.true done() @@ -212,7 +212,7 @@ describe('git_metadata', () => { secondTemporaryPackFile ]) - gitMetadata.sendGitMetadata(new URL('https://api.test.com'), false, '', (err) => { + gitMetadata.sendGitMetadata(new URL('https://api.test.com'), { isEvpProxy: false }, '', (err) => { expect(err).to.be.null expect(scope.isDone()).to.be.true done() @@ -282,7 +282,7 @@ describe('git_metadata', () => { 'not there either' ]) - gitMetadata.sendGitMetadata(new URL('https://api.test.com'), false, '', (err) => { + gitMetadata.sendGitMetadata(new URL('https://api.test.com'), { isEvpProxy: false }, '', (err) => { expect(err.message).to.contain('Could not read "not-there"') expect(scope.isDone()).to.be.false done() @@ -298,7 +298,7 @@ describe('git_metadata', () => { generatePackFilesForCommitsStub.returns([]) - gitMetadata.sendGitMetadata(new URL('https://api.test.com'), false, '', (err) => { + gitMetadata.sendGitMetadata(new URL('https://api.test.com'), { isEvpProxy: false }, '', (err) => { expect(err.message).to.contain('Failed to generate packfiles') expect(scope.isDone()).to.be.false done() @@ -314,7 +314,7 @@ describe('git_metadata', () => { getRepositoryUrlStub.returns('') - gitMetadata.sendGitMetadata(new URL('https://api.test.com'), false, '', (err) => { + gitMetadata.sendGitMetadata(new URL('https://api.test.com'), { isEvpProxy: false }, '', (err) => { expect(err.message).to.contain('Repository URL is empty') expect(scope.isDone()).to.be.false done() @@ -332,7 +332,7 @@ describe('git_metadata', () => { .post('/api/v2/git/repository/packfile') .reply(204) - gitMetadata.sendGitMetadata(new URL('https://api.test.com'), false, '', (err) => { + gitMetadata.sendGitMetadata(new URL('https://api.test.com'), { isEvpProxy: false }, '', (err) => { expect(err).to.be.null expect(scope.isDone()).to.be.true done() @@ -349,10 +349,14 @@ describe('git_metadata', () => { done() }) - 
gitMetadata.sendGitMetadata(new URL('https://api.test.com'), true, '', (err) => { - expect(err).to.be.null - expect(scope.isDone()).to.be.true - }) + gitMetadata.sendGitMetadata( + new URL('https://api.test.com'), + { isEvpProxy: true, evpProxyPrefix: '/evp_proxy/v2' }, + '', + (err) => { + expect(err).to.be.null + expect(scope.isDone()).to.be.true + }) }) it('should use the input repository url and not call getRepositoryUrl', (done) => { @@ -370,14 +374,18 @@ describe('git_metadata', () => { .post('/evp_proxy/v2/api/v2/git/repository/packfile') .reply(204) - gitMetadata.sendGitMetadata(new URL('https://api.test.com'), true, 'https://custom-git@datadog.com', (err) => { - expect(err).to.be.null - expect(scope.isDone()).to.be.true - requestPromise.then((repositoryUrl) => { - expect(getRepositoryUrlStub).not.to.have.been.called - expect(repositoryUrl).to.equal('https://custom-git@datadog.com') - done() + gitMetadata.sendGitMetadata( + new URL('https://api.test.com'), + { isEvpProxy: true, evpProxyPrefix: '/evp_proxy/v2' }, + 'https://custom-git@datadog.com', + (err) => { + expect(err).to.be.null + expect(scope.isDone()).to.be.true + requestPromise.then((repositoryUrl) => { + expect(getRepositoryUrlStub).not.to.have.been.called + expect(repositoryUrl).to.equal('https://custom-git@datadog.com') + done() + }) }) - }) }) }) From a071d6eca0ef79b4ea843664525d84552a1db91f Mon Sep 17 00:00:00 2001 From: William Conti <58711692+wconti27@users.noreply.github.com> Date: Mon, 29 Jan 2024 10:47:39 -0500 Subject: [PATCH 04/44] feat(dsm): add dsm to rabbitmq integrations (amqplib/rhea) (#3987) * Add DSM to RabbitMQ Integrations for dd-trace-js --- .github/workflows/plugins.yml | 1 + .../datadog-instrumentations/src/amqplib.js | 2 +- .../datadog-plugin-amqplib/src/consumer.js | 11 +- .../datadog-plugin-amqplib/src/producer.js | 15 ++- .../datadog-plugin-amqplib/test/index.spec.js | 111 ++++++++++++++++++ .../test/kinesis.spec.js | 6 +- .../datadog-plugin-aws-sdk/test/sns.spec.js | 6 +- .../test/spec_helpers.js | 18 --- .../datadog-plugin-aws-sdk/test/sqs.spec.js | 6 +- packages/datadog-plugin-rhea/src/consumer.js | 12 +- packages/datadog-plugin-rhea/src/producer.js | 11 ++ .../datadog-plugin-rhea/test/index.spec.js | 84 ++++++++++++- .../dd-trace/src/datastreams/processor.js | 6 + packages/dd-trace/test/plugins/agent.js | 23 +++- 14 files changed, 277 insertions(+), 35 deletions(-) diff --git a/.github/workflows/plugins.yml b/.github/workflows/plugins.yml index 3b6f903e9f1..df537f0b487 100644 --- a/.github/workflows/plugins.yml +++ b/.github/workflows/plugins.yml @@ -153,6 +153,7 @@ jobs: env: PLUGINS: amqp10|rhea SERVICES: qpid + DD_DATA_STREAMS_ENABLED: true steps: - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start diff --git a/packages/datadog-instrumentations/src/amqplib.js b/packages/datadog-instrumentations/src/amqplib.js index 06e5de2b306..2e6835aad8d 100644 --- a/packages/datadog-instrumentations/src/amqplib.js +++ b/packages/datadog-instrumentations/src/amqplib.js @@ -28,7 +28,7 @@ addHook({ name: 'amqplib', file: 'lib/channel.js', versions: ['>=0.5'] }, channe }) shimmer.wrap(channel.Channel.prototype, 'sendMessage', sendMessage => function (fields) { - return instrument(sendMessage, this, arguments, 'basic.publish', fields) + return instrument(sendMessage, this, arguments, 'basic.publish', fields, arguments[2]) }) shimmer.wrap(channel.BaseChannel.prototype, 'dispatchMessage', dispatchMessage => function (fields, message) { diff --git 
a/packages/datadog-plugin-amqplib/src/consumer.js b/packages/datadog-plugin-amqplib/src/consumer.js index 0aed1696507..4778990faf5 100644 --- a/packages/datadog-plugin-amqplib/src/consumer.js +++ b/packages/datadog-plugin-amqplib/src/consumer.js @@ -2,6 +2,7 @@ const { TEXT_MAP } = require('../../../ext/formats') const ConsumerPlugin = require('../../dd-trace/src/plugins/consumer') +const { getAmqpMessageSize, CONTEXT_PROPAGATION_KEY } = require('../../dd-trace/src/datastreams/processor') const { getResourceName } = require('./util') class AmqplibConsumerPlugin extends ConsumerPlugin { @@ -13,7 +14,7 @@ class AmqplibConsumerPlugin extends ConsumerPlugin { const childOf = extract(this.tracer, message) - this.startSpan({ + const span = this.startSpan({ childOf, resource: getResourceName(method, fields), type: 'worker', @@ -26,6 +27,14 @@ class AmqplibConsumerPlugin extends ConsumerPlugin { 'amqp.destination': fields.destination } }) + + if (this.config.dsmEnabled && message) { + const payloadSize = getAmqpMessageSize({ headers: message.properties.headers, content: message.content }) + const queue = fields.queue ?? fields.routingKey + this.tracer.decodeDataStreamsContext(message.properties.headers[CONTEXT_PROPAGATION_KEY]) + this.tracer + .setCheckpoint(['direction:in', `topic:${queue}`, 'type:rabbitmq'], span, payloadSize) + } } } diff --git a/packages/datadog-plugin-amqplib/src/producer.js b/packages/datadog-plugin-amqplib/src/producer.js index 9c3d1da8d53..a07582e50c4 100644 --- a/packages/datadog-plugin-amqplib/src/producer.js +++ b/packages/datadog-plugin-amqplib/src/producer.js @@ -3,13 +3,15 @@ const { TEXT_MAP } = require('../../../ext/formats') const { CLIENT_PORT_KEY } = require('../../dd-trace/src/constants') const ProducerPlugin = require('../../dd-trace/src/plugins/producer') +const { encodePathwayContext } = require('../../dd-trace/src/datastreams/pathway') +const { getAmqpMessageSize, CONTEXT_PROPAGATION_KEY } = require('../../dd-trace/src/datastreams/processor') const { getResourceName } = require('./util') class AmqplibProducerPlugin extends ProducerPlugin { static get id () { return 'amqplib' } static get operation () { return 'command' } - start ({ channel = {}, method, fields }) { + start ({ channel = {}, method, fields, message }) { if (method !== 'basic.publish') return const stream = (channel.connection && channel.connection.stream) || {} @@ -30,6 +32,17 @@ class AmqplibProducerPlugin extends ProducerPlugin { fields.headers = fields.headers || {} this.tracer.inject(span, TEXT_MAP, fields.headers) + + if (this.config.dsmEnabled) { + const hasRoutingKey = fields.routingKey != null + const payloadSize = getAmqpMessageSize({ content: message, headers: fields.headers }) + const dataStreamsContext = this.tracer + .setCheckpoint( + ['direction:out', `exchange:${fields.exchange}`, `has_routing_key:${hasRoutingKey}`, 'type:rabbitmq'] + , span, payloadSize) + const pathwayCtx = encodePathwayContext(dataStreamsContext) + fields.headers[CONTEXT_PROPAGATION_KEY] = pathwayCtx + } } } diff --git a/packages/datadog-plugin-amqplib/test/index.spec.js b/packages/datadog-plugin-amqplib/test/index.spec.js index b6f73212a7f..784f8d0f23b 100644 --- a/packages/datadog-plugin-amqplib/test/index.spec.js +++ b/packages/datadog-plugin-amqplib/test/index.spec.js @@ -13,6 +13,7 @@ describe('Plugin', () => { describe('amqplib', () => { withVersions('amqplib', 'amqplib', version => { beforeEach(() => { + process.env.DD_DATA_STREAMS_ENABLED = 'true' tracer = require('../../dd-trace') }) @@ -300,6 
+301,116 @@ describe('Plugin', () => { .catch(done) }) }) + + describe('when data streams monitoring is enabled', function () { + this.timeout(10000) + + const expectedProducerHash = '17191234428405871432' + const expectedConsumerHash = '18277095184718602853' + + before(() => { + tracer = require('../../dd-trace') + tracer.use('amqplib') + }) + + before(async () => { + return agent.load('amqplib') + }) + + after(() => { + return agent.close({ ritmReset: false }) + }) + + it('Should emit DSM stats to the agent when sending a message', done => { + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = 0 + // we should have 1 dsm stats points + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived += statsBuckets.Stats.length + }) + } + }) + expect(statsPointsReceived).to.be.at.least(1) + expect(agent.dsmStatsExist(agent, expectedProducerHash)).to.equal(true) + }, { timeoutMs: 10000 }).then(done, done) + + channel.assertQueue('testDSM', {}, (err, ok) => { + if (err) return done(err) + + channel.sendToQueue(ok.queue, Buffer.from('DSM pathway test')) + }) + }) + + it('Should emit DSM stats to the agent when receiving a message', done => { + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = 0 + // we should have 2 dsm stats points + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived += statsBuckets.Stats.length + }) + } + }) + expect(statsPointsReceived).to.be.at.least(1) + expect(agent.dsmStatsExist(agent, expectedConsumerHash)).to.equal(true) + }, { timeoutMs: 10000 }).then(done, done) + + channel.assertQueue('testDSM', {}, (err, ok) => { + if (err) return done(err) + + channel.consume(ok.queue, () => {}, {}, (err, ok) => { + if (err) done(err) + }) + }) + }) + + it('Should set pathway hash tag on a span when producing', (done) => { + channel.assertQueue('testDSM', {}, (err, ok) => { + if (err) return done(err) + + channel.sendToQueue(ok.queue, Buffer.from('dsm test')) + + let produceSpanMeta = {} + agent.use(traces => { + const span = traces[0][0] + + if (span.resource.startsWith('basic.publish')) { + produceSpanMeta = span.meta + } + + expect(produceSpanMeta).to.include({ + 'pathway.hash': expectedProducerHash + }) + }, { timeoutMs: 10000 }).then(done, done) + }) + }) + + it('Should set pathway hash tag on a span when consuming', (done) => { + channel.assertQueue('testDSM', {}, (err, ok) => { + if (err) return done(err) + + channel.consume(ok.queue, () => {}, {}, (err, ok) => { + if (err) return done(err) + + let consumeSpanMeta = {} + agent.use(traces => { + const span = traces[0][0] + + if (span.resource.startsWith('basic.deliver')) { + consumeSpanMeta = span.meta + } + + expect(consumeSpanMeta).to.include({ + 'pathway.hash': expectedConsumerHash + }) + }, { timeoutMs: 10000 }).then(done, done) + }) + }) + }) + }) }) describe('with configuration', () => { diff --git a/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js b/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js index 62f9b3a2f2d..92999d0b5b6 100644 --- a/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js @@ -2,7 +2,7 @@ 'use strict' const agent = require('../../dd-trace/test/plugins/agent') -const { setup, dsmStatsExist } = require('./spec_helpers') +const { setup } = require('./spec_helpers') const helpers = 
require('./kinesis_helpers') const { rawExpectedSchema } = require('./kinesis-naming') @@ -244,7 +244,7 @@ describe('Kinesis', function () { } }) expect(statsPointsReceived).to.be.at.least(1) - expect(dsmStatsExist(agent, expectedProducerHash)).to.equal(true) + expect(agent.dsmStatsExist(agent, expectedProducerHash)).to.equal(true) }).then(done, done) helpers.putTestRecord(kinesis, streamNameDSM, helpers.dataBuffer, (err, data) => { @@ -264,7 +264,7 @@ describe('Kinesis', function () { } }, { timeoutMs: 10000 }) expect(statsPointsReceived).to.be.at.least(2) - expect(dsmStatsExist(agent, expectedConsumerHash)).to.equal(true) + expect(agent.dsmStatsExist(agent, expectedConsumerHash)).to.equal(true) }, { timeoutMs: 10000 }).then(done, done) helpers.putTestRecord(kinesis, streamNameDSM, helpers.dataBuffer, (err, data) => { diff --git a/packages/datadog-plugin-aws-sdk/test/sns.spec.js b/packages/datadog-plugin-aws-sdk/test/sns.spec.js index 0761d64bbe3..8935af5e923 100644 --- a/packages/datadog-plugin-aws-sdk/test/sns.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/sns.spec.js @@ -3,7 +3,7 @@ const semver = require('semver') const agent = require('../../dd-trace/test/plugins/agent') -const { setup, dsmStatsExist } = require('./spec_helpers') +const { setup } = require('./spec_helpers') const { rawExpectedSchema } = require('./sns-naming') describe('Sns', () => { @@ -321,7 +321,7 @@ describe('Sns', () => { } }) expect(statsPointsReceived).to.be.at.least(1) - expect(dsmStatsExist(agent, expectedProducerHash)).to.equal(true) + expect(agent.dsmStatsExist(agent, expectedProducerHash)).to.equal(true) }).then(done, done) sns.subscribe(subParams, () => { @@ -341,7 +341,7 @@ describe('Sns', () => { } }) expect(statsPointsReceived).to.be.at.least(2) - expect(dsmStatsExist(agent, expectedConsumerHash)).to.equal(true) + expect(agent.dsmStatsExist(agent, expectedConsumerHash)).to.equal(true) }).then(done, done) sns.subscribe(subParams, () => { diff --git a/packages/datadog-plugin-aws-sdk/test/spec_helpers.js b/packages/datadog-plugin-aws-sdk/test/spec_helpers.js index c9f872e4b60..821f2486c23 100644 --- a/packages/datadog-plugin-aws-sdk/test/spec_helpers.js +++ b/packages/datadog-plugin-aws-sdk/test/spec_helpers.js @@ -15,24 +15,6 @@ const helpers = { delete process.env['AWS_SECRET_ACCESS_KEY'] delete process.env['AWS_ACCESS_KEY_ID'] }) - }, - - dsmStatsExist (agent, expectedHash) { - const dsmStats = agent.getDsmStats() - let hashFound = false - if (dsmStats.length !== 0) { - dsmStats.forEach((statsTimeBucket) => { - statsTimeBucket.Stats.forEach((statsBucket) => { - statsBucket.Stats.forEach((stats) => { - if (stats.Hash.toString() === expectedHash) { - hashFound = true - return hashFound - } - }) - }) - }) - } - return hashFound } } diff --git a/packages/datadog-plugin-aws-sdk/test/sqs.spec.js b/packages/datadog-plugin-aws-sdk/test/sqs.spec.js index 64671fb4932..b5b83287c5d 100644 --- a/packages/datadog-plugin-aws-sdk/test/sqs.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/sqs.spec.js @@ -1,7 +1,7 @@ 'use strict' const agent = require('../../dd-trace/test/plugins/agent') -const { setup, dsmStatsExist } = require('./spec_helpers') +const { setup } = require('./spec_helpers') const { rawExpectedSchema } = require('./sqs-naming') const queueName = 'SQS_QUEUE_NAME' @@ -409,7 +409,7 @@ describe('Plugin', () => { } }) expect(statsPointsReceived).to.be.at.least(1) - expect(dsmStatsExist(agent, expectedProducerHash)).to.equal(true) + expect(agent.dsmStatsExist(agent, expectedProducerHash)).to.equal(true) 
}).then(done, done) sqs.sendMessage({ MessageBody: 'test DSM', QueueUrl: QueueUrlDsm }, () => {}) @@ -427,7 +427,7 @@ describe('Plugin', () => { } }) expect(statsPointsReceived).to.be.at.least(2) - expect(dsmStatsExist(agent, expectedConsumerHash)).to.equal(true) + expect(agent.dsmStatsExist(agent, expectedConsumerHash)).to.equal(true) }).then(done, done) sqs.sendMessage({ MessageBody: 'test DSM', QueueUrl: QueueUrlDsm }, () => { diff --git a/packages/datadog-plugin-rhea/src/consumer.js b/packages/datadog-plugin-rhea/src/consumer.js index 1adece16fbd..267340b2e51 100644 --- a/packages/datadog-plugin-rhea/src/consumer.js +++ b/packages/datadog-plugin-rhea/src/consumer.js @@ -2,6 +2,7 @@ const ConsumerPlugin = require('../../dd-trace/src/plugins/consumer') const { storage } = require('../../datadog-core') +const { getAmqpMessageSize, CONTEXT_PROPAGATION_KEY } = require('../../dd-trace/src/datastreams/processor') class RheaConsumerPlugin extends ConsumerPlugin { static get id () { return 'rhea' } @@ -19,7 +20,7 @@ class RheaConsumerPlugin extends ConsumerPlugin { const name = getResourceNameFromMessage(msgObj) const childOf = extractTextMap(msgObj, this.tracer) - this.startSpan({ + const span = this.startSpan({ childOf, resource: name, type: 'worker', @@ -29,6 +30,15 @@ class RheaConsumerPlugin extends ConsumerPlugin { 'amqp.link.role': 'receiver' } }) + + if (this.config.dsmEnabled && msgObj.message) { + const payloadSize = getAmqpMessageSize( + { headers: msgObj.message.delivery_annotations, content: msgObj.message.body } + ) + this.tracer.decodeDataStreamsContext(msgObj.message.delivery_annotations[CONTEXT_PROPAGATION_KEY]) + this.tracer + .setCheckpoint(['direction:in', `topic:${name}`, 'type:rabbitmq'], span, payloadSize) + } } } diff --git a/packages/datadog-plugin-rhea/src/producer.js b/packages/datadog-plugin-rhea/src/producer.js index 332aff1276d..f79dd368e60 100644 --- a/packages/datadog-plugin-rhea/src/producer.js +++ b/packages/datadog-plugin-rhea/src/producer.js @@ -2,6 +2,8 @@ const { CLIENT_PORT_KEY } = require('../../dd-trace/src/constants') const ProducerPlugin = require('../../dd-trace/src/plugins/producer') +const { encodePathwayContext } = require('../../dd-trace/src/datastreams/pathway') +const { getAmqpMessageSize, CONTEXT_PROPAGATION_KEY } = require('../../dd-trace/src/datastreams/processor') class RheaProducerPlugin extends ProducerPlugin { static get id () { return 'rhea' } @@ -36,6 +38,15 @@ function addDeliveryAnnotations (msg, tracer, span) { msg.delivery_annotations = msg.delivery_annotations || {} tracer.inject(span, 'text_map', msg.delivery_annotations) + + if (tracer._config.dsmEnabled) { + const targetName = span.context()._tags['amqp.link.target.address'] + const payloadSize = getAmqpMessageSize({ content: msg.body, headers: msg.delivery_annotations }) + const dataStreamsContext = tracer + .setCheckpoint(['direction:out', `exchange:${targetName}`, 'type:rabbitmq'], span, payloadSize) + const pathwayCtx = encodePathwayContext(dataStreamsContext) + msg.delivery_annotations[CONTEXT_PROPAGATION_KEY] = pathwayCtx + } } } diff --git a/packages/datadog-plugin-rhea/test/index.spec.js b/packages/datadog-plugin-rhea/test/index.spec.js index 93148c01703..c7814cbcba2 100644 --- a/packages/datadog-plugin-rhea/test/index.spec.js +++ b/packages/datadog-plugin-rhea/test/index.spec.js @@ -8,8 +8,10 @@ const { expectedSchema, rawExpectedSchema } = require('./naming') describe('Plugin', () => { let tracer - describe('rhea', () => { - before(() => agent.load('rhea')) + 
describe('rhea', function () { + before(() => { + agent.load('rhea') + }) after(() => agent.close({ ritmReset: false })) withVersions('rhea', 'rhea', version => { @@ -46,6 +48,84 @@ describe('Plugin', () => { connection.open_receiver('amq.topic') }) + const expectedProducerHash = '15837999642856815456' + const expectedConsumerHash = '18403970455318595370' + + it('Should set pathway hash tag on a span when producing', (done) => { + let produceSpanMeta = {} + agent.use(traces => { + const span = traces[0][0] + + if (span.meta['span.kind'] === 'producer') { + produceSpanMeta = span.meta + } + + expect(produceSpanMeta).to.include({ + 'pathway.hash': expectedProducerHash + }) + }, { timeoutMs: 2000 }).then(done, done) + + context.sender.send({ body: 'hello from DSM' }) + }) + + it('Should set pathway hash tag on a span when consuming', (done) => { + context.sender.send({ body: 'hello from DSM' }) + + container.once('message', msg => { + let consumeSpanMeta = {} + agent.use(traces => { + const span = traces[0][0] + + if (span.meta['span.kind'] === 'consumer') { + consumeSpanMeta = span.meta + } + + expect(consumeSpanMeta).to.include({ + 'pathway.hash': expectedConsumerHash + }) + }, { timeoutMs: 2000 }).then(done, done) + }) + }) + + it('Should emit DSM stats to the agent when sending a message', done => { + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = 0 + // we should have 1 dsm stats points + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived += statsBuckets.Stats.length + }) + } + }, { timeoutMs: 2000 }) + expect(statsPointsReceived).to.be.at.least(1) + expect(agent.dsmStatsExist(agent, expectedProducerHash)).to.equal(true) + }).then(done, done) + + context.sender.send({ body: 'hello from DSM' }) + }) + + it('Should emit DSM stats to the agent when receiving a message', done => { + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = 0 + // we should have 2 dsm stats points + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived += statsBuckets.Stats.length + }) + } + }) + expect(statsPointsReceived).to.be.at.least(2) + expect(agent.dsmStatsExist(agent, expectedConsumerHash)).to.equal(true) + }, { timeoutMs: 2000 }).then(done, done) + + context.sender.send({ body: 'hello from DSM' }) + container.once('message', msg => { + msg.delivery.accept() + }) + }) + describe('sending a message', () => { withPeerService( () => tracer, diff --git a/packages/dd-trace/src/datastreams/processor.js b/packages/dd-trace/src/datastreams/processor.js index 4f04b7e0e3c..f3e18ed9865 100644 --- a/packages/dd-trace/src/datastreams/processor.js +++ b/packages/dd-trace/src/datastreams/processor.js @@ -153,6 +153,11 @@ function getMessageSize (message) { return getSizeOrZero(key) + getSizeOrZero(value) + getHeadersSize(headers) } +function getAmqpMessageSize (message) { + const { headers, content } = message + return getSizeOrZero(content) + getHeadersSize(headers) +} + class TimeBuckets extends Map { forTime (time) { if (!this.has(time)) { @@ -358,6 +363,7 @@ module.exports = { getMessageSize, getHeadersSize, getSizeOrZero, + getAmqpMessageSize, ENTRY_PARENT_HASH, CONTEXT_PROPAGATION_KEY } diff --git a/packages/dd-trace/test/plugins/agent.js b/packages/dd-trace/test/plugins/agent.js index 2896c7f2886..b47a6e828b1 100644 --- 
a/packages/dd-trace/test/plugins/agent.js +++ b/packages/dd-trace/test/plugins/agent.js @@ -38,6 +38,24 @@ function ciVisRequestHandler (request, response) { }) } +function dsmStatsExist (agent, expectedHash) { + const dsmStats = agent.getDsmStats() + let hashFound = false + if (dsmStats.length !== 0) { + dsmStats.forEach((statsTimeBucket) => { + statsTimeBucket.Stats.forEach((statsBucket) => { + statsBucket.Stats.forEach((stats) => { + if (stats.Hash.toString() === expectedHash) { + hashFound = true + return hashFound + } + }) + }) + }) + } + return hashFound +} + function addEnvironmentVariablesToHeaders (headers) { // get all environment variables that start with "DD_" const ddEnvVars = new Map( @@ -245,7 +263,6 @@ module.exports = { // DSM Checkpoint endpoint dsmStats = [] agent.post('/v0.1/pipeline_stats', (req, res) => { - // if (useTestAgent) res.redirect('http://127.0.0.1:9126/v0.1/pipeline_stats') dsmStats.push(req.body) statsHandlers.forEach(({ handler, spanResourceMatch }) => { handler(dsmStats) @@ -275,6 +292,7 @@ module.exports = { server.on('close', () => { tracer = null + dsmStats = [] }) tracer.init(Object.assign({}, { @@ -391,5 +409,6 @@ module.exports = { tracer, testedPlugins, - getDsmStats + getDsmStats, + dsmStatsExist } From 04ad59bb9c8f64ab6f8775d035aaf2b5b08a6230 Mon Sep 17 00:00:00 2001 From: Ugaitz Urien Date: Mon, 29 Jan 2024 17:20:41 +0100 Subject: [PATCH 05/44] Fix location in mysql vulnerability (#4006) * Fix location in mysql vulnerability * export logic to report evidence to reuse instead of copying * Fix lint * Improve refactor --- .../iast/analyzers/sql-injection-analyzer.js | 35 ++++--------------- .../iast/analyzers/vulnerability-analyzer.js | 16 +++++---- .../analyzers/sql-injection-analyzer.spec.js | 13 ++++--- 3 files changed, 25 insertions(+), 39 deletions(-) diff --git a/packages/dd-trace/src/appsec/iast/analyzers/sql-injection-analyzer.js b/packages/dd-trace/src/appsec/iast/analyzers/sql-injection-analyzer.js index a857839e175..4d302ece1b6 100644 --- a/packages/dd-trace/src/appsec/iast/analyzers/sql-injection-analyzer.js +++ b/packages/dd-trace/src/appsec/iast/analyzers/sql-injection-analyzer.js @@ -4,8 +4,6 @@ const InjectionAnalyzer = require('./injection-analyzer') const { SQL_INJECTION } = require('../vulnerabilities') const { getRanges } = require('../taint-tracking/operations') const { storage } = require('../../../../../datadog-core') -const { getIastContext } = require('../iast-context') -const { addVulnerability } = require('../vulnerability-reporter') const { getNodeModulesPaths } = require('../path-line') const EXCLUDED_PATHS = getNodeModulesPaths('mysql', 'mysql2', 'sequelize', 'pg-pool', 'knex') @@ -16,9 +14,9 @@ class SqlInjectionAnalyzer extends InjectionAnalyzer { } onConfigure () { - this.addSub('apm:mysql:query:start', ({ sql }) => this.analyze(sql, 'MYSQL')) - this.addSub('apm:mysql2:query:start', ({ sql }) => this.analyze(sql, 'MYSQL')) - this.addSub('apm:pg:query:start', ({ query }) => this.analyze(query.text, 'POSTGRES')) + this.addSub('apm:mysql:query:start', ({ sql }) => this.analyze(sql, undefined, 'MYSQL')) + this.addSub('apm:mysql2:query:start', ({ sql }) => this.analyze(sql, undefined, 'MYSQL')) + this.addSub('apm:pg:query:start', ({ query }) => this.analyze(query.text, undefined, 'POSTGRES')) this.addSub( 'datadog:sequelize:query:start', @@ -42,7 +40,7 @@ class SqlInjectionAnalyzer extends InjectionAnalyzer { getStoreAndAnalyze (query, dialect) { const parentStore = storage.getStore() if (parentStore) { - 
this.analyze(query, dialect, parentStore) + this.analyze(query, parentStore, dialect) storage.enterWith({ ...parentStore, sqlAnalyzed: true, sqlParentStore: parentStore }) } @@ -60,29 +58,10 @@ class SqlInjectionAnalyzer extends InjectionAnalyzer { return { value, ranges, dialect } } - analyze (value, dialect, store = storage.getStore()) { + analyze (value, store, dialect) { + store = store || storage.getStore() if (!(store && store.sqlAnalyzed)) { - const iastContext = getIastContext(store) - if (this._isInvalidContext(store, iastContext)) return - this._reportIfVulnerable(value, iastContext, dialect) - } - } - - _reportIfVulnerable (value, context, dialect) { - if (this._isVulnerable(value, context) && this._checkOCE(context)) { - this._report(value, context, dialect) - return true - } - return false - } - - _report (value, context, dialect) { - const evidence = this._getEvidence(value, context, dialect) - const location = this._getLocation() - if (!this._isExcluded(location)) { - const spanId = context && context.rootSpan && context.rootSpan.context().toSpanId() - const vulnerability = this._createVulnerability(this._type, evidence, spanId, location) - addVulnerability(context, vulnerability) + super.analyze(value, store, dialect) } } diff --git a/packages/dd-trace/src/appsec/iast/analyzers/vulnerability-analyzer.js b/packages/dd-trace/src/appsec/iast/analyzers/vulnerability-analyzer.js index 1f52790300d..f79e7a44f71 100644 --- a/packages/dd-trace/src/appsec/iast/analyzers/vulnerability-analyzer.js +++ b/packages/dd-trace/src/appsec/iast/analyzers/vulnerability-analyzer.js @@ -22,8 +22,12 @@ class Analyzer extends SinkIastPlugin { return false } - _report (value, context) { - const evidence = this._getEvidence(value, context) + _report (value, context, meta) { + const evidence = this._getEvidence(value, context, meta) + this._reportEvidence(value, context, evidence) + } + + _reportEvidence (value, context, evidence) { const location = this._getLocation(value) if (!this._isExcluded(location)) { const locationSourceMap = this._replaceLocationFromSourceMap(location) @@ -33,9 +37,9 @@ class Analyzer extends SinkIastPlugin { } } - _reportIfVulnerable (value, context) { + _reportIfVulnerable (value, context, meta) { if (this._isVulnerable(value, context) && this._checkOCE(context, value)) { - this._report(value, context) + this._report(value, context, meta) return true } return false @@ -71,11 +75,11 @@ class Analyzer extends SinkIastPlugin { return store && !iastContext } - analyze (value, store = storage.getStore()) { + analyze (value, store = storage.getStore(), meta) { const iastContext = getIastContext(store) if (this._isInvalidContext(store, iastContext)) return - this._reportIfVulnerable(value, iastContext) + this._reportIfVulnerable(value, iastContext, meta) } analyzeAll (...values) { diff --git a/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.spec.js index 23b40545401..8b93f492e62 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.spec.js @@ -78,6 +78,9 @@ describe('sql-injection-analyzer', () => { }, '../overhead-controller': { hasQuota: () => true } }) + sinon.stub(ProxyAnalyzer.prototype, '_reportEvidence') + const reportEvidence = ProxyAnalyzer.prototype._reportEvidence + const InjectionAnalyzer = proxyquire('../../../../src/appsec/iast/analyzers/injection-analyzer', { 
'../taint-tracking/operations': TaintTrackingMock, './vulnerability-analyzer': ProxyAnalyzer @@ -91,11 +94,11 @@ describe('sql-injection-analyzer', () => { }, '../vulnerability-reporter': { addVulnerability } }) - proxiedSqlInjectionAnalyzer.analyze(TAINTED_QUERY, dialect) - expect(addVulnerability).to.have.been.calledOnce - expect(addVulnerability).to.have.been.calledWithMatch({}, { - type: 'SQL_INJECTION', - evidence: { dialect: dialect } + proxiedSqlInjectionAnalyzer.analyze(TAINTED_QUERY, undefined, dialect) + expect(reportEvidence).to.have.been.calledOnce + expect(reportEvidence).to.have.been.calledWithMatch(TAINTED_QUERY, {}, { + value: TAINTED_QUERY, + dialect }) }) From 11b1d231b1e4613a1f420a13a34830a9fd6428bc Mon Sep 17 00:00:00 2001 From: Julio Gonzalez <107922352+hoolioh@users.noreply.github.com> Date: Tue, 30 Jan 2024 11:00:04 +0100 Subject: [PATCH 06/44] Shell execution integration (#3608) --------- Co-authored-by: Ugaitz Urien Co-authored-by: Igor Unanua --- .github/workflows/plugins.yml | 16 + LICENSE-3rdparty.csv | 1 + ci/init.js | 1 + package.json | 1 + .../src/child-process.js | 29 - .../src/child_process.js | 150 +++++ .../src/helpers/hooks.js | 4 +- .../test/child_process.spec.js | 379 ++++++++++++ .../datadog-plugin-child_process/src/index.js | 91 +++ .../src/scrub-cmd-params.js | 125 ++++ .../test/index.spec.js | 575 ++++++++++++++++++ .../test/scrub-cmd-params.spec.js | 79 +++ .../analyzers/command-injection-analyzer.js | 2 +- .../dd-trace/src/appsec/iast/iast-plugin.js | 5 +- packages/dd-trace/src/plugins/index.js | 1 + packages/dd-trace/src/plugins/util/exec.js | 34 -- packages/dd-trace/src/plugins/util/git.js | 6 + .../test/appsec/iast/iast-plugin.spec.js | 18 +- yarn.lock | 2 +- 19 files changed, 1450 insertions(+), 69 deletions(-) delete mode 100644 packages/datadog-instrumentations/src/child-process.js create mode 100644 packages/datadog-instrumentations/src/child_process.js create mode 100644 packages/datadog-instrumentations/test/child_process.spec.js create mode 100644 packages/datadog-plugin-child_process/src/index.js create mode 100644 packages/datadog-plugin-child_process/src/scrub-cmd-params.js create mode 100644 packages/datadog-plugin-child_process/test/index.spec.js create mode 100644 packages/datadog-plugin-child_process/test/scrub-cmd-params.spec.js delete mode 100644 packages/dd-trace/src/plugins/util/exec.js diff --git a/.github/workflows/plugins.yml b/.github/workflows/plugins.yml index df537f0b487..b98e6db18a6 100644 --- a/.github/workflows/plugins.yml +++ b/.github/workflows/plugins.yml @@ -321,6 +321,22 @@ jobs: uses: ./.github/actions/testagent/logs - uses: codecov/codecov-action@v3 + child_process: + runs-on: ubuntu-latest + env: + PLUGINS: child_process + steps: + - uses: actions/checkout@v2 + - uses: ./.github/actions/node/setup + - run: yarn install + - uses: ./.github/actions/node/oldest + - run: yarn test:plugins:ci + - uses: ./.github/actions/node/20 + - run: yarn test:plugins:ci + - uses: ./.github/actions/node/latest + - run: yarn test:plugins:ci + - uses: codecov/codecov-action@v2 + couchbase: runs-on: ubuntu-latest services: diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index 6fcac2fa10f..467bc7a4feb 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -30,6 +30,7 @@ require,protobufjs,BSD-3-Clause,Copyright 2016 Daniel Wirtz require,tlhunter-sorted-set,MIT,Copyright (c) 2023 Datadog Inc. require,retry,MIT,Copyright 2011 Tim Koschützki Felix Geisendörfer require,semver,ISC,Copyright Isaac Z. 
Schlueter and Contributors +require,shell-quote,mit,Copyright (c) 2013 James Halliday dev,@types/node,MIT,Copyright Authors dev,autocannon,MIT,Copyright 2016 Matteo Collina dev,aws-sdk,Apache 2.0,Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. diff --git a/ci/init.js b/ci/init.js index b6f0ba9961b..81849b0e1e1 100644 --- a/ci/init.js +++ b/ci/init.js @@ -44,6 +44,7 @@ if (isJestWorker) { if (shouldInit) { tracer.init(options) tracer.use('fs', false) + tracer.use('child_process', false) } module.exports = tracer diff --git a/package.json b/package.json index 5445795e78e..c99684316bd 100644 --- a/package.json +++ b/package.json @@ -99,6 +99,7 @@ "protobufjs": "^7.2.5", "retry": "^0.13.1", "semver": "^7.5.4", + "shell-quote": "^1.8.1", "tlhunter-sorted-set": "^0.1.0" }, "devDependencies": { diff --git a/packages/datadog-instrumentations/src/child-process.js b/packages/datadog-instrumentations/src/child-process.js deleted file mode 100644 index 3dca938ed42..00000000000 --- a/packages/datadog-instrumentations/src/child-process.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -const { - channel, - addHook -} = require('./helpers/instrument') -const shimmer = require('../../datadog-shimmer') - -const childProcessChannel = channel('datadog:child_process:execution:start') -const execMethods = ['exec', 'execFile', 'fork', 'spawn', 'execFileSync', 'execSync', 'spawnSync'] -const names = ['child_process', 'node:child_process'] - -addHook({ name: names }, childProcess => { - shimmer.massWrap(childProcess, execMethods, wrapChildProcessMethod()) - return childProcess -}) - -function wrapChildProcessMethod () { - function wrapMethod (childProcessMethod) { - return function () { - if (childProcessChannel.hasSubscribers && arguments.length > 0) { - const command = arguments[0] - childProcessChannel.publish({ command }) - } - return childProcessMethod.apply(this, arguments) - } - } - return wrapMethod -} diff --git a/packages/datadog-instrumentations/src/child_process.js b/packages/datadog-instrumentations/src/child_process.js new file mode 100644 index 00000000000..61eddc47049 --- /dev/null +++ b/packages/datadog-instrumentations/src/child_process.js @@ -0,0 +1,150 @@ +'use strict' + +const util = require('util') + +const { + addHook, + AsyncResource +} = require('./helpers/instrument') +const shimmer = require('../../datadog-shimmer') +const dc = require('dc-polyfill') + +const childProcessChannel = dc.tracingChannel('datadog:child_process:execution') + +// ignored exec method because it calls to execFile directly +const execAsyncMethods = ['execFile', 'spawn'] +const execSyncMethods = ['execFileSync', 'spawnSync'] + +const names = ['child_process', 'node:child_process'] + +// child_process and node:child_process returns the same object instance, we only want to add hooks once +let patched = false +names.forEach(name => { + addHook({ name }, childProcess => { + if (!patched) { + patched = true + shimmer.massWrap(childProcess, execAsyncMethods, wrapChildProcessAsyncMethod()) + shimmer.massWrap(childProcess, execSyncMethods, wrapChildProcessSyncMethod()) + shimmer.wrap(childProcess, 'execSync', wrapChildProcessSyncMethod(true)) + } + + return childProcess + }) +}) + +function normalizeArgs (args, shell) { + const childProcessInfo = { + command: args[0] + } + + if (Array.isArray(args[1])) { + childProcessInfo.command = childProcessInfo.command + ' ' + args[1].join(' ') + if (args[2] != null && typeof args[2] === 'object') { + childProcessInfo.options = args[2] + } + } else if 
(args[1] != null && typeof args[1] === 'object') { + childProcessInfo.options = args[1] + } + childProcessInfo.shell = shell || + childProcessInfo.options?.shell === true || + typeof childProcessInfo.options?.shell === 'string' + + return childProcessInfo +} + +function wrapChildProcessSyncMethod (shell = false) { + return function wrapMethod (childProcessMethod) { + return function () { + if (!childProcessChannel.start.hasSubscribers || arguments.length === 0) { + return childProcessMethod.apply(this, arguments) + } + + const childProcessInfo = normalizeArgs(arguments, shell) + + return childProcessChannel.traceSync( + childProcessMethod, + { + command: childProcessInfo.command, + shell: childProcessInfo.shell + }, + this, + ...arguments) + } + } +} + +function wrapChildProcessCustomPromisifyMethod (customPromisifyMethod, shell) { + return function () { + if (!childProcessChannel.start.hasSubscribers || arguments.length === 0) { + return customPromisifyMethod.apply(this, arguments) + } + + const childProcessInfo = normalizeArgs(arguments, shell) + + return childProcessChannel.tracePromise( + customPromisifyMethod, + { + command: childProcessInfo.command, + shell: childProcessInfo.shell + }, + this, + ...arguments) + } +} + +function wrapChildProcessAsyncMethod (shell = false) { + return function wrapMethod (childProcessMethod) { + function wrappedChildProcessMethod () { + if (!childProcessChannel.start.hasSubscribers || arguments.length === 0) { + return childProcessMethod.apply(this, arguments) + } + + const childProcessInfo = normalizeArgs(arguments, shell) + + const innerResource = new AsyncResource('bound-anonymous-fn') + return innerResource.runInAsyncScope(() => { + childProcessChannel.start.publish({ command: childProcessInfo.command, shell: childProcessInfo.shell }) + + const childProcess = childProcessMethod.apply(this, arguments) + if (childProcess) { + let errorExecuted = false + + childProcess.on('error', (e) => { + errorExecuted = true + childProcessChannel.error.publish(e) + }) + + childProcess.on('close', (code) => { + code = code || 0 + if (!errorExecuted && code !== 0) { + childProcessChannel.error.publish() + } + childProcessChannel.asyncEnd.publish({ + command: childProcessInfo.command, + shell: childProcessInfo.shell, + result: code + }) + }) + } + + return childProcess + }) + } + + if (childProcessMethod[util.promisify.custom]) { + const wrapedChildProcessCustomPromisifyMethod = + shimmer.wrap(childProcessMethod[util.promisify.custom], + wrapChildProcessCustomPromisifyMethod(childProcessMethod[util.promisify.custom]), shell) + + // should do it in this way because the original property is readonly + const descriptor = Object.getOwnPropertyDescriptor(childProcessMethod, util.promisify.custom) + Object.defineProperty(wrappedChildProcessMethod, + util.promisify.custom, + { + ...descriptor, + value: wrapedChildProcessCustomPromisifyMethod + }) + } + return wrappedChildProcessMethod + } +} diff --git a/packages/datadog-instrumentations/src/helpers/hooks.js b/packages/datadog-instrumentations/src/helpers/hooks.js index 2f27be5af25..2d50e3365c3 100644 --- a/packages/datadog-instrumentations/src/helpers/hooks.js +++ b/packages/datadog-instrumentations/src/helpers/hooks.js @@ -30,7 +30,7 @@ module.exports = { 'body-parser': () => require('../body-parser'), 'bunyan': () => require('../bunyan'), 'cassandra-driver': () => require('../cassandra-driver'), - 'child_process': () => require('../child-process'), + 'child_process': () => require('../child_process'), 'connect': () => 
require('../connect'), 'cookie': () => require('../cookie'), 'cookie-parser': () => require('../cookie-parser'), @@ -78,7 +78,7 @@ module.exports = { 'mysql2': () => require('../mysql2'), 'net': () => require('../net'), 'next': () => require('../next'), - 'node:child_process': () => require('../child-process'), + 'node:child_process': () => require('../child_process'), 'node:crypto': () => require('../crypto'), 'node:dns': () => require('../dns'), 'node:http': () => require('../http'), diff --git a/packages/datadog-instrumentations/test/child_process.spec.js b/packages/datadog-instrumentations/test/child_process.spec.js new file mode 100644 index 00000000000..c4ab71dbde5 --- /dev/null +++ b/packages/datadog-instrumentations/test/child_process.spec.js @@ -0,0 +1,379 @@ +'use strict' + +const { promisify } = require('util') +const agent = require('../../dd-trace/test/plugins/agent') +const dc = require('dc-polyfill') + +describe('child process', () => { + const modules = ['child_process', 'node:child_process'] + const execAsyncMethods = ['execFile', 'spawn'] + const execAsyncShellMethods = ['exec'] + const execSyncMethods = ['execFileSync'] + const execSyncShellMethods = ['execSync'] + + const childProcessChannel = dc.tracingChannel('datadog:child_process:execution') + + modules.forEach((childProcessModuleName) => { + describe(childProcessModuleName, () => { + let start, finish, error, childProcess, asyncFinish + + before(() => { + return agent.load(childProcessModuleName) + }) + + after(() => { + return agent.close({ ritmReset: false }) + }) + + beforeEach(() => { + start = sinon.stub() + finish = sinon.stub() + error = sinon.stub() + asyncFinish = sinon.stub() + + childProcessChannel.subscribe({ + start: start, + end: finish, + asyncEnd: asyncFinish, + error: error + }) + + childProcess = require(childProcessModuleName) + }) + + afterEach(() => { + childProcessChannel.unsubscribe({ + start: start, + end: finish, + asyncEnd: asyncFinish, + error: error + }) + }) + + describe('async methods', (done) => { + describe('command not interpreted by a shell by default', () => { + execAsyncMethods.forEach(methodName => { + describe(`method ${methodName}`, () => { + it('should execute success callbacks', (done) => { + const childEmitter = childProcess[methodName]('ls') + + childEmitter.once('close', () => { + expect(start).to.have.been.calledOnceWith({ command: 'ls', shell: false }) + expect(asyncFinish).to.have.been.calledOnceWith({ command: 'ls', shell: false, result: 0 }) + expect(error).not.to.have.been.called + done() + }) + }) + + it('should execute error callback', (done) => { + const childEmitter = childProcess[methodName]('invalid_command_test') + + childEmitter.once('close', () => { + expect(start).to.have.been.calledOnceWith({ command: 'invalid_command_test', shell: false }) + expect(asyncFinish).to.have.been.calledOnceWith({ + command: 'invalid_command_test', + shell: false, + result: -2 + }) + expect(error).to.have.been.calledOnce + done() + }) + }) + + it('should execute error callback with `exit 1` command', (done) => { + const childEmitter = childProcess[methodName]('node -e "process.exit(1)"', { shell: true }) + + childEmitter.once('close', () => { + expect(start).to.have.been.calledOnceWith({ command: 'node -e "process.exit(1)"', shell: true }) + expect(asyncFinish).to.have.been.calledOnceWith({ + command: 'node -e "process.exit(1)"', + shell: true, + result: 1 + }) + expect(error).to.have.been.calledOnce + done() + }) + }) + }) + + if (methodName !== 'spawn') { + describe(`method 
${methodName} with promisify`, () => { + it('should execute success callbacks', async () => { + await promisify(childProcess[methodName])('echo') + expect(start.firstCall.firstArg).to.include({ + command: 'echo', + shell: false + }) + + expect(asyncFinish).to.have.been.calledOnceWith({ + command: 'echo', + shell: false, + result: { + stdout: '\n', + stderr: '' + } + }) + expect(error).not.to.have.been.called + }) + + it('should execute error callback', async () => { + try { + await promisify(childProcess[methodName])('invalid_command_test') + } catch (e) { + expect(start).to.have.been.calledOnce + expect(start.firstCall.firstArg).to.include({ command: 'invalid_command_test', shell: false }) + + const errStub = new Error('spawn invalid_command_test ENOENT') + errStub.code = 'ENOENT' + errStub.errno = -2 + + expect(asyncFinish).to.have.been.calledOnce + expect(asyncFinish.firstCall.firstArg).to.include({ command: 'invalid_command_test', shell: false }) + expect(asyncFinish.firstCall.firstArg).to.deep.include({ + command: 'invalid_command_test', + shell: false, + error: errStub + }) + + expect(error).to.have.been.calledOnce + } + }) + + it('should execute error callback with `exit 1` command', async () => { + const errStub = new Error('Command failed: node -e "process.exit(1)"\n') + errStub.code = 1 + errStub.cmd = 'node -e "process.exit(1)"' + + try { + await promisify(childProcess[methodName])('node -e "process.exit(1)"', { shell: true }) + } catch (e) { + expect(start).to.have.been.calledOnce + expect(start.firstCall.firstArg).to.include({ command: 'node -e "process.exit(1)"', shell: true }) + + expect(asyncFinish).to.have.been.calledOnce + expect(asyncFinish.firstCall.firstArg).to.include({ + command: 'node -e "process.exit(1)"', + shell: true + }) + expect(asyncFinish.firstCall.firstArg).to.deep.include({ + command: 'node -e "process.exit(1)"', + shell: true, + error: errStub + }) + + expect(error).to.have.been.calledOnce + } + }) + }) + } + }) + }) + + describe('command interpreted by a shell by default', () => { + execAsyncShellMethods.forEach(methodName => { + describe(`method ${methodName}`, () => { + it('should execute success callbacks', (done) => { + const res = childProcess[methodName]('ls') + + res.once('close', () => { + expect(start).to.have.been.calledOnceWith({ command: 'ls', shell: true }) + expect(asyncFinish).to.have.been.calledOnceWith({ command: 'ls', shell: true, result: 0 }) + expect(error).not.to.have.been.called + done() + }) + }) + + it('should execute error callback with `exit 1` command', (done) => { + const res = childProcess[methodName]('node -e "process.exit(1)"') + + res.once('close', () => { + expect(start).to.have.been.calledOnceWith({ command: 'node -e "process.exit(1)"', shell: true }) + expect(asyncFinish).to.have.been.calledOnceWith({ + command: 'node -e "process.exit(1)"', + shell: true, + result: 1 + }) + expect(error).to.have.been.called + done() + }) + }) + + it('should execute error callback', (done) => { + const res = childProcess[methodName]('invalid_command_test') + + res.once('close', () => { + expect(start).to.have.been.calledOnceWith({ command: 'invalid_command_test', shell: true }) + expect(error).to.have.been.calledOnce + expect(asyncFinish).to.have.been.calledOnceWith({ + command: 'invalid_command_test', + shell: true, + result: 127 + }) + done() + }) + }) + }) + + describe(`method ${methodName} with promisify`, () => { + it('should execute success callbacks', async () => { + await promisify(childProcess[methodName])('echo') + 
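+ // exec itself is not wrapped (it delegates to the wrapped execFile), so the promisified call still reports the numeric exit code through asyncEnd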
expect(start).to.have.been.calledOnceWith({ + command: 'echo', + shell: true + }) + expect(asyncFinish).to.have.been.calledOnceWith({ + command: 'echo', + shell: true, + result: 0 + }) + expect(error).not.to.have.been.called + }) + + it('should execute error callback', async () => { + try { + await promisify(childProcess[methodName])('invalid_command_test') + return Promise.reject(new Error('Command expected to fail')) + } catch (e) { + expect(start).to.have.been.calledOnceWith({ command: 'invalid_command_test', shell: true }) + expect(asyncFinish).to.have.been.calledOnce + expect(error).to.have.been.calledOnce + } + }) + + it('should execute error callback with `exit 1` command', async () => { + try { + await promisify(childProcess[methodName])('node -e "process.exit(1)"') + return Promise.reject(new Error('Command expected to fail')) + } catch (e) { + expect(start).to.have.been.calledOnceWith({ command: 'node -e "process.exit(1)"', shell: true }) + expect(asyncFinish).to.have.been.calledOnceWith({ + command: 'node -e "process.exit(1)"', + shell: true, + result: 1 + }) + expect(error).to.have.been.calledOnce + } + }) + }) + }) + }) + }) + + describe('sync methods', () => { + describe('command not interpreted by a shell', () => { + execSyncMethods.forEach(methodName => { + describe(`method ${methodName}`, () => { + it('should execute success callbacks', () => { + const result = childProcess[methodName]('ls') + + expect(start).to.have.been.calledOnceWith({ + command: 'ls', + shell: false, + result: result + }, + 'tracing:datadog:child_process:execution:start') + + expect(finish).to.have.been.calledOnceWith({ + command: 'ls', + shell: false, + result: result + }, + 'tracing:datadog:child_process:execution:end') + + expect(error).not.to.have.been.called + }) + + it('should execute error callback', () => { + let childError + try { + childProcess[methodName]('invalid_command_test') + } catch (error) { + childError = error + } finally { + expect(start).to.have.been.calledOnceWith({ + command: 'invalid_command_test', + shell: false, + error: childError + }) + expect(finish).to.have.been.calledOnce + expect(error).to.have.been.calledOnce + } + }) + + it('should execute error callback with `exit 1` command', () => { + let childError + try { + childProcess[methodName]('node -e "process.exit(1)"', { shell: true }) + } catch (error) { + childError = error + } finally { + expect(start).to.have.been.calledOnceWith({ + command: 'node -e "process.exit(1)"', + shell: true, + error: childError + }) + expect(finish).to.have.been.calledOnce + } + }) + }) + }) + }) + + describe('command interpreted by a shell by default', () => { + execSyncShellMethods.forEach(methodName => { + describe(`method ${methodName}`, () => { + it('should execute success callbacks', () => { + const result = childProcess[methodName]('ls') + + expect(start).to.have.been.calledOnceWith({ + command: 'ls', + shell: true, + result + }) + expect(finish).to.have.been.calledOnceWith({ + command: 'ls', + shell: true, + result + }) + expect(error).not.to.have.been.called + }) + + it('should execute error callback', () => { + let childError + try { + childProcess[methodName]('invalid_command_test') + } catch (error) { + childError = error + } finally { + expect(start).to.have.been.calledOnceWith({ + command: 'invalid_command_test', + shell: true, + error: childError + }) + expect(finish).to.have.been.calledOnce + expect(error).to.have.been.calledOnce + } + }) + + it('should execute error callback with `exit 1` command', () => { + let childError + 
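+ // execFileSync throws on a non-zero exit code, so the error is captured and asserted against the published payload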
try { + childProcess[methodName]('node -e "process.exit(1)"') + } catch (error) { + childError = error + } finally { + expect(start).to.have.been.calledOnceWith({ + command: 'node -e "process.exit(1)"', + shell: true, + error: childError + }) + expect(finish).to.have.been.calledOnce + } + }) + }) + }) + }) + }) + }) + }) +}) diff --git a/packages/datadog-plugin-child_process/src/index.js b/packages/datadog-plugin-child_process/src/index.js new file mode 100644 index 00000000000..b28e242f056 --- /dev/null +++ b/packages/datadog-plugin-child_process/src/index.js @@ -0,0 +1,91 @@ +'use strict' + +const TracingPlugin = require('../../dd-trace/src/plugins/tracing') +const scrubChildProcessCmd = require('./scrub-cmd-params') + +const MAX_ARG_SIZE = 4096 // 4kB + +function truncateCommand (cmdFields) { + let size = cmdFields[0].length + let truncated = false + for (let i = 1; i < cmdFields.length; i++) { + if (size >= MAX_ARG_SIZE) { + truncated = true + cmdFields[i] = '' + continue + } + + const argLen = cmdFields[i].length + if (size < MAX_ARG_SIZE && size + argLen > MAX_ARG_SIZE) { + cmdFields[i] = cmdFields[i].substring(0, 2) + truncated = true + } + + size += argLen + } + + return truncated +} + +class ChildProcessPlugin extends TracingPlugin { + static get id () { return 'child_process' } + static get prefix () { return 'tracing:datadog:child_process:execution' } + + get tracer () { + return this._tracer + } + + start ({ command, shell }) { + if (typeof command !== 'string') { + return + } + + const cmdFields = scrubChildProcessCmd(command) + const truncated = truncateCommand(cmdFields) + const property = (shell === true) ? 'cmd.shell' : 'cmd.exec' + + const meta = { + 'component': 'subprocess', + [property]: (shell === true) ? cmdFields.join(' ') : JSON.stringify(cmdFields) + } + + if (truncated) { + meta['cmd.truncated'] = `${truncated}` + } + + this.startSpan('command_execution', { + service: this.config.service, + resource: (shell === true) ? 'sh' : cmdFields[0], + type: 'system', + meta + }) + } + + end ({ result, error }) { + let exitCode + + if (result !== undefined) { + exitCode = result?.status || 0 + } else if (error !== undefined) { + exitCode = error?.status || error?.code || 0 + } else { + // TracingChannels call start, end synchronously. Later when the promise is resolved then asyncStart asyncEnd. + // Therefore in the case of calling end with neither result nor error means that they will come in the asyncEnd. 
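+ // In that case the span is left open here and finished later in asyncEnd(), once the command has completed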
+ return + } + + this.activeSpan?.setTag('cmd.exit_code', `${exitCode}`) + this.activeSpan?.finish() + } + + error (error) { + this.addError(error) + } + + asyncEnd ({ result }) { + this.activeSpan?.setTag('cmd.exit_code', `${result}`) + this.activeSpan?.finish() + } +} + +module.exports = ChildProcessPlugin diff --git a/packages/datadog-plugin-child_process/src/scrub-cmd-params.js b/packages/datadog-plugin-child_process/src/scrub-cmd-params.js new file mode 100644 index 00000000000..3f5d85574e3 --- /dev/null +++ b/packages/datadog-plugin-child_process/src/scrub-cmd-params.js @@ -0,0 +1,125 @@ +'use strict' + +const shellParser = require('shell-quote/parse') + +const ALLOWED_ENV_VARIABLES = ['LD_PRELOAD', 'LD_LIBRARY_PATH', 'PATH'] +const PROCESS_DENYLIST = ['md5'] + +const VARNAMES_REGEX = /\$([\w\d_]*)(?:[^\w\d_]|$)/gmi +// eslint-disable-next-line max-len +const PARAM_PATTERN = '^-{0,2}(?:p(?:ass(?:w(?:or)?d)?)?|api_?key|secret|a(?:ccess|uth)_token|mysql_pwd|credentials|(?:stripe)?token)$' +const regexParam = new RegExp(PARAM_PATTERN, 'i') +const ENV_PATTERN = '^(\\w+=\\w+;)*\\w+=\\w+;?$' +const envvarRegex = new RegExp(ENV_PATTERN) +const REDACTED = '?' + +function extractVarNames (expression) { + const varNames = new Set() + let match + + while ((match = VARNAMES_REGEX.exec(expression))) { + varNames.add(match[1]) + } + + const varNamesObject = {} + for (const varName of varNames.keys()) { + varNamesObject[varName] = `$${varName}` + } + return varNamesObject +} + +function getTokensByExpression (expressionTokens) { + const expressionListTokens = [] + let wipExpressionTokens = [] + let isNewExpression = true + + expressionTokens.forEach(token => { + if (isNewExpression) { + expressionListTokens.push(wipExpressionTokens) + isNewExpression = false + } + + wipExpressionTokens.push(token) + + if (token.op) { + wipExpressionTokens = [] + isNewExpression = true + } + }) + return expressionListTokens +} + +function scrubChildProcessCmd (expression) { + const varNames = extractVarNames(expression) + const expressionTokens = shellParser(expression, varNames) + + const expressionListTokens = getTokensByExpression(expressionTokens) + + const result = [] + expressionListTokens.forEach((expressionTokens) => { + let foundBinary = false + for (let index = 0; index < expressionTokens.length; index++) { + const token = expressionTokens[index] + + if (typeof token === 'object') { + if (token.pattern) { + result.push(token.pattern) + } else if (token.op) { + result.push(token.op) + } else if (token.comment) { + result.push(`#${token.comment}`) + } + } else if (!foundBinary) { + if (envvarRegex.test(token)) { + const envSplit = token.split('=') + + if (!ALLOWED_ENV_VARIABLES.includes(envSplit[0])) { + envSplit[1] = REDACTED + + const newToken = envSplit.join('=') + expressionTokens[index] = newToken + + result.push(newToken) + } else { + result.push(token) + } + } else { + foundBinary = true + result.push(token) + + if (PROCESS_DENYLIST.includes(token)) { + for (index++; index < expressionTokens.length; index++) { + const token = expressionTokens[index] + + if (token.op) { + result.push(token.op) + } else { + expressionTokens[index] = REDACTED + result.push(REDACTED) + } + } + break + } + } + } else { + const paramKeyValue = token.split('=') + const paramKey = paramKeyValue[0] + + if (regexParam.test(paramKey)) { + if (paramKeyValue.length === 1) { + expressionTokens[index + 1] = REDACTED + result.push(token) + } else { + result.push(`${paramKey}=${REDACTED}`) + } + } else { + result.push(token) + } + } + 
} + }) + + return result +} + +module.exports = scrubChildProcessCmd diff --git a/packages/datadog-plugin-child_process/test/index.spec.js b/packages/datadog-plugin-child_process/test/index.spec.js new file mode 100644 index 00000000000..1f56fe26538 --- /dev/null +++ b/packages/datadog-plugin-child_process/test/index.spec.js @@ -0,0 +1,575 @@ +'use strict' + +const ChildProcessPlugin = require('../src') +const { storage } = require('../../datadog-core') +const agent = require('../../dd-trace/test/plugins/agent') +const { expectSomeSpan } = require('../../dd-trace/test/plugins/helpers') + +function noop () {} + +function normalizeArgs (methodName, command, options) { + const args = [] + if (methodName === 'exec' || methodName === 'execSync') { + args.push(command.join(' ')) + } else { + args.push(command[0], command.slice(1)) + } + + args.push(options) + + return args +} + +describe('Child process plugin', () => { + describe('unit tests', () => { + let tracerStub, configStub, spanStub + + beforeEach(() => { + spanStub = { + setTag: sinon.stub(), + finish: sinon.stub() + } + + tracerStub = { + startSpan: sinon.stub() + } + }) + + afterEach(() => { + sinon.restore() + }) + + describe('start', () => { + it('should call startSpan with proper parameters', () => { + const shellPlugin = new ChildProcessPlugin(tracerStub, configStub) + + shellPlugin.start({ command: 'ls -l' }) + + expect(tracerStub.startSpan).to.have.been.calledOnceWithExactly( + 'command_execution', + { + childOf: undefined, + tags: { + component: 'subprocess', + 'service.name': undefined, + 'resource.name': 'ls', + 'span.kind': undefined, + 'span.type': 'system', + 'cmd.exec': JSON.stringify([ 'ls', '-l' ]) + }, + integrationName: 'system' + } + ) + }) + + it('should call startSpan with cmd.shell property', () => { + const shellPlugin = new ChildProcessPlugin(tracerStub, configStub) + + shellPlugin.start({ command: 'ls -l', shell: true }) + + expect(tracerStub.startSpan).to.have.been.calledOnceWithExactly( + 'command_execution', + { + childOf: undefined, + tags: { + component: 'subprocess', + 'service.name': undefined, + 'resource.name': 'sh', + 'span.kind': undefined, + 'span.type': 'system', + 'cmd.shell': 'ls -l' + }, + integrationName: 'system' + } + ) + }) + + it('should truncate last argument', () => { + const shellPlugin = new ChildProcessPlugin(tracerStub, configStub) + const arg = 'a'.padEnd(4092, 'a') + const command = 'echo' + ' ' + arg + ' arg2' + + shellPlugin.start({ command }) + + expect(tracerStub.startSpan).to.have.been.calledOnceWithExactly( + 'command_execution', + { + childOf: undefined, + tags: { + component: 'subprocess', + 'service.name': undefined, + 'resource.name': 'echo', + 'span.kind': undefined, + 'span.type': 'system', + 'cmd.exec': JSON.stringify([ 'echo', arg, '' ]), + 'cmd.truncated': 'true' + }, + integrationName: 'system' + } + ) + }) + + it('should truncate path and blank last argument', () => { + const shellPlugin = new ChildProcessPlugin(tracerStub, configStub) + const path = '/home/'.padEnd(4096, '/') + const command = 'ls -l' + ' ' + path + ' -t' + + shellPlugin.start({ command, shell: true }) + + expect(tracerStub.startSpan).to.have.been.calledOnceWithExactly( + 'command_execution', + { + childOf: undefined, + tags: { + component: 'subprocess', + 'service.name': undefined, + 'resource.name': 'sh', + 'span.kind': undefined, + 'span.type': 'system', + 'cmd.shell': 'ls -l /h ', + 'cmd.truncated': 'true' + }, + integrationName: 'system' + } + ) + }) + + it('should truncate first argument and 
blank the rest', () => { + const shellPlugin = new ChildProcessPlugin(tracerStub, configStub) + const option = '-l'.padEnd(4096, 't') + const path = '/home' + const command = `ls ${option} ${path} -t` + + shellPlugin.start({ command }) + + expect(tracerStub.startSpan).to.have.been.calledOnceWithExactly( + 'command_execution', + { + childOf: undefined, + tags: { + component: 'subprocess', + 'service.name': undefined, + 'resource.name': 'ls', + 'span.kind': undefined, + 'span.type': 'system', + 'cmd.exec': JSON.stringify([ 'ls', '-l', '', '' ]), + 'cmd.truncated': 'true' + }, + integrationName: 'system' + } + ) + }) + + it('should truncate last argument', () => { + const shellPlugin = new ChildProcessPlugin(tracerStub, configStub) + const option = '-t'.padEnd(4000 * 8, 'u') + const path = '/home' + const command = 'ls' + ' -l' + ' ' + path + ' ' + option + + shellPlugin.start({ command, shell: true }) + + expect(tracerStub.startSpan).to.have.been.calledOnceWithExactly( + 'command_execution', + { + childOf: undefined, + tags: { + component: 'subprocess', + 'service.name': undefined, + 'resource.name': 'sh', + 'span.kind': undefined, + 'span.type': 'system', + 'cmd.shell': 'ls -l /home -t', + 'cmd.truncated': 'true' + }, + integrationName: 'system' + } + ) + }) + + it('should not crash if command is not a string', () => { + const shellPlugin = new ChildProcessPlugin(tracerStub, configStub) + + shellPlugin.start({ command: undefined }) + + expect(tracerStub.startSpan).not.to.have.been.called + }) + + it('should not crash if command does not exist', () => { + const shellPlugin = new ChildProcessPlugin(tracerStub, configStub) + + shellPlugin.start({}) + + expect(tracerStub.startSpan).not.to.have.been.called + }) + }) + + describe('end', () => { + it('should not call setTag if neither error nor result is passed', () => { + sinon.stub(storage, 'getStore').returns({ span: spanStub }) + const shellPlugin = new ChildProcessPlugin(tracerStub, configStub) + + shellPlugin.end({}) + + expect(spanStub.setTag).not.to.have.been.called + expect(spanStub.finish).not.to.have.been.called + }) + + it('should call setTag with proper code when result is a buffer', () => { + sinon.stub(storage, 'getStore').returns({ span: spanStub }) + const shellPlugin = new ChildProcessPlugin(tracerStub, configStub) + + shellPlugin.end({ result: Buffer.from('test') }) + + expect(spanStub.setTag).to.have.been.calledOnceWithExactly('cmd.exit_code', '0') + expect(spanStub.finish).to.have.been.calledOnceWithExactly() + }) + + it('should call setTag with proper code when result is a string', () => { + sinon.stub(storage, 'getStore').returns({ span: spanStub }) + const shellPlugin = new ChildProcessPlugin(tracerStub, configStub) + + shellPlugin.end({ result: 'test' }) + + expect(spanStub.setTag).to.have.been.calledOnceWithExactly('cmd.exit_code', '0') + expect(spanStub.finish).to.have.been.calledOnceWithExactly() + }) + + it('should call setTag with proper code when an error is thrown', () => { + sinon.stub(storage, 'getStore').returns({ span: spanStub }) + const shellPlugin = new ChildProcessPlugin(tracerStub, configStub) + + shellPlugin.end({ error: { status: -1 } }) + + expect(spanStub.setTag).to.have.been.calledOnceWithExactly('cmd.exit_code', '-1') + expect(spanStub.finish).to.have.been.calledOnceWithExactly() + }) + }) + + describe('asyncEnd', () => { + it('should call setTag with undefined code if neither error nor result is passed', () => { + sinon.stub(storage, 'getStore').returns({ span: spanStub }) + const shellPlugin = new 
ChildProcessPlugin(tracerStub, configStub) + + shellPlugin.asyncEnd({}) + + expect(spanStub.setTag).to.have.been.calledOnceWithExactly('cmd.exit_code', 'undefined') + expect(spanStub.finish).to.have.been.calledOnce + }) + + it('should call setTag with proper code when a proper code is returned', () => { + sinon.stub(storage, 'getStore').returns({ span: spanStub }) + const shellPlugin = new ChildProcessPlugin(tracerStub, configStub) + + shellPlugin.asyncEnd({ result: 0 }) + + expect(spanStub.setTag).to.have.been.calledOnceWithExactly('cmd.exit_code', '0') + expect(spanStub.finish).to.have.been.calledOnceWithExactly() + }) + }) + + describe('channel', () => { + it('should return proper prefix', () => { + expect(ChildProcessPlugin.prefix).to.be.equal('tracing:datadog:child_process:execution') + }) + + it('should return proper id', () => { + expect(ChildProcessPlugin.id).to.be.equal('child_process') + }) + }) + }) + + describe('Integration', () => { + describe('Methods which spawn a shell by default', () => { + const execAsyncMethods = ['exec'] + const execSyncMethods = ['execSync'] + let childProcess, tracer + + beforeEach(() => { + return agent.load('child_process', undefined, { flushInterval: 1 }).then(() => { + tracer = require('../../dd-trace') + childProcess = require('child_process') + tracer.use('child_process', { enabled: true }) + }) + }) + + afterEach(() => agent.close({ ritmReset: false })) + const parentSpanList = [true, false] + parentSpanList.forEach(parentSpan => { + describe(`${parentSpan ? 'with' : 'without'} parent span`, () => { + const methods = [ + ...execAsyncMethods.map(methodName => ({ methodName, async: true })), + ...execSyncMethods.map(methodName => ({ methodName, async: false })) + ] + if (parentSpan) { + beforeEach((done) => { + const parentSpan = tracer.startSpan('parent') + parentSpan.finish() + tracer.scope().activate(parentSpan, done) + }) + } + + methods.forEach(({ methodName, async }) => { + describe(methodName, () => { + it('should be instrumented', (done) => { + const expected = { + type: 'system', + name: 'command_execution', + error: 0, + meta: { + component: 'subprocess', + 'cmd.shell': 'ls', + 'cmd.exit_code': '0' + } + } + + expectSomeSpan(agent, expected).then(done, done) + + const res = childProcess[methodName]('ls') + if (async) { + res.on('close', noop) + } + }) + + it('command should be scrubbed', (done) => { + const expected = { + type: 'system', + name: 'command_execution', + error: 0, + meta: { + component: 'subprocess', + 'cmd.shell': 'echo password ?', + 'cmd.exit_code': '0' + } + } + expectSomeSpan(agent, expected).then(done, done) + + const args = [] + if (methodName === 'exec' || methodName === 'execSync') { + args.push('echo password 123') + } else { + args.push('echo') + args.push(['password', '123']) + } + + const res = childProcess[methodName](...args) + if (async) { + res.on('close', noop) + } + }) + + it('should be instrumented with error code', (done) => { + const command = [ 'node', '-badOption' ] + const options = { + stdio: 'pipe' + } + const expected = { + type: 'system', + name: 'command_execution', + error: 1, + meta: { + component: 'subprocess', + 'cmd.shell': 'node -badOption', + 'cmd.exit_code': '9' + } + } + + expectSomeSpan(agent, expected).then(done, done) + + const args = normalizeArgs(methodName, command, options) + + if (async) { + const res = childProcess[methodName].apply(null, args) + res.on('close', noop) + } else { + try { + childProcess[methodName].apply(null, args) + } catch { + // process exit with code 1, 
exceptions are expected + } + } + }) + }) + }) + }) + }) + }) + + describe('Methods which do not spawn a shell by default', () => { + const execAsyncMethods = ['execFile', 'spawn'] + const execSyncMethods = ['execFileSync', 'spawnSync'] + let childProcess, tracer + + beforeEach(() => { + return agent.load('child_process', undefined, { flushInterval: 1 }).then(() => { + tracer = require('../../dd-trace') + childProcess = require('child_process') + tracer.use('child_process', { enabled: true }) + }) + }) + + afterEach(() => agent.close({ ritmReset: false })) + const parentSpanList = [true, false] + parentSpanList.forEach(parentSpan => { + describe(`${parentSpan ? 'with' : 'without'} parent span`, () => { + const methods = [ + ...execAsyncMethods.map(methodName => ({ methodName, async: true })), + ...execSyncMethods.map(methodName => ({ methodName, async: false })) + ] + if (parentSpan) { + beforeEach((done) => { + const parentSpan = tracer.startSpan('parent') + parentSpan.finish() + tracer.scope().activate(parentSpan, done) + }) + } + + methods.forEach(({ methodName, async }) => { + describe(methodName, () => { + it('should be instrumented', (done) => { + const expected = { + type: 'system', + name: 'command_execution', + error: 0, + meta: { + component: 'subprocess', + 'cmd.exec': '["ls"]', + 'cmd.exit_code': '0' + } + } + expectSomeSpan(agent, expected).then(done, done) + + const res = childProcess[methodName]('ls') + if (async) { + res.on('close', noop) + } + }) + + it('command should be scrubbed', (done) => { + const expected = { + type: 'system', + name: 'command_execution', + error: 0, + meta: { + component: 'subprocess', + 'cmd.exec': '["echo","password","?"]', + 'cmd.exit_code': '0' + } + } + expectSomeSpan(agent, expected).then(done, done) + + const args = [] + if (methodName === 'exec' || methodName === 'execSync') { + args.push('echo password 123') + } else { + args.push('echo') + args.push(['password', '123']) + } + + const res = childProcess[methodName](...args) + if (async) { + res.on('close', noop) + } + }) + + it('should be instrumented with error code', (done) => { + const command = [ 'node', '-badOption' ] + const options = { + stdio: 'pipe' + } + + const errorExpected = { + type: 'system', + name: 'command_execution', + error: 1, + meta: { + component: 'subprocess', + 'cmd.exec': '["node","-badOption"]', + 'cmd.exit_code': '9' + } + } + + const noErrorExpected = { + type: 'system', + name: 'command_execution', + error: 0, + meta: { + component: 'subprocess', + 'cmd.exec': '["node","-badOption"]', + 'cmd.exit_code': '9' + } + } + + const args = normalizeArgs(methodName, command, options) + + if (async) { + expectSomeSpan(agent, errorExpected).then(done, done) + const res = childProcess[methodName].apply(null, args) + res.on('close', noop) + } else { + try { + if (methodName === 'spawnSync') { + expectSomeSpan(agent, noErrorExpected).then(done, done) + } else { + expectSomeSpan(agent, errorExpected).then(done, done) + } + childProcess[methodName].apply(null, args) + } catch { + // process exit with code 1, exceptions are expected + } + } + }) + + it('should be instrumented with error code (override shell default behavior)', (done) => { + const command = [ 'node', '-badOption' ] + const options = { + stdio: 'pipe', + shell: true + } + const errorExpected = { + type: 'system', + name: 'command_execution', + error: 1, + meta: { + component: 'subprocess', + 'cmd.shell': 'node -badOption', + 'cmd.exit_code': '9' + } + } + + const noErrorExpected = { + type: 'system', + name: 
'command_execution', + error: 0, + meta: { + component: 'subprocess', + 'cmd.shell': 'node -badOption', + 'cmd.exit_code': '9' + } + } + + const args = normalizeArgs(methodName, command, options) + + if (async) { + expectSomeSpan(agent, errorExpected).then(done, done) + const res = childProcess[methodName].apply(null, args) + res.on('close', noop) + } else { + try { + if (methodName === 'spawnSync') { + expectSomeSpan(agent, noErrorExpected).then(done, done) + } else { + expectSomeSpan(agent, errorExpected).then(done, done) + } + childProcess[methodName].apply(null, args) + } catch { + // process exit with code 1, exceptions are expected + } + } + }) + }) + }) + }) + }) + }) + }) +}) diff --git a/packages/datadog-plugin-child_process/test/scrub-cmd-params.spec.js b/packages/datadog-plugin-child_process/test/scrub-cmd-params.spec.js new file mode 100644 index 00000000000..a76788d0742 --- /dev/null +++ b/packages/datadog-plugin-child_process/test/scrub-cmd-params.spec.js @@ -0,0 +1,79 @@ +'use strict' + +const scrubCmdParams = require('../src/scrub-cmd-params') + +describe('scrub cmds', () => { + it('Should not scrub single command', () => { + expect(scrubCmdParams('ls -la')).to.be.deep.equal(['ls', '-la']) + }) + + it('Should split correctly comments', () => { + expect(scrubCmdParams('ls #comment')).to.be.deep.equal(['ls', '#comment']) + expect(scrubCmdParams('ls #comment with spaces')).to.be.deep.equal(['ls', '#comment with spaces']) + }) + + it('Should split globs', () => { + expect(scrubCmdParams('ls node_modules/*')).to.be.deep.equal(['ls', 'node_modules/*']) + expect(scrubCmdParams('ls *')).to.be.deep.equal(['ls', '*']) + }) + + it('Should split correctly texts', () => { + expect(scrubCmdParams('echo "Hello\\ text"')).to.be.deep.equal(['echo', 'Hello\\ text']) + expect(scrubCmdParams('node -e "process.exit(1)"')).to.be.deep.equal(['node', '-e', 'process.exit(1)']) + }) + + it('Should not scrub chained command', () => { + expect(scrubCmdParams('ls -la|grep something')).to.be.deep.equal(['ls', '-la', '|', 'grep', 'something']) + }) + + it('Should scrub environment variables', () => { + expect(scrubCmdParams('ENV=XXX LD_PRELOAD=YYY ls')).to.be.deep.equal(['ENV=?', 'LD_PRELOAD=YYY', 'ls']) + expect(scrubCmdParams('DD_TEST=info SHELL=zsh ls -l')).to.be.deep.equal(['DD_TEST=?', 'SHELL=?', 'ls', '-l']) + }) + + it('Should scrub secret values', () => { + expect(scrubCmdParams('cmd --pass abc --token=def')).to.be.deep.equal(['cmd', '--pass', '?', '--token=?']) + + expect(scrubCmdParams('mysqladmin -u root password very_secret')) + .to.be.deep.equal(['mysqladmin', '-u', 'root', 'password', '?']) + + expect(scrubCmdParams('test -password very_secret -api_key 1234')) + .to.be.deep.equal(['test', '-password', '?', '-api_key', '?']) + }) + + it('Should scrub md5 commands', () => { + expect(scrubCmdParams('md5 -s pony')).to.be.deep.equal(['md5', '?', '?']) + + expect(scrubCmdParams('cat passwords.txt | while read line; do; md5 -s $line; done')).to.be.deep + .equal([ + 'cat', + 'passwords.txt', + '|', + 'while', + 'read', + 'line', + ';', + 'do', + ';', + 'md5', + '?', + '?', + ';', + 'done' + ]) + }) + + it('should scrub shell expressions', () => { + expect(scrubCmdParams('md5 -s secret ; mysqladmin -u root password 1234 | test api_key 4321')).to.be.deep.equal([ + 'md5', '?', '?', ';', 'mysqladmin', '-u', 'root', 'password', '?', '|', 'test', 'api_key', '?' 
+ ]) + }) + + it('Should not scrub md5sum commands', () => { + expect(scrubCmdParams('md5sum file')).to.be.deep.equal(['md5sum', 'file']) + }) + + it('Should maintain var names', () => { + expect(scrubCmdParams('echo $something')).to.be.deep.equal(['echo', '$something']) + }) +}) diff --git a/packages/dd-trace/src/appsec/iast/analyzers/command-injection-analyzer.js b/packages/dd-trace/src/appsec/iast/analyzers/command-injection-analyzer.js index eccf8a3814b..fd2a230a2a8 100644 --- a/packages/dd-trace/src/appsec/iast/analyzers/command-injection-analyzer.js +++ b/packages/dd-trace/src/appsec/iast/analyzers/command-injection-analyzer.js @@ -8,7 +8,7 @@ class CommandInjectionAnalyzer extends InjectionAnalyzer { } onConfigure () { - this.addSub('datadog:child_process:execution:start', ({ command }) => this.analyze(command)) + this.addSub('tracing:datadog:child_process:execution:start', ({ command }) => this.analyze(command)) } } diff --git a/packages/dd-trace/src/appsec/iast/iast-plugin.js b/packages/dd-trace/src/appsec/iast/iast-plugin.js index 2fe9f85bed6..02eb07ebd10 100644 --- a/packages/dd-trace/src/appsec/iast/iast-plugin.js +++ b/packages/dd-trace/src/appsec/iast/iast-plugin.js @@ -127,10 +127,13 @@ class IastPlugin extends Plugin { if (!channelName && !moduleName) return if (!moduleName) { - const firstSep = channelName.indexOf(':') + let firstSep = channelName.indexOf(':') if (firstSep === -1) { moduleName = channelName } else { + if (channelName.startsWith('tracing:')) { + firstSep = channelName.indexOf(':', 'tracing:'.length + 1) + } const lastSep = channelName.indexOf(':', firstSep + 1) moduleName = channelName.substring(firstSep + 1, lastSep !== -1 ? lastSep : channelName.length) } diff --git a/packages/dd-trace/src/plugins/index.js b/packages/dd-trace/src/plugins/index.js index c7c96df0f50..b22f0475ab3 100644 --- a/packages/dd-trace/src/plugins/index.js +++ b/packages/dd-trace/src/plugins/index.js @@ -23,6 +23,7 @@ module.exports = { get 'aws-sdk' () { return require('../../../datadog-plugin-aws-sdk/src') }, get 'bunyan' () { return require('../../../datadog-plugin-bunyan/src') }, get 'cassandra-driver' () { return require('../../../datadog-plugin-cassandra-driver/src') }, + get 'child_process' () { return require('../../../datadog-plugin-child_process/src') }, get 'connect' () { return require('../../../datadog-plugin-connect/src') }, get 'couchbase' () { return require('../../../datadog-plugin-couchbase/src') }, get 'cypress' () { return require('../../../datadog-plugin-cypress/src') }, diff --git a/packages/dd-trace/src/plugins/util/exec.js b/packages/dd-trace/src/plugins/util/exec.js deleted file mode 100644 index 3e3ca3f3660..00000000000 --- a/packages/dd-trace/src/plugins/util/exec.js +++ /dev/null @@ -1,34 +0,0 @@ -const cp = require('child_process') -const log = require('../../log') -const { distributionMetric, incrementCountMetric } = require('../../ci-visibility/telemetry') - -const sanitizedExec = ( - cmd, - flags, - operationMetric, - durationMetric, - errorMetric -) => { - let startTime - if (operationMetric) { - incrementCountMetric(operationMetric.name, operationMetric.tags) - } - if (durationMetric) { - startTime = Date.now() - } - try { - const result = cp.execFileSync(cmd, flags, { stdio: 'pipe' }).toString().replace(/(\r\n|\n|\r)/gm, '') - if (durationMetric) { - distributionMetric(durationMetric.name, durationMetric.tags, Date.now() - startTime) - } - return result - } catch (e) { - if (errorMetric) { - incrementCountMetric(errorMetric.name, { 
...errorMetric.tags, exitCode: e.status }) - } - log.error(e) - return '' - } -} - -module.exports = { sanitizedExec } diff --git a/packages/dd-trace/src/plugins/util/git.js b/packages/dd-trace/src/plugins/util/git.js index 885cbe5fb3c..f4aebc184a5 100644 --- a/packages/dd-trace/src/plugins/util/git.js +++ b/packages/dd-trace/src/plugins/util/git.js @@ -26,6 +26,7 @@ const { TELEMETRY_GIT_COMMAND_ERRORS } = require('../../ci-visibility/telemetry') const { filterSensitiveInfoFromRepository } = require('./url') +const { storage } = require('../../../../datadog-core') const GIT_REV_LIST_MAX_BUFFER = 8 * 1024 * 1024 // 8MB @@ -36,6 +37,9 @@ function sanitizedExec ( durationMetric, errorMetric ) { + const store = storage.getStore() + storage.enterWith({ noop: true }) + let startTime if (operationMetric) { incrementCountMetric(operationMetric.name, operationMetric.tags) @@ -55,6 +59,8 @@ function sanitizedExec ( } log.error(e) return '' + } finally { + storage.enterWith(store) } } diff --git a/packages/dd-trace/test/appsec/iast/iast-plugin.spec.js b/packages/dd-trace/test/appsec/iast/iast-plugin.spec.js index ca8a3381676..539c749abbe 100644 --- a/packages/dd-trace/test/appsec/iast/iast-plugin.spec.js +++ b/packages/dd-trace/test/appsec/iast/iast-plugin.spec.js @@ -247,7 +247,7 @@ describe('IAST Plugin', () => { expect(getTelemetryHandler).to.be.calledOnceWith(iastPlugin.pluginSubs[1]) }) - it('should register an pluginSubscription and increment a sink metric when a sink module is loaded', () => { + it('should register a pluginSubscription and increment a sink metric when a sink module is loaded', () => { iastPlugin.addSub({ moduleName: 'sink', channelName: 'datadog:sink:start', @@ -264,6 +264,22 @@ describe('IAST Plugin', () => { expect(metricAdd).to.be.calledOnceWith(1, 'injection') }) + it('should register and increment a sink metric when a sink module is loaded using a tracingChannel', () => { + iastPlugin.addSub({ + channelName: 'tracing:datadog:sink:start', + tag: 'injection', + tagKey: VULNERABILITY_TYPE + }, handler) + iastPlugin.configure(true) + + const metric = getInstrumentedMetric(VULNERABILITY_TYPE) + const metricAdd = sinon.stub(metric, 'add') + + loadChannel.publish({ name: 'sink' }) + + expect(metricAdd).to.be.calledOnceWith(1, 'injection') + }) + it('should register an pluginSubscription and increment a source metric when a source module is loaded', () => { iastPlugin.addSub({ moduleName: 'source', diff --git a/yarn.lock b/yarn.lock index da98ae08d7c..9fc7b354f71 100644 --- a/yarn.lock +++ b/yarn.lock @@ -4565,7 +4565,7 @@ shebang-regex@^3.0.0: resolved "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz" integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== -shell-quote@^1.6.1: +shell-quote@^1.6.1, shell-quote@^1.8.1: version "1.8.1" resolved "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.1.tgz" integrity "sha1-bb9Nt1UVrVusY7TxiUw6FUx2ZoA= sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA==" From 5d6f011543d85109bcbfd243ee1e1eabf709c3fb Mon Sep 17 00:00:00 2001 From: Ugaitz Urien Date: Tue, 30 Jan 2024 15:53:08 +0100 Subject: [PATCH 07/44] Support processors and scanners updates for API Sec via RC (#3981) --- packages/dd-trace/src/appsec/rule_manager.js | 4 +- .../dd-trace/test/appsec/rule_manager.spec.js | 54 +++++++++++++++++++ 2 files changed, 56 insertions(+), 2 deletions(-) diff --git a/packages/dd-trace/src/appsec/rule_manager.js 
b/packages/dd-trace/src/appsec/rule_manager.js index 7f13d14bb34..3cbef3597e3 100644 --- a/packages/dd-trace/src/appsec/rule_manager.js +++ b/packages/dd-trace/src/appsec/rule_manager.js @@ -69,9 +69,9 @@ function updateWafFromRC ({ toUnapply, toApply, toModify }) { item.apply_error = 'Multiple ruleset received in ASM_DD' } else { if (file && file.rules && file.rules.length) { - const { version, metadata, rules } = file + const { version, metadata, rules, processors, scanners } = file - newRuleset = { version, metadata, rules } + newRuleset = { version, metadata, rules, processors, scanners } newRulesetId = id } diff --git a/packages/dd-trace/test/appsec/rule_manager.spec.js b/packages/dd-trace/test/appsec/rule_manager.spec.js index b2162b42a82..7c9dc010ddc 100644 --- a/packages/dd-trace/test/appsec/rule_manager.spec.js +++ b/packages/dd-trace/test/appsec/rule_manager.spec.js @@ -300,6 +300,33 @@ describe('AppSec Rule Manager', () => { 'confidence': '1' }, 'conditions': [] + }], + processors: [{ + id: 'test-processor-id', + generator: 'test-generator', + evaluate: false, + output: true + }], + scanners: [{ + id: 'test-scanner-id', + name: 'Test name', + key: { + operator: 'match_regex', + parameters: { + regex: 'test-regex' + } + }, + value: { + operator: 'match_regex', + parameters: { + regex: 'test-regex-2' + } + }, + tags: { + type: 'card', + card_type: 'test', + category: 'payment' + } }] } @@ -333,6 +360,33 @@ describe('AppSec Rule Manager', () => { 'confidence': '1' }, 'conditions': [] + }], + processors: [{ + id: 'test-processor-id', + generator: 'test-generator', + evaluate: false, + output: true + }], + scanners: [{ + id: 'test-scanner-id', + name: 'Test name', + key: { + operator: 'match_regex', + parameters: { + regex: 'test-regex' + } + }, + value: { + operator: 'match_regex', + parameters: { + regex: 'test-regex-2' + } + }, + tags: { + type: 'card', + card_type: 'test', + category: 'payment' + } }] } From ded59b2670b22a7c3400cd6f8bcadb374639c162 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Tue, 30 Jan 2024 18:44:26 +0100 Subject: [PATCH 08/44] =?UTF-8?q?[ci-visibility]=C2=A0Use=20system's=20nod?= =?UTF-8?q?e=20instead=20of=20the=20bundled=20one=20in=20cypress=20tests?= =?UTF-8?q?=20(#4011)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/project.yml | 3 +++ integration-tests/cypress-config.json | 3 ++- integration-tests/cypress/cypress.spec.js | 4 ++++ 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml index 1115c818aae..927c144894c 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -53,6 +53,9 @@ jobs: integration-cypress: strategy: matrix: + # Important: This is outside the minimum supported version of dd-trace-js + # Node > 16 does not work with Cypress@6.7.0 (not even without our plugin) + # TODO: figure out what to do with this: we might have to deprecate support for cypress@6.7.0 version: [16, latest] # 6.7.0 is the minimum version we support cypress-version: [6.7.0, latest] diff --git a/integration-tests/cypress-config.json b/integration-tests/cypress-config.json index 3bd4dc31817..3ad19f9f90a 100644 --- a/integration-tests/cypress-config.json +++ b/integration-tests/cypress-config.json @@ -4,5 +4,6 @@ "pluginsFile": "cypress/plugins-old/index.js", "supportFile": "cypress/support/e2e.js", "integrationFolder": "cypress/e2e", - "defaultCommandTimeout": 100 + 
"defaultCommandTimeout": 100, + "nodeVersion": "system" } diff --git a/integration-tests/cypress/cypress.spec.js b/integration-tests/cypress/cypress.spec.js index ad297913ef8..85381f41e2e 100644 --- a/integration-tests/cypress/cypress.spec.js +++ b/integration-tests/cypress/cypress.spec.js @@ -30,6 +30,7 @@ const { } = require('../../packages/dd-trace/src/plugins/util/test') const { ERROR_MESSAGE } = require('../../packages/dd-trace/src/constants') const semver = require('semver') +const { NODE_MAJOR } = require('../../version') const version = process.env.CYPRESS_VERSION const hookFile = 'dd-trace/loader-hook.mjs' @@ -56,6 +57,9 @@ moduleType.forEach(({ if (type === 'esm' && semver.satisfies(version, '<10.0.0')) { return } + if (version === '6.7.0' && NODE_MAJOR > 16) { + return + } describe(`cypress@${version} ${type}`, function () { this.retries(2) this.timeout(60000) From 2c02651a9a38c66eca7281021175708d3569b614 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Tue, 30 Jan 2024 20:19:34 +0100 Subject: [PATCH 09/44] [request] Allow accepting gzip (#3992) --- .../dd-trace/src/exporters/common/request.js | 24 +++++++- .../test/exporters/common/request.spec.js | 57 +++++++++++++++++++ 2 files changed, 78 insertions(+), 3 deletions(-) diff --git a/packages/dd-trace/src/exporters/common/request.js b/packages/dd-trace/src/exporters/common/request.js index c59976edb50..41a211c79ea 100644 --- a/packages/dd-trace/src/exporters/common/request.js +++ b/packages/dd-trace/src/exporters/common/request.js @@ -7,6 +7,8 @@ const { Readable } = require('stream') const http = require('http') const https = require('https') const { parse: urlParse } = require('url') +const zlib = require('zlib') + const docker = require('./docker') const { httpAgent, httpsAgent } = require('./agents') const { storage } = require('../../../../datadog-core') @@ -93,16 +95,31 @@ function request (data, options, callback) { options.agent = isSecure ? 
httpsAgent : httpAgent const onResponse = res => { - let responseData = '' + const chunks = [] res.setTimeout(timeout) - res.on('data', chunk => { responseData += chunk }) + res.on('data', chunk => { + chunks.push(chunk) + }) res.on('end', () => { activeRequests-- + const buffer = Buffer.concat(chunks) if (res.statusCode >= 200 && res.statusCode <= 299) { - callback(null, responseData, res.statusCode) + const isGzip = res.headers['content-encoding'] === 'gzip' + if (isGzip) { + zlib.gunzip(buffer, (err, result) => { + if (err) { + log.error(`Could not gunzip response: ${err.message}`) + callback(null, '', res.statusCode) + } else { + callback(null, result.toString(), res.statusCode) + } + }) + } else { + callback(null, buffer.toString(), res.statusCode) + } } else { let errorMessage = '' try { @@ -114,6 +131,7 @@ function request (data, options, callback) { } catch (e) { // ignore error } + const responseData = buffer.toString() if (responseData) { errorMessage += ` Response from the endpoint: "${responseData}"` } diff --git a/packages/dd-trace/test/exporters/common/request.spec.js b/packages/dd-trace/test/exporters/common/request.spec.js index ca2935f63e9..eb606fd468e 100644 --- a/packages/dd-trace/test/exporters/common/request.spec.js +++ b/packages/dd-trace/test/exporters/common/request.spec.js @@ -5,6 +5,7 @@ require('../../setup/tap') const nock = require('nock') const getPort = require('get-port') const http = require('http') +const zlib = require('zlib') const FormData = require('../../../src/exporters/common/form-data') @@ -343,4 +344,60 @@ describe('request', function () { }) }) }) + + describe('with compressed responses', () => { + it('can decompress gzip responses', (done) => { + const compressedData = zlib.gzipSync(Buffer.from(JSON.stringify({ foo: 'bar' }))) + nock('http://test:123', { + reqheaders: { + 'content-type': 'application/json', + 'accept-encoding': 'gzip' + } + }) + .post('/path') + .reply(200, compressedData, { 'content-encoding': 'gzip' }) + + request(Buffer.from(''), { + protocol: 'http:', + hostname: 'test', + port: 123, + path: '/path', + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'accept-encoding': 'gzip' + } + }, (err, res) => { + expect(res).to.equal(JSON.stringify({ foo: 'bar' })) + done(err) + }) + }) + it('should ignore badly compressed data and log an error', (done) => { + const badlyCompressedData = 'this is not actually compressed data' + nock('http://test:123', { + reqheaders: { + 'content-type': 'application/json', + 'accept-encoding': 'gzip' + } + }) + .post('/path') + .reply(200, badlyCompressedData, { 'content-encoding': 'gzip' }) + + request(Buffer.from(''), { + protocol: 'http:', + hostname: 'test', + port: 123, + path: '/path', + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'accept-encoding': 'gzip' + } + }, (err, res) => { + expect(log.error).to.have.been.calledWith('Could not gunzip response: unexpected end of file') + expect(res).to.equal('') + done(err) + }) + }) + }) }) From ff6fa6a689efb59b96dcfdd39580bc88271e6132 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Thu, 1 Feb 2024 14:36:30 +0100 Subject: [PATCH 10/44] [ci-visibility] Refactor ITR config to library config (#4010) --- integration-tests/ci-visibility.spec.js | 18 ++++---- integration-tests/cucumber/cucumber.spec.js | 10 ++--- .../datadog-instrumentations/src/cucumber.js | 4 +- packages/datadog-instrumentations/src/jest.js | 12 ++--- .../datadog-instrumentations/src/mocha.js | 6 +-- 
packages/datadog-plugin-cucumber/src/index.js | 8 ++-- packages/datadog-plugin-cypress/src/plugin.js | 16 +++---- packages/datadog-plugin-jest/src/index.js | 2 +- packages/datadog-plugin-mocha/src/index.js | 8 ++-- .../exporters/ci-visibility-exporter.js | 32 +++++++------- .../get-library-configuration.js} | 6 +-- packages/dd-trace/src/plugins/ci_plugin.js | 10 ++--- .../exporters/agent-proxy/agent-proxy.spec.js | 4 +- .../exporters/agentless/exporter.spec.js | 13 +++--- .../exporters/ci-visibility-exporter.spec.js | 44 ++++++++++--------- 15 files changed, 96 insertions(+), 97 deletions(-) rename packages/dd-trace/src/ci-visibility/{intelligent-test-runner/get-itr-configuration.js => requests/get-library-configuration.js} (96%) diff --git a/integration-tests/ci-visibility.spec.js b/integration-tests/ci-visibility.spec.js index 8efc274a2cc..dd7d563cda5 100644 --- a/integration-tests/ci-visibility.spec.js +++ b/integration-tests/ci-visibility.spec.js @@ -685,18 +685,18 @@ testFrameworks.forEach(({ }) it('can report code coverage', (done) => { let testOutput - const itrConfigRequestPromise = receiver.payloadReceived( + const libraryConfigRequestPromise = receiver.payloadReceived( ({ url }) => url === '/api/v2/libraries/tests/services/setting' ) const codeCovRequestPromise = receiver.payloadReceived(({ url }) => url === '/api/v2/citestcov') const eventsRequestPromise = receiver.payloadReceived(({ url }) => url === '/api/v2/citestcycle') Promise.all([ - itrConfigRequestPromise, + libraryConfigRequestPromise, codeCovRequestPromise, eventsRequestPromise - ]).then(([itrConfigRequest, codeCovRequest, eventsRequest]) => { - assert.propertyVal(itrConfigRequest.headers, 'dd-api-key', '1') + ]).then(([libraryConfigRequest, codeCovRequest, eventsRequest]) => { + assert.propertyVal(libraryConfigRequest.headers, 'dd-api-key', '1') const [coveragePayload] = codeCovRequest.payload assert.propertyVal(codeCovRequest.headers, 'dd-api-key', '1') @@ -1294,19 +1294,19 @@ testFrameworks.forEach(({ }) it('can report code coverage', (done) => { let testOutput - const itrConfigRequestPromise = receiver.payloadReceived( + const libraryConfigRequestPromise = receiver.payloadReceived( ({ url }) => url === '/evp_proxy/v2/api/v2/libraries/tests/services/setting' ) const codeCovRequestPromise = receiver.payloadReceived(({ url }) => url === '/evp_proxy/v2/api/v2/citestcov') const eventsRequestPromise = receiver.payloadReceived(({ url }) => url === '/evp_proxy/v2/api/v2/citestcycle') Promise.all([ - itrConfigRequestPromise, + libraryConfigRequestPromise, codeCovRequestPromise, eventsRequestPromise - ]).then(([itrConfigRequest, codeCovRequest, eventsRequest]) => { - assert.notProperty(itrConfigRequest.headers, 'dd-api-key') - assert.propertyVal(itrConfigRequest.headers, 'x-datadog-evp-subdomain', 'api') + ]).then(([libraryConfigRequest, codeCovRequest, eventsRequest]) => { + assert.notProperty(libraryConfigRequest.headers, 'dd-api-key') + assert.propertyVal(libraryConfigRequest.headers, 'x-datadog-evp-subdomain', 'api') const [coveragePayload] = codeCovRequest.payload assert.notProperty(codeCovRequest.headers, 'dd-api-key') diff --git a/integration-tests/cucumber/cucumber.spec.js b/integration-tests/cucumber/cucumber.spec.js index d27b6c377a2..7ed484e4993 100644 --- a/integration-tests/cucumber/cucumber.spec.js +++ b/integration-tests/cucumber/cucumber.spec.js @@ -285,23 +285,23 @@ versions.forEach(version => { }) it('can report code coverage', (done) => { let testOutput - const itrConfigRequestPromise = 
receiver.payloadReceived( + const libraryConfigRequestPromise = receiver.payloadReceived( ({ url }) => url.endsWith('/api/v2/libraries/tests/services/setting') ) const codeCovRequestPromise = receiver.payloadReceived(({ url }) => url.endsWith('/api/v2/citestcov')) const eventsRequestPromise = receiver.payloadReceived(({ url }) => url.endsWith('/api/v2/citestcycle')) Promise.all([ - itrConfigRequestPromise, + libraryConfigRequestPromise, codeCovRequestPromise, eventsRequestPromise - ]).then(([itrConfigRequest, codeCovRequest, eventsRequest]) => { + ]).then(([libraryConfigRequest, codeCovRequest, eventsRequest]) => { const [coveragePayload] = codeCovRequest.payload if (isAgentless) { - assert.propertyVal(itrConfigRequest.headers, 'dd-api-key', '1') + assert.propertyVal(libraryConfigRequest.headers, 'dd-api-key', '1') assert.propertyVal(codeCovRequest.headers, 'dd-api-key', '1') } else { - assert.notProperty(itrConfigRequest.headers, 'dd-api-key') + assert.notProperty(libraryConfigRequest.headers, 'dd-api-key') assert.notProperty(codeCovRequest.headers, 'dd-api-key', '1') } diff --git a/packages/datadog-instrumentations/src/cucumber.js b/packages/datadog-instrumentations/src/cucumber.js index d9791d67485..738b52d82a9 100644 --- a/packages/datadog-instrumentations/src/cucumber.js +++ b/packages/datadog-instrumentations/src/cucumber.js @@ -16,7 +16,7 @@ const testSuiteStartCh = channel('ci:cucumber:test-suite:start') const testSuiteFinishCh = channel('ci:cucumber:test-suite:finish') const testSuiteCodeCoverageCh = channel('ci:cucumber:test-suite:code-coverage') -const itrConfigurationCh = channel('ci:cucumber:itr-configuration') +const libraryConfigurationCh = channel('ci:cucumber:library-configuration') const skippableSuitesCh = channel('ci:cucumber:test-suite:skippable') const sessionStartCh = channel('ci:cucumber:session:start') const sessionFinishCh = channel('ci:cucumber:session:finish') @@ -272,7 +272,7 @@ addHook({ }) asyncResource.runInAsyncScope(() => { - itrConfigurationCh.publish({ onDone }) + libraryConfigurationCh.publish({ onDone }) }) await configPromise diff --git a/packages/datadog-instrumentations/src/jest.js b/packages/datadog-instrumentations/src/jest.js index 400b2aaf03e..1c8ef07e0b8 100644 --- a/packages/datadog-instrumentations/src/jest.js +++ b/packages/datadog-instrumentations/src/jest.js @@ -37,7 +37,7 @@ const testRunFinishCh = channel('ci:jest:test:finish') const testErrCh = channel('ci:jest:test:err') const skippableSuitesCh = channel('ci:jest:test-suite:skippable') -const jestItrConfigurationCh = channel('ci:jest:itr-configuration') +const libraryConfigurationCh = channel('ci:jest:library-configuration') const itrSkippedSuitesCh = channel('ci:jest:itr:skipped-suites') @@ -234,19 +234,19 @@ function cliWrapper (cli, jestVersion) { const configurationPromise = new Promise((resolve) => { onDone = resolve }) - if (!jestItrConfigurationCh.hasSubscribers) { + if (!libraryConfigurationCh.hasSubscribers) { return runCLI.apply(this, arguments) } sessionAsyncResource.runInAsyncScope(() => { - jestItrConfigurationCh.publish({ onDone }) + libraryConfigurationCh.publish({ onDone }) }) try { - const { err, itrConfig } = await configurationPromise + const { err, libraryConfig } = await configurationPromise if (!err) { - isCodeCoverageEnabled = itrConfig.isCodeCoverageEnabled - isSuitesSkippingEnabled = itrConfig.isSuitesSkippingEnabled + isCodeCoverageEnabled = libraryConfig.isCodeCoverageEnabled + isSuitesSkippingEnabled = libraryConfig.isSuitesSkippingEnabled } } catch (err) { 
log.error(err) diff --git a/packages/datadog-instrumentations/src/mocha.js b/packages/datadog-instrumentations/src/mocha.js index 9438902f797..f3d2404a840 100644 --- a/packages/datadog-instrumentations/src/mocha.js +++ b/packages/datadog-instrumentations/src/mocha.js @@ -20,7 +20,7 @@ const skipCh = channel('ci:mocha:test:skip') const testFinishCh = channel('ci:mocha:test:finish') const parameterizedTestCh = channel('ci:mocha:test:parameterize') -const itrConfigurationCh = channel('ci:mocha:itr-configuration') +const libraryConfigurationCh = channel('ci:mocha:library-configuration') const skippableSuitesCh = channel('ci:mocha:test-suite:skippable') const testSessionStartCh = channel('ci:mocha:session:start') @@ -384,7 +384,7 @@ addHook({ return run.apply(this, arguments) } - if (!itrConfigurationCh.hasSubscribers || this.isWorker) { + if (!libraryConfigurationCh.hasSubscribers || this.isWorker) { if (this.isWorker) { isWorker = true } @@ -439,7 +439,7 @@ addHook({ } mochaRunAsyncResource.runInAsyncScope(() => { - itrConfigurationCh.publish({ + libraryConfigurationCh.publish({ onDone: mochaRunAsyncResource.bind(onReceivedConfiguration) }) }) diff --git a/packages/datadog-plugin-cucumber/src/index.js b/packages/datadog-plugin-cucumber/src/index.js index f754981fe29..e44d0c5bd70 100644 --- a/packages/datadog-plugin-cucumber/src/index.js +++ b/packages/datadog-plugin-cucumber/src/index.js @@ -47,7 +47,7 @@ class CucumberPlugin extends CiPlugin { hasUnskippableSuites, hasForcedToRunSuites }) => { - const { isSuitesSkippingEnabled, isCodeCoverageEnabled } = this.itrConfig || {} + const { isSuitesSkippingEnabled, isCodeCoverageEnabled } = this.libraryConfig || {} addIntelligentTestRunnerSpanTags( this.testSessionSpan, this.testModuleSpan, @@ -71,7 +71,7 @@ class CucumberPlugin extends CiPlugin { this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'session') finishAllTraceSpans(this.testSessionSpan) - this.itrConfig = null + this.libraryConfig = null this.tracer._exporter.flush() }) @@ -102,7 +102,7 @@ class CucumberPlugin extends CiPlugin { } }) this.telemetry.ciVisEvent(TELEMETRY_EVENT_CREATED, 'suite') - if (this.itrConfig?.isCodeCoverageEnabled) { + if (this.libraryConfig?.isCodeCoverageEnabled) { this.telemetry.ciVisEvent(TELEMETRY_CODE_COVERAGE_STARTED, 'suite', { library: 'istanbul' }) } }) @@ -114,7 +114,7 @@ class CucumberPlugin extends CiPlugin { }) this.addSub('ci:cucumber:test-suite:code-coverage', ({ coverageFiles, suiteFile }) => { - if (!this.itrConfig?.isCodeCoverageEnabled) { + if (!this.libraryConfig?.isCodeCoverageEnabled) { return } if (!coverageFiles.length) { diff --git a/packages/datadog-plugin-cypress/src/plugin.js b/packages/datadog-plugin-cypress/src/plugin.js index e088df48eed..a846806f70d 100644 --- a/packages/datadog-plugin-cypress/src/plugin.js +++ b/packages/datadog-plugin-cypress/src/plugin.js @@ -119,14 +119,14 @@ function getSuiteStatus (suiteStats) { return 'pass' } -function getItrConfig (tracer, testConfiguration) { +function getLibraryConfiguration (tracer, testConfiguration) { return new Promise(resolve => { - if (!tracer._tracer._exporter || !tracer._tracer._exporter.getItrConfiguration) { + if (!tracer._tracer._exporter?.getLibraryConfiguration) { return resolve({ err: new Error('CI Visibility was not initialized correctly') }) } - tracer._tracer._exporter.getItrConfiguration(testConfiguration, (err, itrConfig) => { - resolve({ err, itrConfig }) + tracer._tracer._exporter.getLibraryConfiguration(testConfiguration, (err, libraryConfig) => { + resolve({ err, 
libraryConfig }) }) }) } @@ -136,7 +136,7 @@ function getSkippableTests (isSuitesSkippingEnabled, tracer, testConfiguration) return Promise.resolve({ skippableTests: [] }) } return new Promise(resolve => { - if (!tracer._tracer._exporter || !tracer._tracer._exporter.getItrConfiguration) { + if (!tracer._tracer._exporter?.getLibraryConfiguration) { return resolve({ err: new Error('CI Visibility was not initialized correctly') }) } tracer._tracer._exporter.getSkippableSuites(testConfiguration, (err, skippableTests, correlationId) => { @@ -284,12 +284,12 @@ module.exports = (on, config) => { } on('before:run', (details) => { - return getItrConfig(tracer, testConfiguration).then(({ err, itrConfig }) => { + return getLibraryConfiguration(tracer, testConfiguration).then(({ err, libraryConfig }) => { if (err) { log.error(err) } else { - isSuitesSkippingEnabled = itrConfig.isSuitesSkippingEnabled - isCodeCoverageEnabled = itrConfig.isCodeCoverageEnabled + isSuitesSkippingEnabled = libraryConfig.isSuitesSkippingEnabled + isCodeCoverageEnabled = libraryConfig.isCodeCoverageEnabled } return getSkippableTests(isSuitesSkippingEnabled, tracer, testConfiguration) diff --git a/packages/datadog-plugin-jest/src/index.js b/packages/datadog-plugin-jest/src/index.js index 3ee4773ac27..49a8d4b2a05 100644 --- a/packages/datadog-plugin-jest/src/index.js +++ b/packages/datadog-plugin-jest/src/index.js @@ -223,7 +223,7 @@ class JestPlugin extends CiPlugin { }) /** - * This can't use `this.itrConfig` like `ci:mocha:test-suite:code-coverage` + * This can't use `this.libraryConfig` like `ci:mocha:test-suite:code-coverage` * because this subscription happens in a different process from the one * fetching the ITR config. */ diff --git a/packages/datadog-plugin-mocha/src/index.js b/packages/datadog-plugin-mocha/src/index.js index b691b858c0a..644371c3ec6 100644 --- a/packages/datadog-plugin-mocha/src/index.js +++ b/packages/datadog-plugin-mocha/src/index.js @@ -42,7 +42,7 @@ class MochaPlugin extends CiPlugin { this.sourceRoot = process.cwd() this.addSub('ci:mocha:test-suite:code-coverage', ({ coverageFiles, suiteFile }) => { - if (!this.itrConfig || !this.itrConfig.isCodeCoverageEnabled) { + if (!this.libraryConfig?.isCodeCoverageEnabled) { return } const testSuiteSpan = this._testSuites.get(suiteFile) @@ -98,7 +98,7 @@ class MochaPlugin extends CiPlugin { } }) this.telemetry.ciVisEvent(TELEMETRY_EVENT_CREATED, 'suite') - if (this.itrConfig?.isCodeCoverageEnabled) { + if (this.libraryConfig?.isCodeCoverageEnabled) { this.telemetry.ciVisEvent(TELEMETRY_CODE_COVERAGE_STARTED, 'suite', { library: 'istanbul' }) } if (itrCorrelationId) { @@ -192,7 +192,7 @@ class MochaPlugin extends CiPlugin { error }) => { if (this.testSessionSpan) { - const { isSuitesSkippingEnabled, isCodeCoverageEnabled } = this.itrConfig || {} + const { isSuitesSkippingEnabled, isCodeCoverageEnabled } = this.libraryConfig || {} this.testSessionSpan.setTag(TEST_STATUS, status) this.testModuleSpan.setTag(TEST_STATUS, status) @@ -222,7 +222,7 @@ class MochaPlugin extends CiPlugin { this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'session') finishAllTraceSpans(this.testSessionSpan) } - this.itrConfig = null + this.libraryConfig = null this.tracer._exporter.flush() }) } diff --git a/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js b/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js index d4cb05cb4a0..d2833cec328 100644 --- a/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js +++ 
b/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js @@ -3,7 +3,7 @@ const URL = require('url').URL const { sendGitMetadata: sendGitMetadataRequest } = require('./git/git_metadata') -const { getItrConfiguration: getItrConfigurationRequest } = require('../intelligent-test-runner/get-itr-configuration') +const { getLibraryConfiguration: getLibraryConfigurationRequest } = require('../requests/get-library-configuration') const { getSkippableSuites: getSkippableSuitesRequest } = require('../intelligent-test-runner/get-skippable-suites') const log = require('../../log') const AgentInfoExporter = require('../../exporters/common/agent-info-exporter') @@ -76,11 +76,10 @@ class CiVisibilityExporter extends AgentInfoExporter { shouldRequestSkippableSuites () { return !!(this._config.isIntelligentTestRunnerEnabled && this._canUseCiVisProtocol && - this._itrConfig && - this._itrConfig.isSuitesSkippingEnabled) + this._libraryConfig?.isSuitesSkippingEnabled) } - shouldRequestItrConfiguration () { + shouldRequestLibraryConfiguration () { return this._config.isIntelligentTestRunnerEnabled } @@ -117,13 +116,13 @@ class CiVisibilityExporter extends AgentInfoExporter { } /** - * We can't request ITR configuration until we know whether we can use the + * We can't request library configuration until we know whether we can use the * CI Visibility Protocol, hence the this._canUseCiVisProtocol promise. */ - getItrConfiguration (testConfiguration, callback) { + getLibraryConfiguration (testConfiguration, callback) { const { repositoryUrl } = testConfiguration this.sendGitMetadata(repositoryUrl) - if (!this.shouldRequestItrConfiguration()) { + if (!this.shouldRequestLibraryConfiguration()) { return callback(null, {}) } this._canUseCiVisProtocolPromise.then((canUseCiVisProtocol) => { @@ -139,28 +138,29 @@ class CiVisibilityExporter extends AgentInfoExporter { custom: getTestConfigurationTags(this._config.tags), ...testConfiguration } - getItrConfigurationRequest(configuration, (err, itrConfig) => { + getLibraryConfigurationRequest(configuration, (err, libraryConfig) => { /** - * **Important**: this._itrConfig remains empty in testing frameworks - * where the tests run in a subprocess, because `getItrConfiguration` is called only once. + * **Important**: this._libraryConfig remains empty in testing frameworks + * where the tests run in a subprocess, like Jest, + * because `getLibraryConfiguration` is called only once in the main process. 
*/ - this._itrConfig = itrConfig + this._libraryConfig = libraryConfig if (err) { callback(err, {}) - } else if (itrConfig?.requireGit) { + } else if (libraryConfig?.requireGit) { // If the backend requires git, we'll wait for the upload to finish and request settings again this._gitUploadPromise.then(gitUploadError => { if (gitUploadError) { return callback(gitUploadError, {}) } - getItrConfigurationRequest(configuration, (err, finalItrConfig) => { - this._itrConfig = finalItrConfig - callback(err, finalItrConfig) + getLibraryConfigurationRequest(configuration, (err, finalLibraryConfig) => { + this._libraryConfig = finalLibraryConfig + callback(err, finalLibraryConfig) }) }) } else { - callback(null, itrConfig) + callback(null, libraryConfig) } }) }) diff --git a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js b/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js similarity index 96% rename from packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js rename to packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js index a6932438ecb..ea9340f4224 100644 --- a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js +++ b/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js @@ -9,9 +9,9 @@ const { TELEMETRY_GIT_REQUESTS_SETTINGS_ERRORS, TELEMETRY_GIT_REQUESTS_SETTINGS_RESPONSE, getErrorTypeFromStatusCode -} = require('../../ci-visibility/telemetry') +} = require('../telemetry') -function getItrConfiguration ({ +function getLibraryConfiguration ({ url, isEvpProxy, evpProxyPrefix, @@ -117,4 +117,4 @@ function getItrConfiguration ({ }) } -module.exports = { getItrConfiguration } +module.exports = { getLibraryConfiguration } diff --git a/packages/dd-trace/src/plugins/ci_plugin.js b/packages/dd-trace/src/plugins/ci_plugin.js index 349358ca9fc..90d354a31ae 100644 --- a/packages/dd-trace/src/plugins/ci_plugin.js +++ b/packages/dd-trace/src/plugins/ci_plugin.js @@ -36,17 +36,17 @@ module.exports = class CiPlugin extends Plugin { this.rootDir = process.cwd() // fallback in case :session:start events are not emitted - this.addSub(`ci:${this.constructor.id}:itr-configuration`, ({ onDone }) => { - if (!this.tracer._exporter || !this.tracer._exporter.getItrConfiguration) { + this.addSub(`ci:${this.constructor.id}:library-configuration`, ({ onDone }) => { + if (!this.tracer._exporter || !this.tracer._exporter.getLibraryConfiguration) { return onDone({ err: new Error('CI Visibility was not initialized correctly') }) } - this.tracer._exporter.getItrConfiguration(this.testConfiguration, (err, itrConfig) => { + this.tracer._exporter.getLibraryConfiguration(this.testConfiguration, (err, libraryConfig) => { if (err) { log.error(`Intelligent Test Runner configuration could not be fetched. 
${err.message}`) } else { - this.itrConfig = itrConfig + this.libraryConfig = libraryConfig } - onDone({ err, itrConfig }) + onDone({ err, libraryConfig }) }) }) diff --git a/packages/dd-trace/test/ci-visibility/exporters/agent-proxy/agent-proxy.spec.js b/packages/dd-trace/test/ci-visibility/exporters/agent-proxy/agent-proxy.spec.js index 1dfa23dead5..189defedf4e 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/agent-proxy/agent-proxy.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/agent-proxy/agent-proxy.spec.js @@ -106,7 +106,7 @@ describe('AgentProxyCiVisibilityExporter', () => { spanId: '1', files: [] } - agentProxyCiVisibilityExporter._itrConfig = { isCodeCoverageEnabled: true } + agentProxyCiVisibilityExporter._libraryConfig = { isCodeCoverageEnabled: true } agentProxyCiVisibilityExporter.exportCoverage(coverage) expect(mockWriter.append).to.have.been.calledWith({ spanId: '1', traceId: '1', files: [] }) }) @@ -213,7 +213,7 @@ describe('AgentProxyCiVisibilityExporter', () => { spanId: '1', files: [] } - agentProxyCiVisibilityExporter._itrConfig = { isCodeCoverageEnabled: true } + agentProxyCiVisibilityExporter._libraryConfig = { isCodeCoverageEnabled: true } agentProxyCiVisibilityExporter.exportCoverage(coverage) expect(mockWriter.append).to.have.been.calledWith({ traceId: '1', spanId: '1', files: [] }) await new Promise(resolve => setTimeout(resolve, flushInterval)) diff --git a/packages/dd-trace/test/ci-visibility/exporters/agentless/exporter.spec.js b/packages/dd-trace/test/ci-visibility/exporters/agentless/exporter.spec.js index 9c12087dbe0..2c2ec7638fe 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/agentless/exporter.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/agentless/exporter.spec.js @@ -53,8 +53,7 @@ describe('CI Visibility Agentless Exporter', () => { isIntelligentTestRunnerEnabled: true, tags: {} }) - expect(agentlessExporter.shouldRequestItrConfiguration()).to.be.true - agentlessExporter.getItrConfiguration({}, () => { + agentlessExporter.getLibraryConfiguration({}, () => { expect(scope.isDone()).to.be.true expect(agentlessExporter.canReportCodeCoverage()).to.be.true expect(agentlessExporter.shouldRequestSkippableSuites()).to.be.true @@ -85,7 +84,7 @@ describe('CI Visibility Agentless Exporter', () => { tags: {} }) agentlessExporter._resolveGit() - agentlessExporter.getItrConfiguration({}, () => { + agentlessExporter.getLibraryConfiguration({}, () => { agentlessExporter.getSkippableSuites({}, () => { expect(scope.isDone()).to.be.true done() @@ -107,8 +106,7 @@ describe('CI Visibility Agentless Exporter', () => { const agentlessExporter = new AgentlessCiVisibilityExporter({ url, isGitUploadEnabled: true, isIntelligentTestRunnerEnabled: true, tags: {} }) - expect(agentlessExporter.shouldRequestItrConfiguration()).to.be.true - agentlessExporter.getItrConfiguration({}, () => { + agentlessExporter.getLibraryConfiguration({}, () => { expect(scope.isDone()).to.be.true expect(agentlessExporter.canReportCodeCoverage()).to.be.true expect(agentlessExporter.shouldRequestSkippableSuites()).to.be.true @@ -130,7 +128,7 @@ describe('CI Visibility Agentless Exporter', () => { const agentlessExporter = new AgentlessCiVisibilityExporter({ url, isGitUploadEnabled: true, isIntelligentTestRunnerEnabled: true, tags: {} }) - agentlessExporter.getItrConfiguration({}, () => { + agentlessExporter.getLibraryConfiguration({}, () => { expect(scope.isDone()).to.be.true expect(agentlessExporter.canReportCodeCoverage()).to.be.true done() @@ -162,8 
+160,7 @@ describe('CI Visibility Agentless Exporter', () => { }) } - expect(agentlessExporter.shouldRequestItrConfiguration()).to.be.true - agentlessExporter.getItrConfiguration({}, (err) => { + agentlessExporter.getLibraryConfiguration({}, (err) => { expect(scope.isDone()).not.to.be.true expect(err.message).to.contain( 'Request to settings endpoint was not done because Datadog API key is not defined' diff --git a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js index 7a326d61293..c6066a67c8e 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js @@ -79,8 +79,8 @@ describe('CI Visibility Exporter', () => { }) }) - describe('getItrConfiguration', () => { - it('should upload git metadata when getItrConfiguration is called, regardless of ITR config', (done) => { + describe('getLibraryConfiguration', () => { + it('should upload git metadata when getLibraryConfiguration is called, regardless of ITR config', (done) => { const scope = nock(`http://localhost:${port}`) .post('/api/v2/git/repository/search_commits') .reply(200, JSON.stringify({ @@ -90,20 +90,22 @@ describe('CI Visibility Exporter', () => { .reply(202, '') const ciVisibilityExporter = new CiVisibilityExporter({ port, isGitUploadEnabled: true }) - ciVisibilityExporter.getItrConfiguration({}, () => { - expect(scope.isDone()).not.to.be.true + ciVisibilityExporter._resolveCanUseCiVisProtocol(true) + ciVisibilityExporter.getLibraryConfiguration({}, () => {}) + ciVisibilityExporter._gitUploadPromise.then(() => { + expect(scope.isDone()).to.be.true done() }) }) - context('if ITR is not enabled', () => { - it('should resolve immediately if ITR is not enabled', (done) => { + context('if ITR is disabled', () => { + it('should resolve immediately and not request settings', (done) => { const scope = nock(`http://localhost:${port}`) .post('/api/v2/libraries/tests/services/setting') .reply(200) const ciVisibilityExporter = new CiVisibilityExporter({ port }) - ciVisibilityExporter.getItrConfiguration({}, (err, itrConfig) => { - expect(itrConfig).to.eql({}) + ciVisibilityExporter.getLibraryConfiguration({}, (err, libraryConfig) => { + expect(libraryConfig).to.eql({}) expect(err).to.be.null expect(scope.isDone()).not.to.be.true done() @@ -137,7 +139,7 @@ describe('CI Visibility Exporter', () => { } }) - ciVisibilityExporter.getItrConfiguration({}, () => { + ciVisibilityExporter.getLibraryConfiguration({}, () => { expect(scope.isDone()).to.be.true expect(customConfig).to.eql({ 'my_custom_config': 'my_custom_config_value' @@ -162,8 +164,8 @@ describe('CI Visibility Exporter', () => { const ciVisibilityExporter = new CiVisibilityExporter({ port, isIntelligentTestRunnerEnabled: true }) - ciVisibilityExporter.getItrConfiguration({}, (err, itrConfig) => { - expect(itrConfig).to.eql({ + ciVisibilityExporter.getLibraryConfiguration({}, (err, libraryConfig) => { + expect(libraryConfig).to.eql({ requireGit: false, isCodeCoverageEnabled: true, isItrEnabled: true, @@ -192,7 +194,7 @@ describe('CI Visibility Exporter', () => { const ciVisibilityExporter = new CiVisibilityExporter({ port, isIntelligentTestRunnerEnabled: true }) expect(ciVisibilityExporter.shouldRequestSkippableSuites()).to.be.false - ciVisibilityExporter.getItrConfiguration({}, () => { + ciVisibilityExporter.getLibraryConfiguration({}, () => { 
expect(ciVisibilityExporter.shouldRequestSkippableSuites()).to.be.true done() }) @@ -227,12 +229,12 @@ describe('CI Visibility Exporter', () => { port, isIntelligentTestRunnerEnabled: true }) ciVisibilityExporter._resolveCanUseCiVisProtocol(true) - expect(ciVisibilityExporter.shouldRequestItrConfiguration()).to.be.true - ciVisibilityExporter.getItrConfiguration({}, (err, itrConfig) => { + expect(ciVisibilityExporter.shouldRequestLibraryConfiguration()).to.be.true + ciVisibilityExporter.getLibraryConfiguration({}, (err, libraryConfig) => { expect(scope.isDone()).to.be.true expect(err).to.be.null // the second request returns require_git: false - expect(itrConfig.requireGit).to.be.false + expect(libraryConfig.requireGit).to.be.false expect(hasUploadedGit).to.be.true done() }) @@ -269,12 +271,12 @@ describe('CI Visibility Exporter', () => { port, isIntelligentTestRunnerEnabled: true }) ciVisibilityExporter._resolveCanUseCiVisProtocol(true) - expect(ciVisibilityExporter.shouldRequestItrConfiguration()).to.be.true - ciVisibilityExporter.getItrConfiguration({}, (err, itrConfig) => { + expect(ciVisibilityExporter.shouldRequestLibraryConfiguration()).to.be.true + ciVisibilityExporter.getLibraryConfiguration({}, (err, libraryConfig) => { expect(scope.isDone()).to.be.true expect(err).to.be.null // the second request returns require_git: false - expect(itrConfig.requireGit).to.be.false + expect(libraryConfig.requireGit).to.be.false done() }) ciVisibilityExporter._resolveGit() @@ -352,7 +354,7 @@ describe('CI Visibility Exporter', () => { } }) - ciVisibilityExporter._itrConfig = { isSuitesSkippingEnabled: true } + ciVisibilityExporter._libraryConfig = { isSuitesSkippingEnabled: true } ciVisibilityExporter._resolveCanUseCiVisProtocol(true) ciVisibilityExporter.getSkippableSuites({}, () => { @@ -393,7 +395,7 @@ describe('CI Visibility Exporter', () => { isGitUploadEnabled: true }) - ciVisibilityExporter._itrConfig = { isSuitesSkippingEnabled: true } + ciVisibilityExporter._libraryConfig = { isSuitesSkippingEnabled: true } ciVisibilityExporter._resolveCanUseCiVisProtocol(true) ciVisibilityExporter.getSkippableSuites({}, (err, skippableSuites) => { @@ -413,7 +415,7 @@ describe('CI Visibility Exporter', () => { const ciVisibilityExporter = new CiVisibilityExporter({ port, isIntelligentTestRunnerEnabled: true }) - ciVisibilityExporter._itrConfig = { isSuitesSkippingEnabled: true } + ciVisibilityExporter._libraryConfig = { isSuitesSkippingEnabled: true } ciVisibilityExporter._resolveCanUseCiVisProtocol(true) ciVisibilityExporter.getSkippableSuites({}, (err, skippableSuites) => { From 00164ca5108fdaf85ecd0d89e8c65dfccd849338 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Fri, 2 Feb 2024 10:19:35 +0100 Subject: [PATCH 11/44] [ci-visibility] Use gzip for skippable endpoint if it's available (#4016) --- .../exporters/ci-visibility-exporter.js | 1 + .../get-skippable-suites.js | 5 + .../exporters/ci-visibility-exporter.spec.js | 99 +++++++++++++++++++ 3 files changed, 105 insertions(+) diff --git a/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js b/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js index d2833cec328..58e4495c6b2 100644 --- a/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js +++ b/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js @@ -107,6 +107,7 @@ class CiVisibilityExporter extends AgentInfoExporter { env: this._config.env, service: this._config.service, 
isEvpProxy: !!this._isUsingEvpProxy, + isGzipCompatible: this._isGzipCompatible, evpProxyPrefix: this.evpProxyPrefix, custom: getTestConfigurationTags(this._config.tags), ...testConfiguration diff --git a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js index 1926e18b925..9d410800375 100644 --- a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js +++ b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js @@ -16,6 +16,7 @@ function getSkippableSuites ({ url, isEvpProxy, evpProxyPrefix, + isGzipCompatible, env, service, repositoryUrl, @@ -38,6 +39,10 @@ function getSkippableSuites ({ url } + if (isGzipCompatible) { + options.headers['accept-encoding'] = 'gzip' + } + if (isEvpProxy) { options.path = `${evpProxyPrefix}/api/v2/ci/tests/skippable` options.headers['X-Datadog-EVP-Subdomain'] = 'api' diff --git a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js index c6066a67c8e..db9da2db76d 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js @@ -4,6 +4,7 @@ require('../../../../dd-trace/test/setup/tap') const cp = require('child_process') const fs = require('fs') +const zlib = require('zlib') const CiVisibilityExporter = require('../../../src/ci-visibility/exporters/ci-visibility-exporter') const nock = require('nock') @@ -427,6 +428,104 @@ describe('CI Visibility Exporter', () => { ciVisibilityExporter._resolveGit(new Error('could not upload git metadata')) }) }) + context('if ITR is enabled and the exporter can use gzip', () => { + it('should request the API with gzip', (done) => { + nock(`http://localhost:${port}`) + .post('/api/v2/git/repository/search_commits') + .reply(200, JSON.stringify({ + data: [] + })) + .post('/api/v2/git/repository/packfile') + .reply(202, '') + + let requestHeaders = {} + const scope = nock(`http://localhost:${port}`) + .post('/api/v2/ci/tests/skippable') + .reply(200, function () { + requestHeaders = this.req.headers + + return zlib.gzipSync( + JSON.stringify({ + meta: { + correlation_id: '1234' + }, + data: [{ + type: 'suite', + attributes: { + suite: 'ci-visibility/test/ci-visibility-test.js' + } + }] + }) + ) + }, { + 'content-encoding': 'gzip' + }) + const ciVisibilityExporter = new CiVisibilityExporter({ + port, + isIntelligentTestRunnerEnabled: true, + isGitUploadEnabled: true + }) + ciVisibilityExporter._libraryConfig = { isSuitesSkippingEnabled: true } + ciVisibilityExporter._resolveCanUseCiVisProtocol(true) + ciVisibilityExporter._isGzipCompatible = true + + ciVisibilityExporter.getSkippableSuites({}, (err, skippableSuites) => { + expect(err).to.be.null + expect(skippableSuites).to.eql(['ci-visibility/test/ci-visibility-test.js']) + expect(scope.isDone()).to.be.true + expect(requestHeaders['accept-encoding']).to.equal('gzip') + done() + }) + ciVisibilityExporter.sendGitMetadata() + }) + }) + context('if ITR is enabled and the exporter can not use gzip', () => { + it('should request the API without gzip', (done) => { + nock(`http://localhost:${port}`) + .post('/api/v2/git/repository/search_commits') + .reply(200, JSON.stringify({ + data: [] + })) + .post('/api/v2/git/repository/packfile') + .reply(202, '') + + let requestHeaders = {} + const scope = 
nock(`http://localhost:${port}`) + .post('/api/v2/ci/tests/skippable') + .reply(200, function () { + requestHeaders = this.req.headers + + return JSON.stringify({ + meta: { + correlation_id: '1234' + }, + data: [{ + type: 'suite', + attributes: { + suite: 'ci-visibility/test/ci-visibility-test.js' + } + }] + }) + }) + const ciVisibilityExporter = new CiVisibilityExporter({ + port, + isIntelligentTestRunnerEnabled: true, + isGitUploadEnabled: true + }) + ciVisibilityExporter._libraryConfig = { isSuitesSkippingEnabled: true } + ciVisibilityExporter._resolveCanUseCiVisProtocol(true) + ciVisibilityExporter._isGzipCompatible = false + + ciVisibilityExporter.getSkippableSuites({}, (err, skippableSuites) => { + expect(err).to.be.null + expect(skippableSuites).to.eql(['ci-visibility/test/ci-visibility-test.js']) + expect(scope.isDone()).to.be.true + expect(requestHeaders['accept-encoding']).not.to.equal('gzip') + done() + }) + ciVisibilityExporter.sendGitMetadata() + }) + }) }) describe('export', () => { From 7a8fd53c7b4cd95eaea56d2a288330400b33084c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Fri, 2 Feb 2024 15:59:03 +0100 Subject: [PATCH 12/44] [ci-visibility] Attempt to use repository root to find CODEOWNERS file (#4021) --- packages/datadog-plugin-cypress/src/plugin.js | 8 +++-- packages/dd-trace/src/plugins/ci_plugin.js | 8 +++-- packages/dd-trace/src/plugins/util/test.js | 32 +++++++++++++++---- .../dd-trace/test/plugins/util/test.spec.js | 23 ++++++++++++- 4 files changed, 58 insertions(+), 13 deletions(-) diff --git a/packages/datadog-plugin-cypress/src/plugin.js b/packages/datadog-plugin-cypress/src/plugin.js index a846806f70d..9909c9d3a3c 100644 --- a/packages/datadog-plugin-cypress/src/plugin.js +++ b/packages/datadog-plugin-cypress/src/plugin.js @@ -45,7 +45,8 @@ const { GIT_REPOSITORY_URL, GIT_COMMIT_SHA, GIT_BRANCH, - CI_PROVIDER_NAME + CI_PROVIDER_NAME, + CI_WORKSPACE_PATH } = require('../../dd-trace/src/plugins/util/tags') const { OS_VERSION, @@ -186,7 +187,8 @@ module.exports = (on, config) => { [RUNTIME_NAME]: runtimeName, [RUNTIME_VERSION]: runtimeVersion, [GIT_BRANCH]: branch, - [CI_PROVIDER_NAME]: ciProviderName + [CI_PROVIDER_NAME]: ciProviderName, + [CI_WORKSPACE_PATH]: repositoryRoot } = testEnvironmentMetadata const isUnsupportedCIProvider = !ciProviderName @@ -205,7 +207,7 @@ module.exports = (on, config) => { testLevel: 'test' } - const codeOwnersEntries = getCodeOwnersFileEntries() + const codeOwnersEntries = getCodeOwnersFileEntries(repositoryRoot) let activeSpan = null let testSessionSpan = null diff --git a/packages/dd-trace/src/plugins/ci_plugin.js b/packages/dd-trace/src/plugins/ci_plugin.js index 90d354a31ae..10ba66caff2 100644 --- a/packages/dd-trace/src/plugins/ci_plugin.js +++ b/packages/dd-trace/src/plugins/ci_plugin.js @@ -27,7 +27,7 @@ const { TELEMETRY_EVENT_CREATED, TELEMETRY_ITR_SKIPPED } = require('../ci-visibility/telemetry') -const { CI_PROVIDER_NAME, GIT_REPOSITORY_URL, GIT_COMMIT_SHA, GIT_BRANCH } = require('./util/tags') +const { CI_PROVIDER_NAME, GIT_REPOSITORY_URL, GIT_COMMIT_SHA, GIT_BRANCH, CI_WORKSPACE_PATH } = require('./util/tags') const { OS_VERSION, OS_PLATFORM, OS_ARCHITECTURE, RUNTIME_NAME, RUNTIME_VERSION } = require('./util/env') module.exports = class CiPlugin extends Plugin { @@ -140,7 +140,6 @@ module.exports = class CiPlugin extends Plugin { configure (config) { super.configure(config) this.testEnvironmentMetadata = getTestEnvironmentMetadata(this.constructor.id, this.config) - 
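The intent of the plugins/util/test.js change that follows is that CODEOWNERS resolution now starts from the repository root reported by the CI provider (CI_WORKSPACE_PATH) and only falls back to process.cwd() when nothing is found there. A minimal sketch of that lookup order, with the candidate list abbreviated; this is an illustration of the behaviour, not the literal patch:

    const fs = require('fs')
    const path = require('path')

    // Abbreviated list; the plugin also checks other locations such as .gitlab/CODEOWNERS.
    const POSSIBLE_CODEOWNERS_LOCATIONS = ['CODEOWNERS', '.github/CODEOWNERS']

    // Return the first CODEOWNERS candidate readable under rootDir, or '' if none is.
    function readCodeOwners (rootDir) {
      for (const location of POSSIBLE_CODEOWNERS_LOCATIONS) {
        try {
          return fs.readFileSync(path.join(rootDir, location)).toString()
        } catch (e) {
          // try the next candidate location
        }
      }
      return ''
    }

    // Prefer the CI workspace path; fall back to the directory the tests run from.
    function findCodeOwnersContent (repositoryRoot) {
      return readCodeOwners(repositoryRoot || process.cwd()) || readCodeOwners(process.cwd())
    }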
this.codeOwnersEntries = getCodeOwnersFileEntries() const { [GIT_REPOSITORY_URL]: repositoryUrl, @@ -151,9 +150,12 @@ module.exports = class CiPlugin extends Plugin { [RUNTIME_NAME]: runtimeName, [RUNTIME_VERSION]: runtimeVersion, [GIT_BRANCH]: branch, - [CI_PROVIDER_NAME]: ciProviderName + [CI_PROVIDER_NAME]: ciProviderName, + [CI_WORKSPACE_PATH]: repositoryRoot } = this.testEnvironmentMetadata + this.codeOwnersEntries = getCodeOwnersFileEntries(repositoryRoot) + this.isUnsupportedCIProvider = !ciProviderName this.testConfiguration = { diff --git a/packages/dd-trace/src/plugins/util/test.js b/packages/dd-trace/src/plugins/util/test.js index 7fa6d1b1c0d..2f64e9863b6 100644 --- a/packages/dd-trace/src/plugins/util/test.js +++ b/packages/dd-trace/src/plugins/util/test.js @@ -281,16 +281,36 @@ const POSSIBLE_CODEOWNERS_LOCATIONS = [ '.gitlab/CODEOWNERS' ] -function getCodeOwnersFileEntries (rootDir = process.cwd()) { - let codeOwnersContent - - POSSIBLE_CODEOWNERS_LOCATIONS.forEach(location => { +function readCodeOwners (rootDir) { + for (const location of POSSIBLE_CODEOWNERS_LOCATIONS) { try { - codeOwnersContent = fs.readFileSync(`${rootDir}/${location}`).toString() + return fs.readFileSync(path.join(rootDir, location)).toString() } catch (e) { // retry with next path } - }) + } + return '' +} + +function getCodeOwnersFileEntries (rootDir) { + let codeOwnersContent + let usedRootDir = rootDir + let isTriedCwd = false + + const processCwd = process.cwd() + + if (!usedRootDir || usedRootDir === processCwd) { + usedRootDir = processCwd + isTriedCwd = true + } + + codeOwnersContent = readCodeOwners(usedRootDir) + + // If we haven't found CODEOWNERS in the provided root dir, we try with process.cwd() + if (!codeOwnersContent && !isTriedCwd) { + codeOwnersContent = readCodeOwners(processCwd) + } + if (!codeOwnersContent) { return null } diff --git a/packages/dd-trace/test/plugins/util/test.spec.js b/packages/dd-trace/test/plugins/util/test.spec.js index a418af72305..9ba79403013 100644 --- a/packages/dd-trace/test/plugins/util/test.spec.js +++ b/packages/dd-trace/test/plugins/util/test.spec.js @@ -79,9 +79,30 @@ describe('getCodeOwnersFileEntries', () => { }) it('returns null if CODEOWNERS can not be found', () => { const rootDir = path.join(__dirname, '__not_found__') + // We have to change the working directory, + // otherwise it will find the CODEOWNERS file in the root of dd-trace-js + const oldCwd = process.cwd() + process.chdir(path.join(__dirname)) const codeOwnersFileEntries = getCodeOwnersFileEntries(rootDir) - expect(codeOwnersFileEntries).to.equal(null) + process.chdir(oldCwd) + }) + it('tries both input rootDir and process.cwd()', () => { + const rootDir = path.join(__dirname, '__not_found__') + const oldCwd = process.cwd() + + process.chdir(path.join(__dirname, '__test__')) + const codeOwnersFileEntries = getCodeOwnersFileEntries(rootDir) + + expect(codeOwnersFileEntries[0]).to.eql({ + pattern: 'packages/dd-trace/test/plugins/util/test.spec.js', + owners: ['@datadog-ci-app'] + }) + expect(codeOwnersFileEntries[1]).to.eql({ + pattern: 'packages/dd-trace/test/plugins/util/*', + owners: ['@datadog-dd-trace-js'] + }) + process.chdir(oldCwd) }) }) From d38f245d4e265e80adf5ed0ade048d747980278f Mon Sep 17 00:00:00 2001 From: simon-id Date: Fri, 2 Feb 2024 17:34:21 +0100 Subject: [PATCH 13/44] Supports form data for nextjs body instrumentation (#4008) --------- Co-authored-by: Ugaitz Urien --- packages/datadog-instrumentations/src/next.js | 20 ++- .../test/appsec/index.next.plugin.spec.js | 
114 ++++++------------ .../app-dir/app/api/test-formdata/route.js | 18 +++ .../pages/api/test-formdata/index.js | 10 ++ 4 files changed, 84 insertions(+), 78 deletions(-) create mode 100644 packages/dd-trace/test/appsec/next/app-dir/app/api/test-formdata/route.js create mode 100644 packages/dd-trace/test/appsec/next/pages-dir/pages/api/test-formdata/index.js diff --git a/packages/datadog-instrumentations/src/next.js b/packages/datadog-instrumentations/src/next.js index 8c679724382..863a7058f25 100644 --- a/packages/datadog-instrumentations/src/next.js +++ b/packages/datadog-instrumentations/src/next.js @@ -290,9 +290,23 @@ addHook({ shimmer.massWrap(request.NextRequest.prototype, ['text', 'json'], function (originalMethod) { return async function wrappedJson () { const body = await originalMethod.apply(this, arguments) - bodyParsedChannel.publish({ - body - }) + + bodyParsedChannel.publish({ body }) + + return body + } + }) + + shimmer.wrap(request.NextRequest.prototype, 'formData', function (originalFormData) { + return async function wrappedFormData () { + const body = await originalFormData.apply(this, arguments) + + let normalizedBody = body + if (typeof body.entries === 'function') { + normalizedBody = Object.fromEntries(body.entries()) + } + bodyParsedChannel.publish({ body: normalizedBody }) + return body } }) diff --git a/packages/dd-trace/test/appsec/index.next.plugin.spec.js b/packages/dd-trace/test/appsec/index.next.plugin.spec.js index b67ca79eecb..9cc23a6c18e 100644 --- a/packages/dd-trace/test/appsec/index.next.plugin.spec.js +++ b/packages/dd-trace/test/appsec/index.next.plugin.spec.js @@ -155,6 +155,25 @@ describe('test suite', () => { }) } + function getFindBodyThreatMethod (done) { + return function findBodyThreat (traces) { + let attackFound = false + + traces.forEach(trace => { + trace.forEach(span => { + if (span.meta['_dd.appsec.json']) { + attackFound = true + } + }) + }) + + if (attackFound) { + agent.unsubscribe(findBodyThreat) + done() + } + } + } + tests.forEach(({ appName, serverPath }) => { describe(`should detect threats in ${appName}`, () => { initApp(appName) @@ -164,22 +183,7 @@ describe('test suite', () => { it('in request body', function (done) { this.timeout(5000) - function findBodyThreat (traces) { - let attackFound = false - - traces.forEach(trace => { - trace.forEach(span => { - if (span.meta['_dd.appsec.json']) { - attackFound = true - } - }) - }) - - if (attackFound) { - agent.unsubscribe(findBodyThreat) - done() - } - } + const findBodyThreat = getFindBodyThreatMethod(done) agent.subscribe(findBodyThreat) axios @@ -188,27 +192,26 @@ describe('test suite', () => { }).catch(e => { done(e) }) }) - if (appName === 'app-dir') { - it('in request body with .text() function', function (done) { - this.timeout(5000) + it('in form data body', function (done) { + this.timeout(5000) - function findBodyThreat (traces) { - let attackFound = false + const findBodyThreat = getFindBodyThreatMethod(done) - traces.forEach(trace => { - trace.forEach(span => { - if (span.meta['_dd.appsec.json']) { - attackFound = true - } - }) - }) + agent.subscribe(findBodyThreat) - if (attackFound) { - agent.unsubscribe(findBodyThreat) - done() - } - } + axios + .post(`http://127.0.0.1:${port}/api/test-formdata`, new URLSearchParams({ + key: 'testattack' + })).catch(e => { + done(e) + }) + }) + if (appName === 'app-dir') { + it('in request body with .text() function', function (done) { + this.timeout(5000) + + const findBodyThreat = getFindBodyThreatMethod(done) 
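For context on the formData wrapper added above: request.formData() resolves to a Web API FormData instance, whose fields are only reachable through its iterator, while the body-parsed channel (and the AppSec rules behind it) works on a plain object. A standalone illustration of the normalization step, assuming Node.js 18+ where FormData is a global:

    const form = new FormData()
    form.append('key', 'testattack')
    form.append('other', 'safe')

    // Equivalent to what the wrapper publishes when body.entries is a function:
    const normalizedBody = Object.fromEntries(form.entries())
    console.log(normalizedBody) // { key: 'testattack', other: 'safe' }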
agent.subscribe(findBodyThreat) axios .post(`http://127.0.0.1:${port}/api/test-text`, { @@ -222,20 +225,7 @@ describe('test suite', () => { it('in request query', function (done) { this.timeout(5000) - function findBodyThreat (traces) { - let attackFound = false - traces.forEach(trace => { - trace.forEach(span => { - if (span.meta['_dd.appsec.json']) { - attackFound = true - } - }) - }) - if (attackFound) { - agent.unsubscribe(findBodyThreat) - done() - } - } + const findBodyThreat = getFindBodyThreatMethod(done) axios .get(`http://127.0.0.1:${port}/api/test?param=testattack`) @@ -247,20 +237,7 @@ describe('test suite', () => { it('in request query with array params, attack in the second item', function (done) { this.timeout(5000) - function findBodyThreat (traces) { - let attackFound = false - traces.forEach(trace => { - trace.forEach(span => { - if (span.meta['_dd.appsec.json']) { - attackFound = true - } - }) - }) - if (attackFound) { - agent.unsubscribe(findBodyThreat) - done() - } - } + const findBodyThreat = getFindBodyThreatMethod(done) axios .get(`http://127.0.0.1:${port}/api/test?param[]=safe¶m[]=testattack`) @@ -272,20 +249,7 @@ describe('test suite', () => { it('in request query with array params, threat in the first item', function (done) { this.timeout(5000) - function findBodyThreat (traces) { - let attackFound = false - traces.forEach(trace => { - trace.forEach(span => { - if (span.meta['_dd.appsec.json']) { - attackFound = true - } - }) - }) - if (attackFound) { - agent.unsubscribe(findBodyThreat) - done() - } - } + const findBodyThreat = getFindBodyThreatMethod(done) axios .get(`http://127.0.0.1:${port}/api/test?param[]=testattack¶m[]=safe`) diff --git a/packages/dd-trace/test/appsec/next/app-dir/app/api/test-formdata/route.js b/packages/dd-trace/test/appsec/next/app-dir/app/api/test-formdata/route.js new file mode 100644 index 00000000000..69109a530e6 --- /dev/null +++ b/packages/dd-trace/test/appsec/next/app-dir/app/api/test-formdata/route.js @@ -0,0 +1,18 @@ +import { NextResponse } from 'next/server' +export async function POST (request) { + const body = await request.formData() + + if (!body.entries) { + return NextResponse.json({ + message: 'Instrumentation modified form data' + }, { + status: 500 + }) + } + + return NextResponse.json({ + now: Date.now(), + cache: 'no-store', + data: body + }) +} diff --git a/packages/dd-trace/test/appsec/next/pages-dir/pages/api/test-formdata/index.js b/packages/dd-trace/test/appsec/next/pages-dir/pages/api/test-formdata/index.js new file mode 100644 index 00000000000..538520f5eaf --- /dev/null +++ b/packages/dd-trace/test/appsec/next/pages-dir/pages/api/test-formdata/index.js @@ -0,0 +1,10 @@ +// Next.js API route support: https://nextjs.org/docs/api-routes/introduction + +export default async function POST (req, res) { + const body = req.body + res.status(200).json({ + cache: 'no-store', + data: body, + query: req.query + }) +} From 89a25e2923022b5b22155327feacec008b52bfb6 Mon Sep 17 00:00:00 2001 From: Ugaitz Urien Date: Mon, 5 Feb 2024 11:39:21 +0100 Subject: [PATCH 14/44] Update rewriter to fix `Error.prepareStackTrace` overrides with iast enabled (#4028) --- package.json | 2 +- yarn.lock | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package.json b/package.json index c99684316bd..8b4c45be140 100644 --- a/package.json +++ b/package.json @@ -70,7 +70,7 @@ }, "dependencies": { "@datadog/native-appsec": "7.0.0", - "@datadog/native-iast-rewriter": "2.2.2", + "@datadog/native-iast-rewriter": "2.2.3", 
"@datadog/native-iast-taint-tracking": "1.6.4", "@datadog/native-metrics": "^2.0.0", "@datadog/pprof": "5.0.0", diff --git a/yarn.lock b/yarn.lock index 9fc7b354f71..2689de62d51 100644 --- a/yarn.lock +++ b/yarn.lock @@ -419,10 +419,10 @@ dependencies: node-gyp-build "^3.9.0" -"@datadog/native-iast-rewriter@2.2.2": - version "2.2.2" - resolved "https://registry.yarnpkg.com/@datadog/native-iast-rewriter/-/native-iast-rewriter-2.2.2.tgz#3f7feaf6be1af4c83ad063065b8ed509bbaf11cb" - integrity sha512-13ZBhJpjZ/tiV6rYfyAf/ITye9cyd3x12M/2NKhD4Ivev4N4uKBREAjpArOtzKtPXZ5b6oXwVV4ofT1SHoYyzA== +"@datadog/native-iast-rewriter@2.2.3": + version "2.2.3" + resolved "https://registry.yarnpkg.com/@datadog/native-iast-rewriter/-/native-iast-rewriter-2.2.3.tgz#7d512abdb03dcc238825e8d6c90cebf782686db3" + integrity sha512-RCbflf8BJ++h99I7iA4NxTA1lx7YqB+sPQkJNSZKxXyEXtWl9J4XsDV9C/sB9iGbf1PVY77tFvoGm5/WpUV4IA== dependencies: lru-cache "^7.14.0" node-gyp-build "^4.5.0" From 165b3402645c4ffd9fa2e7f45de86bfcfc568dbd Mon Sep 17 00:00:00 2001 From: simon-id Date: Mon, 5 Feb 2024 17:21:50 +0100 Subject: [PATCH 15/44] Fix error due to undefined options (#4029) When options are undefined, doing `options.timeout` throws error and crash the whole node process. https://github.com/DataDog/dd-trace-js/issues/3939 Co-authored-by: Atif Saddique <134278544+atif-saddique-deel@users.noreply.github.com> --- packages/datadog-plugin-http/src/client.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/datadog-plugin-http/src/client.js b/packages/datadog-plugin-http/src/client.js index 42833bb896f..5d48f3331e9 100644 --- a/packages/datadog-plugin-http/src/client.js +++ b/packages/datadog-plugin-http/src/client.js @@ -122,7 +122,7 @@ class HttpClientPlugin extends ClientPlugin { // conditions for no error: // 1. not using a custom agent instance with custom timeout specified // 2. 
no invocation of `req.setTimeout` - if (!args.options.agent?.options.timeout && !customRequestTimeout) return + if (!args.options.agent?.options?.timeout && !customRequestTimeout) return span.setTag('error', 1) } From d28f33ed45f2ed5497d628dcc39a0f8e96ecc656 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Mon, 5 Feb 2024 17:33:17 +0100 Subject: [PATCH 16/44] [ci-visibility] Add get known tests request (#4015) --- .../early-flake-detection/get-known-tests.js | 83 +++++++++++ .../exporters/ci-visibility-exporter.js | 59 +++++++- .../requests/get-library-configuration.js | 9 +- packages/dd-trace/src/config.js | 6 + packages/dd-trace/src/plugins/ci_plugin.js | 12 ++ .../exporters/ci-visibility-exporter.spec.js | 135 +++++++++++++++++- packages/dd-trace/test/config.spec.js | 10 ++ 7 files changed, 309 insertions(+), 5 deletions(-) create mode 100644 packages/dd-trace/src/ci-visibility/early-flake-detection/get-known-tests.js diff --git a/packages/dd-trace/src/ci-visibility/early-flake-detection/get-known-tests.js b/packages/dd-trace/src/ci-visibility/early-flake-detection/get-known-tests.js new file mode 100644 index 00000000000..e9df9daa04c --- /dev/null +++ b/packages/dd-trace/src/ci-visibility/early-flake-detection/get-known-tests.js @@ -0,0 +1,83 @@ +const request = require('../../exporters/common/request') +const id = require('../../id') +const log = require('../../log') + +function getKnownTests ({ + url, + isEvpProxy, + evpProxyPrefix, + isGzipCompatible, + env, + service, + repositoryUrl, + sha, + osVersion, + osPlatform, + osArchitecture, + runtimeName, + runtimeVersion, + custom +}, done) { + const options = { + path: '/api/v2/ci/libraries/tests', + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + timeout: 20000, + url + } + + if (isGzipCompatible) { + options.headers['accept-encoding'] = 'gzip' + } + + if (isEvpProxy) { + options.path = `${evpProxyPrefix}/api/v2/ci/libraries/tests` + options.headers['X-Datadog-EVP-Subdomain'] = 'api' + } else { + const apiKey = process.env.DATADOG_API_KEY || process.env.DD_API_KEY + if (!apiKey) { + return done(new Error('Skippable suites were not fetched because Datadog API key is not defined.')) + } + + options.headers['dd-api-key'] = apiKey + } + + const data = JSON.stringify({ + data: { + id: id().toString(10), + type: 'ci_app_libraries_tests_request', + attributes: { + configurations: { + 'os.platform': osPlatform, + 'os.version': osVersion, + 'os.architecture': osArchitecture, + 'runtime.name': runtimeName, + 'runtime.version': runtimeVersion, + custom + }, + service, + env, + repository_url: repositoryUrl, + sha + } + } + }) + + request(data, options, (err, res) => { + if (err) { + done(err) + } else { + try { + const { data: { attributes: { test_full_names: knownTests } } } = JSON.parse(res) + log.debug(() => `Number of received known tests: ${Object.keys(knownTests).length}`) + done(null, knownTests) + } catch (err) { + done(err) + } + } + }) +} + +module.exports = { getKnownTests } diff --git a/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js b/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js index 58e4495c6b2..28c7e79744c 100644 --- a/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js +++ b/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js @@ -5,6 +5,7 @@ const URL = require('url').URL const { sendGitMetadata: sendGitMetadataRequest } = require('./git/git_metadata') const { 
getLibraryConfiguration: getLibraryConfigurationRequest } = require('../requests/get-library-configuration') const { getSkippableSuites: getSkippableSuitesRequest } = require('../intelligent-test-runner/get-skippable-suites') +const { getKnownTests: getKnownTestsRequest } = require('../early-flake-detection/get-known-tests') const log = require('../../log') const AgentInfoExporter = require('../../exporters/common/agent-info-exporter') @@ -79,6 +80,13 @@ class CiVisibilityExporter extends AgentInfoExporter { this._libraryConfig?.isSuitesSkippingEnabled) } + shouldRequestKnownTests () { + return !!( + this._config.isEarlyFlakeDetectionEnabled && + this._libraryConfig?.isEarlyFlakeDetectionEnabled + ) + } + shouldRequestLibraryConfiguration () { return this._config.isIntelligentTestRunnerEnabled } @@ -116,6 +124,30 @@ class CiVisibilityExporter extends AgentInfoExporter { }) } + getKnownTests (testConfiguration, callback) { + if (!this.shouldRequestKnownTests()) { + return callback(null) + } + this._canUseCiVisProtocolPromise.then((canUseCiVisProtocol) => { + if (!canUseCiVisProtocol) { + return callback( + new Error('Known tests can not be requested because CI Visibility protocol can not be used') + ) + } + const configuration = { + url: this._getApiUrl(), + env: this._config.env, + service: this._config.service, + isEvpProxy: !!this._isUsingEvpProxy, + evpProxyPrefix: this.evpProxyPrefix, + custom: getTestConfigurationTags(this._config.tags), + isGzipCompatible: this._isGzipCompatible, + ...testConfiguration + } + getKnownTestsRequest(configuration, callback) + }) + } + /** * We can't request library configuration until we know whether we can use the * CI Visibility Protocol, hence the this._canUseCiVisProtocol promise. @@ -145,7 +177,7 @@ class CiVisibilityExporter extends AgentInfoExporter { * where the tests run in a subprocess, like Jest, * because `getLibraryConfiguration` is called only once in the main process. 
*/ - this._libraryConfig = libraryConfig + this._libraryConfig = this.getConfiguration(libraryConfig) if (err) { callback(err, {}) @@ -156,8 +188,8 @@ class CiVisibilityExporter extends AgentInfoExporter { return callback(gitUploadError, {}) } getLibraryConfigurationRequest(configuration, (err, finalLibraryConfig) => { - this._libraryConfig = finalLibraryConfig - callback(err, finalLibraryConfig) + this._libraryConfig = this.getConfiguration(finalLibraryConfig) + callback(err, this._libraryConfig) }) }) } else { @@ -167,6 +199,27 @@ class CiVisibilityExporter extends AgentInfoExporter { }) } + // Takes into account potential kill switches + getConfiguration (remoteConfiguration) { + if (!remoteConfiguration) { + return {} + } + const { + isCodeCoverageEnabled, + isSuitesSkippingEnabled, + isItrEnabled, + requireGit, + isEarlyFlakeDetectionEnabled + } = remoteConfiguration + return { + isCodeCoverageEnabled, + isSuitesSkippingEnabled, + isItrEnabled, + requireGit, + isEarlyFlakeDetectionEnabled: isEarlyFlakeDetectionEnabled && this._config.isEarlyFlakeDetectionEnabled + } + } + sendGitMetadata (repositoryUrl) { if (!this._config.isGitUploadEnabled) { return diff --git a/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js b/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js index ea9340f4224..61ed0e70ca1 100644 --- a/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js +++ b/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js @@ -94,7 +94,14 @@ function getLibraryConfiguration ({ } } = JSON.parse(res) - const settings = { isCodeCoverageEnabled, isSuitesSkippingEnabled, isItrEnabled, requireGit } + const settings = { + isCodeCoverageEnabled, + isSuitesSkippingEnabled, + isItrEnabled, + requireGit, + // TODO: change to backend response + isEarlyFlakeDetectionEnabled: false + } log.debug(() => `Remote settings: ${JSON.stringify(settings)}`) diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index 7b992e31194..7f0627bb655 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -172,6 +172,11 @@ class Config { false ) + const DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED = coalesce( + process.env.DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED, + true + ) + const DD_TRACE_MEMCACHED_COMMAND_ENABLED = coalesce( process.env.DD_TRACE_MEMCACHED_COMMAND_ENABLED, false @@ -666,6 +671,7 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) 
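Taken together, the library-config kill switch above and the new DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED setting (which defaults to true) gate the feature: early flake detection is only active when the process runs in CI Visibility mode, the environment variable has not disabled it, and the settings endpoint also reports it as enabled. A small illustrative reduction of that combination, not the literal implementation:

    // The local flag acts as a kill switch over whatever the backend returns.
    function resolveEarlyFlakeDetection (localEnabled, remoteSettings) {
      return Boolean(localEnabled && remoteSettings && remoteSettings.isEarlyFlakeDetectionEnabled)
    }

    resolveEarlyFlakeDetection(true, { isEarlyFlakeDetectionEnabled: true })  // true
    resolveEarlyFlakeDetection(true, { isEarlyFlakeDetectionEnabled: false }) // false
    resolveEarlyFlakeDetection(false, { isEarlyFlakeDetectionEnabled: true }) // false, the local kill switch wins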
this.gitMetadataEnabled = isTrue(DD_TRACE_GIT_METADATA_ENABLED) this.isManualApiEnabled = this.isCiVisibility && isTrue(DD_CIVISIBILITY_MANUAL_API_ENABLED) + this.isEarlyFlakeDetectionEnabled = this.isCiVisibility && isTrue(DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED) this.openaiSpanCharLimit = DD_OPENAI_SPAN_CHAR_LIMIT diff --git a/packages/dd-trace/src/plugins/ci_plugin.js b/packages/dd-trace/src/plugins/ci_plugin.js index 10ba66caff2..e8565bfe174 100644 --- a/packages/dd-trace/src/plugins/ci_plugin.js +++ b/packages/dd-trace/src/plugins/ci_plugin.js @@ -115,6 +115,18 @@ module.exports = class CiPlugin extends Plugin { }) this.telemetry.count(TELEMETRY_ITR_SKIPPED, { testLevel: 'suite' }, skippedSuites.length) }) + + this.addSub(`ci:${this.constructor.id}:known-tests`, ({ onDone }) => { + if (!this.tracer._exporter?.getKnownTests) { + return onDone({ err: new Error('CI Visibility was not initialized correctly') }) + } + this.tracer._exporter.getKnownTests(this.testConfiguration, (err, knownTests) => { + if (err) { + log.error(`Known tests could not be fetched. ${err.message}`) + } + onDone({ err, knownTests }) + }) + }) } get telemetry () { diff --git a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js index db9da2db76d..0fa82baba52 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js @@ -170,7 +170,8 @@ describe('CI Visibility Exporter', () => { requireGit: false, isCodeCoverageEnabled: true, isItrEnabled: true, - isSuitesSkippingEnabled: true + isSuitesSkippingEnabled: true, + isEarlyFlakeDetectionEnabled: false }) expect(err).not.to.exist expect(scope.isDone()).to.be.true @@ -644,4 +645,136 @@ describe('CI Visibility Exporter', () => { }) }) }) + + describe('getKnownTests', () => { + context('if early flake detection is disabled', () => { + it('should resolve immediately to undefined', (done) => { + const scope = nock(`http://localhost:${port}`) + .post('/api/v2/ci/libraries/tests') + .reply(200) + + const ciVisibilityExporter = new CiVisibilityExporter({ port, isEarlyFlakeDetectionEnabled: false }) + + ciVisibilityExporter._resolveCanUseCiVisProtocol(true) + + ciVisibilityExporter.getKnownTests({}, (err, knownTests) => { + expect(err).to.be.null + expect(knownTests).to.eql(undefined) + expect(scope.isDone()).not.to.be.true + done() + }) + }) + }) + context('if early flake detection is enabled but can not use CI Visibility protocol', () => { + it('should raise an error', (done) => { + const scope = nock(`http://localhost:${port}`) + .post('/api/v2/ci/libraries/tests') + .reply(200) + + const ciVisibilityExporter = new CiVisibilityExporter({ port, isEarlyFlakeDetectionEnabled: true }) + + ciVisibilityExporter._resolveCanUseCiVisProtocol(false) + ciVisibilityExporter._libraryConfig = { isEarlyFlakeDetectionEnabled: true } + ciVisibilityExporter.getKnownTests({}, (err) => { + expect(err.message).to.include( + 'Known tests can not be requested because CI Visibility protocol can not be used' + ) + expect(scope.isDone()).not.to.be.true + done() + }) + }) + }) + context('if early flake detection is enabled and can use CI Vis Protocol', () => { + it('should request known tests', (done) => { + const scope = nock(`http://localhost:${port}`) + .post('/api/v2/ci/libraries/tests') + .reply(200, JSON.stringify({ + data: { + attributes: { + test_full_names: 
['suite1.test1', 'suite2.test2'] + } + } + })) + + const ciVisibilityExporter = new CiVisibilityExporter({ port, isEarlyFlakeDetectionEnabled: true }) + + ciVisibilityExporter._resolveCanUseCiVisProtocol(true) + ciVisibilityExporter._libraryConfig = { isEarlyFlakeDetectionEnabled: true } + ciVisibilityExporter.getKnownTests({}, (err, knownTests) => { + expect(err).to.be.null + expect(knownTests).to.eql(['suite1.test1', 'suite2.test2']) + expect(scope.isDone()).to.be.true + done() + }) + }) + it('should return an error if the request fails', (done) => { + const scope = nock(`http://localhost:${port}`) + .post('/api/v2/ci/libraries/tests') + .reply(500) + const ciVisibilityExporter = new CiVisibilityExporter({ port, isEarlyFlakeDetectionEnabled: true }) + + ciVisibilityExporter._resolveCanUseCiVisProtocol(true) + ciVisibilityExporter._libraryConfig = { isEarlyFlakeDetectionEnabled: true } + ciVisibilityExporter.getKnownTests({}, (err) => { + expect(err).not.to.be.null + expect(scope.isDone()).to.be.true + done() + }) + }) + it('should accept gzip if the exporter is gzip compatible', (done) => { + let requestHeaders = {} + const scope = nock(`http://localhost:${port}`) + .post('/api/v2/ci/libraries/tests') + .reply(200, function () { + requestHeaders = this.req.headers + + return zlib.gzipSync(JSON.stringify({ + data: { attributes: { test_full_names: ['suite1.test1', 'suite2.test2'] } } + })) + }, { + 'content-encoding': 'gzip' + }) + + const ciVisibilityExporter = new CiVisibilityExporter({ port, isEarlyFlakeDetectionEnabled: true }) + + ciVisibilityExporter._resolveCanUseCiVisProtocol(true) + ciVisibilityExporter._libraryConfig = { isEarlyFlakeDetectionEnabled: true } + ciVisibilityExporter._isGzipCompatible = true + ciVisibilityExporter.getKnownTests({}, (err, knownTests) => { + expect(err).to.be.null + expect(knownTests).to.eql(['suite1.test1', 'suite2.test2']) + expect(scope.isDone()).to.be.true + expect(requestHeaders['accept-encoding']).to.equal('gzip') + done() + }) + }) + it('should not accept gzip if the exporter is gzip incompatible', (done) => { + let requestHeaders = {} + const scope = nock(`http://localhost:${port}`) + .post('/api/v2/ci/libraries/tests') + .reply(200, function () { + requestHeaders = this.req.headers + + return JSON.stringify({ + data: { attributes: { test_full_names: ['suite1.test1', 'suite2.test2'] } } + }) + }) + + const ciVisibilityExporter = new CiVisibilityExporter({ port, isEarlyFlakeDetectionEnabled: true }) + + ciVisibilityExporter._resolveCanUseCiVisProtocol(true) + ciVisibilityExporter._libraryConfig = { isEarlyFlakeDetectionEnabled: true } + + ciVisibilityExporter._isGzipCompatible = false + + ciVisibilityExporter.getKnownTests({}, (err, knownTests) => { + expect(err).to.be.null + expect(knownTests).to.eql(['suite1.test1', 'suite2.test2']) + expect(scope.isDone()).to.be.true + expect(requestHeaders['accept-encoding']).not.to.equal('gzip') + done() + }) + }) + }) + }) }) diff --git a/packages/dd-trace/test/config.spec.js b/packages/dd-trace/test/config.spec.js index 55335b78d37..367749f08d6 100644 --- a/packages/dd-trace/test/config.spec.js +++ b/packages/dd-trace/test/config.spec.js @@ -1266,6 +1266,7 @@ describe('Config', () => { delete process.env.DD_CIVISIBILITY_ITR_ENABLED delete process.env.DD_CIVISIBILITY_GIT_UPLOAD_ENABLED delete process.env.DD_CIVISIBILITY_MANUAL_API_ENABLED + delete process.env.DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED options = {} }) context('ci visibility mode is enabled', () => { @@ -1312,6 +1313,15 @@ 
describe('Config', () => { const config = new Config(options) expect(config).to.nested.property('telemetry.enabled', true) }) + it('should enable early flake detection by default', () => { + const config = new Config(options) + expect(config).to.have.property('isEarlyFlakeDetectionEnabled', true) + }) + it('should disable early flake detection if DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED is false', () => { + process.env.DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED = 'false' + const config = new Config(options) + expect(config).to.have.property('isEarlyFlakeDetectionEnabled', false) + }) }) context('ci visibility mode is not enabled', () => { it('should not activate intelligent test runner or git metadata upload', () => { From b1e50f0027770f2e64ff075c1517cfe12e0b1a95 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Mon, 5 Feb 2024 18:44:35 +0100 Subject: [PATCH 17/44] [ci-visibility] Fix appClosing telemetry event (#4023) --- .../test/app-10/CODEOWNERS | 1 - .../test/app/CODEOWNERS | 1 - .../datadog-plugin-cypress/test/index.spec.js | 2 +- packages/dd-trace/src/telemetry/index.js | 3 ++ .../dd-trace/test/telemetry/index.spec.js | 32 +++++++++++++++---- 5 files changed, 29 insertions(+), 10 deletions(-) delete mode 100644 packages/datadog-plugin-cypress/test/app-10/CODEOWNERS delete mode 100644 packages/datadog-plugin-cypress/test/app/CODEOWNERS diff --git a/packages/datadog-plugin-cypress/test/app-10/CODEOWNERS b/packages/datadog-plugin-cypress/test/app-10/CODEOWNERS deleted file mode 100644 index fb1bef44dc2..00000000000 --- a/packages/datadog-plugin-cypress/test/app-10/CODEOWNERS +++ /dev/null @@ -1 +0,0 @@ -cypress/integration/* @datadog diff --git a/packages/datadog-plugin-cypress/test/app/CODEOWNERS b/packages/datadog-plugin-cypress/test/app/CODEOWNERS deleted file mode 100644 index fb1bef44dc2..00000000000 --- a/packages/datadog-plugin-cypress/test/app/CODEOWNERS +++ /dev/null @@ -1 +0,0 @@ -cypress/integration/* @datadog diff --git a/packages/datadog-plugin-cypress/test/index.spec.js b/packages/datadog-plugin-cypress/test/index.spec.js index 293855ca522..60bdc1fd22f 100644 --- a/packages/datadog-plugin-cypress/test/index.spec.js +++ b/packages/datadog-plugin-cypress/test/index.spec.js @@ -81,10 +81,10 @@ describe('Plugin', function () { [TEST_TYPE]: 'browser', [ORIGIN_KEY]: CI_APP_ORIGIN, [TEST_IS_RUM_ACTIVE]: 'true', - [TEST_CODE_OWNERS]: JSON.stringify(['@datadog']), [LIBRARY_VERSION]: ddTraceVersion, [COMPONENT]: 'cypress' }) + expect(passedTestSpan.meta[TEST_CODE_OWNERS]).to.contain('@DataDog') expect(passedTestSpan.meta[TEST_FRAMEWORK_VERSION]).not.to.be.undefined expect(passedTestSpan.metrics[TEST_SOURCE_START]).to.exist diff --git a/packages/dd-trace/src/telemetry/index.js b/packages/dd-trace/src/telemetry/index.js index 20502cd28da..6f8159119dd 100644 --- a/packages/dd-trace/src/telemetry/index.js +++ b/packages/dd-trace/src/telemetry/index.js @@ -141,6 +141,9 @@ function appStarted (config) { } function appClosing () { + if (!config?.telemetry?.enabled) { + return + } const { reqType, payload } = createPayload('app-closing') sendData(config, application, host, reqType, payload) // we flush before shutting down. 
Only in CI Visibility diff --git a/packages/dd-trace/test/telemetry/index.spec.js b/packages/dd-trace/test/telemetry/index.spec.js index 5bb42a8c7c3..e5480e429b1 100644 --- a/packages/dd-trace/test/telemetry/index.spec.js +++ b/packages/dd-trace/test/telemetry/index.spec.js @@ -160,13 +160,13 @@ describe('telemetry', () => { }) }) - // TODO: make this work regardless of the test runner - // it.skip('should send app-closing', () => { - // process.emit('beforeExit') - // return testSeq(5, 'app-closing', payload => { - // expect(payload).to.deep.equal({}) - // }) - // }) + // TODO: test it's called on beforeExit instead of calling directly + it('should send app-closing', () => { + telemetry.appClosing() + return testSeq(5, 'app-closing', payload => { + expect(payload).to.deep.equal({}) + }) + }) it('should do nothing when not enabled', (done) => { telemetry.stop() @@ -187,6 +187,24 @@ describe('telemetry', () => { clearTimeout() }) }) + + it('should not send app-closing if telemetry is not enabled', () => { + const sendDataStub = sinon.stub() + const notEnabledTelemetry = proxyquire('../../src/telemetry', { + './send-data': { + sendData: sendDataStub + } + }) + notEnabledTelemetry.start({ + telemetry: { enabled: false, heartbeatInterval: DEFAULT_HEARTBEAT_INTERVAL }, + appsec: { enabled: false }, + profiling: { enabled: false } + }, { + _pluginsByName: pluginsByName + }) + notEnabledTelemetry.appClosing() + expect(sendDataStub.called).to.be.false + }) }) describe('telemetry app-heartbeat', () => { From d5ea506dfad4b0e04724461dd054ce1ab2065c51 Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Mon, 5 Feb 2024 11:46:18 -0800 Subject: [PATCH 18/44] upgrade axios for a test VULN-5533 + dc-polyfill (#4026) --- integration-tests/esbuild/package.json | 2 +- package.json | 2 +- yarn.lock | 8 ++++---- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/integration-tests/esbuild/package.json b/integration-tests/esbuild/package.json index d96723cc631..cc027c59bcf 100644 --- a/integration-tests/esbuild/package.json +++ b/integration-tests/esbuild/package.json @@ -19,7 +19,7 @@ "license": "ISC", "dependencies": { "aws-sdk": "^2.1446.0", - "axios": "^0.21.2", + "axios": "^1.6.7", "esbuild": "0.16.12", "express": "^4.16.2", "knex": "^2.4.2" diff --git a/package.json b/package.json index 8b4c45be140..bc79ad575f7 100644 --- a/package.json +++ b/package.json @@ -78,7 +78,7 @@ "@opentelemetry/api": "^1.0.0", "@opentelemetry/core": "^1.14.0", "crypto-randomuuid": "^1.0.0", - "dc-polyfill": "^0.1.2", + "dc-polyfill": "^0.1.4", "ignore": "^5.2.4", "import-in-the-middle": "^1.7.3", "int64-buffer": "^0.1.9", diff --git a/yarn.lock b/yarn.lock index 2689de62d51..f736833d4e9 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1670,10 +1670,10 @@ csstype@^3.0.2: resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.3.tgz#d80ff294d114fb0e6ac500fbf85b60137d7eff81" integrity "sha1-2A/ylNEU+w5qxQD7+FtgE31+/4E= sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==" -dc-polyfill@^0.1.2: - version "0.1.2" - resolved "https://registry.npmjs.org/dc-polyfill/-/dc-polyfill-0.1.2.tgz" - integrity "sha1-maLxIHWTF7mXaZmqcVGDocRLEyc= sha512-AJ4TWwkeOKF7+Wj301wdyK8L0D9SE8Fr7+eMein8UP8+Iyb1xuL3rXWXavsTEM1+vOqDLciYho4cpsvNY0RDGQ==" +dc-polyfill@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/dc-polyfill/-/dc-polyfill-0.1.4.tgz#4118cec81a8fab9a5729c41c285c715ffa42495a" + integrity 
sha512-8iwEduR2jR9wWYggeaYtYZWRiUe3XZPyAQtMTL1otv8X3kfR8xUIVb4l5awHEeyDrH6Je7N324lKzMKlMMN6Yw== debug@2.6.9, debug@^2.6.9: version "2.6.9" From c4553fb0dab130729e788a18b1f6eba216a70917 Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Mon, 5 Feb 2024 12:02:15 -0800 Subject: [PATCH 19/44] upgrade docs/ packages: VULN-5529 VULN-5527 VULN-5525 VULN-5522 (#4025) --- docs/yarn.lock | 114 ++++++++++++++++++++++++++----------------------- 1 file changed, 60 insertions(+), 54 deletions(-) diff --git a/docs/yarn.lock b/docs/yarn.lock index 1f1ffeef806..08fe81056b4 100644 --- a/docs/yarn.lock +++ b/docs/yarn.lock @@ -18,7 +18,7 @@ brace-expansion@^1.1.7: concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" - integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= + integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== fs-extra@^8.1.0: version "8.1.0" @@ -32,48 +32,48 @@ fs-extra@^8.1.0: fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" - integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== -function-bind@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" - integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== +function-bind@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" + integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== glob@^7.0.0: - version "7.2.0" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.0.tgz#d15535af7732e02e948f4c41628bd910293f6023" - integrity sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q== + version "7.2.3" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" + integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" inherits "2" - minimatch "^3.0.4" + minimatch "^3.1.1" once "^1.3.0" path-is-absolute "^1.0.0" graceful-fs@^4.1.6, graceful-fs@^4.2.0: - version "4.2.8" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.8.tgz#e412b8d33f5e006593cbd3cee6df9f2cebbe802a" - integrity sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg== + version "4.2.11" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" + integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== handlebars@^4.7.6: - version "4.7.7" - resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.7.tgz#9ce33416aad02dbd6c8fafa8240d5d98004945a1" - integrity sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA== + version "4.7.8" + resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.8.tgz#41c42c18b1be2365439188c77c6afae71c0cd9e9" + integrity sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ== dependencies: 
minimist "^1.2.5" - neo-async "^2.6.0" + neo-async "^2.6.2" source-map "^0.6.1" wordwrap "^1.0.0" optionalDependencies: uglify-js "^3.1.4" -has@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" - integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== +hasown@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.0.tgz#f4c513d454a57b7c7e1650778de226b11700546c" + integrity sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA== dependencies: - function-bind "^1.1.1" + function-bind "^1.1.2" highlight.js@^10.0.0: version "10.7.3" @@ -83,7 +83,7 @@ highlight.js@^10.0.0: inflight@^1.0.4: version "1.0.6" resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" - integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= + integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== dependencies: once "^1.3.0" wrappy "1" @@ -98,17 +98,17 @@ interpret@^1.0.0: resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.4.0.tgz#665ab8bc4da27a774a40584e812e3e0fa45b1a1e" integrity sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA== -is-core-module@^2.2.0: - version "2.8.0" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.8.0.tgz#0321336c3d0925e497fd97f5d95cb114a5ccd548" - integrity sha512-vd15qHsaqrRL7dtH6QNuy0ndJmRDrS9HAM1CAiSifNUFv4x1a0CCVsj18hJ1mShxIG6T2i1sO78MkP56r0nYRw== +is-core-module@^2.13.0: + version "2.13.1" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.13.1.tgz#ad0d7532c6fea9da1ebdc82742d74525c6273384" + integrity sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw== dependencies: - has "^1.0.3" + hasown "^2.0.0" jsonfile@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" - integrity sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss= + integrity sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg== optionalDependencies: graceful-fs "^4.1.6" @@ -127,19 +127,19 @@ marked@1.0.0: resolved "https://registry.yarnpkg.com/marked/-/marked-1.0.0.tgz#d35784245a04871e5988a491e28867362e941693" integrity sha512-Wo+L1pWTVibfrSr+TTtMuiMfNzmZWiOPeO7rZsQUY5bgsxpHesBEcIWJloWVTFnrMXnf/TL30eTFSGJddmQAng== -minimatch@^3.0.0, minimatch@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" - integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== +minimatch@^3.0.0, minimatch@^3.1.1: + version "3.1.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== dependencies: brace-expansion "^1.1.7" minimist@^1.2.5: - version "1.2.5" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" - integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== + version "1.2.8" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" + integrity 
sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== -neo-async@^2.6.0: +neo-async@^2.6.2: version "2.6.2" resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== @@ -147,16 +147,16 @@ neo-async@^2.6.0: once@^1.3.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" - integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== dependencies: wrappy "1" path-is-absolute@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" - integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= + integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== -path-parse@^1.0.6: +path-parse@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== @@ -169,22 +169,23 @@ progress@^2.0.3: rechoir@^0.6.2: version "0.6.2" resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384" - integrity sha1-hSBLVNuoLVdC4oyWdW70OvUOM4Q= + integrity sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw== dependencies: resolve "^1.1.6" resolve@^1.1.6: - version "1.20.0" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975" - integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A== + version "1.22.8" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d" + integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw== dependencies: - is-core-module "^2.2.0" - path-parse "^1.0.6" + is-core-module "^2.13.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" shelljs@^0.8.4: - version "0.8.4" - resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.8.4.tgz#de7684feeb767f8716b326078a8a00875890e3c2" - integrity sha512-7gk3UZ9kOfPLIAbslLzyWeGiEqx9e3rxwZM0KE6EL8GlGwjym9Mrlx5/p33bWTu9YG6vcS4MBxYZDHYr5lr8BQ== + version "0.8.5" + resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.8.5.tgz#de055408d8361bed66c669d2f000538ced8ee20c" + integrity sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow== dependencies: glob "^7.0.0" interpret "^1.0.0" @@ -195,6 +196,11 @@ source-map@^0.6.1: resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + typedoc-default-themes@^0.10.2: version "0.10.2" resolved 
"https://registry.yarnpkg.com/typedoc-default-themes/-/typedoc-default-themes-0.10.2.tgz#743380a80afe62c5ef92ca1bd4abe2ac596be4d2" @@ -224,9 +230,9 @@ typescript@^3.8.3: integrity sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q== uglify-js@^3.1.4: - version "3.14.5" - resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.14.5.tgz#cdabb7d4954231d80cb4a927654c4655e51f4859" - integrity sha512-qZukoSxOG0urUTvjc2ERMTcAy+BiFh3weWAkeurLwjrCba73poHmG3E36XEjd/JGukMzwTL7uCxZiAexj8ppvQ== + version "3.17.4" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.17.4.tgz#61678cf5fa3f5b7eb789bb345df29afb8257c22c" + integrity sha512-T9q82TJI9e/C1TAxYvfb16xO120tMVFZrGA3f9/P4424DNu6ypK103y0GPFVa17yotwSyZW5iYXgjYHkGrJW/g== universalify@^0.1.0: version "0.1.2" @@ -236,9 +242,9 @@ universalify@^0.1.0: wordwrap@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" - integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus= + integrity sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q== wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" - integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== From 9437f1c5c1db5f6d22a69456e322f3f0a421ceaf Mon Sep 17 00:00:00 2001 From: Ugaitz Urien Date: Tue, 6 Feb 2024 11:01:12 +0100 Subject: [PATCH 20/44] Api security enabled by default and configurable by DD_API_SECURITY_ENABLED environment variable (#4005) --- packages/dd-trace/src/config.js | 9 ++++---- packages/dd-trace/test/config.spec.js | 31 ++++++++++++++++++++++++--- 2 files changed, 33 insertions(+), 7 deletions(-) diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index 7f0627bb655..8969617f024 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -421,10 +421,11 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) process.env.DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING, 'safe' ).toLowerCase() - const DD_EXPERIMENTAL_API_SECURITY_ENABLED = coalesce( + const DD_API_SECURITY_ENABLED = coalesce( appsec?.apiSecurity?.enabled, - isTrue(process.env.DD_EXPERIMENTAL_API_SECURITY_ENABLED), - false + process.env.DD_API_SECURITY_ENABLED && isTrue(process.env.DD_API_SECURITY_ENABLED), + process.env.DD_EXPERIMENTAL_API_SECURITY_ENABLED && isTrue(process.env.DD_EXPERIMENTAL_API_SECURITY_ENABLED), + true ) const DD_API_SECURITY_REQUEST_SAMPLE_RATE = coalesce( appsec?.apiSecurity?.requestSampling, @@ -641,7 +642,7 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) 
mode: DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING }, apiSecurity: { - enabled: DD_EXPERIMENTAL_API_SECURITY_ENABLED, + enabled: DD_API_SECURITY_ENABLED, // Coerce value between 0 and 1 requestSampling: Math.min(1, Math.max(0, DD_API_SECURITY_REQUEST_SAMPLE_RATE)) } diff --git a/packages/dd-trace/test/config.spec.js b/packages/dd-trace/test/config.spec.js index 367749f08d6..360e85dfd48 100644 --- a/packages/dd-trace/test/config.spec.js +++ b/packages/dd-trace/test/config.spec.js @@ -112,7 +112,7 @@ describe('Config', () => { expect(config).to.have.nested.property('appsec.blockedTemplateGraphql', undefined) expect(config).to.have.nested.property('appsec.eventTracking.enabled', true) expect(config).to.have.nested.property('appsec.eventTracking.mode', 'safe') - expect(config).to.have.nested.property('appsec.apiSecurity.enabled', false) + expect(config).to.have.nested.property('appsec.apiSecurity.enabled', true) expect(config).to.have.nested.property('appsec.apiSecurity.requestSampling', 0.1) expect(config).to.have.nested.property('remoteConfig.enabled', true) expect(config).to.have.nested.property('remoteConfig.pollInterval', 5) @@ -230,7 +230,7 @@ describe('Config', () => { process.env.DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED = 'true' process.env.DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED = 'true' process.env.DD_EXPERIMENTAL_PROFILING_ENABLED = 'true' - process.env.DD_EXPERIMENTAL_API_SECURITY_ENABLED = 'true' + process.env.DD_API_SECURITY_ENABLED = 'true' process.env.DD_API_SECURITY_REQUEST_SAMPLE_RATE = 1 process.env.DD_INSTRUMENTATION_INSTALL_ID = '68e75c48-57ca-4a12-adfc-575c4b05fcbe' process.env.DD_INSTRUMENTATION_INSTALL_TYPE = 'k8s_single_step' @@ -688,7 +688,7 @@ describe('Config', () => { process.env.DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON = BLOCKED_TEMPLATE_HTML_PATH // json and html here process.env.DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON = BLOCKED_TEMPLATE_JSON_PATH // json and html here process.env.DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING = 'disabled' - process.env.DD_EXPERIMENTAL_API_SECURITY_ENABLED = 'false' + process.env.DD_API_SECURITY_ENABLED = 'false' process.env.DD_API_SECURITY_REQUEST_SAMPLE_RATE = 0.5 process.env.DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS = 11 process.env.DD_IAST_ENABLED = 'false' @@ -1172,6 +1172,31 @@ describe('Config', () => { expect(config.appsec.blockedTemplateGraphql).to.be.undefined }) + it('should enable api security with DD_EXPERIMENTAL_API_SECURITY_ENABLED', () => { + process.env.DD_EXPERIMENTAL_API_SECURITY_ENABLED = 'true' + + const config = new Config() + + expect(config.appsec.apiSecurity.enabled).to.be.true + }) + + it('should disable api security with DD_EXPERIMENTAL_API_SECURITY_ENABLED', () => { + process.env.DD_EXPERIMENTAL_API_SECURITY_ENABLED = 'false' + + const config = new Config() + + expect(config.appsec.apiSecurity.enabled).to.be.false + }) + + it('should ignore DD_EXPERIMENTAL_API_SECURITY_ENABLED with DD_API_SECURITY_ENABLED=true', () => { + process.env.DD_EXPERIMENTAL_API_SECURITY_ENABLED = 'false' + process.env.DD_API_SECURITY_ENABLED = 'true' + + const config = new Config() + + expect(config.appsec.apiSecurity.enabled).to.be.true + }) + context('auto configuration w/ unix domain sockets', () => { context('on windows', () => { it('should not be used', () => { From b2a1cb8da71a93d000e954833a60ea33d278f4e4 Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Tue, 6 Feb 2024 10:08:07 -0800 Subject: [PATCH 21/44] upgrading axios to squelch security scanners (#4031) --- package.json | 2 +- yarn.lock | 36 
++++++++++++++++++++++++++---------- 2 files changed, 27 insertions(+), 11 deletions(-) diff --git a/package.json b/package.json index bc79ad575f7..fb22352d305 100644 --- a/package.json +++ b/package.json @@ -106,7 +106,7 @@ "@types/node": ">=14", "autocannon": "^4.5.2", "aws-sdk": "^2.1446.0", - "axios": "^0.21.2", + "axios": "^1.6.7", "benchmark": "^2.1.4", "body-parser": "^1.20.2", "chai": "^4.3.7", diff --git a/yarn.lock b/yarn.lock index f736833d4e9..59d734e4d7c 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1139,12 +1139,14 @@ aws-sdk@^2.1446.0: uuid "8.0.0" xml2js "0.5.0" -axios@^0.21.2: - version "0.21.4" - resolved "https://registry.npmjs.org/axios/-/axios-0.21.4.tgz" - integrity sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg== +axios@^1.6.7: + version "1.6.7" + resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.7.tgz#7b48c2e27c96f9c68a2f8f31e2ab19f59b06b0a7" + integrity sha512-/hDJGff6/c7u0hDkvkGxR/oy6CbCs8ziCsC7SqmhjfozqiJGc8Z11wrv9z9lYfY4K8l+H9TpjcMDX0xOZmx+RA== dependencies: - follow-redirects "^1.14.0" + follow-redirects "^1.15.4" + form-data "^4.0.0" + proxy-from-env "^1.1.0" balanced-match@^1.0.0: version "1.0.2" @@ -1565,7 +1567,7 @@ colors@^1.1.2: resolved "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz" integrity sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA== -combined-stream@^1.0.6: +combined-stream@^1.0.6, combined-stream@^1.0.8: version "1.0.8" resolved "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz" integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== @@ -2444,10 +2446,10 @@ flatted@^3.1.0: resolved "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz" integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ== -follow-redirects@^1.14.0: - version "1.15.2" - resolved "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz" - integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== +follow-redirects@^1.15.4: + version "1.15.5" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.5.tgz#54d4d6d062c0fa7d9d17feb008461550e3ba8020" + integrity sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw== for-each@^0.3.3: version "0.3.3" @@ -2473,6 +2475,15 @@ form-data@^2.5.1: combined-stream "^1.0.6" mime-types "^2.1.12" +form-data@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" + integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + forwarded@0.2.0: version "0.2.0" resolved "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz" @@ -4129,6 +4140,11 @@ proxy-addr@~2.0.7: forwarded "0.2.0" ipaddr.js "1.9.1" +proxy-from-env@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" + integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== + proxyquire@^1.8.0: version "1.8.0" resolved "https://registry.npmjs.org/proxyquire/-/proxyquire-1.8.0.tgz" From 9ee2e7a49ab09d7dde04f2c2968c5f078909c639 Mon Sep 17 00:00:00 2001 From: Jordi Bertran de Balanda 
Date: Wed, 7 Feb 2024 12:57:07 +0100 Subject: [PATCH 22/44] grab resolved remote peer in GRPC >=1.1.4 (#4013) --- .../src/grpc/client.js | 92 +++++++++++-------- packages/datadog-plugin-grpc/src/client.js | 18 +++- .../datadog-plugin-grpc/test/client.spec.js | 25 ++++- 3 files changed, 95 insertions(+), 40 deletions(-) diff --git a/packages/datadog-instrumentations/src/grpc/client.js b/packages/datadog-instrumentations/src/grpc/client.js index c1a97d96b45..84e73e63f4e 100644 --- a/packages/datadog-instrumentations/src/grpc/client.js +++ b/packages/datadog-instrumentations/src/grpc/client.js @@ -15,54 +15,52 @@ const errorChannel = channel('apm:grpc:client:request:error') const finishChannel = channel('apm:grpc:client:request:finish') const emitChannel = channel('apm:grpc:client:request:emit') -function createWrapMakeRequest (type) { +function createWrapMakeRequest (type, hasPeer = false) { return function wrapMakeRequest (makeRequest) { return function (path) { const args = ensureMetadata(this, arguments, 4) - return callMethod(this, makeRequest, args, path, args[4], type) + return callMethod(this, makeRequest, args, path, args[4], type, hasPeer) } } } -function createWrapLoadPackageDefinition () { +function createWrapLoadPackageDefinition (hasPeer = false) { return function wrapLoadPackageDefinition (loadPackageDefinition) { return function (packageDef) { const result = loadPackageDefinition.apply(this, arguments) if (!result) return result - wrapPackageDefinition(result) + wrapPackageDefinition(result, hasPeer) return result } } } -function createWrapMakeClientConstructor () { +function createWrapMakeClientConstructor (hasPeer = false) { return function wrapMakeClientConstructor (makeClientConstructor) { return function (methods) { const ServiceClient = makeClientConstructor.apply(this, arguments) - - wrapClientConstructor(ServiceClient, methods) - + wrapClientConstructor(ServiceClient, methods, hasPeer) return ServiceClient } } } -function wrapPackageDefinition (def) { +function wrapPackageDefinition (def, hasPeer = false) { for (const name in def) { if (def[name].format) continue if (def[name].service && def[name].prototype) { - wrapClientConstructor(def[name], def[name].service) + wrapClientConstructor(def[name], def[name].service, hasPeer) } else { - wrapPackageDefinition(def[name]) + wrapPackageDefinition(def[name], hasPeer) } } } -function wrapClientConstructor (ServiceClient, methods) { +function wrapClientConstructor (ServiceClient, methods, hasPeer = false) { const proto = ServiceClient.prototype if (typeof methods !== 'object' || 'format' in methods) return @@ -76,24 +74,23 @@ function wrapClientConstructor (ServiceClient, methods) { const type = getType(methods[name]) if (methods[name]) { - proto[name] = wrapMethod(proto[name], path, type) + proto[name] = wrapMethod(proto[name], path, type, hasPeer) } if (originalName) { - proto[originalName] = wrapMethod(proto[originalName], path, type) + proto[originalName] = wrapMethod(proto[originalName], path, type, hasPeer) } }) } -function wrapMethod (method, path, type) { +function wrapMethod (method, path, type, hasPeer) { if (typeof method !== 'function' || patched.has(method)) { return method } const wrapped = function () { const args = ensureMetadata(this, arguments, 1) - - return callMethod(this, method, args, path, args[1], type) + return callMethod(this, method, args, path, args[1], type, hasPeer) } Object.assign(wrapped, method) @@ -117,7 +114,20 @@ function wrapCallback (ctx, callback = () => { }) { } } -function 
createWrapEmit (ctx) { +function createWrapEmit (ctx, hasPeer = false) { + const onStatusWithPeer = function (ctx, arg1, thisArg) { + ctx.result = arg1 + ctx.peer = thisArg.getPeer() + finishChannel.publish(ctx) + } + + const onStatusWithoutPeer = function (ctx, arg1, thisArg) { + ctx.result = arg1 + finishChannel.publish(ctx) + } + + const onStatus = hasPeer ? onStatusWithPeer : onStatusWithoutPeer + return function wrapEmit (emit) { return function (event, arg1) { switch (event) { @@ -126,8 +136,7 @@ function createWrapEmit (ctx) { errorChannel.publish(ctx) break case 'status': - ctx.result = arg1 - finishChannel.publish(ctx) + onStatus(ctx, arg1, this) break } @@ -138,7 +147,7 @@ function createWrapEmit (ctx) { } } -function callMethod (client, method, args, path, metadata, type) { +function callMethod (client, method, args, path, metadata, type, hasPeer = false) { if (!startChannel.hasSubscribers) return method.apply(client, args) const length = args.length @@ -159,7 +168,7 @@ function callMethod (client, method, args, path, metadata, type) { const call = method.apply(client, args) if (call && typeof call.emit === 'function') { - shimmer.wrap(call, 'emit', createWrapEmit(ctx)) + shimmer.wrap(call, 'emit', createWrapEmit(ctx, hasPeer)) } return call @@ -223,34 +232,45 @@ function getGrpc (client) { } while ((proto = Object.getPrototypeOf(proto))) } -function patch (grpc) { - const proto = grpc.Client.prototype +function patch (hasPeer = false) { + return function patch (grpc) { + const proto = grpc.Client.prototype - instances.set(proto, grpc) + instances.set(proto, grpc) - shimmer.wrap(proto, 'makeBidiStreamRequest', createWrapMakeRequest(types.bidi)) - shimmer.wrap(proto, 'makeClientStreamRequest', createWrapMakeRequest(types.clientStream)) - shimmer.wrap(proto, 'makeServerStreamRequest', createWrapMakeRequest(types.serverStream)) - shimmer.wrap(proto, 'makeUnaryRequest', createWrapMakeRequest(types.unary)) + shimmer.wrap(proto, 'makeBidiStreamRequest', createWrapMakeRequest(types.bidi, hasPeer)) + shimmer.wrap(proto, 'makeClientStreamRequest', createWrapMakeRequest(types.clientStream, hasPeer)) + shimmer.wrap(proto, 'makeServerStreamRequest', createWrapMakeRequest(types.serverStream, hasPeer)) + shimmer.wrap(proto, 'makeUnaryRequest', createWrapMakeRequest(types.unary, hasPeer)) - return grpc + return grpc + } } if (nodeMajor <= 14) { - addHook({ name: 'grpc', versions: ['>=1.24.3'] }, patch) + addHook({ name: 'grpc', versions: ['>=1.24.3'] }, patch(true)) addHook({ name: 'grpc', versions: ['>=1.24.3'], file: 'src/client.js' }, client => { - shimmer.wrap(client, 'makeClientConstructor', createWrapMakeClientConstructor()) + shimmer.wrap(client, 'makeClientConstructor', createWrapMakeClientConstructor(true)) return client }) } -addHook({ name: '@grpc/grpc-js', versions: ['>=1.0.3'] }, patch) +addHook({ name: '@grpc/grpc-js', versions: ['>=1.0.3 <1.1.4'] }, patch(false)) + +addHook({ name: '@grpc/grpc-js', versions: ['>=1.0.3 <1.1.4'], file: 'build/src/make-client.js' }, client => { + shimmer.wrap(client, 'makeClientConstructor', createWrapMakeClientConstructor(false)) + shimmer.wrap(client, 'loadPackageDefinition', createWrapLoadPackageDefinition(false)) + + return client +}) + +addHook({ name: '@grpc/grpc-js', versions: ['>=1.1.4'] }, patch(true)) -addHook({ name: '@grpc/grpc-js', versions: ['>=1.0.3'], file: 'build/src/make-client.js' }, client => { - shimmer.wrap(client, 'makeClientConstructor', createWrapMakeClientConstructor()) - shimmer.wrap(client, 'loadPackageDefinition', 
createWrapLoadPackageDefinition()) +addHook({ name: '@grpc/grpc-js', versions: ['>=1.1.4'], file: 'build/src/make-client.js' }, client => { + shimmer.wrap(client, 'makeClientConstructor', createWrapMakeClientConstructor(true)) + shimmer.wrap(client, 'loadPackageDefinition', createWrapLoadPackageDefinition(true)) return client }) diff --git a/packages/datadog-plugin-grpc/src/client.js b/packages/datadog-plugin-grpc/src/client.js index 8cc6dfdb91f..0d008cc0070 100644 --- a/packages/datadog-plugin-grpc/src/client.js +++ b/packages/datadog-plugin-grpc/src/client.js @@ -41,7 +41,6 @@ class GrpcClientPlugin extends ClientPlugin { 'grpc.status.code': 0 } }, false) - // needed as precursor for peer.service if (method.service && method.package) { span.setTag('rpc.service', method.package + '.' + method.service) @@ -68,7 +67,7 @@ class GrpcClientPlugin extends ClientPlugin { this.addError(error, span) } - finish ({ span, result }) { + finish ({ span, result, peer }) { if (!span) return const { code, metadata } = result || {} @@ -80,6 +79,21 @@ class GrpcClientPlugin extends ClientPlugin { addMetadataTags(span, metadata, metadataFilter, 'response') } + if (peer) { + // The only scheme we want to support here is ipv[46]:port, although + // more are supported by the library + // https://github.com/grpc/grpc/blob/v1.60.0/doc/naming.md + const parts = peer.split(':') + if (parts[parts.length - 1].match(/^\d+/)) { + const port = parts[parts.length - 1] + const ip = parts.slice(0, -1).join(':') + span.setTag('network.destination.ip', ip) + span.setTag('network.destination.port', port) + } else { + span.setTag('network.destination.ip', peer) + } + } + this.tagPeerService(span) span.finish() } diff --git a/packages/datadog-plugin-grpc/test/client.spec.js b/packages/datadog-plugin-grpc/test/client.spec.js index 17316b648e6..e28685fe19c 100644 --- a/packages/datadog-plugin-grpc/test/client.spec.js +++ b/packages/datadog-plugin-grpc/test/client.spec.js @@ -2,6 +2,7 @@ const agent = require('../../dd-trace/test/plugins/agent') const getPort = require('get-port') +const semver = require('semver') const Readable = require('stream').Readable const getService = require('./service') const loader = require('../../../versions/@grpc/proto-loader').get() @@ -42,14 +43,14 @@ describe('Plugin', () => { server.addService(TestService.service, service) server.start() - resolve(new ClientService(`localhost:${port}`, grpc.credentials.createInsecure())) + resolve(new ClientService(`127.0.0.1:${port}`, grpc.credentials.createInsecure())) }) } else { server.bind(`127.0.0.1:${port}`, grpc.ServerCredentials.createInsecure()) server.addService(TestService.service, service) server.start() - resolve(new ClientService(`localhost:${port}`, grpc.credentials.createInsecure())) + resolve(new ClientService(`127.0.0.1:${port}`, grpc.credentials.createInsecure())) } }) } @@ -126,6 +127,26 @@ describe('Plugin', () => { } ) + if (semver.intersects(version, '>=1.1.4')) { + it('should provide host information', async () => { + const client = await buildClient({ + getUnary: (_, callback) => callback() + }) + + client.getUnary({ first: 'foobar' }, () => {}) + return agent + .use(traces => { + expect(traces[0][0].meta).to.include({ + 'network.destination.ip': '127.0.0.1', + 'network.destination.port': port.toString(), + 'rpc.service': 'test.TestService', + 'span.kind': 'client', + 'component': 'grpc' + }) + }) + }) + } + it('should handle `unary` calls', async () => { const client = await buildClient({ getUnary: (_, callback) => callback() From 
c8848c3a17eb133d9e322adbedad2c2df94ad42d Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Wed, 7 Feb 2024 13:27:17 -0800 Subject: [PATCH 23/44] docs: remove bundling section from README (#4034) --- README.md | 33 +-------------------------------- 1 file changed, 1 insertion(+), 32 deletions(-) diff --git a/README.md b/README.md index e8d50268703..19069f4ebab 100644 --- a/README.md +++ b/README.md @@ -194,38 +194,7 @@ Regardless of where you open the issue, someone at Datadog will try to help. ## Bundling -Generally, `dd-trace` works by intercepting `require()` calls that a Node.js application makes when loading modules. This includes modules that are built-in to Node.js, like the `fs` module for accessing the filesystem, as well as modules installed from the npm registry, like the `pg` database module. - -Also generally, bundlers work by crawling all of the `require()` calls that an application makes to files on disk, replacing the `require()` calls with custom code, and then concatenating all of the resulting JavaScript into one "bundled" file. When a built-in module is loaded, like `require('fs')`, that call can then remain the same in the resulting bundle. - -Fundamentally APM tools like `dd-trace` stop working at this point. Perhaps they continue to intercept the calls for built-in modules but don't intercept calls to third party libraries. This means that by default when you bundle a `dd-trace` app with a bundler it is likely to capture information about disk access (via `fs`) and outbound HTTP requests (via `http`), but will otherwise omit calls to third party libraries (like extracting incoming request route information for the `express` framework or showing which query is run for the `mysql` database client). - -To get around this, one can treat all third party modules, or at least third party modules that the APM needs to instrument, as being "external" to the bundler. With this setting the instrumented modules remain on disk and continue to be loaded via `require()` while the non-instrumented modules are bundled. Sadly this results in a build with many extraneous files and starts to defeat the purpose of bundling. - -For these reasons it's necessary to have custom-built bundler plugins. Such plugins are able to instruct the bundler on how to behave, injecting intermediary code and otherwise intercepting the "translated" `require()` calls. The result is that many more packages are then included in the bundled JavaScript file. Some applications can have 100% of modules bundled, however native modules still need to remain external to the bundle. - -### ESBuild Support - -This library provides experimental ESBuild support in the form of an ESBuild plugin. Require the `dd-trace/esbuild` module when building your bundle to enable the plugin. - -Here's an example of how one might use `dd-trace` with ESBuild: - -```javascript -const ddPlugin = require('dd-trace/esbuild') -const esbuild = require('esbuild') - -esbuild.build({ - entryPoints: ['app.js'], - bundle: true, - outfile: 'out.js', - plugins: [ddPlugin], - platform: 'node', // allows built-in modules to be required - target: ['node16'] -}).catch((err) => { - console.error(err) - process.exit(1) -}) -``` +If you would like to trace your bundled application then please read this page on [bundling and dd-trace](https://docs.datadoghq.com/tracing/trace_collection/automatic_instrumentation/dd_libraries/nodejs/#bundling). It includes information on how to use our ESBuild plugin and includes caveats for other bundlers. 
## Security Vulnerabilities From 861bba8d39c21ce2071dcc86715f8e2aa3c240e9 Mon Sep 17 00:00:00 2001 From: Sam Brenner <106700075+sabrenner@users.noreply.github.com> Date: Wed, 7 Feb 2024 17:12:16 -0500 Subject: [PATCH 24/44] Implement Span Links (#3924) * implement span links --------- Co-authored-by: Ayan Khan Co-authored-by: Juan Fernandez --- index.d.ts | 21 +++++ .../src/encode/agentless-ci-visibility.js | 8 +- packages/dd-trace/src/format.js | 26 +++++- packages/dd-trace/src/noop/span.js | 1 + packages/dd-trace/src/opentelemetry/span.js | 11 ++- packages/dd-trace/src/opentracing/span.js | 38 +++++++++ .../dd-trace/src/opentracing/span_context.js | 18 ++-- packages/dd-trace/src/opentracing/tracer.js | 3 +- packages/dd-trace/test/encode/0.4.spec.js | 54 +++++++++++- packages/dd-trace/test/encode/0.5.spec.js | 77 ++++++++++++++++- packages/dd-trace/test/format.spec.js | 84 ++++++++++++++++++- .../dd-trace/test/opentelemetry/span.spec.js | 14 ++++ .../dd-trace/test/opentracing/span.spec.js | 61 ++++++++++++++ .../dd-trace/test/opentracing/tracer.spec.js | 31 ++++++- 14 files changed, 427 insertions(+), 20 deletions(-) diff --git a/index.d.ts b/index.d.ts index 9396d3bada8..eb298f1fbc8 100644 --- a/index.d.ts +++ b/index.d.ts @@ -142,6 +142,11 @@ export declare interface TraceOptions extends Analyzable { * The type of request. */ type?: string + + /** + * An array of span links + */ + links?: Array<{ context: SpanContext, attributes?: Object }> } /** @@ -153,6 +158,14 @@ export declare interface TraceOptions extends Analyzable { */ export declare interface Span extends opentracing.Span { context (): SpanContext; + + /** + * Causally links another span to the current span + * @param {SpanContext} context The context of the span to link to. + * @param {Object} attributes An optional key value pair of arbitrary values. + * @returns {void} + */ + addLink (context: SpanContext, attributes?: Object): void; } /** @@ -1900,6 +1913,14 @@ export namespace opentelemetry { * use the current time. */ recordException(exception: Exception, time?: TimeInput): void; + + /** + * Causally links another span to the current span + * @param {otel.SpanContext} context The context of the span to link to. + * @param {SpanAttributes} attributes An optional key value pair of arbitrary values. 
+ * @returns {void} + */ + addLink (context: otel.SpanContext, attributes?: SpanAttributes): void; } /** diff --git a/packages/dd-trace/src/encode/agentless-ci-visibility.js b/packages/dd-trace/src/encode/agentless-ci-visibility.js index 998fc991f51..5b7d3b54c75 100644 --- a/packages/dd-trace/src/encode/agentless-ci-visibility.js +++ b/packages/dd-trace/src/encode/agentless-ci-visibility.js @@ -16,6 +16,7 @@ const ALLOWED_CONTENT_TYPES = ['test_session_end', 'test_module_end', 'test_suit const TEST_SUITE_KEYS_LENGTH = 12 const TEST_MODULE_KEYS_LENGTH = 11 const TEST_SESSION_KEYS_LENGTH = 10 +const TEST_AND_SPAN_KEYS_LENGTH = 11 const INTAKE_SOFT_LIMIT = 2 * 1024 * 1024 // 2MB @@ -145,9 +146,7 @@ class AgentlessCiVisibilityEncoder extends AgentEncoder { } _encodeEventContent (bytes, content) { - const keysLength = Object.keys(content).length - - let totalKeysLength = keysLength + let totalKeysLength = TEST_AND_SPAN_KEYS_LENGTH if (content.meta.test_session_id) { totalKeysLength = totalKeysLength + 1 } @@ -161,6 +160,9 @@ class AgentlessCiVisibilityEncoder extends AgentEncoder { if (itrCorrelationId) { totalKeysLength = totalKeysLength + 1 } + if (content.type) { + totalKeysLength = totalKeysLength + 1 + } this._encodeMapPrefix(bytes, totalKeysLength) if (content.type) { this._encodeString(bytes, 'type') diff --git a/packages/dd-trace/src/format.js b/packages/dd-trace/src/format.js index cbe41458a83..d81ea7e5b21 100644 --- a/packages/dd-trace/src/format.js +++ b/packages/dd-trace/src/format.js @@ -33,6 +33,7 @@ const map = { function format (span) { const formatted = formatSpan(span) + extractSpanLinks(formatted, span) extractRootTags(formatted, span) extractChunkTags(formatted, span) extractTags(formatted, span) @@ -53,7 +54,8 @@ function formatSpan (span) { meta: {}, metrics: {}, start: Math.round(span._startTime * 1e6), - duration: Math.round(span._duration * 1e6) + duration: Math.round(span._duration * 1e6), + links: [] } } @@ -64,6 +66,28 @@ function setSingleSpanIngestionTags (span, options) { addTag({}, span.metrics, SPAN_SAMPLING_MAX_PER_SECOND, options.maxPerSecond) } +function extractSpanLinks (trace, span) { + const links = [] + if (span._links) { + for (const link of span._links) { + const { context, attributes } = link + const formattedLink = {} + + formattedLink.trace_id = context.toTraceId(true) + formattedLink.span_id = context.toSpanId(true) + + if (attributes && Object.keys(attributes).length > 0) { + formattedLink.attributes = attributes + } + if (context?._sampling?.priority >= 0) formattedLink.flags = context._sampling.priority > 0 ? 
1 : 0 + if (context?._tracestate) formattedLink.tracestate = context._tracestate.toString() + + links.push(formattedLink) + } + } + if (links.length > 0) { trace.meta['_dd.span_links'] = JSON.stringify(links) } +} + function extractTags (trace, span) { const context = span.context() const origin = context._trace.origin diff --git a/packages/dd-trace/src/noop/span.js b/packages/dd-trace/src/noop/span.js index 3c5fac81b1b..bee3ce11702 100644 --- a/packages/dd-trace/src/noop/span.js +++ b/packages/dd-trace/src/noop/span.js @@ -18,6 +18,7 @@ class NoopSpan { getBaggageItem (key) {} setTag (key, value) { return this } addTags (keyValueMap) { return this } + addLink (link) { return this } log () { return this } logEvent () {} finish (finishTime) {} diff --git a/packages/dd-trace/src/opentelemetry/span.js b/packages/dd-trace/src/opentelemetry/span.js index 2ff7a37c577..1bd20ca18e6 100644 --- a/packages/dd-trace/src/opentelemetry/span.js +++ b/packages/dd-trace/src/opentelemetry/span.js @@ -132,7 +132,8 @@ class Span { tags: { [SERVICE_NAME]: _tracer._service, [RESOURCE_NAME]: spanName - } + }, + links }, _tracer._debug) if (attributes) { @@ -148,7 +149,6 @@ class Span { // math for computing opentracing timestamps is apparently lossy... this.startTime = hrStartTime this.kind = kind - this.links = links this._spanProcessor.onStart(this, context) } @@ -191,6 +191,13 @@ class Span { return this } + addLink (context, attributes) { + // extract dd context + const ddSpanContext = context._ddContext + this._ddSpan.addLink(ddSpanContext, attributes) + return this + } + setStatus ({ code, message }) { if (!this.ended && !this._hasStatus && code) { this._hasStatus = true diff --git a/packages/dd-trace/src/opentracing/span.js b/packages/dd-trace/src/opentracing/span.js index 5ba2149503e..52e6747eca4 100644 --- a/packages/dd-trace/src/opentracing/span.js +++ b/packages/dd-trace/src/opentracing/span.js @@ -26,6 +26,7 @@ const unfinishedRegistry = createRegistry('unfinished') const finishedRegistry = createRegistry('finished') const OTEL_ENABLED = !!process.env.DD_TRACE_OTEL_ENABLED +const ALLOWED = ['string', 'number', 'boolean'] const integrationCounters = { span_created: {}, @@ -82,6 +83,9 @@ class DatadogSpan { this._startTime = fields.startTime || this._getTime() + this._links = [] + fields.links && fields.links.forEach(link => this.addLink(link.context, link.attributes)) + if (DD_TRACE_EXPERIMENTAL_SPAN_COUNTS && finishedRegistry) { runtimeMetrics.increment('runtime.node.spans.unfinished') runtimeMetrics.increment('runtime.node.spans.unfinished.by.name', `span_name:${operationName}`) @@ -150,6 +154,13 @@ class DatadogSpan { logEvent () {} + addLink (context, attributes) { + this._links.push({ + context: context._ddContext ? context._ddContext : context, + attributes: this._sanitizeAttributes(attributes) + }) + } + finish (finishTime) { if (this._duration !== undefined) { return @@ -185,6 +196,33 @@ class DatadogSpan { this._processor.process(this) } + _sanitizeAttributes (attributes = {}) { + const sanitizedAttributes = {} + + const addArrayOrScalarAttributes = (key, maybeArray) => { + if (Array.isArray(maybeArray)) { + for (const subkey in maybeArray) { + addArrayOrScalarAttributes(`${key}.${subkey}`, maybeArray[subkey]) + } + } else { + const maybeScalar = maybeArray + if (ALLOWED.includes(typeof maybeScalar)) { + // Wrap the value as a string if it's not already a string + sanitizedAttributes[key] = typeof maybeScalar === 'string' ? 
maybeScalar : String(maybeScalar) + } else { + log.warn(`Dropping span link attribute. It is not of an allowed type`) + } + } + } + + Object.entries(attributes).forEach(entry => { + const [key, value] = entry + addArrayOrScalarAttributes(key, value) + }) + + return sanitizedAttributes + } + _createContext (parent, fields) { let spanContext let startTime diff --git a/packages/dd-trace/src/opentracing/span_context.js b/packages/dd-trace/src/opentracing/span_context.js index aaa0ae26bc0..0e586279f6f 100644 --- a/packages/dd-trace/src/opentracing/span_context.js +++ b/packages/dd-trace/src/opentracing/span_context.js @@ -28,20 +28,26 @@ class DatadogSpanContext { } } - toTraceId () { + toTraceId (get128bitId = false) { + if (get128bitId) { + return this._traceId.toBuffer().length <= 8 && this._trace.tags[TRACE_ID_128] + ? this._trace.tags[TRACE_ID_128] + this._traceId.toString(16).padStart(16, '0') + : this._traceId.toString(16).padStart(32, '0') + } return this._traceId.toString(10) } - toSpanId () { + toSpanId (get128bitId = false) { + if (get128bitId) { + return this._spanId.toString(16).padStart(16, '0') + } return this._spanId.toString(10) } toTraceparent () { const flags = this._sampling.priority >= AUTO_KEEP ? '01' : '00' - const traceId = this._traceId.toBuffer().length <= 8 && this._trace.tags[TRACE_ID_128] - ? this._trace.tags[TRACE_ID_128] + this._traceId.toString(16).padStart(16, '0') - : this._traceId.toString(16).padStart(32, '0') - const spanId = this._spanId.toString(16).padStart(16, '0') + const traceId = this.toTraceId(true) + const spanId = this.toSpanId(true) const version = (this._traceparent && this._traceparent.version) || '00' return `${version}-${traceId}-${spanId}-${flags}` } diff --git a/packages/dd-trace/src/opentracing/tracer.js b/packages/dd-trace/src/opentracing/tracer.js index 8b18938631b..13e6b9c1500 100644 --- a/packages/dd-trace/src/opentracing/tracer.js +++ b/packages/dd-trace/src/opentracing/tracer.js @@ -61,7 +61,8 @@ class DatadogTracer { startTime: options.startTime, hostname: this._hostname, traceId128BitGenerationEnabled: this._traceId128BitGenerationEnabled, - integrationName: options.integrationName + integrationName: options.integrationName, + links: options.links }, this._debug) span.addTags(this._config.tags) diff --git a/packages/dd-trace/test/encode/0.4.spec.js b/packages/dd-trace/test/encode/0.4.spec.js index e6db8af12f6..5ff1bc96fa9 100644 --- a/packages/dd-trace/test/encode/0.4.spec.js +++ b/packages/dd-trace/test/encode/0.4.spec.js @@ -44,7 +44,8 @@ describe('encode', () => { example: 1 }, start: 123, - duration: 456 + duration: 456, + links: [] }] }) @@ -183,4 +184,55 @@ describe('encode', () => { expect(decodedData.parent_id.toString(16)).to.equal('1234abcd1234abcd') }) }) + + it('should encode spanLinks', () => { + const traceIdHigh = id('10') + const traceId = id('1234abcd1234abcd') + const rootTid = traceIdHigh.toString(16).padStart(16, '0') + const rootT64 = traceId.toString(16).padStart(16, '0') + const traceIdVal = `${rootTid}${rootT64}` + + const encodedLink = `[{"trace_id":"${traceIdVal}","span_id":"1234abcd1234abcd",` + + `"attributes":{"foo":"bar"},"tracestate":"dd=s:-1;o:foo;t.dm:-4;t.usr.id:bar","flags":1}]` + + data[0].meta['_dd.span_links'] = encodedLink + + encoder.encode(data) + + const buffer = encoder.makePayload() + const decoded = msgpack.decode(buffer, { codec }) + const trace = decoded[0] + expect(trace).to.be.instanceof(Array) + expect(trace[0]).to.be.instanceof(Object) + 
expect(trace[0].trace_id.toString(16)).to.equal(data[0].trace_id.toString()) + expect(trace[0].span_id.toString(16)).to.equal(data[0].span_id.toString()) + expect(trace[0].parent_id.toString(16)).to.equal(data[0].parent_id.toString()) + expect(trace[0].start.toNumber()).to.equal(123) + expect(trace[0].duration.toNumber()).to.equal(456) + expect(trace[0].name).to.equal(data[0].name) + expect(trace[0].meta).to.deep.equal({ bar: 'baz', '_dd.span_links': encodedLink }) + expect(trace[0].metrics).to.deep.equal({ example: 1 }) + }) + + it('should encode spanLinks with just span and trace id', () => { + const traceId = '00000000000000001234abcd1234abcd' + const spanId = '1234abcd1234abcd' + const encodedLink = `[{"trace_id":"${traceId}","span_id":"${spanId}"}]` + data[0].meta['_dd.span_links'] = encodedLink + encoder.encode(data) + + const buffer = encoder.makePayload() + const decoded = msgpack.decode(buffer, { codec }) + const trace = decoded[0] + expect(trace).to.be.instanceof(Array) + expect(trace[0]).to.be.instanceof(Object) + expect(trace[0].trace_id.toString(16)).to.equal(data[0].trace_id.toString()) + expect(trace[0].span_id.toString(16)).to.equal(data[0].span_id.toString()) + expect(trace[0].parent_id.toString(16)).to.equal(data[0].parent_id.toString()) + expect(trace[0].start.toNumber()).to.equal(123) + expect(trace[0].duration.toNumber()).to.equal(456) + expect(trace[0].name).to.equal(data[0].name) + expect(trace[0].meta).to.deep.equal({ bar: 'baz', '_dd.span_links': encodedLink }) + expect(trace[0].metrics).to.deep.equal({ example: 1 }) + }) }) diff --git a/packages/dd-trace/test/encode/0.5.spec.js b/packages/dd-trace/test/encode/0.5.spec.js index 4da755742ad..45f4c972d42 100644 --- a/packages/dd-trace/test/encode/0.5.spec.js +++ b/packages/dd-trace/test/encode/0.5.spec.js @@ -36,7 +36,8 @@ describe('encode 0.5', () => { example: 1 }, start: 123123123123123120, - duration: 456456456456456456 + duration: 456456456456456456, + links: [] }] }) @@ -64,6 +65,80 @@ describe('encode 0.5', () => { expect(stringMap[trace[0][11]]).to.equal('') // unset }) + it('should encode span links', () => { + const traceIdHigh = id('10') + const traceId = id('1234abcd1234abcd') + const rootTid = traceIdHigh.toString(16).padStart(16, '0') + const rootT64 = traceId.toString(16).padStart(16, '0') + const traceIdVal = `${rootTid}${rootT64}` + + const encodedLink = `[{"trace_id":"${traceIdVal}","span_id":"1234abcd1234abcd",` + + `"attributes":{"foo":"bar"},"tracestate":"dd=s:-1;o:foo;t.dm:-4;t.usr.id:bar","flags":1}]` + + data[0].meta['_dd.span_links'] = encodedLink + + encoder.encode(data) + + const buffer = encoder.makePayload() + const decoded = msgpack.decode(buffer, { codec }) + const stringMap = decoded[0] + const trace = decoded[1][0] + + expect(trace).to.be.instanceof(Array) + expect(trace[0]).to.be.instanceof(Array) + expect(stringMap[trace[0][0]]).to.equal(data[0].service) + expect(stringMap[trace[0][1]]).to.equal(data[0].name) + expect(stringMap[trace[0][2]]).to.equal(data[0].resource) + expect(stringMap).to.include('_dd.span_links') + expect(stringMap).to.include(encodedLink) + expect(trace[0][3].toString(16)).to.equal(data[0].trace_id.toString()) + expect(trace[0][4].toString(16)).to.equal(data[0].span_id.toString()) + expect(trace[0][5].toString(16)).to.equal(data[0].parent_id.toString()) + expect(trace[0][6].toNumber()).to.equal(data[0].start) + expect(trace[0][7].toNumber()).to.equal(data[0].duration) + expect(trace[0][8]).to.equal(0) + expect(trace[0][9]).to.deep.equal({ + 
[stringMap.indexOf('bar')]: stringMap.indexOf('baz'), + [stringMap.indexOf('_dd.span_links')]: stringMap.indexOf(encodedLink) + }) + expect(trace[0][10]).to.deep.equal({ [stringMap.indexOf('example')]: 1 }) + expect(stringMap[trace[0][11]]).to.equal('') // unset + }) + + it('should encode span link with just span and trace id', () => { + const traceId = '00000000000000001234abcd1234abcd' + const spanId = '1234abcd1234abcd' + const encodedLink = `[{"trace_id":"${traceId}","span_id":"${spanId}"}]` + data[0].meta['_dd.span_links'] = encodedLink + + encoder.encode(data) + + const buffer = encoder.makePayload() + const decoded = msgpack.decode(buffer, { codec }) + const stringMap = decoded[0] + const trace = decoded[1][0] + + expect(trace).to.be.instanceof(Array) + expect(trace[0]).to.be.instanceof(Array) + expect(stringMap[trace[0][0]]).to.equal(data[0].service) + expect(stringMap[trace[0][1]]).to.equal(data[0].name) + expect(stringMap[trace[0][2]]).to.equal(data[0].resource) + expect(stringMap).to.include('_dd.span_links') + expect(stringMap).to.include(encodedLink) + expect(trace[0][3].toString(16)).to.equal(data[0].trace_id.toString()) + expect(trace[0][4].toString(16)).to.equal(data[0].span_id.toString()) + expect(trace[0][5].toString(16)).to.equal(data[0].parent_id.toString()) + expect(trace[0][6].toNumber()).to.equal(data[0].start) + expect(trace[0][7].toNumber()).to.equal(data[0].duration) + expect(trace[0][8]).to.equal(0) + expect(trace[0][9]).to.deep.equal({ + [stringMap.indexOf('bar')]: stringMap.indexOf('baz'), + [stringMap.indexOf('_dd.span_links')]: stringMap.indexOf(encodedLink) + }) + expect(trace[0][10]).to.deep.equal({ [stringMap.indexOf('example')]: 1 }) + expect(stringMap[trace[0][11]]).to.equal('') // unset + }) + it('should truncate long IDs', () => { data[0].trace_id = id('ffffffffffffffff1234abcd1234abcd') data[0].span_id = id('ffffffffffffffff1234abcd1234abcd') diff --git a/packages/dd-trace/test/format.spec.js b/packages/dd-trace/test/format.spec.js index d6f218156b4..516fbb25c33 100644 --- a/packages/dd-trace/test/format.spec.js +++ b/packages/dd-trace/test/format.spec.js @@ -24,14 +24,20 @@ const ERROR_STACK = constants.ERROR_STACK const ERROR_TYPE = constants.ERROR_TYPE const spanId = id('0234567812345678') +const spanId2 = id('0254567812345678') +const spanId3 = id('0264567812345678') describe('format', () => { let format let span let trace let spanContext + let spanContext2 + let spanContext3 + let TraceState beforeEach(() => { + TraceState = require('../src/opentracing/propagation/tracestate') spanContext = { _traceId: spanId, _spanId: spanId, @@ -40,9 +46,12 @@ describe('format', () => { _metrics: {}, _sampling: {}, _trace: { - started: [] + started: [], + tags: {} }, - _name: 'operation' + _name: 'operation', + toTraceId: sinon.stub().returns(spanId), + toSpanId: sinon.stub().returns(spanId) } span = { @@ -57,6 +66,23 @@ describe('format', () => { spanContext._trace.started.push(span) + spanContext2 = { + ...spanContext, + _traceId: spanId2, + _spanId: spanId2, + _parentId: spanId2, + toTraceId: sinon.stub().returns(spanId2.toString(16)), + toSpanId: sinon.stub().returns(spanId2.toString(16)) + } + spanContext3 = { + ...spanContext, + _traceId: spanId3, + _spanId: spanId3, + _parentId: spanId3, + toTraceId: sinon.stub().returns(spanId3.toString(16)), + toSpanId: sinon.stub().returns(spanId3.toString(16)) + } + format = require('../src/format') }) @@ -182,6 +208,60 @@ describe('format', () => { ) }) + it('should format span links', () => { + span._links = [ + { 
+ context: spanContext2 + }, + { + context: spanContext3 + } + ] + + trace = format(span) + const spanLinks = JSON.parse(trace.meta['_dd.span_links']) + + expect(spanLinks).to.deep.equal([{ + trace_id: spanId2.toString(16), + span_id: spanId2.toString(16) + }, { + trace_id: spanId3.toString(16), + span_id: spanId3.toString(16) + }]) + }) + + it('creates a span link', () => { + const ts = TraceState.fromString('dd=s:-1;o:foo;t.dm:-4;t.usr.id:bar') + const traceIdHigh = '0000000000000010' + spanContext2._tracestate = ts + spanContext2._trace = { + started: [], + finished: [], + origin: 'synthetics', + tags: { + '_dd.p.tid': traceIdHigh + } + } + + spanContext2._sampling.priority = 0 + const link = { + context: spanContext2, + attributes: { foo: 'bar' } + } + span._links = [link] + + trace = format(span) + const spanLinks = JSON.parse(trace.meta['_dd.span_links']) + + expect(spanLinks).to.deep.equal([{ + trace_id: spanId2.toString(16), + span_id: spanId2.toString(16), + attributes: { foo: 'bar' }, + tracestate: ts.toString(), + flags: 0 + }]) + }) + it('should extract trace chunk tags', () => { spanContext._trace.tags = { chunk: 'test', diff --git a/packages/dd-trace/test/opentelemetry/span.spec.js b/packages/dd-trace/test/opentelemetry/span.spec.js index 48dd3f6076f..98f616076e2 100644 --- a/packages/dd-trace/test/opentelemetry/span.spec.js +++ b/packages/dd-trace/test/opentelemetry/span.spec.js @@ -287,6 +287,20 @@ describe('OTel Span', () => { expect(_tags).to.have.property('baz', 'buz') }) + it('should set span links', () => { + const span = makeSpan('name') + const span2 = makeSpan('name2') + const span3 = makeSpan('name3') + + const { _links } = span._ddSpan + + span.addLink(span2.spanContext()) + expect(_links).to.have.lengthOf(1) + + span.addLink(span3.spanContext()) + expect(_links).to.have.lengthOf(2) + }) + it('should set status', () => { const unset = makeSpan('name') const unsetCtx = unset._ddSpan.context() diff --git a/packages/dd-trace/test/opentracing/span.spec.js b/packages/dd-trace/test/opentracing/span.spec.js index 6566faa053c..c86ad423dee 100644 --- a/packages/dd-trace/test/opentracing/span.spec.js +++ b/packages/dd-trace/test/opentracing/span.spec.js @@ -216,6 +216,67 @@ describe('Span', () => { }) }) + // TODO are these tests trivial? 
+ describe('links', () => { + it('should allow links to be added', () => { + span = new Span(tracer, processor, prioritySampler, { operationName: 'operation' }) + const span2 = new Span(tracer, processor, prioritySampler, { operationName: 'operation' }) + + span.addLink(span2.context()) + expect(span).to.have.property('_links') + expect(span._links).to.have.lengthOf(1) + }) + + it('sanitizes attributes', () => { + span = new Span(tracer, processor, prioritySampler, { operationName: 'operation' }) + const span2 = new Span(tracer, processor, prioritySampler, { operationName: 'operation' }) + + const attributes = { + foo: 'bar', + baz: 'qux' + } + span.addLink(span2.context(), attributes) + expect(span._links[0].attributes).to.deep.equal(attributes) + }) + + it('sanitizes nested attributes', () => { + span = new Span(tracer, processor, prioritySampler, { operationName: 'operation' }) + const span2 = new Span(tracer, processor, prioritySampler, { operationName: 'operation' }) + + const attributes = { + foo: true, + bar: 'hi', + baz: 1, + qux: [1, 2, 3] + } + + span.addLink(span2.context(), attributes) + expect(span._links[0].attributes).to.deep.equal({ + foo: 'true', + bar: 'hi', + baz: '1', + 'qux.0': '1', + 'qux.1': '2', + 'qux.2': '3' + }) + }) + + it('sanitizes invalid attributes', () => { + span = new Span(tracer, processor, prioritySampler, { operationName: 'operation' }) + const span2 = new Span(tracer, processor, prioritySampler, { operationName: 'operation' }) + const attributes = { + foo: () => {}, + bar: Symbol('bar'), + baz: 'valid' + } + + span.addLink(span2.context(), attributes) + expect(span._links[0].attributes).to.deep.equal({ + baz: 'valid' + }) + }) + }) + describe('getBaggageItem', () => { it('should get a baggage item', () => { span = new Span(tracer, processor, prioritySampler, { operationName: 'operation' }) diff --git a/packages/dd-trace/test/opentracing/tracer.spec.js b/packages/dd-trace/test/opentracing/tracer.spec.js index 0c3fb37fbf3..4efb96fc753 100644 --- a/packages/dd-trace/test/opentracing/tracer.spec.js +++ b/packages/dd-trace/test/opentracing/tracer.spec.js @@ -120,7 +120,8 @@ describe('Tracer', () => { startTime: fields.startTime, hostname: undefined, traceId128BitGenerationEnabled: undefined, - integrationName: undefined + integrationName: undefined, + links: undefined }, true) expect(span.addTags).to.have.been.calledWith({ @@ -178,7 +179,8 @@ describe('Tracer', () => { startTime: fields.startTime, hostname: os.hostname(), traceId128BitGenerationEnabled: undefined, - integrationName: undefined + integrationName: undefined, + links: undefined }) expect(testSpan).to.equal(span) @@ -249,7 +251,30 @@ describe('Tracer', () => { startTime: fields.startTime, hostname: undefined, traceId128BitGenerationEnabled: true, - integrationName: undefined + integrationName: undefined, + links: undefined + }) + + expect(testSpan).to.equal(span) + }) + + it('should start a span with span links attached', () => { + const context = new SpanContext() + fields.links = [{ context }] + tracer = new Tracer(config) + const testSpan = tracer.startSpan('name', fields) + + expect(Span).to.have.been.calledWith(tracer, processor, prioritySampler, { + operationName: 'name', + parent: null, + tags: { + 'service.name': 'service' + }, + startTime: fields.startTime, + hostname: undefined, + traceId128BitGenerationEnabled: undefined, + integrationName: undefined, + links: [{ context }] }) expect(testSpan).to.equal(span) From f6e95b98caa3ef5eb8157a26a7c5bacf0a0420b1 Mon Sep 17 00:00:00 2001 From: 
Thomas Hunter II Date: Wed, 7 Feb 2024 16:05:46 -0800 Subject: [PATCH 25/44] upgrade mocha@8 to mocha@9 (VULN-5521, VULN-5526) (#4036) --- package.json | 2 +- .../datadog-plugin-mysql/test/index.spec.js | 18 ++- .../datadog-plugin-mysql2/test/index.spec.js | 12 +- packages/datadog-plugin-pg/test/index.spec.js | 14 +- yarn.lock | 151 +++++++----------- 5 files changed, 82 insertions(+), 115 deletions(-) diff --git a/package.json b/package.json index fb22352d305..6181b586d23 100644 --- a/package.json +++ b/package.json @@ -130,7 +130,7 @@ "jszip": "^3.5.0", "knex": "^2.4.2", "mkdirp": "^3.0.1", - "mocha": "8", + "mocha": "^9", "multer": "^1.4.5-lts.1", "nock": "^11.3.3", "nyc": "^15.1.0", diff --git a/packages/datadog-plugin-mysql/test/index.spec.js b/packages/datadog-plugin-mysql/test/index.spec.js index 12b4e45457a..e80e608da86 100644 --- a/packages/datadog-plugin-mysql/test/index.spec.js +++ b/packages/datadog-plugin-mysql/test/index.spec.js @@ -6,6 +6,8 @@ const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/c const { expectedSchema, rawExpectedSchema } = require('./naming') +const ddpv = require('mocha/package.json').version + describe('Plugin', () => { let mysql let tracer @@ -319,7 +321,7 @@ describe('Plugin', () => { connection.query('SELECT 1 + 1 AS solution', () => { try { expect(connection._protocol._queue[0].sql).to.equal( - `/*dddbs='serviced',dde='tester',ddps='test',ddpv='8.4.0'*/ SELECT 1 + 1 AS solution`) + `/*dddbs='serviced',dde='tester',ddps='test',ddpv='${ddpv}'*/ SELECT 1 + 1 AS solution`) } catch (e) { done(e) } @@ -333,7 +335,7 @@ describe('Plugin', () => { connection.query('SELECT 1 + 1 AS solution', () => { try { expect(connection._protocol._queue[0].sql).to.equal( - `/*dddbs='db',dde='tester',ddps='test',ddpv='8.4.0'*/ SELECT 1 + 1 AS solution`) + `/*dddbs='db',dde='tester',ddps='test',ddpv='${ddpv}'*/ SELECT 1 + 1 AS solution`) } catch (e) { done(e) } @@ -347,7 +349,7 @@ describe('Plugin', () => { connection.query('SELECT 1 + 1 AS solution', () => { try { expect(connection._protocol._queue[0].sql).to.equal( - `/*dddbs='remappedDB',dde='tester',ddps='test',ddpv='8.4.0'*/ SELECT 1 + 1 AS solution`) + `/*dddbs='remappedDB',dde='tester',ddps='test',ddpv='${ddpv}'*/ SELECT 1 + 1 AS solution`) } catch (e) { done(e) } @@ -375,7 +377,7 @@ describe('Plugin', () => { connection.query('SELECT 1 + 1 AS solution', () => { try { expect(connection._protocol._queue[0].sql).to.equal( - `/*dddbs='serviced',dde='tester',ddps='test',ddpv='8.4.0'*/ SELECT 1 + 1 AS solution`) + `/*dddbs='serviced',dde='tester',ddps='test',ddpv='${ddpv}'*/ SELECT 1 + 1 AS solution`) } catch (e) { done(e) } @@ -422,7 +424,7 @@ describe('Plugin', () => { try { expect(connection._protocol._queue[0].sql).to.equal( `/*dddbs='~!%40%23%24%25%5E%26*()_%2B%7C%3F%3F%2F%3C%3E',dde='tester',` + - `ddps='test',ddpv='8.4.0'*/ SELECT 1 + 1 AS solution`) + `ddps='test',ddpv='${ddpv}'*/ SELECT 1 + 1 AS solution`) done() } catch (e) { done(e) @@ -459,7 +461,7 @@ describe('Plugin', () => { const spanId = traces[0][0].span_id.toString(16).padStart(16, '0') expect(queryText).to.equal( - `/*dddbs='post',dde='tester',ddps='test',ddpv='8.4.0',` + + `/*dddbs='post',dde='tester',ddps='test',ddpv='${ddpv}',` + `traceparent='00-${traceId}-${spanId}-00'*/ SELECT 1 + 1 AS solution`) }).then(done, done) const clock = sinon.useFakeTimers(new Date()) @@ -502,7 +504,7 @@ describe('Plugin', () => { pool.query('SELECT 1 + 1 AS solution', () => { try { 
expect(pool._allConnections[0]._protocol._queue[0].sql).to.equal( - `/*dddbs='post',dde='tester',ddps='test',ddpv='8.4.0'*/ SELECT 1 + 1 AS solution`) + `/*dddbs='post',dde='tester',ddps='test',ddpv='${ddpv}'*/ SELECT 1 + 1 AS solution`) } catch (e) { done(e) } @@ -539,7 +541,7 @@ describe('Plugin', () => { const spanId = traces[0][0].span_id.toString(16).padStart(16, '0') expect(queryText).to.equal( - `/*dddbs='post',dde='tester',ddps='test',ddpv='8.4.0',` + + `/*dddbs='post',dde='tester',ddps='test',ddpv='${ddpv}',` + `traceparent='00-${traceId}-${spanId}-00'*/ SELECT 1 + 1 AS solution`) }).then(done, done) const clock = sinon.useFakeTimers(new Date()) diff --git a/packages/datadog-plugin-mysql2/test/index.spec.js b/packages/datadog-plugin-mysql2/test/index.spec.js index 38d6c487eaa..a8f372ed48b 100644 --- a/packages/datadog-plugin-mysql2/test/index.spec.js +++ b/packages/datadog-plugin-mysql2/test/index.spec.js @@ -6,6 +6,8 @@ const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/c const { expectedSchema, rawExpectedSchema } = require('./naming') +const ddpv = require('mocha/package.json').version + describe('Plugin', () => { let mysql2 let tracer @@ -366,7 +368,7 @@ describe('Plugin', () => { const connect = connection.query('SELECT 1 + 1 AS solution', (...args) => { try { expect(connect.sql).to.equal(`/*dddbs='serviced',dde='tester',` + - `ddps='test',ddpv='8.4.0'*/ SELECT 1 + 1 AS solution`) + `ddps='test',ddpv='${ddpv}'*/ SELECT 1 + 1 AS solution`) } catch (e) { done(e) } @@ -412,7 +414,7 @@ describe('Plugin', () => { try { expect(connect.sql).to.equal( `/*dddbs='~!%40%23%24%25%5E%26*()_%2B%7C%3F%3F%2F%3C%3E',dde='tester',` + - `ddps='test',ddpv='8.4.0'*/ SELECT 1 + 1 AS solution`) + `ddps='test',ddpv='${ddpv}'*/ SELECT 1 + 1 AS solution`) done() } catch (e) { done(e) @@ -449,7 +451,7 @@ describe('Plugin', () => { const spanId = traces[0][0].span_id.toString(16).padStart(16, '0') expect(queryText).to.equal( - `/*dddbs='post',dde='tester',ddps='test',ddpv='8.4.0',` + + `/*dddbs='post',dde='tester',ddps='test',ddpv='${ddpv}',` + `traceparent='00-${traceId}-${spanId}-00'*/ SELECT 1 + 1 AS solution`) }).then(done, done) const clock = sinon.useFakeTimers(new Date()) @@ -492,7 +494,7 @@ describe('Plugin', () => { const queryPool = pool.query('SELECT 1 + 1 AS solution', () => { try { expect(queryPool.sql).to.equal( - `/*dddbs='post',dde='tester',ddps='test',ddpv='8.4.0'*/ SELECT 1 + 1 AS solution`) + `/*dddbs='post',dde='tester',ddps='test',ddpv='${ddpv}'*/ SELECT 1 + 1 AS solution`) } catch (e) { done(e) } @@ -529,7 +531,7 @@ describe('Plugin', () => { const spanId = traces[0][0].span_id.toString(16).padStart(16, '0') expect(queryText).to.equal( - `/*dddbs='post',dde='tester',ddps='test',ddpv='8.4.0',` + + `/*dddbs='post',dde='tester',ddps='test',ddpv='${ddpv}',` + `traceparent='00-${traceId}-${spanId}-00'*/ SELECT 1 + 1 AS solution`) }).then(done, done) const clock = sinon.useFakeTimers(new Date()) diff --git a/packages/datadog-plugin-pg/test/index.spec.js b/packages/datadog-plugin-pg/test/index.spec.js index 6dd64bf7c1d..5dc866bdfc1 100644 --- a/packages/datadog-plugin-pg/test/index.spec.js +++ b/packages/datadog-plugin-pg/test/index.spec.js @@ -8,6 +8,8 @@ const net = require('net') const { expectedSchema, rawExpectedSchema } = require('./naming') const EventEmitter = require('events') +const ddpv = require('mocha/package.json').version + const clients = { pg: pg => pg.Client } @@ -378,7 +380,7 @@ describe('Plugin', () => { if (client.queryQueue[0] !== 
undefined) { try { expect(client.queryQueue[0].text).to.equal( - `/*dddbs='serviced',dde='tester',ddps='test',ddpv='8.4.0'*/ SELECT $1::text as message`) + `/*dddbs='serviced',dde='tester',ddps='test',ddpv='${ddpv}'*/ SELECT $1::text as message`) } catch (e) { done(e) } @@ -436,7 +438,7 @@ describe('Plugin', () => { try { expect(clientDBM.queryQueue[0].text).to.equal( `/*dddbs='~!%40%23%24%25%5E%26*()_%2B%7C%3F%3F%2F%3C%3E',dde='tester',` + - `ddps='test',ddpv='8.4.0'*/ SELECT $1::text as message`) + `ddps='test',ddpv='${ddpv}'*/ SELECT $1::text as message`) done() } catch (e) { done(e) @@ -577,7 +579,7 @@ describe('Plugin', () => { const spanId = traces[0][0].span_id.toString(16).padStart(16, '0') expect(queryText).to.equal( - `/*dddbs='post',dde='tester',ddps='test',ddpv='8.4.0',` + + `/*dddbs='post',dde='tester',ddps='test',ddpv='${ddpv}',` + `traceparent='00-${traceId}-${spanId}-00'*/ SELECT $1::text as message`) }).then(done, done) @@ -623,7 +625,7 @@ describe('Plugin', () => { agent.use(traces => { expect(queryText).to.equal( - `/*dddbs='post',dde='tester',ddps='test',ddpv='8.4.0'` + + `/*dddbs='post',dde='tester',ddps='test',ddpv='${ddpv}'` + `*/ SELECT $1::text as message`) }).then(done, done) @@ -647,7 +649,7 @@ describe('Plugin', () => { agent.use(traces => { expect(queryText).to.equal( - `/*dddbs='post',dde='tester',ddps='test',ddpv='8.4.0'` + + `/*dddbs='post',dde='tester',ddps='test',ddpv='${ddpv}'` + `*/ SELECT $1::text as message`) }).then(done, done) @@ -681,7 +683,7 @@ describe('Plugin', () => { agent.use(traces => { expect(queryText).to.equal( - `/*dddbs='post',dde='tester',ddps='test',ddpv='8.4.0'` + + `/*dddbs='post',dde='tester',ddps='test',ddpv='${ddpv}'` + `*/ SELECT $1::text as greeting`) }).then(done, done) diff --git a/yarn.lock b/yarn.lock index 59d734e4d7c..6394d33acad 100644 --- a/yarn.lock +++ b/yarn.lock @@ -962,14 +962,6 @@ ansicolors@~0.3.2: resolved "https://registry.npmjs.org/ansicolors/-/ansicolors-0.3.2.tgz" integrity sha512-QXu7BPrP29VllRxH8GwB7x5iX5qWKAAMLqKQGWTeLWVlNHNOpVMJ91dsxQAIWXpjuW5wqvxu3Jd/nRjrJ+0pqg== -anymatch@~3.1.1: - version "3.1.2" - resolved "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz" - integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== - dependencies: - normalize-path "^3.0.0" - picomatch "^2.0.4" - anymatch@~3.1.2: version "3.1.3" resolved "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz" @@ -1414,22 +1406,7 @@ checksum@^1.0.0: dependencies: optimist "~0.3.5" -chokidar@3.5.1: - version "3.5.1" - resolved "https://registry.npmjs.org/chokidar/-/chokidar-3.5.1.tgz" - integrity sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw== - dependencies: - anymatch "~3.1.1" - braces "~3.0.2" - glob-parent "~5.1.0" - is-binary-path "~2.1.0" - is-glob "~4.0.1" - normalize-path "~3.0.0" - readdirp "~3.5.0" - optionalDependencies: - fsevents "~2.3.1" - -chokidar@^3.3.0: +chokidar@3.5.3, chokidar@^3.3.0: version "3.5.3" resolved "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz" integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== @@ -1684,10 +1661,10 @@ debug@2.6.9, debug@^2.6.9: dependencies: ms "2.0.0" -debug@4.3.1: - version "4.3.1" - resolved "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz" - integrity sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ== +debug@4.3.3: + version "4.3.3" + resolved 
"https://registry.yarnpkg.com/debug/-/debug-4.3.3.tgz#04266e0b70a98d4462e6e288e38259213332b664" + integrity sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q== dependencies: ms "2.1.2" @@ -2509,7 +2486,7 @@ fs.realpath@^1.0.0: resolved "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz" integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== -fsevents@~2.3.1, fsevents@~2.3.2: +fsevents@~2.3.2: version "2.3.2" resolved "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz" integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== @@ -2601,7 +2578,7 @@ getopts@2.3.0: resolved "https://registry.npmjs.org/getopts/-/getopts-2.3.0.tgz" integrity sha512-5eDf9fuSXwxBL6q5HX+dhDj+dslFGWzU5thZ9kNKUkcPtaPdatmUFKwHFrLb/uf/WpA4BHET+AX3Scl56cAjpA== -glob-parent@^5.1.2, glob-parent@~5.1.0, glob-parent@~5.1.2: +glob-parent@^5.1.2, glob-parent@~5.1.2: version "5.1.2" resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz" integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== @@ -2615,10 +2592,10 @@ glob-parent@^6.0.1: dependencies: is-glob "^4.0.3" -glob@7.1.6: - version "7.1.6" - resolved "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz" - integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== +glob@7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.0.tgz#d15535af7732e02e948f4c41628bd910293f6023" + integrity sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" @@ -3155,6 +3132,11 @@ is-typedarray@^1.0.0: resolved "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz" integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA== +is-unicode-supported@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz#3f26c76a809593b52bfa2ecb5710ed2779b522a7" + integrity sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw== + is-weakmap@^2.0.1: version "2.0.1" resolved "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.1.tgz" @@ -3294,10 +3276,10 @@ js-sdsl@^4.1.4: resolved "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== -js-yaml@4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-4.0.0.tgz" - integrity sha512-pqon0s+4ScYUvX30wxQi3PogGFAlUyH0awepWvwkj4jD4v+ova3RiYw8bmA6x2rDrEaj8i/oWKoRxpVNW+Re8Q== +js-yaml@4.1.0, js-yaml@^4.1.0: + version "4.1.0" + resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== dependencies: argparse "^2.0.1" @@ -3309,13 +3291,6 @@ js-yaml@^3.13.1: argparse "^1.0.7" esprima "^4.0.0" -js-yaml@^4.1.0: - version "4.1.0" - resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz" - integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== - dependencies: - argparse "^2.0.1" - jsesc@^2.5.1: version "2.5.2" resolved "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz" @@ -3471,12 +3446,13 @@ lodash@^4.17.13, 
lodash@^4.17.20, lodash@^4.17.21, lodash@^4.17.4: resolved "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== -log-symbols@4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/log-symbols/-/log-symbols-4.0.0.tgz" - integrity sha512-FN8JBzLx6CzeMrB0tg6pqlGU1wCrXW+ZXGH481kfsBqer0hToTIiHdjH4Mq8xJUbvATujKCvaREGWpGUionraA== +log-symbols@4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-4.1.0.tgz#3fbdbb95b4683ac9fc785111e792e558d4abd503" + integrity sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg== dependencies: - chalk "^4.0.0" + chalk "^4.1.0" + is-unicode-supported "^0.1.0" long@^5.0.0: version "5.2.0" @@ -3578,10 +3554,10 @@ mimic-fn@^2.1.0: resolved "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz" integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== -minimatch@3.0.4: - version "3.0.4" - resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz" - integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== +minimatch@4.2.1: + version "4.2.1" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-4.2.1.tgz#40d9d511a46bdc4e563c22c3080cde9c0d8299b4" + integrity sha512-9Uq1ChtSZO+Mxa/CL1eGizn2vRn3MlLgzhT0Iz8zaY8NdvxvB0d5QdPFmCKf7JKA9Lerx5vRrnwO03jsSfGG9g== dependencies: brace-expansion "^1.1.7" @@ -3631,33 +3607,32 @@ mkdirp@^3.0.1: resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz" integrity "sha1-5E5MVgf7J5wWgkFxPMbg/qmty1A= sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==" -mocha@8: - version "8.4.0" - resolved "https://registry.npmjs.org/mocha/-/mocha-8.4.0.tgz" - integrity sha512-hJaO0mwDXmZS4ghXsvPVriOhsxQ7ofcpQdm8dE+jISUOKopitvnXFQmpRR7jd2K6VBG6E26gU3IAbXXGIbu4sQ== +mocha@^9: + version "9.2.2" + resolved "https://registry.yarnpkg.com/mocha/-/mocha-9.2.2.tgz#d70db46bdb93ca57402c809333e5a84977a88fb9" + integrity sha512-L6XC3EdwT6YrIk0yXpavvLkn8h+EU+Y5UcCHKECyMbdUIxyMuZj4bX4U9e1nvnvUUvQVsV2VHQr5zLdcUkhW/g== dependencies: "@ungap/promise-all-settled" "1.1.2" ansi-colors "4.1.1" browser-stdout "1.3.1" - chokidar "3.5.1" - debug "4.3.1" + chokidar "3.5.3" + debug "4.3.3" diff "5.0.0" escape-string-regexp "4.0.0" find-up "5.0.0" - glob "7.1.6" + glob "7.2.0" growl "1.10.5" he "1.2.0" - js-yaml "4.0.0" - log-symbols "4.0.0" - minimatch "3.0.4" + js-yaml "4.1.0" + log-symbols "4.1.0" + minimatch "4.2.1" ms "2.1.3" - nanoid "3.1.20" - serialize-javascript "5.0.1" + nanoid "3.3.1" + serialize-javascript "6.0.0" strip-json-comments "3.1.1" supports-color "8.1.1" which "2.0.2" - wide-align "1.1.3" - workerpool "6.1.0" + workerpool "6.2.0" yargs "16.2.0" yargs-parser "20.2.4" yargs-unparser "2.0.0" @@ -3710,10 +3685,10 @@ multer@^1.4.5-lts.1: type-is "^1.6.4" xtend "^4.0.0" -nanoid@3.1.20: - version "3.1.20" - resolved "https://registry.npmjs.org/nanoid/-/nanoid-3.1.20.tgz" - integrity sha512-a1cQNyczgKbLX9jwbS/+d7W8fX/RfgYR7lVWwWOGIPNgK2m0MWvrGF6/m4kk6U3QcFMnZf3RIhL0v2Jgh/0Uxw== +nanoid@3.3.1: + version "3.3.1" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.1.tgz#6347a18cac88af88f58af0b3594b723d5e99bb35" + integrity sha512-n6Vs/3KGyxPQd6uO0eH4Bv0ojGSUvuLlIHtC3Y0kEO23YRge8H9x1GCzLn28YX0H66pMkxuaeESFq4tKISKwdw== natural-compare@^1.4.0: version "1.4.0" @@ -4261,13 +4236,6 @@ readable-stream@^2.2.2, 
readable-stream@~2.3.6: string_decoder "~1.1.1" util-deprecate "~1.0.1" -readdirp@~3.5.0: - version "3.5.0" - resolved "https://registry.npmjs.org/readdirp/-/readdirp-3.5.0.tgz" - integrity sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ== - dependencies: - picomatch "^2.2.1" - readdirp@~3.6.0: version "3.6.0" resolved "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz" @@ -4527,10 +4495,10 @@ send@0.18.0: range-parser "~1.2.1" statuses "2.0.1" -serialize-javascript@5.0.1: - version "5.0.1" - resolved "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-5.0.1.tgz" - integrity sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA== +serialize-javascript@6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.0.tgz#efae5d88f45d7924141da8b5c3a7a7e663fefeb8" + integrity sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag== dependencies: randombytes "^2.1.0" @@ -4690,7 +4658,7 @@ streamsearch@^1.1.0: resolved "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz" integrity sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg== -"string-width@^1.0.2 || 2", string-width@^2.1.1: +string-width@^2.1.1: version "2.1.1" resolved "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz" integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== @@ -5236,13 +5204,6 @@ which@2.0.2, which@^2.0.1, which@^2.0.2: dependencies: isexe "^2.0.0" -wide-align@1.1.3: - version "1.1.3" - resolved "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz" - integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA== - dependencies: - string-width "^1.0.2 || 2" - widest-line@^3.1.0: version "3.1.0" resolved "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz" @@ -5260,10 +5221,10 @@ wordwrap@~0.0.2: resolved "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz" integrity sha512-1tMA907+V4QmxV7dbRvb4/8MaRALK6q9Abid3ndMYnbyo8piisCmeONVqVSXqQA3KaP4SLt5b7ud6E2sqP8TFw== -workerpool@6.1.0: - version "6.1.0" - resolved "https://registry.npmjs.org/workerpool/-/workerpool-6.1.0.tgz" - integrity sha512-toV7q9rWNYha963Pl/qyeZ6wG+3nnsyvolaNUS8+R5Wtw6qJPTxIlOP1ZSvcGhEJw+l3HMMmtiNo9Gl61G4GVg== +workerpool@6.2.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-6.2.0.tgz#827d93c9ba23ee2019c3ffaff5c27fccea289e8b" + integrity sha512-Rsk5qQHJ9eowMH28Jwhe8HEbmdYDX4lwoMWshiCXugjtHqMD9ZbiqSDLxcsfdqsETPzVUtX5s1Z5kStiIM6l4A== wrap-ansi@^6.2.0: version "6.2.0" From f8ca3522346d818e5ae448e80bd852a4313f4819 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Thu, 8 Feb 2024 17:22:15 +0100 Subject: [PATCH 26/44] Sort the list of env vars for profiler config (#4040) --- packages/dd-trace/src/profiling/config.js | 44 +++++++++++------------ 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/packages/dd-trace/src/profiling/config.js b/packages/dd-trace/src/profiling/config.js index 513c25b9329..dad463d1ea9 100644 --- a/packages/dd-trace/src/profiling/config.js +++ b/packages/dd-trace/src/profiling/config.js @@ -18,34 +18,34 @@ const { isFalse, isTrue } = require('../util') class Config { constructor (options = {}) { const { - DD_PROFILING_ENABLED, - DD_PROFILING_PROFILERS, - DD_ENV, - DD_TAGS, - DD_SERVICE, - DD_VERSION, 
- DD_TRACE_AGENT_URL, DD_AGENT_HOST, - DD_TRACE_AGENT_PORT, + DD_ENV, + DD_PROFILING_CODEHOTSPOTS_ENABLED, DD_PROFILING_DEBUG_SOURCE_MAPS, - DD_PROFILING_UPLOAD_TIMEOUT, - DD_PROFILING_SOURCE_MAP, - DD_PROFILING_UPLOAD_PERIOD, - DD_PROFILING_PPROF_PREFIX, - DD_PROFILING_HEAP_ENABLED, - DD_PROFILING_V8_PROFILER_BUG_WORKAROUND, - DD_PROFILING_WALLTIME_ENABLED, + DD_PROFILING_ENABLED, + DD_PROFILING_ENDPOINT_COLLECTION_ENABLED, + DD_PROFILING_EXPERIMENTAL_CODEHOTSPOTS_ENABLED, DD_PROFILING_EXPERIMENTAL_CPU_ENABLED, - DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED, + DD_PROFILING_EXPERIMENTAL_ENDPOINT_COLLECTION_ENABLED, + DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES, DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE, DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT, - DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES, - DD_PROFILING_TIMELINE_ENABLED, + DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED, DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED, - DD_PROFILING_CODEHOTSPOTS_ENABLED, - DD_PROFILING_ENDPOINT_COLLECTION_ENABLED, - DD_PROFILING_EXPERIMENTAL_CODEHOTSPOTS_ENABLED, - DD_PROFILING_EXPERIMENTAL_ENDPOINT_COLLECTION_ENABLED + DD_PROFILING_HEAP_ENABLED, + DD_PROFILING_PPROF_PREFIX, + DD_PROFILING_PROFILERS, + DD_PROFILING_SOURCE_MAP, + DD_PROFILING_TIMELINE_ENABLED, + DD_PROFILING_UPLOAD_PERIOD, + DD_PROFILING_UPLOAD_TIMEOUT, + DD_PROFILING_V8_PROFILER_BUG_WORKAROUND, + DD_PROFILING_WALLTIME_ENABLED, + DD_SERVICE, + DD_TAGS, + DD_TRACE_AGENT_PORT, + DD_TRACE_AGENT_URL, + DD_VERSION } = process.env const enabled = isTrue(coalesce(options.enabled, DD_PROFILING_ENABLED, true)) From 9a29346f51e570e863a7cf92eb2cd7e661fa1b23 Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Thu, 8 Feb 2024 13:41:18 -0800 Subject: [PATCH 27/44] allow unix URLs for datastreams (#4041) --- packages/dd-trace/src/datastreams/writer.js | 7 +-- .../dd-trace/test/datastreams/writer.spec.js | 52 +++++++++++++++++++ 2 files changed, 54 insertions(+), 5 deletions(-) create mode 100644 packages/dd-trace/test/datastreams/writer.spec.js diff --git a/packages/dd-trace/src/datastreams/writer.js b/packages/dd-trace/src/datastreams/writer.js index ada79fe8a33..f8c9e021ecc 100644 --- a/packages/dd-trace/src/datastreams/writer.js +++ b/packages/dd-trace/src/datastreams/writer.js @@ -15,13 +15,10 @@ function makeRequest (data, url, cb) { 'Datadog-Meta-Tracer-Version': pkg.version, 'Content-Type': 'application/msgpack', 'Content-Encoding': 'gzip' - } + }, + url } - options.protocol = url.protocol - options.hostname = url.hostname - options.port = url.port - log.debug(() => `Request to the intake: ${JSON.stringify(options)}`) request(data, options, (err, res) => { diff --git a/packages/dd-trace/test/datastreams/writer.spec.js b/packages/dd-trace/test/datastreams/writer.spec.js new file mode 100644 index 00000000000..bf8cd941a14 --- /dev/null +++ b/packages/dd-trace/test/datastreams/writer.spec.js @@ -0,0 +1,52 @@ +'use strict' +require('../setup/tap') +const pkg = require('../../../../package.json') +const stubRequest = sinon.stub() +const msgpack = require('msgpack-lite') +const codec = msgpack.createCodec({ int64: true }) + +const stubZlib = { + gzip: (payload, _opts, fn) => { + fn(undefined, payload) + } +} + +const { DataStreamsWriter } = proxyquire( + '../src/datastreams/writer', { + '../exporters/common/request': stubRequest, + 'zlib': stubZlib + }) + +describe('DataStreamWriter unix', () => { + let writer + const unixConfig = { + hostname: '', + url: new URL('unix:///var/run/datadog/apm.socket'), + port: '' + 
} + + it('should construct unix config', () => { + writer = new DataStreamsWriter(unixConfig) + expect(writer._url).to.equal(unixConfig.url) + }) + + it("should call 'request' through flush with correct options", () => { + writer = new DataStreamsWriter(unixConfig) + writer.flush({}) + const stubRequestCall = stubRequest.getCalls().at(0) + const decodedPayload = msgpack.decode(stubRequestCall?.args[0], { codec }) + const requestOptions = stubRequestCall?.args[1] + expect(decodedPayload).to.deep.equal({}) + expect(requestOptions).to.deep.equal({ + path: '/v0.1/pipeline_stats', + method: 'POST', + headers: { + 'Datadog-Meta-Lang': 'javascript', + 'Datadog-Meta-Tracer-Version': pkg.version, + 'Content-Type': 'application/msgpack', + 'Content-Encoding': 'gzip' + }, + url: unixConfig.url + }) + }) +}) From d3a4ab92c46e72f317d8afa747238a4430d42aba Mon Sep 17 00:00:00 2001 From: simon-id Date: Fri, 9 Feb 2024 09:11:36 +0100 Subject: [PATCH 28/44] Support schema extraction in express response objects (#3976) --------- Co-authored-by: Ugaitz Urien Co-authored-by: simon-id --- .../datadog-instrumentations/src/express.js | 20 +++++ packages/dd-trace/src/appsec/addresses.js | 2 + .../src/appsec/api_security_sampler.js | 19 +++- packages/dd-trace/src/appsec/channels.js | 3 +- packages/dd-trace/src/appsec/index.js | 19 +++- .../test/appsec/api_security_rules.json | 2 +- .../test/appsec/api_security_sampler.spec.js | 8 +- .../test/appsec/index.express.plugin.spec.js | 87 ++++++++++++++++--- packages/dd-trace/test/appsec/index.spec.js | 59 ++++++++++++- 9 files changed, 193 insertions(+), 26 deletions(-) diff --git a/packages/datadog-instrumentations/src/express.js b/packages/datadog-instrumentations/src/express.js index b07c38a42fe..4916bd92f1c 100644 --- a/packages/datadog-instrumentations/src/express.js +++ b/packages/datadog-instrumentations/src/express.js @@ -19,11 +19,31 @@ function wrapHandle (handle) { const wrapRouterMethod = createWrapRouterMethod('express') +const responseJsonChannel = channel('datadog:express:response:json:start') + +function wrapResponseJson (json) { + return function wrappedJson (obj) { + if (responseJsonChannel.hasSubscribers) { + // backward compat as express 4.x supports deprecated 3.x signature + if (arguments.length === 2 && typeof arguments[1] !== 'number') { + obj = arguments[1] + } + + responseJsonChannel.publish({ req: this.req, body: obj }) + } + + return json.apply(this, arguments) + } +} + addHook({ name: 'express', versions: ['>=4'] }, express => { shimmer.wrap(express.application, 'handle', wrapHandle) shimmer.wrap(express.Router, 'use', wrapRouterMethod) shimmer.wrap(express.Router, 'route', wrapRouterMethod) + shimmer.wrap(express.response, 'json', wrapResponseJson) + shimmer.wrap(express.response, 'jsonp', wrapResponseJson) + return express }) diff --git a/packages/dd-trace/src/appsec/addresses.js b/packages/dd-trace/src/appsec/addresses.js index 343a4b2cbba..c2352f14a61 100644 --- a/packages/dd-trace/src/appsec/addresses.js +++ b/packages/dd-trace/src/appsec/addresses.js @@ -15,6 +15,8 @@ module.exports = { HTTP_INCOMING_GRAPHQL_RESOLVERS: 'graphql.server.all_resolvers', HTTP_INCOMING_GRAPHQL_RESOLVER: 'graphql.server.resolver', + HTTP_OUTGOING_BODY: 'server.response.body', + HTTP_CLIENT_IP: 'http.client_ip', USER_ID: 'usr.id', diff --git a/packages/dd-trace/src/appsec/api_security_sampler.js b/packages/dd-trace/src/appsec/api_security_sampler.js index 3d92288d1c1..68bd896af7e 100644 --- a/packages/dd-trace/src/appsec/api_security_sampler.js +++ 
b/packages/dd-trace/src/appsec/api_security_sampler.js @@ -5,6 +5,8 @@ const log = require('../log') let enabled let requestSampling +const sampledRequests = new WeakSet() + function configure ({ apiSecurity }) { enabled = apiSecurity.enabled setRequestSampling(apiSecurity.requestSampling) @@ -32,17 +34,28 @@ function parseRequestSampling (requestSampling) { return parsed } -function sampleRequest () { +function sampleRequest (req) { if (!enabled || !requestSampling) { return false } - return Math.random() <= requestSampling + const shouldSample = Math.random() <= requestSampling + + if (shouldSample) { + sampledRequests.add(req) + } + + return shouldSample +} + +function isSampled (req) { + return sampledRequests.has(req) } module.exports = { configure, disable, setRequestSampling, - sampleRequest + sampleRequest, + isSampled } diff --git a/packages/dd-trace/src/appsec/channels.js b/packages/dd-trace/src/appsec/channels.js index f5832f2986c..fe4ce2fb881 100644 --- a/packages/dd-trace/src/appsec/channels.js +++ b/packages/dd-trace/src/appsec/channels.js @@ -16,5 +16,6 @@ module.exports = { queryParser: dc.channel('datadog:query:read:finish'), setCookieChannel: dc.channel('datadog:iast:set-cookie'), nextBodyParsed: dc.channel('apm:next:body-parsed'), - nextQueryParsed: dc.channel('apm:next:query-parsed') + nextQueryParsed: dc.channel('apm:next:query-parsed'), + responseBody: dc.channel('datadog:express:response:json:start') } diff --git a/packages/dd-trace/src/appsec/index.js b/packages/dd-trace/src/appsec/index.js index 90ac6d1f5f3..d6b17ea49b1 100644 --- a/packages/dd-trace/src/appsec/index.js +++ b/packages/dd-trace/src/appsec/index.js @@ -11,7 +11,8 @@ const { passportVerify, queryParser, nextBodyParsed, - nextQueryParsed + nextQueryParsed, + responseBody } = require('./channels') const waf = require('./waf') const addresses = require('./addresses') @@ -53,6 +54,7 @@ function enable (_config) { nextQueryParsed.subscribe(onRequestQueryParsed) queryParser.subscribe(onRequestQueryParsed) cookieParser.subscribe(onRequestCookieParser) + responseBody.subscribe(onResponseBody) if (_config.appsec.eventTracking.enabled) { passportVerify.subscribe(onPassportVerify) @@ -93,7 +95,7 @@ function incomingHttpStartTranslator ({ req, res, abortController }) { persistent[addresses.HTTP_CLIENT_IP] = clientIp } - if (apiSecuritySampler.sampleRequest()) { + if (apiSecuritySampler.sampleRequest(req)) { persistent[addresses.WAF_CONTEXT_PROCESSOR] = { 'extract-schema': true } } @@ -194,6 +196,18 @@ function onRequestCookieParser ({ req, res, abortController, cookies }) { handleResults(results, req, res, rootSpan, abortController) } +function onResponseBody ({ req, body }) { + if (!body || typeof body !== 'object') return + if (!apiSecuritySampler.isSampled(req)) return + + // we don't support blocking at this point, so no results needed + waf.run({ + persistent: { + [addresses.HTTP_OUTGOING_BODY]: body + } + }, req) +} + function onPassportVerify ({ credentials, user }) { const store = storage.getStore() const rootSpan = store?.req && web.root(store.req) @@ -233,6 +247,7 @@ function disable () { if (incomingHttpRequestEnd.hasSubscribers) incomingHttpRequestEnd.unsubscribe(incomingHttpEndTranslator) if (queryParser.hasSubscribers) queryParser.unsubscribe(onRequestQueryParsed) if (cookieParser.hasSubscribers) cookieParser.unsubscribe(onRequestCookieParser) + if (responseBody.hasSubscribers) responseBody.unsubscribe(onResponseBody) if (passportVerify.hasSubscribers) passportVerify.unsubscribe(onPassportVerify) } 
diff --git a/packages/dd-trace/test/appsec/api_security_rules.json b/packages/dd-trace/test/appsec/api_security_rules.json index fad50fcd358..8202fc82fd0 100644 --- a/packages/dd-trace/test/appsec/api_security_rules.json +++ b/packages/dd-trace/test/appsec/api_security_rules.json @@ -94,7 +94,7 @@ { "inputs": [ { - "address": "http.response.body" + "address": "server.response.body" } ], "output": "_dd.appsec.s.res.body" diff --git a/packages/dd-trace/test/appsec/api_security_sampler.spec.js b/packages/dd-trace/test/appsec/api_security_sampler.spec.js index e36e588ba39..5a69af05a5c 100644 --- a/packages/dd-trace/test/appsec/api_security_sampler.spec.js +++ b/packages/dd-trace/test/appsec/api_security_sampler.spec.js @@ -22,14 +22,14 @@ describe('Api Security Sampler', () => { it('should sample request if enabled and sampling 1', () => { apiSecuritySampler.configure(config) - expect(apiSecuritySampler.sampleRequest()).to.true + expect(apiSecuritySampler.sampleRequest({})).to.true }) it('should not sample request if enabled and sampling 0', () => { config.apiSecurity.requestSampling = 0 apiSecuritySampler.configure(config) - expect(apiSecuritySampler.sampleRequest()).to.false + expect(apiSecuritySampler.sampleRequest({})).to.false }) it('should sample request if enabled and sampling greater than random', () => { @@ -37,7 +37,7 @@ describe('Api Security Sampler', () => { apiSecuritySampler.configure(config) - expect(apiSecuritySampler.sampleRequest()).to.true + expect(apiSecuritySampler.sampleRequest({})).to.true }) it('should not sample request if enabled and sampling less than random', () => { @@ -61,7 +61,7 @@ describe('Api Security Sampler', () => { apiSecuritySampler.configure(config) - expect(apiSecuritySampler.sampleRequest()).to.true + expect(apiSecuritySampler.sampleRequest({})).to.true apiSecuritySampler.setRequestSampling(0) diff --git a/packages/dd-trace/test/appsec/index.express.plugin.spec.js b/packages/dd-trace/test/appsec/index.express.plugin.spec.js index 9b1d2ea52f8..811d4ef5291 100644 --- a/packages/dd-trace/test/appsec/index.express.plugin.spec.js +++ b/packages/dd-trace/test/appsec/index.express.plugin.spec.js @@ -30,7 +30,19 @@ withVersions('express', 'express', version => { }) app.post('/', (req, res) => { - res.end('DONE') + res.send('DONE') + }) + + app.post('/sendjson', (req, res) => { + res.send({ sendResKey: 'sendResValue' }) + }) + + app.post('/jsonp', (req, res) => { + res.jsonp({ jsonpResKey: 'jsonpResValue' }) + }) + + app.post('/json', (req, res) => { + res.jsonp({ jsonResKey: 'jsonResValue' }) }) getPort().then(newPort => { @@ -85,8 +97,7 @@ withVersions('express', 'express', version => { enabled: true, rules: path.join(__dirname, 'api_security_rules.json'), apiSecurity: { - enabled: true, - requestSampling: 1.0 + enabled: true } } }) @@ -96,20 +107,69 @@ withVersions('express', 'express', version => { appsec.disable() }) - it('should get the schema', async () => { - appsec.enable(config) + describe('with requestSampling 1.0', () => { + beforeEach(() => { + config.appsec.apiSecurity.requestSampling = 1.0 + appsec.enable(config) + }) - const expectedSchema = zlib.gzipSync(JSON.stringify([{ 'key': [8] }])).toString('base64') - const res = await axios.post(`http://localhost:${port}/`, { key: 'value' }) + function formatSchema (body) { + return zlib.gzipSync(JSON.stringify(body)).toString('base64') + } - await agent.use((traces) => { - const span = traces[0][0] - expect(span.meta).to.haveOwnProperty('_dd.appsec.s.req.body') - 
expect(span.meta['_dd.appsec.s.req.body']).to.be.equal(expectedSchema) + it('should get the request body schema', async () => { + const expectedRequestBodySchema = formatSchema([{ 'key': [8] }]) + const res = await axios.post(`http://localhost:${port}/`, { key: 'value' }) + + await agent.use((traces) => { + const span = traces[0][0] + expect(span.meta).to.haveOwnProperty('_dd.appsec.s.req.body') + expect(span.meta).not.to.haveOwnProperty('_dd.appsec.s.res.body') + expect(span.meta['_dd.appsec.s.req.body']).to.be.equal(expectedRequestBodySchema) + }) + + expect(res.status).to.be.equal(200) + expect(res.data).to.be.equal('DONE') }) - expect(res.status).to.be.equal(200) - expect(res.data).to.be.equal('DONE') + it('should get the response body schema with res.send method with object', async () => { + const expectedResponseBodySchema = formatSchema([{ 'sendResKey': [8] }]) + const res = await axios.post(`http://localhost:${port}/sendjson`, { key: 'value' }) + + await agent.use((traces) => { + const span = traces[0][0] + expect(span.meta['_dd.appsec.s.res.body']).to.be.equal(expectedResponseBodySchema) + }) + + expect(res.status).to.be.equal(200) + expect(res.data).to.be.deep.equal({ sendResKey: 'sendResValue' }) + }) + + it('should get the response body schema with res.json method', async () => { + const expectedResponseBodySchema = formatSchema([{ 'jsonResKey': [8] }]) + const res = await axios.post(`http://localhost:${port}/json`, { key: 'value' }) + + await agent.use((traces) => { + const span = traces[0][0] + expect(span.meta['_dd.appsec.s.res.body']).to.be.equal(expectedResponseBodySchema) + }) + + expect(res.status).to.be.equal(200) + expect(res.data).to.be.deep.equal({ jsonResKey: 'jsonResValue' }) + }) + + it('should get the response body schema with res.jsonp method', async () => { + const expectedResponseBodySchema = formatSchema([{ 'jsonpResKey': [8] }]) + const res = await axios.post(`http://localhost:${port}/jsonp`, { key: 'value' }) + + await agent.use((traces) => { + const span = traces[0][0] + expect(span.meta['_dd.appsec.s.res.body']).to.be.equal(expectedResponseBodySchema) + }) + + expect(res.status).to.be.equal(200) + expect(res.data).to.be.deep.equal({ jsonpResKey: 'jsonpResValue' }) + }) }) it('should not get the schema', async () => { @@ -121,6 +181,7 @@ withVersions('express', 'express', version => { await agent.use((traces) => { const span = traces[0][0] expect(span.meta).not.to.haveOwnProperty('_dd.appsec.s.req.body') + expect(span.meta).not.to.haveOwnProperty('_dd.appsec.s.res.body') }) expect(res.status).to.be.equal(200) diff --git a/packages/dd-trace/test/appsec/index.spec.js b/packages/dd-trace/test/appsec/index.spec.js index 82bb57059df..ef8c9829eb4 100644 --- a/packages/dd-trace/test/appsec/index.spec.js +++ b/packages/dd-trace/test/appsec/index.spec.js @@ -11,7 +11,8 @@ const { incomingHttpRequestStart, incomingHttpRequestEnd, queryParser, - passportVerify + passportVerify, + responseBody } = require('../../src/appsec/channels') const Reporter = require('../../src/appsec/reporter') const agent = require('../plugins/agent') @@ -21,6 +22,7 @@ const getPort = require('get-port') const blockedTemplate = require('../../src/appsec/blocked_templates') const { storage } = require('../../../datadog-core') const telemetryMetrics = require('../../src/telemetry/metrics') +const addresses = require('../../src/appsec/addresses') describe('AppSec Index', () => { let config @@ -31,6 +33,7 @@ describe('AppSec Index', () => { let log let appsecTelemetry let graphql + let 
apiSecuritySampler const RULES = { rules: [{ a: 1 }] } @@ -84,13 +87,18 @@ describe('AppSec Index', () => { disable: sinon.stub() } + apiSecuritySampler = require('../../src/appsec/api_security_sampler') + sinon.spy(apiSecuritySampler, 'sampleRequest') + sinon.spy(apiSecuritySampler, 'isSampled') + AppSec = proxyquire('../../src/appsec', { '../log': log, '../plugins/util/web': web, './blocking': blocking, './passport': passport, './telemetry': appsecTelemetry, - './graphql': graphql + './graphql': graphql, + './api_security_sampler': apiSecuritySampler }) sinon.stub(fs, 'readFileSync').returns(JSON.stringify(RULES)) @@ -534,6 +542,53 @@ describe('AppSec Index', () => { } }, req) }) + + describe('onResponseBody', () => { + beforeEach(() => { + config.appsec.apiSecurity = { + enabled: true, + requestSampling: 1 + } + AppSec.enable(config) + }) + + afterEach(() => { + AppSec.disable() + }) + + it('should not do anything if body is not an object', () => { + responseBody.publish({ req: {}, body: 'string' }) + responseBody.publish({ req: {}, body: null }) + + expect(apiSecuritySampler.isSampled).to.not.been.called + expect(waf.run).to.not.been.called + }) + + it('should not call to the waf if it is not a sampled request', () => { + apiSecuritySampler.isSampled = apiSecuritySampler.isSampled.instantiateFake(() => false) + const req = {} + + responseBody.publish({ req, body: {} }) + + expect(apiSecuritySampler.isSampled).to.have.been.calledOnceWith(req) + expect(waf.run).to.not.been.called + }) + + it('should call to the waf if it is a sampled request', () => { + apiSecuritySampler.isSampled = apiSecuritySampler.isSampled.instantiateFake(() => true) + const req = {} + const body = {} + + responseBody.publish({ req, body }) + + expect(apiSecuritySampler.isSampled).to.have.been.calledOnceWith(req) + expect(waf.run).to.been.calledOnceWith({ + persistent: { + [addresses.HTTP_OUTGOING_BODY]: body + } + }, req) + }) + }) }) describe('Channel handlers', () => { From 70ac60a9edd716e417c75e364af8549e453d3374 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Fri, 9 Feb 2024 13:17:17 +0100 Subject: [PATCH 29/44] [ci-visibility] Early flake detection for jest (#3956) --- integration-tests/ci-visibility-intake.js | 51 ++- integration-tests/ci-visibility.spec.js | 357 ++++++++++++++++-- .../occasionally-failing-test.js | 9 + .../skipped-and-todo-test.js | 11 + .../test-parameterized.js | 7 + .../test-early-flake-detection/test.js | 7 + packages/datadog-instrumentations/src/jest.js | 114 +++++- packages/datadog-plugin-jest/src/index.js | 24 +- .../exporters/ci-visibility-exporter.js | 71 ++-- .../requests/get-library-configuration.js | 10 +- packages/dd-trace/src/plugins/util/test.js | 6 + .../exporters/ci-visibility-exporter.spec.js | 8 +- 12 files changed, 574 insertions(+), 101 deletions(-) create mode 100644 integration-tests/ci-visibility/test-early-flake-detection/occasionally-failing-test.js create mode 100644 integration-tests/ci-visibility/test-early-flake-detection/skipped-and-todo-test.js create mode 100644 integration-tests/ci-visibility/test-early-flake-detection/test-parameterized.js create mode 100644 integration-tests/ci-visibility/test-early-flake-detection/test.js diff --git a/integration-tests/ci-visibility-intake.js b/integration-tests/ci-visibility-intake.js index e7c4157846b..c2fa3aee0e4 100644 --- a/integration-tests/ci-visibility-intake.js +++ b/integration-tests/ci-visibility-intake.js @@ -5,13 +5,20 @@ const codec = msgpack.createCodec({ int64: 
true }) const http = require('http') const multer = require('multer') const upload = multer() +const zlib = require('zlib') const { FakeAgent } = require('./helpers') const DEFAULT_SETTINGS = { code_coverage: true, tests_skipping: true, - itr_enabled: true + itr_enabled: true, + early_flake_detection: { + enabled: false, + slow_test_retries: { + '5s': 3 + } + } } const DEFAULT_SUITES_TO_SKIP = [] @@ -20,14 +27,20 @@ const DEFAULT_INFO_RESPONSE = { endpoints: ['/evp_proxy/v2'] } const DEFAULT_CORRELATION_ID = '1234' +const DEFAULT_KNOWN_TESTS = ['test-suite1.js.test-name1', 'test-suite2.js.test-name2'] let settings = DEFAULT_SETTINGS let suitesToSkip = DEFAULT_SUITES_TO_SKIP let gitUploadStatus = DEFAULT_GIT_UPLOAD_STATUS let infoResponse = DEFAULT_INFO_RESPONSE let correlationId = DEFAULT_CORRELATION_ID +let knownTests = DEFAULT_KNOWN_TESTS class FakeCiVisIntake extends FakeAgent { + setKnownTests (newKnownTestsResponse) { + knownTests = newKnownTestsResponse + } + setInfoResponse (newInfoResponse) { infoResponse = newInfoResponse } @@ -70,7 +83,7 @@ class FakeCiVisIntake extends FakeAgent { }) }) - app.post(['/api/v2/citestcycle', '/evp_proxy/v2/api/v2/citestcycle'], (req, res) => { + app.post(['/api/v2/citestcycle', '/evp_proxy/:version/api/v2/citestcycle'], (req, res) => { res.status(200).send('OK') this.emit('message', { headers: req.headers, @@ -81,7 +94,7 @@ class FakeCiVisIntake extends FakeAgent { app.post([ '/api/v2/git/repository/search_commits', - '/evp_proxy/v2/api/v2/git/repository/search_commits' + '/evp_proxy/:version/api/v2/git/repository/search_commits' ], (req, res) => { res.status(gitUploadStatus).send(JSON.stringify({ data: [] })) this.emit('message', { @@ -93,7 +106,7 @@ class FakeCiVisIntake extends FakeAgent { app.post([ '/api/v2/git/repository/packfile', - '/evp_proxy/v2/api/v2/git/repository/packfile' + '/evp_proxy/:version/api/v2/git/repository/packfile' ], (req, res) => { res.status(202).send('') this.emit('message', { @@ -104,7 +117,7 @@ class FakeCiVisIntake extends FakeAgent { app.post([ '/api/v2/citestcov', - '/evp_proxy/v2/api/v2/citestcov' + '/evp_proxy/:version/api/v2/citestcov' ], upload.any(), (req, res) => { res.status(200).send('OK') @@ -128,7 +141,7 @@ class FakeCiVisIntake extends FakeAgent { app.post([ '/api/v2/libraries/tests/services/setting', - '/evp_proxy/v2/api/v2/libraries/tests/services/setting' + '/evp_proxy/:version/api/v2/libraries/tests/services/setting' ], (req, res) => { res.status(200).send(JSON.stringify({ data: { @@ -143,7 +156,7 @@ class FakeCiVisIntake extends FakeAgent { app.post([ '/api/v2/ci/tests/skippable', - '/evp_proxy/v2/api/v2/ci/tests/skippable' + '/evp_proxy/:version/api/v2/ci/tests/skippable' ], (req, res) => { res.status(200).send(JSON.stringify({ data: suitesToSkip, @@ -157,6 +170,30 @@ class FakeCiVisIntake extends FakeAgent { }) }) + app.post([ + '/api/v2/ci/libraries/tests', + '/evp_proxy/:version/api/v2/ci/libraries/tests' + ], (req, res) => { + // The endpoint returns compressed data if 'accept-encoding' is set to 'gzip' + const isGzip = req.headers['accept-encoding'] === 'gzip' + const data = JSON.stringify({ + data: { + attributes: { + test_full_names: knownTests + } + } + }) + res.setHeader('content-type', 'application/json') + if (isGzip) { + res.setHeader('content-encoding', 'gzip') + } + res.status(200).send(isGzip ? 
zlib.gzipSync(data) : data) + this.emit('message', { + headers: req.headers, + url: req.url + }) + }) + return new Promise((resolve, reject) => { const timeoutObj = setTimeout(() => { reject(new Error('Intake timed out starting up')) diff --git a/integration-tests/ci-visibility.spec.js b/integration-tests/ci-visibility.spec.js index dd7d563cda5..c199c11797f 100644 --- a/integration-tests/ci-visibility.spec.js +++ b/integration-tests/ci-visibility.spec.js @@ -24,7 +24,11 @@ const { TEST_ITR_SKIPPING_TYPE, TEST_ITR_SKIPPING_COUNT, TEST_ITR_UNSKIPPABLE, - TEST_ITR_FORCED_RUN + TEST_ITR_FORCED_RUN, + TEST_IS_NEW, + TEST_EARLY_FLAKE_IS_RETRY, + TEST_EARLY_FLAKE_IS_ENABLED, + TEST_NAME } = require('../packages/dd-trace/src/plugins/util/test') const { ERROR_MESSAGE } = require('../packages/dd-trace/src/constants') @@ -61,36 +65,15 @@ const testFrameworks = [ runTestsWithCoverageCommand: './node_modules/nyc/bin/nyc.js -r=text-summary node ./ci-visibility/run-mocha.js', type: 'commonJS' }, - { - ...mochaCommonOptions, - testFile: 'ci-visibility/run-mocha.mjs', - dependencies: ['mocha', 'chai@v4', 'nyc', '@istanbuljs/esm-loader-hook'], - expectedCoverageFiles: [ - 'ci-visibility/run-mocha.mjs', - 'ci-visibility/test/sum.js', - 'ci-visibility/test/ci-visibility-test.js', - 'ci-visibility/test/ci-visibility-test-2.js' - ], - runTestsWithCoverageCommand: - `./node_modules/nyc/bin/nyc.js -r=text-summary ` + - `node --loader=./node_modules/@istanbuljs/esm-loader-hook/index.js ` + - `--loader=${hookFile} ./ci-visibility/run-mocha.mjs`, - type: 'esm' - }, { ...jestCommonOptions, testFile: 'ci-visibility/run-jest.js', runTestsWithCoverageCommand: 'node ./ci-visibility/run-jest.js', type: 'commonJS' - }, - { - ...jestCommonOptions, - testFile: 'ci-visibility/run-jest.mjs', - runTestsWithCoverageCommand: `node --loader=${hookFile} ./ci-visibility/run-jest.mjs`, - type: 'esm' } ] +// TODO: add ESM tests testFrameworks.forEach(({ name, dependencies, @@ -101,16 +84,6 @@ testFrameworks.forEach(({ runTestsWithCoverageCommand, type }) => { - // temporary fix for failing esm tests on the CI, skip for now for the release and comeback to solve the issue - if (type === 'esm') { - return - } - - // to avoid this error: @istanbuljs/esm-loader-hook@0.2.0: The engine "node" - // is incompatible with this module. Expected version ">=16.12.0". Got "14.21.3" - // if (type === 'esm' && name === 'mocha' && semver.satisfies(process.version, '<16.12.0')) { - // return - // } describe(`${name} ${type}`, () => { let receiver let childProcess @@ -491,6 +464,324 @@ testFrameworks.forEach(({ } ) }) + + const reportingOptions = ['agentless', 'evp proxy'] + + reportingOptions.forEach(reportingOption => { + context(`early flake detection when reporting by ${reportingOption}`, () => { + it('retries new tests', (done) => { + const envVars = reportingOption === 'agentless' + ? 
getCiVisAgentlessConfig(receiver.port) : getCiVisEvpProxyConfig(receiver.port) + if (reportingOption === 'evp proxy') { + receiver.setInfoResponse({ endpoints: ['/evp_proxy/v4'] }) + } + // Tests from ci-visibility/test/ci-visibility-test-2.js will be considered new + receiver.setKnownTests([ + 'ci-visibility/test/ci-visibility-test.js.ci visibility can report tests' + ]) + const NUM_RETRIES_EFD = 3 + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + early_flake_detection: { + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD + } + } + }) + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + + const testSession = events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_IS_ENABLED, 'true') + + const tests = events.filter(event => event.type === 'test').map(event => event.content) + const newTests = tests.filter(test => + test.meta[TEST_SUITE] === 'ci-visibility/test/ci-visibility-test-2.js' + ) + newTests.forEach(test => { + assert.propertyVal(test.meta, TEST_IS_NEW, 'true') + }) + const retriedTests = newTests.filter(test => test.meta[TEST_EARLY_FLAKE_IS_RETRY] === 'true') + // all but one has been retried + assert.equal( + newTests.length - 1, + retriedTests.length + ) + assert.equal(retriedTests.length, NUM_RETRIES_EFD) + // Test name does not change + newTests.forEach(test => { + assert.equal(test.meta[TEST_NAME], 'ci visibility 2 can report tests 2') + }) + }) + + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: { ...envVars, TESTS_TO_RUN: 'test/ci-visibility-test' }, + stdio: 'inherit' + } + ) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) + it('handles parameterized tests as a single unit', (done) => { + const envVars = reportingOption === 'agentless' + ? 
getCiVisAgentlessConfig(receiver.port) : getCiVisEvpProxyConfig(receiver.port) + if (reportingOption === 'evp proxy') { + receiver.setInfoResponse({ endpoints: ['/evp_proxy/v4'] }) + } + // Tests from ci-visibility/test-early-flake-detection/test-parameterized.js will be considered new + receiver.setKnownTests([ + 'ci-visibility/test-early-flake-detection/test.js.ci visibility can report tests' + ]) + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + early_flake_detection: { + enabled: true, + slow_test_retries: { + '5s': 3 + } + } + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const tests = events.filter(event => event.type === 'test').map(event => event.content) + + const testSession = events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_IS_ENABLED, 'true') + + const newTests = tests.filter(test => + test.meta[TEST_SUITE] === 'ci-visibility/test-early-flake-detection/test-parameterized.js' + ) + newTests.forEach(test => { + assert.propertyVal(test.meta, TEST_IS_NEW, 'true') + }) + // Each parameter is repeated independently + const testsForFirstParameter = tests.filter(test => test.resource === + 'ci-visibility/test-early-flake-detection/test-parameterized.js.parameterized test parameter 1' + ) + + const testsForSecondParameter = tests.filter(test => test.resource === + 'ci-visibility/test-early-flake-detection/test-parameterized.js.parameterized test parameter 2' + ) + + assert.equal(testsForFirstParameter.length, testsForSecondParameter.length) + + // all but one have been retried + assert.equal( + testsForFirstParameter.length - 1, + testsForFirstParameter.filter(test => test.meta[TEST_EARLY_FLAKE_IS_RETRY] === 'true').length + ) + + assert.equal( + testsForSecondParameter.length - 1, + testsForSecondParameter.filter(test => test.meta[TEST_EARLY_FLAKE_IS_RETRY] === 'true').length + ) + }) + + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: { ...envVars, TESTS_TO_RUN: 'test-early-flake-detection/test' }, + stdio: 'inherit' + } + ) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) + it('is disabled if DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED is false', (done) => { + const envVars = reportingOption === 'agentless' + ? 
getCiVisAgentlessConfig(receiver.port) : getCiVisEvpProxyConfig(receiver.port)
+          if (reportingOption === 'evp proxy') {
+            receiver.setInfoResponse({ endpoints: ['/evp_proxy/v4'] })
+          }
+          // Tests from ci-visibility/test/ci-visibility-test-2.js will be considered new
+          receiver.setKnownTests([
+            'ci-visibility/test/ci-visibility-test.js.ci visibility can report tests'
+          ])
+          receiver.setSettings({
+            itr_enabled: false,
+            code_coverage: false,
+            tests_skipping: false,
+            early_flake_detection: {
+              enabled: true,
+              slow_test_retries: {
+                '5s': 3
+              }
+            }
+          })
+
+          const eventsPromise = receiver
+            .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
+              const events = payloads.flatMap(({ payload }) => payload.events)
+              const testSession = events.find(event => event.type === 'test_session_end').content
+              assert.notProperty(testSession.meta, TEST_EARLY_FLAKE_IS_ENABLED)
+
+              const tests = events.filter(event => event.type === 'test').map(event => event.content)
+              const newTests = tests.filter(test =>
+                test.meta[TEST_IS_NEW] === 'true'
+              )
+              // new tests are not detected
+              assert.equal(newTests.length, 0)
+            })
+
+          childProcess = exec(
+            runTestsWithCoverageCommand,
+            {
+              cwd,
+              env: {
+                ...envVars,
+                TESTS_TO_RUN: 'test/ci-visibility-test',
+                DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED: 'false'
+              },
+              stdio: 'inherit'
+            }
+          )
+          childProcess.on('exit', () => {
+            eventsPromise.then(() => {
+              done()
+            }).catch(done)
+          })
+        })
+        it('retries flaky tests', (done) => {
+          const envVars = reportingOption === 'agentless'
+            ? getCiVisAgentlessConfig(receiver.port) : getCiVisEvpProxyConfig(receiver.port)
+          if (reportingOption === 'evp proxy') {
+            receiver.setInfoResponse({ endpoints: ['/evp_proxy/v4'] })
+          }
+          // Tests from ci-visibility/test/occasionally-failing-test will be considered new
+          receiver.setKnownTests([])
+
+          const NUM_RETRIES_EFD = 5
+          receiver.setSettings({
+            itr_enabled: false,
+            code_coverage: false,
+            tests_skipping: false,
+            early_flake_detection: {
+              enabled: true,
+              slow_test_retries: {
+                '5s': NUM_RETRIES_EFD
+              }
+            }
+          })
+
+          const eventsPromise = receiver
+            .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
+              const events = payloads.flatMap(({ payload }) => payload.events)
+
+              const testSession = events.find(event => event.type === 'test_session_end').content
+              assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_IS_ENABLED, 'true')
+
+              const tests = events.filter(event => event.type === 'test').map(event => event.content)
+
+              const retriedTests = tests.filter(test => test.meta[TEST_EARLY_FLAKE_IS_RETRY] === 'true')
+              // all but one has been retried
+              assert.equal(
+                tests.length - 1,
+                retriedTests.length
+              )
+              assert.equal(retriedTests.length, NUM_RETRIES_EFD)
+              // Out of NUM_RETRIES_EFD + 1 total runs, half will be passing and half will be failing,
+              // based on the global counter in the test file
+              const passingTests = tests.filter(test => test.meta[TEST_STATUS] === 'pass')
+              const failingTests = tests.filter(test => test.meta[TEST_STATUS] === 'fail')
+              assert.equal(passingTests.length, (NUM_RETRIES_EFD + 1) / 2)
+              assert.equal(failingTests.length, (NUM_RETRIES_EFD + 1) / 2)
+              // Test name does not change
+              retriedTests.forEach(test => {
+                assert.equal(test.meta[TEST_NAME], 'fail occasionally fails')
+              })
+            })
+
+          childProcess = exec(
+            runTestsWithCoverageCommand,
+            {
+              cwd,
+              env: { ...envVars, TESTS_TO_RUN: 'test-early-flake-detection/occasionally-failing-test' },
+              stdio: 'inherit'
+            }
+          )
+          
childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) + it('does not retry new tests that are skipped', (done) => { + const envVars = reportingOption === 'agentless' + ? getCiVisAgentlessConfig(receiver.port) : getCiVisEvpProxyConfig(receiver.port) + if (reportingOption === 'evp proxy') { + receiver.setInfoResponse({ endpoints: ['/evp_proxy/v4'] }) + } + // Tests from ci-visibility/test/skipped-and-todo-test will be considered new + receiver.setKnownTests([]) + + const NUM_RETRIES_EFD = 5 + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + early_flake_detection: { + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD + } + } + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + + const testSession = events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_IS_ENABLED, 'true') + + const tests = events.filter(event => event.type === 'test').map(event => event.content) + + const newSkippedTests = tests.filter( + test => test.meta[TEST_NAME] === 'ci visibility skip will not be retried' + ) + const newTodoTests = tests.filter( + test => test.meta[TEST_NAME] === 'ci visibility todo will not be retried' + ) + assert.equal(newSkippedTests.length, 1) + assert.equal(newTodoTests.length, 1) + assert.notProperty(newSkippedTests[0].meta, TEST_EARLY_FLAKE_IS_RETRY) + assert.notProperty(newTodoTests[0].meta, TEST_EARLY_FLAKE_IS_RETRY) + }) + + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: { ...envVars, TESTS_TO_RUN: 'test-early-flake-detection/skipped-and-todo-test' }, + stdio: 'inherit' + } + ) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) + }) + }) } it('can run tests and report spans', (done) => { diff --git a/integration-tests/ci-visibility/test-early-flake-detection/occasionally-failing-test.js b/integration-tests/ci-visibility/test-early-flake-detection/occasionally-failing-test.js new file mode 100644 index 00000000000..22b6d91935b --- /dev/null +++ b/integration-tests/ci-visibility/test-early-flake-detection/occasionally-failing-test.js @@ -0,0 +1,9 @@ +const { expect } = require('chai') + +let globalCounter = 0 + +describe('fail', () => { + it('occasionally fails', () => { + expect((globalCounter++) % 2).to.equal(0) + }) +}) diff --git a/integration-tests/ci-visibility/test-early-flake-detection/skipped-and-todo-test.js b/integration-tests/ci-visibility/test-early-flake-detection/skipped-and-todo-test.js new file mode 100644 index 00000000000..8be47e28cec --- /dev/null +++ b/integration-tests/ci-visibility/test-early-flake-detection/skipped-and-todo-test.js @@ -0,0 +1,11 @@ +const { expect } = require('chai') + +describe('ci visibility', () => { + it('can report tests', () => { + expect(1 + 2).to.equal(3) + }) + it.todo('todo will not be retried') + it.skip('skip will not be retried', () => { + expect(1 + 2).to.equal(4) + }) +}) diff --git a/integration-tests/ci-visibility/test-early-flake-detection/test-parameterized.js b/integration-tests/ci-visibility/test-early-flake-detection/test-parameterized.js new file mode 100644 index 00000000000..8ff884c6c28 --- /dev/null +++ b/integration-tests/ci-visibility/test-early-flake-detection/test-parameterized.js @@ -0,0 +1,7 @@ +const { expect } = require('chai') + 
+describe('parameterized', () => {
+  test.each(['parameter 1', 'parameter 2'])('test %s', (value) => {
+    expect(value.startsWith('parameter')).to.equal(true)
+  })
+})
diff --git a/integration-tests/ci-visibility/test-early-flake-detection/test.js b/integration-tests/ci-visibility/test-early-flake-detection/test.js
new file mode 100644
index 00000000000..e3306f69374
--- /dev/null
+++ b/integration-tests/ci-visibility/test-early-flake-detection/test.js
@@ -0,0 +1,7 @@
+const { expect } = require('chai')
+
+describe('ci visibility', () => {
+  it('can report tests', () => {
+    expect(1 + 2).to.equal(3)
+  })
+})
diff --git a/packages/datadog-instrumentations/src/jest.js b/packages/datadog-instrumentations/src/jest.js
index 1c8ef07e0b8..03dcdc638ee 100644
--- a/packages/datadog-instrumentations/src/jest.js
+++ b/packages/datadog-instrumentations/src/jest.js
@@ -38,10 +38,12 @@ const testErrCh = channel('ci:jest:test:err')
 
 const skippableSuitesCh = channel('ci:jest:test-suite:skippable')
 const libraryConfigurationCh = channel('ci:jest:library-configuration')
+const knownTestsCh = channel('ci:jest:known-tests')
 
 const itrSkippedSuitesCh = channel('ci:jest:itr:skipped-suites')
 
 let skippableSuites = []
+let knownTests = []
 let isCodeCoverageEnabled = false
 let isSuitesSkippingEnabled = false
 let isUserCodeCoverageEnabled = false
@@ -49,6 +51,11 @@ let isSuitesSkipped = false
 let numSkippedSuites = 0
 let hasUnskippableSuites = false
 let hasForcedToRunSuites = false
+let isEarlyFlakeDetectionEnabled = false
+let earlyFlakeDetectionNumRetries = 0
+
+const EFD_STRING = "Retried by Datadog's Early Flake Detection"
+const EFD_TEST_NAME_REGEX = new RegExp(EFD_STRING + ' \\(#\\d+\\): ', 'g')
 
 const sessionAsyncResource = new AsyncResource('bound-anonymous-fn')
 
@@ -62,6 +69,7 @@ const specStatusToTestStatus = {
 
 const asyncResources = new WeakMap()
 const originalTestFns = new WeakMap()
+const retriedTestsToNumAttempts = new Map()
 
 // based on https://github.com/facebook/jest/blob/main/packages/jest-circus/src/formatNodeAssertErrors.ts#L41
 function formatJestError (errors) {
@@ -90,6 +98,14 @@ function getTestEnvironmentOptions (config) {
   return {}
 }
 
+function getEfdTestName (testName, numAttempt) {
+  return `${EFD_STRING} (#${numAttempt}): ${testName}`
+}
+
+function removeEfdTestName (testName) {
+  return testName.replace(EFD_TEST_NAME_REGEX, '')
+}
+
 function getWrappedEnvironment (BaseEnvironment, jestVersion) {
   return class DatadogEnvironment extends BaseEnvironment {
     constructor (config, context) {
@@ -101,6 +117,38 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) {
 
       this.global._ddtrace = global._ddtrace
 
       this.testEnvironmentOptions = getTestEnvironmentOptions(config)
+
+      this.isEarlyFlakeDetectionEnabled = this.testEnvironmentOptions._ddIsEarlyFlakeDetectionEnabled
+
+      if (this.isEarlyFlakeDetectionEnabled) {
+        earlyFlakeDetectionNumRetries = this.testEnvironmentOptions._ddEarlyFlakeDetectionNumRetries
+        try {
+          this.knownTestsForThisSuite = this.getKnownTestsForSuite(this.testEnvironmentOptions._ddKnownTests)
+        } catch (e) {
+          // If there has been an error parsing the tests, we'll disable Early Flake Detection
+          this.isEarlyFlakeDetectionEnabled = false
+        }
+      }
+    }
+
+    // Function that receives a list of known tests for a test service and
+    // returns the ones that belong to the current suite
+    getKnownTestsForSuite (knownTests) {
+      let knownTestsForSuite = knownTests
+      // If jest runs in band, the known tests are not serialized, so they're an array.
+ if (!Array.isArray(knownTests)) { + knownTestsForSuite = JSON.parse(knownTestsForSuite) + } + return knownTestsForSuite + .filter(test => test.includes(this.testSuite)) + .map(test => test.replace(`jest.${this.testSuite}.`, '').trim()) + } + + // Add the `add_test` event we don't have the test object yet, so + // we use its describe block to get the full name + getTestNameFromAddTestEvent (event, state) { + const describeSuffix = getJestTestName(state.currentDescribeBlock) + return removeEfdTestName(`${describeSuffix} ${event.testName}`).trim() } async handleTestEvent (event, state) { @@ -124,23 +172,55 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) { } } if (event.name === 'test_start') { + let isNewTest = false + let numEfdRetry = null const testParameters = getTestParametersString(this.nameToParams, event.test.name) // Async resource for this test is created here // It is used later on by the test_done handler const asyncResource = new AsyncResource('bound-anonymous-fn') asyncResources.set(event.test, asyncResource) + const testName = getJestTestName(event.test) + + if (this.isEarlyFlakeDetectionEnabled) { + const originalTestName = removeEfdTestName(testName) + isNewTest = retriedTestsToNumAttempts.has(originalTestName) + if (isNewTest) { + numEfdRetry = retriedTestsToNumAttempts.get(originalTestName) + retriedTestsToNumAttempts.set(originalTestName, numEfdRetry + 1) + } + } + asyncResource.runInAsyncScope(() => { testStartCh.publish({ - name: getJestTestName(event.test), + name: removeEfdTestName(testName), suite: this.testSuite, runner: 'jest-circus', testParameters, - frameworkVersion: jestVersion + frameworkVersion: jestVersion, + isNew: isNewTest, + isEfdRetry: numEfdRetry > 0 }) originalTestFns.set(event.test, event.test.fn) event.test.fn = asyncResource.bind(event.test.fn) }) } + if (event.name === 'add_test') { + if (this.isEarlyFlakeDetectionEnabled) { + const testName = this.getTestNameFromAddTestEvent(event, state) + const isNew = !this.knownTestsForThisSuite?.includes(testName) + const isSkipped = event.mode === 'todo' || event.mode === 'skip' + if (isNew && !isSkipped && !retriedTestsToNumAttempts.has(testName)) { + retriedTestsToNumAttempts.set(testName, 0) + for (let retryIndex = 0; retryIndex < earlyFlakeDetectionNumRetries; retryIndex++) { + if (this.global.test) { + this.global.test(getEfdTestName(event.testName, retryIndex), event.fn, event.timeout) + } else { + log.error('Early flake detection could not retry test because global.test is undefined') + } + } + } + } + } if (event.name === 'test_done') { const asyncResource = asyncResources.get(event.test) asyncResource.runInAsyncScope(() => { @@ -206,7 +286,7 @@ addHook({ } // TODO: could we get the rootDir from each test? 
const [test] = shardedTests - const rootDir = test && test.context && test.context.config && test.context.config.rootDir + const rootDir = test?.context?.config?.rootDir const jestSuitesToRun = getJestSuitesToRun(skippableSuites, shardedTests, rootDir || process.cwd()) @@ -247,11 +327,32 @@ function cliWrapper (cli, jestVersion) { if (!err) { isCodeCoverageEnabled = libraryConfig.isCodeCoverageEnabled isSuitesSkippingEnabled = libraryConfig.isSuitesSkippingEnabled + isEarlyFlakeDetectionEnabled = libraryConfig.isEarlyFlakeDetectionEnabled + earlyFlakeDetectionNumRetries = libraryConfig.earlyFlakeDetectionNumRetries } } catch (err) { log.error(err) } + if (isEarlyFlakeDetectionEnabled) { + const knownTestsPromise = new Promise((resolve) => { + onDone = resolve + }) + + sessionAsyncResource.runInAsyncScope(() => { + knownTestsCh.publish({ onDone }) + }) + + try { + const { err, knownTests: receivedKnownTests } = await knownTestsPromise + if (!err) { + knownTests = receivedKnownTests + } + } catch (err) { + log.error(err) + } + } + if (isSuitesSkippingEnabled) { const skippableSuitesPromise = new Promise((resolve) => { onDone = resolve @@ -322,7 +423,8 @@ function cliWrapper (cli, jestVersion) { numSkippedSuites, hasUnskippableSuites, hasForcedToRunSuites, - error + error, + isEarlyFlakeDetectionEnabled }) }) @@ -438,6 +540,7 @@ function configureTestEnvironment (readConfigsResult) { // because `jestAdapterWrapper` runs in a different process. We have to go through `testEnvironmentOptions` configs.forEach(config => { config.testEnvironmentOptions._ddTestCodeCoverageEnabled = isCodeCoverageEnabled + config.testEnvironmentOptions._ddKnownTests = knownTests }) isUserCodeCoverageEnabled = !!readConfigsResult.globalConfig.collectCoverage @@ -498,6 +601,9 @@ addHook({ _ddForcedToRun, _ddUnskippable, _ddItrCorrelationId, + _ddKnownTests, + _ddIsEarlyFlakeDetectionEnabled, + _ddEarlyFlakeDetectionNumRetries, ...restOfTestEnvironmentOptions } = testEnvironmentOptions diff --git a/packages/datadog-plugin-jest/src/index.js b/packages/datadog-plugin-jest/src/index.js index 49a8d4b2a05..9c0240f5986 100644 --- a/packages/datadog-plugin-jest/src/index.js +++ b/packages/datadog-plugin-jest/src/index.js @@ -14,7 +14,10 @@ const { TEST_ITR_UNSKIPPABLE, TEST_ITR_FORCED_RUN, TEST_CODE_OWNERS, - ITR_CORRELATION_ID + ITR_CORRELATION_ID, + TEST_IS_NEW, + TEST_EARLY_FLAKE_IS_RETRY, + TEST_EARLY_FLAKE_IS_ENABLED } = require('../../dd-trace/src/plugins/util/test') const { COMPONENT } = require('../../dd-trace/src/constants') const id = require('../../dd-trace/src/id') @@ -81,7 +84,8 @@ class JestPlugin extends CiPlugin { numSkippedSuites, hasUnskippableSuites, hasForcedToRunSuites, - error + error, + isEarlyFlakeDetectionEnabled }) => { this.testSessionSpan.setTag(TEST_STATUS, status) this.testModuleSpan.setTag(TEST_STATUS, status) @@ -106,6 +110,10 @@ class JestPlugin extends CiPlugin { } ) + if (isEarlyFlakeDetectionEnabled) { + this.testSessionSpan.setTag(TEST_EARLY_FLAKE_IS_ENABLED, 'true') + } + this.testModuleSpan.finish() this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'module') this.testSessionSpan.finish() @@ -116,13 +124,15 @@ class JestPlugin extends CiPlugin { // Test suites can be run in a different process from jest's main one. // This subscriber changes the configuration objects from jest to inject the trace id - // of the test session to the processes that run the test suites. + // of the test session to the processes that run the test suites, and other data. 
this.addSub('ci:jest:session:configuration', configs => { configs.forEach(config => { config._ddTestSessionId = this.testSessionSpan.context().toTraceId() config._ddTestModuleId = this.testModuleSpan.context().toSpanId() config._ddTestCommand = this.testSessionSpan.context()._tags[TEST_COMMAND] config._ddItrCorrelationId = this.itrCorrelationId + config._ddIsEarlyFlakeDetectionEnabled = !!this.libraryConfig?.isEarlyFlakeDetectionEnabled + config._ddEarlyFlakeDetectionNumRetries = this.libraryConfig?.earlyFlakeDetectionNumRetries ?? 0 }) }) @@ -286,7 +296,7 @@ class JestPlugin extends CiPlugin { } startTestSpan (test) { - const { suite, name, runner, testParameters, frameworkVersion, testStartLine } = test + const { suite, name, runner, testParameters, frameworkVersion, testStartLine, isNew, isEfdRetry } = test const extraTags = { [JEST_TEST_RUNNER]: runner, @@ -296,6 +306,12 @@ class JestPlugin extends CiPlugin { if (testStartLine) { extraTags[TEST_SOURCE_START] = testStartLine } + if (isNew) { + extraTags[TEST_IS_NEW] = 'true' + if (isEfdRetry) { + extraTags[TEST_EARLY_FLAKE_IS_RETRY] = 'true' + } + } return super.startTestSpan(name, suite, this.testSuiteSpan, extraTags) } diff --git a/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js b/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js index 28c7e79744c..f8ae4d7b1b1 100644 --- a/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js +++ b/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js @@ -83,6 +83,7 @@ class CiVisibilityExporter extends AgentInfoExporter { shouldRequestKnownTests () { return !!( this._config.isEarlyFlakeDetectionEnabled && + this._canUseCiVisProtocol && this._libraryConfig?.isEarlyFlakeDetectionEnabled ) } @@ -99,6 +100,19 @@ class CiVisibilityExporter extends AgentInfoExporter { return this._canUseCiVisProtocol } + getRequestConfiguration (testConfiguration) { + return { + url: this._getApiUrl(), + env: this._config.env, + service: this._config.service, + isEvpProxy: !!this._isUsingEvpProxy, + isGzipCompatible: this._isGzipCompatible, + evpProxyPrefix: this.evpProxyPrefix, + custom: getTestConfigurationTags(this._config.tags), + ...testConfiguration + } + } + // We can't call the skippable endpoint until git upload has finished, // hence the this._gitUploadPromise.then getSkippableSuites (testConfiguration, callback) { @@ -109,18 +123,7 @@ class CiVisibilityExporter extends AgentInfoExporter { if (gitUploadError) { return callback(gitUploadError, []) } - const configuration = { - url: this._getApiUrl(), - site: this._config.site, - env: this._config.env, - service: this._config.service, - isEvpProxy: !!this._isUsingEvpProxy, - isGzipCompatible: this._isGzipCompatible, - evpProxyPrefix: this.evpProxyPrefix, - custom: getTestConfigurationTags(this._config.tags), - ...testConfiguration - } - getSkippableSuitesRequest(configuration, callback) + getSkippableSuitesRequest(this.getRequestConfiguration(testConfiguration), callback) }) } @@ -128,24 +131,7 @@ class CiVisibilityExporter extends AgentInfoExporter { if (!this.shouldRequestKnownTests()) { return callback(null) } - this._canUseCiVisProtocolPromise.then((canUseCiVisProtocol) => { - if (!canUseCiVisProtocol) { - return callback( - new Error('Known tests can not be requested because CI Visibility protocol can not be used') - ) - } - const configuration = { - url: this._getApiUrl(), - env: this._config.env, - service: this._config.service, - isEvpProxy: !!this._isUsingEvpProxy, - 
evpProxyPrefix: this.evpProxyPrefix, - custom: getTestConfigurationTags(this._config.tags), - isGzipCompatible: this._isGzipCompatible, - ...testConfiguration - } - getKnownTestsRequest(configuration, callback) - }) + getKnownTestsRequest(this.getRequestConfiguration(testConfiguration), callback) } /** @@ -162,22 +148,15 @@ class CiVisibilityExporter extends AgentInfoExporter { if (!canUseCiVisProtocol) { return callback(null, {}) } - const configuration = { - url: this._getApiUrl(), - env: this._config.env, - service: this._config.service, - isEvpProxy: !!this._isUsingEvpProxy, - evpProxyPrefix: this.evpProxyPrefix, - custom: getTestConfigurationTags(this._config.tags), - ...testConfiguration - } + const configuration = this.getRequestConfiguration(testConfiguration) + getLibraryConfigurationRequest(configuration, (err, libraryConfig) => { /** * **Important**: this._libraryConfig remains empty in testing frameworks * where the tests run in a subprocess, like Jest, * because `getLibraryConfiguration` is called only once in the main process. */ - this._libraryConfig = this.getConfiguration(libraryConfig) + this._libraryConfig = this.filterConfiguration(libraryConfig) if (err) { callback(err, {}) @@ -188,19 +167,19 @@ class CiVisibilityExporter extends AgentInfoExporter { return callback(gitUploadError, {}) } getLibraryConfigurationRequest(configuration, (err, finalLibraryConfig) => { - this._libraryConfig = this.getConfiguration(finalLibraryConfig) + this._libraryConfig = this.filterConfiguration(finalLibraryConfig) callback(err, this._libraryConfig) }) }) } else { - callback(null, libraryConfig) + callback(null, this._libraryConfig) } }) }) } // Takes into account potential kill switches - getConfiguration (remoteConfiguration) { + filterConfiguration (remoteConfiguration) { if (!remoteConfiguration) { return {} } @@ -209,14 +188,16 @@ class CiVisibilityExporter extends AgentInfoExporter { isSuitesSkippingEnabled, isItrEnabled, requireGit, - isEarlyFlakeDetectionEnabled + isEarlyFlakeDetectionEnabled, + earlyFlakeDetectionNumRetries } = remoteConfiguration return { isCodeCoverageEnabled, isSuitesSkippingEnabled, isItrEnabled, requireGit, - isEarlyFlakeDetectionEnabled: isEarlyFlakeDetectionEnabled && this._config.isEarlyFlakeDetectionEnabled + isEarlyFlakeDetectionEnabled: isEarlyFlakeDetectionEnabled && this._config.isEarlyFlakeDetectionEnabled, + earlyFlakeDetectionNumRetries } } diff --git a/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js b/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js index 61ed0e70ca1..9d722ea3887 100644 --- a/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js +++ b/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js @@ -11,6 +11,8 @@ const { getErrorTypeFromStatusCode } = require('../telemetry') +const DEFAULT_NUM_RETRIES_EARLY_FLAKE_DETECTION = 2 + function getLibraryConfiguration ({ url, isEvpProxy, @@ -89,7 +91,8 @@ function getLibraryConfiguration ({ code_coverage: isCodeCoverageEnabled, tests_skipping: isSuitesSkippingEnabled, itr_enabled: isItrEnabled, - require_git: requireGit + require_git: requireGit, + early_flake_detection: earlyFlakeDetectionConfig } } } = JSON.parse(res) @@ -99,8 +102,9 @@ function getLibraryConfiguration ({ isSuitesSkippingEnabled, isItrEnabled, requireGit, - // TODO: change to backend response - isEarlyFlakeDetectionEnabled: false + isEarlyFlakeDetectionEnabled: earlyFlakeDetectionConfig?.enabled ?? 
false, + earlyFlakeDetectionNumRetries: + earlyFlakeDetectionConfig?.slow_test_retries?.['5s'] || DEFAULT_NUM_RETRIES_EARLY_FLAKE_DETECTION } log.debug(() => `Remote settings: ${JSON.stringify(settings)}`) diff --git a/packages/dd-trace/src/plugins/util/test.js b/packages/dd-trace/src/plugins/util/test.js index 2f64e9863b6..ff64b18d60f 100644 --- a/packages/dd-trace/src/plugins/util/test.js +++ b/packages/dd-trace/src/plugins/util/test.js @@ -48,6 +48,9 @@ const TEST_MODULE_ID = 'test_module_id' const TEST_SUITE_ID = 'test_suite_id' const TEST_TOOLCHAIN = 'test.toolchain' const TEST_SKIPPED_BY_ITR = 'test.skipped_by_itr' +const TEST_IS_NEW = 'test.is_new' +const TEST_EARLY_FLAKE_IS_RETRY = 'test.early_flake.is_retry' +const TEST_EARLY_FLAKE_IS_ENABLED = 'test.early_flake.is_enabled' const CI_APP_ORIGIN = 'ciapp-test' @@ -87,6 +90,9 @@ module.exports = { JEST_WORKER_COVERAGE_PAYLOAD_CODE, TEST_SOURCE_START, TEST_SKIPPED_BY_ITR, + TEST_IS_NEW, + TEST_EARLY_FLAKE_IS_RETRY, + TEST_EARLY_FLAKE_IS_ENABLED, getTestEnvironmentMetadata, getTestParametersString, finishAllTraceSpans, diff --git a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js index 0fa82baba52..fcc773ecc58 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js @@ -166,7 +166,7 @@ describe('CI Visibility Exporter', () => { const ciVisibilityExporter = new CiVisibilityExporter({ port, isIntelligentTestRunnerEnabled: true }) ciVisibilityExporter.getLibraryConfiguration({}, (err, libraryConfig) => { - expect(libraryConfig).to.eql({ + expect(libraryConfig).to.contain({ requireGit: false, isCodeCoverageEnabled: true, isItrEnabled: true, @@ -666,7 +666,7 @@ describe('CI Visibility Exporter', () => { }) }) context('if early flake detection is enabled but can not use CI Visibility protocol', () => { - it('should raise an error', (done) => { + it('should not request known tests', (done) => { const scope = nock(`http://localhost:${port}`) .post('/api/v2/ci/libraries/tests') .reply(200) @@ -676,9 +676,7 @@ describe('CI Visibility Exporter', () => { ciVisibilityExporter._resolveCanUseCiVisProtocol(false) ciVisibilityExporter._libraryConfig = { isEarlyFlakeDetectionEnabled: true } ciVisibilityExporter.getKnownTests({}, (err) => { - expect(err.message).to.include( - 'Known tests can not be requested because CI Visibility protocol can not be used' - ) + expect(err).to.be.null expect(scope.isDone()).not.to.be.true done() }) From 6d65ffd22fc7026a7eacb37ceab04d9d3b3a5dcf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Fri, 9 Feb 2024 16:38:19 +0100 Subject: [PATCH 30/44] [ci-visibility] Fix DD_TAGS (#4044) --- ci/init.js | 4 ---- integration-tests/ci-visibility.spec.js | 8 +++++++- integration-tests/cucumber/cucumber.spec.js | 8 +++++++- integration-tests/cypress/cypress.spec.js | 6 +++++- integration-tests/playwright/playwright.spec.js | 6 +++++- packages/dd-trace/src/config.js | 7 +++++++ 6 files changed, 31 insertions(+), 8 deletions(-) diff --git a/ci/init.js b/ci/init.js index 81849b0e1e1..23451c10174 100644 --- a/ci/init.js +++ b/ci/init.js @@ -1,15 +1,11 @@ /* eslint-disable no-console */ const tracer = require('../packages/dd-trace') -const { ORIGIN_KEY } = require('../packages/dd-trace/src/constants') const { isTrue } = require('../packages/dd-trace/src/util') const 
isJestWorker = !!process.env.JEST_WORKER_ID const options = { startupLogs: false, - tags: { - [ORIGIN_KEY]: 'ciapp-test' - }, isCiVisibility: true, flushInterval: isJestWorker ? 0 : 5000 } diff --git a/integration-tests/ci-visibility.spec.js b/integration-tests/ci-visibility.spec.js index c199c11797f..c3110aac356 100644 --- a/integration-tests/ci-visibility.spec.js +++ b/integration-tests/ci-visibility.spec.js @@ -805,6 +805,11 @@ testFrameworks.forEach(({ if (extraStdout) { assert.include(testOutput, extraStdout) } + // Can read DD_TAGS + testSpans.forEach(testSpan => { + assert.propertyVal(testSpan.meta, 'test.customtag', 'customvalue') + assert.propertyVal(testSpan.meta, 'test.customtag2', 'customvalue2') + }) done() }) @@ -813,7 +818,8 @@ testFrameworks.forEach(({ cwd, env: { DD_TRACE_AGENT_PORT: receiver.port, - NODE_OPTIONS: type === 'esm' ? `-r dd-trace/ci/init --loader=${hookFile}` : '-r dd-trace/ci/init' + NODE_OPTIONS: type === 'esm' ? `-r dd-trace/ci/init --loader=${hookFile}` : '-r dd-trace/ci/init', + DD_TAGS: 'test.customtag:customvalue,test.customtag2:customvalue2' }, stdio: 'pipe' }) diff --git a/integration-tests/cucumber/cucumber.spec.js b/integration-tests/cucumber/cucumber.spec.js index 7ed484e4993..9aa5a23698b 100644 --- a/integration-tests/cucumber/cucumber.spec.js +++ b/integration-tests/cucumber/cucumber.spec.js @@ -225,6 +225,9 @@ versions.forEach(version => { assert.exists(testSuiteId) assert.equal(testModuleId.toString(10), testModuleEventContent.test_module_id.toString(10)) assert.equal(testSessionId.toString(10), testSessionEventContent.test_session_id.toString(10)) + // Can read DD_TAGS + assert.propertyVal(meta, 'test.customtag', 'customvalue') + assert.propertyVal(meta, 'test.customtag2', 'customvalue2') }) stepEvents.forEach(stepEvent => { @@ -237,7 +240,10 @@ versions.forEach(version => { runTestsCommand, { cwd, - env: envVars, + env: { + ...envVars, + DD_TAGS: 'test.customtag:customvalue,test.customtag2:customvalue2' + }, stdio: 'pipe' } ) diff --git a/integration-tests/cypress/cypress.spec.js b/integration-tests/cypress/cypress.spec.js index 85381f41e2e..0c45a356081 100644 --- a/integration-tests/cypress/cypress.spec.js +++ b/integration-tests/cypress/cypress.spec.js @@ -282,6 +282,9 @@ moduleType.forEach(({ assert.exists(testSuiteId) assert.equal(testModuleId.toString(10), testModuleEventContent.test_module_id.toString(10)) assert.equal(testSessionId.toString(10), testSessionEventContent.test_session_id.toString(10)) + // Can read DD_TAGS + assert.propertyVal(meta, 'test.customtag', 'customvalue') + assert.propertyVal(meta, 'test.customtag2', 'customvalue2') }) }, 25000) @@ -296,7 +299,8 @@ moduleType.forEach(({ cwd, env: { ...restEnvVars, - CYPRESS_BASE_URL: `http://localhost:${webAppPort}` + CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, + DD_TAGS: 'test.customtag:customvalue,test.customtag2:customvalue2' }, stdio: 'pipe' } diff --git a/integration-tests/playwright/playwright.spec.js b/integration-tests/playwright/playwright.spec.js index bb0329086b4..61f28384870 100644 --- a/integration-tests/playwright/playwright.spec.js +++ b/integration-tests/playwright/playwright.spec.js @@ -99,6 +99,9 @@ versions.forEach((version) => { testEvents.forEach(testEvent => { assert.exists(testEvent.content.metrics[TEST_SOURCE_START]) + // Can read DD_TAGS + assert.propertyVal(testEvent.content.meta, 'test.customtag', 'customvalue') + assert.propertyVal(testEvent.content.meta, 'test.customtag2', 'customvalue2') }) stepEvents.forEach(stepEvent => { @@ -120,7 +123,8 
@@ versions.forEach((version) => { cwd, env: { ...envVars, - PW_BASE_URL: `http://localhost:${webAppPort}` + PW_BASE_URL: `http://localhost:${webAppPort}`, + DD_TAGS: 'test.customtag:customvalue,test.customtag2:customvalue2' }, stdio: 'pipe' } diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index 8969617f024..7182a9badc1 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -13,6 +13,7 @@ const { GIT_REPOSITORY_URL, GIT_COMMIT_SHA } = require('./plugins/util/tags') const { getGitMetadataFromGitProperties, removeUserSensitiveInfo } = require('./git_properties') const { updateConfig } = require('./telemetry') const { getIsGCPFunction, getIsAzureFunctionConsumptionPlan } = require('./serverless') +const { ORIGIN_KEY } = require('./constants') const fromEntries = Object.fromEntries || (entries => entries.reduce((obj, [k, v]) => Object.assign(obj, { [k]: v }), {})) @@ -710,6 +711,12 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) 'runtime-id': uuid() }) + if (this.isCiVisibility) { + tagger.add(this.tags, { + [ORIGIN_KEY]: 'ciapp-test' + }) + } + if (this.gitMetadataEnabled) { this.repositoryUrl = removeUserSensitiveInfo( coalesce( From 270e902139179752e26fb9ffb69f89ffb1491000 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Mon, 12 Feb 2024 10:52:08 +0100 Subject: [PATCH 31/44] [ci-visibility] Fix test source file to be relative to the repository root (#4030) --- integration-tests/ci-visibility.spec.js | 5 +++ integration-tests/cucumber/cucumber.spec.js | 32 +++---------------- integration-tests/cypress/cypress.spec.js | 4 ++- integration-tests/helpers.js | 8 +++-- .../playwright/playwright.spec.js | 10 +++++- .../datadog-instrumentations/src/cucumber.js | 20 ++++++------ packages/datadog-instrumentations/src/jest.js | 3 ++ packages/datadog-plugin-cucumber/src/index.js | 19 +++++++---- packages/datadog-plugin-cypress/src/plugin.js | 13 +++++++- .../datadog-plugin-cypress/test/index.spec.js | 13 +++++--- packages/datadog-plugin-jest/src/index.js | 21 +++++++++++- packages/datadog-plugin-mocha/src/index.js | 11 ++++++- .../datadog-plugin-playwright/src/index.js | 18 ++++++++--- packages/dd-trace/src/plugins/ci_plugin.js | 2 ++ packages/dd-trace/src/plugins/util/test.js | 1 - 15 files changed, 120 insertions(+), 60 deletions(-) diff --git a/integration-tests/ci-visibility.spec.js b/integration-tests/ci-visibility.spec.js index c3110aac356..8a8765fb0ea 100644 --- a/integration-tests/ci-visibility.spec.js +++ b/integration-tests/ci-visibility.spec.js @@ -25,6 +25,7 @@ const { TEST_ITR_SKIPPING_COUNT, TEST_ITR_UNSKIPPABLE, TEST_ITR_FORCED_RUN, + TEST_SOURCE_FILE, TEST_IS_NEW, TEST_EARLY_FLAKE_IS_RETRY, TEST_EARLY_FLAKE_IS_ENABLED, @@ -811,6 +812,10 @@ testFrameworks.forEach(({ assert.propertyVal(testSpan.meta, 'test.customtag2', 'customvalue2') }) + testSpans.forEach(testSpan => { + assert.equal(testSpan.meta[TEST_SOURCE_FILE].startsWith('ci-visibility/test/ci-visibility-test'), true) + }) + done() }) diff --git a/integration-tests/cucumber/cucumber.spec.js b/integration-tests/cucumber/cucumber.spec.js index 9aa5a23698b..77765852a47 100644 --- a/integration-tests/cucumber/cucumber.spec.js +++ b/integration-tests/cucumber/cucumber.spec.js @@ -25,10 +25,10 @@ const { TEST_ITR_SKIPPING_COUNT, TEST_CODE_COVERAGE_LINES_PCT, TEST_ITR_FORCED_RUN, - TEST_ITR_UNSKIPPABLE + TEST_ITR_UNSKIPPABLE, + TEST_SOURCE_FILE } = 
require('../../packages/dd-trace/src/plugins/util/test') -const hookFile = 'dd-trace/loader-hook.mjs' const isOldNode = semver.satisfies(process.version, '<=16') const versions = ['7.0.0', isOldNode ? '9' : 'latest'] @@ -43,19 +43,6 @@ const moduleType = [ `ci-visibility/features/farewell.feature --parallel 2 --publish-quiet`, featuresPath: 'ci-visibility/features/', fileExtension: 'js' - }, - { - type: 'esm', - runTestsCommand: `node --loader=${hookFile} ./node_modules/.bin/cucumber-js ci-visibility/features-esm/*.feature`, - runTestsWithCoverageCommand: - `./node_modules/nyc/bin/nyc.js -r=text-summary ` + - `node --loader=./node_modules/@istanbuljs/esm-loader-hook/index.js ` + - `--loader=${hookFile} ./node_modules/.bin/cucumber-js ci-visibility/features-esm/*.feature`, - parallelModeCommand: - `node --loader=${hookFile} ./node_modules/.bin/cucumber-js ` + - `ci-visibility/features-esm/farewell.feature --parallel 2 --publish-quiet`, - featuresPath: 'ci-visibility/features-esm/', - fileExtension: 'mjs' } ] @@ -68,24 +55,14 @@ versions.forEach(version => { featuresPath, fileExtension }) => { - // temporary fix for failing esm tests on the CI, skip for now for the release and comeback to solve the issue - if (type === 'esm') { - return - } - - // esm support by cucumber was only added on >= 8.0.0 - // if (type === 'esm' && semver.satisfies(version, '<8.0.0')) { - // return - // } - + // TODO: add esm tests describe(`cucumber@${version} ${type}`, () => { let sandbox, cwd, receiver, childProcess before(async function () { // add an explicit timeout to make tests less flaky this.timeout(50000) - sandbox = await createSandbox([`@cucumber/cucumber@${version}`, 'assert', - 'nyc', '@istanbuljs/esm-loader-hook'], true) + sandbox = await createSandbox([`@cucumber/cucumber@${version}`, 'assert', 'nyc'], true) cwd = sandbox.folder }) @@ -225,6 +202,7 @@ versions.forEach(version => { assert.exists(testSuiteId) assert.equal(testModuleId.toString(10), testModuleEventContent.test_module_id.toString(10)) assert.equal(testSessionId.toString(10), testSessionEventContent.test_session_id.toString(10)) + assert.equal(meta[TEST_SOURCE_FILE].startsWith('ci-visibility/features'), true) // Can read DD_TAGS assert.propertyVal(meta, 'test.customtag', 'customvalue') assert.propertyVal(meta, 'test.customtag2', 'customvalue2') diff --git a/integration-tests/cypress/cypress.spec.js b/integration-tests/cypress/cypress.spec.js index 0c45a356081..5443b4e040c 100644 --- a/integration-tests/cypress/cypress.spec.js +++ b/integration-tests/cypress/cypress.spec.js @@ -26,7 +26,8 @@ const { TEST_ITR_SKIPPING_COUNT, TEST_ITR_SKIPPING_TYPE, TEST_ITR_UNSKIPPABLE, - TEST_ITR_FORCED_RUN + TEST_ITR_FORCED_RUN, + TEST_SOURCE_FILE } = require('../../packages/dd-trace/src/plugins/util/test') const { ERROR_MESSAGE } = require('../../packages/dd-trace/src/constants') const semver = require('semver') @@ -282,6 +283,7 @@ moduleType.forEach(({ assert.exists(testSuiteId) assert.equal(testModuleId.toString(10), testModuleEventContent.test_module_id.toString(10)) assert.equal(testSessionId.toString(10), testSessionEventContent.test_session_id.toString(10)) + assert.equal(meta[TEST_SOURCE_FILE].startsWith('cypress/e2e/'), true) // Can read DD_TAGS assert.propertyVal(meta, 'test.customtag', 'customvalue') assert.propertyVal(meta, 'test.customtag2', 'customvalue2') diff --git a/integration-tests/helpers.js b/integration-tests/helpers.js index dcf13b40cee..c6a51e57f04 100644 --- a/integration-tests/helpers.js +++ b/integration-tests/helpers.js @@ 
-273,8 +273,10 @@ async function curlAndAssertMessage (agent, procOrUrl, fn, timeout, expectedMess } function getCiVisAgentlessConfig (port) { + // We remove GITHUB_WORKSPACE so the repository root is not assigned to dd-trace-js + const { GITHUB_WORKSPACE, ...rest } = process.env return { - ...process.env, + ...rest, DD_API_KEY: '1', DD_CIVISIBILITY_AGENTLESS_ENABLED: 1, DD_CIVISIBILITY_AGENTLESS_URL: `http://127.0.0.1:${port}`, @@ -283,8 +285,10 @@ function getCiVisAgentlessConfig (port) { } function getCiVisEvpProxyConfig (port) { + // We remove GITHUB_WORKSPACE so the repository root is not assigned to dd-trace-js + const { GITHUB_WORKSPACE, ...rest } = process.env return { - ...process.env, + ...rest, DD_TRACE_AGENT_PORT: port, NODE_OPTIONS: '-r dd-trace/ci/init', DD_CIVISIBILITY_AGENTLESS_ENABLED: '0' diff --git a/integration-tests/playwright/playwright.spec.js b/integration-tests/playwright/playwright.spec.js index 61f28384870..0f3bc3e11b4 100644 --- a/integration-tests/playwright/playwright.spec.js +++ b/integration-tests/playwright/playwright.spec.js @@ -12,7 +12,12 @@ const { } = require('../helpers') const { FakeCiVisIntake } = require('../ci-visibility-intake') const webAppServer = require('../ci-visibility/web-app-server') -const { TEST_STATUS, TEST_SOURCE_START, TEST_TYPE } = require('../../packages/dd-trace/src/plugins/util/test') +const { + TEST_STATUS, + TEST_SOURCE_START, + TEST_TYPE, + TEST_SOURCE_FILE +} = require('../../packages/dd-trace/src/plugins/util/test') const versions = ['1.18.0', 'latest'] @@ -99,6 +104,9 @@ versions.forEach((version) => { testEvents.forEach(testEvent => { assert.exists(testEvent.content.metrics[TEST_SOURCE_START]) + assert.equal( + testEvent.content.meta[TEST_SOURCE_FILE].startsWith('ci-visibility/playwright-tests/'), true + ) // Can read DD_TAGS assert.propertyVal(testEvent.content.meta, 'test.customtag', 'customvalue') assert.propertyVal(testEvent.content.meta, 'test.customtag2', 'customvalue2') diff --git a/packages/datadog-instrumentations/src/cucumber.js b/packages/datadog-instrumentations/src/cucumber.js index 738b52d82a9..72d55001ea1 100644 --- a/packages/datadog-instrumentations/src/cucumber.js +++ b/packages/datadog-instrumentations/src/cucumber.js @@ -96,11 +96,11 @@ function wrapRun (pl, isLatestVersion) { const asyncResource = new AsyncResource('bound-anonymous-fn') return asyncResource.runInAsyncScope(() => { - const testSuiteFullPath = this.pickle.uri + const testFileAbsolutePath = this.pickle.uri - if (!pickleResultByFile[testSuiteFullPath]) { // first test in suite + if (!pickleResultByFile[testFileAbsolutePath]) { // first test in suite isUnskippable = isMarkedAsUnskippable(this.pickle) - const testSuitePath = getTestSuitePath(testSuiteFullPath, process.cwd()) + const testSuitePath = getTestSuitePath(testFileAbsolutePath, process.cwd()) isForcedToRun = isUnskippable && skippableSuites.includes(testSuitePath) testSuiteStartCh.publish({ testSuitePath, isUnskippable, isForcedToRun, itrCorrelationId }) @@ -113,7 +113,7 @@ function wrapRun (pl, isLatestVersion) { testStartCh.publish({ testName: this.pickle.name, - fullTestSuite: testSuiteFullPath, + testFileAbsolutePath, testSourceLine }) try { @@ -123,21 +123,21 @@ function wrapRun (pl, isLatestVersion) { const { status, skipReason, errorMessage } = isLatestVersion ? 
getStatusFromResultLatest(result) : getStatusFromResult(result) - if (!pickleResultByFile[testSuiteFullPath]) { - pickleResultByFile[testSuiteFullPath] = [status] + if (!pickleResultByFile[testFileAbsolutePath]) { + pickleResultByFile[testFileAbsolutePath] = [status] } else { - pickleResultByFile[testSuiteFullPath].push(status) + pickleResultByFile[testFileAbsolutePath].push(status) } testFinishCh.publish({ status, skipReason, errorMessage }) // last test in suite - if (pickleResultByFile[testSuiteFullPath].length === pickleByFile[testSuiteFullPath].length) { - const testSuiteStatus = getSuiteStatusFromTestStatuses(pickleResultByFile[testSuiteFullPath]) + if (pickleResultByFile[testFileAbsolutePath].length === pickleByFile[testFileAbsolutePath].length) { + const testSuiteStatus = getSuiteStatusFromTestStatuses(pickleResultByFile[testFileAbsolutePath]) if (global.__coverage__) { const coverageFiles = getCoveredFilenamesFromCoverage(global.__coverage__) testSuiteCodeCoverageCh.publish({ coverageFiles, - suiteFile: testSuiteFullPath + suiteFile: testFileAbsolutePath }) // We need to reset coverage to get a code coverage per suite // Before that, we preserve the original coverage diff --git a/packages/datadog-instrumentations/src/jest.js b/packages/datadog-instrumentations/src/jest.js index 03dcdc638ee..2d67aabb17c 100644 --- a/packages/datadog-instrumentations/src/jest.js +++ b/packages/datadog-instrumentations/src/jest.js @@ -113,6 +113,7 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) { const rootDir = config.globalConfig ? config.globalConfig.rootDir : config.rootDir this.rootDir = rootDir this.testSuite = getTestSuitePath(context.testPath, rootDir) + this.testFileAbsolutePath = context.testPath this.nameToParams = {} this.global._ddtrace = global._ddtrace @@ -194,6 +195,7 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) { testStartCh.publish({ name: removeEfdTestName(testName), suite: this.testSuite, + testFileAbsolutePath: this.testFileAbsolutePath, runner: 'jest-circus', testParameters, frameworkVersion: jestVersion, @@ -244,6 +246,7 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) { testSkippedCh.publish({ name: getJestTestName(event.test), suite: this.testSuite, + testFileAbsolutePath: this.testFileAbsolutePath, runner: 'jest-circus', frameworkVersion: jestVersion, testStartLine: getTestLineStart(event.test.asyncError, this.testSuite) diff --git a/packages/datadog-plugin-cucumber/src/index.js b/packages/datadog-plugin-cucumber/src/index.js index e44d0c5bd70..51146d5022d 100644 --- a/packages/datadog-plugin-cucumber/src/index.js +++ b/packages/datadog-plugin-cucumber/src/index.js @@ -14,7 +14,8 @@ const { TEST_ITR_UNSKIPPABLE, TEST_ITR_FORCED_RUN, TEST_CODE_OWNERS, - ITR_CORRELATION_ID + ITR_CORRELATION_ID, + TEST_SOURCE_FILE } = require('../../dd-trace/src/plugins/util/test') const { RESOURCE_NAME } = require('../../../ext/tags') const { COMPONENT, ERROR_MESSAGE } = require('../../dd-trace/src/constants') @@ -122,7 +123,7 @@ class CucumberPlugin extends CiPlugin { } const relativeCoverageFiles = [...coverageFiles, suiteFile] - .map(filename => getTestSuitePath(filename, this.sourceRoot)) + .map(filename => getTestSuitePath(filename, this.repositoryRoot)) this.telemetry.distribution(TELEMETRY_CODE_COVERAGE_NUM_FILES, {}, relativeCoverageFiles.length) @@ -136,10 +137,11 @@ class CucumberPlugin extends CiPlugin { this.telemetry.ciVisEvent(TELEMETRY_CODE_COVERAGE_FINISHED, 'suite', { library: 'istanbul' }) }) - 
this.addSub('ci:cucumber:test:start', ({ testName, fullTestSuite, testSourceLine }) => { + this.addSub('ci:cucumber:test:start', ({ testName, testFileAbsolutePath, testSourceLine }) => { const store = storage.getStore() - const testSuite = getTestSuitePath(fullTestSuite, this.sourceRoot) - const testSpan = this.startTestSpan(testName, testSuite, testSourceLine) + const testSuite = getTestSuitePath(testFileAbsolutePath, this.sourceRoot) + const testSourceFile = getTestSuitePath(testFileAbsolutePath, this.repositoryRoot) + const testSpan = this.startTestSpan(testName, testSuite, testSourceFile, testSourceLine) this.enter(testSpan, store) }) @@ -191,12 +193,15 @@ class CucumberPlugin extends CiPlugin { }) } - startTestSpan (testName, testSuite, testSourceLine) { + startTestSpan (testName, testSuite, testSourceFile, testSourceLine) { return super.startTestSpan( testName, testSuite, this.testSuiteSpan, - { [TEST_SOURCE_START]: testSourceLine } + { + [TEST_SOURCE_START]: testSourceLine, + [TEST_SOURCE_FILE]: testSourceFile + } ) } } diff --git a/packages/datadog-plugin-cypress/src/plugin.js b/packages/datadog-plugin-cypress/src/plugin.js index 9909c9d3a3c..8d4d93a6c14 100644 --- a/packages/datadog-plugin-cypress/src/plugin.js +++ b/packages/datadog-plugin-cypress/src/plugin.js @@ -24,7 +24,8 @@ const { TEST_SKIPPED_BY_ITR, TEST_ITR_UNSKIPPABLE, TEST_ITR_FORCED_RUN, - ITR_CORRELATION_ID + ITR_CORRELATION_ID, + TEST_SOURCE_FILE } = require('../../dd-trace/src/plugins/util/test') const { ORIGIN_KEY, COMPONENT } = require('../../dd-trace/src/constants') const log = require('../../dd-trace/src/log') @@ -361,6 +362,11 @@ module.exports = (on, config) => { cypressTestName === test.name && spec.relative === test.suite ) const skippedTestSpan = getTestSpan(cypressTestName, spec.relative) + if (spec.absolute && repositoryRoot) { + skippedTestSpan.setTag(TEST_SOURCE_FILE, getTestSuitePath(spec.absolute, repositoryRoot)) + } else { + skippedTestSpan.setTag(TEST_SOURCE_FILE, spec.relative) + } skippedTestSpan.setTag(TEST_STATUS, 'skip') if (isSkippedByItr) { skippedTestSpan.setTag(TEST_SKIPPED_BY_ITR, 'true') @@ -392,6 +398,11 @@ module.exports = (on, config) => { if (itrCorrelationId) { finishedTest.testSpan.setTag(ITR_CORRELATION_ID, itrCorrelationId) } + if (spec.absolute && repositoryRoot) { + finishedTest.testSpan.setTag(TEST_SOURCE_FILE, getTestSuitePath(spec.absolute, repositoryRoot)) + } else { + finishedTest.testSpan.setTag(TEST_SOURCE_FILE, spec.relative) + } finishedTest.testSpan.finish(finishedTest.finishTime) }) diff --git a/packages/datadog-plugin-cypress/test/index.spec.js b/packages/datadog-plugin-cypress/test/index.spec.js index 60bdc1fd22f..39dc9855b66 100644 --- a/packages/datadog-plugin-cypress/test/index.spec.js +++ b/packages/datadog-plugin-cypress/test/index.spec.js @@ -51,15 +51,18 @@ describe('Plugin', function () { this.timeout(testTimeout) it('instruments tests', function (done) { process.env.DD_TRACE_AGENT_PORT = agentListenPort + const testSuiteFolder = semver.intersects(version, '>=10') + ? 'app-10' : 'app' + cypressExecutable.run({ - project: semver.intersects(version, '>=10') - ? 
'./packages/datadog-plugin-cypress/test/app-10' : './packages/datadog-plugin-cypress/test/app', + project: `./packages/datadog-plugin-cypress/test/${testSuiteFolder}`, config: { baseUrl: `http://localhost:${appPort}` }, quiet: true, headless: true }) + agent.use(traces => { const passedTestSpan = traces[0][0] const failedTestSpan = traces[1][0] @@ -77,7 +80,8 @@ describe('Plugin', function () { [TEST_NAME]: 'can visit a page renders a hello world', [TEST_STATUS]: 'pass', [TEST_SUITE]: 'cypress/integration/integration-test.js', - [TEST_SOURCE_FILE]: 'cypress/integration/integration-test.js', + [TEST_SOURCE_FILE]: + `packages/datadog-plugin-cypress/test/${testSuiteFolder}/cypress/integration/integration-test.js`, [TEST_TYPE]: 'browser', [ORIGIN_KEY]: CI_APP_ORIGIN, [TEST_IS_RUM_ACTIVE]: 'true', @@ -102,7 +106,8 @@ describe('Plugin', function () { [TEST_NAME]: 'can visit a page will fail', [TEST_STATUS]: 'fail', [TEST_SUITE]: 'cypress/integration/integration-test.js', - [TEST_SOURCE_FILE]: 'cypress/integration/integration-test.js', + [TEST_SOURCE_FILE]: + `packages/datadog-plugin-cypress/test/${testSuiteFolder}/cypress/integration/integration-test.js`, [TEST_TYPE]: 'browser', [ORIGIN_KEY]: CI_APP_ORIGIN, [ERROR_TYPE]: 'AssertionError', diff --git a/packages/datadog-plugin-jest/src/index.js b/packages/datadog-plugin-jest/src/index.js index 9c0240f5986..fb4768973e6 100644 --- a/packages/datadog-plugin-jest/src/index.js +++ b/packages/datadog-plugin-jest/src/index.js @@ -15,6 +15,8 @@ const { TEST_ITR_FORCED_RUN, TEST_CODE_OWNERS, ITR_CORRELATION_ID, + TEST_SOURCE_FILE, + getTestSuitePath, TEST_IS_NEW, TEST_EARLY_FLAKE_IS_RETRY, TEST_EARLY_FLAKE_IS_ENABLED @@ -296,7 +298,17 @@ class JestPlugin extends CiPlugin { } startTestSpan (test) { - const { suite, name, runner, testParameters, frameworkVersion, testStartLine, isNew, isEfdRetry } = test + const { + suite, + name, + runner, + testParameters, + frameworkVersion, + testStartLine, + testFileAbsolutePath, + isNew, + isEfdRetry + } = test const extraTags = { [JEST_TEST_RUNNER]: runner, @@ -306,6 +318,13 @@ class JestPlugin extends CiPlugin { if (testStartLine) { extraTags[TEST_SOURCE_START] = testStartLine } + if (testFileAbsolutePath) { + extraTags[TEST_SOURCE_FILE] = getTestSuitePath(testFileAbsolutePath, this.repositoryRoot) + } else { + // If for whatever we don't have the full path, we'll set the source file to the suite name + extraTags[TEST_SOURCE_FILE] = suite + } + if (isNew) { extraTags[TEST_IS_NEW] = 'true' if (isEfdRetry) { diff --git a/packages/datadog-plugin-mocha/src/index.js b/packages/datadog-plugin-mocha/src/index.js index 644371c3ec6..9e736f5c3ae 100644 --- a/packages/datadog-plugin-mocha/src/index.js +++ b/packages/datadog-plugin-mocha/src/index.js @@ -15,7 +15,8 @@ const { TEST_ITR_UNSKIPPABLE, TEST_ITR_FORCED_RUN, TEST_CODE_OWNERS, - ITR_CORRELATION_ID + ITR_CORRELATION_ID, + TEST_SOURCE_FILE } = require('../../dd-trace/src/plugins/util/test') const { COMPONENT } = require('../../dd-trace/src/constants') const { @@ -244,6 +245,14 @@ class MochaPlugin extends CiPlugin { const testSuite = getTestSuitePath(testSuiteAbsolutePath, this.sourceRoot) const testSuiteSpan = this._testSuites.get(testSuiteAbsolutePath) + const testSourceFile = getTestSuitePath(testSuiteAbsolutePath, this.repositoryRoot) + + if (testSourceFile) { + extraTags[TEST_SOURCE_FILE] = testSourceFile + } else { + extraTags[TEST_SOURCE_FILE] = testSuite + } + return super.startTestSpan(testName, testSuite, testSuiteSpan, extraTags) } } diff --git 
a/packages/datadog-plugin-playwright/src/index.js b/packages/datadog-plugin-playwright/src/index.js index 9fa6196240e..45de3c4afe4 100644 --- a/packages/datadog-plugin-playwright/src/index.js +++ b/packages/datadog-plugin-playwright/src/index.js @@ -9,7 +9,8 @@ const { getTestSuitePath, getTestSuiteCommonTags, TEST_SOURCE_START, - TEST_CODE_OWNERS + TEST_CODE_OWNERS, + TEST_SOURCE_FILE } = require('../../dd-trace/src/plugins/util/test') const { RESOURCE_NAME } = require('../../../ext/tags') const { COMPONENT } = require('../../dd-trace/src/constants') @@ -79,7 +80,8 @@ class PlaywrightPlugin extends CiPlugin { this.addSub('ci:playwright:test:start', ({ testName, testSuiteAbsolutePath, testSourceLine }) => { const store = storage.getStore() const testSuite = getTestSuitePath(testSuiteAbsolutePath, this.rootDir) - const span = this.startTestSpan(testName, testSuite, testSourceLine) + const testSourceFile = getTestSuitePath(testSuiteAbsolutePath, this.repositoryRoot) + const span = this.startTestSpan(testName, testSuite, testSourceFile, testSourceLine) this.enter(span, store) }) @@ -126,9 +128,17 @@ class PlaywrightPlugin extends CiPlugin { }) } - startTestSpan (testName, testSuite, testSourceLine) { + startTestSpan (testName, testSuite, testSourceFile, testSourceLine) { const testSuiteSpan = this._testSuites.get(testSuite) - return super.startTestSpan(testName, testSuite, testSuiteSpan, { [TEST_SOURCE_START]: testSourceLine }) + + const extraTags = { + [TEST_SOURCE_START]: testSourceLine + } + if (testSourceFile) { + extraTags[TEST_SOURCE_FILE] = testSourceFile || testSuite + } + + return super.startTestSpan(testName, testSuite, testSuiteSpan, extraTags) } } diff --git a/packages/dd-trace/src/plugins/ci_plugin.js b/packages/dd-trace/src/plugins/ci_plugin.js index e8565bfe174..1d98dad1218 100644 --- a/packages/dd-trace/src/plugins/ci_plugin.js +++ b/packages/dd-trace/src/plugins/ci_plugin.js @@ -166,6 +166,8 @@ module.exports = class CiPlugin extends Plugin { [CI_WORKSPACE_PATH]: repositoryRoot } = this.testEnvironmentMetadata + this.repositoryRoot = repositoryRoot || process.cwd() + this.codeOwnersEntries = getCodeOwnersFileEntries(repositoryRoot) this.isUnsupportedCIProvider = !ciProviderName diff --git a/packages/dd-trace/src/plugins/util/test.js b/packages/dd-trace/src/plugins/util/test.js index ff64b18d60f..ba18076bcca 100644 --- a/packages/dd-trace/src/plugins/util/test.js +++ b/packages/dd-trace/src/plugins/util/test.js @@ -259,7 +259,6 @@ function getTestCommonTags (name, suite, version, testFramework) { [SAMPLING_PRIORITY]: AUTO_KEEP, [TEST_NAME]: name, [TEST_SUITE]: suite, - [TEST_SOURCE_FILE]: suite, [RESOURCE_NAME]: `${suite}.${name}`, [TEST_FRAMEWORK_VERSION]: version, [LIBRARY_VERSION]: ddTraceVersion From f8b04fd346b5caf58a4ddab14adec5907f2536dc Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Mon, 12 Feb 2024 08:08:31 -0800 Subject: [PATCH 32/44] allow metrics UDP fallback in case of network errors (#4042) --- packages/dd-trace/src/dogstatsd.js | 8 +++---- packages/dd-trace/test/dogstatsd.spec.js | 28 +++++++++++++++++++++++- 2 files changed, 30 insertions(+), 6 deletions(-) diff --git a/packages/dd-trace/src/dogstatsd.js b/packages/dd-trace/src/dogstatsd.js index 65a1dd618d7..4427ecbe0ed 100644 --- a/packages/dd-trace/src/dogstatsd.js +++ b/packages/dd-trace/src/dogstatsd.js @@ -67,16 +67,14 @@ class DogStatsDClient { request(buffer, this._httpOptions, (err) => { if (err) { log.error('HTTP error from agent: ' + err.stack) - if (err.status) { + if (err.status === 404) { 
// Inside this if-block, we have connectivity to the agent, but // we're not getting a 200 from the proxy endpoint. If it's a 404, // then we know we'll never have the endpoint, so just clear out the // options. Either way, we can give UDP a try. - if (err.status === 404) { - this._httpOptions = null - } - this._sendUdp(queue) + this._httpOptions = null } + this._sendUdp(queue) } }) } diff --git a/packages/dd-trace/test/dogstatsd.spec.js b/packages/dd-trace/test/dogstatsd.spec.js index 0666c559374..2054bbaa29e 100644 --- a/packages/dd-trace/test/dogstatsd.spec.js +++ b/packages/dd-trace/test/dogstatsd.spec.js @@ -296,7 +296,7 @@ describe('dogstatsd', () => { client.flush() }) - it('should fail over to UDP', (done) => { + it('should fail over to UDP when receiving HTTP 404 error from agent', (done) => { assertData = () => { setTimeout(() => { try { @@ -321,6 +321,32 @@ describe('dogstatsd', () => { client.flush() }) + it('should fail over to UDP when receiving network error from agent', (done) => { + setTimeout(() => { + try { + expect(udp4.send).to.have.been.called + expect(udp4.send.firstCall.args[0].toString()).to.equal('test.foo:10|c\n') + expect(udp4.send.firstCall.args[2]).to.equal(14) + done() + } catch (e) { + done(e) + } + }, 10) + + statusCode = null + + // host exists but port does not, ECONNREFUSED + client = new DogStatsDClient({ + metricsProxyUrl: `http://localhost:32700`, + host: 'localhost', + port: 8125 + }) + + client.increment('test.foo', 10) + + client.flush() + }) + describe('CustomMetrics', () => { it('.gauge()', () => { client = new CustomMetrics({ dogstatsd: {} }) From d6589fd18f7beb92905d22f27e9b1ec2d06baba4 Mon Sep 17 00:00:00 2001 From: Ugaitz Urien Date: Mon, 12 Feb 2024 20:46:37 +0100 Subject: [PATCH 33/44] Fix mongoose IAST exec with callback (#4045) * Add test for fix * Another test, expecting to detect the vuln * fix mongoose instru not supporting deferred callback * add more tests * make sure callback is not wrapped twice --------- Co-authored-by: simon-id --- .../datadog-instrumentations/src/mongoose.js | 33 ++- ...n-mongodb-analyzer.mongoose.plugin.spec.js | 216 ++++++++++++------ 2 files changed, 172 insertions(+), 77 deletions(-) diff --git a/packages/datadog-instrumentations/src/mongoose.js b/packages/datadog-instrumentations/src/mongoose.js index 4b13eaccdb3..b155124285e 100644 --- a/packages/datadog-instrumentations/src/mongoose.js +++ b/packages/datadog-instrumentations/src/mongoose.js @@ -79,21 +79,26 @@ addHook({ }) let callbackWrapped = false - const lastArgumentIndex = arguments.length - 1 - if (typeof arguments[lastArgumentIndex] === 'function') { - // is a callback, wrap it to execute finish() - shimmer.wrap(arguments, lastArgumentIndex, originalCb => { - return function () { - finish() + const wrapCallbackIfExist = (args) => { + const lastArgumentIndex = args.length - 1 - return originalCb.apply(this, arguments) - } - }) + if (typeof args[lastArgumentIndex] === 'function') { + // is a callback, wrap it to execute finish() + shimmer.wrap(args, lastArgumentIndex, originalCb => { + return function () { + finish() + + return originalCb.apply(this, arguments) + } + }) - callbackWrapped = true + callbackWrapped = true + } } + wrapCallbackIfExist(arguments) + return asyncResource.runInAsyncScope(() => { startCh.publish({ filters, @@ -106,8 +111,16 @@ addHook({ if (!callbackWrapped) { shimmer.wrap(res, 'exec', originalExec => { return function wrappedExec () { + if (!callbackWrapped) { + wrapCallbackIfExist(arguments) + } + const execResult = 
originalExec.apply(this, arguments) + if (callbackWrapped || typeof execResult?.then !== 'function') { + return execResult + } + // wrap them method, wrap resolve and reject methods shimmer.wrap(execResult, 'then', originalThen => { return function wrappedThen () { diff --git a/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.mongoose.plugin.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.mongoose.plugin.spec.js index 787f737c156..f09264225a9 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.mongoose.plugin.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.mongoose.plugin.spec.js @@ -52,91 +52,173 @@ describe('nosql injection detection in mongodb - whole feature', () => { prepareTestServerForIastInExpress('Test with mongoose', expressVersion, (testThatRequestHasVulnerability, testThatRequestHasNoVulnerability) => { - testThatRequestHasVulnerability({ - fn: async (req, res) => { - Test.find({ - name: req.query.key, - value: [1, 2, - 'value', - false, req.query.key] - }).then(() => { - res.end() - }) - }, - vulnerability: 'NOSQL_MONGODB_INJECTION', - makeRequest: (done, config) => { - axios.get(`http://localhost:${config.port}/?key=value`).catch(done) - } - }) - - testThatRequestHasVulnerability({ - fn: async (req, res) => { - Test.find({ - name: { - child: [req.query.key] - } - }).then(() => { - res.end() - }) - }, - vulnerability: 'NOSQL_MONGODB_INJECTION', - makeRequest: (done, config) => { - axios.get(`http://localhost:${config.port}/?key=value`).catch(done) - } - }) - - testThatRequestHasVulnerability({ - testDescription: 'should have NOSQL_MONGODB_INJECTION vulnerability in correct file and line', - fn: async (req, res) => { - const filter = { - name: { - child: [req.query.key] - } + describe('using promises', () => { + testThatRequestHasVulnerability({ + fn: async (req, res) => { + Test.find({ + name: req.query.key, + value: [1, 2, + 'value', + false, req.query.key] + }).then(() => { + res.end() + }) + }, + vulnerability: 'NOSQL_MONGODB_INJECTION', + makeRequest: (done, config) => { + axios.get(`http://localhost:${config.port}/?key=value`).catch(done) } - require(tmpFilePath)(Test, filter, () => { - res.end() - }) - }, - vulnerability: 'NOSQL_MONGODB_INJECTION', - makeRequest: (done, config) => { - axios.get(`http://localhost:${config.port}/?key=value`).catch(done) - }, - occurrences: { - occurrences: 1, - location: { - path: vulnerableMethodFilename, - line: 4 + }) + + testThatRequestHasVulnerability({ + fn: async (req, res) => { + Test.find({ + name: { + child: [req.query.key] + } + }).then(() => { + res.end() + }) + }, + vulnerability: 'NOSQL_MONGODB_INJECTION', + makeRequest: (done, config) => { + axios.get(`http://localhost:${config.port}/?key=value`).catch(done) } - } - }) + }) - if (semver.satisfies(specificMongooseVersion, '>=6')) { - testThatRequestHasNoVulnerability({ - testDescription: 'should not have NOSQL_MONGODB_INJECTION vulnerability with mongoose.sanitizeFilter', + testThatRequestHasVulnerability({ + testDescription: 'should have NOSQL_MONGODB_INJECTION vulnerability using promise in exec method', fn: async (req, res) => { - const filter = mongoose.sanitizeFilter({ + Test.find({ name: { child: [req.query.key] } + }).exec().then(() => { + res.end() }) - Test.find(filter).then(() => { + }, + vulnerability: 'NOSQL_MONGODB_INJECTION', + makeRequest: (done, config) => { + 
axios.get(`http://localhost:${config.port}/?key=value`).catch(done) + } + }) + + testThatRequestHasVulnerability({ + testDescription: 'should have NOSQL_MONGODB_INJECTION vulnerability in correct file and line', + fn: async (req, res) => { + const filter = { + name: { + child: [req.query.key] + } + } + require(tmpFilePath)(Test, filter, () => { res.end() }) }, vulnerability: 'NOSQL_MONGODB_INJECTION', makeRequest: (done, config) => { axios.get(`http://localhost:${config.port}/?key=value`).catch(done) + }, + occurrences: { + occurrences: 1, + location: { + path: vulnerableMethodFilename, + line: 4 + } } }) - } - testThatRequestHasNoVulnerability(async (req, res) => { - Test.find({ - name: 'test' - }).then(() => { - res.end() + if (semver.satisfies(specificMongooseVersion, '>=6')) { + testThatRequestHasNoVulnerability({ + testDescription: 'should not have NOSQL_MONGODB_INJECTION vulnerability with mongoose.sanitizeFilter', + fn: async (req, res) => { + const filter = mongoose.sanitizeFilter({ + name: { + child: [req.query.key] + } + }) + Test.find(filter).then(() => { + res.end() + }) + }, + vulnerability: 'NOSQL_MONGODB_INJECTION', + makeRequest: (done, config) => { + axios.get(`http://localhost:${config.port}/?key=value`).catch(done) + } + }) + } + + testThatRequestHasNoVulnerability(async (req, res) => { + Test.find({ + name: 'test' + }).then(() => { + res.end() + }) + }, 'NOSQL_MONGODB_INJECTION') + }) + + if (semver.satisfies(specificMongooseVersion, '<7')) { + describe('using callbacks', () => { + testThatRequestHasNoVulnerability(async (req, res) => { + try { + Test.find({ + name: 'test' + }).exec(() => { + res.end() + }) + } catch (e) { + res.writeHead(500) + res.end() + } + }, 'NOSQL_MONGODB_INJECTION') + + testThatRequestHasVulnerability({ + textDescription: 'should have NOSQL_MONGODB_INJECTION vulnerability using callback in exec', + fn: async (req, res) => { + try { + Test.find({ + name: req.query.key, + value: [1, 2, + 'value', + false, req.query.key] + }).exec(() => { + res.end() + }) + } catch (e) { + res.writeHead(500) + res.end() + } + }, + vulnerability: 'NOSQL_MONGODB_INJECTION', + makeRequest: (done, config) => { + axios.get(`http://localhost:${config.port}/?key=value`).catch(done) + } + }) + + testThatRequestHasVulnerability({ + textDescription: 'should have NOSQL_MONGODB_INJECTION vulnerability using callback in find', + fn: async (req, res) => { + try { + Test.find({ + name: req.query.key, + value: [1, 2, + 'value', + false, req.query.key] + }, () => { + res.end() + }) + } catch (e) { + res.writeHead(500) + res.end() + } + }, + vulnerability: 'NOSQL_MONGODB_INJECTION', + makeRequest: (done, config) => { + axios.get(`http://localhost:${config.port}/?key=value`).catch(done) + } + }) }) - }, 'NOSQL_MONGODB_INJECTION') + } }) }) }) From 1c32782fe882c8a7bbd9bd08dcde22ba9655f196 Mon Sep 17 00:00:00 2001 From: Ida Liu <119438987+ida613@users.noreply.github.com> Date: Mon, 12 Feb 2024 15:27:23 -0500 Subject: [PATCH 34/44] APM uninstrumentation (#4014) extends remote config support for DD_TRACING_ENABLED --- packages/dd-trace/src/config.js | 9 ++-- packages/dd-trace/src/proxy.js | 53 +++++++++++-------- packages/dd-trace/src/span_processor.js | 6 ++- packages/dd-trace/test/proxy.spec.js | 32 ++++++++++- packages/dd-trace/test/span_processor.spec.js | 20 +++++++ 5 files changed, 89 insertions(+), 31 deletions(-) diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index 7182a9badc1..9cb9a020c1e 100644 --- a/packages/dd-trace/src/config.js +++ 
b/packages/dd-trace/src/config.js @@ -110,10 +110,6 @@ class Config { log.use(this.logger) log.toggle(this.debug, this.logLevel, this) - const DD_TRACING_ENABLED = coalesce( - process.env.DD_TRACING_ENABLED, - true - ) const DD_PROFILING_ENABLED = coalesce( options.profiling, // TODO: remove when enabled by default process.env.DD_EXPERIMENTAL_PROFILING_ENABLED, @@ -570,7 +566,6 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) const defaultFlushInterval = inAWSLambda ? 0 : 2000 - this.tracing = !isFalse(DD_TRACING_ENABLED) this.dbmPropagationMode = DD_DBM_PROPAGATION_MODE this.dsmEnabled = isTrue(DD_DATA_STREAMS_ENABLED) this.openAiLogsEnabled = DD_OPENAI_LOGS_ENABLED @@ -786,6 +781,7 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) this._setBoolean(defaults, 'logInjection', false) this._setArray(defaults, 'headerTags', []) this._setValue(defaults, 'tags', {}) + this._setBoolean(defaults, 'tracing', true) } _applyEnvironment () { @@ -799,6 +795,7 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) DD_TRACE_HEADER_TAGS, DD_TRACE_SAMPLE_RATE, DD_TRACE_TAGS, + DD_TRACING_ENABLED, DD_VERSION } = process.env @@ -816,6 +813,7 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) this._setBoolean(env, 'logInjection', DD_LOGS_INJECTION) this._setArray(env, 'headerTags', DD_TRACE_HEADER_TAGS) this._setTags(env, 'tags', tags) + this._setBoolean(env, 'tracing', DD_TRACING_ENABLED) } _applyOptions (options) { @@ -850,6 +848,7 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) this._setBoolean(opts, 'logInjection', options.log_injection_enabled) this._setArray(opts, 'headerTags', headerTags) this._setTags(opts, 'tags', tags) + this._setBoolean(opts, 'tracing', options.tracing_enabled) } _setBoolean (obj, name, value) { diff --git a/packages/dd-trace/src/proxy.js b/packages/dd-trace/src/proxy.js index 1cbdf582c7c..3228145a67d 100644 --- a/packages/dd-trace/src/proxy.js +++ b/packages/dd-trace/src/proxy.js @@ -19,6 +19,7 @@ class Tracer extends NoopProxy { this._initialized = false this._pluginManager = new PluginManager(this) this.dogstatsd = new dogstatsd.NoopDogStatsDClient() + this._tracingInitialized = false } init (options) { @@ -28,6 +29,7 @@ class Tracer extends NoopProxy { try { const config = new Config(options) // TODO: support dynamic code config + telemetry.start(config, this._pluginManager) if (config.dogstatsd) { // Custom Metrics @@ -60,11 +62,7 @@ class Tracer extends NoopProxy { } else { config.configure(conf.lib_config, true) } - - if (config.tracing) { - this._tracer.configure(config) - this._pluginManager.configure(config) - } + this._enableOrDisableTracing(config) }) } @@ -89,25 +87,9 @@ class Tracer extends NoopProxy { runtimeMetrics.start(config) } - if (config.tracing) { - // TODO: This should probably not require tracing to be enabled. 
- telemetry.start(config, this._pluginManager) - - // dirty require for now so zero appsec code is executed unless explicitly enabled - if (config.appsec.enabled) { - require('./appsec').enable(config) - } - - this._tracer = new DatadogTracer(config) - this.appsec = new AppsecSdk(this._tracer, config) - - if (config.iast.enabled) { - require('./appsec/iast').enable(config, this._tracer) - } - - this._pluginManager.configure(config) - setStartupLogPluginManager(this._pluginManager) + this._enableOrDisableTracing(config) + if (config.tracing) { if (config.isManualApiEnabled) { const TestApiManualPlugin = require('./ci-visibility/test-api-manual/test-api-manual-plugin') this._testApiManualPlugin = new TestApiManualPlugin(this) @@ -121,6 +103,31 @@ class Tracer extends NoopProxy { return this } + _enableOrDisableTracing (config) { + if (config.tracing !== false) { + // dirty require for now so zero appsec code is executed unless explicitly enabled + if (config.appsec.enabled) { + require('./appsec').enable(config) + } + if (!this._tracingInitialized) { + this._tracer = new DatadogTracer(config) + this.appsec = new AppsecSdk(this._tracer, config) + this._tracingInitialized = true + } + if (config.iast.enabled) { + require('./appsec/iast').enable(config, this._tracer) + } + } else { + require('./appsec').disable() + require('./appsec/iast').disable() + } + if (this._tracingInitialized) { + this._tracer.configure(config) + this._pluginManager.configure(config) + setStartupLogPluginManager(this._pluginManager) + } + } + profilerStarted () { if (!this._profilerStarted) { throw new Error('profilerStarted() must be called after init()') diff --git a/packages/dd-trace/src/span_processor.js b/packages/dd-trace/src/span_processor.js index aea348b11fb..f8feec0886b 100644 --- a/packages/dd-trace/src/span_processor.js +++ b/packages/dd-trace/src/span_processor.js @@ -27,10 +27,14 @@ class SpanProcessor { const active = [] const formatted = [] const trace = spanContext._trace - const { flushMinSpans } = this._config + const { flushMinSpans, tracing } = this._config const { started, finished } = trace if (trace.record === false) return + if (tracing === false) { + this._erase(trace, active) + return + } if (started.length === finished.length || finished.length >= flushMinSpans) { this._prioritySampler.sample(spanContext) this._spanSampler.sample(spanContext) diff --git a/packages/dd-trace/test/proxy.spec.js b/packages/dd-trace/test/proxy.spec.js index 21a5443826e..b9f58ed49e3 100644 --- a/packages/dd-trace/test/proxy.spec.js +++ b/packages/dd-trace/test/proxy.spec.js @@ -130,7 +130,8 @@ describe('TracerProxy', () => { } appsec = { - enable: sinon.spy() + enable: sinon.spy(), + disable: sinon.spy() } telemetry = { @@ -138,7 +139,8 @@ describe('TracerProxy', () => { } iast = { - enable: sinon.spy() + enable: sinon.spy(), + disable: sinon.spy() } remoteConfig = { @@ -233,6 +235,31 @@ describe('TracerProxy', () => { expect(pluginManager.configure).to.have.been.calledWith(config) }) + it('should support applying remote config', () => { + const RemoteConfigProxy = proxyquire('../src/proxy', { + './tracer': DatadogTracer, + './appsec': appsec, + './appsec/iast': iast, + './appsec/remote_config': remoteConfig, + './appsec/sdk': AppsecSdk + }) + + const remoteConfigProxy = new RemoteConfigProxy() + remoteConfigProxy.init() + expect(DatadogTracer).to.have.been.calledOnce + expect(AppsecSdk).to.have.been.calledOnce + + let conf = { tracing_enabled: false } + rc.emit('APM_TRACING', 'apply', { lib_config: conf }) + 
expect(appsec.disable).to.have.been.called + expect(iast.disable).to.have.been.called + + conf = { tracing_enabled: true } + rc.emit('APM_TRACING', 'apply', { lib_config: conf }) + expect(DatadogTracer).to.have.been.calledOnce + expect(AppsecSdk).to.have.been.calledOnce + }) + it('should start capturing runtimeMetrics when configured', () => { config.runtimeMetrics = true @@ -365,6 +392,7 @@ describe('TracerProxy', () => { './log': log, './profiler': null, // this will cause the import failure error './appsec': appsec, + './telemetry': telemetry, './appsec/remote_config': remoteConfig }) diff --git a/packages/dd-trace/test/span_processor.spec.js b/packages/dd-trace/test/span_processor.spec.js index 584fd680023..5198e1702bc 100644 --- a/packages/dd-trace/test/span_processor.spec.js +++ b/packages/dd-trace/test/span_processor.spec.js @@ -131,4 +131,24 @@ describe('SpanProcessor', () => { expect(SpanSampler).to.have.been.calledWith(config.sampler) }) + + it('should erase the trace and stop execution when tracing=false', () => { + const config = { + tracing: false, + stats: { + enabled: false + } + } + + const processor = new SpanProcessor(exporter, prioritySampler, config) + trace.started = [activeSpan] + trace.finished = [finishedSpan] + + processor.process(finishedSpan) + + expect(trace).to.have.deep.property('started', []) + expect(trace).to.have.deep.property('finished', []) + expect(finishedSpan.context()).to.have.deep.property('_tags', {}) + expect(exporter.export).not.to.have.been.called + }) }) From 729ccbe544e25b1f4144bfe4d5ad3d7c4d86f92c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Tue, 13 Feb 2024 13:31:10 +0100 Subject: [PATCH 35/44] [ci-visibility] Add browser name as test configuration in playwright (#4048) --- .../playwright/playwright.spec.js | 5 +- .../src/playwright.js | 50 +++++++++++++++---- .../datadog-plugin-playwright/src/index.js | 12 +++-- packages/dd-trace/src/plugins/util/test.js | 4 ++ 4 files changed, 57 insertions(+), 14 deletions(-) diff --git a/integration-tests/playwright/playwright.spec.js b/integration-tests/playwright/playwright.spec.js index 0f3bc3e11b4..0c09a180b9e 100644 --- a/integration-tests/playwright/playwright.spec.js +++ b/integration-tests/playwright/playwright.spec.js @@ -16,7 +16,8 @@ const { TEST_STATUS, TEST_SOURCE_START, TEST_TYPE, - TEST_SOURCE_FILE + TEST_SOURCE_FILE, + TEST_CONFIGURATION_BROWSER_NAME } = require('../../packages/dd-trace/src/plugins/util/test') const versions = ['1.18.0', 'latest'] @@ -110,6 +111,8 @@ versions.forEach((version) => { // Can read DD_TAGS assert.propertyVal(testEvent.content.meta, 'test.customtag', 'customvalue') assert.propertyVal(testEvent.content.meta, 'test.customtag2', 'customvalue2') + // Adds the browser used + assert.propertyVal(testEvent.content.meta, TEST_CONFIGURATION_BROWSER_NAME, 'chromium') }) stepEvents.forEach(stepEvent => { diff --git a/packages/datadog-instrumentations/src/playwright.js b/packages/datadog-instrumentations/src/playwright.js index a209e228ffb..4093faaf6de 100644 --- a/packages/datadog-instrumentations/src/playwright.js +++ b/packages/datadog-instrumentations/src/playwright.js @@ -73,14 +73,41 @@ function getRootDir (playwrightRunner) { if (playwrightRunner._configDir) { return playwrightRunner._configDir } - if (playwrightRunner._config && playwrightRunner._config.config) { - return playwrightRunner._config.config.rootDir + if (playwrightRunner._config) { + return playwrightRunner._config.config?.rootDir || process.cwd() } return 
process.cwd() } -function testBeginHandler (test) { - const { _requireFile: testSuiteAbsolutePath, title: testName, _type, location: { line: testSourceLine } } = test +function getProjectsFromRunner (runner) { + const config = getPlaywrightConfig(runner) + return config.projects?.map(({ project }) => project) +} + +function getProjectsFromDispatcher (dispatcher) { + const newConfig = dispatcher._config?.config?.projects + if (newConfig) { + return newConfig + } + // old + return dispatcher._loader?.fullConfig()?.projects +} + +function getBrowserNameFromProjects (projects, projectId) { + if (!projects) { + return null + } + return projects.find(project => + project.__projectId === projectId || project._id === projectId + )?.name +} + +function testBeginHandler (test, browserName) { + const { + _requireFile: testSuiteAbsolutePath, + title: testName, _type, + location: { line: testSourceLine } + } = test if (_type === 'beforeAll' || _type === 'afterAll') { return @@ -100,7 +127,7 @@ function testBeginHandler (test) { const testAsyncResource = new AsyncResource('bound-anonymous-fn') testToAr.set(test, testAsyncResource) testAsyncResource.runInAsyncScope(() => { - testStartCh.publish({ testName, testSuiteAbsolutePath, testSourceLine }) + testStartCh.publish({ testName, testSuiteAbsolutePath, testSourceLine, browserName }) }) } @@ -166,11 +193,12 @@ function dispatcherHook (dispatcherExport) { shimmer.wrap(dispatcherExport.Dispatcher.prototype, '_createWorker', createWorker => function () { const dispatcher = this const worker = createWorker.apply(this, arguments) - worker.process.on('message', ({ method, params }) => { if (method === 'testBegin') { const { test } = dispatcher._testById.get(params.testId) - testBeginHandler(test) + const projects = getProjectsFromDispatcher(dispatcher) + const browser = getBrowserNameFromProjects(projects, test._projectId) + testBeginHandler(test, browser) } else if (method === 'testEnd') { const { test } = dispatcher._testById.get(params.testId) @@ -203,7 +231,9 @@ function dispatcherHookNew (dispatcherExport, runWrapper) { worker.on('testBegin', ({ testId }) => { const test = getTestByTestId(dispatcher, testId) - testBeginHandler(test) + const projects = getProjectsFromDispatcher(dispatcher) + const browser = getBrowserNameFromProjects(projects, test._projectId) + testBeginHandler(test, browser) }) worker.on('testEnd', ({ testId, status, errors, annotations }) => { const test = getTestByTestId(dispatcher, testId) @@ -226,6 +256,7 @@ function runnerHook (runnerExport, playwrightVersion) { testSessionAsyncResource.runInAsyncScope(() => { testSessionStartCh.publish({ command, frameworkVersion: playwrightVersion, rootDir }) }) + const projects = getProjectsFromRunner(this) const runAllTestsReturn = await runAllTests.apply(this, arguments) @@ -234,7 +265,8 @@ function runnerHook (runnerExport, playwrightVersion) { // there were tests that did not go through `testBegin` or `testEnd`, // because they were skipped tests.forEach(test => { - testBeginHandler(test) + const browser = getBrowserNameFromProjects(projects, test._projectId) + testBeginHandler(test, browser) testEndHandler(test, [], 'skip') }) }) diff --git a/packages/datadog-plugin-playwright/src/index.js b/packages/datadog-plugin-playwright/src/index.js index 45de3c4afe4..8c8e66c999e 100644 --- a/packages/datadog-plugin-playwright/src/index.js +++ b/packages/datadog-plugin-playwright/src/index.js @@ -10,7 +10,8 @@ const { getTestSuiteCommonTags, TEST_SOURCE_START, TEST_CODE_OWNERS, - TEST_SOURCE_FILE + 
TEST_SOURCE_FILE, + TEST_CONFIGURATION_BROWSER_NAME } = require('../../dd-trace/src/plugins/util/test') const { RESOURCE_NAME } = require('../../../ext/tags') const { COMPONENT } = require('../../dd-trace/src/constants') @@ -77,11 +78,11 @@ class PlaywrightPlugin extends CiPlugin { this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'suite') }) - this.addSub('ci:playwright:test:start', ({ testName, testSuiteAbsolutePath, testSourceLine }) => { + this.addSub('ci:playwright:test:start', ({ testName, testSuiteAbsolutePath, testSourceLine, browserName }) => { const store = storage.getStore() const testSuite = getTestSuitePath(testSuiteAbsolutePath, this.rootDir) const testSourceFile = getTestSuitePath(testSuiteAbsolutePath, this.repositoryRoot) - const span = this.startTestSpan(testName, testSuite, testSourceFile, testSourceLine) + const span = this.startTestSpan(testName, testSuite, testSourceFile, testSourceLine, browserName) this.enter(span, store) }) @@ -128,7 +129,7 @@ class PlaywrightPlugin extends CiPlugin { }) } - startTestSpan (testName, testSuite, testSourceFile, testSourceLine) { + startTestSpan (testName, testSuite, testSourceFile, testSourceLine, browserName) { const testSuiteSpan = this._testSuites.get(testSuite) const extraTags = { @@ -137,6 +138,9 @@ class PlaywrightPlugin extends CiPlugin { if (testSourceFile) { extraTags[TEST_SOURCE_FILE] = testSourceFile || testSuite } + if (browserName) { + extraTags[TEST_CONFIGURATION_BROWSER_NAME] = browserName + } return super.startTestSpan(testName, testSuite, testSuiteSpan, extraTags) } diff --git a/packages/dd-trace/src/plugins/util/test.js b/packages/dd-trace/src/plugins/util/test.js index ba18076bcca..40aef42237f 100644 --- a/packages/dd-trace/src/plugins/util/test.js +++ b/packages/dd-trace/src/plugins/util/test.js @@ -48,6 +48,9 @@ const TEST_MODULE_ID = 'test_module_id' const TEST_SUITE_ID = 'test_suite_id' const TEST_TOOLCHAIN = 'test.toolchain' const TEST_SKIPPED_BY_ITR = 'test.skipped_by_itr' +// Browser used in browser test. 
Namespaced by test.configuration because it affects the fingerprint +const TEST_CONFIGURATION_BROWSER_NAME = 'test.configuration.browser_name' +// Early flake detection const TEST_IS_NEW = 'test.is_new' const TEST_EARLY_FLAKE_IS_RETRY = 'test.early_flake.is_retry' const TEST_EARLY_FLAKE_IS_ENABLED = 'test.early_flake.is_enabled' @@ -90,6 +93,7 @@ module.exports = { JEST_WORKER_COVERAGE_PAYLOAD_CODE, TEST_SOURCE_START, TEST_SKIPPED_BY_ITR, + TEST_CONFIGURATION_BROWSER_NAME, TEST_IS_NEW, TEST_EARLY_FLAKE_IS_RETRY, TEST_EARLY_FLAKE_IS_ENABLED, From b9f6e269b5a2a3749a2f719af43225a1ff9ec4da Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Tue, 13 Feb 2024 17:20:09 +0100 Subject: [PATCH 36/44] [ci-visibility] Fix typo in log.error (#4055) --- .../src/ci-visibility/early-flake-detection/get-known-tests.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/dd-trace/src/ci-visibility/early-flake-detection/get-known-tests.js b/packages/dd-trace/src/ci-visibility/early-flake-detection/get-known-tests.js index e9df9daa04c..144a1c006bd 100644 --- a/packages/dd-trace/src/ci-visibility/early-flake-detection/get-known-tests.js +++ b/packages/dd-trace/src/ci-visibility/early-flake-detection/get-known-tests.js @@ -38,7 +38,7 @@ function getKnownTests ({ } else { const apiKey = process.env.DATADOG_API_KEY || process.env.DD_API_KEY if (!apiKey) { - return done(new Error('Skippable suites were not fetched because Datadog API key is not defined.')) + return done(new Error('Known tests were not fetched because Datadog API key is not defined.')) } options.headers['dd-api-key'] = apiKey From a406a24afdcfba6029735d2b18b01b3ee8e198ea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Wed, 14 Feb 2024 13:19:24 +0100 Subject: [PATCH 37/44] [ci-visibility] Work around jest's `--forceExit` (#4049) --- integration-tests/ci-visibility-intake.js | 23 ++++-- integration-tests/ci-visibility.spec.js | 78 +++++++++++++++++++ integration-tests/config-jest.js | 8 ++ integration-tests/helpers.js | 6 +- packages/datadog-instrumentations/src/jest.js | 30 ++++++- packages/datadog-plugin-jest/src/index.js | 10 ++- .../requests/get-library-configuration.js | 4 +- 7 files changed, 146 insertions(+), 13 deletions(-) create mode 100644 integration-tests/config-jest.js diff --git a/integration-tests/ci-visibility-intake.js b/integration-tests/ci-visibility-intake.js index c2fa3aee0e4..f1d7a332f24 100644 --- a/integration-tests/ci-visibility-intake.js +++ b/integration-tests/ci-visibility-intake.js @@ -35,6 +35,7 @@ let gitUploadStatus = DEFAULT_GIT_UPLOAD_STATUS let infoResponse = DEFAULT_INFO_RESPONSE let correlationId = DEFAULT_CORRELATION_ID let knownTests = DEFAULT_KNOWN_TESTS +let waitingTime = null class FakeCiVisIntake extends FakeAgent { setKnownTests (newKnownTestsResponse) { @@ -61,6 +62,10 @@ class FakeCiVisIntake extends FakeAgent { settings = newSettings } + setWaitingTime (newWaitingTime) { + waitingTime = newWaitingTime + } + async start () { const app = express() app.use(bodyParser.raw({ limit: Infinity, type: 'application/msgpack' })) @@ -83,13 +88,16 @@ class FakeCiVisIntake extends FakeAgent { }) }) + // It can be slowed down with setWaitingTime app.post(['/api/v2/citestcycle', '/evp_proxy/:version/api/v2/citestcycle'], (req, res) => { - res.status(200).send('OK') - this.emit('message', { - headers: req.headers, - payload: msgpack.decode(req.body, { codec }), - url: req.url - }) + this.waitingTimeoutId = 
setTimeout(() => { + res.status(200).send('OK') + this.emit('message', { + headers: req.headers, + payload: msgpack.decode(req.body, { codec }), + url: req.url + }) + }, waitingTime || 0) }) app.post([ @@ -214,6 +222,9 @@ class FakeCiVisIntake extends FakeAgent { gitUploadStatus = DEFAULT_GIT_UPLOAD_STATUS infoResponse = DEFAULT_INFO_RESPONSE this.removeAllListeners() + if (this.waitingTimeoutId) { + clearTimeout(this.waitingTimeoutId) + } return super.stop() } diff --git a/integration-tests/ci-visibility.spec.js b/integration-tests/ci-visibility.spec.js index 8a8765fb0ea..222de08dac2 100644 --- a/integration-tests/ci-visibility.spec.js +++ b/integration-tests/ci-visibility.spec.js @@ -783,6 +783,84 @@ testFrameworks.forEach(({ }) }) }) + it('works with --forceExit and logs a warning', (done) => { + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + assert.include(testOutput, "Jest's '--forceExit' flag has been passed") + const events = payloads.flatMap(({ payload }) => payload.events) + + const testSession = events.find(event => event.type === 'test_session_end') + const testModule = events.find(event => event.type === 'test_module_end') + const testSuites = events.filter(event => event.type === 'test_suite_end') + const tests = events.filter(event => event.type === 'test') + + assert.exists(testSession) + assert.exists(testModule) + assert.equal(testSuites.length, 2) + assert.equal(tests.length, 2) + }) + // Needs to run with the CLI if we want --forceExit to work + childProcess = exec( + 'node ./node_modules/jest/bin/jest --config config-jest.js --forceExit', + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + DD_TRACE_DEBUG: '1', + DD_TRACE_LOG_LEVEL: 'warn' + }, + stdio: 'inherit' + } + ) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + childProcess.stdout.on('data', (chunk) => { + testOutput += chunk.toString() + }) + childProcess.stderr.on('data', (chunk) => { + testOutput += chunk.toString() + }) + }) + it('does not hang if server is not available and logs an error', (done) => { + // Very slow intake + receiver.setWaitingTime(30000) + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + assert.include(testOutput, "Jest's '--forceExit' flag has been passed") + assert.include(testOutput, 'Timeout waiting for the tracer to flush') + const events = payloads.flatMap(({ payload }) => payload.events) + + assert.equal(events.length, 0) + }, 12000) + // Needs to run with the CLI if we want --forceExit to work + childProcess = exec( + 'node ./node_modules/jest/bin/jest --config config-jest.js --forceExit', + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + DD_TRACE_DEBUG: '1', + DD_TRACE_LOG_LEVEL: 'warn' + }, + stdio: 'inherit' + } + ) + childProcess.on('exit', () => { + eventsPromise.then(() => { + receiver.setWaitingTime(0) + done() + }).catch(done) + }) + childProcess.stdout.on('data', (chunk) => { + testOutput += chunk.toString() + }) + childProcess.stderr.on('data', (chunk) => { + testOutput += chunk.toString() + }) + }) } it('can run tests and report spans', (done) => { diff --git a/integration-tests/config-jest.js b/integration-tests/config-jest.js new file mode 100644 index 00000000000..a7cfdc15765 --- /dev/null +++ b/integration-tests/config-jest.js @@ -0,0 +1,8 @@ +module.exports = { + projects: [__dirname], + testPathIgnorePatterns: ['/node_modules/'], + 
cache: false, + testMatch: [ + '**/ci-visibility/test/ci-visibility-test*' + ] +} diff --git a/integration-tests/helpers.js b/integration-tests/helpers.js index c6a51e57f04..37838e774e4 100644 --- a/integration-tests/helpers.js +++ b/integration-tests/helpers.js @@ -280,7 +280,8 @@ function getCiVisAgentlessConfig (port) { DD_API_KEY: '1', DD_CIVISIBILITY_AGENTLESS_ENABLED: 1, DD_CIVISIBILITY_AGENTLESS_URL: `http://127.0.0.1:${port}`, - NODE_OPTIONS: '-r dd-trace/ci/init' + NODE_OPTIONS: '-r dd-trace/ci/init', + DD_INSTRUMENTATION_TELEMETRY_ENABLED: 'false' } } @@ -291,7 +292,8 @@ function getCiVisEvpProxyConfig (port) { ...rest, DD_TRACE_AGENT_PORT: port, NODE_OPTIONS: '-r dd-trace/ci/init', - DD_CIVISIBILITY_AGENTLESS_ENABLED: '0' + DD_CIVISIBILITY_AGENTLESS_ENABLED: '0', + DD_INSTRUMENTATION_TELEMETRY_ENABLED: 'false' } } diff --git a/packages/datadog-instrumentations/src/jest.js b/packages/datadog-instrumentations/src/jest.js index 2d67aabb17c..30787ff3398 100644 --- a/packages/datadog-instrumentations/src/jest.js +++ b/packages/datadog-instrumentations/src/jest.js @@ -42,6 +42,9 @@ const knownTestsCh = channel('ci:jest:known-tests') const itrSkippedSuitesCh = channel('ci:jest:itr:skipped-suites') +// Maximum time we'll wait for the tracer to flush +const FLUSH_TIMEOUT = 10000 + let skippableSuites = [] let knownTests = [] let isCodeCoverageEnabled = false @@ -415,6 +418,21 @@ function cliWrapper (cli, jestVersion) { status = 'fail' error = new Error(`Failed test suites: ${numFailedTestSuites}. Failed tests: ${numFailedTests}`) } + let timeoutId + + // Pass the resolve callback to defer it to DC listener + const flushPromise = new Promise((resolve) => { + onDone = () => { + clearTimeout(timeoutId) + resolve() + } + }) + + const timeoutPromise = new Promise((resolve) => { + timeoutId = setTimeout(() => { + resolve('timeout') + }, FLUSH_TIMEOUT).unref() + }) sessionAsyncResource.runInAsyncScope(() => { testSessionFinishCh.publish({ @@ -427,9 +445,15 @@ function cliWrapper (cli, jestVersion) { hasUnskippableSuites, hasForcedToRunSuites, error, - isEarlyFlakeDetectionEnabled + isEarlyFlakeDetectionEnabled, + onDone }) }) + const waitingResult = await Promise.race([flushPromise, timeoutPromise]) + + if (waitingResult === 'timeout') { + log.error('Timeout waiting for the tracer to flush') + } numSkippedSuites = 0 @@ -548,6 +572,10 @@ function configureTestEnvironment (readConfigsResult) { isUserCodeCoverageEnabled = !!readConfigsResult.globalConfig.collectCoverage + if (readConfigsResult.globalConfig.forceExit) { + log.warn("Jest's '--forceExit' flag has been passed. 
This may cause loss of data.") + } + if (isCodeCoverageEnabled) { const globalConfig = { ...readConfigsResult.globalConfig, diff --git a/packages/datadog-plugin-jest/src/index.js b/packages/datadog-plugin-jest/src/index.js index fb4768973e6..a227b5a78ae 100644 --- a/packages/datadog-plugin-jest/src/index.js +++ b/packages/datadog-plugin-jest/src/index.js @@ -87,7 +87,8 @@ class JestPlugin extends CiPlugin { hasUnskippableSuites, hasForcedToRunSuites, error, - isEarlyFlakeDetectionEnabled + isEarlyFlakeDetectionEnabled, + onDone }) => { this.testSessionSpan.setTag(TEST_STATUS, status) this.testModuleSpan.setTag(TEST_STATUS, status) @@ -121,7 +122,12 @@ class JestPlugin extends CiPlugin { this.testSessionSpan.finish() this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'session') finishAllTraceSpans(this.testSessionSpan) - this.tracer._exporter.flush() + + this.tracer._exporter.flush(() => { + if (onDone) { + onDone() + } + }) }) // Test suites can be run in a different process from jest's main one. diff --git a/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js b/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js index 9d722ea3887..2653410a538 100644 --- a/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js +++ b/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js @@ -11,7 +11,7 @@ const { getErrorTypeFromStatusCode } = require('../telemetry') -const DEFAULT_NUM_RETRIES_EARLY_FLAKE_DETECTION = 2 +const DEFAULT_EARLY_FLAKE_DETECTION_NUM_RETRIES = 2 function getLibraryConfiguration ({ url, @@ -104,7 +104,7 @@ function getLibraryConfiguration ({ requireGit, isEarlyFlakeDetectionEnabled: earlyFlakeDetectionConfig?.enabled ?? false, earlyFlakeDetectionNumRetries: - earlyFlakeDetectionConfig?.slow_test_retries?.['5s'] || DEFAULT_NUM_RETRIES_EARLY_FLAKE_DETECTION + earlyFlakeDetectionConfig?.slow_test_retries?.['5s'] || DEFAULT_EARLY_FLAKE_DETECTION_NUM_RETRIES } log.debug(() => `Remote settings: ${JSON.stringify(settings)}`) From a3d44af243360575a0d76cd811fa14f8aeacd564 Mon Sep 17 00:00:00 2001 From: Ugaitz Urien Date: Wed, 14 Feb 2024 15:06:50 +0100 Subject: [PATCH 38/44] Fix child_process test in node <16 (#4059) --- .../test/child_process.spec.js | 24 +++++- .../test/index.spec.js | 86 ++++++++++--------- 2 files changed, 68 insertions(+), 42 deletions(-) diff --git a/packages/datadog-instrumentations/test/child_process.spec.js b/packages/datadog-instrumentations/test/child_process.spec.js index c4ab71dbde5..f21c956affc 100644 --- a/packages/datadog-instrumentations/test/child_process.spec.js +++ b/packages/datadog-instrumentations/test/child_process.spec.js @@ -3,6 +3,7 @@ const { promisify } = require('util') const agent = require('../../dd-trace/test/plugins/agent') const dc = require('dc-polyfill') +const { NODE_MAJOR } = require('../../../version') describe('child process', () => { const modules = ['child_process', 'node:child_process'] @@ -303,18 +304,37 @@ describe('child process', () => { it('should execute error callback with `exit 1` command', () => { let childError try { - childProcess[methodName]('node -e "process.exit(1)"', { shell: true }) + childProcess[methodName]('node -e "process.exit(1)"') } catch (error) { childError = error } finally { expect(start).to.have.been.calledOnceWith({ command: 'node -e "process.exit(1)"', - shell: true, + shell: false, error: childError }) expect(finish).to.have.been.calledOnce } }) + if (methodName !== 'execFileSync' || NODE_MAJOR > 16) { + // when a 
process return an invalid code, in node <=16, in execFileSync with shell:true + // an exception is not thrown + it('should execute error callback with `exit 1` command with shell: true', () => { + let childError + try { + childProcess[methodName]('node -e "process.exit(1)"', { shell: true }) + } catch (error) { + childError = error + } finally { + expect(start).to.have.been.calledOnceWith({ + command: 'node -e "process.exit(1)"', + shell: true, + error: childError + }) + expect(finish).to.have.been.calledOnce + } + }) + } }) }) }) diff --git a/packages/datadog-plugin-child_process/test/index.spec.js b/packages/datadog-plugin-child_process/test/index.spec.js index 1f56fe26538..b836d206d5e 100644 --- a/packages/datadog-plugin-child_process/test/index.spec.js +++ b/packages/datadog-plugin-child_process/test/index.spec.js @@ -4,6 +4,7 @@ const ChildProcessPlugin = require('../src') const { storage } = require('../../datadog-core') const agent = require('../../dd-trace/test/plugins/agent') const { expectSomeSpan } = require('../../dd-trace/test/plugins/helpers') +const { NODE_MAJOR } = require('../../../version') function noop () {} @@ -519,53 +520,58 @@ describe('Child process plugin', () => { } }) - it('should be instrumented with error code (override shell default behavior)', (done) => { - const command = [ 'node', '-badOption' ] - const options = { - stdio: 'pipe', - shell: true - } - const errorExpected = { - type: 'system', - name: 'command_execution', - error: 1, - meta: { - component: 'subprocess', - 'cmd.shell': 'node -badOption', - 'cmd.exit_code': '9' + if (methodName !== 'execFileSync' || NODE_MAJOR > 16) { + // when a process return an invalid code, in node <=16, in execFileSync with shell:true + // an exception is not thrown + it('should be instrumented with error code (override shell default behavior)', (done) => { + const command = [ 'node', '-badOption' ] + const options = { + stdio: 'pipe', + shell: true } - } - const noErrorExpected = { - type: 'system', - name: 'command_execution', - error: 0, - meta: { - component: 'subprocess', - 'cmd.shell': 'node -badOption', - 'cmd.exit_code': '9' + const errorExpected = { + type: 'system', + name: 'command_execution', + error: 1, + meta: { + component: 'subprocess', + 'cmd.shell': 'node -badOption', + 'cmd.exit_code': '9' + } } - } - const args = normalizeArgs(methodName, command, options) + const noErrorExpected = { + type: 'system', + name: 'command_execution', + error: 0, + meta: { + component: 'subprocess', + 'cmd.shell': 'node -badOption', + 'cmd.exit_code': '9' + } + } - if (async) { - expectSomeSpan(agent, errorExpected).then(done, done) - const res = childProcess[methodName].apply(null, args) - res.on('close', noop) - } else { - try { - if (methodName === 'spawnSync') { - expectSomeSpan(agent, noErrorExpected).then(done, done) - } else { - expectSomeSpan(agent, errorExpected).then(done, done) + const args = normalizeArgs(methodName, command, options) + + if (async) { + expectSomeSpan(agent, errorExpected).then(done, done) + const res = childProcess[methodName].apply(null, args) + res.on('close', noop) + } else { + try { + if (methodName === 'spawnSync') { + expectSomeSpan(agent, noErrorExpected).then(done, done) + } else { + expectSomeSpan(agent, errorExpected).then(done, done) + } + childProcess[methodName].apply(null, args) + } catch { + // process exit with code 1, exceptions are expected } - childProcess[methodName].apply(null, args) - } catch { - // process exit with code 1, exceptions are expected } - } - }) + }) + } }) 
}) }) From 2aaf8e00da5c9d8b54f0921306213c063c93ab72 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Wed, 14 Feb 2024 18:06:25 +0100 Subject: [PATCH 39/44] [ci-visibility] Fix ci visibility tests (#4062) --- integration-tests/ci-visibility-intake.js | 3 ++- integration-tests/ci-visibility.spec.js | 25 ++++++++++++----------- 2 files changed, 15 insertions(+), 13 deletions(-) diff --git a/integration-tests/ci-visibility-intake.js b/integration-tests/ci-visibility-intake.js index f1d7a332f24..2674579541b 100644 --- a/integration-tests/ci-visibility-intake.js +++ b/integration-tests/ci-visibility-intake.js @@ -35,7 +35,7 @@ let gitUploadStatus = DEFAULT_GIT_UPLOAD_STATUS let infoResponse = DEFAULT_INFO_RESPONSE let correlationId = DEFAULT_CORRELATION_ID let knownTests = DEFAULT_KNOWN_TESTS -let waitingTime = null +let waitingTime = 0 class FakeCiVisIntake extends FakeAgent { setKnownTests (newKnownTestsResponse) { @@ -225,6 +225,7 @@ class FakeCiVisIntake extends FakeAgent { if (this.waitingTimeoutId) { clearTimeout(this.waitingTimeoutId) } + waitingTime = 0 return super.stop() } diff --git a/integration-tests/ci-visibility.spec.js b/integration-tests/ci-visibility.spec.js index 222de08dac2..ed2770fafba 100644 --- a/integration-tests/ci-visibility.spec.js +++ b/integration-tests/ci-visibility.spec.js @@ -827,14 +827,6 @@ testFrameworks.forEach(({ it('does not hang if server is not available and logs an error', (done) => { // Very slow intake receiver.setWaitingTime(30000) - const eventsPromise = receiver - .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { - assert.include(testOutput, "Jest's '--forceExit' flag has been passed") - assert.include(testOutput, 'Timeout waiting for the tracer to flush') - const events = payloads.flatMap(({ payload }) => payload.events) - - assert.equal(events.length, 0) - }, 12000) // Needs to run with the CLI if we want --forceExit to work childProcess = exec( 'node ./node_modules/jest/bin/jest --config config-jest.js --forceExit', @@ -848,11 +840,20 @@ testFrameworks.forEach(({ stdio: 'inherit' } ) + const EXPECTED_FORCE_EXIT_LOG_MESSAGE = "Jest's '--forceExit' flag has been passed" + const EXPECTED_TIMEOUT_LOG_MESSAGE = 'Timeout waiting for the tracer to flush' childProcess.on('exit', () => { - eventsPromise.then(() => { - receiver.setWaitingTime(0) - done() - }).catch(done) + assert.include( + testOutput, + EXPECTED_FORCE_EXIT_LOG_MESSAGE, + `"${EXPECTED_FORCE_EXIT_LOG_MESSAGE}" log message is not in test output: ${testOutput}` + ) + assert.include( + testOutput, + EXPECTED_TIMEOUT_LOG_MESSAGE, + `"${EXPECTED_TIMEOUT_LOG_MESSAGE}" log message is not in the test output: ${testOutput}` + ) + done() }) childProcess.stdout.on('data', (chunk) => { testOutput += chunk.toString() From b8797d2a974784d6d258f34045a98b14725c00e9 Mon Sep 17 00:00:00 2001 From: Ugaitz Urien Date: Wed, 14 Feb 2024 18:08:57 +0100 Subject: [PATCH 40/44] Reduce dogstatsd flakyness (#4061) --- packages/dd-trace/test/dogstatsd.spec.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/dd-trace/test/dogstatsd.spec.js b/packages/dd-trace/test/dogstatsd.spec.js index 2054bbaa29e..f210fd17d3a 100644 --- a/packages/dd-trace/test/dogstatsd.spec.js +++ b/packages/dd-trace/test/dogstatsd.spec.js @@ -322,7 +322,7 @@ describe('dogstatsd', () => { }) it('should fail over to UDP when receiving network error from agent', (done) => { - setTimeout(() => { + udp4.send = sinon.stub().callsFake(() => 
{ try { expect(udp4.send).to.have.been.called expect(udp4.send.firstCall.args[0].toString()).to.equal('test.foo:10|c\n') @@ -331,7 +331,7 @@ describe('dogstatsd', () => { } catch (e) { done(e) } - }, 10) + }) statusCode = null From 0a1d9a3e3dec336079bcba069000dc515adf281e Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Wed, 14 Feb 2024 12:17:14 -0800 Subject: [PATCH 41/44] v3.x: disable serverless performance test w/ node.js v14 --- .github/workflows/serverless-performance.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/workflows/serverless-performance.yml b/.github/workflows/serverless-performance.yml index 34dea3a4205..eabf4b9f360 100644 --- a/.github/workflows/serverless-performance.yml +++ b/.github/workflows/serverless-performance.yml @@ -10,8 +10,6 @@ jobs: max-parallel: 4 matrix: include: - - node-version: 14.15 - aws-runtime-name: "nodejs14.x" - node-version: 16.14 aws-runtime-name: "nodejs16.x" - node-version: 18.12 @@ -49,4 +47,4 @@ jobs: AWS_ACCESS_KEY_ID: ${{ secrets.SERVERLESS_AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.SERVERLESS_AWS_SECRET_ACCESS_KEY }} AWS_REGION: sa-east-1 - DD_API_KEY: ${{ secrets.SERVERLESS_DD_API_KEY }} \ No newline at end of file + DD_API_KEY: ${{ secrets.SERVERLESS_DD_API_KEY }} From 909a182bfb957d1c4b70d08017ff369e95be3c72 Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Wed, 14 Feb 2024 12:27:55 -0800 Subject: [PATCH 42/44] fix node v14 compat for #4041 contribution (#4064) --- packages/dd-trace/test/datastreams/writer.spec.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/dd-trace/test/datastreams/writer.spec.js b/packages/dd-trace/test/datastreams/writer.spec.js index bf8cd941a14..4cfd4dd87e6 100644 --- a/packages/dd-trace/test/datastreams/writer.spec.js +++ b/packages/dd-trace/test/datastreams/writer.spec.js @@ -33,7 +33,7 @@ describe('DataStreamWriter unix', () => { it("should call 'request' through flush with correct options", () => { writer = new DataStreamsWriter(unixConfig) writer.flush({}) - const stubRequestCall = stubRequest.getCalls().at(0) + const stubRequestCall = stubRequest.getCalls()[0] const decodedPayload = msgpack.decode(stubRequestCall?.args[0], { codec }) const requestOptions = stubRequestCall?.args[1] expect(decodedPayload).to.deep.equal({}) From e491d9c4a5a8a084c7e32b2c32617a7e7132b32e Mon Sep 17 00:00:00 2001 From: Ida Liu <119438987+ida613@users.noreply.github.com> Date: Wed, 14 Feb 2024 16:43:34 -0500 Subject: [PATCH 43/44] APM uninstrumentation: small fix to preserve original behaviour (#4057) --- packages/dd-trace/src/proxy.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/dd-trace/src/proxy.js b/packages/dd-trace/src/proxy.js index 3228145a67d..57b0aca3691 100644 --- a/packages/dd-trace/src/proxy.js +++ b/packages/dd-trace/src/proxy.js @@ -117,10 +117,11 @@ class Tracer extends NoopProxy { if (config.iast.enabled) { require('./appsec/iast').enable(config, this._tracer) } - } else { + } else if (this._tracingInitialized) { require('./appsec').disable() require('./appsec/iast').disable() } + if (this._tracingInitialized) { this._tracer.configure(config) this._pluginManager.configure(config) From 838444bc7416a016b0def285af388945bc234244 Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Mon, 12 Feb 2024 14:17:15 -0800 Subject: [PATCH 44/44] v3.48.0 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 6181b586d23..1ee8ce19f31 100644 --- 
a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "dd-trace", - "version": "3.47.0", + "version": "3.48.0", "description": "Datadog APM tracing client for JavaScript", "main": "index.js", "typings": "index.d.ts",